diff --git a/chimney/src/main/scala/io/scalaland/chimney/PartialTransformer.scala b/chimney/src/main/scala/io/scalaland/chimney/PartialTransformer.scala
index c6b770999..c4dd51910 100644
--- a/chimney/src/main/scala/io/scalaland/chimney/PartialTransformer.scala
+++ b/chimney/src/main/scala/io/scalaland/chimney/PartialTransformer.scala
@@ -6,6 +6,12 @@ import io.scalaland.chimney.internal.runtime.{TransformerFlags, TransformerOverr
 /** Type class expressing partial transformation between source type `From` and target type `To`, with the ability of
   *  reporting path-annotated transformation error(s).
   *
+  * @note
+  *   You should not need to instantiate this class manually if you can derive it - take a look at the
+  *   [[io.scalaland.chimney.PartialTransformer.derive]] and [[io.scalaland.chimney.PartialTransformer.define]] methods
+  *   for that. Manual instantiation is only necessary if you want to add support for a transformation that is not
+  *   supported out of the box. Even then, consult [[https://chimney.readthedocs.io/cookbook/#integrations]] first!
+  *
   * @see
   *   [[https://chimney.readthedocs.io/supported-transformations/]]
   * @see
@@ -177,7 +183,7 @@ object PartialTransformer extends PartialTransformerCompanionPlatform {
 // extended by PartialTransformerCompanionPlatform
 private[chimney] trait PartialTransformerLowPriorityImplicits1 { this: PartialTransformer.type =>
 
-  /** Extracts [[io.scalaland.chimney.PartialTransformer]] from existing [[io.scalaland.chimney.Codec#decode]].
+  /** Extracts [[io.scalaland.chimney.PartialTransformer]] from existing [[io.scalaland.chimney.Codec.decode]].
     *
     * @tparam Domain
     *   type of domain value
diff --git a/chimney/src/main/scala/io/scalaland/chimney/Patcher.scala b/chimney/src/main/scala/io/scalaland/chimney/Patcher.scala
index f7bad49d8..1e3f025d9 100644
--- a/chimney/src/main/scala/io/scalaland/chimney/Patcher.scala
+++ b/chimney/src/main/scala/io/scalaland/chimney/Patcher.scala
@@ -4,6 +4,12 @@ import io.scalaland.chimney.dsl.{PatcherDefinition, PatcherDefinitionCommons}
 import io.scalaland.chimney.internal.runtime.{PatcherFlags, PatcherOverrides}
 
 /** Type class definition that wraps patching behavior.
+  *
+  * @note
+  *   You should not need to instantiate this class manually if you can derive it - take a look at the
+  *   [[io.scalaland.chimney.Patcher.derive]] and [[io.scalaland.chimney.Patcher.define]] methods for that. Manual
+  *   instantiation is only necessary if you want to add support for a transformation that is not supported out of the
+  *   box. Even then, consult [[https://chimney.readthedocs.io/cookbook/#integrations]] first!
   *
   * @see
   *   [[https://chimney.readthedocs.io/supported-patching/]]
diff --git a/chimney/src/main/scala/io/scalaland/chimney/Transformer.scala b/chimney/src/main/scala/io/scalaland/chimney/Transformer.scala
index 5604bdf24..52513b15e 100644
--- a/chimney/src/main/scala/io/scalaland/chimney/Transformer.scala
+++ b/chimney/src/main/scala/io/scalaland/chimney/Transformer.scala
@@ -4,6 +4,12 @@ import io.scalaland.chimney.dsl.{PartialTransformerDefinition, TransformerDefini
 import io.scalaland.chimney.internal.runtime.{TransformerFlags, TransformerOverrides}
 
 /** Type class expressing total transformation between source type `From` and target type `To`.
+  *
+  * @note
+  *   You should not need to instantiate this class manually if you can derive it - take a look at the
+  *   [[io.scalaland.chimney.Transformer.derive]] and [[io.scalaland.chimney.Transformer.define]] methods for that.
+  *   Manual instantiation is only necessary if you want to add support for a transformation that is not supported out
+  *   of the box. Even then, consult [[https://chimney.readthedocs.io/cookbook/#integrations]] first!
   *
   * @see
   *   [[https://chimney.readthedocs.io/supported-transformations/]]
@@ -107,7 +113,7 @@ object Transformer extends TransformerCompanionPlatform {
 private[chimney] trait TransformerLowPriorityImplicits1 extends TransformerLowPriorityImplicits2 { this: Transformer.type =>
 
-  /** Extracts [[io.scalaland.chimney.Transformer]] from existing [[io.scalaland.chimney.Iso#left]].
+  /** Extracts [[io.scalaland.chimney.Transformer]] from existing [[io.scalaland.chimney.Iso.first]].
     *
     * @tparam First
     *   input type of the first conversion, output type of the second conversion
@@ -122,7 +128,7 @@ private[chimney] trait TransformerLowPriorityImplicits1 extends TransformerLowPr
 private[chimney] trait TransformerLowPriorityImplicits2 extends TransformerLowPriorityImplicits3 { this: Transformer.type =>
 
-  /** Extracts [[io.scalaland.chimney.Transformer]] from existing [[io.scalaland.chimney.Iso#right]].
+  /** Extracts [[io.scalaland.chimney.Transformer]] from existing [[io.scalaland.chimney.Iso.second]].
     *
     * @tparam First
     *   input type of the first conversion, output type of the second conversion
@@ -136,7 +142,7 @@ private[chimney] trait TransformerLowPriorityImplicits2 extends TransformerLowPr
 }
 
 private[chimney] trait TransformerLowPriorityImplicits3 { this: Transformer.type =>
 
-  /** Extracts [[io.scalaland.chimney.Transformer]] from existing [[io.scalaland.chimney.Codec#encode]].
+  /** Extracts [[io.scalaland.chimney.Transformer]] from existing [[io.scalaland.chimney.Codec.encode]].
     *
     * @tparam Domain
     *   type of domain value
diff --git a/docs/docs/cookbook.md b/docs/docs/cookbook.md
index d23ca8432..6164cb470 100644
--- a/docs/docs/cookbook.md
+++ b/docs/docs/cookbook.md
@@ -261,7 +261,7 @@ These can be enabled with `UnusedFieldPolicy`:
     // User2(id = 1, name = "Adam")
 
     locally {
-      // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3)
+      // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!).
       implicit val cfg = TransformerConfiguration.default.enableUnusedFieldPolicyCheck(FailOnIgnoredSourceVal)
 
       pprint.pprintln(
@@ -345,7 +345,7 @@ and `UnmatchedSubtypePolicy`:
     // Green
 
     locally {
-      // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3)
+      // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!).
       implicit val cfg = TransformerConfiguration.default.enableUnmatchedSubtypePolicyCheck(FailOnUnmatchedTargetSubtype)
 
       pprint.pprintln(
@@ -2948,6 +2948,111 @@ fields from the patching:
     // Foo(a = "a", b = "d")
     ```
 
+## Patching optional field with value decoded from JSON
+
+JSON cannot represent nested optional values - since there is no wrapper like `Some`, there is no way to express the
+difference between `Some(None)` and `None` using built-in JSON semantics. If during a `POST` request one wants `Some`
+values to always mean **update**, and `None` values to always mean *keep the old value* **or** always mean *clear the
+value* (when the modified value is an `Option` as well), this is enough.
+
+The problem arises when one wants to express 3 possible outcomes for modifying an `Option` value: *update value*/*keep old*/*clear value*.
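+
+For instance, assuming a hypothetical `field: Option[Option[String]]`, there is nothing in plain JSON - other than
+`null` or omitting the field - that could encode the "empty" cases, so most codecs in their default configuration
+(including the circe setup used below) will decode both of these payloads to the same `None`, making *keep old* and
+*clear value* indistinguishable:
+
+```json
+{ "field": null }
+{ }
+```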
+
+The only solution in such a case is to somehow express the 3 possible outcomes in the API without resorting to nested
+`Option`s. As long as that can be done, the type can be converted into nested `Option`s, which have unambiguous semantics:
+
+!!! example
+
+    ```scala
+    //> using dep io.scalaland::chimney::{{ chimney_version() }}
+    //> using dep com.lihaoyi::pprint::{{ libraries.pprint }}
+    //> using dep io.circe::circe-generic-extras::0.14.4
+    //> using dep io.circe::circe-parser::0.14.10
+    import io.circe.{Encoder, Decoder}
+    import io.circe.generic.extras.Configuration
+    import io.circe.generic.extras.auto._
+    import io.circe.generic.extras.semiauto._
+    import io.circe.parser.decode
+    import io.circe.syntax._
+
+    // An example of representing set/clear/keep operations in a way that cooperates with JSON.
+    sealed trait OptionalUpdate[+A] extends Product with Serializable {
+
+      def toOption: Option[Option[A]] = this match {
+        case OptionalUpdate.Set(value) => Some(Some(value))
+        case OptionalUpdate.Clear      => Some(None)
+        case OptionalUpdate.Keep       => None
+      }
+    }
+    object OptionalUpdate {
+
+      case class Set[A](value: A) extends OptionalUpdate[A]
+      case object Clear extends OptionalUpdate[Nothing]
+      case object Keep extends OptionalUpdate[Nothing]
+
+      private implicit val customConfig: Configuration =
+        Configuration.default
+          .withDiscriminator("action")
+          .withSnakeCaseConstructorNames
+
+      implicit def encoder[A: Encoder]: Encoder[OptionalUpdate[A]] =
+        deriveConfiguredEncoder
+      implicit def decoder[A: Decoder]: Decoder[OptionalUpdate[A]] =
+        deriveConfiguredDecoder
+    }
+
+    case class Foo(field: Option[String], anotherField: String)
+
+    case class FooUpdate(field: OptionalUpdate[String])
+    object FooUpdate {
+
+      private implicit val customConfig: Configuration = Configuration.default
+      implicit val encoder: Encoder[FooUpdate] = deriveConfiguredEncoder
+      implicit val decoder: Decoder[FooUpdate] = deriveConfiguredDecoder
+    }
+
+    import io.scalaland.chimney.Patcher
+    import io.scalaland.chimney.dsl._
+
+    // This utility automatically handles Option patching with OptionalUpdate values.
+    implicit def patchWithOptionalUpdate[A](implicit
+        inner: Patcher.AutoDerived[Option[A], Option[Option[A]]]
+    ): Patcher[Option[A], OptionalUpdate[A]] = (obj, patch) =>
+      obj.patchUsing(patch.toOption)
+
+    pprint.pprintln(
+      decode[FooUpdate](
+        """{ "field": { "action": "set", "value": "new-value" } }"""
+      ) match {
+        case Left(error)  => println(error)
+        case Right(patch) => Foo(Some("old-value"), "another-value").patchUsing(patch)
+      }
+    )
+    // expected output:
+    // Foo(field = Some(value = "new-value"), anotherField = "another-value")
+    pprint.pprintln(
+      decode[FooUpdate](
+        """{ "field": { "action": "clear" } }"""
+      ) match {
+        case Left(error)  => println(error)
+        case Right(patch) => Foo(Some("old-value"), "another-value").patchUsing(patch)
+      }
+    )
+    // expected output:
+    // Foo(field = None, anotherField = "another-value")
+    pprint.pprintln(
+      decode[FooUpdate](
+        """{ "field": { "action": "keep" } }"""
+      ) match {
+        case Left(error)  => println(error)
+        case Right(patch) => Foo(Some("old-value"), "another-value").patchUsing(patch)
+      }
+    )
+    // expected output:
+    // Foo(field = Some(value = "old-value"), anotherField = "another-value")
+    ```
+
+If we cannot modify our API, we have to [choose one semantics for `None` values](supported-patching.md#treating-none-as-no-update-instead-of-set-to-none) - a minimal sketch of that fallback is shown below.
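+
+For illustration only (the `FooPatch` name below is made up for this sketch; `Foo` is the same as above), picking the
+"`None` means keep the old value" semantics boils down to enabling a single flag on the derived `Patcher`:
+
+!!! example
+
+    ```scala
+    //> using dep io.scalaland::chimney::{{ chimney_version() }}
+    import io.scalaland.chimney.Patcher
+    import io.scalaland.chimney.dsl._
+
+    case class Foo(field: Option[String], anotherField: String)
+    // Hypothetical patch type reusing plain Option - None can now only mean "keep the old value".
+    case class FooPatch(field: Option[String])
+
+    val keepOnNone: Patcher[Foo, FooPatch] =
+      Patcher.define[Foo, FooPatch].ignoreNoneInPatch.buildPatcher
+
+    keepOnNone.patch(Foo(Some("old-value"), "another-value"), FooPatch(None))
+    // == Foo(Some("old-value"), "another-value")
+    keepOnNone.patch(Foo(Some("old-value"), "another-value"), FooPatch(Some("new-value")))
+    // == Foo(Some("new-value"), "another-value")
+    ```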
+
 
 ## Mixing Scala 2.13 and Scala 3 types
 
 [Scala 2.13 project can use Scala 3 artifacts and vice versa](https://docs.scala-lang.org/scala3/guides/migration/compatibility-classpath.html).
diff --git a/docs/docs/quickstart.md b/docs/docs/quickstart.md
index 020786969..569ace23c 100644
--- a/docs/docs/quickstart.md
+++ b/docs/docs/quickstart.md
@@ -55,11 +55,22 @@ case class ApiUser(name: String, surname: String)
 val userID: UUID = ...
 val user: User = ...
 
-// Use .transformInto[Type], when don't need to customize anything...
+// Use .transformInto[Type] when you don't need to customize anything...:
 val apiUser: ApiUser = user.transformInto[ApiUser]
 
-// ...and .into[Type].customization.transform when you do.
+// ...and .into[Type].customization.transform when you do:
 val user2: User = apiUser.into[User].withFieldConst(_.id, userID).transform
+
+// If you want to reuse some Transformer (and don't want to write it by hand),
+// you can generate it with .derive:
+implicit val transformer: Transformer[ApiUser, User] = Transformer.derive[ApiUser, User]
+
+// ...or with .define.customization.buildTransformer:
+implicit val transformerWithOverrides: Transformer[User, ApiUser] = Transformer.define[User, ApiUser]
+  .withFieldConst(_.id, userID)
+  .buildTransformer
+
+// It works the same way with PartialTransformers and Patchers.
 ```
 
 Chimney will take care of generating the boring transformation code, and if it finds something non-obvious, it will give
@@ -78,7 +89,7 @@ apiUser.transformInto[User]
 But don't you worry! Usually Chimney only needs your help if there is no field in the source value with a matching name
 or whe the targeted type has a private constructor. Out of the box, it supports:
 
-  * conversions [between `case class`es](supported-transformations.md#into-a-case-class)
+  * conversions [between `case class`es](supported-transformations.md#into-a-case-class-or-pojo)
     * actually, a conversion between *any* `class` and *another `class` with a public constructor*
     * with [an opt-in support for Java Beans](supported-transformations.md#reading-from-bean-getters)
   * conversions [between `sealed trait`s, Scala 3 `enum`s, Java `enum`s](supported-transformations.md#between-sealedenums)
diff --git a/docs/docs/supported-patching.md b/docs/docs/supported-patching.md
index 552712015..b328ab544 100644
--- a/docs/docs/supported-patching.md
+++ b/docs/docs/supported-patching.md
@@ -28,6 +28,14 @@ Currently, the only supported case is updating one `case class` with another:
     )
     // expected output:
     // User(id = 10, email = Email(address = "xyz@@domain.com"), phone = Phone(number = 123123123L))
+
+    import io.scalaland.chimney.Patcher
+
+    // If we want to reuse Patcher, we can create implicits using:
+    val patcher: Patcher[User, UserUpdateForm] = Patcher.derive[User, UserUpdateForm]
+    // or (if you want to pass overrides):
+    val patcher2: Patcher[User, UserUpdateForm] = Patcher.define[User, UserUpdateForm]
+      .buildPatcher
     ```
 
 As we see the values from the "patch" aren't always of the same type as the values they are supposed to update.
@@ -61,6 +69,13 @@ we can do it using `.withFieldConst` (just like with `Transformer`s): ) // expected output: // User(id = 20, email = Email(address = "xyz@@domain.com"), phone = Phone(number = 123123123L)) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[User, UserUpdateForm] = Patcher.define[User, UserUpdateForm] + .withFieldConst(_.id, 20) + .buildPatcher ``` ### Updating field with a computed value @@ -113,6 +128,13 @@ we can do it using `.withFieldComputed` (just like with `Transformer`s) or `.wit // phone = Phone(number = 123123123L) // ) // ) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[Wrapper[User], Wrapper[UserUpdateForm]] = Patcher.define[Wrapper[User], Wrapper[UserUpdateForm]] + .withFieldComputed(_.value.id, patch => patch.value.phone.toInt) + .buildPatcher ``` ### Ignoring fields in patches @@ -175,6 +197,13 @@ But there is a way to ignore redundant patcher fields explicitly with `.ignoreRe // expected output: // User(id = 10, email = "xyz@@domain.com", phone = 123123123L) } + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[User, UserUpdateForm] = Patcher.define[User, UserUpdateForm] + .ignoreRedundantPatcherFields + .buildPatcher ``` Patching succeeded using only relevant fields that appear in the patched object and ignoring address: `String` field @@ -262,6 +291,14 @@ It is possible to update values containing `AnyVal`s: ) // expected output: // Foo(value = Wrapper(str = "bbb")) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[Foo[String], Bar[Wrapper]] = Patcher.derive[Foo[String], Bar[Wrapper]] + // or (if you want to pass overrides): + val patcher2: Patcher[Foo[String], Bar[Wrapper]] = Patcher.define[Foo[String], Bar[Wrapper]] + .buildPatcher ``` ## Updating value with `Option` @@ -290,6 +327,14 @@ Let’s consider the following patch: ) // expected output: // User(id = 10, email = "updated@@example.com", phone = 1234567890L) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[User, UserPatch] = Patcher.derive[User, UserPatch] + // or (if you want to pass overrides): + val patcher2: Patcher[User, UserPatch] = Patcher.define[User, UserPatch] + .buildPatcher ``` The field `phone` remained the same as in the original `user`, while the optional e-mail string got updated from @@ -367,6 +412,13 @@ but it also gives a simple way to always ignore `None` from the patch with `.ign // expected output: // User(name = Some(value = "John"), age = Some(value = 30)) } + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[User, UserPatch] = Patcher.define[User, UserPatch] + .ignoreNoneInPatch + .buildPatcher ``` If the flag was enabled in the implicit config it can be disabled with `.clearOnNoneInPatch`. 
@@ -396,6 +448,13 @@ If the flag was enabled in the implicit config it can be disabled with `.clearOn // clears both fields: // expected output: // User(name = None, age = None) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[User, UserPatch] = Patcher.define[User, UserPatch] + .clearOnNoneInPatch + .buildPatcher ``` ### Unambiguous `Option` update @@ -431,6 +490,14 @@ unambiguous what to do: // ignores updating both fields: // expected output: // User(name = Some(value = "Jane"), age = Some(value = 25)) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[User, UserPatch] = Patcher.derive[User, UserPatch] + // or (if you want to pass overrides): + val patcher2: Patcher[User, UserPatch] = Patcher.define[User, UserPatch] + .buildPatcher ``` ## Updating value with `Either` @@ -455,6 +522,14 @@ By default patch always just replaces the old value with a new one: ) // expected output: // User(name = Left(value = "nope"), age = Left(value = "nope")) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[User, UserPatch] = Patcher.derive[User, UserPatch] + // or (if you want to pass overrides): + val patcher2: Patcher[User, UserPatch] = Patcher.define[User, UserPatch] + .buildPatcher ``` ### Treating `Left` as no-update instead of "set to `Left`" @@ -523,6 +598,13 @@ The latter would assume that `Either` is `Right`-biased. // expected output: // User(name = Right(value = "John"), age = Right(value = 30)) } + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher2: Patcher[User, UserPatch] = Patcher.define[User, UserPatch] + .ignoreLeftInPatch + .buildPatcher ``` If the flag was enabled in the implicit config it can be disabled with `.useLeftOnLeftInPatch`. 
@@ -552,6 +634,13 @@ If the flag was enabled in the implicit config it can be disabled with `.useLeft // clears both fields: // expected output: // User(name = Left(value = "nope"), age = Left(value = "nope")) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher2: Patcher[User, UserPatch] = Patcher.define[User, UserPatch] + .useLeftOnLeftInPatch + .buildPatcher ``` ### Unambiguous `Either` update @@ -587,6 +676,14 @@ unambiguous what to do: // ignores updating both fields: // expected output: // User(name = Right(value = "Jane"), age = Right(value = 25)) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[User, UserPatch] = Patcher.derive[User, UserPatch] + // or (if you want to pass overrides): + val patcher2: Patcher[User, UserPatch] = Patcher.define[User, UserPatch] + .buildPatcher ``` ## Updating value with collection @@ -611,6 +708,14 @@ By default patch always just replaces the old value with a new one: ) // expected output: // UserStats(names = List("Jane"), ages = List(25)) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[UserStats, UserStatsPatch] = Patcher.derive[UserStats, UserStatsPatch] + // or (if you want to pass overrides): + val patcher2: Patcher[UserStats, UserStatsPatch] = Patcher.define[UserStats, UserStatsPatch] + .buildPatcher ``` ### Appending to collection instead of replacing it @@ -664,6 +769,13 @@ but it also gives a simple way to append collection to the old value. // expected output: // UserStats(names = List("John", "Jane"), ages = List(30, 25)) } + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[UserStats, UserStatsPatch] = Patcher.define[UserStats, UserStatsPatch] + .appendCollectionInPatch + .buildPatcher ``` If the flag was enabled in the implicit config it can be disabled with `.overrideCollectionInPatch`. 
@@ -693,6 +805,13 @@ If the flag was enabled in the implicit config it can be disabled with `.overrid // clears both fields: // expected output: // UserStats(names = List("Jane"), ages = List(25)) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[UserStats, UserStatsPatch] = Patcher.define[UserStats, UserStatsPatch] + .overrideCollectionInPatch + .buildPatcher ``` ### Unambiguous collection update @@ -728,4 +847,12 @@ unambiguous what to do (leave unchanged or replace): // ignores updating both fields: // expected output: // UserStats(names = List("Jane"), ages = List(25)) + + import io.scalaland.chimney.Patcher + + // If we want to reuse Patcher, we can create implicits using: + val patcher: Patcher[UserStats, UserStatsPatch] = Patcher.derive[UserStats, UserStatsPatch] + // or (if you want to pass overrides): + val patcher2: Patcher[UserStats, UserStatsPatch] = Patcher.define[UserStats, UserStatsPatch] + .buildPatcher ``` diff --git a/docs/docs/supported-transformations.md b/docs/docs/supported-transformations.md index c86019019..9d65d14f3 100644 --- a/docs/docs/supported-transformations.md +++ b/docs/docs/supported-transformations.md @@ -37,6 +37,8 @@ transformation is through `Transformer[From, To]`: override def toString: String = s"MyOtherType($b)" } + // There are better ways of defining implicit Transformer - see Transformer.derive[From, To] and + // Transformer.define[From, To].buildTransformer - but for completely arbitrary type it's ok: val transformer: Transformer[MyType, MyOtherType] = (src: MyType) => new MyOtherType(src.a.toString) transformer.transform(new MyType(10)) // new MyOtherType("10") @@ -57,7 +59,7 @@ transformation is through `Transformer[From, To]`: For many cases, Chimney can generate this `Transformer` for you, without you having to do anything. As a matter of fact, the majority of this page describes exactly that. In some cases Chimney might not know how to generate a total transformation - but you would know, and you [could provide it yourself](#custom-transformations). But what if -converting one type into another cannot be described with a total function? +converting one type into another cannot be described with a total function? Partial Transformers owe their name to **partial functions**. They might successfully convert only some values of the source type. However, contrary to Scala's `PartialFunction` they do not throw an `Exception` when you pass a "wrong" @@ -78,6 +80,8 @@ function was not defined, "empty value" when something was expected) and even th override def toString: String = s"MyOtherType($a)" } + // There are better ways of defining implicit PartialTransformer - see PartialTransformer.derive[From, To] and + // PartialTransformer.define[From, To].buildTransformer - but for completely arbitrary type it's ok val transformer: PartialTransformer[MyType, MyOtherType] = PartialTransformer[MyType, MyOtherType] { (src: MyType) => partial.Result @@ -107,10 +111,10 @@ function was not defined, "empty value" when something was expected) and even th import io.scalaland.chimney.dsl._ - // When the compiler can find an implicit Transformer... 
+    // When the compiler can find an implicit Transformer...:
     implicit val transformerAsImplicit: PartialTransformer[MyType, MyOtherType] = transformer
 
-    // ...we can use this extension method to call it
+    // ...we can use this extension method to call it:
     pprint.pprintln(
       (new MyType("10"))
         .transformIntoPartial[MyOtherType]
@@ -176,10 +180,10 @@ away with just providing a throwing function, and letting some utility catch the
     import io.scalaland.chimney.PartialTransformer
     import io.scalaland.chimney.dsl._
 
-    val fn: String => Int = str => str.toInt // throws Exception if String is not a number
+    val fn: String => Int = str => str.toInt // Throws Exception if String is not a number.
 
     implicit val transformer: PartialTransformer[String, Int] =
-      PartialTransformer.fromFunction(fn) // catches exception
+      PartialTransformer.fromFunction(fn) // Catches exception!
 
     pprint.pprintln(
       "1".transformIntoPartial[Int].asEitherErrorPathMessageStrings
@@ -210,7 +214,7 @@ Other times you might need to convert `PartialFunction` into total function with
     }
 
     implicit val transformer: PartialTransformer[String, Int] =
-      PartialTransformer(partial.Result.fromPartialFunction(fn)) // handled "not defined at" case
+      PartialTransformer(partial.Result.fromPartialFunction(fn)) // Handled "not defined at" case!
 
     pprint.pprintln(
       "1".transformIntoPartial[Int].asEitherErrorPathMessageStrings
@@ -313,10 +317,21 @@ If you transform one type into itself or its supertype, it will be upcast withou
     trait A
     class B extends A
     val b = new B
-    b.transformInto[A]
+    b.transformInto[A] // == (b: A)
     b.into[A].transform // == (b: A)
     b.transformIntoPartial[A].asEither // == Right(b: A)
     b.intoPartial[A].transform.asEither // == Right(b: A)
+
+    import io.scalaland.chimney.{Transformer, PartialTransformer}
+
+    // If we want to reuse Transformer, we can create implicits using:
+    val totalTransformer: Transformer[B, A] = Transformer.derive[B, A]
+    val partialTransformer: PartialTransformer[B, A] = PartialTransformer.derive[B, A]
+    // or (if you want to pass overrides):
+    val totalTransformer2: Transformer[B, A] = Transformer.define[B, A]
+      .buildTransformer
+    val partialTransformer2: PartialTransformer[B, A] = PartialTransformer.define[B, A]
+      .buildTransformer
     ```
 
 In particular, when the source type is (`=:=`) the target type, you will end up with an identity transformation.
@@ -337,6 +352,9 @@ In particular, when the source type is (`=:=`) the target type, you will end up
     val b = new B
 
     b.into[A].withFieldConst(_.a, "copied").transform // new A("copied")
+
+    import io.scalaland.chimney.Transformer
+
+    Transformer.define[B, A].withFieldConst(_.a, "copied").buildTransformer.transform(b) // new A("copied")
     ```
 
 since that customization couldn't be applied if we only upcasted the value.
@@ -366,7 +384,7 @@ But default conversions using `=:=` and `<:<` are disabled, but they can be enab
     // Bar(value = "bar")
 
     def fooToBar2[A, B](value: Foo[A])(implicit ev: A <:< B): Bar[B] = {
-      // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3)
+      // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!).
implicit val cfg = TransformerConfiguration.default.enableTypeConstraintEvidence value.transformInto[Bar[B]] @@ -391,7 +409,7 @@ If the flag was enabled in the implicit config it can be disabled with `.enableT case class Foo[A](value: A) case class Bar[A](value: A) - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableTypeConstraintEvidence @@ -461,6 +479,17 @@ The obvious examples are `case class`es with the same fields: // Target(a = 42, b = 0.07) // Right(value = Target(a = 42, b = 0.07)) // Right(value = Target(a = 42, b = 0.07)) + + import io.scalaland.chimney.{Transformer, PartialTransformer} + + // If we want to reuse Transformer, we can create implicits using: + val totalTransformer: Transformer[Source, Target] = Transformer.derive[Source, Target] + val partialTransformer: PartialTransformer[Source, Target] = PartialTransformer.derive[Source, Target] + // or (if you want to pass overrides): + val totalTransformer2: Transformer[Source, Target] = Transformer.define[Source, Target] + .buildTransformer + val partialTransformer2: PartialTransformer[Source, Target] = PartialTransformer.define[Source, Target] + .buildTransformer ``` However, the original value might have fields absent in the target type and/or appearing in a different order: @@ -623,7 +652,7 @@ side effects - you need to enable the `.enableMethodAccessors` flag: // partial.Result.fromValue(new Target(source.a, source.b())) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableMethodAccessors (new Source("value", 512)).transformInto[Target] @@ -633,6 +662,13 @@ side effects - you need to enable the `.enableMethodAccessors` flag: // val source = new Source("value", 512) // partial.Result.fromValue(new Target(source.a, source.b())) } + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Source, Target] = Transformer.define[Source, Target] + .enableMethodAccessors + .buildTransformer ``` Flag `.enableMethodAccessors` will allow macros to consider methods that are: @@ -655,7 +691,7 @@ If the flag was enabled in the implicit config it can be disabled with `.disable } class Target(a: String, b: Int) - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableMethodAccessors (new Source("value", 512)).into[Target].disableMethodAccessors.transform @@ -700,7 +736,7 @@ inherited from a source value's supertype, you need to enable the `.enableInheri // Right(value = Target(a = "value", b = 10)) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). 
implicit val cfg = TransformerConfiguration.default.enableInheritedAccessors pprint.pprintln( @@ -713,6 +749,13 @@ inherited from a source value's supertype, you need to enable the `.enableInheri // Target(a = "value", b = 10) // Right(value = Target(a = "value", b = 10)) } + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Source, Target] = Transformer.define[Source, Target] + .enableInheritedAccessors + .buildTransformer ``` !!! tip @@ -734,7 +777,7 @@ If the flag was enabled in the implicit config it can be disabled with `.enableI case class Source(b: Int) extends Parent case class Target(a: String, b: Int) - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableInheritedAccessors Source(10).into[Target].disableInheritedAccessors.transform @@ -780,7 +823,7 @@ If we want to read `def getFieldName(): A` as if it was `val fieldName: A` - whi // partial.Result.fromValue(new Target(source.getA(), source.getB())) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableBeanGetters (new Source("value", 512)).transformInto[Target] @@ -790,6 +833,13 @@ If we want to read `def getFieldName(): A` as if it was `val fieldName: A` - whi // val source = new Source("value", 512) // partial.Result.fromValue(new Target(source.getA(), source.getB())) } + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Source, Target] = Transformer.define[Source, Target] + .enableBeanGetters + .buildTransformer ``` Flag `.enableBeanGetters` will allow macros to consider methods which are: @@ -816,7 +866,7 @@ If the flag was enabled in the implicit config it can be disabled with `.disable } class Target(a: String, b: Int) - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableBeanGetters (new Source("value", 512)).into[Target].disableBeanGetters.transform @@ -872,7 +922,7 @@ flag: // partial.Result.fromValue(target) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). 
implicit val cfg = TransformerConfiguration.default.enableBeanSetters (new Source("value", 512)).transformInto[Target] @@ -888,6 +938,13 @@ flag: // target.setB(source.b) // partial.Result.fromValue(target) } + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Source, Target] = Transformer.define[Source, Target] + .enableBeanSetters + .buildTransformer ``` Flag `.enableBeanSetters` will allow macros to write to methods which are: @@ -926,7 +983,7 @@ If the flag was enabled in the implicit config it can be disabled with `.disable def setB(bb: Int): Unit = b = bb } - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableBeanSetters (new Source("value", 512)).into[Target].disableBeanSetters.transform @@ -939,7 +996,7 @@ If the flag was enabled in the implicit config it can be disabled with `.disable // Consult https://chimney.readthedocs.io for usage examples. ``` -This flag would ALSO enable writing to public `var`s: +This flag would **also** enable writing to public `var`s: !!! example @@ -953,7 +1010,7 @@ This flag would ALSO enable writing to public `var`s: var b: Int = 0 } - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableBeanSetters (new Source("value", 512)).transformInto[Target] @@ -1054,12 +1111,19 @@ them: .transform // partial.Result.fromValue(new Target()) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableIgnoreUnmatchedBeanSetters ().transformInto[Target] // new Target() ().transformIntoPartial[Target] // partial.Result.fromValue(new Target()) } + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Unit, Target] = Transformer.define[Unit, Target] + .enableIgnoreUnmatchedBeanSetters + .buildTransformer ``` If the flag was enabled in the implicit config it can be disabled with `.disableIgnoreUnmatchedBeanSetters`. @@ -1079,7 +1143,7 @@ If the flag was enabled in the implicit config it can be disabled with `.disable def setB(bb: Int): Unit = b = bb } - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). 
implicit val cfg = TransformerConfiguration.default.enableIgnoreUnmatchedBeanSetters ().into[Target].disableIgnoreUnmatchedBeanSetters.transform @@ -1135,7 +1199,7 @@ making this setting sort of a setters' counterpart to a default value in a const // partial.Result.fromValue(target) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableBeanSetters.enableIgnoreUnmatchedBeanSetters (new Source("value")).transformInto[Target] @@ -1206,7 +1270,7 @@ To consider such methods (and fail compilation if they are not matched) you can // partial.Result.fromValue(target) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableBeanSetters.enableNonUnitBeanSetters new Source("value", 128).transformInto[Target] @@ -1223,6 +1287,14 @@ To consider such methods (and fail compilation if they are not matched) you can // target.setB(source.b) // partial.Result.fromValue(target) } + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Source, Target] = Transformer.define[Source, Target] + .enableBeanSetters + .enableNonUnitBeanSetters + .buildTransformer ``` It is disabled by default for the same reasons as default values - being potentially dangerous. @@ -1415,6 +1487,13 @@ to default values with the `.enableDefaultValues` flag: // expected output: // Target(a = "value", b = 128, c = 0L) // Value(value = Target(a = "value", b = 128, c = 0L)) + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Source, Target] = Transformer.define[Source, Target] + .enableDefaultValues + .buildTransformer ``` A default value is used as a fallback, meaning: @@ -1438,7 +1517,7 @@ If the flag was enabled in the implicit config it can be disabled with `.disable case class Source(a: String, b: Int) case class Target(a: String, b: Int = 0, c: Long = 0L) - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableDefaultValues (new Source("value", 512)).into[Target].disableDefaultValues.transform @@ -1498,7 +1577,7 @@ similar reasons to default values support, but we can enable it with the `.enabl case class Foo(a: String) case class Bar(a: String, b: Option[String] = Some("a")) - // without flags -> compilation error + // Without these flags -> compilation error! 
pprint.pprintln( Foo("value").into[Bar].enableOptionDefaultsToNone.transform ) @@ -1510,7 +1589,7 @@ similar reasons to default values support, but we can enable it with the `.enabl // Some(value = Bar(a = "value", b = None)) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableOptionDefaultsToNone pprint.pprintln( @@ -1523,6 +1602,13 @@ similar reasons to default values support, but we can enable it with the `.enabl // Bar(a = "value", b = None) // Some(value = Bar(a = "value", b = None)) } + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Foo, Bar] = Transformer.define[Foo, Bar] + .enableOptionDefaultsToNone + .buildTransformer ``` The `None` value is used as a fallback, meaning: @@ -1560,7 +1646,7 @@ The `None` value is used as a fallback, meaning: // Some(value = Bar(a = "value", b = Some(value = "a"))) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableOptionDefaultsToNone.enableDefaultValues pprint.pprintln( @@ -1588,7 +1674,7 @@ If the flag was enabled in the implicit config it can be disabled with `.disable case class Foo(a: String) case class Bar(a: String, b: Option[String] = Some("a")) - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableOptionDefaultsToNone Foo("value").into[Bar].disableOptionDefaultsToNone.transform @@ -1630,6 +1716,13 @@ it with another field. Since the usual cause of such cases is a _rename_, we can // expected output: // Bar(a = "value", c = 1248) // Right(value = Bar(a = "value", c = 1248)) + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer with overrides we can create implicits using + val transformer: Transformer[Foo, Bar] = Transformer.define[Foo, Bar] + .withFieldRenamed(_.b, _.c) + .buildTransformer ``` !!! tip @@ -1682,7 +1775,7 @@ with all arguments declared as public `val`s, and Java Beans where each setter h def setC(cc: Int): Unit = c = cc } - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableBeanGetters.enableBeanSetters (new Foo()) @@ -1792,7 +1885,7 @@ the constructor's argument/setter yourself. The successful value can be provided case class Foo(a: String, b: Int) case class Bar(a: String, b: Int, c: Long) - // providing missing value... + // Providing missing value...: pprint.pprintln( Foo("value", 10).into[Bar].withFieldConst(_.c, 1000L).transform ) @@ -1803,7 +1896,7 @@ the constructor's argument/setter yourself. 
The successful value can be provided // Bar(a = "value", b = 10, c = 1000L) // Right(value = Bar(a = "value", b = 10, c = 1000L)) - // ...and overriding existing value + // ...and overriding existing value: pprint.pprintln( Foo("value", 10).into[Bar].withFieldConst(_.c, 1000L).withFieldConst(_.b, 20).transform ) @@ -1813,6 +1906,14 @@ the constructor's argument/setter yourself. The successful value can be provided // expected output: // Bar(a = "value", b = 20, c = 1000L) // Right(value = Bar(a = "value", b = 20, c = 1000L)) + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Foo, Bar] = Transformer.define[Foo, Bar] + .withFieldConst(_.c, 1000L) + .withFieldConst(_.b, 20) + .buildTransformer ``` `.withFieldConst` can be used to provide/override only _successful_ values. What if we want to provide a failure, e.g.: @@ -1834,7 +1935,7 @@ These cases can be handled only with `PartialTransformer` using `.withFieldConst case class Foo(a: String, b: Int) case class Bar(a: String, b: Int, c: Long) - // successful partial.Result constant + // Successful partial.Result constant: pprint.pprintln( Foo("value", 10) .intoPartial[Bar] @@ -1847,7 +1948,7 @@ These cases can be handled only with `PartialTransformer` using `.withFieldConst // expected output: // Right(value = Bar(a = "value", b = 10, c = 100L)) - // a few different partial.Result failures constants + // A few different partial.Result failures constants: pprint.pprintln( Foo("value", 10) .intoPartial[Bar] @@ -1933,7 +2034,7 @@ with all arguments declared as public `val`s, and Java Beans where each setter h def setC(cc: Int): Unit = c = cc } - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableBeanGetters.enableBeanSetters (new Foo()) @@ -2011,7 +2112,7 @@ using `.withFieldComputed`: case class Foo(a: String, b: Int) case class Bar(a: String, b: Int, c: Long) - // providing missing value... + // Providing missing value...: pprint.pprintln( Foo("value", 10).into[Bar].withFieldComputed(_.c, foo => foo.b.toLong * 2).transform ) @@ -2022,7 +2123,7 @@ using `.withFieldComputed`: // Bar(a = "value", b = 10, c = 20L) // Right(value = Bar(a = "value", b = 10, c = 20L)) - // ...and overriding existing value + // ...and overriding existing value: pprint.pprintln( Foo("value", 10) .into[Bar] @@ -2042,7 +2143,7 @@ using `.withFieldComputed`: // Bar(a = "value", b = 40, c = 20L) // Right(value = Bar(a = "value", b = 40, c = 20L)) - // we can also use values extracted from the source + // We can also use values extracted from the source: pprint.pprintln( List(Foo("value", 10)) .into[Vector[Bar]] @@ -2061,6 +2162,14 @@ using `.withFieldComputed`: // expected output: // Vector(Bar(a = "value", b = 40, c = 20L)) // Right(value = Vector(Bar(a = "value", b = 40, c = 20L))) + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Foo, Bar] = Transformer.define[Foo, Bar] + .withFieldComputed(_.c, foo => foo.b.toLong * 2) + .withFieldComputed(_.b, foo => foo.b * 4) + .buildTransformer ``` `.withFieldComputed`/`.withFieldComputedFrom` can be used to compute only _successful_ values. 
What if we want to @@ -2084,7 +2193,7 @@ These cases can be handled only with `PartialTransformer` using case class Foo(a: String, b: Int) case class Bar(a: String, b: Int, c: Long) - // always successful partial.Result + // Always successful partial.Result: pprint.pprintln( Foo("value", 10) .intoPartial[Bar] @@ -2097,7 +2206,7 @@ These cases can be handled only with `PartialTransformer` using // expected output: // Right(value = Bar(a = "value", b = 10, c = 20L)) - // always failing with a partial.Result.fromErrorString + // Always failing with a partial.Result.fromErrorString: pprint.pprintln( Foo("value", 10) .intoPartial[Bar] @@ -2110,7 +2219,7 @@ These cases can be handled only with `PartialTransformer` using // expected output: // Left(value = List(("", StringMessage(message = "bad value")))) - // failure depends on the input (whether .toLong throws or not) + // Failure depends on the input (whether .toLong throws or not): pprint.pprintln( Foo("20", 10) .intoPartial[Bar] @@ -2140,7 +2249,7 @@ These cases can be handled only with `PartialTransformer` using // ) // ) - // we can also use values extracted from the source + // We can also use values extracted from the source: pprint.pprintln( List(Foo("20", 10)) .intoPartial[Vector[Bar]] @@ -2223,7 +2332,7 @@ with all arguments declared as public `val`s, and Java Beans where each setter h def setC(cc: Long): Unit = c = cc } - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableBeanGetters.enableBeanSetters (new Foo()) @@ -2341,7 +2450,7 @@ The field name matching predicate can be overridden with a flag: // Right(value = Bar(baz = Baz(s = "test"), a = 1024)) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default .enableCustomFieldNameComparison(TransformedNamesComparison.CaseInsensitiveEquality) @@ -2365,6 +2474,13 @@ The field name matching predicate can be overridden with a flag: // Right(value = Bar(baz = Baz(s = "test"), a = 1024)) // Right(value = Bar(baz = Baz(s = "test"), a = 1024)) } + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Foo, Bar] = Transformer.define[Foo, Bar] + .enableCustomFieldNameComparison(TransformedNamesComparison.CaseInsensitiveEquality) + .buildTransformer ``` For details about `TransformedNamesComparison` look at [their dedicated section](#defining-custom-name-matching-predicate). @@ -2405,7 +2521,7 @@ If the flag was enabled in the implicit config it can be disabled with `.disable case class Baz(s: String) } - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). 
implicit val cfg = TransformerConfiguration.default .enableCustomFieldNameComparison(TransformedNamesComparison.CaseInsensitiveEquality) @@ -2455,6 +2571,17 @@ constructor's argument is made by position instead of name: ) // expected output: // Right(value = Foo(a = "value", b = 42, c = 1024L)) + + import io.scalaland.chimney.{Transformer, PartialTransformer} + + // If we want to reuse Transformer, we can create implicits using: + val totalTransformer: Transformer[Foo, (String, Int, Long)] = Transformer.derive[Foo, (String, Int, Long)] + val partialTransformer: PartialTransformer[Foo, (String, Int, Long)] = PartialTransformer.derive[Foo, (String, Int, Long)] + // or (if you want to pass overrides): + val totalTransformer2: Transformer[Foo, (String, Int, Long)] = Transformer.define[Foo, (String, Int, Long)] + .buildTransformer + val partialTransformer2: PartialTransformer[Foo, (String, Int, Long)] = PartialTransformer.define[Foo, (String, Int, Long)] + .buildTransformer ``` !!! tip @@ -2462,6 +2589,25 @@ constructor's argument is made by position instead of name: You can use all the flags, renames, value provisions, and computations that are available to case classes, Java Beans and so on. + ```scala + //> using dep io.scalaland::chimney::{{ chimney_version() }} + //> using dep com.lihaoyi::pprint::{{ libraries.pprint }} + import io.scalaland.chimney.dsl._ + + case class Foo(a: String, b: Int, c: Long) + + pprint.pprintln( + Foo("value", 42, 1024L).into[(String, Int, Long, String, Double, Long, Option[Double])] + .withFieldRenamed(_.a, _._4) // _4 + .withFieldConst(_._5, 3.14) // _5 + .withFieldComputed(_._6, foo => foo.c + 2) // _6 + .withTargetFlag(_._7).enableOptionDefaultsToNone // _7 + .transform + ) + // expected output: + // ("value", 42, 1024L, "value", 3.14, 1026L, None) + ``` + !!! tip If you are not sure whether the derivation treats your case as tuple conversion, [try enabling macro logging](troubleshooting.md#debugging-macros). @@ -2502,6 +2648,17 @@ as transparent, similarly to virtually every other Scala library. // expected output: // Right(value = Bar(b = 10)) // Right(value = Bar(b = 10)) + + import io.scalaland.chimney.{Transformer, PartialTransformer} + + // If we want to reuse Transformer, we can create implicits using: + val totalTransformer: Transformer[Foo, Bar] = Transformer.derive[Foo, Bar] + val partialTransformer: PartialTransformer[Foo, Bar] = PartialTransformer.derive[Foo, Bar] + // or (if you want to pass overrides): + val totalTransformer2: Transformer[Foo, Bar] = Transformer.define[Foo, Bar] + .buildTransformer + val partialTransformer2: PartialTransformer[Foo, Bar] = PartialTransformer.define[Foo, Bar] + .buildTransformer ``` !!! tip @@ -2535,10 +2692,27 @@ as transparent, similarly to virtually every other Scala library. When `AnyVal` special handling cannot be used (e.g. because value/constructor is private), then Chimney falls back to treat them as a normal class. -!!! warning +!!! tip + + You can use all the flags, renames, value provisions, and computations that are available to case classes, + Java Beans and so on. - If you use any value override (`.withFieldConst`, `.withFieldComputed`, etc.) getting value from/to `AnyVal`, it - _will_ be treated as just a normal product type. 
+ ```scala + //> using dep io.scalaland::chimney::{{ chimney_version() }} + //> using dep com.lihaoyi::pprint::{{ libraries.pprint }} + import io.scalaland.chimney.dsl._ + + case class Foo(a: Int) extends AnyVal + case class Bar(b: Int) extends AnyVal + + pprint.pprintln( + Foo(10).into[Bar] + .withFieldComputed(_.b, foo => foo.a + 5) + .transform + ) + // expected output: + // 15 + ``` ### From/into a wrapper type @@ -2578,7 +2752,7 @@ a flag: // Right(value = "user name") locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableNonAnyValWrappers pprint.pprintln( @@ -2602,6 +2776,13 @@ a flag: // Right(value = UserName(value = "user name")) // Right(value = UserName(value = "user name")) } + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[String, UserName] = Transformer.define[String, UserName] + .enableNonAnyValWrappers + .buildTransformer ``` !!! tip @@ -2694,6 +2875,13 @@ type's subtype needs to have a corresponding subtype with a matching name in the // expected output: // Right(value = Buzz) // Right(value = Buzz) + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer with overrides we can create implicits using + val transformer: Transformer[Foo, Bar] = Transformer.define[Foo, Bar] + .enableCustomFieldNameComparison(TransformedNamesComparison.CaseInsensitiveEquality) + .buildTransformer ``` !!! tip @@ -2855,6 +3043,11 @@ In such cases, Chimney is able to automatically wrap/unwrap these inner values a // expected output: // A(value = A(a = "value", b = 42)) // B(value = B()) + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[protobuf.Foo, domain.Bar] = Transformer.derive[protobuf.Foo, domain.Bar] ``` ### Java's `enum`s @@ -2912,6 +3105,11 @@ Java's `enum` can also be converted this way to/from `sealed`/Scala 3's `enum`/a // Red // Green // Blue + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[ColorJ, ColorS] = Transformer.derive[ColorJ, ColorS] ``` !!! example @@ -2995,6 +3193,13 @@ Or we might want to redirect two subtypes into the same target subtype. For that ) // expected output: // Bar(a = 10) + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Source, Target] = Transformer.define[Source, Target] + .withSealedSubtypeRenamed[Source.Baz, Target.Bar] + .buildTransformer ``` !!! notice @@ -3127,6 +3332,15 @@ computation. 
This can be done using `.withSealedSubtypeHandled`: ) // expected output: // Buzz + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Bar, Foo] = Transformer.define[Bar, Foo] + .withSealedSubtypeHandled[Bar.Fizz.type] { fizz => + Foo.Baz(fizz.toString) + } + .buildTransformer ``` If the computation needs to allow failure, there is `.withSealedSubtypeHandledPartial`: @@ -3542,7 +3756,7 @@ The subtype name matching predicate can be overridden with a flag: // Right(value = Baz) locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default .enableCustomSubtypeNameComparison(TransformedNamesComparison.CaseInsensitiveEquality) @@ -3566,6 +3780,13 @@ The subtype name matching predicate can be overridden with a flag: // Right(value = Baz) // Right(value = Baz) } + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Foo, Bar] = Transformer.define[Foo, Bar] + .enableCustomSubtypeNameComparison(TransformedNamesComparison.CaseInsensitiveEquality) + .buildTransformer ``` For details about `TransformedNamesComparison` look at [their dedicated section](#defining-custom-name-matching-predicate). @@ -3614,7 +3835,7 @@ If the flag was enabled in the implicit config it can be disabled with `.disable case object Baz extends Bar } - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default .enableCustomSubtypeNameComparison(TransformedNamesComparison.CaseInsensitiveEquality) @@ -3684,6 +3905,17 @@ The transformation from one `Option` into another is obviously always supported: // expected output: // Right(value = Some(value = Bar(a = "value"))) // Right(value = None) + + import io.scalaland.chimney.{Transformer, PartialTransformer} + + // If we want to reuse Transformer, we can create implicits using: + val totalTransformer: Transformer[Option[Foo], Option[Bar]] = Transformer.derive[Option[Foo], Option[Bar]] + val partialTransformer: PartialTransformer[Option[Foo], Option[Bar]] = PartialTransformer.derive[Option[Foo], Option[Bar]] + // or (if you want to pass overrides): + val totalTransformer2: Transformer[Option[Foo], Option[Bar]] = Transformer.define[Option[Foo], Option[Bar]] + .buildTransformer + val partialTransformer2: PartialTransformer[Option[Foo], Option[Bar]] = PartialTransformer.define[Option[Foo], Option[Bar]] + .buildTransformer ``` Additionally, an automatic wrapping with `Option` is also considered safe and always available: @@ -3780,7 +4012,36 @@ automatically only with `PartialTransformer`: If you need to provide support for your optional types, please, read about [custom optional types](cookbook.md#custom-optional-types). - + +!!! tip + + You can use all the flags, renames, value provisions, and computations that are available to case classes, + Java Beans and so on. 
+ + ```scala + //> using dep io.scalaland::chimney::{{ chimney_version() }} + //> using dep com.lihaoyi::pprint::{{ libraries.pprint }} + import io.scalaland.chimney.dsl._ + + case class Foo(a: String) + case class Bar(a: String, b: String, c: Int, d: Char, e: Option[Float]) + + pprint.pprintln( + Option(Foo("value")).into[Option[Bar]] + .withFieldRenamed(_.matchingSome.a, _.matchingSome.b) + .withFieldConst(_.matchingSome.c, 10) + .withFieldComputedFrom(_.matchingSome)(_.matchingSome.d, foo => foo.a.headOption.getOrElse('0')) + .withTargetFlag(_.matchingSome.e).enableOptionDefaultsToNone + .transform + ) + // expected output: + // Some(value = Bar(a = "value", b = "value", c = 10, d = 'v', e = None)) + ``` + + While you could use `.matching[Some[Foo]].value` it is more convenient to use `.matchingSome` since it infers + the inner type and exposes it automatically. Additionally, `.matchingSome` works with + [custom optional types](cookbook.md#custom-optional-types). + ### Controlling automatic `Option` unwrapping Automatic unwrapping of `Option`s by `PartialTransformer`s allows for seamless decoding of many PTO types into domain @@ -3810,7 +4071,7 @@ However, sometimes you might prefer to opt out of such behavior. You can disable // Consult https://chimney.readthedocs.io for usage examples. locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.disablePartialUnwrapsOption Foo(Some(10)).transformIntoPartial[Bar] @@ -3850,7 +4111,7 @@ If the flag was disabled in the implicit config it can be enabled with `.disable case class Foo(a: Option[Int]) case class Bar(a: Int) - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). 
implicit val cfg = TransformerConfiguration.default.disablePartialUnwrapsOption pprint.pprintln( @@ -3897,6 +4158,22 @@ A transformation from one `Either` to another is supported as long as both left // expected output: // Some(value = Left(value = Bar(a = "value"))) // Some(value = Right(value = Foo(a = "value"))) + + import io.scalaland.chimney.Transformer + + // If we want to reuse Transformer, we can create implicits using: + val transformer: Transformer[Either[Foo, Bar], Either[Bar, Foo]] = Transformer.derive[Either[Foo, Bar], Either[Bar, Foo]] + + import io.scalaland.chimney.{Transformer, PartialTransformer} + + // If we want to reuse Transformer, we can create implicits using: + val totalTransformer: Transformer[Either[Foo, Bar], Either[Bar, Foo]] = Transformer.derive[Either[Foo, Bar], Either[Bar, Foo]] + val partialTransformer: PartialTransformer[Either[Foo, Bar], Either[Bar, Foo]] = PartialTransformer.derive[Either[Foo, Bar], Either[Bar, Foo]] + // or (if you want to pass overrides): + val totalTransformer2: Transformer[Either[Foo, Bar], Either[Bar, Foo]] = Transformer.define[Either[Foo, Bar], Either[Bar, Foo]] + .buildTransformer + val partialTransformer2: PartialTransformer[Either[Foo, Bar], Either[Bar, Foo]] = PartialTransformer.define[Either[Foo, Bar], Either[Bar, Foo]] + .buildTransformer ``` A transformation from `Left` and `Right` into `Either` requires existence of only the transformation from the type we @@ -3926,6 +4203,35 @@ know for sure is inside to their corresponding type in target `Either`: // Right(value = Bar(a = "value")) ``` +!!! tip + + You can use all the flags, renames, value provisions, and computations that are available to case classes, + Java Beans and so on. + + ```scala + //> using dep io.scalaland::chimney::{{ chimney_version() }} + //> using dep com.lihaoyi::pprint::{{ libraries.pprint }} + import io.scalaland.chimney.dsl._ + + case class Foo(a0: String) + case class Bar(a: String, b: Int, c: Char, d: Option[Float]) + + pprint.pprintln( + (Right(Foo("value")): Either[Bar, Foo]).into[Either[Foo, Bar]] + .withFieldRenamed(_.matchingLeft.a, _.matchingLeft.a0) + .withFieldRenamed(_.matchingRight.a0, _.matchingRight.a) + .withFieldConst(_.matchingRight.b, 10) + .withFieldComputedFrom(_.matchingRight)(_.matchingRight.c, bar => bar.a0.headOption.getOrElse('0')) + .withTargetFlag(_.matchingRight.d).enableOptionDefaultsToNone + .transform + ) + // expected output: + // Right(value = Bar(a = "value", b = 10, c = 'v', d = None)) + ``` + + While you could use `.matching[Left[Foo]].value`/`.matching[Right[Bar]].value` it is more convenient to use + `.matchingLeft`/`.matchingRight` since it infers the inner type and exposes it automatically. + ## Between Scala's collections/`Array`s Every `Array`/every collection extending `scala.collection.Iterable` can be used as a source value for a collection's @@ -3961,6 +4267,17 @@ the types stored within these collections can also be converted. 
// Vector(Bar(a = Some(value = "value"))) // Array((Bar(a = Some(value = "key")), Bar(a = Some(value = "value")))) // ListMap(Bar(a = Some(value = "key")) -> Bar(a = Some(value = "value"))) + + import io.scalaland.chimney.{Transformer, PartialTransformer} + + // If we want to reuse Transformer, we can create implicits using: + val totalTransformer: Transformer[List[Foo], Vector[Bar]] = Transformer.derive[List[Foo], Vector[Bar]] + val partialTransformer: PartialTransformer[List[Foo], Vector[Bar]] = PartialTransformer.derive[List[Foo], Vector[Bar]] + // or (if you want to pass overrides): + val totalTransformer2: Transformer[List[Foo], Vector[Bar]] = Transformer.define[List[Foo], Vector[Bar]] + .buildTransformer + val partialTransformer2: PartialTransformer[List[Foo], Vector[Bar]] = PartialTransformer.define[List[Foo], Vector[Bar]] + .buildTransformer ``` With `PartialTransformer`s ware able to handle fallible conversions, tracing at which key/index the failure occurred: @@ -4002,7 +4319,40 @@ With `PartialTransformer`s ware able to handle fallible conversions, tracing at If you need to integrate with Java's collections, please, read about [Java's collections integration](cookbook.md#java-collections-integration). - If you need to provide support for your collection types, you have to write your own implicit methods. + If you need to provide support for your collection types, you have to write your own implicit methods. + +!!! tip + + You can use all the flags, renames, value provisions, and computations that are available to case classes, + Java Beans and so on. + + ```scala + //> using dep io.scalaland::chimney::{{ chimney_version() }} + //> using dep com.lihaoyi::pprint::{{ libraries.pprint }} + import io.scalaland.chimney.dsl._ + + case class Foo(a: String) + case class Bar(a: String, b: String, c: Int, d: Char, e: Option[Float]) + + pprint.pprintln( + List(Foo("key") -> Foo("value")).into[Map[Bar, Bar]] + .withFieldRenamed(_.everyItem._1.a, _.everyMapKey.b) + .withFieldConst(_.everyMapKey.c, 10) + .withFieldComputedFrom(_.everyItem._1)(_.everyMapKey.d, foo => foo.a.headOption.getOrElse('0')) + .withTargetFlag(_.everyMapKey.e).enableOptionDefaultsToNone + .withFieldRenamed(_.everyItem._2.a, _.everyMapValue.b) + .withFieldConst(_.everyMapValue.c, 10) + .withFieldComputedFrom(_.everyItem._2)(_.everyMapValue.d, foo => foo.a.headOption.getOrElse('0')) + .withTargetFlag(_.everyMapValue.e).enableOptionDefaultsToNone + .transform + ) + // expected output: + // Map( + // Bar(a = "key", b = "key", c = 10, d = 'k', e = None) -> Bar(a = "value", b = "value", c = 10, d = 'v', e = None) + // ) + ``` + + `.everyItem`/`.everyMapKey`/`.everyMapValue` work with [custom optional types](cookbook.md#custom-collection-types). 
## Parametric types/generics @@ -4028,6 +4378,17 @@ The most obvious case is having all type parameters applied to non-abstract type ) // expected output: // Bar(value = Baz(value = "value")) + + import io.scalaland.chimney.{Transformer, PartialTransformer} + + // If we want to reuse Transformer, we can create implicits using: + val totalTransformer: Transformer[Foo[Baz[String]], Bar[Bar[String]]] = Transformer.derive[Foo[Baz[String]], Bar[Bar[String]]] + val partialTransformer: PartialTransformer[Foo[Baz[String]], Bar[Bar[String]]] = PartialTransformer.derive[Foo[Baz[String]], Bar[Bar[String]]] + // or (if you want to pass overrides): + val totalTransformer2: Transformer[Foo[Baz[String]], Bar[Bar[String]]] = Transformer.define[Foo[Baz[String]], Bar[Bar[String]]] + .buildTransformer + val partialTransformer2: PartialTransformer[Foo[Baz[String]], Bar[Bar[String]]] = PartialTransformer.define[Foo[Baz[String]], Bar[Bar[String]]] + .buildTransformer ``` or having type parameter being not used at all: @@ -4110,7 +4471,7 @@ knows how to apply it, the transformation can still be derived: case class Bar[A](value: A) def refinedExample[A <: { val value: String }](foo: Foo[A]): Bar[Bar[String]] = - foo.into[Bar[Bar[String]]].enableMacrosLogging.transform + foo.transformInto[Bar[Bar[String]]] pprint.pprintln( refinedExample[Foo[String]](Foo(Foo("value"))) @@ -4231,9 +4592,9 @@ Scala 2.13 and 3 allow using [literal-based singleton types](https://docs.scala- // "str" ``` -### Into a case class +### Into a case object -When the target is a `case class`, the transformation can always be provided: +When the target is a `case object`, the transformation can always be provided: !!! example @@ -4363,6 +4724,20 @@ Then Chimney will try to match the source type's getters against the method's pa ) // expected output: // Vector(Bar(value = "1000")) + + import io.scalaland.chimney.{Transformer, PartialTransformer} + + // If we want to reuse Transformer, we can create implicits using: + val totalTransformer: Transformer[Foo, Bar] = Transformer.define[Foo, Bar] + .withConstructor { (value: Int) => + Bar.make(value * 100) + } + .buildTransformer + val partialTransformer: PartialTransformer[Foo, Bar] = PartialTransformer.define[Foo, Bar] + .withConstructor { (value: Int) => + Bar.make(value * 100) + } + .buildTransformer ``` !!! note @@ -5139,7 +5514,7 @@ with a flag `.enableImplicitConversions`: // "10" locally { - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableImplicitConversions pprint.pprintln( @@ -5163,7 +5538,7 @@ If the flag was enabled in the implicit config it can be disabled with `.disable implicit def convert(a: Int): String = a.toString - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableImplicitConversions 10.into[String].disableImplicitConversions.transform @@ -5246,7 +5621,39 @@ for which they would not have a reasonable mapping: ``` If you pass field or coproduct overrides, they could not be applied if we used the implicit, so in such case Chimney - assumed that the user wants to ignore the implicit. 
+ assumed that the user wants to ignore the implicit. + +!!! warning + + Make sure that you are: + + * **not** using `.transformInto`/`.transformIntoPartial`/`.patchUsing` + * **nor** `.into.transform`/`.intoPartial.transform`/`.using.patch` **without overrides** + * when transforming/patching **top-level object** + + as the code like: + + ```scala + implicit val totalTransformer: Transformer[Foo, Bar] = (foo: Foo) => foo.transformInto[Bar] + implicit val partialTransformer: PartialTransformer[Foo, Bar] = (foo: Foo) => foo.transformIntoPartial[Bar] + implicit val patcher: Patcher[A, Patch] = (obj: A, patch: Patch) => obj.patchUsing(patch) + ``` + + will [create inifinite recursion in runtime and result in `StackOverflowError`](troubleshooting.md#recursive-calls-on-implicits). + + In such cases derive the code with `.derive`/.define` utilities: + + ```scala + implicit val totalTransformer: Transformer[Foo, Bar] = Transformer.derive[Foo, Bar] + implicit val partialTransformer: PartialTransformer[Foo, Bar] = PartialTransformer.derive[Foo, Bar] + implicit val patcher: Patcher[A, Patch] = Patcher.derive[A, Patch] + ``` + + A manual definition of `Transfrormer`s/`PartialTransformer`s/`Patcher`s is necessary only when the derivation cannot happed + for a particular type - and them using `.transformInto`/`.into.transform`/etc is usually not helpful. + + The only exception is when we would like to use the implicit we're currently deriving for some nested field as we're working with + [recursive data structures (which can be safely handled with `.derive`/`.define`)](#recursive-data-types). Total `Transformer`s can be utilized by `PartialTransformer`s as well - handling every input is a stronger guarantee than handling only some of them, so we can always relax it: diff --git a/docs/docs/troubleshooting.md b/docs/docs/troubleshooting.md index 648a30bb1..d4415469c 100644 --- a/docs/docs/troubleshooting.md +++ b/docs/docs/troubleshooting.md @@ -385,7 +385,7 @@ If you used default values a lot, remember that you can enable them for all tran !!! example ```scala - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableDefaultValues ``` @@ -2093,7 +2093,7 @@ The same is true for partial transformers. ### Recursive calls on implicits -Old versions of Chimney in situations like this: +In situations like this: !!! example @@ -2102,7 +2102,7 @@ Old versions of Chimney in situations like this: implicit val t: Transformer[Foo, Bar] = foo => foo.into[Bar].transform ``` -would result in errors like: +the old versions of Chimney would result in errors like: !!! example @@ -2110,7 +2110,7 @@ would result in errors like: forward reference extends over definition of value t ``` -In newer, it can result in errors like: +In newer, it can result in errors like stack overflow: !!! example @@ -2118,6 +2118,30 @@ In newer, it can result in errors like: java.lang.StackOverflowError ``` +The reason for that is that: + +!!! example + + ```scala + implicit val t: Transformer[Foo, Bar] = foo => foo.transformInto[Bar] // or + implicit val t: Transformer[Foo, Bar] = foo => foo.into[Bar].transform + ``` + +generates this code: + +!!! example + + ```scala + implicit val t: Transformer[Foo, Bar] = foo => foo.transformInto(t) // t is calling itself! 
+ implicit val t: Transformer[Foo, Bar] = foo => t.transform(foo) // t is calling itself! + ``` + +When you have such reference to itself, then depending on where it was defined, or whether it was `val`, `lazy val` +or `def`, you can have runtime exception such as: + + * `StackOverflowError` - when `Transformer` initialized correctly, but calling it resulted in inifinite recursion + * `NullPointerException` - when it's something like `implicit val t: Transformer[Foo, Bar] = implicitly[Transformer[Foo, Bar]]` + It's a sign of recursion which has to be handled with [semiautomatic derivation](cookbook.md#automatic-vs-semiautomatic). !!! example @@ -2127,6 +2151,20 @@ It's a sign of recursion which has to be handled with [semiautomatic derivation] implicit val t: Transformer[Foo, Bar] = Transformer.define[Foo, Bar].buildTransformer ``` +When using `.derive` (counterpart to `.transformInto`)/`.define.buildTransformer` (counterpart `.into.transform` as it also +allows to put overrides/flags), implicit `Transformer` is **not being searched for the top level transformation** - +but it **is allowed in nested fields** which allows transforming recursive data structures. + +Implicits are also not searched when the type has overrides (as using implicit would skip all these overrides): + +!!! example + + ```scala + // This does NOT call itself as withField*/withSealedSubtype*/withEnumCase*/withFallback*/withConstructor* + // prevents implicit usage (and upcasting) + implicit val t: Transformer[Foo, Bar] = foo => foo.into[Bar].withFieldConst(_.field, value).transform + ``` + ### `sealed trait`s fail to recompile In the case of incremental compilation, the Zinc compiler sometimes has issues with @@ -2313,7 +2351,7 @@ above, or with a shared implicit config: //> using dep io.scalaland::chimney::{{ chimney_version() }} import io.scalaland.chimney.dsl._ - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). implicit val cfg = TransformerConfiguration.default.enableMacrosLogging ``` diff --git a/docs/docs/under-the-hood.md b/docs/docs/under-the-hood.md index bfab4d60b..13ea634ec 100644 --- a/docs/docs/under-the-hood.md +++ b/docs/docs/under-the-hood.md @@ -260,7 +260,7 @@ And since it is an implicit, it can be shared between several different macro ex //> using dep io.scalaland::chimney::{{ chimney_version() }} import io.scalaland.chimney.dsl._ - // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3) + // All transformations derived in this scope will see these new flags (Scala 2-only syntax, see cookbook for Scala 3!). 
implicit val cfg = TransformerConfiguration.default.enableMacrosLogging "test".transformInto[Option[String]] diff --git a/docs/main.py b/docs/main.py index 15d1819d6..1ecd3e8ab 100644 --- a/docs/main.py +++ b/docs/main.py @@ -31,6 +31,8 @@ def define_env(env): If git describe tells us that this is NOT a git tag but git tag + some offset, we need to add -SNAPSHOT to match sbt """ if re.compile('.+-[0-9]+-g[0-9a-z]{8}').match(chimney_version_string): + chimney_version_string = chimney_version_string[0:-1] + '-SNAPSHOT' + elif re.compile('.+-[0-9]+-[0-9a-z]{8}').match(chimney_version_string): chimney_version_string = chimney_version_string + '-SNAPSHOT' @env.macro diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index f41dfeff4..2f359d76f 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -75,7 +75,7 @@ plugins: - search copyright: |
- Copyright © 2017—2024, Scalaland.io.
+ Copyright © 2017—2025, Scalaland.io.
Chimney is FOSS licensed under Apache 2.0

Documentation made with MkDocs, Material for MkDocs and Mkdocs-Macros, hosted on Read the Docs