From c7a5e97f75c0b824e9c087f5ef842c170e86a088 Mon Sep 17 00:00:00 2001
From: Ivan Topolnjak
Date: Mon, 7 Apr 2025 13:09:14 +0200
Subject: [PATCH] upgrade to Kanela 2.0.0-beta.1

---
 build.sbt | 87 +-
 .../main/scala/kamon/runtime/Attacher.scala | 2 +-
 .../kamon/status/InstrumentationStatus.scala | 35 +-
 .../src/main/scala/kamon/status/Status.scala | 11 +-
 .../kamon/status/page/JsonMarshalling.scala | 15 -
 .../src/main/resources/logback.xml | 12 +
 .../src/main/resources/reference.conf | 4 +-
 .../src/main/resources/reference.conf | 4 +-
 .../http/AkkaHttpServerInstrumentation.scala | 331 ---
 .../http/AkkaHttpServerInstrumentation.scala | 355 ---
 .../http/AkkaHttpServerInstrumentation.scala | 7 +-
 .../akka/http/AkkaHttpServerTracingSpec.scala | 5 +-
 instrumentation/kamon-akka/build.sbt | 130 +-
 .../akka/remote/ContextAwareWireFormats.java | 2433 -----------------
 .../protobuf/ContextAwareWireFormats.proto | 30 -
 .../src/akka-2.5/protobuf/WireFormats.proto | 132 -
 .../akka_26/DispatcherInstrumentation.scala | 22 -
 .../remote/RemotingInstrumentation.scala | 6 -
 .../akka_25/DispatcherInstrumentation.scala | 182 --
 .../remote/RemotingInstrumentation.scala | 167 --
 ...decConstructMessageMethodInterceptor.scala | 81 -
 ...tobufCodecDecodeMessageMethodAdvisor.scala | 34 -
 .../internal/ArterySerializationAdvice.scala | 171 --
 .../src/common/resources/reference.conf | 40 +-
 .../akka/remote/MessageBufferTest.scala | 34 -
 .../ShardingInstrumentationSpec.scala | 173 --
 .../ShardingMessageBufferingSpec.scala | 91 -
 .../src/main/resources/reference.conf | 4 +-
 .../cache/AnnotationCache.java | 6 +-
 .../annotation/el/EnhancedELProcessor.scala | 32 +-
 .../src/test/resources/application.conf | 4 +-
 .../src/main/resources/reference.conf | 2 +-
 .../src/main/resources/reference.conf | 4 +-
 .../src/main/resources/reference.conf | 4 +-
 .../src/main/resources/reference.conf | 6 +-
 .../src/main/resources/reference.conf | 6 +-
 .../src/main/resources/reference.conf | 2 +-
 .../CaptureContextOnSubmitAdvices.java | 62 +-
 ...CaptureContextOnSubmitInstrumentation.java | 200 +-
 .../src/main/resources/reference.conf | 45 +-
 .../ScalaGlobalExecutionContextAdvice.java | 34 -
 .../kamon/instrumentation/package.scala | 4 -
 .../ScalaGlobalExecutionContextAdvice.java | 39 -
 .../kamon/instrumentation/package.scala | 20 -
 .../src/test/resources/application.conf | 18 +-
 .../OnSubmitContextPropagationSpec.scala | 23 -
 .../scala-2.11/scala/annotation/static.scala | 4 -
 .../scala-2.12/scala/annotation/static.scala | 4 -
 .../scala/kamon/instrumentation/package.scala | 6 +-
 .../src/test/resources/application.conf | 4 +-
 .../src/main/resources/reference.conf | 48 +-
 .../jdbc/StatementMonitor.scala | 18 +-
 .../src/main/resources/reference.conf | 2 +-
 .../src/main/resources/reference.conf | 6 +-
 .../instrumentation/logback/package.scala | 4 +-
 .../src/main/resources/reference.conf | 10 +-
 .../src/main/resources/reference.conf | 10 +-
 .../src/main/resources/reference.conf | 2 +-
 .../src/main/resources/reference.conf | 2 +-
 .../src/main/resources/reference.conf | 4 +-
 .../src/main/resources/reference.conf | 4 +-
 .../src/test/resources/application.conf | 3 +-
 .../src/main/resources/reference.conf | 4 +-
 instrumentation/kamon-pekko/build.sbt | 33 +-
 .../src/main/resources/reference.conf | 38 +-
 instrumentation/kamon-play/build.sbt | 68 +-
 .../src/main/resources/reference.conf | 4 +-
 .../play/PlayServerInstrumentation.scala | 25 +-
 .../src/main/resources/reference.conf | 6 +-
 .../src/main/resources/reference.conf | 8 +-
 .../scala/FutureChainingInstrumentation.scala | 130 -
 .../scala/FutureChainingInstrumentation.scala | 145 -
 .../src/main/resources/reference.conf | 4 +-
 .../src/main/resources/reference.conf | 2 +-
 .../spring/server/InstrumentationUtils.scala | 2 +-
 .../SpringClientInstrumentationSpec.scala | 1 +
 .../SpringMVCInstrumentationSpec.scala | 1 +
 .../tapir/TapirInstrumentation.scala | 3 +-
 .../src/main/resources/reference.conf | 4 +-
 .../tapir/TapirInstrumentation.scala | 3 +-
 .../src/main/resources/reference.conf | 10 +-
 .../src/main/resources/reference.conf | 2 +-
 project/Build.scala | 25 +-
 project/build.properties | 2 +-
 84 files changed, 421 insertions(+), 5339 deletions(-)
 create mode 100644 instrumentation/kamon-akka-grpc/src/main/resources/logback.xml
 delete mode 100644 instrumentation/kamon-akka-http/src/main/scala-2.11/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala
 delete mode 100644 instrumentation/kamon-akka-http/src/main/scala-2.12/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala
 rename instrumentation/kamon-akka-http/src/main/{scala-2.13+ => scala}/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala (98%)
 delete mode 100644 instrumentation/kamon-akka/src/akka-2.5/java/akka/remote/ContextAwareWireFormats.java
 delete mode 100644 instrumentation/kamon-akka/src/akka-2.5/protobuf/ContextAwareWireFormats.proto
 delete mode 100644 instrumentation/kamon-akka/src/akka-2.5/protobuf/WireFormats.proto
 delete mode 100644 instrumentation/kamon-akka/src/akka-2.5/scala-2.11/kamon/instrumentation/akka/instrumentations/akka_26/DispatcherInstrumentation.scala
 delete mode 100644 instrumentation/kamon-akka/src/akka-2.5/scala-2.11/kamon/instrumentation/akka/instrumentations/akka_26/remote/RemotingInstrumentation.scala
 delete mode 100644 instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/DispatcherInstrumentation.scala
 delete mode 100644 instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/RemotingInstrumentation.scala
 delete mode 100644 instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/AkkaPduProtobufCodecConstructMessageMethodInterceptor.scala
 delete mode 100644 instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/AkkaPduProtobufCodecDecodeMessageMethodAdvisor.scala
 delete mode 100644 instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/ArterySerializationAdvice.scala
 delete mode 100644 instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/remote/MessageBufferTest.scala
 delete mode 100644 instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/sharding/ShardingInstrumentationSpec.scala
 delete mode 100644 instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/sharding/ShardingMessageBufferingSpec.scala
 delete mode 100644 instrumentation/kamon-executors/src/main/scala-2.11/kamon/instrumentation/executor/ScalaGlobalExecutionContextAdvice.java
 delete mode 100644 instrumentation/kamon-executors/src/main/scala-2.11/kamon/instrumentation/package.scala
 delete mode 100644 instrumentation/kamon-executors/src/main/scala-2.12/kamon/instrumentation/executor/ScalaGlobalExecutionContextAdvice.java
 delete mode 100644 instrumentation/kamon-executors/src/main/scala-2.12/kamon/instrumentation/package.scala
 delete mode
100644 instrumentation/kamon-instrumentation-common/src/main/scala-2.11/scala/annotation/static.scala delete mode 100644 instrumentation/kamon-instrumentation-common/src/main/scala-2.12/scala/annotation/static.scala delete mode 100644 instrumentation/kamon-scala-future/src/main/scala-2.11/kamon/instrumentation/futures/scala/FutureChainingInstrumentation.scala delete mode 100644 instrumentation/kamon-scala-future/src/main/scala-2.12/kamon/instrumentation/futures/scala/FutureChainingInstrumentation.scala diff --git a/build.sbt b/build.sbt index d742aa012..05d8cac39 100644 --- a/build.sbt +++ b/build.sbt @@ -196,7 +196,7 @@ lazy val `kamon-twitter-future` = (project in file("instrumentation/kamon-twitte .enablePlugins(JavaAgent) .settings(instrumentationSettings) .settings( - crossScalaVersions := Seq(`scala_2.11_version`, `scala_2.12_version`, `scala_2.13_version`), + crossScalaVersions := Seq(`scala_2.13_version`), libraryDependencies ++= Seq( kanelaAgent % "provided", "com.twitter" %% "util-core" % "20.3.0" % "provided", @@ -210,7 +210,7 @@ lazy val `kamon-scalaz-future` = (project in file("instrumentation/kamon-scalaz- .enablePlugins(JavaAgent) .settings(instrumentationSettings) .settings( - crossScalaVersions := Seq(`scala_2.11_version`, `scala_2.12_version`, `scala_2.13_version`), + crossScalaVersions := Seq(`scala_2.13_version`), libraryDependencies ++= Seq( kanelaAgent % "provided", "org.scalaz" %% "scalaz-concurrent" % "7.2.28" % "provided", @@ -236,7 +236,7 @@ lazy val `kamon-cats-io` = (project in file("instrumentation/kamon-cats-io")) .enablePlugins(JavaAgent) .settings(instrumentationSettings) .settings( - crossScalaVersions := Seq(`scala_2.11_version`, `scala_2.12_version`, `scala_2.13_version`), + crossScalaVersions := Seq(`scala_2.13_version`), libraryDependencies ++= Seq( kanelaAgent % "provided", { if (scalaBinaryVersion.value == "2.11") @@ -254,7 +254,7 @@ lazy val `kamon-cats-io-3` = (project in file("instrumentation/kamon-cats-io-3") .enablePlugins(JavaAgent) .settings(instrumentationSettings) .settings( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version), + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version), libraryDependencies ++= Seq( kanelaAgent % "provided", "org.typelevel" %% "cats-effect" % "3.3.14" % "provided", @@ -268,7 +268,7 @@ lazy val `kamon-zio-2` = (project in file("instrumentation/kamon-zio-2")) .enablePlugins(JavaAgent) .settings(instrumentationSettings) .settings( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version), + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version), libraryDependencies ++= Seq( kanelaAgent % "provided", "dev.zio" %% "zio" % "2.0.21" % "provided", @@ -333,7 +333,7 @@ lazy val `kamon-mongo-legacy` = (project in file("instrumentation/kamon-mongo-le .enablePlugins(JavaAgent) .settings(instrumentationSettings) .settings( - crossScalaVersions := Seq(`scala_2.11_version`, `scala_2.12_version`, `scala_2.13_version`), + crossScalaVersions := Seq(`scala_2.13_version`), libraryDependencies ++= Seq( kanelaAgent % "provided", "org.mongodb" % "mongodb-driver-sync" % "3.11.0" % "provided", @@ -350,7 +350,7 @@ lazy val `kamon-mongo` = (project in file("instrumentation/kamon-mongo")) .enablePlugins(JavaAgent) .settings(instrumentationSettings) .settings( - crossScalaVersions := Seq(`scala_2.11_version`, `scala_2.12_version`, `scala_2.13_version`), + crossScalaVersions := Seq(`scala_2.13_version`), libraryDependencies ++= Seq( kanelaAgent % 
"provided", "org.mongodb" % "mongodb-driver-sync" % "4.2.3" % "provided", @@ -366,7 +366,7 @@ lazy val `kamon-cassandra` = (project in file("instrumentation/kamon-cassandra") .disablePlugins(AssemblyPlugin) .enablePlugins(JavaAgent) .settings(instrumentationSettings) - .settings(crossScalaVersions := Seq(`scala_2.11_version`, `scala_2.12_version`, `scala_2.13_version`)) + .settings(crossScalaVersions := Seq(`scala_2.13_version`)) .dependsOn(`kamon-core`, `kamon-instrumentation-common`, `kamon-testkit` % "test", `kamon-executors`) lazy val `kamon-elasticsearch` = (project in file("instrumentation/kamon-elasticsearch")) @@ -412,6 +412,7 @@ lazy val `kamon-spring` = (project in file("instrumentation/kamon-spring")) kanelaAgent % "provided", "org.springframework.boot" % "spring-boot-starter-web" % "2.4.2" % "provided", "org.springframework.boot" % "spring-boot-starter-webflux" % "2.4.2" % "provided", + "javax.servlet" % "javax.servlet-api" % "4.0.1" % "provided", okHttp % "test", "com.h2database" % "h2" % "1.4.200" % "test", "javax.xml.bind" % "jaxb-api" % "2.3.1" % "test", @@ -466,14 +467,13 @@ lazy val `kamon-akka` = (project in file("instrumentation/kamon-akka")) .disablePlugins(AssemblyPlugin) .settings(instrumentationSettings: _*) .dependsOn( - `kamon-scala-future` % "compile,common,akka-2.5,akka-2.6", - `kamon-testkit` % "test,test-common,test-akka-2.5,test-akka-2.6" + `kamon-scala-future` % "compile,common,akka-2.6", + `kamon-testkit` % "test,test-common,test-akka-2.6" ) def akkaHttpVersion(scalaVersion: String) = scalaVersion match { - case "2.11" => "10.1.12" - case "3" => "10.5.0" - case _ => "10.2.8" + case "3" => "10.5.0" + case _ => "10.2.8" } def akkaStreamVersion(scalaVersion: String) = scalaVersion match { case "3" => "2.7.0" @@ -484,22 +484,11 @@ def akkaGrpcRuntimeVersion(scalaVersion: String) = scalaVersion match { case _ => "2.1.3" } -def versionedScalaSourceDirectories(sourceDir: File, scalaVersion: String): List[File] = - scalaVersion match { - case "3" => List(sourceDir / "scala-2.13+") - case "2.13" => List(sourceDir / "scala-2.13+") - case _ => Nil - } - lazy val `kamon-akka-http` = (project in file("instrumentation/kamon-akka-http")) .enablePlugins(JavaAgent) .disablePlugins(AssemblyPlugin) .settings(instrumentationSettings) .settings(Seq( - Compile / unmanagedSourceDirectories ++= versionedScalaSourceDirectories( - (Compile / sourceDirectory).value, - scalaBinaryVersion.value - ), resolvers += Resolver.bintrayRepo("hseeberger", "maven"), javaAgents += "org.mortbay.jetty.alpn" % "jetty-alpn-agent" % "2.0.10" % "test", libraryDependencies ++= Seq( @@ -523,7 +512,7 @@ lazy val `kamon-pekko` = (project in file("instrumentation/kamon-pekko")) .disablePlugins(AssemblyPlugin) .settings(instrumentationSettings: _*) .settings(Seq( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version), + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version), libraryDependencies ++= Seq( "org.apache.pekko" %% "pekko-actor" % "1.0.1" % "provided" ) @@ -540,7 +529,7 @@ lazy val `kamon-pekko-http` = (project in file("instrumentation/kamon-pekko-http .disablePlugins(AssemblyPlugin) .settings(instrumentationSettings) .settings(Seq( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version), + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version), libraryDependencies ++= Seq( kanelaAgent % "provided", "org.apache.pekko" %% "pekko-http" % pekkoHttpVersion % "provided", @@ -561,7 +550,7 @@ lazy val `kamon-pekko-grpc` 
= (project in file("instrumentation/kamon-pekko-grpc .settings(instrumentationSettings) .settings(Seq( PB.additionalDependencies := Seq.empty, - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version), + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version), libraryDependencies ++= Seq( kanelaAgent % "provided", "org.apache.pekko" %% "pekko-http" % pekkoHttpVersion % "provided", @@ -581,7 +570,7 @@ lazy val `kamon-pekko-connectors-kafka` = (project in file("instrumentation/kamo .enablePlugins(JavaAgent) .settings(instrumentationSettings) .settings( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version), + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version), libraryDependencies ++= Seq( kanelaAgent % "provided", "org.apache.pekko" %% "pekko-connectors-kafka" % "1.0.0" % "provided", @@ -597,7 +586,7 @@ lazy val `kamon-akka-grpc` = (project in file("instrumentation/kamon-akka-grpc") .settings(instrumentationSettings) .settings(Seq( PB.additionalDependencies := Seq.empty, - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version), + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version), libraryDependencies ++= Seq( kanelaAgent % "provided", "com.typesafe.akka" %% "akka-http" % akkaHttpVersion(scalaBinaryVersion.value) % "provided", @@ -620,12 +609,12 @@ lazy val `kamon-play` = (project in file("instrumentation/kamon-play")) .disablePlugins(AssemblyPlugin) .settings( instrumentationSettings, - crossScalaVersions := Seq(`scala_2.11_version`, `scala_2.12_version`, `scala_2.13_version`) + crossScalaVersions := Seq(`scala_2.13_version`) ) .dependsOn( - `kamon-akka` % "compile,test-common,test-play-2.8,test-play-2.7,test-play-2.6", - `kamon-akka-http` % "compile,test-common,test-play-2.8,test-play-2.7,test-play-2.6", - `kamon-testkit` % "test-common,test-play-2.8,test-play-2.7,test-play-2.6" + `kamon-akka` % "compile,test-common,test-play-2.8,test-play-2.7", + `kamon-akka-http` % "compile,test-common,test-play-2.8,test-play-2.7", + `kamon-testkit` % "test-common,test-play-2.8,test-play-2.7" ) lazy val `kamon-okhttp` = (project in file("instrumentation/kamon-okhttp")) @@ -648,7 +637,7 @@ lazy val `kamon-tapir` = (project in file("instrumentation/kamon-tapir")) .enablePlugins(JavaAgent) .settings( instrumentationSettings, - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`), + crossScalaVersions := Seq(`scala_2.13_version`), libraryDependencies ++= Seq( kanelaAgent % "provided,legacy", @@ -704,7 +693,7 @@ lazy val `kamon-caffeine` = (project in file("instrumentation/kamon-caffeine")) lazy val `kamon-lagom` = (project in file("instrumentation/kamon-lagom")) .disablePlugins(AssemblyPlugin) .settings( - crossScalaVersions := Seq(`scala_2.11_version`, `scala_2.12_version`, `scala_2.13_version`), + crossScalaVersions := Seq(`scala_2.13_version`), libraryDependencies ++= { CrossVersion.partialVersion(scalaVersion.value) match { case Some((2, scalaMajor)) if scalaMajor == 11 => @@ -720,7 +709,7 @@ lazy val `kamon-finagle` = (project in file("instrumentation/kamon-finagle")) .enablePlugins(JavaAgent) .settings(instrumentationSettings) .settings( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`), + crossScalaVersions := Seq(`scala_2.13_version`), libraryDependencies ++= Seq( kanelaAgent % "provided", "com.twitter" %% "finagle-http" % "21.12.0" % "provided", @@ -751,7 +740,7 @@ lazy val `kamon-alpakka-kafka` = (project in 
file("instrumentation/kamon-alpakka .enablePlugins(JavaAgent) .settings(instrumentationSettings) .settings( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`), + crossScalaVersions := Seq(`scala_2.13_version`), libraryDependencies ++= Seq( kanelaAgent % "provided", "com.typesafe.akka" %% "akka-stream-kafka" % "2.1.1" % "provided", @@ -790,7 +779,7 @@ lazy val `kamon-http4s-0_23` = (project in file("instrumentation/kamon-http4s-0. .settings( name := "kamon-http4s-0.23", scalacOptions ++= { if (scalaBinaryVersion.value == "2.12") Seq("-Ypartial-unification") else Seq.empty }, - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, `scala_3_version`), + crossScalaVersions := Seq(`scala_2.13_version`, `scala_3_version`), libraryDependencies ++= Seq( "org.http4s" %% "http4s-client" % "0.23.19" % Provided, "org.http4s" %% "http4s-server" % "0.23.19" % Provided, @@ -1002,7 +991,6 @@ lazy val bundle = (project in file("bundle")) .settings(crossScalaVersions := Nil) .aggregate( `kamon-bundle`, - `kamon-bundle_2_11`, `kamon-bundle-3`, `kamon-runtime-attacher` ) @@ -1104,13 +1092,12 @@ lazy val `kamon-bundle-dependencies-all` = (project in file("bundle/kamon-bundle * Add a reference here to all the project dependencies that can be built * from 2.12. Currently only Scala 2.12 and 2.13. */ -lazy val `kamon-bundle-dependencies-2-12-and-up` = (project in file("bundle/kamon-bundle-dependencies-2-12-and-up")) +lazy val `kamon-bundle-dependencies-2-13` = (project in file("bundle/kamon-bundle-dependencies-2-12-and-up")) .disablePlugins(AssemblyPlugin) .settings(noPublishing: _*) .settings(ideSkipProject: _*) .settings( crossScalaVersions := Seq( - `scala_2.12_version`, `scala_2.13_version` ) ) @@ -1173,13 +1160,12 @@ lazy val `kamon-bundle` = (project in file("bundle/kamon-bundle")) .settings(ideSkipProject: _*) .settings( crossScalaVersions := Seq( - `scala_2.12_version`, `scala_2.13_version` ) ) .dependsOn( `kamon-core`, - `kamon-bundle-dependencies-2-12-and-up` % "shaded" + `kamon-bundle-dependencies-2-13` % "shaded" ) lazy val `kamon-bundle-3` = (project in file("bundle/kamon-bundle-3")) @@ -1195,21 +1181,6 @@ lazy val `kamon-bundle-3` = (project in file("bundle/kamon-bundle-3")) `kamon-bundle-dependencies-3` % "shaded" ) -lazy val `kamon-bundle_2_11` = (project in file("bundle/kamon-bundle_2.11")) - .enablePlugins(AssemblyPlugin) - .settings(commonBundleSettings) - .settings(ideSkipProject: _*) - .settings( - scalaVersion := `scala_2.11_version`, - crossScalaVersions := Seq( - `scala_2.11_version` - ) - ) - .dependsOn( - `kamon-core`, - `kamon-bundle-dependencies-all` % "shaded" - ) - lazy val `bill-of-materials` = (project in file("bill-of-materials")) .enablePlugins(BillOfMaterialsPlugin) .settings(ideSkipProject: _*) diff --git a/bundle/kamon-runtime-attacher/src/main/scala/kamon/runtime/Attacher.scala b/bundle/kamon-runtime-attacher/src/main/scala/kamon/runtime/Attacher.scala index fbc9fe4c2..7b9992f4d 100644 --- a/bundle/kamon-runtime-attacher/src/main/scala/kamon/runtime/Attacher.scala +++ b/bundle/kamon-runtime-attacher/src/main/scala/kamon/runtime/Attacher.scala @@ -19,7 +19,7 @@ object Attacher { def attach(): Unit = { val springBootClassLoader = findSpringBootJarLauncherClassLoader() - if (isKanelaLoaded) { + if (isKanelaLoaded()) { // If Kanela has already been loaded and we are running on a Spring Boot application, we might need to reload // Kanela to ensure it will use the proper ClassLoader for loading the instrumentations. 
diff --git a/core/kamon-core/src/main/scala/kamon/status/InstrumentationStatus.scala b/core/kamon-core/src/main/scala/kamon/status/InstrumentationStatus.scala
index 6e10aa9b4..fe54ffa32 100644
--- a/core/kamon-core/src/main/scala/kamon/status/InstrumentationStatus.scala
+++ b/core/kamon-core/src/main/scala/kamon/status/InstrumentationStatus.scala
@@ -17,7 +17,6 @@
 package kamon
 package status
 
-import kamon.status.Status.Instrumentation.TypeError
 import org.slf4j.LoggerFactory
 import java.lang.{Boolean => JBoolean}
 import java.util.{List => JavaList, Map => JavaMap}
@@ -35,7 +34,7 @@ object InstrumentationStatus {
 
   private val _logger = LoggerFactory.getLogger("kamon.status.Status.Instrumentation")
   private val _kanelaLoadedPropertyName = "kanela.loaded"
-  private val _registryClassName = "kanela.agent.api.instrumentation.listener.InstrumentationRegistryListener"
+  private val _registryClassName = "kanela.agent.bootstrap.StatusApi"
 
   /**
    * Tries to fetch the current instrumentation information from Kanela and assemble a status instance. Since the
@@ -53,39 +52,32 @@ object InstrumentationStatus {
       val present = (registryClass != null) && kanelaLoaded
       val kanelaVersion = Class.forName("kanela.agent.util.BuildInfo", false, ClassLoader.getSystemClassLoader)
-        .getMethod("version")
-        .invoke(null)
+        .getField("version")
+        .get(null)
         .asInstanceOf[String]
 
-      val modules = registryClass.getMethod("shareModules")
+      val modules = registryClass.getMethod("shareModulesInfo")
         .invoke(null)
         .asInstanceOf[JavaList[JavaMap[String, String]]]
         .asScala
         .map(toModule)
 
-      val errors = registryClass.getMethod("shareErrors")
-        .invoke(null)
-        .asInstanceOf[JavaMap[String, JavaList[Throwable]]]
-        .asScala
-        .map(toTypeError)
-        .toSeq
-
-      Status.Instrumentation(present, Option(kanelaVersion), modules.toSeq, errors)
+      Status.Instrumentation(present, Option(kanelaVersion), modules.toSeq)
     } catch {
       case t: Throwable =>
         if (warnIfFailed) {
           t match {
-            case _: ClassNotFoundException if warnIfFailed =>
+            case _: ClassNotFoundException =>
               _logger.warn(
                 "Failed to load the instrumentation modules status because the Kanela agent is not available"
               )
 
-            case t: Throwable if warnIfFailed =>
+            case t: Throwable =>
               _logger.warn("Failed to load the instrumentation modules status", t)
           }
         }
 
-        Status.Instrumentation(false, None, Seq.empty, Seq.empty)
+        Status.Instrumentation(false, None, Seq.empty)
     }
   }
@@ -101,13 +93,4 @@ object InstrumentationStatus {
       JBoolean.parseBoolean(map.get("active"))
     )
   }
-
-  /**
-    * Transforms a pair of information into a type error instance. The convention of the first element being the type
-    * name is tied to Kanela's implementation
-    */
-  private def toTypeError(pair: (String, JavaList[Throwable])): TypeError = {
-    val (typeName, errors) = pair
-    TypeError(typeName, errors.asScala.toSeq)
-  }
 }
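Reduced to its essentials, the status lookup after this change goes through
kanela.agent.bootstrap.StatusApi, and BuildInfo exposes "version" as a static
field rather than a method. A sketch of the reflective access pattern, distilled
from the hunk above and not part of the patch itself:

    // Illustrative helper mirroring the reflective calls in InstrumentationStatus.
    object KanelaStatusSketch {
      private val systemCl = ClassLoader.getSystemClassLoader

      // BuildInfo.version is now a static field: getField/get replaces getMethod/invoke.
      def kanelaVersion(): String =
        Class.forName("kanela.agent.util.BuildInfo", false, systemCl)
          .getField("version")
          .get(null)
          .asInstanceOf[String]

      // Module information now comes from StatusApi.shareModulesInfo().
      def shareModulesInfo(): java.util.List[java.util.Map[String, String]] =
        Class.forName("kanela.agent.bootstrap.StatusApi", false, systemCl)
          .getMethod("shareModulesInfo")
          .invoke(null)
          .asInstanceOf[java.util.List[java.util.Map[String, String]]]
    }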
diff --git a/core/kamon-core/src/main/scala/kamon/status/Status.scala b/core/kamon-core/src/main/scala/kamon/status/Status.scala
index b6f031235..cdb3081bd 100644
--- a/core/kamon-core/src/main/scala/kamon/status/Status.scala
+++ b/core/kamon-core/src/main/scala/kamon/status/Status.scala
@@ -112,8 +112,7 @@ object Status {
   case class Instrumentation(
     present: Boolean,
     kanelaVersion: Option[String],
-    modules: Seq[Status.Instrumentation.ModuleInfo],
-    errors: Seq[Status.Instrumentation.TypeError]
+    modules: Seq[Status.Instrumentation.ModuleInfo]
   )
 
   object Instrumentation {
@@ -131,13 +130,5 @@ object Status {
       enabled: Boolean,
       active: Boolean
     )
-
-    /**
-      * Describes errors that might have occurred while transforming a target type.
-      */
-    case class TypeError(
-      targetType: String,
-      errors: Seq[Throwable]
-    )
   }
 }
diff --git a/core/kamon-status-page/src/main/scala/kamon/status/page/JsonMarshalling.scala b/core/kamon-status-page/src/main/scala/kamon/status/page/JsonMarshalling.scala
index 6a1088cc0..dfa1a2b0c 100644
--- a/core/kamon-status-page/src/main/scala/kamon/status/page/JsonMarshalling.scala
+++ b/core/kamon-status-page/src/main/scala/kamon/status/page/JsonMarshalling.scala
@@ -147,21 +147,6 @@ object JsonMarshalling {
 
       instrumentationObject
         .end() // end modules
-        .`object`("errors")
-
-      instance.errors.foreach { typeError =>
-        val errorsArray = instrumentationObject.array(typeError.targetType)
-        typeError.errors.foreach(t => {
-          errorsArray.`object`()
-            .value("message", t.getMessage)
-            .value("stacktrace", t.getStackTrace.mkString("", EOL, EOL))
-            .end()
-        })
-        errorsArray.end()
-      }
-
-      instrumentationObject
-        .end() // errors
         .end() // object
         .done()
     }
diff --git a/instrumentation/kamon-akka-grpc/src/main/resources/logback.xml b/instrumentation/kamon-akka-grpc/src/main/resources/logback.xml
new file mode 100644
index 000000000..9705bf12f
--- /dev/null
+++ b/instrumentation/kamon-akka-grpc/src/main/resources/logback.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<configuration>
+    <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
+        <encoder>
+            <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
+        </encoder>
+    </appender>
+
+    <root level="INFO">
+        <appender-ref ref="STDOUT" />
+    </root>
+</configuration>
diff --git a/instrumentation/kamon-akka-grpc/src/main/resources/reference.conf b/instrumentation/kamon-akka-grpc/src/main/resources/reference.conf
index 9f5880d6e..687bebb07 100644
--- a/instrumentation/kamon-akka-grpc/src/main/resources/reference.conf
+++ b/instrumentation/kamon-akka-grpc/src/main/resources/reference.conf
@@ -13,8 +13,8 @@ kanela.modules {
     ]
 
     within = [
-      "^akka.grpc.internal..*",
-      "^akka.grpc.javadsl.GrpcMarshalling$"
+      "akka.grpc.internal.",
+      "akka.grpc.javadsl.GrpcMarshalling$"
     ]
   }
 }
diff --git a/instrumentation/kamon-akka-http/src/main/resources/reference.conf b/instrumentation/kamon-akka-http/src/main/resources/reference.conf
index f4c6c54c9..8d327350f 100644
--- a/instrumentation/kamon-akka-http/src/main/resources/reference.conf
+++ b/instrumentation/kamon-akka-http/src/main/resources/reference.conf
@@ -244,8 +244,8 @@ kanela.modules {
     ]
 
     within = [
-      "akka.http.*",
-      "akka.grpc.internal.*",
+      "akka.http.",
+      "akka.grpc.internal.",
       "akka.stream.scaladsl.Flow",
       "akka.stream.scaladsl.FlowOps"
     ]
diff --git a/instrumentation/kamon-akka-http/src/main/scala-2.11/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala
b/instrumentation/kamon-akka-http/src/main/scala-2.11/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala deleted file mode 100644 index eec9f91b4..000000000 --- a/instrumentation/kamon-akka-http/src/main/scala-2.11/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala +++ /dev/null @@ -1,331 +0,0 @@ -package kamon.instrumentation.akka.http - -import java.util.concurrent.Callable -import akka.http.scaladsl.marshalling.{ToEntityMarshaller, ToResponseMarshallable, ToResponseMarshaller} -import akka.http.scaladsl.model.StatusCodes.Redirection -import akka.http.scaladsl.model.{HttpHeader, HttpRequest, HttpResponse, StatusCode, Uri} -import akka.http.scaladsl.server.PathMatcher.{Matched, Unmatched} -import akka.http.scaladsl.server.directives.{BasicDirectives, CompleteOrRecoverWithMagnet, OnSuccessMagnet} -import akka.http.scaladsl.server.directives.RouteDirectives.reject -import akka.http.scaladsl.server._ -import akka.http.scaladsl.server.util.Tupler -import akka.http.scaladsl.util.FastFuture -import kamon.Kamon -import kamon.instrumentation.akka.http.HasMatchingContext.PathMatchingContext -import kamon.instrumentation.context.{HasContext, InvokeWithCapturedContext} -import kanela.agent.api.instrumentation.InstrumentationBuilder -import kanela.agent.api.instrumentation.mixin.Initializer -import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation._ - -import scala.concurrent.{ExecutionContext, Future, Promise} -import scala.util.control.NonFatal -import scala.util.{Failure, Success, Try} -import java.util.regex.Pattern -import akka.NotUsed -import akka.http.scaladsl.server.RouteResult.Rejected -import akka.stream.scaladsl.Flow -import kamon.context.Context -import kanela.agent.libs.net.bytebuddy.matcher.ElementMatchers.isPublic - -import scala.collection.immutable - -class AkkaHttpServerInstrumentation extends InstrumentationBuilder { - - /** - * When instrumenting bindAndHandle what we do is wrap the Flow[HttpRequest, HttpResponse, NotUsed] provided by - * the user and add all the processing there. This is the part of the instrumentation that performs Context - * propagation, tracing and gather metrics using the HttpServerInstrumentation packed in common. - * - * One important point about the HTTP Server instrumentation is that because it is almost impossible to have a proper - * operation name before the request processing hits the routing tree, we are delaying the sampling decision to the - * point at which we have some operation name. - */ - onType("akka.http.scaladsl.HttpExt") - .advise(method("bindAndHandle"), classOf[HttpExtBindAndHandleAdvice]) - - /** - * For the HTTP/2 instrumentation, since the parts where we can capture the interface/port and the actual flow - * creation happen at different times we are wrapping the handler with the interface/port data and reading that - * information when turning the handler function into a flow and wrapping it the same way we would for HTTP/1. - */ - onType("akka.http.scaladsl.Http2Ext") - .advise(method("bindAndHandleAsync") and isPublic(), classOf[Http2ExtBindAndHandleAdvice]) - - onType("akka.http.impl.engine.http2.Http2Blueprint$") - .intercept(method("handleWithStreamIdHeader"), Http2BlueprintInterceptor) - - /** - * The rest of these sections are just about making sure that we can generate an appropriate operation name (i.e. free - * of variables) and take a Sampling Decision in case none has been taken so far. 
- */ - onType("akka.http.scaladsl.server.RequestContextImpl") - .mixin(classOf[HasMatchingContext.Mixin]) - .intercept(method("copy"), RequestContextCopyInterceptor) - - onType("akka.http.scaladsl.server.directives.PathDirectives$class") - .intercept(method("rawPathPrefix"), classOf[PathDirectivesRawPathPrefixInterceptor]) - - onType("akka.http.scaladsl.server.directives.FutureDirectives$class") - .intercept(method("onComplete"), classOf[ResolveOperationNameOnRouteInterceptor]) - - onTypes( - "akka.http.scaladsl.server.directives.OnSuccessMagnet$", - "akka.http.scaladsl.server.directives.CompleteOrRecoverWithMagnet$" - ) - .intercept(method("apply"), classOf[ResolveOperationNameOnRouteInterceptor]) - - onType("akka.http.scaladsl.server.directives.RouteDirectives$class") - .intercept(method("complete"), classOf[ResolveOperationNameOnRouteInterceptor]) - .intercept(method("redirect"), classOf[ResolveOperationNameOnRouteInterceptor]) - .intercept(method("failWith"), classOf[ResolveOperationNameOnRouteInterceptor]) - - /** - * Support for HTTP/1 and HTTP/2 at the same time. - */ - - onType("akka.stream.scaladsl.Flow") - .advise(method("mapAsync"), classOf[FlowOpsMapAsyncAdvice]) - -} - -trait HasMatchingContext { - def defaultOperationName: String - def matchingContext: Seq[PathMatchingContext] - def setMatchingContext(ctx: Seq[PathMatchingContext]): Unit - def setDefaultOperationName(defaultOperationName: String): Unit - def prependMatchingContext(matched: PathMatchingContext): Unit - def popOneMatchingContext(): Unit -} - -object HasMatchingContext { - - case class PathMatchingContext( - fullPath: String, - matched: Matched[_] - ) - - class Mixin(var matchingContext: Seq[PathMatchingContext], var defaultOperationName: String) - extends HasMatchingContext { - - override def setMatchingContext(matchingContext: Seq[PathMatchingContext]): Unit = - this.matchingContext = matchingContext - - override def setDefaultOperationName(defaultOperationName: String): Unit = - this.defaultOperationName = defaultOperationName - - override def prependMatchingContext(matched: PathMatchingContext): Unit = - matchingContext = matched +: matchingContext - - override def popOneMatchingContext(): Unit = - matchingContext = matchingContext.tail - - @Initializer - def initialize(): Unit = - matchingContext = Seq.empty - } -} - -class ResolveOperationNameOnRouteInterceptor -object ResolveOperationNameOnRouteInterceptor { - import akka.http.scaladsl.util.FastFuture._ - - // We are replacing some of the basic directives here to ensure that we will resolve both the Sampling Decision and - // the operation name before the request gets to the actual handling code (presumably inside of a "complete" - // directive. 
- - def complete(@Argument(1) m: => ToResponseMarshallable): StandardRoute = - StandardRoute(resolveOperationName(_).complete(m)) - - def complete[T](status: StatusCode, v: => T)(implicit m: ToEntityMarshaller[T]): StandardRoute = - StandardRoute(resolveOperationName(_).complete((status, v))) - - def complete[T](status: StatusCode, headers: immutable.Seq[HttpHeader], v: => T)(implicit - m: ToEntityMarshaller[T] - ): StandardRoute = - complete((status, headers, v)) - - def redirect(@Argument(1) uri: Uri, @Argument(2) redirectionType: Redirection): StandardRoute = - StandardRoute(resolveOperationName(_).redirect(uri, redirectionType)) - - def failWith(@Argument(1) error: Throwable): StandardRoute = { - Kamon.currentSpan().fail(error) - StandardRoute(resolveOperationName(_).fail(error)) - } - - def onComplete[T](@Argument(1) future: => Future[T]): Directive1[Try[T]] = - Directive { inner => ctx => - import ctx.executionContext - resolveOperationName(ctx) - future.fast.transformWith(t => inner(Tuple1(t))(ctx)) - } - - def apply[T](future: => Future[T])(implicit tupler: Tupler[T]): OnSuccessMagnet { type Out = tupler.Out } = - new OnSuccessMagnet { - type Out = tupler.Out - val directive = Directive[tupler.Out] { inner => ctx => - import ctx.executionContext - resolveOperationName(ctx) - future.fast.flatMap(t => inner(tupler(t))(ctx)) - }(tupler.OutIsTuple) - } - - def apply[T](future: => Future[T])(implicit m: ToResponseMarshaller[T]): CompleteOrRecoverWithMagnet = - new CompleteOrRecoverWithMagnet { - val directive = Directive[Tuple1[Throwable]] { inner => ctx => - import ctx.executionContext - resolveOperationName(ctx) - future.fast.transformWith { - case Success(res) => ctx.complete(res) - case Failure(error) => inner(Tuple1(error))(ctx) - } - } - } - - private def resolveOperationName(requestContext: RequestContext): RequestContext = { - - // We will only change the operation name if the last edit made to it was an automatic one. At this point, the only - // way in which the operation name might have changed is if the user changed it with the operationName directive or - // by accessing the Span and changing it directly there, so we wouldn't want to overwrite that. 
- - Kamon.currentContext().get(LastAutomaticOperationNameEdit.Key).foreach(lastEdit => { - val currentSpan = Kamon.currentSpan() - - if (lastEdit.allowAutomaticChanges) { - if (currentSpan.operationName() == lastEdit.operationName) { - val allMatches = requestContext.asInstanceOf[HasMatchingContext].matchingContext.reverse.map(singleMatch) - val operationName = allMatches.mkString("") - - if (operationName.nonEmpty) { - currentSpan - .name(operationName) - .takeSamplingDecision() - - lastEdit.operationName = operationName - } - } else { - lastEdit.allowAutomaticChanges = false - } - } else { - currentSpan.takeSamplingDecision() - } - }) - - requestContext - } - - private def singleMatch(matching: PathMatchingContext): String = { - val rest = matching.matched.pathRest.toString() - val consumedCount = matching.fullPath.length - rest.length - val consumedSegment = matching.fullPath.substring(0, consumedCount) - - matching.matched.extractions match { - case () => // string segment matched - consumedSegment - case tuple: Product => - val values = tuple.productIterator.toList map { - case Some(x) => List(x.toString) - case None => Nil - case long: Long => List(long.toString, long.toHexString) - case int: Int => List(int.toString, int.toHexString) - case a: Any => List(a.toString) - } - values.flatten.fold(consumedSegment) { (full, value) => - val r = "(?i)(^|/)" + Pattern.quote(value) + "($|/)" - full.replaceFirst(r, "$1{}$2") - } - } - } -} - -/** - * Tracks the last operation name that was automatically assigned to an operation via instrumentation. The - * instrumentation might assign a name to the operations via settings on the HTTP Server instrumentation instance or - * via the Path directives instrumentation, but might never reassign a name if the user somehow assigned their own name - * to the operation. Users chan change operation names by: - * - Using operation mappings via configuration of the HTTP Server. - * - Providing a custom HTTP Operation Name Generator for the server. - * - Using the "operationName" directive. - * - Directly accessing the Span for the current operation and changing the name on it. 
- * - */ -class LastAutomaticOperationNameEdit( - @volatile var operationName: String, - @volatile var allowAutomaticChanges: Boolean -) - -object LastAutomaticOperationNameEdit { - val Key = Context.key[Option[LastAutomaticOperationNameEdit]]("laone", None) - - def apply(operationName: String, allowAutomaticChanges: Boolean): LastAutomaticOperationNameEdit = - new LastAutomaticOperationNameEdit(operationName, allowAutomaticChanges) -} - -object RequestContextCopyInterceptor { - - @RuntimeType - def copy(@This context: RequestContext, @SuperCall copyCall: Callable[RequestContext]): RequestContext = { - val copiedRequestContext = copyCall.call() - copiedRequestContext.asInstanceOf[HasMatchingContext].setMatchingContext( - context.asInstanceOf[HasMatchingContext].matchingContext - ) - copiedRequestContext - } -} - -class PathDirectivesRawPathPrefixInterceptor -object PathDirectivesRawPathPrefixInterceptor { - import BasicDirectives._ - - @RuntimeType - def rawPathPrefix[T](@Argument(1) matcher: PathMatcher[T]): Directive[T] = { - implicit val LIsTuple = matcher.ev - - extract { ctx => - val fullPath = ctx.unmatchedPath.toString() - val matching = matcher(ctx.unmatchedPath) - - matching match { - case m: Matched[_] => - ctx.asInstanceOf[HasMatchingContext] - .prependMatchingContext(PathMatchingContext(fullPath, m)) - case _ => - } - - (ctx, matching) - } flatMap { - case (ctx, Matched(rest, values)) => - tprovide(values) & mapRequestContext(_ withUnmatchedPath rest) & mapRouteResult { routeResult => - if (routeResult.isInstanceOf[Rejected]) - ctx.asInstanceOf[HasMatchingContext].popOneMatchingContext() - - routeResult - } - - case (_, Unmatched) => reject - } - } -} - -object Http2BlueprintInterceptor { - - case class HandlerWithEndpoint(interface: String, port: Int, handler: HttpRequest => Future[HttpResponse]) - extends (HttpRequest => Future[HttpResponse]) { - - override def apply(request: HttpRequest): Future[HttpResponse] = handler(request) - } - - @RuntimeType - def handleWithStreamIdHeader( - @Argument(1) handler: HttpRequest => Future[HttpResponse], - @SuperCall zuper: Callable[Flow[HttpRequest, HttpResponse, NotUsed]] - ): Flow[HttpRequest, HttpResponse, NotUsed] = { - - handler match { - case HandlerWithEndpoint(interface, port, _) => - ServerFlowWrapper(zuper.call(), interface, port) - - case _ => - zuper.call() - } - } -} diff --git a/instrumentation/kamon-akka-http/src/main/scala-2.12/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala b/instrumentation/kamon-akka-http/src/main/scala-2.12/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala deleted file mode 100644 index 2a48e71fb..000000000 --- a/instrumentation/kamon-akka-http/src/main/scala-2.12/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala +++ /dev/null @@ -1,355 +0,0 @@ -/* - * Copyright 2013-2021 The Kamon Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package kamon.instrumentation.akka.http - -import java.util.concurrent.Callable -import akka.http.scaladsl.marshalling.{ToEntityMarshaller, ToResponseMarshallable, ToResponseMarshaller} -import akka.http.scaladsl.model.StatusCodes.Redirection -import akka.http.scaladsl.model.{HttpHeader, HttpRequest, HttpResponse, StatusCode, Uri} -import akka.http.scaladsl.server.PathMatcher.{Matched, Unmatched} -import akka.http.scaladsl.server.directives.{BasicDirectives, CompleteOrRecoverWithMagnet, OnSuccessMagnet} -import akka.http.scaladsl.server.directives.RouteDirectives.reject -import akka.http.scaladsl.server._ -import akka.http.scaladsl.server.util.Tupler -import akka.http.scaladsl.util.FastFuture -import kamon.Kamon -import kamon.instrumentation.akka.http.HasMatchingContext.PathMatchingContext -import kamon.instrumentation.context.{HasContext, InvokeWithCapturedContext} -import kanela.agent.api.instrumentation.InstrumentationBuilder -import kanela.agent.api.instrumentation.mixin.Initializer -import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation._ - -import scala.concurrent.{ExecutionContext, Future, Promise} -import scala.util.control.NonFatal -import scala.util.{Failure, Success, Try} -import java.util.regex.Pattern -import akka.NotUsed -import akka.http.scaladsl.server.RouteResult.Rejected -import akka.stream.scaladsl.Flow -import kamon.context.Context -import kanela.agent.libs.net.bytebuddy.matcher.ElementMatchers.isPublic - -import scala.collection.immutable - -class AkkaHttpServerInstrumentation extends InstrumentationBuilder { - - /** - * When instrumenting bindAndHandle what we do is wrap the Flow[HttpRequest, HttpResponse, NotUsed] provided by - * the user and add all the processing there. This is the part of the instrumentation that performs Context - * propagation, tracing and gather metrics using the HttpServerInstrumentation packed in common. - * - * One important point about the HTTP Server instrumentation is that because it is almost impossible to have a proper - * operation name before the request processing hits the routing tree, we are delaying the sampling decision to the - * point at which we have some operation name. - */ - - onType("akka.http.scaladsl.HttpExt") - .advise(method("bindAndHandle"), classOf[HttpExtBindAndHandleAdvice]) - - /** - * For the HTTP/2 instrumentation, since the parts where we can capture the interface/port and the actual flow - * creation happen at different times we are wrapping the handler with the interface/port data and reading that - * information when turning the handler function into a flow and wrapping it the same way we would for HTTP/1. - */ - - onType("akka.http.impl.engine.http2.Http2Ext") - .advise(method("bindAndHandleAsync") and isPublic(), classOf[Http2ExtBindAndHandleAdvice]) - - onType("akka.http.impl.engine.http2.Http2Blueprint$") - .intercept(method("handleWithStreamIdHeader"), Http2BlueprintInterceptor) - - /** - * The rest of these sections are just about making sure that we can generate an appropriate operation name (i.e. free - * of variables) and take a Sampling Decision in case none has been taken so far. 
- */ - onType("akka.http.scaladsl.server.RequestContextImpl") - .mixin(classOf[HasMatchingContext.Mixin]) - .intercept(method("copy"), RequestContextCopyInterceptor) - - onType("akka.http.scaladsl.server.directives.PathDirectives") - .intercept(method("rawPathPrefix"), classOf[PathDirectivesRawPathPrefixInterceptor]) - - onType("akka.http.scaladsl.server.directives.FutureDirectives") - .intercept(method("onComplete"), classOf[ResolveOperationNameOnRouteInterceptor]) - - onTypes( - "akka.http.scaladsl.server.directives.OnSuccessMagnet$", - "akka.http.scaladsl.server.directives.CompleteOrRecoverWithMagnet$" - ) - .intercept(method("apply"), classOf[ResolveOperationNameOnRouteInterceptor]) - - onType("akka.http.scaladsl.server.directives.RouteDirectives") - .intercept(method("complete"), classOf[ResolveOperationNameOnRouteInterceptor]) - .intercept(method("redirect"), classOf[ResolveOperationNameOnRouteInterceptor]) - .intercept(method("failWith"), classOf[ResolveOperationNameOnRouteInterceptor]) - - /** - * Akka-http 10.1.x compatibility. - */ - - onType("akka.http.scaladsl.Http2Ext") - .advise(method("bindAndHandleAsync") and isPublic(), classOf[Http2ExtBindAndHandleAdvice]) - - /** - * Support for HTTP/1 and HTTP/2 at the same time. - * - */ - - onType("akka.stream.scaladsl.FlowOps") - .advise(method("mapAsync"), classOf[FlowOpsMapAsyncAdvice]) -} - -trait HasMatchingContext { - def defaultOperationName: String - def matchingContext: Seq[PathMatchingContext] - def setMatchingContext(ctx: Seq[PathMatchingContext]): Unit - def setDefaultOperationName(defaultOperationName: String): Unit - def prependMatchingContext(matched: PathMatchingContext): Unit - def popOneMatchingContext(): Unit -} - -object HasMatchingContext { - - case class PathMatchingContext( - fullPath: String, - matched: Matched[_] - ) - - class Mixin(var matchingContext: Seq[PathMatchingContext], var defaultOperationName: String) - extends HasMatchingContext { - - override def setMatchingContext(matchingContext: Seq[PathMatchingContext]): Unit = - this.matchingContext = matchingContext - - override def setDefaultOperationName(defaultOperationName: String): Unit = - this.defaultOperationName = defaultOperationName - - override def prependMatchingContext(matched: PathMatchingContext): Unit = - matchingContext = matched +: matchingContext - - override def popOneMatchingContext(): Unit = - matchingContext = matchingContext.tail - - @Initializer - def initialize(): Unit = - matchingContext = Seq.empty - } -} - -class ResolveOperationNameOnRouteInterceptor -object ResolveOperationNameOnRouteInterceptor { - import akka.http.scaladsl.util.FastFuture._ - - // We are replacing some of the basic directives here to ensure that we will resolve both the Sampling Decision and - // the operation name before the request gets to the actual handling code (presumably inside of a "complete" - // directive. 
- - def complete(m: => ToResponseMarshallable): StandardRoute = - StandardRoute(resolveOperationName(_).complete(m)) - - def complete[T](status: StatusCode, v: => T)(implicit m: ToEntityMarshaller[T]): StandardRoute = - StandardRoute(resolveOperationName(_).complete((status, v))) - - def complete[T](status: StatusCode, headers: immutable.Seq[HttpHeader], v: => T)(implicit - m: ToEntityMarshaller[T] - ): StandardRoute = - complete((status, headers, v)) - - def redirect(uri: Uri, redirectionType: Redirection): StandardRoute = - StandardRoute(resolveOperationName(_).redirect(uri, redirectionType)) - - def failWith(error: Throwable): StandardRoute = { - Kamon.currentSpan().fail(error) - StandardRoute(resolveOperationName(_).fail(error)) - } - - def onComplete[T](future: => Future[T]): Directive1[Try[T]] = - Directive { inner => ctx => - import ctx.executionContext - resolveOperationName(ctx) - future.fast.transformWith(t => inner(Tuple1(t))(ctx)) - } - - def apply[T](future: => Future[T])(implicit tupler: Tupler[T]): OnSuccessMagnet { type Out = tupler.Out } = - new OnSuccessMagnet { - type Out = tupler.Out - val directive = Directive[tupler.Out] { inner => ctx => - import ctx.executionContext - resolveOperationName(ctx) - future.fast.flatMap(t => inner(tupler(t))(ctx)) - }(tupler.OutIsTuple) - } - - def apply[T](future: => Future[T])(implicit m: ToResponseMarshaller[T]): CompleteOrRecoverWithMagnet = - new CompleteOrRecoverWithMagnet { - val directive = Directive[Tuple1[Throwable]] { inner => ctx => - import ctx.executionContext - resolveOperationName(ctx) - future.fast.transformWith { - case Success(res) => ctx.complete(res) - case Failure(error) => inner(Tuple1(error))(ctx) - } - } - } - - private def resolveOperationName(requestContext: RequestContext): RequestContext = { - - // We will only change the operation name if the last edit made to it was an automatic one. At this point, the only - // way in which the operation name might have changed is if the user changed it with the operationName directive or - // by accessing the Span and changing it directly there, so we wouldn't want to overwrite that. 
- - Kamon.currentContext().get(LastAutomaticOperationNameEdit.Key).foreach(lastEdit => { - val currentSpan = Kamon.currentSpan() - - if (lastEdit.allowAutomaticChanges) { - if (currentSpan.operationName() == lastEdit.operationName) { - val allMatches = requestContext.asInstanceOf[HasMatchingContext].matchingContext.reverse.map(singleMatch) - val operationName = allMatches.mkString("") - - if (operationName.nonEmpty) { - currentSpan - .name(operationName) - .takeSamplingDecision() - - lastEdit.operationName = operationName - } - } else { - lastEdit.allowAutomaticChanges = false - } - } else { - currentSpan.takeSamplingDecision() - } - }) - - requestContext - } - - private def singleMatch(matching: PathMatchingContext): String = { - val rest = matching.matched.pathRest.toString() - val consumedCount = matching.fullPath.length - rest.length - val consumedSegment = matching.fullPath.substring(0, consumedCount) - - matching.matched.extractions match { - case () => // string segment matched - consumedSegment - case tuple: Product => - val values = tuple.productIterator.toList map { - case Some(x) => List(x.toString) - case None => Nil - case long: Long => List(long.toString, long.toHexString) - case int: Int => List(int.toString, int.toHexString) - case a: Any => List(a.toString) - } - values.flatten.fold(consumedSegment) { (full, value) => - val r = "(?i)(^|/)" + Pattern.quote(value) + "($|/)" - full.replaceFirst(r, "$1{}$2") - } - } - } -} - -/** - * Tracks the last operation name that was automatically assigned to an operation via instrumentation. The - * instrumentation might assign a name to the operations via settings on the HTTP Server instrumentation instance or - * via the Path directives instrumentation, but might never reassign a name if the user somehow assigned their own name - * to the operation. Users chan change operation names by: - * - Using operation mappings via configuration of the HTTP Server. - * - Providing a custom HTTP Operation Name Generator for the server. - * - Using the "operationName" directive. - * - Directly accessing the Span for the current operation and changing the name on it. 
- * - */ -class LastAutomaticOperationNameEdit( - @volatile var operationName: String, - @volatile var allowAutomaticChanges: Boolean -) - -object LastAutomaticOperationNameEdit { - val Key = Context.key[Option[LastAutomaticOperationNameEdit]]("laone", None) - - def apply(operationName: String, allowAutomaticChanges: Boolean): LastAutomaticOperationNameEdit = - new LastAutomaticOperationNameEdit(operationName, allowAutomaticChanges) -} - -object RequestContextCopyInterceptor { - - @RuntimeType - def copy(@This context: RequestContext, @SuperCall copyCall: Callable[RequestContext]): RequestContext = { - val copiedRequestContext = copyCall.call() - copiedRequestContext.asInstanceOf[HasMatchingContext].setMatchingContext( - context.asInstanceOf[HasMatchingContext].matchingContext - ) - copiedRequestContext - } -} - -class PathDirectivesRawPathPrefixInterceptor -object PathDirectivesRawPathPrefixInterceptor { - import BasicDirectives._ - - def rawPathPrefix[T](@Argument(0) matcher: PathMatcher[T]): Directive[T] = { - implicit val LIsTuple = matcher.ev - - extract { ctx => - val fullPath = ctx.unmatchedPath.toString() - val matching = matcher(ctx.unmatchedPath) - - matching match { - case m: Matched[_] => - ctx.asInstanceOf[HasMatchingContext] - .prependMatchingContext(PathMatchingContext(fullPath, m)) - case _ => - } - - (ctx, matching) - } flatMap { - case (ctx, Matched(rest, values)) => - tprovide(values) & mapRequestContext(_ withUnmatchedPath rest) & mapRouteResult { routeResult => - if (routeResult.isInstanceOf[Rejected]) - ctx.asInstanceOf[HasMatchingContext].popOneMatchingContext() - - routeResult - } - - case (_, Unmatched) => reject - } - } -} - -object Http2BlueprintInterceptor { - - case class HandlerWithEndpoint(interface: String, port: Int, handler: HttpRequest => Future[HttpResponse]) - extends (HttpRequest => Future[HttpResponse]) { - - override def apply(request: HttpRequest): Future[HttpResponse] = handler(request) - } - - @RuntimeType - def handleWithStreamIdHeader( - @Argument(1) handler: HttpRequest => Future[HttpResponse], - @SuperCall zuper: Callable[Flow[HttpRequest, HttpResponse, NotUsed]] - ): Flow[HttpRequest, HttpResponse, NotUsed] = { - - handler match { - case HandlerWithEndpoint(interface, port, _) => - ServerFlowWrapper(zuper.call(), interface, port) - - case _ => - zuper.call() - } - } -} diff --git a/instrumentation/kamon-akka-http/src/main/scala-2.13+/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala b/instrumentation/kamon-akka-http/src/main/scala/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala similarity index 98% rename from instrumentation/kamon-akka-http/src/main/scala-2.13+/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala rename to instrumentation/kamon-akka-http/src/main/scala/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala index 320d8dda1..47292ca86 100644 --- a/instrumentation/kamon-akka-http/src/main/scala-2.13+/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala +++ b/instrumentation/kamon-akka-http/src/main/scala/kamon/instrumentation/akka/http/AkkaHttpServerInstrumentation.scala @@ -54,7 +54,7 @@ class AkkaHttpServerInstrumentation extends InstrumentationBuilder { .advise(method("bindAndHandleAsync") and isPublic(), classOf[Http2ExtBindAndHandleAdvice]) onType("akka.http.impl.engine.http2.Http2Blueprint$") - .intercept(method("handleWithStreamIdHeader"), classOf[Http2BlueprintInterceptor]) + .intercept(method("handleWithStreamIdHeader"), 
Http2BlueprintInterceptor) /** * The rest of these sections are just about making sure that we can generate an appropriate operation name (i.e. free @@ -94,7 +94,7 @@ class AkkaHttpServerInstrumentation extends InstrumentationBuilder { */ onType("akka.stream.scaladsl.FlowOps") - .advise(method("mapAsync"), classOf[FlowOpsMapAsyncAdvice]) + .advise(method("mapAsync"), classOf[kamon.instrumentation.akka.http.FlowOpsMapAsyncAdvice]) } trait HasMatchingContext { @@ -315,7 +315,6 @@ object PathDirectivesRawPathPrefixInterceptor { } } -class Http2BlueprintInterceptor object Http2BlueprintInterceptor { case class HandlerWithEndpoint(interface: String, port: Int, handler: HttpRequest => Future[HttpResponse]) @@ -325,7 +324,7 @@ object Http2BlueprintInterceptor { } @RuntimeType - @static def handleWithStreamIdHeader( + def handleWithStreamIdHeader( @Argument(1) handler: HttpRequest => Future[HttpResponse], @SuperCall zuper: Callable[Flow[HttpRequest, HttpResponse, NotUsed]] ): Flow[HttpRequest, HttpResponse, NotUsed] = { diff --git a/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerTracingSpec.scala b/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerTracingSpec.scala index 7bad768c5..6cca3f121 100644 --- a/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerTracingSpec.scala +++ b/instrumentation/kamon-akka-http/src/test/scala/kamon/akka/http/AkkaHttpServerTracingSpec.scala @@ -231,12 +231,9 @@ class AkkaHttpServerTracingSpec extends AnyWordSpecLike with Matchers with Scala "correctly time entity transfer timings" in { val target = s"$protocol://$interface:$port/$stream" - def probablyScala3 = util.Properties.releaseVersion.contains("2.13.10") - def makeCall = client.newCall(new Request.Builder().url(target).build()).execute() // akka 2.7.0 is flaky on this - if (probablyScala3) Try(makeCall).orElse(Try(makeCall)) - else makeCall + Try(makeCall).orElse(Try(makeCall)) val span = eventually(timeout(10 seconds)) { val span = testSpanReporter().nextSpan().value diff --git a/instrumentation/kamon-akka/build.sbt b/instrumentation/kamon-akka/build.sbt index b29ec8c84..f6cffe9db 100644 --- a/instrumentation/kamon-akka/build.sbt +++ b/instrumentation/kamon-akka/build.sbt @@ -1,107 +1,69 @@ import sbt.Tests.{Group, SubProcess} import Def.Initialize -val `Akka-2.4-version` = "2.4.20" -val `Akka-2.5-version` = "2.5.32" val `Akka-2.6-version` = "2.6.21" /** * Compile Configurations */ lazy val Common = config("common") -lazy val `Compile-Akka-2.5` = config("akka-2.5") lazy val `Compile-Akka-2.6` = config("akka-2.6") /** * Test Configurations */ lazy val TestCommon = config("test-common") extend (Common) -lazy val `Test-Akka-2.5` = config("test-akka-2.5") extend (`Compile-Akka-2.5`) lazy val `Test-Akka-2.6` = config("test-akka-2.6") extend (`Compile-Akka-2.6`) configs( Common, - `Compile-Akka-2.5`, `Compile-Akka-2.6`, TestCommon, - `Test-Akka-2.5`, `Test-Akka-2.6` ) // The Common configuration should always depend on the latest version of Akka. All code in the Common configuration // should be source compatible with all Akka versions. 
inConfig(Common)(Defaults.compileSettings ++ Seq( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version) + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version) )) -libraryDependencies ++= { - if (scalaBinaryVersion.value == "2.11") Seq.empty - else Seq( - kanelaAgent % Common, - scalatest % TestCommon, - logbackClassic % TestCommon, - "com.typesafe.akka" %% "akka-actor" % `Akka-2.6-version` % Common, - "com.typesafe.akka" %% "akka-testkit" % `Akka-2.6-version` % Common, - "com.typesafe.akka" %% "akka-slf4j" % `Akka-2.6-version` % Common, - "com.typesafe.akka" %% "akka-remote" % `Akka-2.6-version` % Common, - "com.typesafe.akka" %% "akka-cluster" % `Akka-2.6-version` % Common, - "com.typesafe.akka" %% "akka-cluster-sharding" % `Akka-2.6-version` % Common, - "com.typesafe.akka" %% "akka-protobuf" % `Akka-2.6-version` % Common, - "com.typesafe.akka" %% "akka-testkit" % `Akka-2.6-version` % TestCommon - ) -} +libraryDependencies ++= Seq( + kanelaAgent % Common, + scalatest % TestCommon, + logbackClassic % TestCommon, + "com.typesafe.akka" %% "akka-actor" % `Akka-2.6-version` % Common, + "com.typesafe.akka" %% "akka-testkit" % `Akka-2.6-version` % Common, + "com.typesafe.akka" %% "akka-slf4j" % `Akka-2.6-version` % Common, + "com.typesafe.akka" %% "akka-remote" % `Akka-2.6-version` % Common, + "com.typesafe.akka" %% "akka-cluster" % `Akka-2.6-version` % Common, + "com.typesafe.akka" %% "akka-cluster-sharding" % `Akka-2.6-version` % Common, + "com.typesafe.akka" %% "akka-protobuf" % `Akka-2.6-version` % Common, + "com.typesafe.akka" %% "akka-testkit" % `Akka-2.6-version` % TestCommon +) inConfig(`Compile-Akka-2.6`)(Defaults.compileSettings ++ Seq( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version), + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version), sources := joinSources(Common, `Compile-Akka-2.6`).value )) -libraryDependencies ++= { - if (scalaBinaryVersion.value == "2.11") Seq.empty - else Seq( - kanelaAgent % `Compile-Akka-2.6`, - scalatest % `Test-Akka-2.6`, - logbackClassic % `Test-Akka-2.6`, - "com.typesafe.akka" %% "akka-actor" % `Akka-2.6-version` % `Compile-Akka-2.6`, - "com.typesafe.akka" %% "akka-testkit" % `Akka-2.6-version` % `Compile-Akka-2.6`, - "com.typesafe.akka" %% "akka-slf4j" % `Akka-2.6-version` % `Compile-Akka-2.6`, - "com.typesafe.akka" %% "akka-remote" % `Akka-2.6-version` % `Compile-Akka-2.6`, - "com.typesafe.akka" %% "akka-cluster" % `Akka-2.6-version` % `Compile-Akka-2.6`, - "com.typesafe.akka" %% "akka-cluster-sharding" % `Akka-2.6-version` % `Compile-Akka-2.6`, - "com.typesafe.akka" %% "akka-protobuf" % `Akka-2.6-version` % `Compile-Akka-2.6`, - "com.typesafe.akka" %% "akka-testkit" % `Akka-2.6-version` % `Test-Akka-2.6` - ) -} - -inConfig(`Compile-Akka-2.5`)(Defaults.compileSettings ++ Seq( - sources := joinSources(Common, `Compile-Akka-2.5`).value -)) - -libraryDependencies ++= { - if (scalaVersion.value startsWith "3") Seq.empty - else Seq( - kanelaAgent % `Compile-Akka-2.5`, - scalatest % `Test-Akka-2.5`, - logbackClassic % `Test-Akka-2.5`, - "com.typesafe.akka" %% "akka-actor" % `Akka-2.5-version` % `Compile-Akka-2.5`, - "com.typesafe.akka" %% "akka-testkit" % `Akka-2.5-version` % `Compile-Akka-2.5`, - "com.typesafe.akka" %% "akka-slf4j" % `Akka-2.5-version` % `Compile-Akka-2.5`, - "com.typesafe.akka" %% "akka-remote" % `Akka-2.5-version` % `Compile-Akka-2.5`, - "com.typesafe.akka" %% "akka-cluster" % `Akka-2.5-version` % `Compile-Akka-2.5`, - "com.typesafe.akka" 
%% "akka-cluster-sharding" % `Akka-2.5-version` % `Compile-Akka-2.5`, - "com.typesafe.akka" %% "akka-protobuf" % `Akka-2.5-version` % `Compile-Akka-2.5`, - "com.typesafe.akka" %% "akka-testkit" % `Akka-2.5-version` % `Test-Akka-2.5` - ) -} +libraryDependencies ++= Seq( + kanelaAgent % `Compile-Akka-2.6`, + scalatest % `Test-Akka-2.6`, + logbackClassic % `Test-Akka-2.6`, + "com.typesafe.akka" %% "akka-actor" % `Akka-2.6-version` % `Compile-Akka-2.6`, + "com.typesafe.akka" %% "akka-testkit" % `Akka-2.6-version` % `Compile-Akka-2.6`, + "com.typesafe.akka" %% "akka-slf4j" % `Akka-2.6-version` % `Compile-Akka-2.6`, + "com.typesafe.akka" %% "akka-remote" % `Akka-2.6-version` % `Compile-Akka-2.6`, + "com.typesafe.akka" %% "akka-cluster" % `Akka-2.6-version` % `Compile-Akka-2.6`, + "com.typesafe.akka" %% "akka-cluster-sharding" % `Akka-2.6-version` % `Compile-Akka-2.6`, + "com.typesafe.akka" %% "akka-protobuf" % `Akka-2.6-version` % `Compile-Akka-2.6`, + "com.typesafe.akka" %% "akka-testkit" % `Akka-2.6-version` % `Test-Akka-2.6` +) // Ensure that the packaged artifact contains the instrumentation for all Akka versions. Compile / packageBin / mappings := Def.taskDyn { - if (scalaBinaryVersion.value == "2.11") { - Def.task { - joinProducts((`Compile-Akka-2.5` / products).value) ++ - joinProducts((Common / unmanagedResourceDirectories).value) - } - } else if (scalaVersion.value startsWith "3") { + if (scalaVersion.value startsWith "3") { Def.task { joinProducts((`Compile-Akka-2.6` / products).value) ++ joinProducts((Common / unmanagedResourceDirectories).value) @@ -109,7 +71,6 @@ Compile / packageBin / mappings := Def.taskDyn { } else { Def.task { joinProducts( - (`Compile-Akka-2.5` / products).value ++ (`Compile-Akka-2.6` / products).value ) ++ joinProducts((Common / unmanagedResourceDirectories).value) } @@ -118,19 +79,13 @@ Compile / packageBin / mappings := Def.taskDyn { // Ensure that the packaged sources contains the instrumentation for all Akka versions. Compile / packageSrc / mappings := Def.taskDyn { - if (scalaBinaryVersion.value == "2.11") { - Def.task { - (`Compile-Akka-2.5` / packageSrc / mappings).value ++ - (Common / packageSrc / mappings).value - } - } else if (scalaVersion.value startsWith "3") { + if (scalaVersion.value startsWith "3") { Def.task { (`Compile-Akka-2.6` / packageSrc / mappings).value ++ (Common / packageSrc / mappings).value } } else { Def.task { - (`Compile-Akka-2.5` / packageSrc / mappings).value ++ (`Compile-Akka-2.6` / packageSrc / mappings).value ++ (Common / packageSrc / mappings).value } @@ -139,18 +94,12 @@ Compile / packageSrc / mappings := Def.taskDyn { // Compile will return the compile analysis for the Common configuration but will run on all Akka configurations. 
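// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of the patch): the packaging,
// compile, and test tasks in this file all use the same Def.taskDyn shape:
// inspect scalaVersion first, then return the task that runs the matching
// configurations. Reduced to a minimal hypothetical example:
//
//   val demo = taskKey[Unit]("Shows taskDyn dispatch on the Scala version")
//   demo := Def.taskDyn {
//     if (scalaVersion.value.startsWith("3"))
//       Def.task { println("Scala 3: run only the Akka 2.6 configuration") }
//     else
//       Def.task { println("Scala 2.x: run every configuration") }
//   }.value
// ---------------------------------------------------------------------------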
Compile / compile := Def.taskDyn { - if (scalaBinaryVersion.value == "2.11") { - Def.task { - (`Compile-Akka-2.5` / compile).value - } - } else if (scalaVersion.value startsWith "3") { - + if (scalaVersion.value startsWith "3") { Def.task { (`Compile-Akka-2.6` / compile).value } } else { Def.task { - (`Compile-Akka-2.5` / compile).value (`Compile-Akka-2.6` / compile).value } } @@ -170,34 +119,23 @@ lazy val baseTestSettings = Seq( ) inConfig(TestCommon)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version) -)) - -inConfig(`Test-Akka-2.5`)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq( - sources := joinSources(TestCommon, `Test-Akka-2.5`).value, - unmanagedResourceDirectories ++= (Common / unmanagedResourceDirectories).value, - unmanagedResourceDirectories ++= (TestCommon / unmanagedResourceDirectories).value + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version) )) inConfig(`Test-Akka-2.6`)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version), + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version), sources := joinSources(TestCommon, `Test-Akka-2.6`).value, unmanagedResourceDirectories ++= (Common / unmanagedResourceDirectories).value, unmanagedResourceDirectories ++= (TestCommon / unmanagedResourceDirectories).value )) Test / test := Def.taskDyn { - if (scalaBinaryVersion.value == "2.11") { - Def.task { - (`Test-Akka-2.5` / test).value - } - } else if (scalaVersion.value startsWith "3") { + if (scalaVersion.value startsWith "3") { Def.task { (`Test-Akka-2.6` / test).value } } else { Def.task { - (`Test-Akka-2.5` / test).value (`Test-Akka-2.6` / test).value } } diff --git a/instrumentation/kamon-akka/src/akka-2.5/java/akka/remote/ContextAwareWireFormats.java b/instrumentation/kamon-akka/src/akka-2.5/java/akka/remote/ContextAwareWireFormats.java deleted file mode 100644 index 8a607c3fe..000000000 --- a/instrumentation/kamon-akka/src/akka-2.5/java/akka/remote/ContextAwareWireFormats.java +++ /dev/null @@ -1,2433 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! 
-// source: ContextAwareWireFormats.proto - -package akka.remote; - -public final class ContextAwareWireFormats { - private ContextAwareWireFormats() {} - public static void registerAllExtensions( - akka.protobuf.ExtensionRegistry registry) { - } - public interface AckAndTraceContextAwareEnvelopeContainerOrBuilder - extends akka.protobuf.MessageOrBuilder { - - // optional .AcknowledgementInfo ack = 1; - /** - * optional .AcknowledgementInfo ack = 1; - */ - boolean hasAck(); - /** - * optional .AcknowledgementInfo ack = 1; - */ - akka.remote.WireFormats.AcknowledgementInfo getAck(); - /** - * optional .AcknowledgementInfo ack = 1; - */ - akka.remote.WireFormats.AcknowledgementInfoOrBuilder getAckOrBuilder(); - - // optional .ContextAwareRemoteEnvelope envelope = 2; - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - boolean hasEnvelope(); - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - ContextAwareRemoteEnvelope getEnvelope(); - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - ContextAwareWireFormats.TraceContextAwareRemoteEnvelopeOrBuilder getEnvelopeOrBuilder(); - } - /** - * Protobuf type {@code AckAndContextAwareEnvelopeContainer} - */ - public static final class AckAndContextAwareEnvelopeContainer extends - akka.protobuf.GeneratedMessage - implements AckAndTraceContextAwareEnvelopeContainerOrBuilder { - // Use AckAndContextAwareEnvelopeContainer.newBuilder() to construct. - private AckAndContextAwareEnvelopeContainer(akka.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private AckAndContextAwareEnvelopeContainer(boolean noInit) { this.unknownFields = akka.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final AckAndContextAwareEnvelopeContainer defaultInstance; - public static AckAndContextAwareEnvelopeContainer getDefaultInstance() { - return defaultInstance; - } - - public AckAndContextAwareEnvelopeContainer getDefaultInstanceForType() { - return defaultInstance; - } - - private final akka.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final akka.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private AckAndContextAwareEnvelopeContainer( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - akka.protobuf.UnknownFieldSet.Builder unknownFields = - akka.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - akka.remote.WireFormats.AcknowledgementInfo.Builder subBuilder = null; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - subBuilder = ack_.toBuilder(); - } - ack_ = input.readMessage(akka.remote.WireFormats.AcknowledgementInfo.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(ack_); - ack_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000001; - break; - } - case 18: { - ContextAwareRemoteEnvelope.Builder subBuilder = null; - if (((bitField0_ & 0x00000002) == 0x00000002)) { - subBuilder = envelope_.toBuilder(); - } - envelope_ = input.readMessage(ContextAwareRemoteEnvelope.PARSER, extensionRegistry); - if (subBuilder != null) { - 
subBuilder.mergeFrom(envelope_); - envelope_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000002; - break; - } - } - } - } catch (akka.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new akka.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final akka.protobuf.Descriptors.Descriptor - getDescriptor() { - return ContextAwareWireFormats.internal_static_AckAndTraceContextAwareEnvelopeContainer_descriptor; - } - - protected akka.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return ContextAwareWireFormats.internal_static_AckAndTraceContextAwareEnvelopeContainer_fieldAccessorTable - .ensureFieldAccessorsInitialized( - AckAndContextAwareEnvelopeContainer.class, AckAndContextAwareEnvelopeContainer.Builder.class); - } - - public static akka.protobuf.Parser PARSER = - new akka.protobuf.AbstractParser() { - public AckAndContextAwareEnvelopeContainer parsePartialFrom( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - return new AckAndContextAwareEnvelopeContainer(input, extensionRegistry); - } - }; - - @java.lang.Override - public akka.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // optional .AcknowledgementInfo ack = 1; - public static final int ACK_FIELD_NUMBER = 1; - private akka.remote.WireFormats.AcknowledgementInfo ack_; - /** - * optional .AcknowledgementInfo ack = 1; - */ - public boolean hasAck() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional .AcknowledgementInfo ack = 1; - */ - public akka.remote.WireFormats.AcknowledgementInfo getAck() { - return ack_; - } - /** - * optional .AcknowledgementInfo ack = 1; - */ - public akka.remote.WireFormats.AcknowledgementInfoOrBuilder getAckOrBuilder() { - return ack_; - } - - // optional .ContextAwareRemoteEnvelope envelope = 2; - public static final int ENVELOPE_FIELD_NUMBER = 2; - private ContextAwareRemoteEnvelope envelope_; - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public boolean hasEnvelope() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public ContextAwareRemoteEnvelope getEnvelope() { - return envelope_; - } - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public ContextAwareWireFormats.TraceContextAwareRemoteEnvelopeOrBuilder getEnvelopeOrBuilder() { - return envelope_; - } - - private void initFields() { - ack_ = akka.remote.WireFormats.AcknowledgementInfo.getDefaultInstance(); - envelope_ = ContextAwareRemoteEnvelope.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (hasAck()) { - if (!getAck().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasEnvelope()) { - if (!getEnvelope().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(akka.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - 
output.writeMessage(1, ack_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, envelope_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += akka.protobuf.CodedOutputStream - .computeMessageSize(1, ack_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += akka.protobuf.CodedOutputStream - .computeMessageSize(2, envelope_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - public static AckAndContextAwareEnvelopeContainer parseFrom( - akka.protobuf.ByteString data) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static AckAndContextAwareEnvelopeContainer parseFrom( - akka.protobuf.ByteString data, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static AckAndContextAwareEnvelopeContainer parseFrom(byte[] data) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static AckAndContextAwareEnvelopeContainer parseFrom( - byte[] data, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static AckAndContextAwareEnvelopeContainer parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static AckAndContextAwareEnvelopeContainer parseFrom( - java.io.InputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static AckAndContextAwareEnvelopeContainer parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static AckAndContextAwareEnvelopeContainer parseDelimitedFrom( - java.io.InputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static AckAndContextAwareEnvelopeContainer parseFrom( - akka.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static AckAndContextAwareEnvelopeContainer parseFrom( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(AckAndContextAwareEnvelopeContainer prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - akka.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code 
AckAndContextAwareEnvelopeContainer} - */ - public static final class Builder extends - akka.protobuf.GeneratedMessage.Builder - implements ContextAwareWireFormats.AckAndTraceContextAwareEnvelopeContainerOrBuilder { - public static final akka.protobuf.Descriptors.Descriptor - getDescriptor() { - return ContextAwareWireFormats.internal_static_AckAndTraceContextAwareEnvelopeContainer_descriptor; - } - - protected akka.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return ContextAwareWireFormats.internal_static_AckAndTraceContextAwareEnvelopeContainer_fieldAccessorTable - .ensureFieldAccessorsInitialized( - AckAndContextAwareEnvelopeContainer.class, AckAndContextAwareEnvelopeContainer.Builder.class); - } - - // Construct using akka.remote.ContextAwareWireFormats.AckAndContextAwareEnvelopeContainer.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - akka.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (akka.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getAckFieldBuilder(); - getEnvelopeFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (ackBuilder_ == null) { - ack_ = akka.remote.WireFormats.AcknowledgementInfo.getDefaultInstance(); - } else { - ackBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (envelopeBuilder_ == null) { - envelope_ = ContextAwareRemoteEnvelope.getDefaultInstance(); - } else { - envelopeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public akka.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return ContextAwareWireFormats.internal_static_AckAndTraceContextAwareEnvelopeContainer_descriptor; - } - - public AckAndContextAwareEnvelopeContainer getDefaultInstanceForType() { - return AckAndContextAwareEnvelopeContainer.getDefaultInstance(); - } - - public AckAndContextAwareEnvelopeContainer build() { - AckAndContextAwareEnvelopeContainer result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public AckAndContextAwareEnvelopeContainer buildPartial() { - AckAndContextAwareEnvelopeContainer result = new AckAndContextAwareEnvelopeContainer(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (ackBuilder_ == null) { - result.ack_ = ack_; - } else { - result.ack_ = ackBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (envelopeBuilder_ == null) { - result.envelope_ = envelope_; - } else { - result.envelope_ = envelopeBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(akka.protobuf.Message other) { - if (other instanceof AckAndContextAwareEnvelopeContainer) { - return mergeFrom((AckAndContextAwareEnvelopeContainer)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(AckAndContextAwareEnvelopeContainer other) { - if (other == AckAndContextAwareEnvelopeContainer.getDefaultInstance()) return this; - if (other.hasAck()) { - mergeAck(other.getAck()); - } - if (other.hasEnvelope()) { 
- mergeEnvelope(other.getEnvelope()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (hasAck()) { - if (!getAck().isInitialized()) { - - return false; - } - } - if (hasEnvelope()) { - if (!getEnvelope().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - AckAndContextAwareEnvelopeContainer parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (akka.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (AckAndContextAwareEnvelopeContainer) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // optional .AcknowledgementInfo ack = 1; - private akka.remote.WireFormats.AcknowledgementInfo ack_ = akka.remote.WireFormats.AcknowledgementInfo.getDefaultInstance(); - private akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.AcknowledgementInfo, akka.remote.WireFormats.AcknowledgementInfo.Builder, akka.remote.WireFormats.AcknowledgementInfoOrBuilder> ackBuilder_; - /** - * optional .AcknowledgementInfo ack = 1; - */ - public boolean hasAck() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * optional .AcknowledgementInfo ack = 1; - */ - public akka.remote.WireFormats.AcknowledgementInfo getAck() { - if (ackBuilder_ == null) { - return ack_; - } else { - return ackBuilder_.getMessage(); - } - } - /** - * optional .AcknowledgementInfo ack = 1; - */ - public Builder setAck(akka.remote.WireFormats.AcknowledgementInfo value) { - if (ackBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - ack_ = value; - onChanged(); - } else { - ackBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .AcknowledgementInfo ack = 1; - */ - public Builder setAck( - akka.remote.WireFormats.AcknowledgementInfo.Builder builderForValue) { - if (ackBuilder_ == null) { - ack_ = builderForValue.build(); - onChanged(); - } else { - ackBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .AcknowledgementInfo ack = 1; - */ - public Builder mergeAck(akka.remote.WireFormats.AcknowledgementInfo value) { - if (ackBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - ack_ != akka.remote.WireFormats.AcknowledgementInfo.getDefaultInstance()) { - ack_ = - akka.remote.WireFormats.AcknowledgementInfo.newBuilder(ack_).mergeFrom(value).buildPartial(); - } else { - ack_ = value; - } - onChanged(); - } else { - ackBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * optional .AcknowledgementInfo ack = 1; - */ - public Builder clearAck() { - if (ackBuilder_ == null) { - ack_ = akka.remote.WireFormats.AcknowledgementInfo.getDefaultInstance(); - onChanged(); - } else { - ackBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - /** - * optional .AcknowledgementInfo ack = 1; - */ - public akka.remote.WireFormats.AcknowledgementInfo.Builder getAckBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getAckFieldBuilder().getBuilder(); - } - /** - * optional .AcknowledgementInfo ack = 1; - */ - public akka.remote.WireFormats.AcknowledgementInfoOrBuilder getAckOrBuilder() { - if 
(ackBuilder_ != null) { - return ackBuilder_.getMessageOrBuilder(); - } else { - return ack_; - } - } - /** - * optional .AcknowledgementInfo ack = 1; - */ - private akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.AcknowledgementInfo, akka.remote.WireFormats.AcknowledgementInfo.Builder, akka.remote.WireFormats.AcknowledgementInfoOrBuilder> - getAckFieldBuilder() { - if (ackBuilder_ == null) { - ackBuilder_ = new akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.AcknowledgementInfo, akka.remote.WireFormats.AcknowledgementInfo.Builder, akka.remote.WireFormats.AcknowledgementInfoOrBuilder>( - ack_, - getParentForChildren(), - isClean()); - ack_ = null; - } - return ackBuilder_; - } - - // optional .ContextAwareRemoteEnvelope envelope = 2; - private ContextAwareRemoteEnvelope envelope_ = ContextAwareRemoteEnvelope.getDefaultInstance(); - private akka.protobuf.SingleFieldBuilder< - ContextAwareRemoteEnvelope, ContextAwareRemoteEnvelope.Builder, ContextAwareWireFormats.TraceContextAwareRemoteEnvelopeOrBuilder> envelopeBuilder_; - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public boolean hasEnvelope() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public ContextAwareRemoteEnvelope getEnvelope() { - if (envelopeBuilder_ == null) { - return envelope_; - } else { - return envelopeBuilder_.getMessage(); - } - } - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public Builder setEnvelope(ContextAwareRemoteEnvelope value) { - if (envelopeBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - envelope_ = value; - onChanged(); - } else { - envelopeBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public Builder setEnvelope( - ContextAwareRemoteEnvelope.Builder builderForValue) { - if (envelopeBuilder_ == null) { - envelope_ = builderForValue.build(); - onChanged(); - } else { - envelopeBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public Builder mergeEnvelope(ContextAwareRemoteEnvelope value) { - if (envelopeBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - envelope_ != ContextAwareRemoteEnvelope.getDefaultInstance()) { - envelope_ = - ContextAwareRemoteEnvelope.newBuilder(envelope_).mergeFrom(value).buildPartial(); - } else { - envelope_ = value; - } - onChanged(); - } else { - envelopeBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public Builder clearEnvelope() { - if (envelopeBuilder_ == null) { - envelope_ = ContextAwareRemoteEnvelope.getDefaultInstance(); - onChanged(); - } else { - envelopeBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public ContextAwareRemoteEnvelope.Builder getEnvelopeBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getEnvelopeFieldBuilder().getBuilder(); - } - /** - * optional .ContextAwareRemoteEnvelope envelope = 2; - */ - public ContextAwareWireFormats.TraceContextAwareRemoteEnvelopeOrBuilder getEnvelopeOrBuilder() { - if (envelopeBuilder_ != null) { - return envelopeBuilder_.getMessageOrBuilder(); - } else { - return envelope_; - } - } - /** - * optional 
.ContextAwareRemoteEnvelope envelope = 2; - */ - private akka.protobuf.SingleFieldBuilder< - ContextAwareRemoteEnvelope, ContextAwareRemoteEnvelope.Builder, ContextAwareWireFormats.TraceContextAwareRemoteEnvelopeOrBuilder> - getEnvelopeFieldBuilder() { - if (envelopeBuilder_ == null) { - envelopeBuilder_ = new akka.protobuf.SingleFieldBuilder< - ContextAwareRemoteEnvelope, ContextAwareRemoteEnvelope.Builder, ContextAwareWireFormats.TraceContextAwareRemoteEnvelopeOrBuilder>( - envelope_, - getParentForChildren(), - isClean()); - envelope_ = null; - } - return envelopeBuilder_; - } - - // @@protoc_insertion_point(builder_scope:AckAndContextAwareEnvelopeContainer) - } - - static { - defaultInstance = new AckAndContextAwareEnvelopeContainer(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:AckAndContextAwareEnvelopeContainer) - } - - public interface TraceContextAwareRemoteEnvelopeOrBuilder - extends akka.protobuf.MessageOrBuilder { - - // required .ActorRefData recipient = 1; - /** - * required .ActorRefData recipient = 1; - */ - boolean hasRecipient(); - /** - * required .ActorRefData recipient = 1; - */ - akka.remote.WireFormats.ActorRefData getRecipient(); - /** - * required .ActorRefData recipient = 1; - */ - akka.remote.WireFormats.ActorRefDataOrBuilder getRecipientOrBuilder(); - - // required .SerializedMessage message = 2; - /** - * required .SerializedMessage message = 2; - */ - boolean hasMessage(); - /** - * required .SerializedMessage message = 2; - */ - akka.remote.WireFormats.SerializedMessage getMessage(); - /** - * required .SerializedMessage message = 2; - */ - akka.remote.WireFormats.SerializedMessageOrBuilder getMessageOrBuilder(); - - // optional .ActorRefData sender = 4; - /** - * optional .ActorRefData sender = 4; - */ - boolean hasSender(); - /** - * optional .ActorRefData sender = 4; - */ - akka.remote.WireFormats.ActorRefData getSender(); - /** - * optional .ActorRefData sender = 4; - */ - akka.remote.WireFormats.ActorRefDataOrBuilder getSenderOrBuilder(); - - // optional fixed64 seq = 5; - /** - * optional fixed64 seq = 5; - */ - boolean hasSeq(); - /** - * optional fixed64 seq = 5; - */ - long getSeq(); - - // optional .RemoteContext traceContext = 15; - /** - * optional .RemoteContext traceContext = 15; - */ - boolean hasTraceContext(); - /** - * optional .RemoteContext traceContext = 15; - */ - RemoteContext getTraceContext(); - /** - * optional .RemoteContext traceContext = 15; - */ - ContextAwareWireFormats.RemoteTraceContextOrBuilder getTraceContextOrBuilder(); - } - /** - * Protobuf type {@code ContextAwareRemoteEnvelope} - */ - public static final class ContextAwareRemoteEnvelope extends - akka.protobuf.GeneratedMessage - implements TraceContextAwareRemoteEnvelopeOrBuilder { - // Use ContextAwareRemoteEnvelope.newBuilder() to construct. 
- private ContextAwareRemoteEnvelope(akka.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private ContextAwareRemoteEnvelope(boolean noInit) { this.unknownFields = akka.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final ContextAwareRemoteEnvelope defaultInstance; - public static ContextAwareRemoteEnvelope getDefaultInstance() { - return defaultInstance; - } - - public ContextAwareRemoteEnvelope getDefaultInstanceForType() { - return defaultInstance; - } - - private final akka.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final akka.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private ContextAwareRemoteEnvelope( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - akka.protobuf.UnknownFieldSet.Builder unknownFields = - akka.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - akka.remote.WireFormats.ActorRefData.Builder subBuilder = null; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - subBuilder = recipient_.toBuilder(); - } - recipient_ = input.readMessage(akka.remote.WireFormats.ActorRefData.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(recipient_); - recipient_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000001; - break; - } - case 18: { - akka.remote.WireFormats.SerializedMessage.Builder subBuilder = null; - if (((bitField0_ & 0x00000002) == 0x00000002)) { - subBuilder = message_.toBuilder(); - } - message_ = input.readMessage(akka.remote.WireFormats.SerializedMessage.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(message_); - message_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000002; - break; - } - case 34: { - akka.remote.WireFormats.ActorRefData.Builder subBuilder = null; - if (((bitField0_ & 0x00000004) == 0x00000004)) { - subBuilder = sender_.toBuilder(); - } - sender_ = input.readMessage(akka.remote.WireFormats.ActorRefData.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(sender_); - sender_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000004; - break; - } - case 41: { - bitField0_ |= 0x00000008; - seq_ = input.readFixed64(); - break; - } - case 122: { - RemoteContext.Builder subBuilder = null; - if (((bitField0_ & 0x00000010) == 0x00000010)) { - subBuilder = traceContext_.toBuilder(); - } - traceContext_ = input.readMessage(RemoteContext.PARSER, extensionRegistry); - if (subBuilder != null) { - subBuilder.mergeFrom(traceContext_); - traceContext_ = subBuilder.buildPartial(); - } - bitField0_ |= 0x00000010; - break; - } - } - } - } catch (akka.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new akka.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final akka.protobuf.Descriptors.Descriptor - getDescriptor() { - return 
ContextAwareWireFormats.internal_static_TraceContextAwareRemoteEnvelope_descriptor; - } - - protected akka.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return ContextAwareWireFormats.internal_static_TraceContextAwareRemoteEnvelope_fieldAccessorTable - .ensureFieldAccessorsInitialized( - ContextAwareRemoteEnvelope.class, ContextAwareRemoteEnvelope.Builder.class); - } - - public static akka.protobuf.Parser PARSER = - new akka.protobuf.AbstractParser() { - public ContextAwareRemoteEnvelope parsePartialFrom( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - return new ContextAwareRemoteEnvelope(input, extensionRegistry); - } - }; - - @java.lang.Override - public akka.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required .ActorRefData recipient = 1; - public static final int RECIPIENT_FIELD_NUMBER = 1; - private akka.remote.WireFormats.ActorRefData recipient_; - /** - * required .ActorRefData recipient = 1; - */ - public boolean hasRecipient() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required .ActorRefData recipient = 1; - */ - public akka.remote.WireFormats.ActorRefData getRecipient() { - return recipient_; - } - /** - * required .ActorRefData recipient = 1; - */ - public akka.remote.WireFormats.ActorRefDataOrBuilder getRecipientOrBuilder() { - return recipient_; - } - - // required .SerializedMessage message = 2; - public static final int MESSAGE_FIELD_NUMBER = 2; - private akka.remote.WireFormats.SerializedMessage message_; - /** - * required .SerializedMessage message = 2; - */ - public boolean hasMessage() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required .SerializedMessage message = 2; - */ - public akka.remote.WireFormats.SerializedMessage getMessage() { - return message_; - } - /** - * required .SerializedMessage message = 2; - */ - public akka.remote.WireFormats.SerializedMessageOrBuilder getMessageOrBuilder() { - return message_; - } - - // optional .ActorRefData sender = 4; - public static final int SENDER_FIELD_NUMBER = 4; - private akka.remote.WireFormats.ActorRefData sender_; - /** - * optional .ActorRefData sender = 4; - */ - public boolean hasSender() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional .ActorRefData sender = 4; - */ - public akka.remote.WireFormats.ActorRefData getSender() { - return sender_; - } - /** - * optional .ActorRefData sender = 4; - */ - public akka.remote.WireFormats.ActorRefDataOrBuilder getSenderOrBuilder() { - return sender_; - } - - // optional fixed64 seq = 5; - public static final int SEQ_FIELD_NUMBER = 5; - private long seq_; - /** - * optional fixed64 seq = 5; - */ - public boolean hasSeq() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional fixed64 seq = 5; - */ - public long getSeq() { - return seq_; - } - - // optional .RemoteContext traceContext = 15; - public static final int TRACECONTEXT_FIELD_NUMBER = 15; - private RemoteContext traceContext_; - /** - * optional .RemoteContext traceContext = 15; - */ - public boolean hasTraceContext() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - /** - * optional .RemoteContext traceContext = 15; - */ - public RemoteContext getTraceContext() { - return traceContext_; - } - /** - * optional .RemoteContext traceContext = 15; - */ - public ContextAwareWireFormats.RemoteTraceContextOrBuilder 
getTraceContextOrBuilder() { - return traceContext_; - } - - private void initFields() { - recipient_ = akka.remote.WireFormats.ActorRefData.getDefaultInstance(); - message_ = akka.remote.WireFormats.SerializedMessage.getDefaultInstance(); - sender_ = akka.remote.WireFormats.ActorRefData.getDefaultInstance(); - seq_ = 0L; - traceContext_ = RemoteContext.getDefaultInstance(); - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasRecipient()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasMessage()) { - memoizedIsInitialized = 0; - return false; - } - if (!getRecipient().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (!getMessage().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - if (hasSender()) { - if (!getSender().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - if (hasTraceContext()) { - if (!getTraceContext().isInitialized()) { - memoizedIsInitialized = 0; - return false; - } - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(akka.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, recipient_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, message_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeMessage(4, sender_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - output.writeFixed64(5, seq_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - output.writeMessage(15, traceContext_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += akka.protobuf.CodedOutputStream - .computeMessageSize(1, recipient_); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += akka.protobuf.CodedOutputStream - .computeMessageSize(2, message_); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += akka.protobuf.CodedOutputStream - .computeMessageSize(4, sender_); - } - if (((bitField0_ & 0x00000008) == 0x00000008)) { - size += akka.protobuf.CodedOutputStream - .computeFixed64Size(5, seq_); - } - if (((bitField0_ & 0x00000010) == 0x00000010)) { - size += akka.protobuf.CodedOutputStream - .computeMessageSize(15, traceContext_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - public static ContextAwareRemoteEnvelope parseFrom( - akka.protobuf.ByteString data) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static ContextAwareRemoteEnvelope parseFrom( - akka.protobuf.ByteString data, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static ContextAwareRemoteEnvelope parseFrom(byte[] data) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static 
ContextAwareRemoteEnvelope parseFrom( - byte[] data, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static ContextAwareRemoteEnvelope parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static ContextAwareRemoteEnvelope parseFrom( - java.io.InputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static ContextAwareRemoteEnvelope parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static ContextAwareRemoteEnvelope parseDelimitedFrom( - java.io.InputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static ContextAwareRemoteEnvelope parseFrom( - akka.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static ContextAwareRemoteEnvelope parseFrom( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(ContextAwareRemoteEnvelope prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - akka.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code ContextAwareRemoteEnvelope} - */ - public static final class Builder extends - akka.protobuf.GeneratedMessage.Builder - implements ContextAwareWireFormats.TraceContextAwareRemoteEnvelopeOrBuilder { - public static final akka.protobuf.Descriptors.Descriptor - getDescriptor() { - return ContextAwareWireFormats.internal_static_TraceContextAwareRemoteEnvelope_descriptor; - } - - protected akka.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return ContextAwareWireFormats.internal_static_TraceContextAwareRemoteEnvelope_fieldAccessorTable - .ensureFieldAccessorsInitialized( - ContextAwareRemoteEnvelope.class, ContextAwareRemoteEnvelope.Builder.class); - } - - // Construct using akka.remote.ContextAwareWireFormats.ContextAwareRemoteEnvelope.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - akka.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (akka.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - getRecipientFieldBuilder(); - getMessageFieldBuilder(); - getSenderFieldBuilder(); - getTraceContextFieldBuilder(); - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - if (recipientBuilder_ == null) { - recipient_ = akka.remote.WireFormats.ActorRefData.getDefaultInstance(); - } else { - recipientBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - if (messageBuilder_ == null) { - message_ = 
akka.remote.WireFormats.SerializedMessage.getDefaultInstance(); - } else { - messageBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - if (senderBuilder_ == null) { - sender_ = akka.remote.WireFormats.ActorRefData.getDefaultInstance(); - } else { - senderBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000004); - seq_ = 0L; - bitField0_ = (bitField0_ & ~0x00000008); - if (traceContextBuilder_ == null) { - traceContext_ = RemoteContext.getDefaultInstance(); - } else { - traceContextBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public akka.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return ContextAwareWireFormats.internal_static_TraceContextAwareRemoteEnvelope_descriptor; - } - - public ContextAwareRemoteEnvelope getDefaultInstanceForType() { - return ContextAwareRemoteEnvelope.getDefaultInstance(); - } - - public ContextAwareRemoteEnvelope build() { - ContextAwareRemoteEnvelope result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public ContextAwareRemoteEnvelope buildPartial() { - ContextAwareRemoteEnvelope result = new ContextAwareRemoteEnvelope(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - if (recipientBuilder_ == null) { - result.recipient_ = recipient_; - } else { - result.recipient_ = recipientBuilder_.build(); - } - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - if (messageBuilder_ == null) { - result.message_ = message_; - } else { - result.message_ = messageBuilder_.build(); - } - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - if (senderBuilder_ == null) { - result.sender_ = sender_; - } else { - result.sender_ = senderBuilder_.build(); - } - if (((from_bitField0_ & 0x00000008) == 0x00000008)) { - to_bitField0_ |= 0x00000008; - } - result.seq_ = seq_; - if (((from_bitField0_ & 0x00000010) == 0x00000010)) { - to_bitField0_ |= 0x00000010; - } - if (traceContextBuilder_ == null) { - result.traceContext_ = traceContext_; - } else { - result.traceContext_ = traceContextBuilder_.build(); - } - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(akka.protobuf.Message other) { - if (other instanceof ContextAwareRemoteEnvelope) { - return mergeFrom((ContextAwareRemoteEnvelope)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(ContextAwareRemoteEnvelope other) { - if (other == ContextAwareRemoteEnvelope.getDefaultInstance()) return this; - if (other.hasRecipient()) { - mergeRecipient(other.getRecipient()); - } - if (other.hasMessage()) { - mergeMessage(other.getMessage()); - } - if (other.hasSender()) { - mergeSender(other.getSender()); - } - if (other.hasSeq()) { - setSeq(other.getSeq()); - } - if (other.hasTraceContext()) { - mergeTraceContext(other.getTraceContext()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasRecipient()) { - - return false; - } - if (!hasMessage()) { - - return false; - } - if (!getRecipient().isInitialized()) { - - return false; - } - if (!getMessage().isInitialized()) { - - return false; - } - if (hasSender()) { - if (!getSender().isInitialized()) { - - 
return false; - } - } - if (hasTraceContext()) { - if (!getTraceContext().isInitialized()) { - - return false; - } - } - return true; - } - - public Builder mergeFrom( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - ContextAwareRemoteEnvelope parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (akka.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (ContextAwareRemoteEnvelope) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required .ActorRefData recipient = 1; - private akka.remote.WireFormats.ActorRefData recipient_ = akka.remote.WireFormats.ActorRefData.getDefaultInstance(); - private akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.ActorRefData, akka.remote.WireFormats.ActorRefData.Builder, akka.remote.WireFormats.ActorRefDataOrBuilder> recipientBuilder_; - /** - * required .ActorRefData recipient = 1; - */ - public boolean hasRecipient() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required .ActorRefData recipient = 1; - */ - public akka.remote.WireFormats.ActorRefData getRecipient() { - if (recipientBuilder_ == null) { - return recipient_; - } else { - return recipientBuilder_.getMessage(); - } - } - /** - * required .ActorRefData recipient = 1; - */ - public Builder setRecipient(akka.remote.WireFormats.ActorRefData value) { - if (recipientBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - recipient_ = value; - onChanged(); - } else { - recipientBuilder_.setMessage(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .ActorRefData recipient = 1; - */ - public Builder setRecipient( - akka.remote.WireFormats.ActorRefData.Builder builderForValue) { - if (recipientBuilder_ == null) { - recipient_ = builderForValue.build(); - onChanged(); - } else { - recipientBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .ActorRefData recipient = 1; - */ - public Builder mergeRecipient(akka.remote.WireFormats.ActorRefData value) { - if (recipientBuilder_ == null) { - if (((bitField0_ & 0x00000001) == 0x00000001) && - recipient_ != akka.remote.WireFormats.ActorRefData.getDefaultInstance()) { - recipient_ = - akka.remote.WireFormats.ActorRefData.newBuilder(recipient_).mergeFrom(value).buildPartial(); - } else { - recipient_ = value; - } - onChanged(); - } else { - recipientBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000001; - return this; - } - /** - * required .ActorRefData recipient = 1; - */ - public Builder clearRecipient() { - if (recipientBuilder_ == null) { - recipient_ = akka.remote.WireFormats.ActorRefData.getDefaultInstance(); - onChanged(); - } else { - recipientBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - /** - * required .ActorRefData recipient = 1; - */ - public akka.remote.WireFormats.ActorRefData.Builder getRecipientBuilder() { - bitField0_ |= 0x00000001; - onChanged(); - return getRecipientFieldBuilder().getBuilder(); - } - /** - * required .ActorRefData recipient = 1; - */ - public akka.remote.WireFormats.ActorRefDataOrBuilder getRecipientOrBuilder() { - if (recipientBuilder_ != null) { - return recipientBuilder_.getMessageOrBuilder(); - } else { - return recipient_; - } - } - /** - * required .ActorRefData 
recipient = 1; - */ - private akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.ActorRefData, akka.remote.WireFormats.ActorRefData.Builder, akka.remote.WireFormats.ActorRefDataOrBuilder> - getRecipientFieldBuilder() { - if (recipientBuilder_ == null) { - recipientBuilder_ = new akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.ActorRefData, akka.remote.WireFormats.ActorRefData.Builder, akka.remote.WireFormats.ActorRefDataOrBuilder>( - recipient_, - getParentForChildren(), - isClean()); - recipient_ = null; - } - return recipientBuilder_; - } - - // required .SerializedMessage message = 2; - private akka.remote.WireFormats.SerializedMessage message_ = akka.remote.WireFormats.SerializedMessage.getDefaultInstance(); - private akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.SerializedMessage, akka.remote.WireFormats.SerializedMessage.Builder, akka.remote.WireFormats.SerializedMessageOrBuilder> messageBuilder_; - /** - * required .SerializedMessage message = 2; - */ - public boolean hasMessage() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * required .SerializedMessage message = 2; - */ - public akka.remote.WireFormats.SerializedMessage getMessage() { - if (messageBuilder_ == null) { - return message_; - } else { - return messageBuilder_.getMessage(); - } - } - /** - * required .SerializedMessage message = 2; - */ - public Builder setMessage(akka.remote.WireFormats.SerializedMessage value) { - if (messageBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - message_ = value; - onChanged(); - } else { - messageBuilder_.setMessage(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * required .SerializedMessage message = 2; - */ - public Builder setMessage( - akka.remote.WireFormats.SerializedMessage.Builder builderForValue) { - if (messageBuilder_ == null) { - message_ = builderForValue.build(); - onChanged(); - } else { - messageBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * required .SerializedMessage message = 2; - */ - public Builder mergeMessage(akka.remote.WireFormats.SerializedMessage value) { - if (messageBuilder_ == null) { - if (((bitField0_ & 0x00000002) == 0x00000002) && - message_ != akka.remote.WireFormats.SerializedMessage.getDefaultInstance()) { - message_ = - akka.remote.WireFormats.SerializedMessage.newBuilder(message_).mergeFrom(value).buildPartial(); - } else { - message_ = value; - } - onChanged(); - } else { - messageBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000002; - return this; - } - /** - * required .SerializedMessage message = 2; - */ - public Builder clearMessage() { - if (messageBuilder_ == null) { - message_ = akka.remote.WireFormats.SerializedMessage.getDefaultInstance(); - onChanged(); - } else { - messageBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000002); - return this; - } - /** - * required .SerializedMessage message = 2; - */ - public akka.remote.WireFormats.SerializedMessage.Builder getMessageBuilder() { - bitField0_ |= 0x00000002; - onChanged(); - return getMessageFieldBuilder().getBuilder(); - } - /** - * required .SerializedMessage message = 2; - */ - public akka.remote.WireFormats.SerializedMessageOrBuilder getMessageOrBuilder() { - if (messageBuilder_ != null) { - return messageBuilder_.getMessageOrBuilder(); - } else { - return message_; - } - } - /** - * required .SerializedMessage message = 2; - */ - private akka.protobuf.SingleFieldBuilder< - 
akka.remote.WireFormats.SerializedMessage, akka.remote.WireFormats.SerializedMessage.Builder, akka.remote.WireFormats.SerializedMessageOrBuilder> - getMessageFieldBuilder() { - if (messageBuilder_ == null) { - messageBuilder_ = new akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.SerializedMessage, akka.remote.WireFormats.SerializedMessage.Builder, akka.remote.WireFormats.SerializedMessageOrBuilder>( - message_, - getParentForChildren(), - isClean()); - message_ = null; - } - return messageBuilder_; - } - - // optional .ActorRefData sender = 4; - private akka.remote.WireFormats.ActorRefData sender_ = akka.remote.WireFormats.ActorRefData.getDefaultInstance(); - private akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.ActorRefData, akka.remote.WireFormats.ActorRefData.Builder, akka.remote.WireFormats.ActorRefDataOrBuilder> senderBuilder_; - /** - * optional .ActorRefData sender = 4; - */ - public boolean hasSender() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * optional .ActorRefData sender = 4; - */ - public akka.remote.WireFormats.ActorRefData getSender() { - if (senderBuilder_ == null) { - return sender_; - } else { - return senderBuilder_.getMessage(); - } - } - /** - * optional .ActorRefData sender = 4; - */ - public Builder setSender(akka.remote.WireFormats.ActorRefData value) { - if (senderBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - sender_ = value; - onChanged(); - } else { - senderBuilder_.setMessage(value); - } - bitField0_ |= 0x00000004; - return this; - } - /** - * optional .ActorRefData sender = 4; - */ - public Builder setSender( - akka.remote.WireFormats.ActorRefData.Builder builderForValue) { - if (senderBuilder_ == null) { - sender_ = builderForValue.build(); - onChanged(); - } else { - senderBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000004; - return this; - } - /** - * optional .ActorRefData sender = 4; - */ - public Builder mergeSender(akka.remote.WireFormats.ActorRefData value) { - if (senderBuilder_ == null) { - if (((bitField0_ & 0x00000004) == 0x00000004) && - sender_ != akka.remote.WireFormats.ActorRefData.getDefaultInstance()) { - sender_ = - akka.remote.WireFormats.ActorRefData.newBuilder(sender_).mergeFrom(value).buildPartial(); - } else { - sender_ = value; - } - onChanged(); - } else { - senderBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000004; - return this; - } - /** - * optional .ActorRefData sender = 4; - */ - public Builder clearSender() { - if (senderBuilder_ == null) { - sender_ = akka.remote.WireFormats.ActorRefData.getDefaultInstance(); - onChanged(); - } else { - senderBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000004); - return this; - } - /** - * optional .ActorRefData sender = 4; - */ - public akka.remote.WireFormats.ActorRefData.Builder getSenderBuilder() { - bitField0_ |= 0x00000004; - onChanged(); - return getSenderFieldBuilder().getBuilder(); - } - /** - * optional .ActorRefData sender = 4; - */ - public akka.remote.WireFormats.ActorRefDataOrBuilder getSenderOrBuilder() { - if (senderBuilder_ != null) { - return senderBuilder_.getMessageOrBuilder(); - } else { - return sender_; - } - } - /** - * optional .ActorRefData sender = 4; - */ - private akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.ActorRefData, akka.remote.WireFormats.ActorRefData.Builder, akka.remote.WireFormats.ActorRefDataOrBuilder> - getSenderFieldBuilder() { - if (senderBuilder_ == null) { - senderBuilder_ = new 
akka.protobuf.SingleFieldBuilder< - akka.remote.WireFormats.ActorRefData, akka.remote.WireFormats.ActorRefData.Builder, akka.remote.WireFormats.ActorRefDataOrBuilder>( - sender_, - getParentForChildren(), - isClean()); - sender_ = null; - } - return senderBuilder_; - } - - // optional fixed64 seq = 5; - private long seq_ ; - /** - * optional fixed64 seq = 5; - */ - public boolean hasSeq() { - return ((bitField0_ & 0x00000008) == 0x00000008); - } - /** - * optional fixed64 seq = 5; - */ - public long getSeq() { - return seq_; - } - /** - * optional fixed64 seq = 5; - */ - public Builder setSeq(long value) { - bitField0_ |= 0x00000008; - seq_ = value; - onChanged(); - return this; - } - /** - * optional fixed64 seq = 5; - */ - public Builder clearSeq() { - bitField0_ = (bitField0_ & ~0x00000008); - seq_ = 0L; - onChanged(); - return this; - } - - // optional .RemoteContext traceContext = 15; - private RemoteContext traceContext_ = RemoteContext.getDefaultInstance(); - private akka.protobuf.SingleFieldBuilder< - RemoteContext, RemoteContext.Builder, ContextAwareWireFormats.RemoteTraceContextOrBuilder> traceContextBuilder_; - /** - * optional .RemoteContext traceContext = 15; - */ - public boolean hasTraceContext() { - return ((bitField0_ & 0x00000010) == 0x00000010); - } - /** - * optional .RemoteContext traceContext = 15; - */ - public RemoteContext getTraceContext() { - if (traceContextBuilder_ == null) { - return traceContext_; - } else { - return traceContextBuilder_.getMessage(); - } - } - /** - * optional .RemoteContext traceContext = 15; - */ - public Builder setTraceContext(RemoteContext value) { - if (traceContextBuilder_ == null) { - if (value == null) { - throw new NullPointerException(); - } - traceContext_ = value; - onChanged(); - } else { - traceContextBuilder_.setMessage(value); - } - bitField0_ |= 0x00000010; - return this; - } - /** - * optional .RemoteContext traceContext = 15; - */ - public Builder setTraceContext( - RemoteContext.Builder builderForValue) { - if (traceContextBuilder_ == null) { - traceContext_ = builderForValue.build(); - onChanged(); - } else { - traceContextBuilder_.setMessage(builderForValue.build()); - } - bitField0_ |= 0x00000010; - return this; - } - /** - * optional .RemoteContext traceContext = 15; - */ - public Builder mergeTraceContext(RemoteContext value) { - if (traceContextBuilder_ == null) { - if (((bitField0_ & 0x00000010) == 0x00000010) && - traceContext_ != RemoteContext.getDefaultInstance()) { - traceContext_ = - RemoteContext.newBuilder(traceContext_).mergeFrom(value).buildPartial(); - } else { - traceContext_ = value; - } - onChanged(); - } else { - traceContextBuilder_.mergeFrom(value); - } - bitField0_ |= 0x00000010; - return this; - } - /** - * optional .RemoteContext traceContext = 15; - */ - public Builder clearTraceContext() { - if (traceContextBuilder_ == null) { - traceContext_ = RemoteContext.getDefaultInstance(); - onChanged(); - } else { - traceContextBuilder_.clear(); - } - bitField0_ = (bitField0_ & ~0x00000010); - return this; - } - /** - * optional .RemoteContext traceContext = 15; - */ - public RemoteContext.Builder getTraceContextBuilder() { - bitField0_ |= 0x00000010; - onChanged(); - return getTraceContextFieldBuilder().getBuilder(); - } - /** - * optional .RemoteContext traceContext = 15; - */ - public ContextAwareWireFormats.RemoteTraceContextOrBuilder getTraceContextOrBuilder() { - if (traceContextBuilder_ != null) { - return traceContextBuilder_.getMessageOrBuilder(); - } else { - return traceContext_; - } - } - 
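For orientation in the generated code above: each field's presence is tracked by a bit in bitField0_ (0x00000010 for traceContext), and that bit is what the has*/clear* accessor pairs read and reset. A small usage sketch, assuming only the generated classes from this file:

    import akka.protobuf.ByteString
    import akka.remote.ContextAwareWireFormats.{ContextAwareRemoteEnvelope, RemoteContext}

    val builder = ContextAwareRemoteEnvelope.newBuilder()
      .setTraceContext(RemoteContext.newBuilder()
        .setContext(ByteString.copyFrom(Array[Byte](1, 2, 3)))
        .build())

    assert(builder.hasTraceContext)   // presence bit 0x00000010 is now set
    builder.clearTraceContext()
    assert(!builder.hasTraceContext)  // bit cleared; field reverts to the default instance
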
/** - * optional .RemoteContext traceContext = 15; - */ - private akka.protobuf.SingleFieldBuilder< - RemoteContext, RemoteContext.Builder, ContextAwareWireFormats.RemoteTraceContextOrBuilder> - getTraceContextFieldBuilder() { - if (traceContextBuilder_ == null) { - traceContextBuilder_ = new akka.protobuf.SingleFieldBuilder< - RemoteContext, RemoteContext.Builder, ContextAwareWireFormats.RemoteTraceContextOrBuilder>( - traceContext_, - getParentForChildren(), - isClean()); - traceContext_ = null; - } - return traceContextBuilder_; - } - - // @@protoc_insertion_point(builder_scope:ContextAwareRemoteEnvelope) - } - - static { - defaultInstance = new ContextAwareRemoteEnvelope(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:ContextAwareRemoteEnvelope) - } - - public interface RemoteTraceContextOrBuilder - extends akka.protobuf.MessageOrBuilder { - - // required bytes context = 1; - /** - * required bytes context = 1; - */ - boolean hasContext(); - /** - * required bytes context = 1; - */ - akka.protobuf.ByteString getContext(); - } - /** - * Protobuf type {@code RemoteContext} - */ - public static final class RemoteContext extends - akka.protobuf.GeneratedMessage - implements RemoteTraceContextOrBuilder { - // Use RemoteContext.newBuilder() to construct. - private RemoteContext(akka.protobuf.GeneratedMessage.Builder builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RemoteContext(boolean noInit) { this.unknownFields = akka.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RemoteContext defaultInstance; - public static RemoteContext getDefaultInstance() { - return defaultInstance; - } - - public RemoteContext getDefaultInstanceForType() { - return defaultInstance; - } - - private final akka.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final akka.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; - } - private RemoteContext( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - akka.protobuf.UnknownFieldSet.Builder unknownFields = - akka.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 10: { - bitField0_ |= 0x00000001; - context_ = input.readBytes(); - break; - } - } - } - } catch (akka.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new akka.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } - } - public static final akka.protobuf.Descriptors.Descriptor - getDescriptor() { - return ContextAwareWireFormats.internal_static_RemoteTraceContext_descriptor; - } - - protected akka.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return ContextAwareWireFormats.internal_static_RemoteTraceContext_fieldAccessorTable - .ensureFieldAccessorsInitialized( - RemoteContext.class, RemoteContext.Builder.class); - } - - public static akka.protobuf.Parser PARSER = - new akka.protobuf.AbstractParser() { - public RemoteContext 
parsePartialFrom( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - return new RemoteContext(input, extensionRegistry); - } - }; - - @java.lang.Override - public akka.protobuf.Parser getParserForType() { - return PARSER; - } - - private int bitField0_; - // required bytes context = 1; - public static final int CONTEXT_FIELD_NUMBER = 1; - private akka.protobuf.ByteString context_; - /** - * required bytes context = 1; - */ - public boolean hasContext() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes context = 1; - */ - public akka.protobuf.ByteString getContext() { - return context_; - } - - private void initFields() { - context_ = akka.protobuf.ByteString.EMPTY; - } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasContext()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; - } - - public void writeTo(akka.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, context_); - } - getUnknownFields().writeTo(output); - } - - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += akka.protobuf.CodedOutputStream - .computeBytesSize(1, context_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; - } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - public static RemoteContext parseFrom( - akka.protobuf.ByteString data) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static RemoteContext parseFrom( - akka.protobuf.ByteString data, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static RemoteContext parseFrom(byte[] data) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static RemoteContext parseFrom( - byte[] data, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws akka.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static RemoteContext parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static RemoteContext parseFrom( - java.io.InputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - public static RemoteContext parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); - } - public static RemoteContext parseDelimitedFrom( - java.io.InputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); - } - public static RemoteContext parseFrom( - akka.protobuf.CodedInputStream 
input) - throws java.io.IOException { - return PARSER.parseFrom(input); - } - public static RemoteContext parseFrom( - akka.protobuf.CodedInputStream input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); - } - - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(RemoteContext prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } - - @java.lang.Override - protected Builder newBuilderForType( - akka.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code RemoteContext} - */ - public static final class Builder extends - akka.protobuf.GeneratedMessage.Builder - implements ContextAwareWireFormats.RemoteTraceContextOrBuilder { - public static final akka.protobuf.Descriptors.Descriptor - getDescriptor() { - return ContextAwareWireFormats.internal_static_RemoteTraceContext_descriptor; - } - - protected akka.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return ContextAwareWireFormats.internal_static_RemoteTraceContext_fieldAccessorTable - .ensureFieldAccessorsInitialized( - RemoteContext.class, RemoteContext.Builder.class); - } - - // Construct using akka.remote.ContextAwareWireFormats.RemoteContext.newBuilder() - private Builder() { - maybeForceBuilderInitialization(); - } - - private Builder( - akka.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - maybeForceBuilderInitialization(); - } - private void maybeForceBuilderInitialization() { - if (akka.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { - } - } - private static Builder create() { - return new Builder(); - } - - public Builder clear() { - super.clear(); - context_ = akka.protobuf.ByteString.EMPTY; - bitField0_ = (bitField0_ & ~0x00000001); - return this; - } - - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - - public akka.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return ContextAwareWireFormats.internal_static_RemoteTraceContext_descriptor; - } - - public RemoteContext getDefaultInstanceForType() { - return RemoteContext.getDefaultInstance(); - } - - public RemoteContext build() { - RemoteContext result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - public RemoteContext buildPartial() { - RemoteContext result = new RemoteContext(this); - int from_bitField0_ = bitField0_; - int to_bitField0_ = 0; - if (((from_bitField0_ & 0x00000001) == 0x00000001)) { - to_bitField0_ |= 0x00000001; - } - result.context_ = context_; - result.bitField0_ = to_bitField0_; - onBuilt(); - return result; - } - - public Builder mergeFrom(akka.protobuf.Message other) { - if (other instanceof RemoteContext) { - return mergeFrom((RemoteContext)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(RemoteContext other) { - if (other == RemoteContext.getDefaultInstance()) return this; - if (other.hasContext()) { - setContext(other.getContext()); - } - this.mergeUnknownFields(other.getUnknownFields()); - return this; - } - - public final boolean isInitialized() { - if (!hasContext()) { - - return false; - } - return true; - } - - public Builder mergeFrom( - akka.protobuf.CodedInputStream 
input, - akka.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - RemoteContext parsedMessage = null; - try { - parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); - } catch (akka.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (RemoteContext) e.getUnfinishedMessage(); - throw e; - } finally { - if (parsedMessage != null) { - mergeFrom(parsedMessage); - } - } - return this; - } - private int bitField0_; - - // required bytes context = 1; - private akka.protobuf.ByteString context_ = akka.protobuf.ByteString.EMPTY; - /** - * required bytes context = 1; - */ - public boolean hasContext() { - return ((bitField0_ & 0x00000001) == 0x00000001); - } - /** - * required bytes context = 1; - */ - public akka.protobuf.ByteString getContext() { - return context_; - } - /** - * required bytes context = 1; - */ - public Builder setContext(akka.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000001; - context_ = value; - onChanged(); - return this; - } - /** - * required bytes context = 1; - */ - public Builder clearContext() { - bitField0_ = (bitField0_ & ~0x00000001); - context_ = getDefaultInstance().getContext(); - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:RemoteContext) - } - - static { - defaultInstance = new RemoteContext(true); - defaultInstance.initFields(); - } - - // @@protoc_insertion_point(class_scope:RemoteContext) - } - - private static akka.protobuf.Descriptors.Descriptor - internal_static_AckAndTraceContextAwareEnvelopeContainer_descriptor; - private static - akka.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_AckAndTraceContextAwareEnvelopeContainer_fieldAccessorTable; - private static akka.protobuf.Descriptors.Descriptor - internal_static_TraceContextAwareRemoteEnvelope_descriptor; - private static - akka.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_TraceContextAwareRemoteEnvelope_fieldAccessorTable; - private static akka.protobuf.Descriptors.Descriptor - internal_static_RemoteTraceContext_descriptor; - private static - akka.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_RemoteTraceContext_fieldAccessorTable; - - public static akka.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static akka.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\"ContextAwareWireFormats.proto\032\026Co" + - "ntainerFormats.proto\032\021WireFormats.proto\"" + - "\201\001\n(AckAndTraceContextAwareEnvelopeConta" + - "iner\022!\n\003ack\030\001 \001(\0132\024.AcknowledgementInfo\022" + - "2\n\010envelope\030\002 \001(\0132 .TraceContextAwareRem" + - "oteEnvelope\"\277\001\n\037TraceContextAwareRemoteE" + - "nvelope\022 \n\trecipient\030\001 \002(\0132\r.ActorRefDat" + - "a\022#\n\007message\030\002 \002(\0132\022.SerializedMessage\022\035" + - "\n\006sender\030\004 \001(\0132\r.ActorRefData\022\013\n\003seq\030\005 \001" + - "(\006\022)\n\014traceContext\030\017 \001(\0132\023.RemoteTraceCo", - "ntext\"%\n\022RemoteTraceContext\022\017\n\007context\030\001" + - " \002(\014B\017\n\013akka.remoteH\001" - }; - akka.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = - new akka.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { - public akka.protobuf.ExtensionRegistry assignDescriptors( - akka.protobuf.Descriptors.FileDescriptor root) { - descriptor = root; - 
internal_static_AckAndTraceContextAwareEnvelopeContainer_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_AckAndTraceContextAwareEnvelopeContainer_fieldAccessorTable = new - akka.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_AckAndTraceContextAwareEnvelopeContainer_descriptor, - new java.lang.String[] { "Ack", "Envelope", }); - internal_static_TraceContextAwareRemoteEnvelope_descriptor = - getDescriptor().getMessageTypes().get(1); - internal_static_TraceContextAwareRemoteEnvelope_fieldAccessorTable = new - akka.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_TraceContextAwareRemoteEnvelope_descriptor, - new java.lang.String[] { "Recipient", "Message", "Sender", "Seq", "TraceContext", }); - internal_static_RemoteTraceContext_descriptor = - getDescriptor().getMessageTypes().get(2); - internal_static_RemoteTraceContext_fieldAccessorTable = new - akka.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_RemoteTraceContext_descriptor, - new java.lang.String[] { "Context", }); - return null; - } - }; - akka.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new akka.protobuf.Descriptors.FileDescriptor[] { - akka.remote.ContainerFormats.getDescriptor(), - akka.remote.WireFormats.getDescriptor(), - }, assigner); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/instrumentation/kamon-akka/src/akka-2.5/protobuf/ContextAwareWireFormats.proto b/instrumentation/kamon-akka/src/akka-2.5/protobuf/ContextAwareWireFormats.proto deleted file mode 100644 index dd493bf8d..000000000 --- a/instrumentation/kamon-akka/src/akka-2.5/protobuf/ContextAwareWireFormats.proto +++ /dev/null @@ -1,30 +0,0 @@ -syntax = "proto2"; -import "WireFormats.proto"; - - -option java_package = "akka.remote"; -option optimize_for = SPEED; - - -/************************************************ - * Kamon-specific additions to the protocol - ************************************************/ - -message AckAndContextAwareEnvelopeContainer { - optional AcknowledgementInfo ack = 1; - optional ContextAwareRemoteEnvelope envelope = 2; -} - -message ContextAwareRemoteEnvelope { - required ActorRefData recipient = 1; - required SerializedMessage message = 2; - optional ActorRefData sender = 4; - optional fixed64 seq = 5; - - optional RemoteContext traceContext = 15; -} - -message RemoteContext { - required bytes context = 1; -} - diff --git a/instrumentation/kamon-akka/src/akka-2.5/protobuf/WireFormats.proto b/instrumentation/kamon-akka/src/akka-2.5/protobuf/WireFormats.proto deleted file mode 100644 index 85500c4df..000000000 --- a/instrumentation/kamon-akka/src/akka-2.5/protobuf/WireFormats.proto +++ /dev/null @@ -1,132 +0,0 @@ -/** - * Copyright (C) 2009-2014 Typesafe Inc. - */ - -// Extracted from https://github.com/akka/akka/blob/master/akka-remote/src/main/protobuf/WireFormats.proto - -syntax = "proto2"; -option java_package = "akka.remote"; -option optimize_for = SPEED; - -/****************************************** - * Remoting message formats - ******************************************/ - - -message AckAndEnvelopeContainer { - optional AcknowledgementInfo ack = 1; - optional RemoteEnvelope envelope = 2; -} - -/** - * Defines a remote message. 
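The ContextAwareWireFormats.proto schema deleted above extends the stock envelope only with an optional, high-numbered field, which is what kept it wire-compatible: proto2 parsers keep unrecognised tags in the UnknownFieldSet rather than failing. A minimal sketch, assuming the generated classes from this patch:

    import akka.remote.ContextAwareWireFormats.ContextAwareRemoteEnvelope
    import akka.remote.WireFormats.RemoteEnvelope

    // Bytes produced by the context-aware envelope still parse as a plain
    // RemoteEnvelope; when traceContext was set, its bytes survive as an
    // unknown field under tag 15.
    def stripContext(extended: ContextAwareRemoteEnvelope): RemoteEnvelope = {
      val plain = RemoteEnvelope.parseFrom(extended.toByteArray)
      if (extended.hasTraceContext)
        assert(plain.getUnknownFields.hasField(15))
      plain
    }
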
- */ -message RemoteEnvelope { - required ActorRefData recipient = 1; - required SerializedMessage message = 2; - optional ActorRefData sender = 4; - optional fixed64 seq = 5; -} - -message AcknowledgementInfo { - required fixed64 cumulativeAck = 1; - repeated fixed64 nacks = 2; -} - -/** - * Defines a remote ActorRef that "remembers" and uses its original Actor instance - * on the original node. - */ -message ActorRefData { - required string path = 1; -} - -/** - * Defines a message. - */ -message SerializedMessage { - required bytes message = 1; - required int32 serializerId = 2; - optional bytes messageManifest = 3; -} - -/** - * Defines akka.remote.DaemonMsgCreate - */ -message DaemonMsgCreateData { - required PropsData props = 1; - required DeployData deploy = 2; - required string path = 3; - required ActorRefData supervisor = 4; -} - -/** - * Serialization of akka.actor.Props - */ -message PropsData { - required DeployData deploy = 2; - required string clazz = 3; - repeated bytes args = 4; - repeated string classes = 5; -} - -/** - * Serialization of akka.actor.Deploy - */ -message DeployData { - required string path = 1; - optional bytes config = 2; - optional bytes routerConfig = 3; - optional bytes scope = 4; - optional string dispatcher = 5; -} - - -/****************************************** - * Akka Protocol message formats - ******************************************/ - -/** - * Message format of Akka Protocol. - * Message contains either a payload or an instruction. - */ -message AkkaProtocolMessage { - optional bytes payload = 1; - optional AkkaControlMessage instruction = 2; -} - -/** - * Defines some control messages for the remoting - */ -message AkkaControlMessage { - required CommandType commandType = 1; - optional AkkaHandshakeInfo handshakeInfo = 2; -} - -message AkkaHandshakeInfo { - required AddressData origin = 1; - required fixed64 uid = 2; - optional string cookie = 3; - -} - -/** - * Defines the type of the AkkaControlMessage command type - */ -enum CommandType { - ASSOCIATE = 1; - DISASSOCIATE = 2; - HEARTBEAT = 3; - DISASSOCIATE_SHUTTING_DOWN = 4; // Remote system is going down and will not accepts new connections - DISASSOCIATE_QUARANTINED = 5; // Remote system refused the association since the current system is quarantined -} - -/** - * Defines a remote address. - */ -message AddressData { - required string system = 1; - required string hostname = 2; - required uint32 port = 3; - optional string protocol = 4; -} \ No newline at end of file diff --git a/instrumentation/kamon-akka/src/akka-2.5/scala-2.11/kamon/instrumentation/akka/instrumentations/akka_26/DispatcherInstrumentation.scala b/instrumentation/kamon-akka/src/akka-2.5/scala-2.11/kamon/instrumentation/akka/instrumentations/akka_26/DispatcherInstrumentation.scala deleted file mode 100644 index 17250ad6a..000000000 --- a/instrumentation/kamon-akka/src/akka-2.5/scala-2.11/kamon/instrumentation/akka/instrumentations/akka_26/DispatcherInstrumentation.scala +++ /dev/null @@ -1,22 +0,0 @@ -/* - * ========================================================================================= - * Copyright © 2013-2018 the kamon project - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file - * except in compliance with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the - * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, - * either express or implied. See the License for the specific language governing permissions - * and limitations under the License. - * ========================================================================================= - */ - -package kamon.instrumentation.akka.instrumentations.akka_26 - -import kanela.agent.api.instrumentation.InstrumentationBuilder - -// Only exists to avoid warnings of this class not existing when running on Scala 2.11 -class DispatcherInstrumentation extends InstrumentationBuilder {} \ No newline at end of file diff --git a/instrumentation/kamon-akka/src/akka-2.5/scala-2.11/kamon/instrumentation/akka/instrumentations/akka_26/remote/RemotingInstrumentation.scala b/instrumentation/kamon-akka/src/akka-2.5/scala-2.11/kamon/instrumentation/akka/instrumentations/akka_26/remote/RemotingInstrumentation.scala deleted file mode 100644 index 450231d1f..000000000 --- a/instrumentation/kamon-akka/src/akka-2.5/scala-2.11/kamon/instrumentation/akka/instrumentations/akka_26/remote/RemotingInstrumentation.scala +++ /dev/null @@ -1,6 +0,0 @@ -package kamon.instrumentation.akka.instrumentations.akka_26.remote - -import kanela.agent.api.instrumentation.InstrumentationBuilder - -// Only exists to avoid warnings of this class not existing when running on Scala 2.11 -class RemotingInstrumentation extends InstrumentationBuilder {} \ No newline at end of file diff --git a/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/DispatcherInstrumentation.scala b/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/DispatcherInstrumentation.scala deleted file mode 100644 index cca0f2898..000000000 --- a/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/DispatcherInstrumentation.scala +++ /dev/null @@ -1,182 +0,0 @@ -/* - * ========================================================================================= - * Copyright © 2013-2018 the kamon project - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file - * except in compliance with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the - * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, - * either express or implied. See the License for the specific language governing permissions - * and limitations under the License. 
- * ========================================================================================= - */ - -package kamon.instrumentation.akka.instrumentations.akka_25 - -import java.util.concurrent.{Callable, ExecutorService} - -import akka.dispatch.{DefaultExecutorServiceConfigurator, DispatcherPrerequisites, Dispatchers, ForkJoinExecutorConfigurator, PinnedDispatcherConfigurator, ThreadPoolExecutorConfigurator} -import kamon.instrumentation.akka.instrumentations.VersionFiltering -import akka.dispatch.forkjoin.ForkJoinPool -import kamon.Kamon -import kamon.instrumentation.akka.AkkaInstrumentation -import kamon.instrumentation.akka.instrumentations.DispatcherInfo.{HasDispatcherName, HasDispatcherPrerequisites} -import kamon.instrumentation.executor.ExecutorInstrumentation -import kamon.instrumentation.executor.ExecutorInstrumentation.ForkJoinPoolTelemetryReader -import kamon.tag.TagSet -import kanela.agent.api.instrumentation.InstrumentationBuilder -import kanela.agent.libs.net.bytebuddy.asm.Advice -import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation.{SuperCall, This} - -class DispatcherInstrumentation extends InstrumentationBuilder with VersionFiltering { - - /** - * This is where the actual ExecutorService instances are being created, but at this point we don't have access to - * the Actor System Name nor the Dispatcher name, which is why there is additional instrumentation to carry these two - * names down to the ExecutorServiceFactory and use them to tag the newly instrumented ExecutorService. - */ - onAkka("2.5") { - onSubTypesOf("akka.dispatch.ExecutorServiceFactory") - .mixin(classOf[HasDispatcherPrerequisites.Mixin]) - .mixin(classOf[HasDispatcherName.Mixin]) - .intercept(method("createExecutorService"), InstrumentNewExecutorServiceOnAkka25) - } - - onAkka("2.4") { - onSubTypesOf("akka.dispatch.ExecutorServiceFactory") - .mixin(classOf[HasDispatcherPrerequisites.Mixin]) - .mixin(classOf[HasDispatcherName.Mixin]) - .intercept(method("createExecutorService"), InstrumentNewExecutorServiceOnAkka24) - } - - onAkka("2.4", "2.5") { - - /** - * First step on getting the Actor System name is to read it from the prerequisites instance passed to the - * constructors of these two classes. - */ - onTypes( - "akka.dispatch.ThreadPoolExecutorConfigurator", - "akka.dispatch.ForkJoinExecutorConfigurator", - "akka.dispatch.PinnedDispatcherConfigurator", - "akka.dispatch.DefaultExecutorServiceConfigurator") - .mixin(classOf[HasDispatcherPrerequisites.Mixin]) - .advise(isConstructor, CaptureDispatcherPrerequisitesOnExecutorConfigurator) - - /** - * Copies the Actor System and Dispatcher names to the ExecutorServiceFactory instances for the two types of - * executors instrumented by Kamon. - */ - onTypes( - "akka.dispatch.ThreadPoolConfig", - "akka.dispatch.ForkJoinExecutorConfigurator", - "akka.dispatch.PinnedDispatcherConfigurator", - "akka.dispatch.DefaultExecutorServiceConfigurator") - .mixin(classOf[HasDispatcherName.Mixin]) - .advise(method("createExecutorServiceFactory"), CopyDispatcherInfoToExecutorServiceFactory) - - /** - * This ensures that the ActorSystem name is not lost when creating PinnedDispatcher instances. 
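For reference, HasDispatcherPrerequisites and HasDispatcherName are field-backed mixins that Kanela weaves into the matched types; the advices then move the names through plain casts. A sketch of the shape of such a mixin, mirroring the HasSerializationInstruments.Mixin defined later in this patch (the real definitions live in DispatcherInfo):

    trait HasDispatcherName {
      def dispatcherName: String
      def setDispatcherName(name: String): Unit
    }

    object HasDispatcherName {
      // Kanela merges this class into the instrumented type, adding the backing field.
      class Mixin(var dispatcherName: String) extends HasDispatcherName {
        override def setDispatcherName(name: String): Unit =
          dispatcherName = name
      }
    }

    // e.g., from an advice body:
    //   factory.asInstanceOf[HasDispatcherName].setDispatcherName(dispatcherName)
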
- */ - onType("akka.dispatch.ThreadPoolConfig") - .mixin(classOf[HasDispatcherPrerequisites.Mixin]) - .advise(method("copy"), ThreadPoolConfigCopyAdvice) - } - -} - -object CaptureDispatcherPrerequisitesOnExecutorConfigurator { - - @Advice.OnMethodExit(suppress = classOf[Throwable]) - def exit(@Advice.This configurator: Any, @Advice.Argument(1) prerequisites: DispatcherPrerequisites): Unit = { - configurator match { - case fjec: ForkJoinExecutorConfigurator => fjec.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(prerequisites) - case tpec: ThreadPoolExecutorConfigurator => tpec.threadPoolConfig.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(prerequisites) - case pdc: PinnedDispatcherConfigurator => pdc.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(prerequisites) - case desc: DefaultExecutorServiceConfigurator => desc.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(prerequisites) - case _ => // just ignore any other case. - } - } -} - -object CopyDispatcherInfoToExecutorServiceFactory { - - @Advice.OnMethodExit - def exit(@Advice.This poolConfig: HasDispatcherPrerequisites, @Advice.Argument(0) dispatcherName: String, @Advice.Return factory: Any): Unit = { - val factoryWithMixins = factory.asInstanceOf[HasDispatcherName with HasDispatcherPrerequisites] - factoryWithMixins.setDispatcherPrerequisites(poolConfig.dispatcherPrerequisites) - factoryWithMixins.setDispatcherName(dispatcherName) - } -} - - -object InstrumentNewExecutorServiceOnAkka24 { - - def around(@This factory: HasDispatcherPrerequisites with HasDispatcherName, @SuperCall callable: Callable[ExecutorService]): ExecutorService = { - val executor = callable.call() - val dispatcherName = factory.dispatcherName - - if(Kamon.filter(AkkaInstrumentation.TrackDispatcherFilterName).accept(dispatcherName)) { - val actorSystemName = factory.dispatcherPrerequisites.settings.name - val scheduledActionName = actorSystemName + "/" + dispatcherName - val systemTags = TagSet.of("akka.system", actorSystemName) - val defaultEcOption = factory.dispatcherPrerequisites.defaultExecutionContext - - if(dispatcherName == Dispatchers.DefaultDispatcherId && defaultEcOption.isDefined) { - ExecutorInstrumentation.instrumentExecutionContext(defaultEcOption.get, dispatcherName, systemTags, scheduledActionName, ExecutorInstrumentation.DefaultSettings) - .underlyingExecutor.getOrElse(executor) - } else { - ExecutorInstrumentation.instrument(executor, dispatcherName, systemTags, scheduledActionName, ExecutorInstrumentation.DefaultSettings) - } - } else - executor - } -} - - -object InstrumentNewExecutorServiceOnAkka25 { - - def around(@This factory: HasDispatcherPrerequisites with HasDispatcherName, @SuperCall callable: Callable[ExecutorService]): ExecutorService = { - val executor = callable.call() - val dispatcherName = factory.dispatcherName - - if(Kamon.filter(AkkaInstrumentation.TrackDispatcherFilterName).accept(dispatcherName)) { - val actorSystemName = factory.dispatcherPrerequisites.settings.name - val scheduledActionName = actorSystemName + "/" + dispatcherName - val systemTags = TagSet.of("akka.system", actorSystemName) - val defaultEcOption = factory.dispatcherPrerequisites.defaultExecutionContext - - if(dispatcherName == Dispatchers.DefaultDispatcherId && defaultEcOption.isDefined) { - ExecutorInstrumentation.instrumentExecutionContext(defaultEcOption.get, dispatcherName, systemTags, scheduledActionName, ExecutorInstrumentation.DefaultSettings) - 
.underlyingExecutor.getOrElse(executor) - } else { - executor match { - case afjp: ForkJoinPool => - ExecutorInstrumentation.instrument(executor, telemetryReader(afjp), dispatcherName, systemTags, scheduledActionName, ExecutorInstrumentation.DefaultSettings) - - case otherExecutor => - ExecutorInstrumentation.instrument(otherExecutor, dispatcherName, systemTags, scheduledActionName, ExecutorInstrumentation.DefaultSettings) - } - } - } else executor - } - - def telemetryReader(fjp: ForkJoinPool): ForkJoinPoolTelemetryReader = new ForkJoinPoolTelemetryReader { - override def activeThreads: Int = fjp.getActiveThreadCount - override def poolSize: Int = fjp.getPoolSize - override def queuedTasks: Int = fjp.getQueuedSubmissionCount - override def parallelism: Int = fjp.getParallelism - } -} - -object ThreadPoolConfigCopyAdvice { - - @Advice.OnMethodExit - def exit(@Advice.This original: Any, @Advice.Return copy: Any): Unit = { - copy.asInstanceOf[HasDispatcherPrerequisites].setDispatcherPrerequisites(original.asInstanceOf[HasDispatcherPrerequisites].dispatcherPrerequisites) - copy.asInstanceOf[HasDispatcherName].setDispatcherName(original.asInstanceOf[HasDispatcherName].dispatcherName) - } -} diff --git a/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/RemotingInstrumentation.scala b/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/RemotingInstrumentation.scala deleted file mode 100644 index 617082b83..000000000 --- a/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/RemotingInstrumentation.scala +++ /dev/null @@ -1,167 +0,0 @@ -package kamon.instrumentation.akka.instrumentations.akka_25.remote - -import akka.actor.ActorSystem -import akka.kamon.instrumentation.akka.instrumentations.akka_25.remote.{AkkaPduProtobufCodecConstructMessageMethodInterceptor, AkkaPduProtobufCodecDecodeMessage} -import akka.remote.kamon.instrumentation.akka.instrumentations.akka_25.remote.{ArteryMessageDispatcherAdvice, CaptureContextOnInboundEnvelope, DeserializeForArteryAdvice, SerializeForArteryAdvice} -import kamon.Kamon -import kamon.context.Storage.Scope -import kamon.instrumentation.akka.AkkaRemoteInstrumentation -import kamon.instrumentation.akka.AkkaRemoteMetrics.SerializationInstruments -import kamon.instrumentation.akka.instrumentations.{AkkaPrivateAccess, VersionFiltering} -import kamon.instrumentation.context.{CaptureCurrentContextOnExit, HasContext} -import kanela.agent.api.instrumentation.InstrumentationBuilder -import kanela.agent.libs.net.bytebuddy.asm.Advice - - -class RemotingInstrumentation extends InstrumentationBuilder with VersionFiltering { - - onAkka("2.4", "2.5") { - - /** - * Send messages might be buffered if they reach the EndpointWriter before it has been initialized and the current - * Context might be lost after the buffering, so we make sure we capture the context when the Send command was - * created and then apply it during the EndpointWrite.writeSend method execution (see below). - */ - onType("akka.remote.EndpointManager$Send") - .mixin(classOf[HasContext.Mixin]) - .advise(isConstructor, CaptureCurrentContextOnExit) - - onType("akka.remote.EndpointWriter") - .advise(method("writeSend"), WriteSendWithContext) - - /** - * Reads and writes the Akka PDU using a modified version of the Protobuf that has an extra field for a Context - * instance. 
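The Context instance travels as raw bytes written and read with Kamon's binary propagation, the same calls used by the interceptor and advice wired up below. A self-contained round-trip sketch using Kamon's public API:

    import java.io.ByteArrayOutputStream
    import kamon.Kamon
    import kamon.context.BinaryPropagation.{ByteStreamReader, ByteStreamWriter}
    import kamon.context.Context

    // Sending side: serialize the current Context into a byte array.
    val out = new ByteArrayOutputStream()
    Kamon.defaultBinaryPropagation().write(Kamon.currentContext(), ByteStreamWriter.of(out))

    // Receiving side: rebuild the Context from those bytes.
    val restored: Context = Kamon.defaultBinaryPropagation().read(ByteStreamReader.of(out.toByteArray))
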
- */ - onType("akka.remote.transport.AkkaPduProtobufCodec$") - .intercept(method("constructMessage"), new AkkaPduProtobufCodecConstructMessageMethodInterceptor()) - .advise(method("decodeMessage"), classOf[AkkaPduProtobufCodecDecodeMessage]) - - /** - * Mixin Serialization Instruments to the Actor System and use them to record the serialization and deserialization - * time metrics. - */ - onType("akka.actor.ActorSystemImpl") - .mixin(classOf[HasSerializationInstruments.Mixin]) - .advise(isConstructor, InitializeActorSystemAdvice) - - onType("akka.remote.MessageSerializer$") - .advise(method("serialize"), MeasureSerializationTime) - .advise(method("deserialize"), MeasureDeserializationTime) - - /** - * Artery - */ - onType("akka.remote.artery.ReusableOutboundEnvelope") - .mixin(classOf[HasContext.Mixin]) - .advise(method("copy"), CopyContextOnReusableEnvelope) - - onType("akka.remote.artery.Association") - .advise(method("createOutboundEnvelope$1"), CaptureCurrentContextOnReusableEnvelope) - - onType("akka.remote.MessageSerializer$") - .advise(method("serializeForArtery"), classOf[SerializeForArteryAdvice]) - .advise(method("deserializeForArtery"), classOf[DeserializeForArteryAdvice]) - - onType("akka.remote.artery.ReusableInboundEnvelope") - .mixin(classOf[HasContext.Mixin]) - .advise(method("withMessage"), classOf[CaptureContextOnInboundEnvelope]) - .advise(method("copyForLane"), CopyContextOnReusableEnvelope) - - onType("akka.remote.artery.MessageDispatcher") - .advise(method("dispatch"), classOf[ArteryMessageDispatcherAdvice]) - } - -} - -object CopyContextOnReusableEnvelope { - @Advice.OnMethodExit - def exit(@Advice.This oldEnvelope: Any, @Advice.Return newEnvelope: Any): Unit = - newEnvelope.asInstanceOf[HasContext].setContext(oldEnvelope.asInstanceOf[HasContext].context) -} - -object CaptureCurrentContextOnReusableEnvelope { - - @Advice.OnMethodExit - def exit(@Advice.Return envelope: Any): Unit = { - envelope.asInstanceOf[HasContext].setContext(Kamon.currentContext()) - } -} - -object WriteSendWithContext { - - @Advice.OnMethodEnter - def enter(@Advice.Argument(0) send: Any): Scope = { - Kamon.storeContext(send.asInstanceOf[HasContext].context) - } - - @Advice.OnMethodExit - def exit(@Advice.Enter scope: Scope): Unit = { - scope.asInstanceOf[Scope].close() - } -} - -trait HasSerializationInstruments { - def serializationInstruments: SerializationInstruments - def setSerializationInstruments(instruments: SerializationInstruments): Unit -} - -object HasSerializationInstruments { - - class Mixin(var serializationInstruments: SerializationInstruments) extends HasSerializationInstruments { - override def setSerializationInstruments(instruments: SerializationInstruments): Unit = - serializationInstruments = instruments - } -} - -object InitializeActorSystemAdvice { - - @Advice.OnMethodExit - def exit(@Advice.This system: ActorSystem with HasSerializationInstruments): Unit = - system.setSerializationInstruments(new SerializationInstruments(system.name)) - -} - -object MeasureSerializationTime { - - @Advice.OnMethodEnter - def enter(): Long = { - if(AkkaRemoteInstrumentation.settings().trackSerializationMetrics) System.nanoTime() else 0L - } - @Advice.OnMethodExit - def exit(@Advice.Argument(0) system: AnyRef, @Advice.Enter startNanoTime: Long): Unit = { - if(startNanoTime != 0L) { - system.asInstanceOf[HasSerializationInstruments] - .serializationInstruments - .serializationTime - .record(System.nanoTime() - startNanoTime) - } - } -} - -object MeasureDeserializationTime { - - 
@Advice.OnMethodEnter - def enter(): Long = { - if(AkkaRemoteInstrumentation.settings().trackSerializationMetrics) System.nanoTime() else 0L - } - - @Advice.OnMethodExit - def exit(@Advice.Argument(0) system: AnyRef, @Advice.Enter startNanoTime: Long, @Advice.Return msg: Any): Unit = { - - if(AkkaPrivateAccess.isSystemMessage(msg)) { - msg match { - case hc: HasContext if hc.context == null => - hc.setContext(Kamon.currentContext()) - case _ => - } - } - - if(startNanoTime != 0L) { - system.asInstanceOf[HasSerializationInstruments] - .serializationInstruments - .deserializationTime - .record(System.nanoTime() - startNanoTime) - } - } -} diff --git a/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/AkkaPduProtobufCodecConstructMessageMethodInterceptor.scala b/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/AkkaPduProtobufCodecConstructMessageMethodInterceptor.scala deleted file mode 100644 index 012e95953..000000000 --- a/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/AkkaPduProtobufCodecConstructMessageMethodInterceptor.scala +++ /dev/null @@ -1,81 +0,0 @@ -package akka.kamon.instrumentation.akka.instrumentations.akka_25.remote - -import java.io.ByteArrayOutputStream - -import akka.KamonOptionVal.OptionVal -import akka.actor.{ActorRef, Address} -import akka.remote.ContextAwareWireFormats.{AckAndContextAwareEnvelopeContainer, ContextAwareRemoteEnvelope, RemoteContext} -import akka.remote.WireFormats.{AcknowledgementInfo, ActorRefData, AddressData, SerializedMessage} -import akka.remote.{Ack, SeqNo} -import akka.util.ByteString -import kamon.Kamon -import kamon.context.BinaryPropagation.ByteStreamWriter -import kamon.instrumentation.akka.AkkaRemoteMetrics -import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation.{Argument, RuntimeType} - -/** - * Interceptor for akka.remote.transport.AkkaPduProtobufCodec$::constructMessage - */ -class AkkaPduProtobufCodecConstructMessageMethodInterceptor { - - @RuntimeType - def aroundConstructMessage(@Argument(0) localAddress: Address, - @Argument(1) recipient: ActorRef, - @Argument(2) serializedMessage: SerializedMessage, - @Argument(3) senderOption: OptionVal[ActorRef], - @Argument(4) seqOption: Option[SeqNo], - @Argument(5) ackOption: Option[Ack]): AnyRef = { - - val ackAndEnvelopeBuilder = AckAndContextAwareEnvelopeContainer.newBuilder - val envelopeBuilder = ContextAwareRemoteEnvelope.newBuilder - - envelopeBuilder.setRecipient(serializeActorRef(recipient.path.address, recipient)) - if (senderOption.isDefined) - envelopeBuilder.setSender(serializeActorRef(localAddress, senderOption.get)) - seqOption foreach { seq => envelopeBuilder.setSeq(seq.rawValue) } - ackOption foreach { ack => ackAndEnvelopeBuilder.setAck(ackBuilder(ack)) } - envelopeBuilder.setMessage(serializedMessage) - - val out = new ByteArrayOutputStream() - Kamon.defaultBinaryPropagation().write(Kamon.currentContext(), ByteStreamWriter.of(out)) - - val remoteTraceContext = RemoteContext.newBuilder().setContext( - akka.protobuf.ByteString.copyFrom(out.toByteArray) - ) - envelopeBuilder.setTraceContext(remoteTraceContext) - - ackAndEnvelopeBuilder.setEnvelope(envelopeBuilder) - - val messageSize = envelopeBuilder.getMessage.getMessage.size() - AkkaRemoteMetrics.serializationInstruments(localAddress.system).outboundMessageSize.record(messageSize) - - 
ByteString.ByteString1C(ackAndEnvelopeBuilder.build.toByteArray) //Reuse Byte Array (naughty!) - } - - // Copied from akka.remote.transport.AkkaPduProtobufCodec because of private access. - private def ackBuilder(ack: Ack): AcknowledgementInfo.Builder = { - val ackBuilder = AcknowledgementInfo.newBuilder() - ackBuilder.setCumulativeAck(ack.cumulativeAck.rawValue) - ack.nacks foreach { nack => ackBuilder.addNacks(nack.rawValue) } - ackBuilder - } - - // Copied from akka.remote.transport.AkkaPduProtobufCodec because of private access. - private def serializeActorRef(defaultAddress: Address, ref: ActorRef): ActorRefData = { - ActorRefData.newBuilder.setPath( - if (ref.path.address.host.isDefined) ref.path.toSerializationFormat - else ref.path.toSerializationFormatWithAddress(defaultAddress)).build() - } - - // Copied from akka.remote.transport.AkkaPduProtobufCodec because of private access. - private def serializeAddress(address: Address): AddressData = address match { - case Address(protocol, system, Some(host), Some(port)) => - AddressData.newBuilder - .setHostname(host) - .setPort(port) - .setSystem(system) - .setProtocol(protocol) - .build() - case _ => throw new IllegalArgumentException(s"Address [$address] could not be serialized: host or port missing.") - } -} \ No newline at end of file diff --git a/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/AkkaPduProtobufCodecDecodeMessageMethodAdvisor.scala b/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/AkkaPduProtobufCodecDecodeMessageMethodAdvisor.scala deleted file mode 100644 index 3ff4c00e4..000000000 --- a/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/AkkaPduProtobufCodecDecodeMessageMethodAdvisor.scala +++ /dev/null @@ -1,34 +0,0 @@ -package akka.kamon.instrumentation.akka.instrumentations.akka_25.remote - -import akka.actor.Address -import akka.remote.RemoteActorRefProvider -import akka.util.ByteString -import kamon.Kamon -import kamon.context.BinaryPropagation.ByteStreamReader -import kamon.instrumentation.akka.AkkaRemoteMetrics -import kanela.agent.libs.net.bytebuddy.asm.Advice.{Argument, OnMethodEnter} -import akka.remote.ContextAwareWireFormats.{AckAndContextAwareEnvelopeContainer} - -/** - * Advisor for akka.remote.transport.AkkaPduProtobufCodec$::decodeMessage - */ -class AkkaPduProtobufCodecDecodeMessage - -object AkkaPduProtobufCodecDecodeMessage { - - @OnMethodEnter - def enter(@Argument(0) bs: ByteString, @Argument(1) provider: RemoteActorRefProvider, @Argument(2) localAddress: Address): Unit = { - val ackAndEnvelope = AckAndContextAwareEnvelopeContainer.parseFrom(bs.toArray) - if (ackAndEnvelope.hasEnvelope && ackAndEnvelope.getEnvelope.hasTraceContext) { - val remoteCtx = ackAndEnvelope.getEnvelope.getTraceContext - - if(remoteCtx.getContext.size() > 0) { - val ctx = Kamon.defaultBinaryPropagation().read(ByteStreamReader.of(remoteCtx.getContext.toByteArray)) - Kamon.storeContext(ctx) - } - - val messageSize = ackAndEnvelope.getEnvelope.getMessage.getMessage.size() - AkkaRemoteMetrics.serializationInstruments(localAddress.system).inboundMessageSize.record(messageSize) - } - } -} diff --git a/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/ArterySerializationAdvice.scala 
b/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/ArterySerializationAdvice.scala deleted file mode 100644 index 02ec96984..000000000 --- a/instrumentation/kamon-akka/src/akka-2.5/scala/kamon/instrumentation/akka/instrumentations/akka_25/remote/internal/ArterySerializationAdvice.scala +++ /dev/null @@ -1,171 +0,0 @@ -package akka.remote.kamon.instrumentation.akka.instrumentations.akka_25.remote - -import java.nio.ByteBuffer - -import akka.actor.ActorSystem -import akka.dispatch.sysmsg.SystemMessage -import akka.remote.artery.{EnvelopeBuffer, InboundEnvelope, OutboundEnvelope} -import akka.serialization.Serialization -import kamon.Kamon -import kamon.context.{BinaryPropagation, Context, Storage} -import kamon.instrumentation.akka.AkkaRemoteMetrics -import kamon.instrumentation.context.HasContext -import kanela.agent.libs.net.bytebuddy.asm.Advice - - -/** - * For Artery messages we will always add two sections to the end of each serialized message: the Context and the size - * of the Context. The layout will look something like this: - * - * |------------------ Actual Message ------------------||-- Kamon Context --||-- Context Size (4 bytes) --| - * - * If the Context is empty the Context size will be zero. - */ - -class SerializeForArteryAdvice -object SerializeForArteryAdvice { - - @Advice.OnMethodEnter - def enter(): Long = { - System.nanoTime() - } - - @Advice.OnMethodExit - def exit(@Advice.Argument(0) serialization: Serialization, @Advice.Argument(1) envelope: OutboundEnvelope, - @Advice.Argument(3) envelopeBuffer: EnvelopeBuffer, @Advice.Enter startTime: Long): Unit = { - - val instruments = AkkaRemoteMetrics.serializationInstruments(serialization.system.name) - val messageBuffer = envelopeBuffer.byteBuffer - val context = envelope.asInstanceOf[HasContext].context - val positionBeforeContext = messageBuffer.position() - - if(context.nonEmpty()) { - Kamon.defaultBinaryPropagation().write(context, byteBufferWriter(messageBuffer)) - } - - instruments.serializationTime.record(System.nanoTime() - startTime) - instruments.outboundMessageSize.record(positionBeforeContext) - - val contextSize = messageBuffer.position() - positionBeforeContext - messageBuffer.putInt(contextSize) - } - - def byteBufferWriter(bb: ByteBuffer): BinaryPropagation.ByteStreamWriter = new BinaryPropagation.ByteStreamWriter { - override def write(bytes: Array[Byte]): Unit = - bb.put(bytes) - - override def write(bytes: Array[Byte], offset: Int, count: Int): Unit = - bb.put(bytes, offset, count) - - override def write(byte: Int): Unit = - bb.put(byte.toByte) - } -} - -class DeserializeForArteryAdvice -object DeserializeForArteryAdvice { - - val LastDeserializedContext = new ThreadLocal[Context]() { - override def initialValue(): Context = null - } - - case class DeserializationInfo( - context: Context, - timeStamp: Long, - messageSize: Long - ) - - @Advice.OnMethodEnter - def exit(@Advice.Argument(5) envelopeBuffer: EnvelopeBuffer): DeserializationInfo = { - val startTime = System.nanoTime() - val messageBuffer = envelopeBuffer.byteBuffer - val messageStart = messageBuffer.position() - - messageBuffer.mark() - messageBuffer.position(messageBuffer.limit() - 4) - val contextSize = messageBuffer.getInt() - val contextStart = messageBuffer.limit() - (contextSize + 4) - val messageSize = contextStart - messageStart - - val context = if(contextSize == 0) - Context.Empty - else { - messageBuffer - .position(contextStart) - .limit(contextStart + contextSize) - - 
Kamon.defaultBinaryPropagation().read(byteBufferReader(messageBuffer)) - } - - messageBuffer.reset() - messageBuffer.limit(contextStart) - DeserializationInfo(context, startTime, messageSize) - } - - @Advice.OnMethodExit(onThrowable = classOf[Throwable]) - def exit(@Advice.Argument(0) system: ActorSystem, @Advice.Argument(5) envelopeBuffer: EnvelopeBuffer, - @Advice.Enter deserializationInfo: DeserializationInfo, @Advice.Thrown error: Throwable): Unit = { - - if(error == null) { - LastDeserializedContext.set(deserializationInfo.context) - - val instruments = AkkaRemoteMetrics.serializationInstruments(system.name) - instruments.deserializationTime.record(System.nanoTime() - deserializationInfo.timeStamp) - instruments.inboundMessageSize.record(deserializationInfo.messageSize) - } - } - - - def byteBufferReader(bb: ByteBuffer): BinaryPropagation.ByteStreamReader = new BinaryPropagation.ByteStreamReader { - override def available(): Int = - bb.remaining() - - override def read(target: Array[Byte]): Int = { - bb.get(target) - target.length - } - - override def read(target: Array[Byte], offset: Int, count: Int): Int = { - bb.get(target, offset, count) - target.length - } - - override def readAll(): Array[Byte] = { - val array = Array.ofDim[Byte](bb.remaining()) - bb.get(array) - array - } - } -} - -class CaptureContextOnInboundEnvelope -object CaptureContextOnInboundEnvelope { - - @Advice.OnMethodEnter - def enter(@Advice.This inboundEnvelope: Any): Unit = { - val lastContext = DeserializeForArteryAdvice.LastDeserializedContext.get() - if(lastContext != null) { - inboundEnvelope.asInstanceOf[HasContext].setContext(lastContext) - DeserializeForArteryAdvice.LastDeserializedContext.set(null) - } - } - -} - -class ArteryMessageDispatcherAdvice -object ArteryMessageDispatcherAdvice { - - @Advice.OnMethodEnter - def enter(@Advice.Argument(0) envelope: InboundEnvelope): Storage.Scope = { - val context = envelope.asInstanceOf[HasContext].context - if(envelope.message.isInstanceOf[SystemMessage]) { - envelope.message.asInstanceOf[HasContext].setContext(context) - } - - Kamon.storeContext(envelope.asInstanceOf[HasContext].context) - } - - @Advice.OnMethodExit - def exit(@Advice.Enter scope: Storage.Scope): Unit = - scope.close() -} \ No newline at end of file diff --git a/instrumentation/kamon-akka/src/common/resources/reference.conf b/instrumentation/kamon-akka/src/common/resources/reference.conf index be3029c48..16c6c356b 100644 --- a/instrumentation/kamon-akka/src/common/resources/reference.conf +++ b/instrumentation/kamon-akka/src/common/resources/reference.conf @@ -152,7 +152,6 @@ kanela.modules { "kamon.instrumentation.akka.instrumentations.AskPatternInstrumentation", "kamon.instrumentation.akka.instrumentations.EventStreamInstrumentation", "kamon.instrumentation.akka.instrumentations.ActorRefInstrumentation", - "kamon.instrumentation.akka.instrumentations.akka_25.DispatcherInstrumentation", "kamon.instrumentation.akka.instrumentations.akka_26.DispatcherInstrumentation", "kamon.instrumentation.akka.instrumentations.akka_26.ActorMonitorInstrumentation", "kamon.instrumentation.akka.instrumentations.SchedulerInstrumentation", @@ -160,13 +159,13 @@ kanela.modules { ] within = [ - "^akka.dispatch..*", - "^akka.event..*", - "^akka.actor..*", - "^akka.pattern..*", - "^akka.cluster..*", - "^akka.routing..*", - "kamon.instrumentation.akka.instrumentations..*" + "akka.dispatch.", + "akka.event.", + "akka.actor.", + "akka.pattern.", + "akka.cluster.", + "akka.routing.", + 
"kamon.instrumentation.akka.instrumentations." ] } @@ -177,17 +176,16 @@ kanela.modules { instrumentations = [ "kamon.instrumentation.akka.remote.MessageBufferInstrumentation", - "kamon.instrumentation.akka.instrumentations.akka_25.remote.RemotingInstrumentation", "kamon.instrumentation.akka.instrumentations.akka_26.remote.RemotingInstrumentation" ] within = [ - "akka.dispatch..*", - "akka.util..*", - "akka.remote..*", - "akka.actor..*", - "akka.cluster..*", - "akka.serialization..*" + "akka.dispatch.", + "akka.util.", + "akka.remote.", + "akka.actor.", + "akka.cluster.", + "akka.serialization." ] } @@ -201,12 +199,12 @@ kanela.modules { ] within = [ - "akka.dispatch..*", - "akka.util..*", - "akka.remote..*", - "akka.actor..*" - "akka.cluster..*" - "akka.serialization..*" + "akka.dispatch.", + "akka.util.", + "akka.remote.", + "akka.actor." + "akka.cluster." + "akka.serialization." ] } } diff --git a/instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/remote/MessageBufferTest.scala b/instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/remote/MessageBufferTest.scala deleted file mode 100644 index b35bc5bd9..000000000 --- a/instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/remote/MessageBufferTest.scala +++ /dev/null @@ -1,34 +0,0 @@ -package kamon.instrumentation.akka.remote - -import akka.actor.Actor -import akka.util.MessageBuffer -import kamon.Kamon -import kamon.context.Context -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpecLike - -class MessageBufferTest extends AnyWordSpecLike with Matchers { - - "the MessageBuffer instrumentation" should { - "remember the current context when appending message and apply it when foreach is called when used directly" in { - val messageBuffer = MessageBuffer.empty - val key = Context.key("some_key", "") - - Kamon.runWithContext(Context.of(key, "some_value")) { - messageBuffer.append("scala", Actor.noSender) - } - - Kamon.currentContext().get(key) shouldBe "" - - var iterated = false - messageBuffer.foreach { (msg, ref) => - iterated = true - Kamon.currentContext().get(key) shouldBe "some_value" - } - - iterated shouldBe true - - } - } - -} diff --git a/instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/sharding/ShardingInstrumentationSpec.scala b/instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/sharding/ShardingInstrumentationSpec.scala deleted file mode 100644 index 0b75f5e2b..000000000 --- a/instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/sharding/ShardingInstrumentationSpec.scala +++ /dev/null @@ -1,173 +0,0 @@ -package akka.kamon.instrumentation.akka.sharding - -import akka.actor._ -import akka.cluster.sharding.ShardCoordinator.Internal.{HandOff, ShardStopped} -import akka.cluster.sharding.ShardCoordinator.ShardAllocationStrategy -import akka.cluster.sharding.ShardRegion.{GracefulShutdown, ShardId} -import akka.cluster.sharding.{ClusterSharding, ClusterShardingSettings, ShardRegion} -import akka.testkit.TestActor.Watch -import akka.testkit.{ImplicitSender, TestKitBase} -import com.typesafe.config.ConfigFactory -import kamon.instrumentation.akka.AkkaClusterShardingMetrics._ -import kamon.tag.TagSet -import kamon.testkit.{InitAndStopKamonAfterAll, InstrumentInspection, MetricInspection} -import org.scalactic.TimesOnInt.convertIntToRepeater -import org.scalatest.concurrent.Eventually -import org.scalatest.matchers.should.Matchers -import 
org.scalatest.time._ -import org.scalatest.wordspec.AnyWordSpecLike - -import scala.collection.immutable -import scala.concurrent.Future -import scala.util.Random -import org.scalatest.time._ - -case class TestMessage(shard: String, entity: String) - -class ShardingInstrumentationSpec - extends TestKitBase - with AnyWordSpecLike - with Matchers - with ImplicitSender - with MetricInspection.Syntax - with InstrumentInspection.Syntax - with InitAndStopKamonAfterAll - with Eventually { - - lazy val system: ActorSystem = { - ActorSystem( - "sharding", - ConfigFactory - .parseString(""" - |akka { - | actor.provider = "cluster" - | remote { - | enabled-transports = ["akka.remote.netty.tcp"] - | netty.tcp { - | hostname = "127.0.0.1" - | port = 2551 - | } - | } - | cluster { - | seed-nodes = ["akka.tcp://sharding@127.0.0.1:2551"] - | log-info = on - | cluster.jmx.multi-mbeans-in-same-jvm = on - | } - |} - """.stripMargin) - .withFallback(ConfigFactory.load()) - ) - } - - val entityIdExtractor: ShardRegion.ExtractEntityId = { case msg @ TestMessage(_, entity) => (entity, msg) } - val shardIdExtractor: ShardRegion.ExtractShardId = { case msg @ TestMessage(shard, _) => shard } - - val StaticAllocationStrategy = new ShardAllocationStrategy { - override def allocateShard( - requester: ActorRef, - shardId: ShardId, - currentShardAllocations: Map[ActorRef, immutable.IndexedSeq[ShardId]]) - : Future[ActorRef] = { - Future.successful(requester) - } - - override def rebalance( - currentShardAllocations: Map[ActorRef, immutable.IndexedSeq[ShardId]], - rebalanceInProgress: Set[ShardId]): Future[Set[ShardId]] = { - Future.successful(Set.empty) - } - } - - def registerTypes(shardedType: String, props: Props, system: ActorSystem, allocationStrategy: ShardAllocationStrategy): ActorRef = - ClusterSharding(system).start( - typeName = shardedType, - entityProps = props, - settings = ClusterShardingSettings(system), - extractEntityId = entityIdExtractor, - extractShardId = shardIdExtractor, - allocationStrategy = allocationStrategy, - handOffStopMessage = PoisonPill - ) - - class ShardedTypeContext { - val shardType = s"TestType-${Random.nextLong()}" - val region = registerTypes(shardType, TestActor.props(testActor), system, StaticAllocationStrategy) - val shardTags = TagSet.builder() - .add("type", shardType) - .add("system", system.name) - .build() - } - - "the Cluster sharding instrumentation" should { - "track shards, entities and messages" in new ShardedTypeContext { - region ! TestMessage("s1", "e1") - region ! TestMessage("s1", "e2") - region ! TestMessage("s2", "e3") - - 3 times { - expectMsg("OK") - } - - RegionProcessedMessages.withTags(shardTags).value() shouldBe 3L - - eventually(timeout(Span(2, Seconds))) { - RegionHostedShards.withTags(shardTags).distribution().max shouldBe 2L - RegionHostedEntities.withTags(shardTags).distribution().max shouldBe 3L - } - - eventually(timeout(Span(2, Seconds))) { - ShardProcessedMessages.withTags(shardTags).distribution(resetState = false).sum shouldBe 3L - ShardHostedEntities.withTags(shardTags).distribution(resetState = false).max shouldBe 2L - } - } - - "clean metrics on handoff" in new ShardedTypeContext { - region ! TestMessage("s1", "e1") - expectMsg("OK") - - eventually(timeout(Span(2, Seconds))) { - RegionHostedShards.withTags(shardTags).distribution().max shouldBe 1L - RegionHostedEntities.withTags(shardTags).distribution().max shouldBe 1L - } - - region ! 
HandOff("s1") - expectMsg(ShardStopped("s1")) - - eventually(timeout(Span(10, Seconds))) { - RegionHostedShards.withTags(shardTags).distribution().max shouldBe 0L - RegionHostedEntities.withTags(shardTags).distribution().max shouldBe 0L - } - } - - "clean metrics on shutdown" in new ShardedTypeContext { - region ! TestMessage("s1", "e1") - expectMsg("OK") - - RegionHostedShards.tagValues("type") should contain(shardType) - RegionHostedEntities.tagValues("type") should contain(shardType) - RegionProcessedMessages.tagValues("type") should contain(shardType) - - testActor ! Watch(region) - region ! GracefulShutdown - expectTerminated(region) - - RegionHostedShards.tagValues("type") should not contain(shardType) - RegionHostedEntities.tagValues("type") should not contain(shardType) - RegionProcessedMessages.tagValues("type") should not contain(shardType) - } - } - -} - -object TestActor { - - def props(testActor: ActorRef) = - Props(classOf[TestActor], testActor) -} - -class TestActor(testActor: ActorRef) extends Actor { - - override def receive: Actor.Receive = { - case _ => testActor ! "OK" - } -} diff --git a/instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/sharding/ShardingMessageBufferingSpec.scala b/instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/sharding/ShardingMessageBufferingSpec.scala deleted file mode 100644 index de9f904e3..000000000 --- a/instrumentation/kamon-akka/src/test-akka-2.5/scala/kamon/instrumentation/akka/sharding/ShardingMessageBufferingSpec.scala +++ /dev/null @@ -1,91 +0,0 @@ -package kamon.instrumentation.akka.sharding - -import akka.actor._ -import akka.cluster.Cluster -import akka.cluster.sharding.{ClusterSharding, ClusterShardingSettings, ShardRegion} -import akka.testkit.{ImplicitSender, TestKitBase} -import com.typesafe.config.ConfigFactory -import kamon.Kamon -import kamon.context.Context -import kamon.instrumentation.akka.ContextEchoActor -import kamon.testkit.{InitAndStopKamonAfterAll, MetricInspection} -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpecLike - -import scala.concurrent.duration._ - -class ShardingMessageBufferingSpec extends TestKitBase with AnyWordSpecLike with Matchers with ImplicitSender - with MetricInspection.Syntax with InitAndStopKamonAfterAll { - - implicit lazy val system: ActorSystem = { - ActorSystem("cluster-sharding-spec-system", ConfigFactory.parseString( - """ - |akka { - | loglevel = INFO - | loggers = [ "akka.event.slf4j.Slf4jLogger" ] - | - | actor { - | provider = "cluster" - | } - | remote { - | enabled-transports = ["akka.remote.netty.tcp"] - | netty.tcp { - | hostname = "127.0.0.1" - | port = 2556 - | } - | } - |} - """.stripMargin)) - } - - val remoteSystem: ActorSystem = ActorSystem("cluster-sharding-spec-remote-system", ConfigFactory.parseString( - """ - |akka { - | loglevel = INFO - | loggers = [ "akka.event.slf4j.Slf4jLogger" ] - | - | actor { - | provider = "cluster" - | } - | remote { - | enabled-transports = ["akka.remote.netty.tcp"] - | netty.tcp { - | hostname = "127.0.0.1" - | port = 2557 - | } - | } - |} - """.stripMargin)) - - def contextWithBroadcast(name: String): Context = - Context.Empty.withTag( - ContextEchoActor.EchoTag, name - ) - - val extractEntityId: ShardRegion.ExtractEntityId = { - case entityId:String => (entityId, "reply-trace-token") - } - val extractShardId: ShardRegion.ExtractShardId = { - case entityId:String => (entityId.toInt % 10).toString - } - - "The MessageBuffer instrumentation" should { 
- "propagate the current Context when sending message to a sharding region that has not been started" in { - Cluster(system).join(Cluster(system).selfAddress) - Cluster(remoteSystem).join(Cluster(system).selfAddress) - - val replierRegion: ActorRef = ClusterSharding(system).start( - typeName = "replier", - entityProps = ContextEchoActor.props(None), - settings = ClusterShardingSettings(system), - extractEntityId = extractEntityId, - extractShardId = extractShardId) - - Kamon.runWithContext(contextWithBroadcast("cluster-sharding-actor-123")) { - replierRegion ! "123" - } - - expectMsg(10 seconds, "name=cluster-sharding-actor-123") - } - } -} diff --git a/instrumentation/kamon-alpakka-kafka/src/main/resources/reference.conf b/instrumentation/kamon-alpakka-kafka/src/main/resources/reference.conf index ca4d8c51a..c4cb9e0c5 100644 --- a/instrumentation/kamon-alpakka-kafka/src/main/resources/reference.conf +++ b/instrumentation/kamon-alpakka-kafka/src/main/resources/reference.conf @@ -17,8 +17,8 @@ kanela { ] within = [ - "akka.kafka.ProducerMessage\\$Message", - "akka.kafka.ProducerMessage\\$MultiMessage", + "akka.kafka.ProducerMessage$Message", + "akka.kafka.ProducerMessage$MultiMessage", "akka.kafka.internal.DefaultProducerStageLogic" ] } diff --git a/instrumentation/kamon-annotation/src/main/java/kamon/annotation/instrumentation/cache/AnnotationCache.java b/instrumentation/kamon-annotation/src/main/java/kamon/annotation/instrumentation/cache/AnnotationCache.java index b10463986..b10b7c631 100644 --- a/instrumentation/kamon-annotation/src/main/java/kamon/annotation/instrumentation/cache/AnnotationCache.java +++ b/instrumentation/kamon-annotation/src/main/java/kamon/annotation/instrumentation/cache/AnnotationCache.java @@ -26,7 +26,8 @@ import kamon.metric.*; import kamon.tag.TagSet; import kamon.trace.SpanBuilder; -import kanela.agent.util.log.Logger; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.lang.reflect.Method; import java.util.Collections; @@ -38,6 +39,7 @@ public final class AnnotationCache { + private static Logger logger = LoggerFactory.getLogger(AnnotationCache.class); private static Map metrics = buildCache(); private static Map buildCache() { @@ -195,7 +197,7 @@ private static String getOperationName(String name, Object obj, Class clazz, private static RemovalListener LogExpirationListener() { return (key, value, cause) -> { if(value instanceof Instrument) ((Instrument) value).remove(); - Logger.debug(() -> "Expiring key: " + key + "with value" + value); + logger.debug("Expiring key: " + key + "with value" + value); }; } diff --git a/instrumentation/kamon-annotation/src/main/scala/kamon/annotation/el/EnhancedELProcessor.scala b/instrumentation/kamon-annotation/src/main/scala/kamon/annotation/el/EnhancedELProcessor.scala index 490686f73..733bf3042 100644 --- a/instrumentation/kamon-annotation/src/main/scala/kamon/annotation/el/EnhancedELProcessor.scala +++ b/instrumentation/kamon-annotation/src/main/scala/kamon/annotation/el/EnhancedELProcessor.scala @@ -19,7 +19,8 @@ package kamon.annotation.el import java.util.function.Supplier import javax.el.ELProcessor -import kanela.agent.util.log.Logger +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import scala.util.{Failure, Success, Try} @@ -27,34 +28,31 @@ import scala.util.{Failure, Success, Try} * Pimp ELProcessor injecting some useful methods. 
*/ object EnhancedELProcessor { + private val logger = LoggerFactory.getLogger("kamon.annotation.el.EnhancedELProcessor") private val Pattern = """[#|$]\{(.*)\}""".r implicit class Syntax(val processor: ELProcessor) extends AnyVal { - import scala.collection.JavaConverters._ + import scala.jdk.CollectionConverters._ - def evalToString(expression: String): String = extract(expression).map { str ⇒ + def evalToString(expression: String): String = extract(expression).map { str => eval[String](str) match { case Success(value) => value case Failure(cause) => - Logger.warn( - new Supplier[String] { - override def get(): String = - s"${cause.getMessage} -> we will complete the operation with 'unknown' string" - }, + logger.warn( + s"${cause.getMessage} -> we will complete the operation with 'unknown' string", cause ) + "unknown" } } getOrElse expression - def evalToMap(expression: String): Map[String, String] = extract(expression).map { str ⇒ + def evalToMap(expression: String): Map[String, String] = extract(expression).map { str => eval[java.util.HashMap[String, String]](s"{$str}") match { - case Success(value) ⇒ value.asInstanceOf[java.util.HashMap[String, String]].asScala.toMap - case Failure(cause) ⇒ - Logger.warn( - new Supplier[String] { - override def get(): String = s"${cause.getMessage} -> we will complete the operation with an empty map" - }, + case Success(value) => value.asInstanceOf[java.util.HashMap[String, String]].asScala.toMap + case Failure(cause) => + logger.warn( + s"${cause.getMessage} -> we will complete the operation with an empty map", cause ) Map.empty[String, String] @@ -64,8 +62,8 @@ object EnhancedELProcessor { private def eval[A](str: String): Try[A] = Try(processor.eval(str).asInstanceOf[A]) private def extract(expression: String): Option[String] = expression match { - case Pattern(ex) ⇒ Some(ex) - case _ ⇒ None + case Pattern(ex) => Some(ex) + case _ => None } } } diff --git a/instrumentation/kamon-annotation/src/test/resources/application.conf b/instrumentation/kamon-annotation/src/test/resources/application.conf index 014327d1a..fa7ffa7d3 100644 --- a/instrumentation/kamon-annotation/src/test/resources/application.conf +++ b/instrumentation/kamon-annotation/src/test/resources/application.conf @@ -4,7 +4,7 @@ kanela { show-banner = true modules { annotation.within = [ - "kamon.annotation..*" + "kamon.annotation." ] } -} \ No newline at end of file +} diff --git a/instrumentation/kamon-apache-cxf/src/main/resources/reference.conf b/instrumentation/kamon-apache-cxf/src/main/resources/reference.conf index cc2d862e2..63b55c9c5 100644 --- a/instrumentation/kamon-apache-cxf/src/main/resources/reference.conf +++ b/instrumentation/kamon-apache-cxf/src/main/resources/reference.conf @@ -92,7 +92,7 @@ kanela { ] enabled = true within = [ - "org.apache.cxf..*", + "org.apache.cxf." ] } } diff --git a/instrumentation/kamon-apache-httpclient/src/main/resources/reference.conf b/instrumentation/kamon-apache-httpclient/src/main/resources/reference.conf index 4e58dad87..7e7e10227 100644 --- a/instrumentation/kamon-apache-httpclient/src/main/resources/reference.conf +++ b/instrumentation/kamon-apache-httpclient/src/main/resources/reference.conf @@ -92,8 +92,8 @@ kanela { ] within = [ - "org.apache.http..*" + "org.apache.http." 
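[Review note] The EnhancedELProcessor changes above are mechanical (slf4j instead of the Kanela logger, ASCII arrows, scala.jdk.CollectionConverters); the evaluation semantics are untouched: only expressions wrapped in #{...} or ${...} are evaluated, a string without such a wrapper is returned as-is, and evaluation failures are logged and degrade to "unknown" (for strings) or an empty map. Illustrative usage under those semantics; the map literal is an assumption about typical annotation tag expressions:

    import javax.el.ELProcessor
    import kamon.annotation.el.EnhancedELProcessor.Syntax

    val processor = new ELProcessor()
    processor.evalToString("operationName")                  // no #{...}/${...} wrapper: returned as-is
    processor.evalToMap("#{'algorithm':'sha','size':'40'}")  // Map("algorithm" -> "sha", "size" -> "40")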
] } } -} \ No newline at end of file +} diff --git a/instrumentation/kamon-cassandra/src/main/resources/reference.conf b/instrumentation/kamon-cassandra/src/main/resources/reference.conf index 247b46c95..fdb649da5 100644 --- a/instrumentation/kamon-cassandra/src/main/resources/reference.conf +++ b/instrumentation/kamon-cassandra/src/main/resources/reference.conf @@ -54,8 +54,8 @@ kanela { ] within = [ - "com.datastax.driver.core..*", - "com.datastax.oss.driver.internal.core..*" + "com.datastax.driver.core.", + "com.datastax.oss.driver.internal.core." ] } } diff --git a/instrumentation/kamon-cats-io-3/src/main/resources/reference.conf b/instrumentation/kamon-cats-io-3/src/main/resources/reference.conf index 67b4a87e4..e71056c1f 100644 --- a/instrumentation/kamon-cats-io-3/src/main/resources/reference.conf +++ b/instrumentation/kamon-cats-io-3/src/main/resources/reference.conf @@ -11,8 +11,8 @@ kanela.modules { ] within = [ - "cats\\.effect\\.IOFiber", - "cats\\.effect\\.unsafe\\.SchedulerCompanionPlatform.*" + "cats.effect.IOFiber", + "cats.effect.unsafe.SchedulerCompanionPlatform." ] } -} \ No newline at end of file +} diff --git a/instrumentation/kamon-cats-io/src/main/resources/reference.conf b/instrumentation/kamon-cats-io/src/main/resources/reference.conf index 9d31317c2..89ff6e1c2 100644 --- a/instrumentation/kamon-cats-io/src/main/resources/reference.conf +++ b/instrumentation/kamon-cats-io/src/main/resources/reference.conf @@ -4,7 +4,7 @@ kanela.modules { executor-service { - within += "cats.effect.internals.IOShift\\$Tick" - within += "cats.effect.internals.IOTimer\\$ShiftTick" + within += "cats.effect.internals.IOShift$Tick" + within += "cats.effect.internals.IOTimer$ShiftTick" } -} \ No newline at end of file +} diff --git a/instrumentation/kamon-elasticsearch/src/main/resources/reference.conf b/instrumentation/kamon-elasticsearch/src/main/resources/reference.conf index d033f03b8..2e03ef294 100644 --- a/instrumentation/kamon-elasticsearch/src/main/resources/reference.conf +++ b/instrumentation/kamon-elasticsearch/src/main/resources/reference.conf @@ -16,7 +16,7 @@ kanela { ] within = [ - "org.elasticsearch.client..*" + "org.elasticsearch.client." ] } } diff --git a/instrumentation/kamon-executors/src/main/java/kamon/instrumentation/executor/CaptureContextOnSubmitAdvices.java b/instrumentation/kamon-executors/src/main/java/kamon/instrumentation/executor/CaptureContextOnSubmitAdvices.java index 4fa8f7d31..30fba4f41 100644 --- a/instrumentation/kamon-executors/src/main/java/kamon/instrumentation/executor/CaptureContextOnSubmitAdvices.java +++ b/instrumentation/kamon-executors/src/main/java/kamon/instrumentation/executor/CaptureContextOnSubmitAdvices.java @@ -16,46 +16,44 @@ package kamon.instrumentation.executor; -import kanela.agent.bootstrap.context.ContextHandler; -import kanela.agent.libs.net.bytebuddy.asm.Advice; - import java.util.ArrayList; import java.util.Collection; import java.util.concurrent.Callable; +import kanela.agent.bootstrap.ContextApi; +import kanela.agent.libs.net.bytebuddy.asm.Advice; final class CaptureContextOnSubmitAdvices { - public static class RunnableWrapperAdvisor { - /** - * Wraps a {@link Runnable} so that it executes with the current context. 
- */ - @Advice.OnMethodEnter(suppress = Throwable.class) - public static void wrapParam(@Advice.Argument(value = 0, readOnly = false) Runnable runnable) { - runnable = ContextHandler.wrapInContextAware(runnable); - } + public static class RunnableWrapperAdvisor { + /** Wraps a {@link Runnable} so that it executes with the current context. */ + @Advice.OnMethodEnter() + public static void wrapParam(@Advice.Argument(value = 0, readOnly = false) Runnable runnable) { + runnable = ContextApi.wrapRunnable(runnable); } + } - public static class CallableWrapperAdvisor { - /** - * Wraps a {@link Callable} so that it executes with the current context. - */ - @Advice.OnMethodEnter(suppress = Throwable.class) - public static void wrapParam(@Advice.Argument(value = 0, readOnly = false) Callable callable) { - callable = ContextHandler.wrapInContextAware(callable); - } + public static class CallableWrapperAdvisor { + /** Wraps a {@link Callable} so that it executes with the current context. */ + @Advice.OnMethodEnter() + public static void wrapParam( + @Advice.Argument(value = 0, readOnly = false) Callable callable) { + callable = ContextApi.wrapCallable(callable); } + } - public static class CallableCollectionWrapperAdvisor { - /** - * Wraps all elements of a list of {@link Callable}'s so that it executes with the current context. - */ - @Advice.OnMethodEnter(suppress = Throwable.class) - public static void wrapParam(@Advice.Argument(value = 0, readOnly = false) Collection> tasks) { - final Collection> wrappedTasks = new ArrayList<>(tasks.size()); - for (Callable task : tasks) { - if(task != null) wrappedTasks.add(ContextHandler.wrapInContextAware(task)); - } - tasks = wrappedTasks; - } + public static class CallableCollectionWrapperAdvisor { + /** + * Wraps all elements of a list of {@link Callable}'s so that it executes with the current + * context. 
+ */ + @Advice.OnMethodEnter() + public static void wrapParam( + @Advice.Argument(value = 0, readOnly = false) Collection> tasks) { + final Collection> wrappedTasks = new ArrayList<>(tasks.size()); + for (Callable task : tasks) { + if (task != null) wrappedTasks.add(ContextApi.wrapCallable(task)); + } + tasks = wrappedTasks; } -} \ No newline at end of file + } +} diff --git a/instrumentation/kamon-executors/src/main/java/kamon/instrumentation/executor/CaptureContextOnSubmitInstrumentation.java b/instrumentation/kamon-executors/src/main/java/kamon/instrumentation/executor/CaptureContextOnSubmitInstrumentation.java index 9931f611f..44b0a4b32 100644 --- a/instrumentation/kamon-executors/src/main/java/kamon/instrumentation/executor/CaptureContextOnSubmitInstrumentation.java +++ b/instrumentation/kamon-executors/src/main/java/kamon/instrumentation/executor/CaptureContextOnSubmitInstrumentation.java @@ -16,173 +16,75 @@ package kamon.instrumentation.executor; -import com.typesafe.config.Config; -import kamon.Kamon; +import java.util.Collection; +import java.util.concurrent.Callable; import kamon.instrumentation.executor.CaptureContextOnSubmitAdvices.CallableCollectionWrapperAdvisor; import kamon.instrumentation.executor.CaptureContextOnSubmitAdvices.CallableWrapperAdvisor; import kamon.instrumentation.executor.CaptureContextOnSubmitAdvices.RunnableWrapperAdvisor; -import kamon.instrumentation.executor.ContextAware.ContextAwareCallableProvider; -import kamon.instrumentation.executor.ContextAware.ContextAwareRunnableProvider; import kamon.instrumentation.executor.ContextAware.DefaultContextAwareCallable; import kamon.instrumentation.executor.ContextAware.DefaultContextAwareRunnable; import kanela.agent.api.instrumentation.InstrumentationBuilder; -import kanela.agent.bootstrap.context.ContextHandler; -import kanela.agent.bootstrap.context.ContextProvider; +import kanela.agent.bootstrap.ContextApi; +import kanela.agent.bootstrap.ContextApiImplementation; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Collection; -import java.util.List; -import java.util.Optional; -import java.util.concurrent.Callable; - -import static java.text.MessageFormat.format; -import static java.util.Collections.emptyList; -import static java.util.stream.Collectors.toList; - public final class CaptureContextOnSubmitInstrumentation extends InstrumentationBuilder { - private static final Logger LOG = LoggerFactory.getLogger(CaptureContextOnSubmitInstrumentation.class); - - private volatile static Settings settings = readSettings(Kamon.config()); - - public CaptureContextOnSubmitInstrumentation() { + private static final Logger LOG = + LoggerFactory.getLogger(CaptureContextOnSubmitInstrumentation.class); - /** - * Set the ContextProvider - */ - ContextHandler.setContextProvider(new KamonContextProvider()); - - Kamon.onReconfigure(newConfig -> { settings = readSettings(newConfig); }); - - /** - * Instrument all implementations of: - * - * java.util.concurrent.Executor::execute - * - */ - onSubTypesOf("java.util.concurrent.Executor") - .advise(method("execute").and(withArgument(Runnable.class)), RunnableWrapperAdvisor.class); - - - /** - * Instrument all implementations of: - * - * java.util.concurrent.ExecutorService::submit(Runnable) - * java.util.concurrent.ExecutorService::submit(Callable) - * java.util.concurrent.ExecutorService::[invokeAny|invokeAll](Collection[Callable]) - * - */ - onSubTypesOf( "java.util.concurrent.ExecutorService") - .advise(method("submit").and(withArgument(Runnable.class)), 
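[Review note] With Kanela 2.0 the advisors above delegate to kanela.agent.bootstrap.ContextApi instead of ContextHandler, and the configurable provider indirection is gone: wrapRunnable/wrapCallable now always produce the default wrappers. Per the module description in reference.conf (capture the current context at the instant a task is submitted, then run the task with it), a context-aware wrapper behaves roughly like this Scala sketch; the class name is illustrative, the real implementation being DefaultContextAwareRunnable:

    import kamon.Kamon

    class ContextAwareRunnable(underlying: Runnable) extends Runnable {
      // captured at submission time, on the submitting thread
      private val capturedContext = Kamon.currentContext()

      override def run(): Unit = {
        val scope = Kamon.storeContext(capturedContext)  // restored on the worker thread
        try underlying.run()
        finally scope.close()
      }
    }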
RunnableWrapperAdvisor.class) - .advise(method("submit").and(withArgument(Callable.class)), CallableWrapperAdvisor.class) - .advise(anyMethods("invokeAny", "invokeAll").and(withArgument(Collection.class)), CallableCollectionWrapperAdvisor.class); - - /** - * Instrument all implementations of: - * - * java.util.concurrent.ScheduledExecutorService::schedule(Runnable, long, TimeUnit) - * java.util.concurrent.ScheduledExecutorService::schedule(Callable, long, TimeUnit) - * - */ - onSubTypesOf("java.util.concurrent.ScheduledExecutorService") - .advise(method("schedule").and(withArgument(0, Runnable.class)), RunnableWrapperAdvisor.class) - .advise(method("schedule").and(withArgument(0, Callable.class)), CallableWrapperAdvisor.class); - - } + public CaptureContextOnSubmitInstrumentation() { - private static final class Settings { - public final List runnableAwareProviders; - public final List callableAwareProviders; + /** Set the ContextProvider */ + ContextApi.setContextApiImplementation(new KamonContextApiImplementation()); - private Settings( - List runnableAwareProviders, - List callableAwareProviders - ) { - this.runnableAwareProviders = runnableAwareProviders; - this.callableAwareProviders = callableAwareProviders; - } - } - - private static Settings readSettings(Config config) { - Config executorCaptureConfig = config.getConfig("kanela.modules.executor-service-capture-on-submit"); - List runnableAwareProviders ; - if (executorCaptureConfig.hasPath("context-aware-runnable-providers")) { - runnableAwareProviders = executorCaptureConfig.getStringList("context-aware-runnable-providers") - .stream() - .map(CaptureContextOnSubmitInstrumentation::loadRunnableProvider) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(toList()); - } else { - runnableAwareProviders = emptyList(); - } - - List callableAwareProviders; - if (executorCaptureConfig.hasPath("context-aware-callable-providers")) { - callableAwareProviders = executorCaptureConfig.getStringList("context-aware-callable-providers") - .stream() - .map(CaptureContextOnSubmitInstrumentation::loadCallableProvider) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(toList()); - } else { - callableAwareProviders = emptyList(); - } - - return new Settings(runnableAwareProviders, callableAwareProviders); - } - - private static Optional loadRunnableProvider(String providerClassName) { - Optional providerOpt; - try { - providerOpt = Optional.of( - (ContextAwareRunnableProvider) Class.forName(providerClassName).getConstructor().newInstance() - ); - } catch (Exception e) { - LOG.warn(format("Error trying to load ContextAwareRunnableProvider: {0}.", providerClassName), e); - providerOpt = Optional.empty(); - } - return providerOpt; - } - - private static Optional loadCallableProvider(String providerClassName) { - Optional providerOpt; - try { - providerOpt = Optional.of( - (ContextAwareCallableProvider) Class.forName(providerClassName).getConstructor().newInstance() - ); - } catch (Exception e) { - LOG.warn(format("Error trying to load ContextAwareCallableProvider: {0}.", providerClassName), e); - providerOpt = Optional.empty(); - } - return providerOpt; - } + /** + * Instrument all implementations of: + * + *

java.util.concurrent.Executor::execute + */ + onSubTypesOf("java.util.concurrent.Executor") + .advise(method("execute").and(withArgument(Runnable.class)), RunnableWrapperAdvisor.class); /** - * implementation of kanela.agent.bootstrap.context.ContextProvider + * Instrument all implementations of: + * + *

java.util.concurrent.ExecutorService::submit(Runnable) + * java.util.concurrent.ExecutorService::submit(Callable) + * java.util.concurrent.ExecutorService::[invokeAny|invokeAll](Collection[Callable]) */ - private static class KamonContextProvider implements ContextProvider { + onSubTypesOf("java.util.concurrent.ExecutorService") + .advise(method("submit").and(withArgument(Runnable.class)), RunnableWrapperAdvisor.class) + .advise(method("submit").and(withArgument(Callable.class)), CallableWrapperAdvisor.class) + .advise( + anyMethods("invokeAny", "invokeAll").and(withArgument(Collection.class)), + CallableCollectionWrapperAdvisor.class); - @Override - public Runnable wrapInContextAware(Runnable r) { - return settings.runnableAwareProviders - .stream() - .filter(p -> p.test(r)) - .findFirst() - .map(it -> it.provide(r)) - .orElse(new DefaultContextAwareRunnable(r)); - } + /** + * Instrument all implementations of: + * + *

java.util.concurrent.ScheduledExecutorService::schedule(Runnable, long, TimeUnit) + * java.util.concurrent.ScheduledExecutorService::schedule(Callable, long, TimeUnit) + */ + onSubTypesOf("java.util.concurrent.ScheduledExecutorService") + .advise( + method("schedule").and(withArgument(0, Runnable.class)), RunnableWrapperAdvisor.class) + .advise( + method("schedule").and(withArgument(0, Callable.class)), CallableWrapperAdvisor.class); + } + + private static class KamonContextApiImplementation implements ContextApiImplementation { + + @Override + public Runnable wrapRunnable(Runnable r) { + return new DefaultContextAwareRunnable(r); + } - @SuppressWarnings("rawtypes") - @Override - public Callable wrapInContextAware(Callable c) { - return settings.callableAwareProviders - .stream() - .filter(p -> p.test(c)) - .findFirst() - .map(it -> it.provide(c)) - .orElse(new DefaultContextAwareCallable<>(c)); - } + @SuppressWarnings("rawtypes") + @Override + public Callable wrapCallable(Callable c) { + return new DefaultContextAwareCallable<>(c); } + } } diff --git a/instrumentation/kamon-executors/src/main/resources/reference.conf b/instrumentation/kamon-executors/src/main/resources/reference.conf index 826388459..ad77b1907 100644 --- a/instrumentation/kamon-executors/src/main/resources/reference.conf +++ b/instrumentation/kamon-executors/src/main/resources/reference.conf @@ -23,27 +23,26 @@ kanela.modules { ] exclude = [ - "^java.*", - "^sun.*", - "^com.sun.tools.*", - "^sbt.internal.*", - "^com.intellij.rt.*", - "^org.jboss.netty.*", - "^com.google.common.base.internal.Finalizer", - "^kamon.module.*", - "^kamon.instrumentation.executor.ExecutorInstrumentation.*", - "^kanela.agent.*" + "java.", + "sun.", + "com.sun.tools.", + "sbt.internal.", + "com.intellij.rt.", + "org.jboss.netty.", + "com.google.common.base.internal.Finalizer", + "kamon.module.", + "kamon.instrumentation.executor.ExecutorInstrumentation.", + "kanela.agent." ] within = [ - "scala.concurrent.ExecutionContext\\$", - "scala.concurrent.ExecutionContext\\$Implicits\\$" + "scala.concurrent.ExecutionContext$", + "scala.concurrent.ExecutionContext$Implicits$" ] } executor-service-capture-on-submit { enabled = false - disable-class-format-changes = true name = "Executor Service Capture on Submit Instrumentation" description = """Experimental. Provides automatic context propagation by capturing the current context at the instant when a @@ -53,23 +52,15 @@ kanela.modules { "kamon.instrumentation.executor.CaptureContextOnSubmitInstrumentation" ] - bootstrap-injection { - enabled = true - helper-class-names = [] - } - within = [ - "java.util.concurrent..*", - "com.google.common.util.concurrent..*", + "com.google.common.util.concurrent.", "scala.concurrent.forkjoin.ForkJoinPool" ] - # Provider classes for Runnable wrappers. - # See kamon.instrumentation.executor.ContextAware. - context-aware-runnable-providers = [] - - # Provider classes for Callalbe wrappers. - # See kamon.instrumentation.executor.ContextAware. 
- context-aware-callable-providers = [] + within-bootstrap = [ + "java.util.concurrent.ThreadPoolExecutor", + "java.util.concurrent.ScheduledThreadPoolExecutor", + "java.util.concurrent.ForkJoinPool" + ] } } diff --git a/instrumentation/kamon-executors/src/main/scala-2.11/kamon/instrumentation/executor/ScalaGlobalExecutionContextAdvice.java b/instrumentation/kamon-executors/src/main/scala-2.11/kamon/instrumentation/executor/ScalaGlobalExecutionContextAdvice.java deleted file mode 100644 index f7ec60c05..000000000 --- a/instrumentation/kamon-executors/src/main/scala-2.11/kamon/instrumentation/executor/ScalaGlobalExecutionContextAdvice.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2013-2023 The Kamon Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package kamon.instrumentation.executor; - -import kanela.agent.libs.net.bytebuddy.asm.Advice; -import scala.concurrent.ExecutionContext$; -import scala.concurrent.impl.ExecutionContextImpl; - -import java.util.concurrent.ExecutorService; - -import static kanela.agent.libs.net.bytebuddy.implementation.bytecode.assign.Assigner.Typing.DYNAMIC; - -final class ScalaGlobalExecutionContextAdvice { - - @Advice.OnMethodExit - public static void onExit(@Advice.Return(readOnly = false, typing = DYNAMIC) ExecutionContextImpl returnValue) { - ExecutorService instrumented = ExecutorInstrumentation.instrument((ExecutorService) returnValue.executor(), "scala-global-execution-context"); - returnValue = new ExecutionContextImpl(instrumented, ExecutionContext$.MODULE$.defaultReporter()); - } -} diff --git a/instrumentation/kamon-executors/src/main/scala-2.11/kamon/instrumentation/package.scala b/instrumentation/kamon-executors/src/main/scala-2.11/kamon/instrumentation/package.scala deleted file mode 100644 index 71468873f..000000000 --- a/instrumentation/kamon-executors/src/main/scala-2.11/kamon/instrumentation/package.scala +++ /dev/null @@ -1,4 +0,0 @@ -package kamon.instrumentation -package object executor { - type ScalaForkJoinPool = scala.concurrent.forkjoin.ForkJoinPool -} diff --git a/instrumentation/kamon-executors/src/main/scala-2.12/kamon/instrumentation/executor/ScalaGlobalExecutionContextAdvice.java b/instrumentation/kamon-executors/src/main/scala-2.12/kamon/instrumentation/executor/ScalaGlobalExecutionContextAdvice.java deleted file mode 100644 index b4dfcc871..000000000 --- a/instrumentation/kamon-executors/src/main/scala-2.12/kamon/instrumentation/executor/ScalaGlobalExecutionContextAdvice.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright 2013-2023 The Kamon Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package kamon.instrumentation.executor; - -import kanela.agent.libs.net.bytebuddy.asm.Advice; -import scala.concurrent.ExecutionContext; -import scala.concurrent.impl.ExecutionContextImpl; - -import java.lang.reflect.Method; -import java.util.concurrent.ExecutorService; - -import static kanela.agent.libs.net.bytebuddy.implementation.bytecode.assign.Assigner.Typing.DYNAMIC; - -final class ScalaGlobalExecutionContextAdvice { - - @Advice.OnMethodExit - public static void onExit(@Advice.Return(readOnly = false, typing = DYNAMIC) Object returnValue) throws Exception { - // Not ideal to go through reflection but this code will only be executed once in the lifetime of the JVM - Method executorMethod = returnValue.getClass().getDeclaredMethod("executor"); - ExecutorService executor = (ExecutorService) executorMethod.invoke(returnValue); - - ExecutorService instrumented = ExecutorInstrumentation.instrument(executor, "scala-global-execution-context"); - returnValue = new ExecutionContextImpl(instrumented, ExecutionContext.defaultReporter()); - } -} diff --git a/instrumentation/kamon-executors/src/main/scala-2.12/kamon/instrumentation/package.scala b/instrumentation/kamon-executors/src/main/scala-2.12/kamon/instrumentation/package.scala deleted file mode 100644 index c5fb17593..000000000 --- a/instrumentation/kamon-executors/src/main/scala-2.12/kamon/instrumentation/package.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright 2013-2020 The Kamon Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package kamon.instrumentation -package object executor { - type ScalaForkJoinPool = java.util.concurrent.ForkJoinPool -} diff --git a/instrumentation/kamon-executors/src/test/resources/application.conf b/instrumentation/kamon-executors/src/test/resources/application.conf index 4c2318c8b..a17aafdb1 100644 --- a/instrumentation/kamon-executors/src/test/resources/application.conf +++ b/instrumentation/kamon-executors/src/test/resources/application.conf @@ -2,21 +2,19 @@ kamon.instrumentation.executor.sample-interval = 1ms kanela.modules { executor-service { - exclude += "^kamon.instrumentation.executor.*" + exclude += "kamon.instrumentation.executor." } executor-service-capture-on-submit { enabled = true within = ${?kanela.modules.executor-service-capture-on-submit.within} [ - "com.google.common.util.concurrent..*", - "java.util.concurrent..*", - "scala.concurrent..*", - "scala.concurrent.impl..*" + "com.google.common.util.concurrent.", + "java.util.concurrent.", + "scala.concurrent.", + "scala.concurrent.impl." 
"scala.concurrent.forkjoin.ForkJoinPool", - "akka.actor..*", - "play.api.libs.streams..*", + "akka.actor.", + "play.api.libs.streams.", ] - context-aware-runnable-providers += kamon.instrumentation.executor.TestContextAwareRunnableProvider - context-aware-callable-providers += kamon.instrumentation.executor.TestContextAwareCallableProvider } -} \ No newline at end of file +} diff --git a/instrumentation/kamon-executors/src/test/scala/kamon/instrumentation/executor/OnSubmitContextPropagationSpec.scala b/instrumentation/kamon-executors/src/test/scala/kamon/instrumentation/executor/OnSubmitContextPropagationSpec.scala index 326cfdf40..2c1596aaa 100644 --- a/instrumentation/kamon-executors/src/test/scala/kamon/instrumentation/executor/OnSubmitContextPropagationSpec.scala +++ b/instrumentation/kamon-executors/src/test/scala/kamon/instrumentation/executor/OnSubmitContextPropagationSpec.scala @@ -20,7 +20,6 @@ import com.google.common.util.concurrent.MoreExecutors import kamon.Kamon import kamon.instrumentation.executor.ContextAware.{DefaultContextAwareCallable, DefaultContextAwareRunnable} import kamon.testkit.InitAndStopKamonAfterAll -import kanela.agent.bootstrap.context.ContextHandler import org.scalatest.OptionValues import org.scalatest.concurrent.Eventually import org.scalatest.matchers.should.Matchers @@ -170,28 +169,6 @@ class OnSubmitContextPropagationSpec extends AnyWordSpec with Matchers with Cont } values should contain allOf ("all-callables-should-see-this-key-A", "all-callables-should-see-this-key-B", "all-callables-should-see-this-key-C") } - - "wrap Runnable to TestContextAwareRunnable when call ContextHandler.wrapInContextAware" in { - val simpleRunnable = ContextHandler.wrapInContextAware(new SimpleRunnable) - simpleRunnable.isInstanceOf[TestContextAwareRunnable] should be(true) - simpleRunnable.isInstanceOf[DefaultContextAwareRunnable] should be(false) - - val notSimpleRunnable = ContextHandler.wrapInContextAware(new Runnable { override def run(): Unit = {} }) - notSimpleRunnable.isInstanceOf[TestContextAwareRunnable] should be(false) - notSimpleRunnable.isInstanceOf[DefaultContextAwareRunnable] should be(true) - } - - "wrap Callable to TestContextAwareCallable when call ContextHandler.wrapInContextAware" in { - val simpleCallable = ContextHandler.wrapInContextAware(new SimpleCallable) - simpleCallable.isInstanceOf[TestContextAwareCallable[_]] should be(true) - simpleCallable.isInstanceOf[DefaultContextAwareCallable[_]] should be(false) - - val notSimpleCallable = ContextHandler.wrapInContextAware(new Callable[String] { - override def call(): String = "test" - }) - notSimpleCallable.isInstanceOf[TestContextAwareCallable[_]] should be(false) - notSimpleCallable.isInstanceOf[DefaultContextAwareCallable[_]] should be(true) - } } def instrument(executor: ExecutorService): ExecutorService = { diff --git a/instrumentation/kamon-instrumentation-common/src/main/scala-2.11/scala/annotation/static.scala b/instrumentation/kamon-instrumentation-common/src/main/scala-2.11/scala/annotation/static.scala deleted file mode 100644 index 2d336e2e3..000000000 --- a/instrumentation/kamon-instrumentation-common/src/main/scala-2.11/scala/annotation/static.scala +++ /dev/null @@ -1,4 +0,0 @@ -package scala.annotation - -import scala.annotation.meta._ -final class static extends StaticAnnotation diff --git a/instrumentation/kamon-instrumentation-common/src/main/scala-2.12/scala/annotation/static.scala b/instrumentation/kamon-instrumentation-common/src/main/scala-2.12/scala/annotation/static.scala 
deleted file mode 100644 index 2d336e2e3..000000000 --- a/instrumentation/kamon-instrumentation-common/src/main/scala-2.12/scala/annotation/static.scala +++ /dev/null @@ -1,4 +0,0 @@ -package scala.annotation - -import scala.annotation.meta._ -final class static extends StaticAnnotation diff --git a/instrumentation/kamon-instrumentation-common/src/main/scala/kamon/instrumentation/package.scala b/instrumentation/kamon-instrumentation-common/src/main/scala/kamon/instrumentation/package.scala index f6d5b8e9f..0817cd80a 100644 --- a/instrumentation/kamon-instrumentation-common/src/main/scala/kamon/instrumentation/package.scala +++ b/instrumentation/kamon-instrumentation-common/src/main/scala/kamon/instrumentation/package.scala @@ -32,15 +32,15 @@ package object instrumentation { */ def advise[A](method: Junction[MethodDescription], advice: A)(implicit singletonEvidence: A <:< Singleton - ): InstrumentationBuilder.Target + ): InstrumentationBuilder.Target.Builder } - implicit def adviseWithCompanionObject(target: InstrumentationBuilder.Target): AdviseWithCompanionObject = + implicit def adviseWithCompanionObject(target: InstrumentationBuilder.Target.Builder): AdviseWithCompanionObject = new AdviseWithCompanionObject { override def advise[A](method: Junction[MethodDescription], advice: A)(implicit singletonEvidence: A <:< Singleton - ): InstrumentationBuilder.Target = { + ): InstrumentationBuilder.Target.Builder = { // Companion object instances always have the '$' sign at the end of their class name, we must remove it to get // to the class that exposes the static methods. val className = advice.getClass.getName.dropRight(1) diff --git a/instrumentation/kamon-instrumentation-common/src/test/resources/application.conf b/instrumentation/kamon-instrumentation-common/src/test/resources/application.conf index d92850637..083011738 100644 --- a/instrumentation/kamon-instrumentation-common/src/test/resources/application.conf +++ b/instrumentation/kamon-instrumentation-common/src/test/resources/application.conf @@ -90,7 +90,7 @@ kanela.modules { ] within = [ - "kamon.instrumentation.context.*" + "kamon.instrumentation.context." 
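[Review note] The signature change in kamon-instrumentation-common above (InstrumentationBuilder.Target becomes Target.Builder) keeps the companion-object trick intact: a Scala companion object compiles to a class whose name ends in '$', and dropping that character yields the class that carries the static methods generated via scala.annotation.static (the 2.11/2.12 shims for that annotation are deleted here). A sketch, with a hypothetical advice name:

    object CompanionNaming extends App {
      object StopTimerAdvice  // compiles to a StopTimerAdvice statics holder plus StopTimerAdvice$

      val objectClassName  = StopTimerAdvice.getClass.getName  // ends in "StopTimerAdvice$"
      val staticsClassName = objectClassName.dropRight(1)      // "...StopTimerAdvice"
      println(staticsClassName)
    }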
] } -} \ No newline at end of file +} diff --git a/instrumentation/kamon-jdbc/src/main/resources/reference.conf b/instrumentation/kamon-jdbc/src/main/resources/reference.conf index c9542a27f..14679c37d 100644 --- a/instrumentation/kamon-jdbc/src/main/resources/reference.conf +++ b/instrumentation/kamon-jdbc/src/main/resources/reference.conf @@ -59,33 +59,33 @@ kanela.modules { ] within = [ - "^org.h2..*", - "^org.sqlite..*", - "^oracle.jdbc..*", - "^com.amazon.redshift.jdbc42..*", - "^com.amazon.redshift.core.jdbc42..*", - "^com.mysql.jdbc..*", - "^com.mysql.cj.jdbc..*", - "^org.h2.Driver", - "^org.h2.jdbc..*", - "^org.hsqldb.jdbc.*", - "^net.sf.log4jdbc..*", - "^org.mariadb.jdbc..*", - "^org.postgresql.jdbc..*", - "^com.facebook.presto.jdbc..*", - "^com.microsoft.sqlserver.jdbc..*", - "^net.snowflake.client.jdbc..*", - "^com.zaxxer.hikari.pool.PoolBase", - "^com.zaxxer.hikari.pool.PoolEntry", - "^com.zaxxer.hikari.pool.HikariPool", - "^com.zaxxer.hikari.pool.ProxyConnection", - "^com.zaxxer.hikari.pool.HikariProxyStatement", - "^com.zaxxer.hikari.pool.HikariProxyPreparedStatement", - "^com.zaxxer.hikari.pool.HikariProxyCallableStatement", + "org.h2.", + "org.sqlite.", + "oracle.jdbc.", + "com.amazon.redshift.jdbc42.", + "com.amazon.redshift.core.jdbc42.", + "com.mysql.jdbc.", + "com.mysql.cj.jdbc.", + "org.h2.Driver", + "org.h2.jdbc.", + "org.hsqldb.jdbc.", + "net.sf.log4jdbc.", + "org.mariadb.jdbc.", + "org.postgresql.jdbc.", + "com.facebook.presto.jdbc.", + "com.microsoft.sqlserver.jdbc.", + "net.snowflake.client.jdbc.", + "com.zaxxer.hikari.pool.PoolBase", + "com.zaxxer.hikari.pool.PoolEntry", + "com.zaxxer.hikari.pool.HikariPool", + "com.zaxxer.hikari.pool.ProxyConnection", + "com.zaxxer.hikari.pool.HikariProxyStatement", + "com.zaxxer.hikari.pool.HikariProxyPreparedStatement", + "com.zaxxer.hikari.pool.HikariProxyCallableStatement" ] } executor-service { - within += "^slick.*" + within += "slick." 
} } diff --git a/instrumentation/kamon-jdbc/src/main/scala/kamon/instrumentation/jdbc/StatementMonitor.scala b/instrumentation/kamon-jdbc/src/main/scala/kamon/instrumentation/jdbc/StatementMonitor.scala index ea4d854fe..1d1fef0de 100644 --- a/instrumentation/kamon-jdbc/src/main/scala/kamon/instrumentation/jdbc/StatementMonitor.scala +++ b/instrumentation/kamon-jdbc/src/main/scala/kamon/instrumentation/jdbc/StatementMonitor.scala @@ -22,12 +22,12 @@ import kamon.instrumentation.jdbc.utils.{LoggingSupport, SqlVisitor} import kamon.metric.RangeSampler import kamon.tag.{Lookups, TagSet} import kamon.trace.Span -import kanela.agent.bootstrap.stack.CallStackDepth import net.sf.jsqlparser.parser.CCJSqlParserUtil import java.sql.PreparedStatement import java.time.Instant import java.time.temporal.ChronoUnit +import java.util.{HashMap => JavaHashMap} object StatementMonitor extends LoggingSupport { @@ -42,6 +42,18 @@ object StatementMonitor extends LoggingSupport { @volatile private var addStatementSQL: Boolean = true @volatile private var addPreparedStatementSQL: Boolean = true + val StackDepthThreadLocal = ThreadLocal.withInitial(() => new JavaHashMap[Any, Int]()) + + def incrementCallStackDepth(key: Any): Int = { + val stackDepthMap = StackDepthThreadLocal.get() + if (stackDepthMap.containsKey(key)) return stackDepthMap.compute(key, (k, v) => v + 1) + return stackDepthMap.computeIfAbsent(key, v => 0) + } + + def resetCallStackDepth(key: Any): Unit = { + StackDepthThreadLocal.get().remove(key); + } + Kamon.onReconfigure(c => updateSettings(c)) updateSettings(Kamon.config()) @@ -69,7 +81,7 @@ object StatementMonitor extends LoggingSupport { } def start(statement: Any, sql: String, statementType: String): Option[Invocation] = { - if (CallStackDepth.incrementFor(statement) == 0) { + if (incrementCallStackDepth(statement) == 0) { val startTimestamp = Kamon.clock().instant() // It could happen that there is no Pool Telemetry on the Pool when fail-fast is enabled and a connection is @@ -135,7 +147,7 @@ object StatementMonitor extends LoggingSupport { span.finish(endedAt) JdbcInstrumentation.onStatementFinish(sql, elapsedTime) - CallStackDepth.resetFor(statement) + resetCallStackDepth(statement) } } } diff --git a/instrumentation/kamon-kafka/src/main/resources/reference.conf b/instrumentation/kamon-kafka/src/main/resources/reference.conf index 637b67577..4be32c764 100644 --- a/instrumentation/kamon-kafka/src/main/resources/reference.conf +++ b/instrumentation/kamon-kafka/src/main/resources/reference.conf @@ -41,7 +41,7 @@ kanela.modules { ] within = [ - "org.apache.kafka.clients..*", + "org.apache.kafka.clients.", ] } } diff --git a/instrumentation/kamon-logback/src/main/resources/reference.conf b/instrumentation/kamon-logback/src/main/resources/reference.conf index 69335c1e8..220158470 100644 --- a/instrumentation/kamon-logback/src/main/resources/reference.conf +++ b/instrumentation/kamon-logback/src/main/resources/reference.conf @@ -49,9 +49,9 @@ kanela.modules { ] within = [ - "ch.qos.logback..*", - "com.cwbase.logback..*", - "net.logstash.logback..*" + "ch.qos.logback.", + "com.cwbase.logback.", + "net.logstash.logback." 
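[Review note] In StatementMonitor above, Kanela's bootstrap-injected CallStackDepth is replaced by an in-module ThreadLocal map keyed by the statement instance. The effect is a reentrancy guard: only the outermost start() for a given statement sees depth 0 and creates an Invocation; nested calls through wrapper statements are ignored, and the entry is cleared when the invocation finishes. Behavior of the new helpers on a single thread:

    import kamon.instrumentation.jdbc.StatementMonitor

    val stmt = new Object()                         // stands in for any JDBC statement key
    StatementMonitor.incrementCallStackDepth(stmt)  // 0: outermost call, an Invocation gets created
    StatementMonitor.incrementCallStackDepth(stmt)  // 1: nested call (e.g. a Hikari proxy delegating)
    StatementMonitor.resetCallStackDepth(stmt)      // removes the entry once the outermost call closes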
] } } diff --git a/instrumentation/kamon-logback/src/test/scala/kamon/instrumentation/logback/package.scala b/instrumentation/kamon-logback/src/test/scala/kamon/instrumentation/logback/package.scala index f50bb7d1d..5c8b66233 100644 --- a/instrumentation/kamon-logback/src/test/scala/kamon/instrumentation/logback/package.scala +++ b/instrumentation/kamon-logback/src/test/scala/kamon/instrumentation/logback/package.scala @@ -15,11 +15,11 @@ import kamon.instrumentation.logback.tools.{ TraceIDConverter } import kamon.logback.util.LogbackConfigurator -import org.slf4j.impl.StaticLoggerBinder +import org.slf4j.LoggerFactory package object logback { - val context: LoggerContext = StaticLoggerBinder.getSingleton.getLoggerFactory.asInstanceOf[LoggerContext] + val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext] val configurator = new LogbackConfigurator(context) configurator.conversionRule("traceID", classOf[TraceIDConverter]) configurator.conversionRule("spanID", classOf[SpanIDConverter]) diff --git a/instrumentation/kamon-mongo-legacy/src/main/resources/reference.conf b/instrumentation/kamon-mongo-legacy/src/main/resources/reference.conf index c35c26e03..728ba2f65 100644 --- a/instrumentation/kamon-mongo-legacy/src/main/resources/reference.conf +++ b/instrumentation/kamon-mongo-legacy/src/main/resources/reference.conf @@ -8,10 +8,10 @@ kanela.modules { ] within = [ - "^com.mongodb.client.*" - "^com.mongodb.async.client.*" - "^com.mongodb.internal.*" - "^com.mongodb.operation.*" + "com.mongodb.client." + "com.mongodb.async.client." + "com.mongodb.internal." + "com.mongodb.operation." ] } -} \ No newline at end of file +} diff --git a/instrumentation/kamon-mongo/src/main/resources/reference.conf b/instrumentation/kamon-mongo/src/main/resources/reference.conf index 362c394b4..6170bcf13 100644 --- a/instrumentation/kamon-mongo/src/main/resources/reference.conf +++ b/instrumentation/kamon-mongo/src/main/resources/reference.conf @@ -8,10 +8,10 @@ kanela.modules { ] within = [ - "^com.mongodb.client.*" - "^com.mongodb.async.client.*" - "^com.mongodb.internal.*" - "^com.mongodb.operation.*" + "com.mongodb.client." + "com.mongodb.async.client." + "com.mongodb.internal." + "com.mongodb.operation." ] } -} \ No newline at end of file +} diff --git a/instrumentation/kamon-okhttp/src/main/resources/reference.conf b/instrumentation/kamon-okhttp/src/main/resources/reference.conf index fdf354072..b515bf4d1 100644 --- a/instrumentation/kamon-okhttp/src/main/resources/reference.conf +++ b/instrumentation/kamon-okhttp/src/main/resources/reference.conf @@ -97,7 +97,7 @@ kanela { "kamon.okhttp3.instrumentation.OkHttpInstrumentation" ] within = [ - "okhttp3..*" + "okhttp3." ] } } diff --git a/instrumentation/kamon-opensearch/src/main/resources/reference.conf b/instrumentation/kamon-opensearch/src/main/resources/reference.conf index 11f839324..3a39a313f 100644 --- a/instrumentation/kamon-opensearch/src/main/resources/reference.conf +++ b/instrumentation/kamon-opensearch/src/main/resources/reference.conf @@ -16,7 +16,7 @@ kanela { ] within = [ - "org.opensearch.client..*" + "org.opensearch.client." 
] } } diff --git a/instrumentation/kamon-pekko-connectors-kafka/src/main/resources/reference.conf b/instrumentation/kamon-pekko-connectors-kafka/src/main/resources/reference.conf index ff30765f8..942c4a51c 100644 --- a/instrumentation/kamon-pekko-connectors-kafka/src/main/resources/reference.conf +++ b/instrumentation/kamon-pekko-connectors-kafka/src/main/resources/reference.conf @@ -13,8 +13,8 @@ kanela { ] within = [ - "org.apache.pekko.kafka.ProducerMessage\\$Message", - "org.apache.pekko.kafka.ProducerMessage\\$MultiMessage", + "org.apache.pekko.kafka.ProducerMessage$Message", + "org.apache.pekko.kafka.ProducerMessage$MultiMessage", "org.apache.pekko.kafka.internal.DefaultProducerStageLogic" ] } diff --git a/instrumentation/kamon-pekko-grpc/src/main/resources/reference.conf b/instrumentation/kamon-pekko-grpc/src/main/resources/reference.conf index ba773a8b0..8e7e9d1cc 100644 --- a/instrumentation/kamon-pekko-grpc/src/main/resources/reference.conf +++ b/instrumentation/kamon-pekko-grpc/src/main/resources/reference.conf @@ -13,8 +13,8 @@ kanela.modules { ] within = [ - "^org.apache.pekko.grpc.internal..*", - "^org.apache.pekko.grpc.scaladsl.GrpcMarshalling$" + "org.apache.pekko.grpc.internal.", + "org.apache.pekko.grpc.scaladsl.GrpcMarshalling$" ] } } diff --git a/instrumentation/kamon-pekko-grpc/src/test/resources/application.conf b/instrumentation/kamon-pekko-grpc/src/test/resources/application.conf index 5d726de4a..38a2a9230 100644 --- a/instrumentation/kamon-pekko-grpc/src/test/resources/application.conf +++ b/instrumentation/kamon-pekko-grpc/src/test/resources/application.conf @@ -1 +1,2 @@ -pekko.http.server.preview.enable-http2 = on \ No newline at end of file +kamon.trace.sampler = "always" +pekko.http.server.preview.enable-http2 = on diff --git a/instrumentation/kamon-pekko-http/src/main/resources/reference.conf b/instrumentation/kamon-pekko-http/src/main/resources/reference.conf index b497c5428..3efebe056 100644 --- a/instrumentation/kamon-pekko-http/src/main/resources/reference.conf +++ b/instrumentation/kamon-pekko-http/src/main/resources/reference.conf @@ -244,8 +244,8 @@ kanela.modules { ] within = [ - "org.apache.pekko.http.*", - "org.apache.pekko.grpc.internal.*", + "org.apache.pekko.http.", + "org.apache.pekko.grpc.internal.", "org.apache.pekko.stream.scaladsl.Flow", "org.apache.pekko.stream.scaladsl.FlowOps" ] diff --git a/instrumentation/kamon-pekko/build.sbt b/instrumentation/kamon-pekko/build.sbt index 4a0c78a63..5b412cee4 100644 --- a/instrumentation/kamon-pekko/build.sbt +++ b/instrumentation/kamon-pekko/build.sbt @@ -1,26 +1,23 @@ // The Common configuration should always depend on the latest version of Pekko. All code in the Common configuration // should be source compatible with all Pekko versions. 
inConfig(Compile)(Defaults.compileSettings ++ Seq( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version) + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version) )) val pekkoVersion = "1.0.1" -libraryDependencies ++= { - if (scalaBinaryVersion.value == "2.11") Seq.empty - else Seq( - kanelaAgent % "provided", - scalatest % Test, - logbackClassic % Test, - "org.apache.pekko" %% "pekko-actor" % pekkoVersion % "provided,test", - "org.apache.pekko" %% "pekko-testkit" % pekkoVersion % "provided,test", - "org.apache.pekko" %% "pekko-slf4j" % pekkoVersion % "provided,test", - "org.apache.pekko" %% "pekko-remote" % pekkoVersion % "provided,test", - "org.apache.pekko" %% "pekko-cluster" % pekkoVersion % "provided,test", - "org.apache.pekko" %% "pekko-cluster-sharding" % pekkoVersion % "provided,test", - "org.apache.pekko" %% "pekko-protobuf" % pekkoVersion % "provided,test", - "org.apache.pekko" %% "pekko-testkit" % pekkoVersion % Test - ) -} +libraryDependencies ++= Seq( + kanelaAgent % "provided", + scalatest % Test, + logbackClassic % Test, + "org.apache.pekko" %% "pekko-actor" % pekkoVersion % "provided,test", + "org.apache.pekko" %% "pekko-testkit" % pekkoVersion % "provided,test", + "org.apache.pekko" %% "pekko-slf4j" % pekkoVersion % "provided,test", + "org.apache.pekko" %% "pekko-remote" % pekkoVersion % "provided,test", + "org.apache.pekko" %% "pekko-cluster" % pekkoVersion % "provided,test", + "org.apache.pekko" %% "pekko-cluster-sharding" % pekkoVersion % "provided,test", + "org.apache.pekko" %% "pekko-protobuf" % pekkoVersion % "provided,test", + "org.apache.pekko" %% "pekko-testkit" % pekkoVersion % Test +) exportJars := true @@ -36,5 +33,5 @@ lazy val baseTestSettings = Seq( ) inConfig(Test)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq( - crossScalaVersions := Seq(`scala_2.12_version`, `scala_2.13_version`, scala_3_version) + crossScalaVersions := Seq(`scala_2.13_version`, scala_3_version) )) diff --git a/instrumentation/kamon-pekko/src/main/resources/reference.conf b/instrumentation/kamon-pekko/src/main/resources/reference.conf index 283119e6c..e94761261 100644 --- a/instrumentation/kamon-pekko/src/main/resources/reference.conf +++ b/instrumentation/kamon-pekko/src/main/resources/reference.conf @@ -159,13 +159,13 @@ kanela.modules { ] within = [ - "^org.apache.pekko.dispatch..*", - "^org.apache.pekko.event..*", - "^org.apache.pekko.actor..*", - "^org.apache.pekko.pattern..*", - "^org.apache.pekko.cluster..*", - "^org.apache.pekko.routing..*", - "kamon.instrumentation.pekko.instrumentations..*" + "org.apache.pekko.dispatch.", + "org.apache.pekko.event.", + "org.apache.pekko.actor.", + "org.apache.pekko.pattern.", + "org.apache.pekko.cluster.", + "org.apache.pekko.routing.", + "kamon.instrumentation.pekko.instrumentations." ] } @@ -180,12 +180,12 @@ kanela.modules { ] within = [ - "org.apache.pekko.dispatch..*", - "org.apache.pekko.util..*", - "org.apache.pekko.remote..*", - "org.apache.pekko.actor..*", - "org.apache.pekko.cluster..*", - "org.apache.pekko.serialization..*" + "org.apache.pekko.dispatch.", + "org.apache.pekko.util.", + "org.apache.pekko.remote.", + "org.apache.pekko.actor.", + "org.apache.pekko.cluster.", + "org.apache.pekko.serialization." 
] } @@ -199,12 +199,12 @@ kanela.modules { ] within = [ - "org.apache.pekko.dispatch..*", - "org.apache.pekko.util..*", - "org.apache.pekko.remote..*", - "org.apache.pekko.actor..*" - "org.apache.pekko.cluster..*" - "org.apache.pekko.serialization..*" + "org.apache.pekko.dispatch.", + "org.apache.pekko.util.", + "org.apache.pekko.remote.", + "org.apache.pekko.actor.", + "org.apache.pekko.cluster.", + "org.apache.pekko.serialization." ] } } diff --git a/instrumentation/kamon-play/build.sbt b/instrumentation/kamon-play/build.sbt index b19f7914a..046896484 100644 --- a/instrumentation/kamon-play/build.sbt +++ b/instrumentation/kamon-play/build.sbt @@ -1,6 +1,5 @@ import sbt.Tests._ -val `Play-2.6-version` = "2.6.25" val `Play-2.7-version` = "2.7.9" val `Play-2.8-version` = "2.8.2" @@ -8,15 +7,13 @@ val `Play-2.8-version` = "2.8.2" * Test Configurations */ lazy val TestCommon = config("test-common") extend (Compile) -lazy val `Test-Play-2.6` = config("test-play-2.6") lazy val `Test-Play-2.7` = config("test-play-2.7") lazy val `Test-Play-2.8` = config("test-play-2.8") configs( TestCommon, `Test-Play-2.8`, - `Test-Play-2.7`, - `Test-Play-2.6` + `Test-Play-2.7` ) libraryDependencies ++= Seq( @@ -27,34 +24,19 @@ libraryDependencies ++= Seq( "com.typesafe.play" %% "play-ws" % `Play-2.7-version` % "provided,test-common,test-play-2.7", "com.typesafe.play" %% "play-test" % `Play-2.7-version` % "provided,test-common,test-play-2.7", "com.typesafe.play" %% "play-logback" % `Play-2.7-version` % "test-common,test-play-2.7", - scalatest % "test-common,test-play-2.8,test-play-2.7,test-play-2.6", - "org.scalatestplus.play" %% "scalatestplus-play" % "4.0.3" % "test-play-2.8,test-play-2.7,test-play-2.6" + scalatest % "test-common,test-play-2.8,test-play-2.7", + "org.scalatestplus.play" %% "scalatestplus-play" % "4.0.3" % "test-play-2.8,test-play-2.7" ) -libraryDependencies ++= { - if (scalaBinaryVersion.value == "2.13") Seq.empty - else Seq( - "com.typesafe.play" %% "play" % `Play-2.6-version` % "test-play-2.6", - "com.typesafe.play" %% "play-netty-server" % `Play-2.6-version` % "test-play-2.6", - "com.typesafe.play" %% "play-akka-http-server" % `Play-2.6-version` % "test-play-2.6", - "com.typesafe.play" %% "play-ws" % `Play-2.6-version` % "test-play-2.6", - "com.typesafe.play" %% "play-test" % `Play-2.6-version` % "test-play-2.6", - "com.typesafe.play" %% "play-logback" % `Play-2.6-version` % "test-play-2.6" - ) -} - -libraryDependencies ++= { - if (scalaBinaryVersion.value == "2.11") Seq.empty - else Seq( - "com.typesafe.play" %% "play-akka-http2-support" % `Play-2.8-version` % "test-play-2.8", - "com.typesafe.play" %% "play" % `Play-2.8-version` % "test-play-2.8", - "com.typesafe.play" %% "play-netty-server" % `Play-2.8-version` % "test-play-2.8", - "com.typesafe.play" %% "play-akka-http-server" % `Play-2.8-version` % "test-play-2.8", - "com.typesafe.play" %% "play-ws" % `Play-2.8-version` % "test-play-2.8", - "com.typesafe.play" %% "play-test" % `Play-2.8-version` % "test-play-2.8", - "com.typesafe.play" %% "play-logback" % `Play-2.8-version` % "test-play-2.8" - ) -} +libraryDependencies ++= Seq( + "com.typesafe.play" %% "play-akka-http2-support" % `Play-2.8-version` % "test-play-2.8", + "com.typesafe.play" %% "play" % `Play-2.8-version` % "test-play-2.8", + "com.typesafe.play" %% "play-netty-server" % `Play-2.8-version` % "test-play-2.8", + "com.typesafe.play" %% "play-akka-http-server" % `Play-2.8-version` % "test-play-2.8", + "com.typesafe.play" %% "play-ws" % `Play-2.8-version` % "test-play-2.8",
"com.typesafe.play" %% "play-test" % `Play-2.8-version` % "test-play-2.8", + "com.typesafe.play" %% "play-logback" % `Play-2.8-version` % "test-play-2.8" +) /** * Test-related settings @@ -68,15 +50,7 @@ lazy val baseTestSettings = Seq( ) inConfig(TestCommon)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq( - crossScalaVersions := Seq("2.11.12", "2.12.13") -)) - -inConfig(`Test-Play-2.6`)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq( - sources := joinSources(TestCommon, `Test-Play-2.6`).value, - crossScalaVersions := Seq("2.11.12", "2.12.13"), - testGrouping := singleTestPerJvm(definedTests.value, javaOptions.value), - unmanagedResourceDirectories ++= (Compile / unmanagedResourceDirectories).value, - unmanagedResourceDirectories ++= (TestCommon / unmanagedResourceDirectories).value + crossScalaVersions := Seq(`scala_2.13_version`) )) inConfig(`Test-Play-2.7`)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq( @@ -88,23 +62,17 @@ inConfig(`Test-Play-2.7`)(Defaults.testSettings ++ instrumentationSettings ++ ba inConfig(`Test-Play-2.8`)(Defaults.testSettings ++ instrumentationSettings ++ baseTestSettings ++ Seq( sources := joinSources(TestCommon, `Test-Play-2.8`).value, - crossScalaVersions := Seq("2.12.13", "2.13.3"), + crossScalaVersions := Seq(`scala_2.13_version`), testGrouping := singleTestPerJvm(definedTests.value, javaOptions.value), unmanagedResourceDirectories ++= (Compile / unmanagedResourceDirectories).value, unmanagedResourceDirectories ++= (TestCommon / unmanagedResourceDirectories).value )) Test / test := Def.taskDyn { - if (scalaBinaryVersion.value == "2.13") - Def.task { - (`Test-Play-2.7` / test).value - (`Test-Play-2.8` / test).value - } - else - Def.task { - (`Test-Play-2.6` / test).value - (`Test-Play-2.7` / test).value - } + Def.task { + (`Test-Play-2.7` / test).value + (`Test-Play-2.8` / test).value + } }.value def singleTestPerJvm(tests: Seq[TestDefinition], jvmSettings: Seq[String]): Seq[Group] = diff --git a/instrumentation/kamon-play/src/main/resources/reference.conf b/instrumentation/kamon-play/src/main/resources/reference.conf index 4b81d63ef..d3ccfcd15 100644 --- a/instrumentation/kamon-play/src/main/resources/reference.conf +++ b/instrumentation/kamon-play/src/main/resources/reference.conf @@ -251,7 +251,7 @@ kanela.modules { ] within = [ - "^play.*" + "play." 
] } -} \ No newline at end of file +} diff --git a/instrumentation/kamon-play/src/main/scala/kamon/instrumentation/play/PlayServerInstrumentation.scala b/instrumentation/kamon-play/src/main/scala/kamon/instrumentation/play/PlayServerInstrumentation.scala index 2743afba1..2356304a2 100644 --- a/instrumentation/kamon-play/src/main/scala/kamon/instrumentation/play/PlayServerInstrumentation.scala +++ b/instrumentation/kamon-play/src/main/scala/kamon/instrumentation/play/PlayServerInstrumentation.scala @@ -31,7 +31,6 @@ import kamon.instrumentation.http.HttpServerInstrumentation.RequestHandler import kamon.instrumentation.http.{HttpMessage, HttpServerInstrumentation} import kamon.util.CallingThreadExecutionContext import kanela.agent.api.instrumentation.InstrumentationBuilder -import kanela.agent.api.instrumentation.classloader.ClassRefiner import kanela.agent.api.instrumentation.mixin.Initializer import kanela.agent.libs.net.bytebuddy.asm.Advice import org.slf4j.LoggerFactory @@ -46,32 +45,28 @@ import scala.util.{Failure, Success} class PlayServerInstrumentation extends InstrumentationBuilder { /** - * When using the Akka HTTP server, we will use the exact same instrumentation that comes from the Akka HTTP module, - * the only difference here is that we will change the component name. - */ - private val isAkkaHttpAround = ClassRefiner.builder().mustContain("play.core.server.AkkaHttpServerProvider").build() - + * When using the Akka HTTP server, we will use the exact same instrumentation that comes from the Akka HTTP module, + * the only difference here is that we will change the component name. + */ onType("play.core.server.AkkaHttpServer") - .when(isAkkaHttpAround) + .when(classIsPresent("play.core.server.AkkaHttpServerProvider")) .advise( anyMethods("createServerBinding", "play$core$server$AkkaHttpServer$$createServerBinding"), CreateServerBindingAdvice ) /** - * When using the Netty HTTP server we are rolling our own instrumentation which simply requires us to create the - * HttpServerInstrumentation instance and call the expected callbacks on it. - */ - private val isNettyAround = ClassRefiner.builder().mustContain("play.core.server.NettyServerProvider").build() - + * When using the Netty HTTP server we are rolling our own instrumentation which simply requires us to create the + * HttpServerInstrumentation instance and call the expected callbacks on it. 
+ */ onType("play.core.server.NettyServer") - .when(isNettyAround) + .when(classIsPresent("play.core.server.NettyServerProvider")) .mixin(classOf[HasServerInstrumentation.Mixin]) .advise(isConstructor, NettyServerInitializationAdvice) if (hasGenericFutureListener()) { onType("play.core.server.netty.PlayRequestHandler") - .when(isNettyAround) + .when(classIsPresent("play.core.server.NettyServerProvider")) .mixin(classOf[HasServerInstrumentation.Mixin]) .mixin(classOf[HasTimestamp.Mixin]) .advise(isConstructor, PlayRequestHandlerConstructorAdvice) @@ -87,7 +82,7 @@ class PlayServerInstrumentation extends InstrumentationBuilder { private def hasGenericFutureListener(): Boolean = { try { Class.forName("io.netty.util.concurrent.GenericFutureListener") != null } - catch { case _ => false } + catch { case _: Throwable => false } } } diff --git a/instrumentation/kamon-redis/src/main/resources/reference.conf b/instrumentation/kamon-redis/src/main/resources/reference.conf index 6cd5ee97c..5532576b7 100644 --- a/instrumentation/kamon-redis/src/main/resources/reference.conf +++ b/instrumentation/kamon-redis/src/main/resources/reference.conf @@ -10,9 +10,9 @@ kanela.modules { ] within = [ - "redis.clients.jedis..*", - "io.lettuce.core..*", - "redis..*", + "redis.clients.jedis.", + "io.lettuce.core.", + "redis.", ] } } diff --git a/instrumentation/kamon-scala-future/src/main/resources/reference.conf b/instrumentation/kamon-scala-future/src/main/resources/reference.conf index 1e8f74f64..548c762bb 100644 --- a/instrumentation/kamon-scala-future/src/main/resources/reference.conf +++ b/instrumentation/kamon-scala-future/src/main/resources/reference.conf @@ -23,8 +23,8 @@ kamon.instrumentation.futures.scala { kanela.modules { executor-service { within += "scala.concurrent.impl.CallbackRunnable" - within += "scala.concurrent.impl.Future\\$PromiseCompletingRunnable" - within += "scala.concurrent.impl.Promise\\$Transformation" + within += "scala.concurrent.impl.Future$PromiseCompletingRunnable" + within += "scala.concurrent.impl.Promise$Transformation" } scala-future { @@ -37,8 +37,8 @@ kanela.modules { ] within = [ - "scala.concurrent.*", - "scala.util.*", + "scala.concurrent", + "scala.util", ] } } diff --git a/instrumentation/kamon-scala-future/src/main/scala-2.11/kamon/instrumentation/futures/scala/FutureChainingInstrumentation.scala b/instrumentation/kamon-scala-future/src/main/scala-2.11/kamon/instrumentation/futures/scala/FutureChainingInstrumentation.scala deleted file mode 100644 index 0ae9ee53d..000000000 --- a/instrumentation/kamon-scala-future/src/main/scala-2.11/kamon/instrumentation/futures/scala/FutureChainingInstrumentation.scala +++ /dev/null @@ -1,130 +0,0 @@ -package kamon.instrumentation.futures.scala - -import kamon.Kamon -import kamon.context.Storage.Scope -import kamon.instrumentation.context._ -import kamon.instrumentation.futures.scala.CallbackRunnableRunInstrumentation.InternalState -import kanela.agent.api.instrumentation.InstrumentationBuilder -import kanela.agent.api.instrumentation.bridge.Bridge -import kanela.agent.libs.net.bytebuddy.asm.Advice - -/** - * Ensures that chained transformations on Scala Futures (e.g. future.map(...).flatmap(...)) will propagate the context - * set on each transformation to the next transformation. - */ -class FutureChainingInstrumentation extends InstrumentationBuilder { - - /** - * Captures the current context when a Try instance is created. Since Future's use a Try underneath to handle the - * completed value we decided to instrument that instead. 
As a side effect, all Try instances are instrumented even - * if they are not being used in a future, although that is just one extra field that will not be used or visible to - * anybody who is not looking for it. - */ - onTypes("scala.util.Success", "scala.util.Failure") - .mixin(classOf[HasContext.Mixin]) - .advise(isConstructor, CaptureCurrentContextOnExit) - - /** - * Ensures that if resolveTry returns a new Try instance, the captured context will be transferred to that the new - * instance. - */ - onType("scala.concurrent.impl.Promise") - .advise(method("resolveTry"), CopyContextFromArgumentToResult) - - /** - * Captures the scheduling timestamp when a CallbackRunnable is scheduled for execution and then uses the Context - * from the completed value as the current Context while the Runnable is executed. - */ - onType("scala.concurrent.impl.CallbackRunnable") - .mixin(classOf[HasContext.Mixin]) - .mixin(classOf[HasTimestamp.Mixin]) - .bridge(classOf[InternalState]) - .advise(isConstructor, CaptureCurrentContextOnExit) - .advise(method("run"), CallbackRunnableRunInstrumentation) - .advise(method("executeWithValue"), CaptureCurrentTimestampOnEnter) - - /** - * Similarly to the CallbackRunnable instrumentation, although the PromiseCompletingRunnable is only used to run the - * Future's body on Scala 2.11. - */ - onType("scala.concurrent.impl.Future$PromiseCompletingRunnable") - .mixin(classOf[HasContext.Mixin]) - .mixin(classOf[HasTimestamp.Mixin]) - .advise(isConstructor, CaptureCurrentContextOnExit) - .advise(isConstructor, CaptureCurrentTimestampOnExit) - .advise(method("run"), PromiseCompletingRunnableRunInstrumentation) -} - -object CallbackRunnableRunInstrumentation { - - /** - * Exposes access to the "value" member of "scala.concurrent.impl.CallbackRunnable". - */ - trait InternalState { - - @Bridge("scala.util.Try value()") - def valueBridge(): Any - - } - - @Advice.OnMethodEnter(suppress = classOf[Throwable]) - def enter(@Advice.This runnable: HasContext with HasTimestamp with InternalState): Scope = { - val timestamp = runnable.timestamp - val valueContext = runnable.valueBridge().asInstanceOf[HasContext].context - val context = if (valueContext.nonEmpty()) valueContext else runnable.context - - storeCurrentRunnableTimestamp(timestamp) - Kamon.storeContext(context) - } - - @Advice.OnMethodExit(suppress = classOf[Throwable]) - def exit(@Advice.Enter scope: Scope): Unit = { - clearCurrentRunnableTimestamp() - scope.close() - } - - /** - * Exposes the scheduling timestamp of the currently running CallbackRunnable, if any. This timestamp should be - * taken when the CallbackRunnable.executeWithValue method is called. 
- */ - def currentRunnableScheduleTimestamp(): Option[Long] = - Option(_schedulingTimestamp.get()) - - /** Keeps track of the scheduling time of the CallbackRunnable currently running on this thread, if any */ - private val _schedulingTimestamp = new ThreadLocal[java.lang.Long]() - - private[scala] def storeCurrentRunnableTimestamp(timestamp: Long): Unit = - _schedulingTimestamp.set(timestamp) - - private[scala] def clearCurrentRunnableTimestamp(): Unit = - _schedulingTimestamp.remove() -} - -object PromiseCompletingRunnableRunInstrumentation { - - @Advice.OnMethodEnter(suppress = classOf[Throwable]) - def enter(@Advice.This runnable: HasContext with HasTimestamp): Scope = { - CallbackRunnableRunInstrumentation.storeCurrentRunnableTimestamp(runnable.timestamp) - Kamon.storeContext(runnable.context) - } - - @Advice.OnMethodExit(suppress = classOf[Throwable]) - def exit(@Advice.Enter scope: Scope): Unit = { - CallbackRunnableRunInstrumentation.clearCurrentRunnableTimestamp() - scope.close() - } -} - -object CopyContextFromArgumentToResult { - - @Advice.OnMethodExit(suppress = classOf[Throwable]) - def enter(@Advice.Argument(0) arg: Any, @Advice.Return result: Any): Any = - result.asInstanceOf[HasContext].setContext(arg.asInstanceOf[HasContext].context) -} - -object CopyCurrentContextToArgument { - - @Advice.OnMethodEnter(suppress = classOf[Throwable]) - def enter(@Advice.Argument(0) arg: Any): Unit = - arg.asInstanceOf[HasContext].setContext(Kamon.currentContext()) -} diff --git a/instrumentation/kamon-scala-future/src/main/scala-2.12/kamon/instrumentation/futures/scala/FutureChainingInstrumentation.scala b/instrumentation/kamon-scala-future/src/main/scala-2.12/kamon/instrumentation/futures/scala/FutureChainingInstrumentation.scala deleted file mode 100644 index a73b0a1b3..000000000 --- a/instrumentation/kamon-scala-future/src/main/scala-2.12/kamon/instrumentation/futures/scala/FutureChainingInstrumentation.scala +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright 2013-2020 The Kamon Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package kamon.instrumentation.futures.scala - -import kamon.Kamon -import kamon.context.Context -import kamon.context.Storage.Scope -import kamon.instrumentation.context._ -import kamon.instrumentation.futures.scala.CallbackRunnableRunInstrumentation.InternalState -import kanela.agent.api.instrumentation.InstrumentationBuilder -import kanela.agent.api.instrumentation.bridge.Bridge -import kanela.agent.libs.net.bytebuddy.asm.Advice - -import scala.concurrent.Future - -/** - * Ensures that chained transformations on Scala Futures (e.g. future.map(...).flatmap(...)) will propagate the context - * set on each transformation to the next transformation. - */ -class FutureChainingInstrumentation extends InstrumentationBuilder { - - /** - * Captures the current context when a Try instance is created. Since Future's use a Try underneath to handle the - * completed value we decided to instrument that instead. 
As a side effect, all Try instances are instrumented even - * if they are not being used in a future, although that is just one extra field that will not be used or visible to - * anybody who is not looking for it. - */ - onTypes("scala.util.Success", "scala.util.Failure") - .mixin(classOf[HasContext.Mixin]) - .advise(isConstructor, CaptureCurrentContextOnExit) - - /** - * Ensures that if resolveTry returns a new Try instance, the captured context will be transferred to that the new - * instance. - */ - onType("scala.concurrent.impl.Promise") - .advise(method("resolveTry"), CopyContextFromArgumentToResult) - - /** - * Captures the scheduling timestamp when a CallbackRunnable is scheduled for execution and then uses the Context - * from the completed value as the current Context while the Runnable is executed. - */ - onType("scala.concurrent.impl.CallbackRunnable") - .mixin(classOf[HasContext.Mixin]) - .mixin(classOf[HasTimestamp.Mixin]) - .bridge(classOf[InternalState]) - .advise(isConstructor, CaptureCurrentContextOnExit) - .advise(method("run"), CallbackRunnableRunInstrumentation) - .advise(method("executeWithValue"), CaptureCurrentTimestampOnEnter) - - /** - * In Scala 2.12, all Futures are created by calling .map(...) on Future.unit and if happens that while that seed - * Future was initialized there was non-empty current Context, that Context will be tied to all Futures which is - * obviously wrong. Little tweak ensures that no Context is retained on that seed Future. - */ - onType("scala.concurrent.Future$") - .advise(isConstructor, CleanContextFromSeedFuture) - -} - -object CallbackRunnableRunInstrumentation { - - /** - * Exposes access to the "value" member of "scala.concurrent.impl.CallbackRunnable". - */ - trait InternalState { - - @Bridge("scala.util.Try value()") - def valueBridge(): Any - - } - - @Advice.OnMethodEnter(suppress = classOf[Throwable]) - def enter(@Advice.This runnable: HasContext with HasTimestamp with InternalState): Scope = { - val timestamp = runnable.timestamp - val valueContext = runnable.valueBridge().asInstanceOf[HasContext].context - val context = if (valueContext.nonEmpty()) valueContext else runnable.context - - storeCurrentRunnableTimestamp(timestamp) - Kamon.storeContext(context) - } - - @Advice.OnMethodExit(suppress = classOf[Throwable]) - def exit(@Advice.Enter scope: Scope): Unit = { - clearCurrentRunnableTimestamp() - scope.close() - } - - /** - * Exposes the scheduling timestamp of the currently running CallbackRunnable, if any. This timestamp should be - * taken when the CallbackRunnable.executeWithValue method is called. 
- */ - def currentRunnableScheduleTimestamp(): Option[Long] = - Option(_schedulingTimestamp.get()) - - /** Keeps track of the scheduling time of the CallbackRunnable currently running on this thread, if any */ - private val _schedulingTimestamp = new ThreadLocal[java.lang.Long]() - - private def storeCurrentRunnableTimestamp(timestamp: Long): Unit = - _schedulingTimestamp.set(timestamp) - - private def clearCurrentRunnableTimestamp(): Unit = - _schedulingTimestamp.remove() -} - -object CopyContextFromArgumentToResult { - - @Advice.OnMethodExit(suppress = classOf[Throwable]) - def exit(@Advice.Argument(0) arg: Any, @Advice.Return result: Any): Unit = { - result.asInstanceOf[HasContext].setContext(arg.asInstanceOf[HasContext].context) - } -} - -object CopyCurrentContextToArgument { - - @Advice.OnMethodEnter(suppress = classOf[Throwable]) - def enter(@Advice.Argument(0) arg: Any): Unit = - arg.asInstanceOf[HasContext].setContext(Kamon.currentContext()) -} - -object CleanContextFromSeedFuture { - - @Advice.OnMethodExit - def exit(@Advice.This futureCompanionObject: Any): Unit = { - val unitField = futureCompanionObject.getClass.getDeclaredField("unit") - unitField.setAccessible(true) - unitField.get(futureCompanionObject).asInstanceOf[Future[Unit]].value.foreach(unitValue => { - unitValue.asInstanceOf[HasContext].setContext(Context.Empty) - }) - } -} diff --git a/instrumentation/kamon-scalaz-future/src/main/resources/reference.conf b/instrumentation/kamon-scalaz-future/src/main/resources/reference.conf index ea3e1be93..68644ddf9 100644 --- a/instrumentation/kamon-scalaz-future/src/main/resources/reference.conf +++ b/instrumentation/kamon-scalaz-future/src/main/resources/reference.conf @@ -4,6 +4,6 @@ kanela.modules { executor-service { - within += "scalaz.concurrent.*" + within += "scalaz.concurrent." 
} -} \ No newline at end of file +} diff --git a/instrumentation/kamon-spring/src/main/resources/reference.conf b/instrumentation/kamon-spring/src/main/resources/reference.conf index 0ef0b479d..4a8f61895 100644 --- a/instrumentation/kamon-spring/src/main/resources/reference.conf +++ b/instrumentation/kamon-spring/src/main/resources/reference.conf @@ -268,7 +268,7 @@ kanela { ] within = [ - "org.springframework.web.reactive.function.client..*", + "org.springframework.web.reactive.function.client.", "org.springframework.web.servlet.DispatcherServlet", "org.springframework.web.context.request.async.WebAsyncManager" ] diff --git a/instrumentation/kamon-spring/src/main/scala/kamon/instrumentation/spring/server/InstrumentationUtils.scala b/instrumentation/kamon-spring/src/main/scala/kamon/instrumentation/spring/server/InstrumentationUtils.scala index a61ddb6be..81c6c7834 100644 --- a/instrumentation/kamon-spring/src/main/scala/kamon/instrumentation/spring/server/InstrumentationUtils.scala +++ b/instrumentation/kamon-spring/src/main/scala/kamon/instrumentation/spring/server/InstrumentationUtils.scala @@ -28,7 +28,7 @@ object InstrumentationUtils { private var _headers = Map.empty[String, String] override def statusCode: Int = { - response.getStatus + response.getStatus() } override def write(header: String, value: String): Unit = { diff --git a/instrumentation/kamon-spring/src/test/scala/kamon.instrumentation/SpringClientInstrumentationSpec.scala b/instrumentation/kamon-spring/src/test/scala/kamon.instrumentation/SpringClientInstrumentationSpec.scala index 0cf26f21d..4f3571ce5 100644 --- a/instrumentation/kamon-spring/src/test/scala/kamon.instrumentation/SpringClientInstrumentationSpec.scala +++ b/instrumentation/kamon-spring/src/test/scala/kamon.instrumentation/SpringClientInstrumentationSpec.scala @@ -22,6 +22,7 @@ class SpringClientInstrumentationSpec override def beforeAll(): Unit = { super.beforeAll() + System.setProperty("org.springframework.boot.logging.LoggingSystem", "none") TestApp.main(Array(port)) } diff --git a/instrumentation/kamon-spring/src/test/scala/kamon.instrumentation/SpringMVCInstrumentationSpec.scala b/instrumentation/kamon-spring/src/test/scala/kamon.instrumentation/SpringMVCInstrumentationSpec.scala index 87dadf5ea..142f05bb7 100644 --- a/instrumentation/kamon-spring/src/test/scala/kamon.instrumentation/SpringMVCInstrumentationSpec.scala +++ b/instrumentation/kamon-spring/src/test/scala/kamon.instrumentation/SpringMVCInstrumentationSpec.scala @@ -24,6 +24,7 @@ class SpringMVCInstrumentationSpec override def beforeAll(): Unit = { super.beforeAll() + System.setProperty("org.springframework.boot.logging.LoggingSystem", "none") TestApp.main(Array(port)) } diff --git a/instrumentation/kamon-tapir/src/legacy/scala/kamon/instrumentation/tapir/TapirInstrumentation.scala b/instrumentation/kamon-tapir/src/legacy/scala/kamon/instrumentation/tapir/TapirInstrumentation.scala index 82ee61b42..6a5d02988 100644 --- a/instrumentation/kamon-tapir/src/legacy/scala/kamon/instrumentation/tapir/TapirInstrumentation.scala +++ b/instrumentation/kamon-tapir/src/legacy/scala/kamon/instrumentation/tapir/TapirInstrumentation.scala @@ -23,11 +23,10 @@ import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation.{Argument, import sttp.tapir.server.ServerEndpoint import java.util.concurrent.Callable -import kanela.agent.api.instrumentation.classloader.ClassRefiner class TapirInstrumentationLegacy extends InstrumentationBuilder { onTypes("sttp.tapir.server.akkahttp.EndpointToAkkaServer", 
"sttp.tapir.server.akkahttp.AkkaHttpServerInterpreter") - .when(ClassRefiner.builder().mustContain("sttp.tapir.server.ServerEndpoint").withMethod("renderPathTemplate")) + .when(classIsPresent("sttp.tapir.server.ServerEndpoint").withExpectedMethodNames("renderPathTemplate")) .intercept(method("toRoute"), classOf[TapirToRouteInterceptorLegacy]) } diff --git a/instrumentation/kamon-tapir/src/main/resources/reference.conf b/instrumentation/kamon-tapir/src/main/resources/reference.conf index 636dc0649..473d0aec9 100644 --- a/instrumentation/kamon-tapir/src/main/resources/reference.conf +++ b/instrumentation/kamon-tapir/src/main/resources/reference.conf @@ -16,7 +16,7 @@ kanela.modules { "kamon.instrumentation.tapir.TapirInstrumentation" ] within = [ - "^sttp.tapir.server.akkahttp.*" + "sttp.tapir.server.akkahttp." ] } } @@ -29,7 +29,7 @@ kanela.modules { "kamon.instrumentation.tapir.TapirInstrumentationLegacy" ] within = [ - "^sttp.tapir.server.akkahttp.*" + "sttp.tapir.server.akkahttp." ] } } diff --git a/instrumentation/kamon-tapir/src/main/scala/kamon/instrumentation/tapir/TapirInstrumentation.scala b/instrumentation/kamon-tapir/src/main/scala/kamon/instrumentation/tapir/TapirInstrumentation.scala index b7d19714f..1ec507433 100644 --- a/instrumentation/kamon-tapir/src/main/scala/kamon/instrumentation/tapir/TapirInstrumentation.scala +++ b/instrumentation/kamon-tapir/src/main/scala/kamon/instrumentation/tapir/TapirInstrumentation.scala @@ -19,7 +19,6 @@ package kamon.instrumentation.tapir import akka.http.scaladsl.server.Route import kamon.Kamon import kanela.agent.api.instrumentation.InstrumentationBuilder -import kanela.agent.api.instrumentation.classloader.ClassRefiner import kanela.agent.libs.net.bytebuddy.implementation.bind.annotation.{Argument, SuperCall} import sttp.tapir.server.ServerEndpoint @@ -27,7 +26,7 @@ import java.util.concurrent.Callable class TapirInstrumentation extends InstrumentationBuilder { onTypes("sttp.tapir.server.akkahttp.EndpointToAkkaServer", "sttp.tapir.server.akkahttp.AkkaHttpServerInterpreter") - .when(ClassRefiner.builder().mustContain("sttp.tapir.server.ServerEndpoint").withMethods("showPathTemplate")) + .when(classIsPresent("sttp.tapir.server.ServerEndpoint").withExpectedMethodNames("showPathTemplate")) .intercept(method("toRoute"), classOf[TapirToRouteInterceptor]) } diff --git a/instrumentation/kamon-twitter-future/src/main/resources/reference.conf b/instrumentation/kamon-twitter-future/src/main/resources/reference.conf index 6dec97fa3..071ea5035 100644 --- a/instrumentation/kamon-twitter-future/src/main/resources/reference.conf +++ b/instrumentation/kamon-twitter-future/src/main/resources/reference.conf @@ -12,13 +12,13 @@ kanela.modules { ] within = [ - "com.twitter.util.Promise.*" + "com.twitter.util.Promise" ] } executor-service { - within += "^com.twitter.util.ConstFuture.*" - within += "^com.twitter.util.ExecutorServiceFuturePool.*" - within += "^com.twitter.bijection.twitter_util.ScalaFuturePool.*" + within += "com.twitter.util.ConstFuture" + within += "com.twitter.util.ExecutorServiceFuturePool" + within += "com.twitter.bijection.twitter_util.ScalaFuturePool" } -} \ No newline at end of file +} diff --git a/instrumentation/kamon-zio-2/src/main/resources/reference.conf b/instrumentation/kamon-zio-2/src/main/resources/reference.conf index e600a9095..d711c024a 100644 --- a/instrumentation/kamon-zio-2/src/main/resources/reference.conf +++ b/instrumentation/kamon-zio-2/src/main/resources/reference.conf @@ -12,7 +12,7 @@ kanela.modules { within = [ 
"zio.internal.FiberRuntime", - "zio\\.Runtime.*", + "zio.Runtime", ] } diff --git a/project/Build.scala b/project/Build.scala index df1f765b4..d482c4739 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -34,10 +34,10 @@ object BaseProject extends AutoPlugin { /** Marker configuration for dependencies that will be shaded into their module's jar. */ lazy val Shaded = config("shaded").hide - val kanelaAgent = "io.kamon" % "kanela-agent" % "1.0.18" - val slf4jApi = "org.slf4j" % "slf4j-api" % "1.7.36" - val slf4jnop = "org.slf4j" % "slf4j-nop" % "1.7.36" - val logbackClassic = "ch.qos.logback" % "logback-classic" % "1.2.12" + val kanelaAgent = "io.kamon" % "kanela-agent" % "2.0.0-beta.1" + val slf4jApi = "org.slf4j" % "slf4j-api" % "2.0.17" + val slf4jnop = "org.slf4j" % "slf4j-nop" % "2.0.17" + val logbackClassic = "ch.qos.logback" % "logback-classic" % "1.3.15" val scalatest = "org.scalatest" %% "scalatest" % "3.2.9" val hdrHistogram = "org.hdrhistogram" % "HdrHistogram" % "2.1.10" val okHttp = "com.squareup.okhttp3" % "okhttp" % "4.12.0" @@ -63,10 +63,8 @@ object BaseProject extends AutoPlugin { SettingKey[Boolean]("ideSkipProject") := true ) - val `scala_2.11_version` = "2.11.12" - val `scala_2.12_version` = "2.12.19" val `scala_2.13_version` = "2.13.13" - val scala_3_version = "3.3.1" + val scala_3_version = "3.3.5" // This installs the GPG signing key from the setupGpg() @@ -143,8 +141,6 @@ object BaseProject extends AutoPlugin { crossPaths := true, scalaVersion := autoImport.`scala_2.13_version`, crossScalaVersions := Seq( - autoImport.`scala_2.11_version`, - autoImport.`scala_2.12_version`, autoImport.`scala_2.13_version`, autoImport.`scala_3_version` ), @@ -159,24 +155,19 @@ object BaseProject extends AutoPlugin { "-XDignore.symbol.file" ), scalacOptions := Seq( - "-g:vars", + "-release:8", "-feature", "-unchecked", "-deprecation", - "-target:jvm-1.8", - "-Ywarn-dead-code", "-encoding", "UTF-8", "-language:postfixOps", "-language:higherKinds", - "-Xlog-reflective-calls", "-language:existentials", "-language:implicitConversions" ) ++ (CrossVersion.partialVersion(scalaVersion.value) match { - case Some((2, 11)) => Seq("-Xfuture", "-Ybackend:GenASM") - case Some((2, 12)) => Seq("-Xfuture", "-opt:l:method,-closure-invocations") - case Some((2, 13)) => Seq.empty - case Some((3, _)) => Seq("-source:3.0-migration", "-Xtarget:8") + case Some((2, 13)) => Seq("-g:vars", "-Ywarn-dead-code", "-Xlog-reflective-calls") + case Some((3, _)) => Seq("-source:3.0-migration") case _ => Seq.empty }) ) diff --git a/project/build.properties b/project/build.properties index 27430827b..cc68b53f1 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.9.6 +sbt.version=1.10.11