diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..f3b2759
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,8 @@
+target/
+.idea
+*.iml
+*.log
+logs/
+.DS_Store
+.bloop/
+.metals/
\ No newline at end of file
diff --git a/.scalafmt.conf b/.scalafmt.conf
new file mode 100644
index 0000000..b8533b5
--- /dev/null
+++ b/.scalafmt.conf
@@ -0,0 +1,10 @@
+style = defaultWithAlign
+maxColumn = 140
+align.openParenCallSite = false
+align.openParenDefnSite = false
+align = true
+danglingParentheses = true
+
+rewrite.rules = [RedundantBraces, RedundantParens, SortImports, PreferCurlyFors]
+rewrite.redundantBraces.includeUnitMethods = true
+rewrite.redundantBraces.stringInterpolation = true
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..4ee85a4
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,10 @@
+language: scala
+scala:
+ - 2.12.8
+ - 2.11.12
+
+script:
+ - sbt clean coverage test coverageReport
+
+after_success:
+ - bash <(curl -s https://codecov.io/bash)
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..cfcb395
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 dataroot
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e7341e5
--- /dev/null
+++ b/README.md
@@ -0,0 +1,215 @@
+---
+Project: Trembita
+Current version: 0.7.2-SNAPSHOT
+Scala version: 2.11.12, 2.12.8
+---
+
+[codecov](https://codecov.io/gh/vitaliihonta/trembita)
+[build](https://travis-ci.com/vitaliihonta/trembita)
+
+## Description
+Project Trembita is a functional data pipelining library.
+It lets you query and transform your data in a purely functional, typesafe & declarative way.
+Trembita allows you to build complex transformation pipelines in which some parts are executed locally (sequentially or in parallel) and others in different environments, for instance on a Spark cluster (see below). A minimal local example is sketched right after the dependency list.
+
+```scala
+resolvers += "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots"
+libraryDependencies ++= {
+ val trembitaV = "0.7.2-SNAPSHOT"
+ Seq(
+ "com.github.vitaliihonta.trembita" %% "trembita-kernel" % trembitaV, // kernel,
+
+ "com.github.vitaliihonta.trembita" %% "trembita-cassandra-connector" % trembitaV, // cassandra
+
+ "com.github.vitaliihonta.trembita" %% "trembita-cassandra-connector-phantom" % trembitaV, // phantom
+
+ "com.github.vitaliihonta.trembita" %% "trembita-slf4j" % trembitaV // slf4j, for logging
+ )
+}
+```
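+
+With just the kernel module you can already build local pipelines. Below is a minimal sketch, assuming the `DataPipelineT` / `map` / `eval` API demonstrated in the Spark section later in this README:
+```scala
+import cats.effect.IO
+import com.github.trembita._
+
+// a lazy pipeline over a plain collection; nothing is computed until `eval`
+val pipeline = DataPipelineT[IO, Int](1, 2, 3, 4, 5)
+val doubled  = pipeline.map(_ * 2)
+doubled.eval // forces evaluation and collects the results
+```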
+
+## Core features
+
+- [Typesafe querying DSL](./examples/src/main/scala/com/examples/kernel/QLSample.scala) for data pipelines, providing a unified model and typechecking across various data sources (including collections and Spark RDDs)
+- [Purely functional stateful transformations using Finite State Machines](./examples/src/main/scala/com/examples/kernel/FSMSample.scala), a DSL for defining FSMs that can be run on various data sources (collections, Spark Datasets, Akka Streams...)
+- [Caching](./caching)
+- [Logging](./utils/logging)
+
+## Available Integrations
+- Apache Spark ([core](http://spark.apache.org/docs/latest/rdd-programming-guide.html), [SQL](http://spark.apache.org/docs/latest/sql-programming-guide.html))
+- Apache Spark [Streaming](http://spark.apache.org/docs/latest/streaming-programming-guide.html)
+- [Akka Streams](https://doc.akka.io/docs/akka/current/stream/)
+- [Cassandra](http://cassandra.apache.org/)
+- Cassandra using [phantom](https://github.com/outworkers/phantom)
+- [Infinispan](http://infinispan.org/)
+
+## Processing modules
+- [kernel](./kernel) - lazy (parallel) data pipelines, QL for grouping/aggregations and stateful computations using [Cats](https://github.com/typelevel/cats) and [Shapeless](https://github.com/milessabin/shapeless)
+
+## Data sources
+ - Any `Iterable` - just wrap your collection into `DataPipeline`
+ - [cassandra connector](./cassandra_connector) - fetch rows from your `Cassandra` database with `CassandraSource`
+ - [cassandra phantom](./cassandra_connector_phantom) - provides [Phantom](https://github.com/outworkers/phantom) library support
+ - [akka stream](./integrations/akka/streams) - allows you to create a pipeline from an Akka stream (e.g. from any data source compatible with Akka)
+ - [spark RDD / DataSet](./integrations/spark/core) - allows you to create a pipeline from an RDD / Dataset (e.g. from any non-streaming data source compatible with Spark)
+
+## Miscellaneous
+ - [trembita slf4j](./utils/logging/slf4j) - provides [slf4j](https://www.slf4j.org/) logging support. Use it with any compatible logging backend (for instance [logback](https://logback.qos.ch/))
+ - [trembita log4j](./utils/logging/log4j) - provides [log4j](https://logging.apache.org/log4j/2.x/manual/scala-api.html) logging support.
+
+## Experimental: Spark support
+### Introducing Spark pipelines
+You can run some of your transformations on a [Spark](http://spark.apache.org/) cluster.
+To do that, add the following dependencies:
+```scala
+libraryDependencies ++= Seq(
+ "com.github.vitaliihonta.trembita" %% "trembita-spark" % trembitaV,
+ "org.apache.spark" %% "spark-core" % "2.4.0" // first spark version with scala 2.12 support
+)
+```
+### Asynchronous computations in spark
+Using the Spark integration you can even run asynchronous computations on Spark with `Future`s:
+```scala
+import com.github.trembita._
+import com.github.trembita.experimental.spark._
+import org.apache.spark._
+import scala.concurrent.{ExecutionContext, Future}
+import scala.concurrent.duration._ // needed for the `5.minutes` timeout below
+import java.util.concurrent.Executors
+
+implicit val sc: SparkContext = ??? // requires implicit SparkContext in scope
+implicit val timeout: Timeout = Timeout(5.minutes) // requires implicit timeout for async operations
+implicit val ec: ExecutionContext = ???
+
+val cachedThreadPool =
+ ExecutionContext.fromExecutor(Executors.newCachedThreadPool())
+
+val numbers = DataPipelineT[Future, Int](1, 2, 3, 20, 40, 60) // some basic pipeline
+numbers
+  .to[Spark]
+  // transformations below will be executed on spark
+ .map(_ + 1)
+ .mapM { i: Int =>
+ val n = Future { i + 1 }(cachedThreadPool)
+ val b = Future {
+ val x = 1 + 2
+ x * 3
+ }
+
+ for {
+ nx <- n
+ bx <- b
+ } yield nx + bx
+ }
+ .eval // collects results into driver program
+```
+Trembita will do its best to transform the async lambda into a serializable format.
+By default, a special macro detects all references to `ExecutionContext` within the lambda you pass into `mapM`.
+All `ExecutionContext`s must be globally accessible (e.g. defined as a `def` or `val` in some object).
+If they are not, your code won't compile, with an appropriate error.
+If everything is fine, the macro creates a helper object with references to all the `ExecutionContext`s it found, makes them `@transient lazy val`s (a well-known technique) and rewrites your lambda so that all async transformations reference fields of that object.
+You can find full example [here](./examples/src/main/scala/com/examples/spark/Main.scala).
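+
+For intuition, here is a hand-written sketch of that well-known trick (illustrative only, this is not the code the macro actually generates):
+```scala
+import scala.concurrent.ExecutionContext
+import java.util.concurrent.Executors
+
+// The object itself is serializable, but the ExecutionContext is marked @transient,
+// so it is never shipped to the executors; the lazy val re-creates it on first use
+// inside each executor JVM.
+object ExecutionContexts extends Serializable {
+  @transient lazy val cachedThreadPool: ExecutionContext =
+    ExecutionContext.fromExecutor(Executors.newCachedThreadPool())
+}
+```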
+
+Happy to say that using `cats.effect.IO` on spark is also supported =)
+### FSM on Spark Datasets
+You can now define stateful transformations on a Spark Dataset using Finite State Machines.
+It's implemented using `Dataset.mapWithState`.
+Defining an FSM for Spark is as simple as defining one for a regular pipeline, except that state is preserved only per specific `key` (due to `mapWithState` limitations).
+To do so, use `fsmByKey`:
+```scala
+val pipeline: DataPipelineT[F, A, Spark] = ???
+pipeline.fsmByKey(getKey = ???)(... /* your FSM definition here */)
+```
+Full example can be found [here](./examples/src/main/scala/com/examples/spark/FSMSample.scala).
+### Typesafe QL on RDD
+See the full example [here](./examples/src/main/scala/com/examples/spark/QLExample.scala)
+### Limitations
+ - Be careful not to create closures over the `SparkContext` or `SparkSession`, because that will fail at runtime (see the snippet below)
+ - Other non-serializable resources will also fail at runtime. This will be addressed later
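+
+For example, the following lambda (building on the `numbers` pipeline above) captures `sc` in its closure, which is a generic Spark serialization pitfall rather than anything specific to Trembita:
+```scala
+// DON'T: `sc` (the SparkContext) is captured by the closure and is not serializable,
+// so the task fails at runtime when it is shipped to the executors
+numbers
+  .to[Spark]
+  .map { i =>
+    sc.broadcast(i) // illegal: SparkContext must only be used on the driver
+    i + 1
+  }
+```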
+
+### Examples
+You can find a script to run the example on a Spark cluster within Docker:
+```bash
+# in project root
+sbt trembita-examples/assembly # prepare fat jar for spark-submit
+sh examples/src/main/resources/spark/cluster/run.sh
+```
+To run the Spark FSM example in Docker, use the following script:
+```bash
+# in project root
+sbt trembita-examples/assembly # prepare fat jar for spark-submit
+sh examples/src/main/resources/spark/cluster/run_fsm.sh
+```
+
+To run the Spark QL example in Docker, use the following script:
+```bash
+# in project root
+sbt trembita-examples/assembly # prepare fat jar for spark-submit
+sh examples/src/main/resources/spark/cluster/run_ql.sh
+```
+
+Before running the QL example, please remove the [spire](https://github.com/non/spire) jars from the Spark classpath to avoid dependency conflicts.
+
+## Experimental: Akka streams support
+Trembita now supports running part of your transformations on [Akka Streams](https://doc.akka.io/docs/akka/current/stream/).
+To use it, add the following dependency:
+```scala
+libraryDependencies += "com.github.vitaliihonta.trembita" %% "trembita-akka-streams" % trembitaV
+```
+
+You can run an existing pipeline through an Akka stream or create a pipeline directly from a source:
+```scala
+import akka.stream.scaladsl._
+import akka.util.ByteString
+import akka.NotUsed
+import cats.effect.IO
+import java.nio.file.Paths
+import com.github.trembita._
+import com.github.trembita.experimental.akka._
+
+val fileLines =
+ DataPipelineT
+ .fromReprF[IO, ByteString, Akka](IO {
+ FileIO
+ .fromPath(Paths.get(getClass.getResource("/words.txt").toURI))
+ .mapMaterializedValue(_ => NotUsed)
+ })
+```
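+
+From here the pipeline can be continued with ordinary transformations, e.g. decoding the raw `ByteString` chunks (a sketch, using the same `map` shown elsewhere in this README):
+```scala
+// sketch: decode each ByteString chunk into a String and keep processing lazily
+val chunks = fileLines.map(_.utf8String)
+```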
+
+Akka streaming pipelines also support `FSM` via a custom graph stage:
+```scala
+val pipeline: DataPipelineT[IO, Int, Akka] = ???
+val stateful = pipeline.fsm(/* your FSM definition here */)
+```
+You can find full examples [here](./examples/src/main/scala/com/examples/akka).
+
+## Seamless Akka to Spark integration
+Add the following dependency if you want to run your pipeline through both Akka Streams and Spark RDDs:
+```scala
+libraryDependencies += "com.github.vitaliihonta.trembita" %% "trembita-seamless-akka-spark" % trembitaV
+```
+Its goal is to avoid additional overhead when switching between Akka and Spark.
+`Akka -> Spark` is implemented using a custom `Sink`.
+`Spark -> Akka` is implemented using `toLocalIterator`. A sketch of the switching is shown below.
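+
+A hedged sketch of what that switching could look like, assuming the same `to[...]` environment change demonstrated in the Spark section above:
+```scala
+// sketch only: moving a pipeline between the Akka and Spark backends
+val akkaPipeline: DataPipelineT[IO, Int, Akka] = ???
+val onSpark    = akkaPipeline.to[Spark] // Akka -> Spark via the custom Sink
+val backToAkka = onSpark.to[Akka]       // Spark -> Akka via toLocalIterator
+```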
+
+## Experimental: Spark streaming support
+Trembita now allows you to write `QL` and `FSM` transformations upon [Spark DStreams](https://spark.apache.org/docs/latest/streaming-programming-guide.html).
+```scala
+libraryDependencies += "com.github.vitaliihonta.trembita" %% "trembita-spark-streaming" % trembitaV
+```
+
+For examples, see [here](./examples/src/main/scala/com/examples/spark/streaming).
+Run scripts:
+- [basic](./examples/src/main/resources/spark/cluster/run_streaming.sh)
+- [FSM](./examples/src/main/resources/spark/cluster/run_streaming_fsm.sh)
+- [QL](./examples/src/main/resources/spark/cluster/run_streaming_ql.sh)
+
+## To be done
+- [x] caching
+- [x] integration with distributed streaming frameworks
+- [ ] tensorflow
+- [ ] slick
+
+## What does `trembita` mean?
+
+
+Trembita is an alpine horn made of wood, common among the Hutsuls, Ukrainian highlanders living in western Ukraine, eastern Poland, Slovakia and northern Romania. In southern Poland it is called trombita, in the north bazuna, and in central Poland ligawka.
diff --git a/assets/trembita-p.png b/assets/trembita-p.png
new file mode 100644
index 0000000..5e14a6a
Binary files /dev/null and b/assets/trembita-p.png differ
diff --git a/build.sbt b/build.sbt
new file mode 100644
index 0000000..3d6b774
--- /dev/null
+++ b/build.sbt
@@ -0,0 +1,280 @@
+import xerial.sbt.Sonatype._
+import Dependencies._
+
+lazy val snapshot: Boolean = true
+lazy val v: String = {
+ val vv = "0.7.2"
+ if (!snapshot) vv
+ else vv + "-SNAPSHOT"
+}
+
+lazy val scalaReflect = Def.setting {
+ "org.scala-lang" % "scala-reflect" % scalaVersion.value
+}
+
+organization in ThisBuild := "com.github.vitaliihonta.trembita"
+
+def sonatypeProject(id: String, base: File) =
+ Project(id, base)
+ .enablePlugins(JmhPlugin)
+ .settings(
+ name := id,
+ isSnapshot := snapshot,
+ version := v,
+ scalaVersion := `scala-2-12`,
+ crossScalaVersions := Seq(`scala-2-11`, `scala-2-12`),
+ publishTo := {
+ val nexus = "https://oss.sonatype.org/"
+ if (isSnapshot.value)
+ Some("snapshots" at nexus + "content/repositories/snapshots")
+ else
+ Some("releases" at nexus + "service/local/staging/deploy/maven2")
+ },
+ scalacOptions ++= Seq("-Ypartial-unification", "-feature"),
+ sourceDirectory in Jmh := (sourceDirectory in Test).value,
+ classDirectory in Jmh := (classDirectory in Test).value,
+ dependencyClasspath in Jmh := (dependencyClasspath in Test).value,
+ compile in Jmh := (compile in Jmh).dependsOn(compile in Test).value,
+ run in Jmh := (run in Jmh).dependsOn(Keys.compile in Jmh).evaluated,
+ resolvers += Resolver.sonatypeRepo("releases"),
+ addCompilerPlugin("org.spire-math" %% "kind-projector" % "0.9.8"),
+ libraryDependencies ++= commonDeps
+ )
+
+lazy val collection_extentions = sonatypeProject(
+ id = "collection-extensions",
+ base = file("./utils/collection_extensions")
+)
+
+lazy val kernel =
+ sonatypeProject(id = "trembita-kernel", base = file("./kernel"))
+ .dependsOn(collection_extentions)
+ .settings(libraryDependencies ++= {
+ Seq("org.scalatest" %% "scalatest" % testV % "test")
+ })
+
+lazy val cassandra_connector = sonatypeProject(
+ id = "trembita-cassandra",
+ base = file("./connectors/cassandra")
+).dependsOn(kernel)
+ .settings(libraryDependencies ++= {
+ Seq(
+ Cassandra.driver
+ )
+ })
+
+lazy val cassandra_connector_phantom =
+ sonatypeProject(id = "trembita-phantom", base = file("./connectors/phantom"))
+ .dependsOn(cassandra_connector)
+ .settings(libraryDependencies ++= {
+ Seq(
+ Cassandra.phantom % "provided",
+ Cassandra.driverExtras % "provided"
+ )
+ })
+
+lazy val logging_commons =
+ sonatypeProject(id = "trembita-logging-commons", base = file("./utils/logging/commons"))
+ .dependsOn(kernel)
+
+lazy val slf4j =
+ sonatypeProject(id = "trembita-slf4j", base = file("./utils/logging/slf4j"))
+ .dependsOn(kernel, logging_commons)
+ .settings(libraryDependencies ++= {
+ Seq(Utils.slf4j)
+ })
+
+lazy val log4j =
+ sonatypeProject(id = "trembita-log4j", base = file("./utils/logging/log4j"))
+ .dependsOn(kernel, logging_commons)
+ .settings(libraryDependencies ++= {
+ Seq(
+ Utils.log4j,
+ Utils.log4jScala
+ )
+ })
+
+lazy val trembita_spark =
+ sonatypeProject(id = "trembita-spark", base = file("./integrations/spark/core"))
+ .dependsOn(kernel)
+ .settings(
+ name := "trembita-spark",
+ version := v,
+ scalacOptions ++= Seq(
+ "-Ypartial-unification",
+ "-language:experimental.macros"
+ ),
+ libraryDependencies ++= {
+ Seq(
+ Spark.core % "provided",
+ Spark.sql % "provided",
+ Macros.resetallattrs
+ )
+ }
+ )
+
+lazy val trembita_akka_streamns =
+ sonatypeProject(
+ id = "trembita-akka-streams",
+ base = file("./integrations/akka/streams")
+ ).dependsOn(kernel)
+ .settings(
+ name := "trembita-akka-streams",
+ version := v,
+ scalacOptions ++= Seq("-Ypartial-unification"),
+ libraryDependencies ++= {
+ Seq(
+ Akka.actors,
+ Akka.streams,
+ Akka.testkit
+ )
+ }
+ )
+
+lazy val seamless_akka_spark =
+ sonatypeProject(
+ id = "trembita-seamless-akka-spark",
+ base = file("./integrations/seamless/akka-spark")
+ ).dependsOn(kernel, trembita_akka_streamns, trembita_spark)
+ .settings(
+ name := "trembita-seamless-akka-spark",
+ version := v,
+ scalacOptions ++= Seq("-Ypartial-unification"),
+ libraryDependencies ++= Seq(
+ Spark.core % "provided",
+ Spark.sql % "provided"
+ ),
+ addCompilerPlugin("org.spire-math" %% "kind-projector" % "0.9.8")
+ )
+
+lazy val trembita_spark_streaming =
+ sonatypeProject(id = "trembita-spark-streaming", base = file("./integrations/spark/streaming"))
+ .dependsOn(kernel, trembita_spark)
+ .settings(
+ name := "trembita-spark-streaming",
+ version := v,
+ scalacOptions ++= Seq(
+ "-Ypartial-unification",
+ "-language:experimental.macros"
+ ),
+ libraryDependencies ++= {
+ Seq(
+ Spark.core % "provided",
+ Spark.sql % "provided",
+ Spark.streaming % "provided"
+ )
+ }
+ )
+
+lazy val trembita_caching =
+ sonatypeProject(id = "trembita-caching", base = file("./caching/kernel"))
+ .dependsOn(kernel)
+ .settings(
+ name := "trembita-caching",
+ version := v
+ )
+
+lazy val trembita_caching_infinispan =
+ sonatypeProject(id = "trembita-caching-infinispan", base = file("./caching/infinispan"))
+ .dependsOn(trembita_caching)
+ .settings(
+ name := "trembita-caching-infinispan",
+ version := v,
+ libraryDependencies ++= Seq(
+ Infinispan.core,
+ Infinispan.commons,
+ ScalaCompat.java8compat,
+ Testing.mockito % "test"
+ )
+ )
+
+lazy val examples = Project(id = "trembita-examples", base = file("./examples"))
+ .dependsOn(
+ collection_extentions,
+ kernel,
+ slf4j,
+ cassandra_connector,
+ cassandra_connector_phantom,
+ trembita_spark,
+ trembita_akka_streamns,
+ seamless_akka_spark,
+ trembita_spark_streaming,
+ trembita_caching,
+ trembita_caching_infinispan
+ )
+ .settings(
+ name := "trembita-examples",
+ version := v,
+ scalacOptions += "-Ypartial-unification",
+ scalaVersion := `scala-2-12`,
+ crossScalaVersions := Seq(`scala-2-11`, `scala-2-12`),
+ isSnapshot := snapshot,
+ skip in publish := true,
+ publish := {},
+ publishLocal := {},
+ addCompilerPlugin(
+ "org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full
+ ),
+ addCompilerPlugin("org.spire-math" %% "kind-projector" % "0.9.8"),
+ libraryDependencies ++= {
+ Seq(
+ Cassandra.driver,
+ Cassandra.driverExtras,
+ Cassandra.phantom,
+ Spark.core % "provided",
+ Spark.sql % "provided",
+ Spark.streaming % "provided",
+ Utils.console,
+ Akka.csv,
+ Akka.http,
+ Infinispan.hotrod,
+ Sttp.core,
+ Sttp.catsBackend,
+ Sttp.asyncHttp
+ ).map(_ exclude ("org.slf4j", "log4j-over-slf4j"))
+ },
+ test in assembly := {},
+ mainClass in assembly := Some("com.examples.spark.Main"),
+ assemblyJarName in assembly := "trembita-spark.jar",
+ assemblyMergeStrategy in assembly := {
+ case m if m.toLowerCase.endsWith("manifest.mf") => MergeStrategy.discard
+ case m if m.toLowerCase.matches("meta-inf.*\\.sf$") =>
+ MergeStrategy.discard
+ case "log4j.properties" => MergeStrategy.discard
+ case m if m.toLowerCase.startsWith("meta-inf/services/") =>
+ MergeStrategy.filterDistinctLines
+ case "reference.conf" => MergeStrategy.concat
+ case m if m endsWith ".conf" => MergeStrategy.concat
+ case _ => MergeStrategy.first
+ }
+ )
+
+lazy val root = Project(id = "trembita", base = file("."))
+ .aggregate(
+ collection_extentions,
+ kernel,
+ slf4j,
+ cassandra_connector,
+ cassandra_connector_phantom,
+ trembita_spark,
+ trembita_akka_streamns,
+ seamless_akka_spark,
+ trembita_spark_streaming,
+ trembita_caching,
+ trembita_caching_infinispan,
+ log4j,
+ logging_commons
+ )
+ .settings(
+ name := "trembita",
+ version := v,
+ scalaVersion := `scala-2-12`,
+ crossScalaVersions := Seq(`scala-2-11`, `scala-2-12`),
+ scalacOptions += "-Ypartial-unification",
+ isSnapshot := snapshot,
+ skip in publish := true,
+ publish := {},
+ publishLocal := {},
+ coverageExcludedPackages := ".*operations.*",
+ coverageExcludedFiles := ".*orderingInstances | .*arrows* | .*ToCaseClass*"
+ )
diff --git a/project/Dependencies.scala b/project/Dependencies.scala
new file mode 100644
index 0000000..9874ad3
--- /dev/null
+++ b/project/Dependencies.scala
@@ -0,0 +1,91 @@
+import sbt._
+
+object Dependencies {
+ val `scala-2-12` = "2.12.8"
+ val `scala-2-11` = "2.11.12"
+ val testV = "3.0.4"
+ val catsEffectsV = "1.1.0"
+ val shapelessV = "2.3.3"
+ val spireV = "0.16.0"
+ val cassandraDriverV = "3.6.0"
+ val phantomV = "2.29.0"
+ val slf4jV = "1.7.25"
+ val sparkV = "2.4.0"
+ val akkaV = "2.5.19"
+ val akkaHttpV = "10.1.6"
+ val alpakkaV = "0.8"
+ val catsConsoleV = "0.5"
+ val infinispanV = "9.4.5.Final"
+ val scalaJava8CompatV = "0.9.0"
+ val mockitoV = "2.23.4"
+ val sttpV = "1.5.2"
+ val log4jV = "2.11.0"
+ val log4jScalaV = "11.0"
+
+ object Testing {
+ val scalastic = "org.scalactic" %% "scalactic" % testV withSources ()
+ val scalatest = "org.scalatest" %% "scalatest" % testV withSources ()
+ val mockito = "org.mockito" % "mockito-core" % mockitoV withSources ()
+ }
+
+ object Typelevel {
+ val catsEffect = "org.typelevel" %% "cats-effect" % catsEffectsV withSources ()
+ val shapeless = "com.chuusai" %% "shapeless" % shapelessV withSources ()
+ val spire = "org.typelevel" %% "spire" % spireV withSources ()
+ }
+
+ object Cassandra {
+ val driver = "com.datastax.cassandra" % "cassandra-driver-core" % cassandraDriverV withSources ()
+ val driverExtras = "com.datastax.cassandra" % "cassandra-driver-extras" % cassandraDriverV withSources ()
+ val phantom = "com.outworkers" %% "phantom-jdk8" % phantomV withSources ()
+ }
+
+ object Utils {
+ val slf4j = "org.slf4j" % "slf4j-api" % slf4jV withSources ()
+ val log4j = "org.apache.logging.log4j" % "log4j-api" % log4jV withSources ()
+ val log4jScala = "org.apache.logging.log4j" %% "log4j-api-scala" % log4jScalaV withSources ()
+ val console = "com.github.gvolpe" %% "console4cats" % catsConsoleV withSources ()
+ }
+
+ object Macros {
+ val resetallattrs = "org.scalamacros" %% "resetallattrs" % "1.0.0" withSources ()
+ }
+
+ object Spark {
+ val core = "org.apache.spark" %% "spark-core" % sparkV withSources ()
+ val sql = "org.apache.spark" %% "spark-sql" % sparkV withSources ()
+ val streaming = "org.apache.spark" %% "spark-streaming" % sparkV withSources ()
+ }
+
+ object Akka {
+ val actors = "com.typesafe.akka" %% "akka-actor" % akkaV withSources ()
+ val streams = "com.typesafe.akka" %% "akka-stream" % akkaV withSources ()
+ val http = "com.typesafe.akka" %% "akka-http" % akkaHttpV withSources ()
+ val csv = "com.lightbend.akka" %% "akka-stream-alpakka-csv" % alpakkaV withSources ()
+ val testkit = "com.typesafe.akka" %% "akka-testkit" % akkaV % "test" withSources ()
+ }
+
+ object Infinispan {
+ val core = "org.infinispan" % "infinispan-core" % infinispanV withSources ()
+ val commons = "org.infinispan" % "infinispan-commons" % infinispanV withSources ()
+ val hotrod = "org.infinispan" % "infinispan-client-hotrod" % infinispanV withSources ()
+ }
+
+ object ScalaCompat {
+ val java8compat = "org.scala-lang.modules" %% "scala-java8-compat" % scalaJava8CompatV withSources ()
+ }
+
+ object Sttp {
+ val core = "com.softwaremill.sttp" %% "core" % sttpV
+ val asyncHttp = "com.softwaremill.sttp" %% "async-http-client-backend" % sttpV
+ val catsBackend = "com.softwaremill.sttp" %% "async-http-client-backend-cats" % sttpV
+ }
+
+ val commonDeps = Seq(
+ Testing.scalastic,
+ Testing.scalatest % "test",
+ Typelevel.catsEffect,
+ Typelevel.shapeless,
+ Typelevel.spire
+ )
+}
diff --git a/project/build.properties b/project/build.properties
new file mode 100644
index 0000000..72f9028
--- /dev/null
+++ b/project/build.properties
@@ -0,0 +1 @@
+sbt.version=1.2.7
diff --git a/project/plugins.sbt b/project/plugins.sbt
new file mode 100644
index 0000000..affb8c7
--- /dev/null
+++ b/project/plugins.sbt
@@ -0,0 +1,6 @@
+addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.3.3")
+addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.0")
+addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0")
+addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1")
+addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.2")
+addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.8")
\ No newline at end of file
diff --git a/sonatype.sbt b/sonatype.sbt
new file mode 100644
index 0000000..49f8313
--- /dev/null
+++ b/sonatype.sbt
@@ -0,0 +1,43 @@
+import xerial.sbt.Sonatype._
+
+import scala.io.StdIn
+
+val password = sys.env
+ .get("SONATYPE_PASSWORD")
+ .orElse(Option(System.getProperty("SONATYPE_PASSWORD")))
+ .getOrElse(StdIn.readLine("Enter SONATYPE_PASSWORD: "))
+
+credentials += Credentials(
+ "Sonatype Nexus Repository Manager",
+ "oss.sonatype.org",
+ "vitalii-honta",
+ s"Murcielago@$password"
+)
+
+sonatypeProfileName := "com.github"
+
+publishMavenStyle := true
+pgpPassphrase := Some(password.toCharArray)
+licenses := Seq(
+ "APL2" -> url("https://github.com/vitaliihonta/trembita/blob/master/LICENSE")
+)
+
+homepage := Some(url("https://github.com/vitaliihonta/trembita"))
+scmInfo := Some(
+ ScmInfo(
+ url("https://github.com/vitaliihonta/trembita"),
+ "scm:git@github.com:vitaliihonta/trembita.git"
+ )
+)
+
+developers := List(
+ Developer(
+ id = "vitliihonta",
+ name = "Vitalii Honta",
+ email = "vitaliy.honta@gmail.com",
+ url = url("https://github.com/vitaliihonta/")
+ )
+)
+
+useGpg := true
+pgpReadOnly := false