diff --git a/.java-version b/.java-version
new file mode 100644
index 0000000..6259340
--- /dev/null
+++ b/.java-version
@@ -0,0 +1 @@
+1.8
diff --git a/.travis.yml b/.travis.yml
index 486dc6b..5019057 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -8,18 +8,28 @@ scala:
jdk:
- oraclejdk7
- oraclejdk8
+
matrix:
exclude:
- scala: 2.12.1
jdk: oraclejdk7
+
services:
- docker
before_install:
- sudo service memcached stop
- docker pull memcached
- docker run -d -p 127.0.0.1:11211:11211 memcached memcached
-script: "sbt clean coverage test"
-after_success: "sbt coverageReport coveralls"
+
+env:
+ global:
+ - MAIN_SCALA_VERSION=2.11.8
+
+script:
+ - project/travis-build.sh
+after_success:
+ - project/travis-post-build.sh
+
cache:
directories:
- $HOME/.sbt/0.13
@@ -27,6 +37,7 @@ cache:
- $HOME/.sbt/cache
- $HOME/.sbt/launchers
- $HOME/.ivy2
+
before_cache:
- du -h -d 1 $HOME/.ivy2/
- du -h -d 2 $HOME/.sbt/
diff --git a/README.md b/README.md
index d1cde98..e6a0b5c 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
# Shade - Memcached Client for Scala
[](https://travis-ci.org/monix/shade)
-[](https://coveralls.io/github/alexandru/shade?branch=master)
+[](https://codecov.io/gh/monix/shade?branch=master)
[](https://gitter.im/monix/shade?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
## Overview
diff --git a/benchmarking/src/main/scala/shade/benchmarks/ExistingKeyOps.scala b/benchmarking/src/main/scala/shade/benchmarks/ExistingKeyOps.scala
deleted file mode 100644
index db439ac..0000000
--- a/benchmarking/src/main/scala/shade/benchmarks/ExistingKeyOps.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-package shade.benchmarks
-
-import org.openjdk.jmh.annotations.{Benchmark, Setup}
-import org.openjdk.jmh.infra.Blackhole
-
-import scala.concurrent.duration._
-
-class ExistingKeyOps extends MemcachedBase {
-
- val key: String = "existing"
- val duration: FiniteDuration = 1.day
-
- @Setup
- def prepare(): Unit = {
- memcached.set(key, 10L, duration)
- }
-
- @Benchmark
- def get(bh: Blackhole): Unit = bh.consume {
- memcached.awaitGet[String](key)
- }
-
- @Benchmark
- def set(bh: Blackhole): Unit = bh.consume{
- memcached.awaitSet(key, 100L, duration)
- }
-
- @Benchmark
- def delete(bh: Blackhole): Unit = bh.consume {
- memcached.awaitDelete(key)
- }
-
-}
\ No newline at end of file
diff --git a/benchmarking/src/main/scala/shade/benchmarks/IncDecrOps.scala b/benchmarking/src/main/scala/shade/benchmarks/IncDecrOps.scala
deleted file mode 100644
index f44ec8e..0000000
--- a/benchmarking/src/main/scala/shade/benchmarks/IncDecrOps.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package shade.benchmarks
-
-
-import scala.concurrent.duration._
-import org.openjdk.jmh.annotations._
-import org.openjdk.jmh.infra.Blackhole
-
-class IncDecrOps extends MemcachedBase {
-
- val key: String = "incr-decr"
- val duration: FiniteDuration = 1.day
-
- @Setup
- def prepare(): Unit = {
- memcached.awaitSet(key, 1E10.toLong.toString, duration)
- }
-
- @Benchmark
- def increment(bh: Blackhole): Unit = bh.consume{
- memcached.awaitIncrement(key, 1L, None, duration)
- }
-
- @Benchmark
- def decrement(bh: Blackhole): Unit = bh.consume {
- memcached.awaitDecrement(key, 1L, None, duration)
- }
-
-}
\ No newline at end of file
diff --git a/benchmarking/src/main/scala/shade/benchmarks/MemcachedBase.scala b/benchmarking/src/main/scala/shade/benchmarks/MemcachedBase.scala
deleted file mode 100644
index 2281f07..0000000
--- a/benchmarking/src/main/scala/shade/benchmarks/MemcachedBase.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-package shade.benchmarks
-
-
-import java.util.concurrent.TimeUnit
-
-import org.openjdk.jmh.annotations._
-import shade.memcached.{Configuration, FailureMode, Memcached, Protocol}
-
-import scala.concurrent.ExecutionContext.global
-import scala.concurrent.duration._
-
-/**
- * Base class for benchmarks that need an instance of [[Memcached]]
- */
-@State(Scope.Thread)
-@BenchmarkMode(Array(Mode.AverageTime))
-@OutputTimeUnit(TimeUnit.NANOSECONDS)
-abstract class MemcachedBase {
-
- val memcached: Memcached = {
- val defaultConfig = Configuration(
- addresses = "127.0.0.1:11211",
- authentication = None,
- keysPrefix = Some("my-benchmarks"),
- protocol = Protocol.Binary,
- failureMode = FailureMode.Retry,
- operationTimeout = 15.seconds
- )
- Memcached(defaultConfig)(global)
- }
-
-}
diff --git a/benchmarking/src/main/scala/shade/benchmarks/NonExistingKeyOps.scala b/benchmarking/src/main/scala/shade/benchmarks/NonExistingKeyOps.scala
deleted file mode 100644
index 15a4638..0000000
--- a/benchmarking/src/main/scala/shade/benchmarks/NonExistingKeyOps.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-package shade.benchmarks
-
-import scala.concurrent.duration._
-import org.openjdk.jmh.annotations._
-import org.openjdk.jmh.infra.Blackhole
-
-class NonExistingKeyOps extends MemcachedBase {
-
- val key: String = "non-existing"
- val duration: FiniteDuration = 1.day
-
- @Setup
- def prepare(): Unit = memcached.delete(key)
-
- @Benchmark
- def get(bh: Blackhole): Unit = bh.consume {
- memcached.awaitGet[String](key)
- }
-
- @Benchmark
- def set(bh: Blackhole): Unit = bh.consume {
- memcached.awaitSet(key, 1L, duration)
- }
-
- @Benchmark
- def delete(bh: Blackhole): Unit = bh.consume {
- memcached.awaitDelete(key)
- }
-}
\ No newline at end of file
diff --git a/build.sbt b/build.sbt
index 6dbe620..f859cab 100644
--- a/build.sbt
+++ b/build.sbt
@@ -1,124 +1,228 @@
-name := "shade"
+import com.typesafe.sbt.pgp.PgpKeys
-version := "1.9.2"
+import scala.xml.Elem
+import scala.xml.transform.{RewriteRule, RuleTransformer}
-organization := "io.monix"
+val monixVersion = "2.2.2"
-scalaVersion := "2.11.8"
+lazy val sharedSettings = Seq(
+ version := "1.9.2",
+ organization := "io.monix",
-crossScalaVersions := Seq("2.10.6", "2.11.8", "2.12.1")
+ scalaVersion := "2.11.8",
+ crossScalaVersions := Seq("2.10.6", "2.11.8", "2.12.1"),
+ compileOrder in ThisBuild := CompileOrder.JavaThenScala,
-compileOrder in ThisBuild := CompileOrder.JavaThenScala
-
-scalacOptions ++= {
- val baseOptions = Seq(
- "-Xfatal-warnings", // turns all warnings into errors ;-)
+ scalacOptions ++= Seq(
// warnings
"-unchecked", // able additional warnings where generated code depends on assumptions
"-deprecation", // emit warning for usages of deprecated APIs
- "-feature", // emit warning usages of features that should be imported explicitly
+ "-feature", // emit warning usages of features that should be imported explicitly
+ // Features enabled by default
+ "-language:higherKinds",
+ "-language:implicitConversions",
+ "-language:experimental.macros",
// possibly deprecated options
"-Ywarn-dead-code",
"-Ywarn-inaccessible"
- )
- CrossVersion.partialVersion(scalaVersion.value) match {
- case Some((2, majorVersion)) if majorVersion >= 12 => baseOptions
- case _ => baseOptions :+ "-target:jvm-1.6" // generates code with the Java 6 class format
- }
-}
-
-// version specific compiler options
-scalacOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match {
- case Some((2, majorVersion)) if majorVersion >= 11 =>
- Seq(
- // enables linter options
- "-Xlint:adapted-args", // warn if an argument list is modified to match the receiver
- "-Xlint:nullary-unit", // warn when nullary methods return Unit
- "-Xlint:inaccessible", // warn about inaccessible types in method signatures
- "-Xlint:nullary-override", // warn when non-nullary `def f()' overrides nullary `def f'
- "-Xlint:infer-any", // warn when a type argument is inferred to be `Any`
- "-Xlint:missing-interpolator", // a string literal appears to be missing an interpolator id
- "-Xlint:doc-detached", // a ScalaDoc comment appears to be detached from its element
- "-Xlint:private-shadow", // a private field (or class parameter) shadows a superclass field
- "-Xlint:type-parameter-shadow", // a local type parameter shadows a type already in scope
- "-Xlint:poly-implicit-overload", // parameterized overloaded implicit methods are not visible as view bounds
- "-Xlint:option-implicit", // Option.apply used implicit view
- "-Xlint:delayedinit-select", // Selecting member of DelayedInit
- "-Xlint:by-name-right-associative", // By-name parameter of right associative operator
- "-Xlint:package-object-classes", // Class or object defined in package object
- "-Xlint:unsound-match" // Pattern match may not be typesafe
- )
- case _ =>
- Seq.empty
-})
-
-// Turning off fatal warnings for ScalaDoc, otherwise we can't release.
-scalacOptions in (Compile, doc) ~= (_ filterNot (_ == "-Xfatal-warnings"))
-
-resolvers ++= Seq(
- "Typesafe Releases" at "http://repo.typesafe.com/typesafe/releases",
- "Spy" at "http://files.couchbase.com/maven2/",
- Resolver.sonatypeRepo("snapshots")
-)
-
-libraryDependencies ++= Seq(
- "net.spy" % "spymemcached" % "2.12.2",
- "org.slf4j" % "slf4j-api" % "1.7.23",
- "io.monix" %% "monix-eval" % "2.2.2",
- "ch.qos.logback" % "logback-classic" % "1.1.7" % Test,
- "org.scalatest" %% "scalatest" % "3.0.1" % Test,
- "org.scalacheck" %% "scalacheck" % "1.13.4" % Test
+ ),
+
+ // Targeting Java 6, but only for Scala <= 2.11
+ javacOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match {
+ case Some((2, majorVersion)) if majorVersion <= 11 =>
+ // generates code with the Java 6 class format
+ Seq("-source", "1.6", "-target", "1.6")
+ case _ =>
+ // For 2.12 we are targeting the Java 8 class format
+ Seq("-source", "1.8", "-target", "1.8")
+ }),
+ scalacOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match {
+ case Some((2, majorVersion)) if majorVersion <= 11 =>
+ // generates code with the Java 6 class format
+ Seq("-target:jvm-1.6")
+ case _ =>
+ // For 2.12 we are targeting the Java 8 class format
+ Seq.empty
+ }),
+
+ // Linter
+ scalacOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match {
+ case Some((2, majorVersion)) if majorVersion >= 11 =>
+ Seq(
+ // Turns all warnings into errors ;-)
+ "-Xfatal-warnings",
+ // Enables linter options
+ "-Xlint:adapted-args", // warn if an argument list is modified to match the receiver
+ "-Xlint:nullary-unit", // warn when nullary methods return Unit
+ "-Xlint:inaccessible", // warn about inaccessible types in method signatures
+ "-Xlint:nullary-override", // warn when non-nullary `def f()' overrides nullary `def f'
+ "-Xlint:infer-any", // warn when a type argument is inferred to be `Any`
+ "-Xlint:missing-interpolator", // a string literal appears to be missing an interpolator id
+ "-Xlint:doc-detached", // a ScalaDoc comment appears to be detached from its element
+ "-Xlint:private-shadow", // a private field (or class parameter) shadows a superclass field
+ "-Xlint:type-parameter-shadow", // a local type parameter shadows a type already in scope
+ "-Xlint:poly-implicit-overload", // parameterized overloaded implicit methods are not visible as view bounds
+ "-Xlint:option-implicit", // Option.apply used implicit view
+ "-Xlint:delayedinit-select", // Selecting member of DelayedInit
+ "-Xlint:by-name-right-associative", // By-name parameter of right associative operator
+ "-Xlint:package-object-classes", // Class or object defined in package object
+ "-Xlint:unsound-match" // Pattern match may not be typesafe
+ )
+ case _ =>
+ Seq.empty
+ }),
+
+ // For warning against unused imports
+ scalacOptions ++= {
+ CrossVersion.partialVersion(scalaVersion.value) match {
+ case Some((2, 10)) =>
+ Seq()
+ case Some((2, n)) if n >= 11 =>
+ Seq("-Ywarn-unused-import")
+ }
+ },
+ scalacOptions in (Compile, console) ~= {_.filterNot("-Ywarn-unused-import" == _)},
+ scalacOptions in (Test, console) ~= {_.filterNot("-Ywarn-unused-import" == _)},
+
+ // Common dependencies
+
+ resolvers ++= Seq(
+ "Typesafe Releases" at "http://repo.typesafe.com/typesafe/releases",
+ //"Spy" at "http://files.couchbase.com/maven2/",
+ Resolver.sonatypeRepo("snapshots")
+ ),
+
+ testFrameworks := Seq(new TestFramework("minitest.runner.Framework")),
+ libraryDependencies ++= Seq(
+ "io.monix" %%% "minitest-laws" % "0.27" % Test
+ ),
+
+ // Trying to disable parallel testing
+ parallelExecution in Test := false,
+ parallelExecution in IntegrationTest := false,
+ testForkedParallel in Test := false,
+ testForkedParallel in IntegrationTest := false,
+ concurrentRestrictions in Global += Tags.limit(Tags.Test, 1),
+
+ // -- Settings meant for deployment on oss.sonatype.org
+
+ useGpg := true,
+ useGpgAgent := true,
+ usePgpKeyHex("2673B174C4071B0E"),
+
+ publishMavenStyle := true,
+ releaseCrossBuild := true,
+ releasePublishArtifactsAction := PgpKeys.publishSigned.value,
+
+ publishTo := {
+ val nexus = "https://oss.sonatype.org/"
+ if (isSnapshot.value)
+ Some("snapshots" at nexus + "content/repositories/snapshots")
+ else
+ Some("releases" at nexus + "service/local/staging/deploy/maven2")
+ },
+
+ publishArtifact in Test := false,
+ pomIncludeRepository := { _ => false }, // removes optional dependencies
+
+ // For evicting Scoverage out of the generated POM
+ // See: https://github.com/scoverage/sbt-scoverage/issues/153
+ pomPostProcess := { (node: xml.Node) =>
+ new RuleTransformer(new RewriteRule {
+ override def transform(node: xml.Node): Seq[xml.Node] = node match {
+ case e: Elem
+ if e.label == "dependency" && e.child.exists(child => child.label == "groupId" && child.text == "org.scoverage") => Nil
+ case _ => Seq(node)
+ }
+ }).transform(node).head
+ },
+
+  pomExtra in ThisBuild :=
+    <url>https://github.com/monix/shade</url>
+    <licenses>
+      <license>
+        <name>The MIT License</name>
+        <url>http://opensource.org/licenses/MIT</url>
+        <distribution>repo</distribution>
+      </license>
+    </licenses>
+    <scm>
+      <url>git@github.com:monix/shade.git</url>
+      <connection>scm:git:git@github.com:monix/shade.git</connection>
+    </scm>
+    <developers>
+      <developer>
+        <id>alex_ndc</id>
+        <name>Alexandru Nedelcu</name>
+        <url>https://alexn.org</url>
+      </developer>
+    </developers>
)
-libraryDependencies += ("org.scala-lang" % "scala-reflect" % scalaVersion.value % "compile")
-
-// -- Settings meant for deployment on oss.sonatype.org
+lazy val cmdlineProfile =
+ sys.props.getOrElse("sbt.profile", default = "")
-useGpg := true
-useGpgAgent := true
-usePgpKeyHex("2673B174C4071B0E")
+def profile: Project ⇒ Project = pr => cmdlineProfile match {
+ case "coverage" => pr
+ case _ => pr.disablePlugins(scoverage.ScoverageSbtPlugin)
+}
-publishMavenStyle := true
+lazy val doNotPublishArtifact = Seq(
+ publishArtifact := false,
+ publishArtifact in (Compile, packageDoc) := false,
+ publishArtifact in (Compile, packageSrc) := false,
+ publishArtifact in (Compile, packageBin) := false
+)
-publishTo := {
- val nexus = "https://oss.sonatype.org/"
- if (isSnapshot.value)
- Some("snapshots" at nexus + "content/repositories/snapshots")
- else
- Some("releases" at nexus + "service/local/staging/deploy/maven2")
-}
+lazy val crossSettings = sharedSettings ++ Seq(
+ unmanagedSourceDirectories in Compile += {
+ baseDirectory.value.getParentFile / "shared" / "src" / "main" / "scala"
+ },
+ unmanagedSourceDirectories in Test += {
+ baseDirectory.value.getParentFile / "shared" / "src" / "test" / "scala"
+ }
+)
-publishArtifact in Test := false
-pomIncludeRepository := { _ => false } // removes optional dependencies
-
-scalariformSettings
-
-pomExtra in ThisBuild :=
-  <url>https://github.com/monix/shade</url>
-  <licenses>
-    <license>
-      <name>The MIT License</name>
-      <url>http://opensource.org/licenses/MIT</url>
-      <distribution>repo</distribution>
-    </license>
-  </licenses>
-  <scm>
-    <url>git@github.com:monix/shade.git</url>
-    <connection>scm:git:git@github.com:monix/shade.git</connection>
-  </scm>
-  <developers>
-    <developer>
-      <id>alex_ndc</id>
-      <name>Alexandru Nedelcu</name>
-      <url>https://alexn.org</url>
-    </developer>
-  </developers>
+lazy val scalaJSSettings = Seq(
+ coverageExcludedFiles := ".*"
+)
// Multi-project-related
-lazy val root = project in file(".")
+lazy val shade = project.in(file("."))
+ .configure(profile)
+ .aggregate(localJVM, localJS, memcached)
+ .settings(sharedSettings)
+ .settings(doNotPublishArtifact)
+ .settings(name := "shade")
+
+lazy val localCommon = crossSettings ++ sharedSettings ++ Seq(
+ name := "shade-local",
+ libraryDependencies ++= Seq(
+ "io.monix" %%% "monix-eval" % monixVersion
+ )
+)
-lazy val benchmarking = (project in file("benchmarking"))
- .enablePlugins(JmhPlugin)
- .settings(libraryDependencies += "org.slf4j" % "slf4j-simple" % "1.7.21")
- .dependsOn(root)
+lazy val localJVM = project.in(file("shade-local/jvm"))
+ .configure(profile)
+ .settings(localCommon)
+
+lazy val localJS = project.in(file("shade-local/js"))
+ .settings(localCommon)
+ .settings(scalaJSSettings)
+ .enablePlugins(ScalaJSPlugin)
+
+lazy val memcached = project.in(file("shade-memcached"))
+ .configure(profile)
+ .dependsOn(localJVM)
+ .settings(sharedSettings)
+ .settings(Seq(
+ name := "shade-memcached",
+ libraryDependencies ++= Seq(
+ "net.spy" % "spymemcached" % "2.12.2",
+ "org.slf4j" % "slf4j-api" % "1.7.23",
+ "ch.qos.logback" % "logback-classic" % "1.1.7" % Test
+ ),
+ // Will trigger serialization error if not forked
+ fork := true
+ ))
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 939dc20..5a6fd23 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -1,11 +1,8 @@
resolvers += Classpaths.sbtPluginReleases
-addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
-
-addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.3.0")
-
-addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.0")
-
-addSbtPlugin("org.scoverage" % "sbt-coveralls" % "1.1.0")
-
-addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.18")
\ No newline at end of file
+addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
+addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.0")
+addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.18")
+addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.14")
+addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.13")
+addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.4")
diff --git a/project/travis-build.sh b/project/travis-build.sh
new file mode 100755
index 0000000..f509f94
--- /dev/null
+++ b/project/travis-build.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+set -e
+
+cd `dirname $0`/..
+
+if [ -z "$MAIN_SCALA_VERSION" ]; then
+ >&2 echo "Environment MAIN_SCALA_VERSION is not set. Check .travis.yml."
+ exit 1
+elif [ -z "$TRAVIS_SCALA_VERSION" ]; then
+ >&2 echo "Environment TRAVIS_SCALA_VERSION is not set."
+ exit 1
+else
+ echo
+ echo "TRAVIS_SCALA_VERSION=$TRAVIS_SCALA_VERSION"
+ echo "MAIN_SCALA_VERSION=$MAIN_SCALA_VERSION"
+fi
+
+INIT=";++$TRAVIS_SCALA_VERSION;clean"
+COMPILE="test:compile"
+TEST="test"
+
+if [ "$TRAVIS_SCALA_VERSION" = "$MAIN_SCALA_VERSION" ]; then
+ COMMAND="$INIT;coverage;$COMPILE;$TEST"
+ echo
+ echo "Executing tests (with coverage): sbt -Dsbt.profile=coverage $COMMAND"
+ echo
+ sbt -Dsbt.profile=coverage "$COMMAND"
+else
+ COMMAND="$INIT;$COMPILE;$TEST"
+ echo
+ echo "Executing tests: sbt \"$COMMAND\""
+ echo
+ sbt "$COMMAND"
+fi
\ No newline at end of file
diff --git a/project/travis-post-build.sh b/project/travis-post-build.sh
new file mode 100755
index 0000000..06bc537
--- /dev/null
+++ b/project/travis-post-build.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+
+set -e
+
+cd `dirname $0`/..
+
+if [ -z "$MAIN_SCALA_VERSION" ]; then
+ >&2 echo "Environment MAIN_SCALA_VERSION is not set. Check .travis.yml."
+ exit 1
+elif [ -z "$TRAVIS_SCALA_VERSION" ]; then
+ >&2 echo "Environment TRAVIS_SCALA_VERSION is not set."
+ exit 1
+else
+ echo "TRAVIS_SCALA_VERSION=$TRAVIS_SCALA_VERSION"
+ echo "MAIN_SCALA_VERSION=$MAIN_SCALA_VERSION"
+fi
+
+if [ "$TRAVIS_SCALA_VERSION" = "$MAIN_SCALA_VERSION" ]; then
+ echo "Uploading coverage for Scala $TRAVIS_SCALA_VERSION"
+ sbt -Dsbt.profile=coverage ";coverageAggregate;coverageReport"
+ bash <(curl -s https://codecov.io/bash)
+else
+ echo "Skipping uploading coverage for Scala $TRAVIS_SCALA_VERSION"
+fi
+
diff --git a/shade-local/js/src/main/scala/shade/local/Platform.scala b/shade-local/js/src/main/scala/shade/local/Platform.scala
new file mode 100644
index 0000000..295d497
--- /dev/null
+++ b/shade-local/js/src/main/scala/shade/local/Platform.scala
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.local
+
+object Platform {
+ /** Returns the recommended parallelism factor.
+ *
+ * On the JVM it returns the number of processors available to the Java
+ * virtual machine, being equivalent with:
+ * {{{
+ * Runtime.getRuntime.availableProcessors()
+ * }}}
+ *
+ * On top of Javascript this is always going to be equal to `1`.
+ */
+ final val parallelism: Int = 1
+}
diff --git a/shade-local/jvm/src/main/scala/shade/local/Platform.scala b/shade-local/jvm/src/main/scala/shade/local/Platform.scala
new file mode 100644
index 0000000..5a72271
--- /dev/null
+++ b/shade-local/jvm/src/main/scala/shade/local/Platform.scala
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.local
+
+object Platform {
+ /** Returns the recommended parallelism factor.
+ *
+ * On the JVM it returns the number of processors available to the Java
+ * virtual machine, being equivalent with:
+ * {{{
+ * Runtime.getRuntime.availableProcessors()
+ * }}}
+ *
+ * On top of Javascript this is always going to be equal to `1`.
+ */
+ def parallelism: Int = {
+ val count = Runtime.getRuntime.availableProcessors()
+ math.max(1, count)
+ }
+}
diff --git a/shade-local/shared/src/main/scala/shade/local/immutable/TimeBasedCache.scala b/shade-local/shared/src/main/scala/shade/local/immutable/TimeBasedCache.scala
new file mode 100644
index 0000000..afcd4a8
--- /dev/null
+++ b/shade-local/shared/src/main/scala/shade/local/immutable/TimeBasedCache.scala
@@ -0,0 +1,416 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.local.immutable
+
+import shade.local.immutable.TimeBasedCache.{Timestamp, Value}
+import scala.annotation.tailrec
+import scala.collection.immutable.SortedMap
+import scala.concurrent.duration._
+
+/** Describes an immutable cache data-structure.
+ *
+ * It behaves much like a standard `scala.collection.immutable.Map`, but
+ * the values have an expiration timestamp attached. So the cached values
+ * might become unavailable depending on the current time, explicitly
+ * specified as `now` in the operations that need it.
+ *
+ * Example:
+ * {{{
+ * import scala.concurrent.duration._
+ * import shade.local.immutable.TimeBasedCache
+ *
+ * val now = System.currentTimeMillis()
+ *
+ * val cache = TimeBasedCache.empty[String]
+ * .set("key1", "value1", 1.minute, now)
+ * .set("key2", "value2", 1.minute, now)
+ *
+ * cache.get("key1", now)
+ * //=> Some("value1")
+ *
+ * cache.get("key1", now + 1.minute.toMillis)
+ * //=> None
+ * }}}
+ *
+ * @param keysToValues is a map that keeps the cached key and value tuples,
+ * where the [[TimeBasedCache$.Value values]] have an expiry
+ * timestamp attached
+ *
+ * @param expiryOrder is a sorted sequence of timestamps to keys mapping
+ * that represent the order in which keys need to be expired from
+ * the cache, as an optimization when doing the cleanup
+ *
+ */
+final case class TimeBasedCache[+A](
+ keysToValues: Map[String, Value[A]],
+ expiryOrder: SortedMap[Timestamp, Set[String]]) {
+
+ /** Fetches the cached value associated with a given key,
+ * returning `None` if the `key` does not exist in the cache,
+ * or if it expired (relative to `now`).
+ *
+ * @param key is the associated key for the returned cached value
+ * @param now is the current timestamp, used to determine if the
+ * cached value is expired or not
+ *
+ * @return `Some(value)` in case the value exists in the cache and
+ * isn't expired, or `None` otherwise
+ */
+ def get(key: String, now: Timestamp): Option[A] =
+ keysToValues.get(key) match {
+ case Some(r) if r.expiresAt > now => Some(r.value)
+ case _ => None
+ }
+
+ /** Fetches the cached value associated with a given key,
+ * returning the given `default` if the `key` does not exist in
+ * the cache, or if it expired (relative to `now`).
+ *
+ * @param key is the associated key for the returned cached value
+ * @param default is the value to return in case the given `key`
+ * doesn't exist, or the cached value is expired
+ * @param now is the current timestamp, used to determine if the
+ * cached value is expired or not
+ *
+ * @return the cached value, in case the associated `key` exists
+ * and it isn't expired, or otherwise the `default`
+ */
+ def getOrElse[B >: A](key: String, default: B, now: Timestamp): B =
+ keysToValues.get(key) match {
+ case Some(r) if r.expiresAt > now => r.value
+ case _ => default
+ }
+
+ /** Returns the number of non-expired keys in the cache. */
+ def size(now: Timestamp): Int =
+ keysToValues.count(_._2.expiresAt > now)
+
+ /** Returns the number of keys in the cache, both active and expired. */
+ def rawSize: Int =
+ keysToValues.size
+
+ /** Adds a new value to the cache, associated with the given `key`,
+ * but only if the given `key` doesn't already exist in the cache.
+ *
+ * @param key is the key to associate with the given value
+ * @param value is the value to persist in the cache
+ * @param expiry is the duration after which the value is expired,
+ * can be infinite (e.g. `Duration.Inf`)
+ * @param now is the current timestamp, given in milliseconds since
+ * the epoch (e.g. `System.currentTimeMillis`), used to
+ * calculate the exact timestamp when the new value will
+ * be expired
+ *
+ * @return an `(isSuccess, newState)` tuple, signaling `true` if a
+ * new key was added to the cache, or `false` if no changes
+ * have been made due to the `key` already being present and
+ * its value being active
+ */
+ def add[B >: A](key: String, value: B, expiry: Duration, now: Timestamp): (Boolean, TimeBasedCache[B]) = {
+ val ts = getExpiryTS(expiry, now)
+ val oldRawValue = keysToValues.get(key)
+ val itemExists = oldRawValue match {
+ case Some(item) if item.expiresAt > now => true
+ case _ => false
+ }
+
+ if (itemExists || ts <= now)
+ (false, this)
+ else
+ (true, buildNewState(key, value, ts, oldRawValue))
+ }
+
+ /** Sets the given `key` to the given `value` in the cache.
+ *
+ * @param key is the key to associate with the given value
+ * @param value is the value to persist in the cache
+ * @param expiry is the duration after which the value is expired,
+ * can be infinite (e.g. `Duration.Inf`)
+ * @param now is the current timestamp, given in milliseconds since
+ * the epoch (e.g. `System.currentTimeMillis`), used to
+ * calculate the exact timestamp when the new value will
+ * be expired
+ *
+ * @return a new cache containing the given `value` associated
+ * with the given `key`
+ */
+ def set[B >: A](key: String, value: B, expiry: Duration, now: Timestamp): TimeBasedCache[B] = {
+ val ts = getExpiryTS(expiry, now)
+ buildNewState(key, value, ts, keysToValues.get(key))
+ }
+
+ /** Deletes a given `key` from the cache.
+ *
+ * @param key is the `key` to delete from the cache
+ *
+ * @return `(isSuccess, newState)` tuple, by which it signals `true`
+ * in case the `key` was present in the cache with an unexpired
+ * value, a key that was deleted, or `false` in case no such
+ * key was present and so nothing was deleted
+ */
+ def delete(key: String): (Boolean, TimeBasedCache[A]) =
+ this.keysToValues.get(key) match {
+ case Some(value) =>
+ val newValues = this.keysToValues - key
+ val newOrder = {
+ val ts = value.expiresAt
+ // If expiresAt is Inf, don't even bother to delete it
+ // from expiryOrder, because it shouldn't be there!
+ if (ts == Long.MaxValue) this.expiryOrder else {
+ val set = this.expiryOrder.getOrElse(ts, Set.empty) - key
+ if (set.isEmpty) this.expiryOrder - ts
+ else this.expiryOrder.updated(ts, set)
+ }
+ }
+
+ val state = TimeBasedCache(keysToValues=newValues, expiryOrder=newOrder)
+ (true, state)
+
+ case None =>
+ (false, this)
+ }
+
+ /** Performs a compare and set operation, that updates the given `key`
+ * only if the `expected` value is equal to the cached value.
+ *
+ * @param key is the `key` to be updated
+ * @param expected is the value we expect to be in the cache, passed as
+ * `None` in case the given `key` shouldn't exist or if its associated
+ * value is expired
+ * @param update is the value to be stored for the given `key` in
+ * case of success
+ * @param expiry is the duration after which the value is expired,
+ * can be infinite (e.g. `Duration.Inf`), to be used only
+ * in case the update is a success
+ * @param now is the current timestamp, given in milliseconds since
+ * the epoch (e.g. `System.currentTimeMillis`), used to
+ * test whether the current value associated to the given
+ * `key` is expired and also to calculate the exact timestamp
+ * when the new value will be expired
+ *
+ * @return either `true` in case the operation was a success, or
+ * `false` otherwise, along with the updated cache
+ */
+ def compareAndSet[B >: A](key: String, expected: Option[B], update: B,
+ expiry: Duration, now: Timestamp): (Boolean, TimeBasedCache[B]) = {
+
+ expected match {
+ case None => add(key, update, expiry, now)
+ case Some(expectedValue) =>
+ keysToValues.get(key) match {
+ case Some(r) if r.expiresAt > now =>
+ if (r.value == expectedValue) {
+ val ts = getExpiryTS(expiry, now)
+ val newState = buildNewState(key, update, ts, keysToValues.get(key))
+ (true, newState)
+ } else {
+ (false, this)
+ }
+ case _ =>
+ (false, this)
+ }
+ }
+ }
+
+ /** Given a function, transforms and persists an update for
+ * the value associated with the given `key`, returning the
+ * updated value.
+ *
+ * The given function admits keys not already present in the
+ * cache, or with values that are expired, thus receiving
+ * `None` in such a case.
+ *
+ * @param key is the key that will have its associated value transformed
+ * @param expiry is the duration after which the new value is expired,
+ * can be infinite (e.g. `Duration.Inf`)
+ * @param now is the current timestamp, given in milliseconds since
+ * the epoch (e.g. `System.currentTimeMillis`), used to
+ * calculate the exact timestamp when the new value will
+ * be expired
+ * @param f is the transformation function, can receive `None` in
+ * case the `key` doesn't exist in the cache or if its value
+ * is expired
+ *
+ * @return the updated value along with the new cache
+ */
+ def transformAndGet[B >: A](key: String, expiry: Duration, now: Timestamp)
+ (f: Option[A] => B): (B, TimeBasedCache[B]) = {
+
+ val ts = getExpiryTS(expiry, now)
+ val oldRawValue = keysToValues.get(key)
+ val value = oldRawValue match {
+ case Some(v) if v.expiresAt > now => Some(v.value)
+ case _ => None
+ }
+
+ val newValue = f(value)
+ val update = buildNewState(key, newValue, ts, oldRawValue)
+ (newValue, update)
+ }
+
+ /** Given a function, transforms and persists an update for
+ * the value associated with the given `key`, returning the
+ * old value, prior to its update.
+ *
+ * The given function admits keys not already present in the
+ * cache, or with values that are expired, thus receiving
+ * `None` in such a case.
+ *
+ * @param key is the key that will have its associated value transformed
+ * @param expiry is the duration after which the new value is expired,
+ * can be infinite (e.g. `Duration.Inf`)
+ * @param now is the current timestamp, given in milliseconds since
+ * the epoch (e.g. `System.currentTimeMillis`), used to
+ * calculate the exact timestamp when the new value will
+ * be expired
+ * @param f is the transformation function, can receive `None` in
+ * case the `key` doesn't exist in the cache or if its value
+ * is expired
+ *
+ * @return the old value, prior to its update, along with the new cache
+ */
+ def getAndTransform[B >: A](key: String, expiry: Duration, now: Timestamp)
+ (f: Option[A] => B): (Option[B], TimeBasedCache[B]) = {
+
+ val ts = getExpiryTS(expiry, now)
+ val oldRawValue = keysToValues.get(key)
+ val value = oldRawValue match {
+ case Some(v) if v.expiresAt > now => Some(v.value)
+ case _ => None
+ }
+
+ val newValue = f(value)
+ val update = buildNewState(key, newValue, ts, oldRawValue)
+ (value, update)
+ }
+
+ /** Given a function, transforms and persists an update for
+ * the value associated with the given `key`, returning an
+ * extracted result.
+ *
+ * The given function admits keys not already present in the
+ * cache, or with values that are expired, thus receiving
+ * `None` in such a case.
+ *
+ * @param key is the key that will have its associated value transformed
+ * @param expiry is the duration after which the new value is expired,
+ * can be infinite (e.g. `Duration.Inf`)
+ * @param now is the current timestamp, given in milliseconds since
+ * the epoch (e.g. `System.currentTimeMillis`), used to
+ * calculate the exact timestamp when the new value will
+ * be expired
+ * @param f is the transformation function, can receive `None` in
+ * case the `key` doesn't exist in the cache or if its value
+ * is expired
+ *
+ * @return an extracted `R` value, along with the new cache
+ */
+ def transformAndExtract[B >: A, R](key: String, expiry: Duration, now: Timestamp)
+ (f: Option[A] => (R,B)): (R, TimeBasedCache[B]) = {
+
+ val ts = getExpiryTS(expiry, now)
+ val oldRawValue = keysToValues.get(key)
+ val value = oldRawValue match {
+ case Some(v) if v.expiresAt > now => Some(v.value)
+ case _ => None
+ }
+
+ val (extract, newValue) = f(value)
+ val update = buildNewState(key, newValue, ts, oldRawValue)
+ (extract, update)
+ }
+
+ /** Performs cleanup of the source cache, deleting keys that are expired,
+ * relative to the given `now`.
+ *
+ * @param now is the current timestamp, given in milliseconds since
+ * the epoch (e.g. `System.currentTimeMillis`), used to
+ * determine which keys are expired
+ *
+ * @return the number of keys that have been deleted from the source
+ * cache, along with the new cache that has those keys deleted
+ */
+ def cleanse(now: Timestamp): (Int, TimeBasedCache[A]) = {
+ @tailrec def loop(self: TimeBasedCache[A], now: Timestamp, acc: Int): (Int, TimeBasedCache[A]) = {
+ val order = self.expiryOrder
+ if (order.isEmpty) (acc, self) else {
+ val (ts, keys) = order.head
+
+ if (ts > now) (acc, self) else {
+ val newOrder = order - ts
+ val newMap = self.keysToValues -- keys
+ val update = TimeBasedCache(keysToValues = newMap, expiryOrder = newOrder)
+ loop(update, now, acc + keys.size)
+ }
+ }
+ }
+
+ loop(this, now, 0)
+ }
+
+ @inline
+ private def getExpiryTS(expiry: Duration, now: Timestamp): Timestamp =
+ if (expiry.isFinite()) now + expiry.toMillis
+ else Long.MaxValue
+
+ private def buildNewState[B >: A](key: String, value: B, ts: Timestamp, oldRawValue: Option[Value[B]]) = {
+ val newValues = keysToValues.updated(key, Value(value, ts))
+
+ // We might have a previous entry in the expiry order for
+ // the given key, so we need to remove it
+ val orderClean = oldRawValue match {
+ case Some(v) if v.expiresAt != ts && v.expiresAt < Long.MaxValue =>
+ this.expiryOrder.get(v.expiresAt) match {
+ case None => this.expiryOrder
+ case Some(set) =>
+ val newSet = set - key
+ if (newSet.isEmpty) this.expiryOrder - v.expiresAt
+ else this.expiryOrder.updated(v.expiresAt, newSet)
+ }
+ case _ =>
+ this.expiryOrder
+ }
+
+ // Building a new expiry order that includes the new timestamp.
+ // With optimization for `Duration.Inf`.
+ val newOrder =
+ if (ts == Long.MaxValue) orderClean else {
+ val collisionSet = orderClean.getOrElse(ts, Set.empty)
+ orderClean.updated(ts, collisionSet + key)
+ }
+
+ TimeBasedCache(keysToValues = newValues, expiryOrder = newOrder)
+ }
+}
+
+object TimeBasedCache {
+ /** Returns an empty [[TimeBasedCache]] instance. */
+ def empty[A]: TimeBasedCache[A] = emptyRef
+
+ /** Returns an empty [[TimeBasedCache]] instance. */
+ def apply[A](): TimeBasedCache[A] = emptyRef
+
+ /** Using a type-alias for `Long`, describing Unix timestamps
+ * specified in milliseconds since the epoch.
+ */
+ type Timestamp = Long
+
+ /** Represents the stored values, having an `expiresAt`
+ * timestamp attached, as a Unix timestamp, thus specified
+ * in milliseconds since the epoch.
+ */
+ final case class Value[+A](value: A, expiresAt: Timestamp)
+
+ // Empty reference reusable because of covariance.
+ private[this] val emptyRef: TimeBasedCache[Nothing] =
+ TimeBasedCache(Map.empty, SortedMap.empty)
+}
diff --git a/shade-local/shared/src/main/scala/shade/local/mutable/TimeBasedCache.scala b/shade-local/shared/src/main/scala/shade/local/mutable/TimeBasedCache.scala
new file mode 100644
index 0000000..4a78a25
--- /dev/null
+++ b/shade-local/shared/src/main/scala/shade/local/mutable/TimeBasedCache.scala
@@ -0,0 +1,428 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.local.mutable
+
+import monix.eval.Task
+import monix.execution.Scheduler
+import monix.execution.atomic.PaddingStrategy.NoPadding
+import monix.execution.atomic.{AtomicAny, PaddingStrategy}
+import shade.local.Platform
+import shade.local.immutable.{TimeBasedCache => ImmutableTimeBasedCache}
+import shade.local.immutable.TimeBasedCache.Timestamp
+import scala.annotation.tailrec
+import scala.concurrent.duration._
+import scala.concurrent.{Future, Promise}
+import scala.math.ceil
+import scala.util.control.NonFatal
+
+/** Interface for mutable cache implementations.
+ *
+ * @define addDesc Atomically persists the given `(key, value)`
+ * in the cache, but only if the `key` doesn't exist.
+ *
+ * @define addReturnDesc `true` if there was no such `key` in the cache and
+ * the persistence took place, or `false` otherwise
+ *
+ * @define setDesc Atomically updates the specified `key` with the given `value`.
+ *
+ * @define cachedFutureDesc If the given `key` exists in the cache and isn't expired
+ * then returns its associated value, otherwise atomically
+ *        executes the given `Future`, caches its result and returns it.
+ *
+ * @define cachedTaskDesc If the given `key` exists in the cache and isn't expired
+ * then returns its associated value, otherwise creates a `Task`
+ *        that upon evaluation will evaluate the given `Task`,
+ *        store its value in the cache and return it
+ *
+ * @define cachedFutureReturn the value associated with the given `key`,
+ * or that was generated in case the `key` was missing from
+ * the cache
+ *
+ * @define keyUpdateParamDesc is the key in memcached to update
+ *
+ * @define valueParamDesc is the cached value, to associate with the
+ * given `key`
+ *
+ * @define expParamDesc specifies the expiry time, can be infinite
+ * (`Duration.Inf`)
+ */
+abstract class TimeBasedCache[A] extends AutoCloseable {
+ /** Return the value associated with the given `key`.
+ *
+ * @return `Some(value)` in case there exists a `key` in
+ * the cache that isn't expired, or `None` otherwise
+ */
+ def get(key: String): Option[A]
+
+ /** Return the value associated with the given `key`,
+ * or a `default` in case the given `key` doesn't exist.
+ *
+ * In case the given `key` doesn't have an associated value,
+ * or if it is expired, then the `default` by-name parameter
+ * is returned instead.
+ */
+ def getOrElse(key: String, default: A): A
+
+ /** $addDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param value $valueParamDesc
+ * @param expiry $expParamDesc
+ *
+ * @return $addReturnDesc
+ */
+ def add(key: String, value: A, expiry: Duration): Boolean
+
+ /** $setDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param value $valueParamDesc
+ * @param expiry $expParamDesc
+ */
+ def set(key: String, value: A, expiry: Duration): Unit
+
+ /** Deletes the given `key` from the cache.
+ *
+ * @return `true` if there was a `key` in the cache that was
+ * deleted, or `false` otherwise
+ */
+ def delete(key: String): Boolean
+
+ /** $cachedFutureDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param cb is the callback to execute in case the `key` is missing
+ * @param expiry $expParamDesc
+ *
+ * @return $cachedFutureReturn
+ */
+ def cachedFuture(key: String, expiry: Duration)
+ (cb: Scheduler => Future[A]): Future[A]
+
+ /** $cachedTaskDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param task is the task to evaluate in case the `key` is missing
+ * @param expiry $expParamDesc
+ *
+ * @return $cachedFutureReturn
+ */
+ def cachedTask(key: String, expiry: Duration)(task: Task[A]): Task[A]
+
+ /** Atomic compare and set operation.
+ *
+ * @param key $keyUpdateParamDesc
+ * @param current is the current value that is expected, or `None` in case
+ * there should be no `key` currently stored in the cache
+ * @param update is the value to be persisted for the given `key`
+ * in case of success
+ * @param expiry $expParamDesc
+ */
+ def compareAndSet(key: String, current: Option[A], update: A, expiry: Duration): Boolean
+
+ /** Atomic transform and get operation.
+ *
+ * @param key $keyUpdateParamDesc
+ * @param f is the function to execute for updating the current value
+ * @param expiry $expParamDesc
+ *
+ * @return the updated value
+ */
+ def transformAndGet(key: String, expiry: Duration)(f: Option[A] => A): A
+
+ /** Atomic get and transform operation.
+ *
+ * @param key $keyUpdateParamDesc
+ * @param f is the function to execute for updating the current value
+ * @param expiry $expParamDesc
+ *
+ * @return the value associated with the given `key` before the update
+ */
+ def getAndTransform(key: String, expiry: Duration)(f: Option[A] => A): Option[A]
+
+ /** Returns the number of non-expired keys currently
+ * stored in the cache.
+ */
+ def size: Int
+
+ /** Returns the number of keys (both expired and active)
+ * currently stored in the cache.
+ */
+ def rawSize: Int
+
+ /** Task that completes when the next maintenance window has run,
+ * giving the number of items that were removed.
+ */
+ def nextCleanse: Task[Int]
+
+ /** Closes this cache and performs any pending cleanup operations. */
+ def close(): Unit
+}
+
+object TimeBasedCache {
+ /** Builds a [[TimeBasedCache]] instance.
+ *
+ * @param cleanupPeriod is the period at which to repeat the
+ * periodic cache cleanse
+ * @param distribution is the number of atomic references to use
+ * under the hood, useful in order to distribute the load
+ * @param padding is the padding strategy to use, for performance
+ * tuning, in order to avoid "false sharing"
+ * @param s is the `Scheduler` to use for scheduling the periodic
+ * cleanse or for future-related activities
+ */
+ def apply[A](
+ cleanupPeriod: FiniteDuration = 3.seconds,
+ distribution: Int = Platform.parallelism,
+ padding: PaddingStrategy = NoPadding)
+ (implicit s: Scheduler): TimeBasedCache[A] = {
+
+ require(distribution >= 1, "distribution >= 1")
+ require(cleanupPeriod > Duration.Zero, "cleanupPeriod > 0")
+
+ if (distribution == 1)
+ new SingleAtomic[A](cleanupPeriod, padding)(s)
+ else
+ new Distributed[A](distribution, cleanupPeriod, padding)
+ }
+
+ /** Implementation that distributes the load among multiple atomic references. */
+ private final class Distributed[A](distribution: Int, cleanupPeriod: FiniteDuration, ps: PaddingStrategy)
+ (implicit s: Scheduler) extends TimeBasedCache[A] {
+
+ require(distribution >= 2)
+
+ private[this] val arraySize: Int = {
+ // Rounding up to a power of two
+ val lnOf2 = scala.math.log(2)
+ val log2 = scala.math.log(distribution) / lnOf2
+ val bit = ceil(log2)
+ 1 << (if (bit > 30) 30 else bit.toInt)
+ }
+
+ private[this] val modulus = arraySize - 1
+ private[this] val array: Array[SingleAtomic[A]] =
+ Array.fill(arraySize)(new SingleAtomic(cleanupPeriod, ps))
+
+ private def cacheFor(key: String): SingleAtomic[A] =
+ array(key.hashCode & modulus)
+
+ override def get(key: String): Option[A] =
+ cacheFor(key).get(key)
+ override def getOrElse(key: String, default: A): A =
+ cacheFor(key).getOrElse(key, default)
+ override def add(key: String, value: A, expiry: Duration): Boolean =
+ cacheFor(key).add(key, value, expiry)
+ override def set(key: String, value: A, expiry: Duration): Unit =
+ cacheFor(key).set(key, value, expiry)
+ override def delete(key: String): Boolean =
+ cacheFor(key).delete(key)
+ override def cachedFuture(key: String, expiry: Duration)(cb: (Scheduler) => Future[A]): Future[A] =
+ cacheFor(key).cachedFuture(key, expiry)(cb)
+ override def cachedTask(key: String, expiry: Duration)(task: Task[A]): Task[A] =
+ cacheFor(key).cachedTask(key, expiry)(task)
+ override def compareAndSet(key: String, current: Option[A], update: A, expiry: Duration): Boolean =
+ cacheFor(key).compareAndSet(key, current, update, expiry)
+ override def transformAndGet(key: String, expiry: Duration)(f: (Option[A]) => A): A =
+ cacheFor(key).transformAndGet(key, expiry)(f)
+ override def getAndTransform(key: String, expiry: Duration)(f: (Option[A]) => A): Option[A] =
+ cacheFor(key).getAndTransform(key, expiry)(f)
+ override def size: Int =
+ array.foldLeft(0)((acc,e) => acc + e.size)
+ override def rawSize: Int =
+ array.foldLeft(0)((acc,e) => acc + e.rawSize)
+
+ override def nextCleanse: Task[Int] = {
+ val tasks = array.map(_.nextCleanse).iterator
+ Task.gather(tasks).map(_.sum)
+ }
+
+ override def close(): Unit =
+ array.foreach(_.close())
+ }
+
+ /** Implementation that piggy-backs on top of [[TimeBasedCache]]
+ * kept in an atomic reference.
+ */
+ private final class SingleAtomic[A](cleanupPeriod: FiniteDuration, ps: PaddingStrategy)
+ (implicit scheduler: Scheduler) extends TimeBasedCache[A] {
+
+ def get(key: String): Option[A] = {
+ val now = scheduler.currentTimeMillis()
+ stateRef.get.get(key, now).asInstanceOf[Option[A]]
+ }
+
+ def getOrElse(key: String, default: A): A = {
+ val now = scheduler.currentTimeMillis()
+ stateRef.get.getOrElse(key, default, now)
+ .asInstanceOf[A]
+ }
+
+ def add(key: String, value: A, expiry: Duration): Boolean = {
+ @tailrec def loop(now: Timestamp): Boolean = {
+ val current = stateRef.get
+ val (isSuccess, update) = current.add(key, value, expiry, now)
+ if (!isSuccess) false else {
+ if (stateRef.compareAndSet(current, update)) true
+ else loop(now) // retry
+ }
+ }
+
+ loop(scheduler.currentTimeMillis())
+ }
+
+ def set(key: String, value: A, expiry: Duration): Unit = {
+ @tailrec def loop(now: Timestamp): Unit = {
+ val current = stateRef.get
+ val update = current.set(key, value, expiry, now)
+ if (update ne current) {
+ if (!stateRef.compareAndSet(current, update))
+ loop(now) // retry
+ }
+ }
+
+ loop(scheduler.currentTimeMillis())
+ }
+
+ @tailrec
+ def delete(key: String): Boolean = {
+ val current = stateRef.get
+ val (isSuccess, update) = current.delete(key)
+ if (!isSuccess) false else {
+ if (stateRef.compareAndSet(current, update)) true
+ else delete(key) // retry
+ }
+ }
+
+ def cachedFuture(key: String, expiry: Duration)(f: Scheduler => Future[A]): Future[A] = {
+ @tailrec def loop(now: Timestamp): Future[A] = {
+ val current = stateRef.get
+ current.get(key, now) match {
+ case Some(future) =>
+ future.asInstanceOf[Future[A]]
+ case None =>
+ val promise = Promise[A]()
+ val update = current.set(key, promise, expiry, now)
+
+ if (!stateRef.compareAndSet(current, update))
+ loop(now) // retry
+ else {
+ try promise.tryCompleteWith(f(scheduler))
+ catch { case NonFatal(ex) => promise.failure(ex) }
+ promise.future
+ }
+ }
+ }
+
+ loop(scheduler.currentTimeMillis())
+ }
+
+ override def cachedTask(key: String, expiry: Duration)(task: Task[A]): Task[A] = {
+ @tailrec def loop(now: Timestamp): Task[A] = {
+ val current = stateRef.get
+
+ current.get(key, now) match {
+ case Some(future) =>
+ future.asInstanceOf[Task[A]]
+ case None =>
+ val cached = task.memoize
+ val update = current.set(key, cached, expiry, now)
+
+ if (!stateRef.compareAndSet(current, update))
+ loop(now) // retry
+ else
+ cached
+ }
+ }
+
+ Task.defer(loop(scheduler.currentTimeMillis()))
+ }
+
+ def compareAndSet(key: String, expected: Option[A], update: A, expiry: Duration): Boolean = {
+ @tailrec def loop(now: Timestamp): Boolean = {
+ val current = stateRef.get
+ val (isSuccess, cacheUpdate) =
+ current.compareAndSet(key, expected, update, expiry, now)
+
+ if (!isSuccess) false else {
+ if (stateRef.compareAndSet(current, cacheUpdate)) true
+ else loop(now) // retry
+ }
+ }
+
+ loop(scheduler.currentTimeMillis())
+ }
+
+ def transformAndGet(key: String, expiry: Duration)(cb: (Option[A]) => A): A = {
+ @tailrec def loop(now: Timestamp): A = {
+ val current = stateRef.get
+ val (value, update) =
+ current.transformAndGet(key, expiry, now)(cb.asInstanceOf[Option[Any] => A])
+
+ if (stateRef.compareAndSet(current, update)) value.asInstanceOf[A]
+ else loop(now) // retry
+ }
+
+ loop(scheduler.currentTimeMillis())
+ }
+
+ def getAndTransform(key: String, expiry: Duration)(cb: (Option[A]) => A): Option[A] = {
+ @tailrec def loop(now: Timestamp): Option[A] = {
+ val current = stateRef.get
+ val (value, update) =
+ current.getAndTransform(key, expiry, now)(cb.asInstanceOf[Option[Any] => A])
+
+ if (stateRef.compareAndSet(current, update)) value.asInstanceOf[Option[A]]
+ else loop(now) // retry
+ }
+
+ loop(scheduler.currentTimeMillis())
+ }
+
+ def cleanse(): Int = {
+ val difference = stateRef.transformAndExtract { current =>
+ val now = scheduler.currentTimeMillis()
+ current.cleanse(now)
+ }
+
+ val old = maintenancePromise.getAndSet(Promise())
+ old.success(difference)
+ difference
+ }
+
+ def size: Int = {
+ val ts = scheduler.currentTimeMillis()
+ stateRef.get.size(ts)
+ }
+
+ def rawSize: Int =
+ stateRef.get.rawSize
+
+ def nextCleanse: Task[Int] =
+ Task.deferFuture(maintenancePromise.get.future)
+
+ def close(): Unit = {
+ task.cancel()
+ stateRef.set(ImmutableTimeBasedCache.empty)
+ }
+
+ private[this] val task =
+ scheduler.scheduleWithFixedDelay(cleanupPeriod, cleanupPeriod) {
+ cleanse()
+ }
+
+ private[this] val maintenancePromise =
+ AtomicAny(Promise[Int]())
+ private[this] val stateRef: AtomicAny[ImmutableTimeBasedCache[Any]] =
+ AtomicAny.withPadding(ImmutableTimeBasedCache.empty, ps)
+ }
+}
diff --git a/shade-local/shared/src/test/scala/shade/local/immutable/TimeBasedCacheSuite.scala b/shade-local/shared/src/test/scala/shade/local/immutable/TimeBasedCacheSuite.scala
new file mode 100644
index 0000000..a22a757
--- /dev/null
+++ b/shade-local/shared/src/test/scala/shade/local/immutable/TimeBasedCacheSuite.scala
@@ -0,0 +1,205 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.local.immutable
+
+import minitest.SimpleTestSuite
+import scala.concurrent.duration._
+
+object TimeBasedCacheSuite extends SimpleTestSuite {
+ test("simple set and get") {
+ val now = System.currentTimeMillis()
+ val cache = TimeBasedCache.empty[String]
+ .set("key1", "value1", 1.minute, now)
+ .set("key2", "value2", 1.minute, now)
+
+ assertEquals(cache.get("key1", now), Some("value1"))
+ assertEquals(cache.get("key1", now + 59.seconds.toMillis), Some("value1"))
+ assertEquals(cache.get("key1", now + 60.seconds.toMillis), None)
+
+ assertEquals(cache.get("key2", now), Some("value2"))
+ assertEquals(cache.get("key2", now + 59.seconds.toMillis), Some("value2"))
+ assertEquals(cache.get("key2", now + 60.seconds.toMillis), None)
+
+ assertEquals(cache.keysToValues.size, 2)
+ assertEquals(cache.expiryOrder.size, 1)
+
+ val (diff1, cleansed1) = cache.cleanse(now + 59.seconds.toMillis)
+ assertEquals(diff1, 0)
+ assertEquals(cleansed1, cache)
+
+ val (diff2, cleansed2) = cache.cleanse(now + 60.seconds.toMillis)
+ assertEquals(diff2, 2)
+ assertEquals(cleansed2.keysToValues.size, 0)
+ assertEquals(cleansed2.expiryOrder.size, 0)
+ }
+
+ test("set on a duplicate") {
+ val now = System.currentTimeMillis()
+ val cache = TimeBasedCache.empty[String]
+ .set("key1", "value1", 1.minute, now - 10.seconds.toMillis)
+ .set("key2", "value2", 1.minute, now - 10.seconds.toMillis)
+ .set("key1", "value1-updated", 1.minute, now)
+
+ assertEquals(cache.get("key1", now), Some("value1-updated"))
+ assertEquals(cache.expiryOrder.size, 2)
+ assert(cache.expiryOrder(now + 50.seconds.toMillis).contains("key2"))
+ assert(cache.expiryOrder(now + 60.seconds.toMillis).contains("key1"))
+
+ val (diff1, cleansed1) = cache.cleanse(now + 50.seconds.toMillis)
+ assertEquals(diff1, 1)
+ assertEquals(cleansed1.get("key1", now), Some("value1-updated"))
+
+ val (diff2, cleansed2) = cache.cleanse(now + 60.seconds.toMillis)
+ assertEquals(diff2, 2)
+ assertEquals(cleansed2.get("key1", now), None)
+
+ val (diff3, cleansed3) = cleansed1.cleanse(now + 60.seconds.toMillis)
+ assertEquals(diff3, 1)
+ assertEquals(cleansed3.get("key1", now), None)
+ }
+
+ test("delete") {
+ val now = System.currentTimeMillis()
+ val cache = TimeBasedCache.empty[String]
+ .set("key1", "value1", 1.minute, now - 10.seconds.toMillis)
+ .set("key2", "value2", 1.minute, now - 10.seconds.toMillis)
+ .set("key3", "value3", 1.minute, now)
+
+ val (isSuccess1, updated1) = cache.delete("key1")
+ assert(isSuccess1, "isSuccess1")
+ assertEquals(updated1.get("key1", now), None)
+ assertEquals(updated1.get("key2", now), Some("value2"))
+ assertEquals(updated1.get("key3", now), Some("value3"))
+
+ val (isSuccess2, updated2) = cache.delete("key2")
+ assert(isSuccess2, "isSuccess2")
+ assertEquals(updated2.get("key1", now), Some("value1"))
+ assertEquals(updated2.get("key2", now), None)
+ assertEquals(updated2.get("key3", now), Some("value3"))
+
+ val (isSuccess3, updated3) = cache.delete("key3")
+ assert(isSuccess3, "isSuccess3")
+ assertEquals(updated3.get("key1", now), Some("value1"))
+ assertEquals(updated3.get("key2", now), Some("value2"))
+ assertEquals(updated3.get("key3", now), None)
+
+ val (isSuccess4, updated4) = cache.delete("key4")
+ assert(!isSuccess4, "!isSuccess4")
+ assertEquals(updated4, cache)
+ }
+
+ test("add") {
+ val now = System.currentTimeMillis()
+ val cache = TimeBasedCache.empty[String]
+ .set("key1", "value1", 1.minute, now - 10.seconds.toMillis)
+ .set("key2", "value2", 1.minute, now - 10.seconds.toMillis)
+ .set("key3", "value3", 1.minute, now)
+
+ val (isSuccess1, updated1) = cache.add("key4", "value4", 1.minute, now)
+ assert(isSuccess1, "isSuccess1")
+ assertEquals(updated1.get("key1", now), Some("value1"))
+ assertEquals(updated1.get("key2", now), Some("value2"))
+ assertEquals(updated1.get("key3", now), Some("value3"))
+ assertEquals(updated1.get("key4", now), Some("value4"))
+
+ val (isSuccess2, updated2) = cache.add("key1", "value1", 1.minute, now)
+ assert(!isSuccess2, "!isSuccess2")
+ assertEquals(updated2, cache)
+
+ val (isSuccess3, updated3) = cache.add("key1", "value1-updated", 1.minute, now + 1.minute.toMillis)
+ assert(isSuccess3, "isSuccess3")
+ assertEquals(updated3.get("key1", now), Some("value1-updated"))
+ assertEquals(updated3.get("key2", now), Some("value2"))
+ assertEquals(updated3.get("key3", now), Some("value3"))
+ }
+
+ test("compareAndSet") {
+ val now = System.currentTimeMillis()
+ val cache = TimeBasedCache.empty[String]
+ .set("key1", "value1", 1.minute, now - 10.seconds.toMillis)
+ .set("key2", "value2", 1.minute, now)
+
+ val (isSuccess1, updated1) = cache.compareAndSet("key3", None, "value3", 1.minute, now)
+ assert(isSuccess1, "isSuccess1")
+ assertEquals(updated1.get("key1", now), Some("value1"))
+ assertEquals(updated1.get("key2", now), Some("value2"))
+ assertEquals(updated1.get("key3", now), Some("value3"))
+
+ val (isSuccess2, updated2) = cache.compareAndSet("key3", Some("valueX"), "value3", 1.minute, now)
+ assert(!isSuccess2, "!isSuccess2")
+ assertEquals(updated2, cache)
+
+ val (isSuccess3, updated3) = cache.compareAndSet("key1", None, "value1-updated", 1.minute, now + 1.minute.toMillis)
+ assert(isSuccess3, "isSuccess3")
+ assertEquals(updated3.get("key1", now), Some("value1-updated"))
+ assertEquals(updated3.get("key2", now), Some("value2"))
+ }
+
+ test("transformAndGet") {
+ val now = System.currentTimeMillis()
+ var cache = TimeBasedCache.empty[Int]
+
+ for (i <- 0 until 10) {
+ val (value, update) = cache.transformAndGet("key1", 1.minute, now) { case None => 0; case Some(x) => x + 1 }
+ assertEquals(update.get("key1", now), Some(i))
+ assertEquals(value, i)
+ cache = update
+ }
+ }
+
+ test("getAndTransform") {
+ val now = System.currentTimeMillis()
+ var cache = TimeBasedCache.empty[Int]
+
+ for (i <- 0 until 10) {
+ val expected = if (i == 0) None else Some(i-1)
+ val (value, update) = cache.getAndTransform("key1", 1.minute, now) { case None => 0; case Some(x) => x + 1 }
+ assertEquals(update.get("key1", now), Some(i))
+ assertEquals(value, expected)
+ cache = update
+ }
+ }
+
+ test("transformAndExtract") {
+ val now = System.currentTimeMillis()
+ var cache = TimeBasedCache.empty[Int]
+
+ for (i <- 0 until 10) {
+ val (value, update) = cache.transformAndExtract("key1", 1.minute, now) {
+ case None => (1.toString, 0)
+ case Some(x) => ((x + 2).toString, x + 1)
+ }
+
+ assertEquals(update.get("key1", now), Some(i))
+ assertEquals(value, (i+1).toString)
+ cache = update
+ }
+ }
+
+ test("infinite expiry should not add to expiryOrder") {
+ val now = System.currentTimeMillis()
+ val cache = TimeBasedCache.empty[String]
+ .set("key1", "value1", Duration.Inf, now)
+ .set("key2", "value2", Duration.Inf, now)
+
+ assert(cache.expiryOrder.isEmpty, "expiryOrder.isEmpty")
+
+ val (diff, cleansed) = cache.cleanse(now + 366.days.toMillis)
+ assertEquals(diff, 0)
+ assertEquals(cleansed, cache)
+
+ val (isSuccess, update) = cache.delete("key1")
+ assert(isSuccess, "isSuccess")
+ assertEquals(update.get("key1", now), None)
+ assertEquals(update.get("key2", now), Some("value2"))
+ }
+}
diff --git a/shade-local/shared/src/test/scala/shade/local/mutable/TimeBasedCacheSuite.scala b/shade-local/shared/src/test/scala/shade/local/mutable/TimeBasedCacheSuite.scala
new file mode 100644
index 0000000..88bfc57
--- /dev/null
+++ b/shade-local/shared/src/test/scala/shade/local/mutable/TimeBasedCacheSuite.scala
@@ -0,0 +1,279 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.local.mutable
+
+import minitest.TestSuite
+import monix.eval.Task
+import monix.execution.schedulers.TestScheduler
+
+import scala.concurrent.Future
+import scala.concurrent.duration._
+import scala.util.Success
+
+object TimeBasedCacheSuite extends TestSuite[TestScheduler] {
+ def setup(): TestScheduler = TestScheduler()
+ def tearDown(env: TestScheduler): Unit =
+ assert(env.state.tasks.isEmpty, "tasks.isEmpty")
+
+ test("simple set and get") { implicit s =>
+ val cache = TimeBasedCache[String](distribution = 4)
+ try {
+ cache.set("key1", "value1", 1.minute + 10.seconds)
+ cache.set("key2", "value2", 1.minute)
+ cache.set("key3", "value3", 1.minute)
+
+ assertEquals(cache.get("key1"), Some("value1"))
+ assertEquals(cache.get("key2"), Some("value2"))
+ assertEquals(cache.get("key3"), Some("value3"))
+
+ s.tick(59.seconds)
+ assertEquals(cache.get("key1"), Some("value1"))
+ assertEquals(cache.get("key2"), Some("value2"))
+ assertEquals(cache.get("key3"), Some("value3"))
+
+ s.tick(1.second)
+ assertEquals(cache.get("key1"), Some("value1"))
+ assertEquals(cache.get("key2"), None)
+ assertEquals(cache.get("key3"), None)
+
+ s.tick(10.seconds)
+ assertEquals(cache.get("key1"), None)
+ }
+ finally {
+ cache.close()
+ }
+ }
+
+ test("set on a duplicate") { implicit s =>
+ val cache = TimeBasedCache[String](distribution = 4)
+ try {
+ cache.set("key1", "value1", 1.minute)
+ cache.set("key2", "value2", 1.minute)
+ cache.set("key3", "value3", 1.minute)
+
+ cache.set("key1", "value1-updated", 1.minute + 10.seconds)
+
+ assertEquals(cache.get("key1"), Some("value1-updated"))
+ assertEquals(cache.get("key2"), Some("value2"))
+ assertEquals(cache.get("key3"), Some("value3"))
+
+ s.tick(59.seconds)
+ assertEquals(cache.get("key1"), Some("value1-updated"))
+ assertEquals(cache.get("key2"), Some("value2"))
+ assertEquals(cache.get("key3"), Some("value3"))
+
+ s.tick(1.second)
+ assertEquals(cache.get("key1"), Some("value1-updated"))
+ assertEquals(cache.get("key2"), None)
+ assertEquals(cache.get("key3"), None)
+
+ s.tick(10.seconds)
+ assertEquals(cache.get("key1"), None)
+ }
+ finally {
+ cache.close()
+ }
+ }
+
+ test("delete") { implicit s =>
+ val cache = TimeBasedCache[String](distribution = 4)
+ try {
+ cache.set("key1", "value1", 1.minute)
+ cache.set("key2", "value2", 1.minute)
+ cache.set("key3", "value3", 1.minute + 10.seconds)
+
+ assertEquals(cache.get("key1"), Some("value1"))
+ assertEquals(cache.get("key2"), Some("value2"))
+ assertEquals(cache.get("key3"), Some("value3"))
+
+ cache.delete("key3")
+
+ assertEquals(cache.get("key1"), Some("value1"))
+ assertEquals(cache.get("key2"), Some("value2"))
+ assertEquals(cache.get("key3"), None)
+
+ cache.delete("key1")
+
+ assertEquals(cache.get("key1"), None)
+ assertEquals(cache.get("key2"), Some("value2"))
+ assertEquals(cache.get("key3"), None)
+
+ s.tick(60.seconds)
+ assertEquals(cache.get("key2"), None)
+ }
+ finally {
+ cache.close()
+ }
+ }
+
+ test("add") { implicit s =>
+ val cache = TimeBasedCache[String](distribution = 4)
+ try {
+ cache.set("key1", "value1", 1.minute)
+ cache.set("key2", "value2", 1.minute)
+ cache.set("key3", "value3", 1.minute + 10.seconds)
+
+ assert(cache.add("key4", "value4", 1.minute), "cache.add")
+ assertEquals(cache.get("key4"), Some("value4"))
+
+ assert(!cache.add("key1", "update", 1.minute), "!cache.add")
+ assert(!cache.add("key4", "update", 1.minute), "!cache.add")
+ }
+ finally {
+ cache.close()
+ }
+ }
+
+ test("compareAndSet") { implicit s =>
+ val cache = TimeBasedCache[String](distribution = 4)
+ try {
+ cache.set("key1", "value1", 1.minute)
+ cache.set("key2", "value2", 1.minute)
+ cache.set("key3", "value3", 1.minute + 10.seconds)
+
+ assert(cache.compareAndSet("key4", None, "value4", 1.minute))
+ assert(!cache.compareAndSet("key5", Some("missing"), "value5", 1.minute))
+ assert(cache.compareAndSet("key1", Some("value1"), "value1-updated", 1.minute))
+ assert(!cache.compareAndSet("key2", Some("wrong"), "value2-updated", 1.minute))
+
+ assertEquals(cache.get("key4"), Some("value4"))
+ assertEquals(cache.get("key5"), None)
+ assertEquals(cache.get("key1"), Some("value1-updated"))
+ assertEquals(cache.get("key2"), Some("value2"))
+ }
+ finally {
+ cache.close()
+ }
+ }
+
+ test("transformAndGet") { implicit s =>
+ val cache = TimeBasedCache[Int](distribution = 4)
+ try {
+ for (i <- 0 until 10) {
+ val value = cache.transformAndGet("test", 1.minute) {
+ case None => 0
+ case Some(x) => x + 1
+ }
+
+ assertEquals(value, i)
+ }
+ }
+ finally {
+ cache.close()
+ }
+ }
+
+ test("getAndTransform") { implicit s =>
+ val cache = TimeBasedCache[Int](distribution = 4)
+ try {
+ for (i <- 0 until 10) {
+ val value = cache.getAndTransform("test", 1.minute) {
+ case None => 0
+ case Some(x) => x + 1
+ }
+
+ assertEquals(value, if (i == 0) None else Some(i-1))
+ }
+ }
+ finally {
+ cache.close()
+ }
+ }
+
+ test("nextCleanse") { implicit s =>
+ val cache = TimeBasedCache[String](distribution = 4, cleanupPeriod = 1.second)
+ try {
+ cache.set("key1", "value1", 1.minute + 10.seconds)
+ cache.set("key2", "value2", 1.minute)
+ cache.set("key3", "value3", 1.minute)
+
+ assertEquals(cache.size, 3)
+ assertEquals(cache.rawSize, 3)
+
+ s.tick(1.minute - 1.second)
+ val next1 = cache.nextCleanse.runAsync
+ s.tick(); assertEquals(next1.value, None)
+
+ s.tick(1.second)
+ assertEquals(next1.value, Some(Success(2)))
+ assertEquals(cache.size, 1)
+ assertEquals(cache.rawSize, 1)
+
+ s.tick(9.seconds)
+ val next2 = cache.nextCleanse.runAsync
+ s.tick(); assertEquals(next2.value, None)
+
+ s.tick(1.second)
+ assertEquals(next2.value, Some(Success(1)))
+
+ val next3 = cache.nextCleanse.runAsync
+ s.tick(); assertEquals(next3.value, None)
+
+ s.tick(1.second)
+ assertEquals(next3.value, Some(Success(0)))
+ }
+ finally {
+ cache.close()
+ }
+ }
+
+ test("cache future") { implicit s =>
+ val cache = TimeBasedCache[String](distribution = 4, cleanupPeriod = 1.second)
+ try {
+ var effect = 0
+ def fetch(): Future[String] =
+ cache.cachedFuture("hello", 1.minute) { implicit s =>
+ Future {
+ effect += 1
+ s"world$effect"
+ }
+ }
+
+ val f1 = fetch(); s.tick()
+ assertEquals(f1.value, Some(Success("world1")))
+ val f2 = fetch(); s.tick()
+ assertEquals(f2.value, Some(Success("world1")))
+
+ s.tick(1.minute)
+ val f3 = fetch(); s.tick()
+ assertEquals(f3.value, Some(Success("world2")))
+ }
+ finally {
+ cache.close()
+ }
+ }
+
+ test("cache task") { implicit s =>
+ val cache = TimeBasedCache[String](distribution = 4, cleanupPeriod = 1.second)
+ try {
+ var effect = 0
+ val task = cache.cachedTask("hello", 1.minute) {
+ Task {
+ effect += 1
+ s"world$effect"
+ }
+ }
+
+ val f1 = task.runAsync; s.tick()
+ assertEquals(f1.value, Some(Success("world1")))
+ val f2 = task.runAsync; s.tick()
+ assertEquals(f2.value, Some(Success("world1")))
+
+ s.tick(1.minute)
+ val f3 = task.runAsync; s.tick()
+ assertEquals(f3.value, Some(Success("world2")))
+ }
+ finally {
+ cache.close()
+ }
+ }
+}
diff --git a/src/main/java/shade/memcached/internals/Slf4jLogger.java b/shade-memcached/src/main/java/shade/memcached/internals/Slf4jLogger.java
similarity index 100%
rename from src/main/java/shade/memcached/internals/Slf4jLogger.java
rename to shade-memcached/src/main/java/shade/memcached/internals/Slf4jLogger.java
diff --git a/shade-memcached/src/main/scala/shade/memcached/Codec.scala b/shade-memcached/src/main/scala/shade/memcached/Codec.scala
new file mode 100644
index 0000000..ed18af5
--- /dev/null
+++ b/shade-memcached/src/main/scala/shade/memcached/Codec.scala
@@ -0,0 +1,201 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.memcached
+
+import net.spy.memcached.transcoders._
+import scala.annotation.implicitNotFound
+import net.spy.memcached.CachedData.MAX_SIZE
+
+/** Represents a type class that needs to be implemented
+ * for serialization/deserialization to work.
+ */
+@implicitNotFound("Could not find any Codec implementation for type ${T}.")
+trait Codec[T] extends Transcoder[T] {
+ /** Returns `true` if the decoding needs to happen asynchronously,
+ * or `false` otherwise.
+ *
+ * Decoding should be marked for asynchrony in case it is
+ * expensive, for example when compression is applied.
+ */
+ def asyncDecode(d: CachedData): Boolean
+
+ /** Encode the given value to a byte array with flags attached,
+ * meant for storage by the Memcached client.
+ */
+ def encode(value: T): CachedData
+
+ /** Decodes byte arrays with flags, as retrieved by the Memcached client,
+ * into the value it represents.
+ */
+ def decode(data: CachedData): T
+
+ /** Get the maximum size of objects handled by this codec. */
+ def getMaxSize: Int
+}
+
+object Codec extends DefaultCodecs
+
+abstract class DefaultCodecs extends DefaultCodecsLevel0 {
+ import java.lang.{Float => JvmFloat, Double => JvmDouble}
+
+ /** Default codec for `Int`. */
+ implicit object IntBinaryCodec extends GenericIntCodec[Int](
+ flags = 2 << 8, // SerializingTranscoder.SPECIAL_INT
+ toInt = (v: Int) => v,
+ fromInt = (v: Int) => v
+ )
+
+ /** Default codec for `Long`. */
+ implicit object LongBinaryCodec extends GenericLongCodec[Long](
+ flags = 3 << 8, // SerializingTranscoder.SPECIAL_LONG
+ toLong = (v: Long) => v,
+ fromLong = (v: Long) => v
+ )
+
+ /** Default codec for `Float`. */
+ implicit object FloatBinaryCodec extends GenericIntCodec[Float](
+ flags = 6 << 8, // SerializingTranscoder.SPECIAL_FLOAT
+ toInt = JvmFloat.floatToRawIntBits,
+ fromInt = JvmFloat.intBitsToFloat
+ )
+
+ /** Default codec for `Double`. */
+ implicit object DoubleBinaryCodec extends GenericLongCodec[Double](
+ flags = 7 << 8, // SerializingTranscoder.SPECIAL_DOUBLE
+ toLong = JvmDouble.doubleToRawLongBits,
+ fromLong = JvmDouble.longBitsToDouble
+ )
+
+ /** Default codec for `Byte`. */
+ implicit object ByteBinaryCodec extends Codec[Byte] {
+ final val FLAGS = 5 << 8 // SerializingTranscoder.SPECIAL_BYTE
+
+ def asyncDecode(d: CachedData): Boolean = false
+
+ def encode(value: Byte): CachedData = {
+ val bytes = packedUtils.encodeByte(value)
+ new CachedData(FLAGS, bytes, getMaxSize)
+ }
+
+ def decode(data: CachedData): Byte =
+ data.getData match {
+ case null => 0
+ case bytes =>
+ packedUtils.decodeByte(bytes)
+ }
+
+ def getMaxSize: Int =
+ MAX_SIZE
+ }
+
+ /** Default codec for `Boolean`. */
+ implicit object BooleanCodec extends Codec[Boolean] {
+ // SerializingTranscoder.SPECIAL_BOOLEAN
+ final val FLAGS = 1 << 8
+
+ def asyncDecode(d: CachedData): Boolean = false
+
+ def encode(value: Boolean): CachedData = {
+ val bytes = packedUtils.encodeBoolean(value)
+ new CachedData(FLAGS, bytes, getMaxSize)
+ }
+
+ def decode(data: CachedData): Boolean =
+ data.getData match {
+ case null => false
+ case bytes =>
+ packedUtils.decodeBoolean(bytes)
+ }
+
+ def getMaxSize: Int =
+ MAX_SIZE
+ }
+}
+
+private[memcached] abstract class DefaultCodecsLevel0 {
+ /** Returns a [[Codec]] reference that can serialize and
+ * deserialize anything.
+ */
+ implicit def serializingCodec[A]: Codec[A] =
+ SerializingCodecAnyRef.asInstanceOf[Codec[A]]
+
+ /** Reusable reference for [[serializingCodec]]. */
+ private object SerializingCodecAnyRef extends Codec[Any] {
+ private[this] val tc = new SerializingTranscoder()
+
+ def asyncDecode(d: CachedData): Boolean =
+ tc.asyncDecode(d)
+
+ def encode(value: Any): CachedData = {
+ if (value == null) throw new NullPointerException("Null values not supported!")
+ tc.encode(value)
+ }
+
+ def decode(data: CachedData): Any =
+ tc.decode(data) match {
+ case null => throw new NullPointerException("Null values not supported!")
+ case value => value
+ }
+
+ def getMaxSize: Int =
+ tc.getMaxSize
+ }
+
+ /** Helper for building codecs that serialize/deserialize to and from `Long`. */
+ class GenericLongCodec[A](flags: Int, toLong: A => Long, fromLong: Long => A) extends Codec[A] {
+ final val FLAGS = flags
+
+ final def asyncDecode(d: CachedData): Boolean =
+ false
+
+ final def encode(value: A): CachedData = {
+ val bytes = packedUtils.encodeLong(toLong(value))
+ new CachedData(FLAGS, bytes, MAX_SIZE)
+ }
+
+ final def decode(data: CachedData): A =
+ fromLong(data.getData match {
+ case null => 0
+ case bytes =>
+ packedUtils.decodeLong(bytes)
+ })
+
+ final def getMaxSize: Int =
+ MAX_SIZE
+ }
+
+ /** Helper for building codecs that serialize/deserialize to and from `Int`. */
+ class GenericIntCodec[A](flags: Int, toInt: A => Int, fromInt: Int => A) extends Codec[A] {
+ final val FLAGS = flags
+
+ final def asyncDecode(d: CachedData): Boolean =
+ false
+
+ final def encode(value: A): CachedData = {
+ val bytes = packedUtils.encodeInt(toInt(value))
+ new CachedData(FLAGS, bytes, MAX_SIZE)
+ }
+
+ final def decode(data: CachedData): A =
+ fromInt(data.getData match {
+ case null => 0
+ case bytes =>
+ packedUtils.decodeInt(bytes)
+ })
+
+ final def getMaxSize: Int =
+ MAX_SIZE
+ }
+
+ protected final val packedUtils =
+ new TranscoderUtils(true)
+}
\ No newline at end of file
diff --git a/src/main/scala/shade/memcached/Configuration.scala b/shade-memcached/src/main/scala/shade/memcached/Configuration.scala
similarity index 98%
rename from src/main/scala/shade/memcached/Configuration.scala
rename to shade-memcached/src/main/scala/shade/memcached/Configuration.scala
index 4535de9..7b21ce3 100644
--- a/src/main/scala/shade/memcached/Configuration.scala
+++ b/shade-memcached/src/main/scala/shade/memcached/Configuration.scala
@@ -14,7 +14,6 @@ package shade.memcached
import net.spy.memcached.ConnectionFactoryBuilder.Locator
import net.spy.memcached.ops.OperationQueueFactory
import net.spy.memcached.{ DefaultConnectionFactory, HashAlgorithm }
-
import scala.concurrent.duration._
/**
@@ -56,7 +55,7 @@ import scala.concurrent.duration._
*
* @param locator locator selection, by default ARRAY_MOD
*/
-case class Configuration(
+final case class Configuration(
addresses: String,
authentication: Option[AuthConfiguration] = None,
keysPrefix: Option[String] = None,
@@ -79,6 +78,6 @@ object FailureMode extends Enumeration {
val Retry, Cancel, Redistribute = Value
}
-case class AuthConfiguration(
+final case class AuthConfiguration(
username: String,
password: String)
diff --git a/shade-memcached/src/main/scala/shade/memcached/FakeMemcached.scala b/shade-memcached/src/main/scala/shade/memcached/FakeMemcached.scala
new file mode 100644
index 0000000..40a9a60
--- /dev/null
+++ b/shade-memcached/src/main/scala/shade/memcached/FakeMemcached.scala
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.memcached
+import monix.eval.Task
+import monix.execution.{CancelableFuture, Scheduler}
+import shade.local.mutable.TimeBasedCache
+
+import scala.concurrent.ExecutionContext
+import scala.concurrent.duration.Duration
+
+/** A fake [[Memcached]] implementation that can be used in testing. */
+class FakeMemcached(scheduler: Scheduler) extends Memcached {
+ private[this] val inMemory = TimeBasedCache[CachedData]()(scheduler)
+
+ def addL[T](key: String, value: T, exp: Duration)
+ (implicit codec: Codec[T]): Task[Boolean] =
+ Task.eval(inMemory.add(key, codec.encode(value), exp))
+
+ def add[T](key: String, value: T, exp: Duration)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Boolean] = {
+
+ val r = inMemory.add(key, codec.encode(value), exp)
+ CancelableFuture.successful(r)
+ }
+
+ def setL[T](key: String, value: T, exp: Duration)
+ (implicit codec: Codec[T]): Task[Unit] =
+ Task.eval(inMemory.set(key, codec.encode(value), exp))
+
+ def set[T](key: String, value: T, exp: Duration)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Unit] =
+ CancelableFuture.successful {
+ inMemory.set(key, codec.encode(value), exp)
+ }
+
+ def deleteL(key: String): Task[Boolean] =
+ Task.eval(inMemory.delete(key))
+
+ def delete(key: String)(implicit ec: ExecutionContext): CancelableFuture[Boolean] =
+ CancelableFuture.successful(inMemory.delete(key))
+
+ def getL[T](key: String)(implicit codec: Codec[T]): Task[Option[T]] =
+ Task.eval(inMemory.get(key).map(codec.decode))
+
+ def get[T](key: String)(implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Option[T]] =
+ CancelableFuture.successful(inMemory.get(key).map(codec.decode))
+
+ def getsL[T](key: String)(implicit codec: Codec[T]): Task[Option[CASValue[T]]] =
+ Task.eval(
+ inMemory.get(key).map { data =>
+ val v = codec.decode(data)
+ new CASValue(v.hashCode(), v)
+ })
+
+ def gets[T](key: String)(implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Option[CASValue[T]]] =
+ CancelableFuture.successful(
+ inMemory.get(key).map { data =>
+ val v = codec.decode(data)
+ new CASValue(v.hashCode(), v)
+ })
+
+ private def cas[T](key: String, casId: Long, update: T, exp: Duration)
+ (implicit codec: Codec[T]): Boolean = {
+
+ inMemory.get(key) match {
+ case None => false
+ case current @ Some(data) =>
+ val v = codec.decode(data)
+ if (casId != v.hashCode()) false else {
+ val u = codec.encode(update)
+ inMemory.compareAndSet(key, current, u, exp)
+ }
+ }
+ }
+
+ def rawCompareAndSetL[T](key: String, casId: Long, update: T, exp: Duration)(implicit codec: Codec[T]): Task[Boolean] =
+ Task.eval(cas(key, casId, update, exp)(codec))
+
+ def rawCompareAndSet[T](key: String, casId: Long, update: T, exp: Duration)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Boolean] =
+ CancelableFuture.successful(cas(key, casId, update, exp)(codec))
+
+ private def incAndGet(key: String, by: Long, default: Long, exp: Duration): Long = {
+ val codec = implicitly[Codec[Long]]
+ val ref = inMemory.transformAndGet(key, exp) {
+ case None => codec.encode(default)
+ case Some(data) =>
+ val current = codec.decode(data)
+ codec.encode(current + by)
+ }
+ codec.decode(ref)
+ }
+
+ def incrementAndGetL(key: String, by: Long, default: Long, exp: Duration): Task[Long] =
+ Task.eval(incAndGet(key, by, default, exp))
+
+ def incrementAndGet(key: String, by: Long, default: Long, exp: Duration)
+ (implicit ec: ExecutionContext): CancelableFuture[Long] =
+ CancelableFuture.successful(incAndGet(key, by, default, exp))
+
+ def decrementAndGetL(key: String, by: Long, default: Long, exp: Duration): Task[Long] =
+ Task.eval(incAndGet(key, -1 * by, default, exp))
+
+ def decrementAndGet(key: String, by: Long, default: Long, exp: Duration)
+ (implicit ec: ExecutionContext): CancelableFuture[Long] =
+ CancelableFuture.successful(incAndGet(key, -1 * by, default, exp))
+
+ def close(): Unit =
+ inMemory.close()
+}
diff --git a/shade-memcached/src/main/scala/shade/memcached/Memcached.scala b/shade-memcached/src/main/scala/shade/memcached/Memcached.scala
new file mode 100644
index 0000000..1b12642
--- /dev/null
+++ b/shade-memcached/src/main/scala/shade/memcached/Memcached.scala
@@ -0,0 +1,555 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.memcached
+
+import monix.eval.Task
+import monix.execution.{CancelableFuture, Scheduler}
+
+import scala.concurrent.duration.Duration
+import scala.concurrent.{Await, ExecutionContext}
+
+/**
+ * @define addDesc Adds a value for a given key, if the key doesn't
+ * already exist in the cache store.
+ *
+ * If the key already exists in the cache, the returned
+ * result will be `false` and the current value will not be
+ * overridden. If the key isn't there already, the value will
+ * be set and the returned result will be `true`.
+ *
+ * @define addReturnDesc either `true`, in case the key was created,
+ * with the given value, or `false` in case the key already
+ * exists
+ *
+ * @define setDesc Stores a (key, value) pair in the cache store.
+ * If the `key` doesn't exist, then one is created. If
+ * the `key` exists, then it is updated.
+ *
+ * @define deleteDesc Deletes a `key` from the cache store.
+ *
+ * @define getDesc Fetches a value from the cache store associated
+ * with the given `key`.
+ *
+ * @define getsDesc Fetches a value from the cache store associated
+ * with the given `key` and also return its associated
+ * "cas ID" to use in `compareAndSet` operations.
+ *
+ * @define casDesc Atomic compare and set.
+ *
+ * @define rawCasDesc Atomic compare and set using cas IDs
+ * (fetched with `gets`).
+ *
+ * @define transformAndGetDesc Transforms the given key and
+ * returns the new value.
+ *
+ * The given function receives the current value
+ * (`None` in case the key is missing or `Some(value)` otherwise)
+ * and should return the new value that was eventually stored.
+ *
+ * The method goes into a `compareAndSet` loop until the cas operation
+ * succeeds, so the callback should have no side-effects.
+ *
+ * @define incrementDesc Atomic increment.
+ *
+ * Increments the value stored with the given `key` by
+ * the given amount. If the `key` does not exist, then
+ * it creates it with the `default` value.
+ *
+ * @define casReturn either `true` (in case the compare-and-set
+ * operation succeeded) or `false` if not, in which case
+ * a concurrent operation probably happened
+ *
+ * @define getReturnDesc `Some(value)` in case the `key` is available,
+ * or `None` otherwise (doesn't throw exception on missing keys)
+ *
+ * @define getsReturnDesc `Some(CASValue(value, casId))` in case the
+ * `key` is available, or `None` otherwise (doesn't throw
+ * exception on missing keys)
+ *
+ * @define deleteReturnDesc `true` if a key was deleted or `false`
+ * if there was nothing there to delete
+ *
+ * @define codecParamDesc is the serializer and deserializer needed
+ * for storing the given `value`
+ *
+ * @define expParamDesc specifies the expiry time, can be infinite
+ * (`Duration.Inf`)
+ *
+ * @define keyUpdateParamDesc is the key in memcached to update
+ *
+ * @define valueParamDesc is the cached value, associated with the
+ * given `key`
+ *
+ * @define ecParamDesc is the `ExecutionContext` used to schedule
+ * asynchronous computations
+ *
+ * @define casCurrentParamDesc is the current value associated with
+ * the given `key`, should be `None` in case it should be
+ * missing, or `Some(value)` otherwise
+ *
+ * @define casUpdateParamDesc is the value to be associated with
+ * the given `key` if this operation succeeds
+ *
+ * @define casIdParamDesc is the value Id returned by `gets`
+ */
+abstract class Memcached extends java.io.Closeable {
+ /** $addDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param value $valueParamDesc
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ *
+ * @see [[Memcached.add]] for the `Future`-enabled version
+ *
+ * @return a `Task` that on evaluation will signal
+ * $addReturnDesc
+ */
+ def addL[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): Task[Boolean]
+
+ /** $addDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param value $valueParamDesc
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ * @param ec $ecParamDesc
+ *
+ * @see [[Memcached.addL]] for the `Task`-enabled version
+ *
+ * @return a `CancelableFuture` that will signal
+ * $addReturnDesc
+ */
+ def add[T](key: String, value: T, exp: Duration)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Boolean]
+
+ /** $setDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param value $valueParamDesc
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ *
+ * @see [[Memcached.set]] for the `Future`-enabled version
+ */
+ def setL[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): Task[Unit]
+
+ /** $setDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param value $valueParamDesc
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ * @param ec $ecParamDesc
+ *
+ * @see [[Memcached.setL]] for the `Task`-enabled version
+ */
+ def set[T](key: String, value: T, exp: Duration)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Unit]
+
+ /** $deleteDesc
+ *
+ * @see [[Memcached.delete]] for the `Future`-enabled version
+ *
+ * @param key is the key to delete if it exists
+ * @return $deleteReturnDesc
+ */
+ def deleteL(key: String): Task[Boolean]
+
+ /** $deleteDesc
+ *
+ * @see [[Memcached.deleteL]] for the `Task`-enabled version
+ *
+ * @param key is the key to delete if it exists
+ * @return $deleteReturnDesc
+ */
+ def delete(key: String)(implicit ec: ExecutionContext): CancelableFuture[Boolean]
+
+ /** $getDesc
+ *
+ * @see [[Memcached.get]] for the `Future`-enabled version
+ *
+ * @param key is the key whose value we need to fetch
+ * @param codec $codecParamDesc
+ *
+ * @return $getReturnDesc
+ */
+ def getL[T](key: String)(implicit codec: Codec[T]): Task[Option[T]]
+
+ /** $getDesc
+ *
+ * @see [[Memcached.getL]] for the `Task`-enabled version
+ *
+ * @param key is the key whose value we need to fetch
+ * @param codec $codecParamDesc
+ * @param ec $ecParamDesc
+ *
+ * @return $getReturnDesc
+ */
+ def get[T](key: String)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Option[T]]
+
+ /** $getsDesc
+ *
+ * @see [[Memcached.gets]] for the `Future`-enabled version
+ *
+ * @param key is the key whose value we need to fetch
+ * @param codec $codecParamDesc
+ *
+ * @return $getsReturnDesc
+ */
+ def getsL[T](key: String)(implicit codec: Codec[T]): Task[Option[CASValue[T]]]
+
+ /** $getsDesc
+ *
+ * @see [[Memcached.getsL]] for the `Task`-enabled version
+ *
+ * @param key is the key whose value we need to fetch
+ * @param codec $codecParamDesc
+ * @param ec $ecParamDesc
+ *
+ * @return $getsReturnDesc
+ */
+ def gets[T](key: String)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Option[CASValue[T]]]
+
+ /** $rawCasDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param casId $casIdParamDesc
+ * @param update $casUpdateParamDesc
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ *
+ * @see [[Memcached.rawCompareAndSet]] for the `Future`-enabled version
+ *
+ * @return $casReturn
+ */
+ def rawCompareAndSetL[T](key: String, casId: Long, update: T, exp: Duration)
+ (implicit codec: Codec[T]): Task[Boolean]
+
+ /** $rawCasDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param casId $casIdParamDesc
+ * @param update $casUpdateParamDesc
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ *
+ * @see [[Memcached.rawCompareAndSetL]] for the `Task`-enabled version
+ *
+ * @return $casReturn
+ */
+ def rawCompareAndSet[T](key: String, casId: Long, update: T, exp: Duration)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Boolean]
+
+ /** $casDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param current $casCurrentParamDesc
+ * @param update $casUpdateParamDesc
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ *
+ * @see [[Memcached.compareAndSet]] for the `Future`-enabled version
+ *
+ * @return $casReturn
+ */
+ def compareAndSetL[T](key: String, current: Option[T], update: T, exp: Duration)
+ (implicit codec: Codec[T]): Task[Boolean] = {
+
+ current match {
+ case None => addL(key, update, exp)
+ case Some(expected) =>
+ getsL[T](key).flatMap {
+ case Some(r) if r.getValue == expected =>
+ rawCompareAndSetL[T](key, r.getCas, update, exp)
+ case _ =>
+ Task.now(false)
+ }
+ }
+ }
+
+ /** $casDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param current $casCurrentParamDesc
+ * @param update $casUpdateParamDesc
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ *
+ * @see [[Memcached.compareAndSetL]] for the `Task`-enabled version
+ *
+ * @return $casReturn
+ */
+ def compareAndSet[T](key: String, current: Option[T], update: T, exp: Duration)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Boolean] = {
+
+ current match {
+ case None => add(key, update, exp)
+ case Some(expected) =>
+ gets[T](key).flatMap {
+ case Some(r) if r.getValue == expected =>
+ rawCompareAndSet[T](key, r.getCas, update, exp)
+ case _ =>
+ CancelableFuture.successful(false)
+ }
+ }
+ }
+
+ /** $transformAndGetDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param f is the function that transforms the current value
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ *
+ * @see [[transformAndGet]] for the `Future`-enabled function.
+ *
+ * @return the updated value
+ */
+ def transformAndGetL[T](key: String, exp: Duration)(f: Option[T] => T)
+ (implicit codec: Codec[T]): Task[T] = {
+
+ getsL[T](key).flatMap {
+ case None =>
+ val update = f(None)
+ addL(key, update, exp).flatMap {
+ case false => transformAndGetL(key, exp)(f)
+ case true => Task.now(update)
+ }
+ case Some(r) =>
+ val update = f(Option(r.getValue))
+ rawCompareAndSetL(key, r.getCas, update, exp).flatMap {
+ case false => transformAndGetL(key, exp)(f)
+ case true => Task.now(update)
+ }
+ }
+ }
+
+ /** $transformAndGetDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param f is the function that transforms the current value
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ *
+ * @see [[transformAndGetL]] for the `Task`-enabled function.
+ *
+ * @return the updated value
+ */
+ def transformAndGet[T](key: String, exp: Duration)(f: Option[T] => T)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[T] = {
+
+ gets[T](key).flatMap {
+ case None =>
+ val update = f(None)
+ add(key, update, exp).flatMap {
+ case false => transformAndGet(key, exp)(f)
+ case true => CancelableFuture.successful(update)
+ }
+ case Some(r) =>
+ val update = f(Option(r.getValue))
+ rawCompareAndSet(key, r.getCas, update, exp).flatMap {
+ case false => transformAndGet(key, exp)(f)
+ case true => CancelableFuture.successful(update)
+ }
+ }
+ }
+
+ //--
+ /** $getAndTransformDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param f is the function that transforms the current value
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ *
+ * @see [[getAndTransform]] for the `Future`-enabled function.
+ *
+ * @return the updated value
+ */
+ def getAndTransformL[T](key: String, exp: Duration)(f: Option[T] => T)
+ (implicit codec: Codec[T]): Task[Option[T]] = {
+
+ getsL[T](key).flatMap {
+ case None =>
+ val update = f(None)
+ addL(key, update, exp).flatMap {
+ case false => getAndTransformL(key, exp)(f)
+ case true => Task.now(None)
+ }
+ case Some(r) =>
+ val current = Option(r.getValue)
+ val update = f(current)
+ rawCompareAndSetL(key, r.getCas, update, exp).flatMap {
+ case false => getAndTransformL(key, exp)(f)
+ case true => Task.now(current)
+ }
+ }
+ }
+
+ /** $getAndTransformDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param f is the function that transforms the current value
+ * @param exp $expParamDesc
+ * @param codec $codecParamDesc
+ *
+ * @see [[getAndTransformL]] for the `Task`-enabled function.
+ *
+ * @return the updated value
+ */
+ def getAndTransform[T](key: String, exp: Duration)(f: Option[T] => T)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Option[T]] = {
+
+ gets[T](key).flatMap {
+ case None =>
+ val update = f(None)
+ add(key, update, exp).flatMap {
+ case false => getAndTransform(key, exp)(f)
+ case true => CancelableFuture.successful(None)
+ }
+ case Some(r) =>
+ val current = Option(r.getValue)
+ val update = f(current)
+ rawCompareAndSet(key, r.getCas, update, exp).flatMap {
+ case false => getAndTransform(key, exp)(f)
+ case true => CancelableFuture.successful(current)
+ }
+ }
+ }
+
+ /** $incrementDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param by is the value to add
+ * @param default is the default value to create in case the key is missing
+ * @param exp $expParamDesc
+ *
+ * @see [[incrementAndGet]] for the `Future`-enabled version
+ *
+ * @return the incremented value or -1 if the increment failed
+ */
+ def incrementAndGetL(key: String, by: Long, default: Long, exp: Duration): Task[Long]
+
+ /** $incrementDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param by is the value to add
+ * @param default is the default value to create in case the key is missing
+ * @param exp $expParamDesc
+ *
+ * @see [[incrementAndGetL]] for the `Task`-enabled version
+ *
+ * @return the incremented value or -1 if the increment failed
+ */
+ def incrementAndGet(key: String, by: Long, default: Long, exp: Duration)
+ (implicit ec: ExecutionContext): CancelableFuture[Long]
+
+ /** $decrementDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param by is the value to add
+ * @param default is the default value to create in case the key is missing
+ * @param exp $expParamDesc
+ *
+ * @see [[decrementAndGet]] for the `Future`-enabled version
+ *
+ * @return the decremented value or -1 if the decrement failed
+ */
+ def decrementAndGetL(key: String, by: Long, default: Long, exp: Duration): Task[Long]
+
+ /** $decrementDesc
+ *
+ * @param key $keyUpdateParamDesc
+ * @param by is the value to add
+ * @param default is the default value to create in case the key is missing
+ * @param exp $expParamDesc
+ *
+ * @see [[decrementAndGetL]] for the `Task`-enabled version
+ *
+ * @return the decremented value or -1 if the decrement failed
+ */
+ def decrementAndGet(key: String, by: Long, default: Long, exp: Duration)
+ (implicit ec: ExecutionContext): CancelableFuture[Long]
+
+ /** Shuts down the cache instance, performs any additional
+ * cleanups necessary.
+ */
+ def close(): Unit
+}
+
+object Memcached {
+ /**
+ * Builds a [[Memcached]] instance. Needs a [[Configuration]].
+ */
+ def apply(config: Configuration): Memcached =
+ new SpyMemcached(config)
+
+ /** Returns a [[FakeMemcached]] implementation, useful for usage in tests. */
+ def fake(implicit s: Scheduler): Memcached =
+ new FakeMemcached(s)
+
+ /** Extra extensions for [[Memcached]] */
+ implicit class Extensions(val client: Memcached) extends AnyVal {
+ /** Performs a [[Memcached.add]] and blocks for the result. */
+ def awaitAdd[T](key: String, value: T, exp: Duration, awaitAtMost: Duration = Duration.Inf)
+ (implicit codec: Codec[T], ec: ExecutionContext): Boolean =
+ Await.result(client.add(key, value, exp)(codec, ec), awaitAtMost)
+
+ /** Performs a [[Memcached.set]] and blocks for the result. */
+ def awaitSet[T](key: String, value: T, exp: Duration, awaitAtMost: Duration = Duration.Inf)
+ (implicit codec: Codec[T], ec: ExecutionContext): Unit =
+ Await.result(client.set(key, value, exp)(codec, ec), awaitAtMost)
+
+ /** Performs a [[Memcached.delete]] and blocks for the result. */
+ def awaitDelete(key: String, awaitAtMost: Duration = Duration.Inf)
+ (implicit ec: ExecutionContext): Boolean =
+ Await.result(client.delete(key)(ec), awaitAtMost)
+
+ /** Performs a [[Memcached.get]] and blocks for the result. */
+ def awaitGet[T](key: String, awaitAtMost: Duration = Duration.Inf)
+ (implicit codec: Codec[T], ec: ExecutionContext): Option[T] =
+ Await.result(client.get(key)(codec, ec), awaitAtMost)
+
+ /** Performs a [[Memcached.rawCompareAndSet]] and blocks for the result. */
+ def awaitRawCompareAndSet[T](key: String, casId: Int, update: T, exp: Duration, awaitAtMost: Duration = Duration.Inf)
+ (implicit codec: Codec[T], ec: ExecutionContext): Boolean =
+ Await.result(client.rawCompareAndSet(key, casId, update, exp)(codec, ec), awaitAtMost)
+
+ /** Performs a [[Memcached.rawCompareAndSet]] and blocks for the result. */
+ def awaitCompareAndSet[T](key: String, current: Option[T], update: T, exp: Duration, awaitAtMost: Duration = Duration.Inf)
+ (implicit codec: Codec[T], ec: ExecutionContext): Boolean =
+ Await.result(client.compareAndSet(key, current, update, exp)(codec, ec), awaitAtMost)
+
+ /** Performs a [[Memcached.transformAndGet]] and blocks for the result. */
+ def awaitTransformAndGet[T](key: String, exp: Duration, awaitAtMost: Duration = Duration.Inf)
+ (f: Option[T] => T)(implicit codec: Codec[T], ec: ExecutionContext): T =
+ Await.result(client.transformAndGet(key, exp)(f), awaitAtMost)
+
+ /** Performs a [[Memcached.getAndTransform]] and blocks for the result. */
+ def awaitGetAndTransform[T](key: String, exp: Duration, awaitAtMost: Duration = Duration.Inf)
+ (f: Option[T] => T)(implicit codec: Codec[T], ec: ExecutionContext): Option[T] =
+ Await.result(client.getAndTransform(key, exp)(f), awaitAtMost)
+
+ /** Performs an [[Memcached.incrementAndGet]] and blocks for the result. */
+ def awaitIncrementAndGet(key: String, by: Long, default: Long, exp: Duration, awaitAtMost: Duration = Duration.Inf)
+ (implicit ec: ExecutionContext): Long =
+ Await.result(client.incrementAndGet(key, by, default, exp), awaitAtMost)
+
+ /** Performs an [[Memcached.decrementAndGet]] and blocks for the result. */
+ def awaitDecrementAndGet(key: String, by: Long, default: Long, exp: Duration, awaitAtMost: Duration = Duration.Inf)
+ (implicit ec: ExecutionContext): Long =
+ Await.result(client.decrementAndGet(key, by, default, exp), awaitAtMost)
+ }
+}
\ No newline at end of file
diff --git a/shade-memcached/src/main/scala/shade/memcached/SpyMemcached.scala b/shade-memcached/src/main/scala/shade/memcached/SpyMemcached.scala
new file mode 100644
index 0000000..1702ce8
--- /dev/null
+++ b/shade-memcached/src/main/scala/shade/memcached/SpyMemcached.scala
@@ -0,0 +1,454 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.memcached
+
+import monix.eval.{Callback, Task}
+import monix.execution.{Cancelable, CancelableFuture, Scheduler}
+import java.lang.{Boolean => JavaBoolean, Long => JavaLong}
+import java.util.concurrent.{Future => JavaFuture}
+
+import net.spy.memcached.internal._
+import net.spy.memcached.ConnectionFactoryBuilder.{Protocol => SpyProtocol}
+import net.spy.memcached.auth.{AuthDescriptor, PlainCallbackHandler}
+import net.spy.memcached.{AddrUtil, CASResponse, ConnectionFactoryBuilder, MemcachedClient, FailureMode => SpyFailureMode}
+
+import scala.concurrent.duration.{Duration, FiniteDuration}
+import scala.concurrent.{ExecutionContext, Promise}
+import scala.language.higherKinds
+import scala.util.Try
+import scala.util.control.NonFatal
+
+class SpyMemcached(config: Configuration) extends Memcached {
+ override def addL[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): Task[Boolean] =
+ triggerOperationTask[JavaBoolean, Boolean, OperationFuture](
+ javaToScalaBoolean,
+ () => {
+ val expSecs = expiryToSeconds(exp).toInt
+ client.add(withPrefix(key), expSecs, value, codec)
+ },
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+
+ override def add[T](key: String, value: T, exp: Duration)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Boolean] = {
+
+ triggerOperationFuture[JavaBoolean, Boolean, OperationFuture](
+ javaToScalaBoolean,
+ () => {
+ val expSecs = expiryToSeconds(exp).toInt
+ client.add(withPrefix(key), expSecs, value, codec)
+ },
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+ }
+
+ override def setL[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): Task[Unit] =
+ triggerOperationTask[JavaBoolean, Unit, OperationFuture](
+ unit,
+ () => {
+ val expSecs = expiryToSeconds(exp).toInt
+ client.set(withPrefix(key), expSecs, value, codec)
+ },
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+
+ override def set[T](key: String, value: T, exp: Duration)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Unit] = {
+
+ triggerOperationFuture[JavaBoolean, Unit, OperationFuture](
+ unit,
+ () => {
+ val expSecs = expiryToSeconds(exp).toInt
+ client.set(withPrefix(key), expSecs, value, codec)
+ },
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+ }
+
+ override def deleteL(key: String): Task[Boolean] =
+    triggerOperationTask[JavaBoolean, Boolean, OperationFuture](
+ javaToScalaBoolean,
+ () => client.delete(withPrefix(key)),
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+
+ override def delete(key: String)
+ (implicit ec: ExecutionContext): CancelableFuture[Boolean] = {
+
+ triggerOperationFuture[JavaBoolean, Boolean, OperationFuture](
+ javaToScalaBoolean,
+ () => client.delete(withPrefix(key)),
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+ }
+
+ override def getL[T](key: String)(implicit codec: Codec[T]): Task[Option[T]] =
+ triggerOperationTask[T, Option[T], GetFuture](
+ (x: T) => Option(x),
+ () => client.asyncGet(withPrefix(key), codec),
+ (async, callback) => {
+ async.addListener(new GetCompletionListener {
+ def onComplete(future: GetFuture[_]) =
+ callback()
+ })
+ }
+ )
+
+ override def get[T](key: String)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Option[T]] = {
+
+ triggerOperationFuture[T, Option[T], GetFuture](
+ (x: T) => Option(x),
+ () => client.asyncGet(withPrefix(key), codec),
+ (async, callback) => {
+ async.addListener(new GetCompletionListener {
+ def onComplete(future: GetFuture[_]) =
+ callback()
+ })
+ }
+ )
+ }
+
+ override def getsL[T](key: String)(implicit codec: Codec[T]): Task[Option[CASValue[T]]] =
+ triggerOperationTask[CASValue[T], Option[CASValue[T]], OperationFuture](
+ (x: CASValue[T]) => Option(x),
+ () => client.asyncGets(withPrefix(key), codec),
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+
+ override def gets[T](key: String)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Option[CASValue[T]]] = {
+
+ triggerOperationFuture[CASValue[T], Option[CASValue[T]], OperationFuture](
+ (x: CASValue[T]) => Option(x),
+ () => client.asyncGets(withPrefix(key), codec),
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+ }
+
+ override def rawCompareAndSetL[T](key: String, casId: Long, update: T, exp: Duration)
+ (implicit codec: Codec[T]): Task[Boolean] = {
+
+ triggerOperationTask[CASResponse, Boolean, OperationFuture](
+ {
+ case CASResponse.OK | CASResponse.OBSERVE_MODIFIED => true
+ case _ => false
+ },
+ () => {
+ val expSecs = expiryToSeconds(exp).toInt
+ client.asyncCAS(withPrefix(key), casId, expSecs, update, codec)
+ },
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+ }
+
+ override def rawCompareAndSet[T](key: String, casId: Long, update: T, exp: Duration)
+ (implicit codec: Codec[T], ec: ExecutionContext): CancelableFuture[Boolean] = {
+
+ triggerOperationFuture[CASResponse, Boolean, OperationFuture](
+ {
+ case CASResponse.OK | CASResponse.OBSERVE_MODIFIED => true
+ case _ => false
+ },
+ () => {
+ val expSecs = expiryToSeconds(exp).toInt
+ client.asyncCAS(withPrefix(key), casId, expSecs, update, codec)
+ },
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+ }
+
+ override def incrementAndGetL(key: String, by: Long, default: Long, exp: Duration): Task[Long] =
+ triggerOperationTask[JavaLong, Long, OperationFuture](
+ { case null => 0L; case nr => nr },
+ () => {
+ val expSecs = expiryToSeconds(exp).toInt
+ client.asyncIncr(key, by, default, expSecs)
+ },
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+
+ override def incrementAndGet(key: String, by: Long, default: Long, exp: Duration)
+ (implicit ec: ExecutionContext): CancelableFuture[Long] = {
+
+ triggerOperationFuture[JavaLong, Long, OperationFuture](
+ { case null => 0L; case nr => nr },
+ () => {
+ val expSecs = expiryToSeconds(exp).toInt
+ client.asyncIncr(key, by, default, expSecs)
+ },
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+ }
+
+ override def decrementAndGetL(key: String, by: Long, default: Long, exp: Duration): Task[Long] =
+ triggerOperationTask[JavaLong, Long, OperationFuture](
+ { case null => 0L; case nr => nr },
+ () => {
+ val expSecs = expiryToSeconds(exp).toInt
+ client.asyncDecr(key, by, default, expSecs)
+ },
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+
+ override def decrementAndGet(key: String, by: Long, default: Long, exp: Duration)
+ (implicit ec: ExecutionContext): CancelableFuture[Long] = {
+
+ triggerOperationFuture[JavaLong, Long, OperationFuture](
+ { case null => 0L; case nr => nr },
+ () => {
+ val expSecs = expiryToSeconds(exp).toInt
+ client.asyncDecr(key, by, default, expSecs)
+ },
+ (async, callback) => {
+ async.addListener(new OperationCompletionListener {
+ def onComplete(future: OperationFuture[_]): Unit =
+ callback()
+ })
+ }
+ )
+ }
+
+ /** Helper that converts anything to Unit. */
+ private[this] val unit: (Any => Unit) =
+ _ => ()
+
+ /** Helper for converting Java's boxed booleans to Scala. */
+ private[this] val javaToScalaBoolean: (JavaBoolean => Boolean) = {
+ case null => false
+ case b => b.booleanValue()
+ }
+
+ private def triggerOperationFuture[A, R, Async[T] <: JavaFuture[T]]
+ (map: A => R, trigger: () => Async[A], addListener: (Async[A], () => Unit) => Unit)
+ (implicit ec: ExecutionContext): CancelableFuture[R] = {
+
+ try {
+ val op = trigger()
+ if (op.isDone) {
+ CancelableFuture.fromTry(Try(map(op.get())))
+ } else {
+ val p = Promise[R]()
+ addListener(op, () => p.complete(Try(map(op.get()))))
+
+ CancelableFuture(p.future, Cancelable { () =>
+ try op.cancel(false)
+ catch { case NonFatal(ex) => ec.reportFailure(ex) }
+ })
+ }
+ } catch {
+ case NonFatal(ex) =>
+ CancelableFuture.failed(ex)
+ }
+ }
+
+ private def triggerOperationTask[A, B, Async[T] <: JavaFuture[T]]
+ (map: A => B, trigger: () => Async[A], addListener: (Async[A], () => Unit) => Unit): Task[B] = {
+
+ @inline def invoke(op: JavaFuture[A], cb: Callback[B], map: A => B)
+ (implicit s: Scheduler): Unit = {
+
+ var streamErrors = true
+ try {
+ val r = map(op.get())
+ streamErrors = false
+ cb.asyncOnSuccess(r)
+ } catch {
+ case NonFatal(ex) =>
+ if (streamErrors) cb.asyncOnError(ex)
+ else s.reportFailure(ex)
+ }
+ }
+
+ Task.unsafeCreate[B] { (ctx, cb) =>
+ implicit val s = ctx.scheduler
+ var streamErrors = true
+ try {
+ val op = trigger()
+ streamErrors = false
+
+ // Fast path?
+ if (op.isDone)
+ s.executeTrampolined(() => invoke(op, cb, map))
+ else {
+ ctx.connection.push(Cancelable { () =>
+ try op.cancel(false)
+ catch { case NonFatal(ex) => s.reportFailure(ex) }
+ })
+
+ addListener(op, () => {
+ // Resetting the frameIndex because we've had an async boundary
+ ctx.frameRef.reset()
+ // Need to pop the current cancelable, as a matter of contract
+ ctx.connection.pop()
+ // Go, go, go
+ invoke(op, cb, map)
+ })
+ }
+ } catch {
+ case NonFatal(ex) =>
+ if (streamErrors) cb.asyncOnError(ex)(ctx.scheduler)
+ else ctx.scheduler.reportFailure(ex)
+ }
+ }
+ }
+
+ @inline
+ private def withPrefix(key: String): String =
+ if (prefix.isEmpty)
+ key
+ else
+ prefix + "-" + key
+
+ private[this] val prefix =
+ config.keysPrefix.getOrElse("")
+
+ private[this] val client = {
+ if (System.getProperty("net.spy.log.LoggerImpl") == null) {
+ System.setProperty(
+ "net.spy.log.LoggerImpl",
+ "shade.memcached.internals.Slf4jLogger"
+ )
+ }
+
+ val conn = {
+ val builder = new ConnectionFactoryBuilder()
+ .setProtocol(
+ if (config.protocol == Protocol.Binary)
+ SpyProtocol.BINARY
+ else
+ SpyProtocol.TEXT
+ )
+ .setDaemon(true)
+ .setFailureMode(config.failureMode match {
+ case FailureMode.Retry =>
+ SpyFailureMode.Retry
+ case FailureMode.Cancel =>
+ SpyFailureMode.Cancel
+ case FailureMode.Redistribute =>
+ SpyFailureMode.Redistribute
+ })
+ .setOpQueueFactory(config.opQueueFactory.orNull)
+ .setReadOpQueueFactory(config.readQueueFactory.orNull)
+ .setWriteOpQueueFactory(config.writeQueueFactory.orNull)
+ .setShouldOptimize(config.shouldOptimize)
+ .setHashAlg(config.hashAlgorithm)
+ .setLocatorType(config.locator)
+
+ val withTimeout = config.operationTimeout match {
+ case _: FiniteDuration =>
+ builder.setOpTimeout(config.operationTimeout.toMillis)
+ case _ =>
+ builder
+ }
+
+ val withAuth = config.authentication match {
+ case Some(credentials) =>
+ withTimeout.setAuthDescriptor(
+ new AuthDescriptor(
+ Array("PLAIN"),
+ new PlainCallbackHandler(credentials.username, credentials.password)
+ )
+ )
+ case None =>
+ withTimeout
+ }
+
+ withAuth
+ }
+
+ import scala.collection.JavaConverters._
+ val addresses = AddrUtil.getAddresses(config.addresses).asScala
+ new MemcachedClient(conn.build(), addresses.asJava)
+ }
+
+ protected final def expiryToSeconds(duration: Duration): Long = duration match {
+ case finite: FiniteDuration =>
+ val seconds = finite.toSeconds
+ if (seconds < 60 * 60 * 24 * 30)
+ seconds
+ else
+ System.currentTimeMillis() / 1000 + seconds
+ case _ =>
+ // infinite duration (set to 0)
+ 0
+ }
+
+ override def close(): Unit = {
+ client.shutdown()
+ }
+}
diff --git a/src/main/scala/shade/memcached/internals/Result.scala b/shade-memcached/src/main/scala/shade/memcached/package.scala
similarity index 53%
rename from src/main/scala/shade/memcached/internals/Result.scala
rename to shade-memcached/src/main/scala/shade/memcached/package.scala
index e7d06fc..2782c3e 100644
--- a/src/main/scala/shade/memcached/internals/Result.scala
+++ b/shade-memcached/src/main/scala/shade/memcached/package.scala
@@ -9,8 +9,15 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade.memcached.internals
+package shade
-sealed trait Result[+T]
-case class SuccessfulResult[+T](key: String, result: T) extends Result[T]
-case class FailedResult(key: String, state: Status) extends Result[Nothing]
+package object memcached {
+ /** A byte array with flags attached, as stored in Memcached.
+ *
+ * Used by [[Codec]] to encode and decode data.
+ */
+ type CachedData = net.spy.memcached.CachedData
+
+ /** A value with a cas identifier attached. */
+ type CASValue[A] = net.spy.memcached.CASValue[A]
+}
diff --git a/src/test/resources/logback.xml b/shade-memcached/src/test/resources/logback.xml
similarity index 100%
rename from src/test/resources/logback.xml
rename to shade-memcached/src/test/resources/logback.xml
diff --git a/shade-memcached/src/test/scala/shade/memcached/CodecsSuite.scala b/shade-memcached/src/test/scala/shade/memcached/CodecsSuite.scala
new file mode 100644
index 0000000..77af27a
--- /dev/null
+++ b/shade-memcached/src/test/scala/shade/memcached/CodecsSuite.scala
@@ -0,0 +1,98 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.memcached
+
+import minitest.SimpleTestSuite
+import minitest.laws.Checkers
+import org.scalacheck.Arbitrary
+import shade.memcached.testModels.{ContentPiece, Impression}
+
+object CodecsSuite extends SimpleTestSuite with Checkers {
+ /** Properties-based checking for a codec of type A. */
+ private def serDesCheck[A](implicit A: Arbitrary[A], codec: Codec[A]): Unit =
+ check1 { value: A =>
+ val encoded = codec.encode(value)
+ val decoded = codec.decode(encoded)
+ decoded == value
+ }
+
+ test("Int") {
+ serDesCheck[Int]
+ }
+
+ test("Long") {
+ serDesCheck[Long]
+ }
+
+ test("Float") {
+ serDesCheck[Float]
+ }
+
+ test("Double") {
+ serDesCheck[Double]
+ }
+
+ test("Byte") {
+ serDesCheck[Byte]
+ }
+
+ test("Boolean") {
+ serDesCheck[Boolean]
+ }
+
+ test("Char") {
+ serDesCheck[Char]
+ }
+
+ test("Short") {
+ serDesCheck[Short]
+ }
+
+ test("String") {
+ serDesCheck[String]
+ }
+
+ test("Array[Byte]") {
+ serDesCheck[Array[Byte]]
+ }
+
+ test("List[String]") {
+ serDesCheck[List[String]]
+ }
+
+ test("testModels.bigInstance") {
+ val value = testModels.bigInstance
+ val codec = implicitly[Codec[Impression]]
+
+ val encoded = codec.encode(value)
+ val decoded = codec.decode(encoded)
+ assertEquals(decoded, value)
+ }
+
+ test("testModels.bigInstance2") {
+ val value = testModels.bigInstance2
+ val codec = implicitly[Codec[Impression]]
+
+ val encoded = codec.encode(value)
+ val decoded = codec.decode(encoded)
+ assertEquals(decoded, value)
+ }
+
+ test("testModels.contentSeq") {
+ val value = testModels.contentSeq
+ val codec = implicitly[Codec[Vector[ContentPiece]]]
+
+ val encoded = codec.encode(value)
+ val decoded = codec.decode(encoded)
+ assertEquals(decoded, value)
+ }
+}
\ No newline at end of file
diff --git a/shade-memcached/src/test/scala/shade/memcached/FakeMemcachedSuite.scala b/shade-memcached/src/test/scala/shade/memcached/FakeMemcachedSuite.scala
new file mode 100644
index 0000000..1a59aba
--- /dev/null
+++ b/shade-memcached/src/test/scala/shade/memcached/FakeMemcachedSuite.scala
@@ -0,0 +1,307 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.memcached
+
+import java.io.{ByteArrayOutputStream, ObjectOutputStream}
+
+import minitest.SimpleTestSuite
+import monix.execution.CancelableFuture
+import monix.execution.Scheduler.Implicits.global
+import shade.memcached.testModels.{Impression, Value}
+
+import scala.concurrent.duration._
+import scala.concurrent.{Await, Future}
+
+object FakeMemcachedSuite extends SimpleTestSuite with MemcachedTestHelpers {
+ implicit val timeout = 5.second
+
+ test("add") {
+ withFakeMemcached { cache =>
+ val op1 = cache.awaitAdd("hello", Value("world"), 5.seconds)
+ assertEquals(op1, true)
+
+ val stored = cache.awaitGet[Value]("hello")
+ assertEquals(stored, Some(Value("world")))
+
+ val op2 = cache.awaitAdd("hello", Value("changed"), 5.seconds)
+ assertEquals(op2, false)
+
+ val changed = cache.awaitGet[Value]("hello")
+ assertEquals(changed, Some(Value("world")))
+ }
+ }
+
+ test("add-null") {
+ withFakeMemcached { cache =>
+ intercept[NullPointerException] {
+ cache.awaitAdd("hello", null, 5.seconds)
+ }
+ }
+ }
+
+ test("get") {
+ withFakeMemcached { cache =>
+ val value = cache.awaitGet[Value]("missing")
+ assertEquals(value, None)
+ }
+ }
+
+ test("set") {
+ withFakeMemcached { cache =>
+ assertEquals(cache.awaitGet[Value]("hello"), None)
+
+ cache.awaitSet("hello", Value("world"), 3.seconds)
+ assertEquals(cache.awaitGet[Value]("hello"), Some(Value("world")))
+
+ cache.awaitSet("hello", Value("changed"), 3.seconds)
+ assertEquals(cache.awaitGet[Value]("hello"), Some(Value("changed")))
+ }
+ }
+
+ test("set-null") {
+ withFakeMemcached { cache =>
+ intercept[NullPointerException] {
+ cache.awaitSet("hello", null, 5.seconds)
+ }
+ }
+ }
+
+ test("delete") {
+ withFakeMemcached { cache =>
+ cache.awaitDelete("hello")
+ assertEquals(cache.awaitGet[Value]("hello"), None)
+
+ cache.awaitSet("hello", Value("world"), 1.minute)
+ assertEquals(cache.awaitGet[Value]("hello"), Some(Value("world")))
+
+ assertEquals(cache.awaitDelete("hello"), true)
+ assertEquals(cache.awaitGet[Value]("hello"), None)
+
+ assertEquals(cache.awaitDelete("hello"), false)
+ }
+ }
+
+ test("compareAndSet") {
+ withFakeMemcached { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ // no can do
+ assertEquals(Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf), false)
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ // set to value1
+ assert(Await.result(cache.compareAndSet("some-key", None, Value("value1"), 5.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), Some(Value("value1")))
+
+ // no can do
+ assert(!Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), Some(Value("value1")))
+
+ // set to value2, from value1
+ assert(Await.result(cache.compareAndSet("some-key", Some(Value("value1")), Value("value2"), 15.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), Some(Value("value2")))
+
+ // no can do
+ assert(!Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), Some(Value("value2")))
+
+ // set to value3, from value2
+ assert(Await.result(cache.compareAndSet("some-key", Some(Value("value2")), Value("value3"), 15.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), Some(Value("value3")))
+ }
+ }
+
+ test("transformAndGet") {
+ withFakeMemcached { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ def incrementValue =
+ cache.transformAndGet[Int]("some-key", 5.seconds) {
+ case None => 1
+ case Some(nr) => nr + 1
+ }
+
+ assert(Await.result(incrementValue, Duration.Inf) == 1)
+ assert(Await.result(incrementValue, Duration.Inf) == 2)
+ assert(Await.result(incrementValue, Duration.Inf) == 3)
+ assert(Await.result(incrementValue, Duration.Inf) == 4)
+ assert(Await.result(incrementValue, Duration.Inf) == 5)
+ assert(Await.result(incrementValue, Duration.Inf) == 6)
+ }
+ }
+
+ test("getAndTransform") {
+ withFakeMemcached { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ def incrementValue = Await.result(
+ cache.getAndTransform[Int]("some-key", 5.seconds) {
+ case None => 1
+ case Some(nr) => nr + 1
+ },
+ Duration.Inf
+ )
+
+ assertEquals(incrementValue, None)
+ assertEquals(incrementValue, Some(1))
+ assertEquals(incrementValue, Some(2))
+ assertEquals(incrementValue, Some(3))
+ assertEquals(incrementValue, Some(4))
+ assertEquals(incrementValue, Some(5))
+ assertEquals(incrementValue, Some(6))
+ }
+ }
+
+ test("transformAndGet-concurrent") {
+ withFakeMemcached { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ def incrementValue(): CancelableFuture[Int] =
+ cache.transformAndGet[Int]("some-key", 60.seconds) {
+ case None => 1
+ case Some(nr) => nr + 1
+ }
+
+ val futures: List[Future[Int]] = (0 until 500).map(nr => incrementValue()).toList
+ val seq = Future.sequence(futures)
+ Await.result(seq, 20.seconds)
+
+ assertEquals(cache.awaitGet[Int]("some-key"), Some(500))
+ }
+ }
+
+ test("getAndTransform-concurrent") {
+ withFakeMemcached { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ def incrementValue =
+ cache.getAndTransform[Int]("some-key", 60.seconds) {
+ case None => 1
+ case Some(nr) => nr + 1
+ }
+
+ val futures: List[Future[Option[Int]]] = (0 until 500).map(nr => incrementValue).toList
+ val seq = Future.sequence(futures)
+ Await.result(seq, 20.seconds)
+
+ assertEquals(cache.awaitGet[Int]("some-key"), Some(500))
+ }
+ }
+
+ test("increment-decrement") {
+ withFakeMemcached { cache =>
+ assertEquals(cache.awaitGet[Int]("hello"), None)
+
+ cache.awaitSet("hello", 123, 1.hour)
+ assertEquals(cache.awaitGet[Int]("hello"), Some(123))
+
+ cache.awaitIncrementAndGet("hello", 1, 0, 1.hour)
+ assertEquals(cache.awaitGet[Int]("hello"), Some(124))
+
+ cache.awaitDecrementAndGet("hello", 1, 0, 1.hour)
+ assertEquals(cache.awaitGet[Int]("hello"), Some(123))
+ }
+ }
+
+ test("increment-decrement-delta") {
+ withFakeMemcached { cache =>
+ assertEquals(cache.awaitGet[Int]("hello"), None)
+
+ cache.awaitSet("hello", 123, 1.hour)
+ assertEquals(cache.awaitGet[Int]("hello"), Some(123))
+
+ cache.awaitIncrementAndGet("hello", 5, 0, 1.hour)
+ assertEquals(cache.awaitGet[Int]("hello"), Some(128))
+
+ cache.awaitDecrementAndGet("hello", 5, 0, 1.hour)
+ assertEquals(cache.awaitGet[Int]("hello"), Some(123))
+ }
+ }
+
+ test("increment-default") {
+ withFakeMemcached { cache =>
+ assertEquals(cache.awaitGet[Int]("hello"), None)
+
+ cache.awaitIncrementAndGet("hello", 1, 0, 1.hour)
+ assertEquals(cache.awaitGet[Int]("hello"), Some(0))
+
+ cache.awaitIncrementAndGet("hello", 1, 0, 1.hour)
+ assertEquals(cache.awaitGet[Int]("hello"), Some(1))
+ }
+ }
+
+ test("increment-overflow") {
+ withFakeMemcached { cache =>
+ assert(cache.awaitIncrementAndGet("hello", 1, Long.MaxValue, 1.hour) == Long.MaxValue)
+ assert(cache.awaitIncrementAndGet("hello", 1, 0, 1.hour) == Long.MinValue)
+ assertEquals(cache.awaitGet[Long]("hello"), Some(Long.MinValue))
+ }
+ }
+
+ test("decrement-underflow") {
+ withFakeMemcached { cache =>
+ assert(cache.awaitDecrementAndGet("hello", 1, Long.MinValue, 1.hour) == Long.MinValue)
+ assert(cache.awaitDecrementAndGet("hello", 1, 0, 1.hour) == Long.MaxValue)
+ assert(cache.awaitDecrementAndGet("hello", 1, 0, 1.hour) == Long.MaxValue - 1)
+ assertEquals(cache.awaitGet[Long]("hello"), Some(Long.MaxValue - 1))
+ }
+ }
+
+ test("big-instance-1") {
+ withFakeMemcached { cache =>
+ val impression = testModels.bigInstance
+ cache.awaitSet(impression.uuid, impression, 60.seconds)
+ assertEquals(cache.awaitGet[Impression](impression.uuid), Some(impression))
+ }
+ }
+
+ test("big-instance-1-manual") {
+ withFakeMemcached { cache =>
+ val byteOut = new ByteArrayOutputStream()
+ val objectOut = new ObjectOutputStream(byteOut)
+
+ val impression = testModels.bigInstance
+ objectOut.writeObject(impression)
+ val byteArray = byteOut.toByteArray
+
+ cache.awaitSet(impression.uuid, byteArray, 60.seconds)
+
+ val inBytes = cache.awaitGet[Array[Byte]](impression.uuid)
+ assert(inBytes.isDefined)
+ assert(inBytes.get.length == byteArray.length)
+ }
+ }
+
+ test("big-instance-2") {
+ withFakeMemcached { cache =>
+ val impression = testModels.bigInstance2
+ cache.awaitSet(impression.uuid, impression, 60.seconds)
+ assertEquals(cache.awaitGet[Impression](impression.uuid), Some(impression))
+ }
+ }
+
+ test("big-instance-3") {
+ withFakeMemcached { cache =>
+ val impression = testModels.bigInstance
+ val result = cache.set(impression.uuid, impression, 60.seconds) flatMap { _ =>
+ cache.get[Impression](impression.uuid)
+ }
+
+ assertEquals(Await.result(result, Duration.Inf), Some(impression))
+ }
+ }
+}
diff --git a/src/test/scala/shade/tests/MemcachedTestHelpers.scala b/shade-memcached/src/test/scala/shade/memcached/MemcachedTestHelpers.scala
similarity index 81%
rename from src/test/scala/shade/tests/MemcachedTestHelpers.scala
rename to shade-memcached/src/test/scala/shade/memcached/MemcachedTestHelpers.scala
index 5938561..ee9abc3 100644
--- a/src/test/scala/shade/tests/MemcachedTestHelpers.scala
+++ b/shade-memcached/src/test/scala/shade/memcached/MemcachedTestHelpers.scala
@@ -9,14 +9,12 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade.tests
+package shade.memcached
-import shade.memcached._
-
-import scala.concurrent.ExecutionContext.Implicits._
import scala.concurrent.duration._
+import monix.execution.Scheduler.Implicits.global
-trait MemcachedTestHelpers extends MemcachedCodecs {
+trait MemcachedTestHelpers {
val defaultConfig = Configuration(
addresses = "127.0.0.1:11211",
authentication = None,
@@ -26,18 +24,18 @@ trait MemcachedTestHelpers extends MemcachedCodecs {
operationTimeout = 15.seconds
)
- def createCacheObject(prefix: String, opTimeout: Option[FiniteDuration] = None, failureMode: Option[FailureMode.Value] = None, isFake: Boolean = false): Memcached = {
+ def createCacheObject(prefix: String, opTimeout: Option[FiniteDuration] = None, failureMode: Option[FailureMode.Value] = None): Memcached = {
val config = defaultConfig.copy(
keysPrefix = defaultConfig.keysPrefix.map(s => s + "-" + prefix),
failureMode = failureMode.getOrElse(defaultConfig.failureMode),
operationTimeout = opTimeout.getOrElse(defaultConfig.operationTimeout)
)
- Memcached(config)(global)
+ Memcached(config)
}
def withFakeMemcached[T](cb: Memcached => T): T = {
- val cache = new FakeMemcached(global)
+ val cache = Memcached.fake(global)
try {
cb(cache)
} finally {
diff --git a/shade-memcached/src/test/scala/shade/memcached/SpyMemcachedSuite.scala b/shade-memcached/src/test/scala/shade/memcached/SpyMemcachedSuite.scala
new file mode 100644
index 0000000..0f14b43
--- /dev/null
+++ b/shade-memcached/src/test/scala/shade/memcached/SpyMemcachedSuite.scala
@@ -0,0 +1,411 @@
+/*
+ * Copyright (c) 2012-2017 by its authors. Some rights reserved.
+ * See the project homepage at: https://github.com/monix/shade
+ *
+ * Licensed under the MIT License (the "License"); you may not use this
+ * file except in compliance with the License. You may obtain a copy
+ * of the License at:
+ *
+ * https://github.com/monix/shade/blob/master/LICENSE.txt
+ */
+
+package shade.memcached
+
+import minitest.SimpleTestSuite
+import shade.memcached.testModels.Value
+import scala.collection.immutable.Seq
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.duration._
+import scala.concurrent.{Await, Future, TimeoutException}
+
+object SpyMemcachedSuite extends SimpleTestSuite with MemcachedTestHelpers {
+ test("add") {
+ withCache("add") { cache =>
+ val op1 = cache.awaitAdd("hello", Value("world"), 5.seconds)
+ assertEquals(op1, true)
+
+ val stored = cache.awaitGet[Value]("hello")
+ assertEquals(stored, Some(Value("world")))
+
+ val op2 = cache.awaitAdd("hello", Value("changed"), 5.seconds)
+ assertEquals(op2, false)
+
+ val changed = cache.awaitGet[Value]("hello")
+ assertEquals(changed, Some(Value("world")))
+ }
+ }
+
+ test("add-null") {
+ withCache("add-null") { cache =>
+ val op1 = cache.awaitAdd("hello", null, 5.seconds)
+ assertEquals(op1, false)
+
+ val stored = cache.awaitGet[Value]("hello")
+ assertEquals(stored, None)
+ }
+ }
+
+ test("get") {
+ withCache("get") { cache =>
+ val value = cache.awaitGet[Value]("missing")
+ assertEquals(value, None)
+ }
+ }
+
+ test("set") {
+ withCache("set") { cache =>
+ assertEquals(cache.awaitGet[Value]("hello"), None)
+
+ cache.awaitSet("hello", Value("world"), 1.seconds)
+ assertEquals(cache.awaitGet[Value]("hello"), Some(Value("world")))
+
+ cache.awaitSet("hello", Value("changed"), 1.second)
+ assertEquals(cache.awaitGet[Value]("hello"), Some(Value("changed")))
+
+ Thread.sleep(3000)
+
+ assertEquals(cache.awaitGet[Value]("hello"), None)
+ }
+ }
+
+ test("set-null") {
+ withCache("set-null") { cache =>
+ val op1 = cache.awaitAdd("hello", null, 5.seconds)
+ assertEquals(op1, false)
+
+ val stored = cache.awaitGet[Value]("hello")
+ assertEquals(stored, None)
+ }
+ }
+
+ test("delete") {
+ withCache("delete") { cache =>
+ cache.awaitDelete("hello")
+ assertEquals(cache.awaitGet[Value]("hello"), None)
+
+ cache.awaitSet("hello", Value("world"), 1.minute)
+ assertEquals(cache.awaitGet[Value]("hello"), Some(Value("world")))
+
+ assertEquals(cache.awaitDelete("hello"), true)
+ assertEquals(cache.awaitGet[Value]("hello"), None)
+
+ assertEquals(cache.awaitDelete("hello"), false)
+ }
+ }
+
+ test("compareAndSet") {
+ withCache("compareAndSet") { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ // no can do
+ assert(!Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ // set to value1
+ assert(Await.result(cache.compareAndSet("some-key", None, Value("value1"), 5.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), Some(Value("value1")))
+
+ // no can do
+ assert(!Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), Some(Value("value1")))
+
+ // set to value2, from value1
+ assert(Await.result(cache.compareAndSet("some-key", Some(Value("value1")), Value("value2"), 15.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), Some(Value("value2")))
+
+ // no can do
+ assert(!Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), Some(Value("value2")))
+
+ // set to value3, from value2
+ assert(Await.result(cache.compareAndSet("some-key", Some(Value("value2")), Value("value3"), 15.seconds), Duration.Inf))
+ assertEquals(cache.awaitGet[Value]("some-key"), Some(Value("value3")))
+ }
+ }
+
+ test("transformAndGet") {
+ withCache("transformAndGet") { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ def incrementValue =
+ cache.transformAndGet[Int]("some-key", 5.seconds) {
+ case None => 1
+ case Some(nr) => nr + 1
+ }
+
+ assert(Await.result(incrementValue, Duration.Inf) == 1)
+ assert(Await.result(incrementValue, Duration.Inf) == 2)
+ assert(Await.result(incrementValue, Duration.Inf) == 3)
+ assert(Await.result(incrementValue, Duration.Inf) == 4)
+ assert(Await.result(incrementValue, Duration.Inf) == 5)
+ assert(Await.result(incrementValue, Duration.Inf) == 6)
+ }
+ }
+
+ test("getAndTransform") {
+ withCache("getAndTransform") { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ def incrementValue = Await.result(
+ cache.getAndTransform[Int]("some-key", 5.seconds) {
+ case None => 1
+ case Some(nr) => nr + 1
+ },
+ Duration.Inf
+ )
+
+ assertEquals(incrementValue, None)
+ assertEquals(incrementValue, Some(1))
+ assertEquals(incrementValue, Some(2))
+ assertEquals(incrementValue, Some(3))
+ assertEquals(incrementValue, Some(4))
+ assertEquals(incrementValue, Some(5))
+ assertEquals(incrementValue, Some(6))
+ }
+ }
+
+ test("transformAndGet-concurrent") {
+ withCache("transformAndGet", opTimeout = Some(10.seconds)) { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ def incrementValue =
+ cache.transformAndGet[Int]("some-key", 60.seconds) {
+ case None => 1
+ case Some(nr) => nr + 1
+ }
+
+ val futures: Seq[Future[Int]] = (0 until 100).map(nr => incrementValue)
+ val seq = Future.sequence(futures)
+ Await.result(seq, 20.seconds)
+
+ assertEquals(cache.awaitGet[Int]("some-key"), Some(100))
+ }
+ }
+
+ test("getAndTransform-concurrent") {
+ withCache("getAndTransform", opTimeout = Some(10.seconds)) { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ def incrementValue =
+ cache.getAndTransform[Int]("some-key", 60.seconds) {
+ case None => 1
+ case Some(nr) => nr + 1
+ }
+
+ val futures: Seq[Future[Option[Int]]] = (0 until 100).map(nr => incrementValue)
+ val seq = Future.sequence(futures)
+ Await.result(seq, 20.seconds)
+
+ assertEquals(cache.awaitGet[Int]("some-key"), Some(100))
+ }
+ }
+
+ test("transformAndGet-concurrent-timeout") {
+ withCache("transformAndGet", opTimeout = Some(300.millis)) { cache =>
+ cache.awaitDelete("some-key")
+ assertEquals(cache.awaitGet[Value]("some-key"), None)
+
+ def incrementValue =
+ cache.transformAndGet[Int]("some-key", 60.seconds) {
+ case None => 1
+ case Some(nr) => nr + 1
+ }
+
+ val initial = Await.result(incrementValue.flatMap(_ => incrementValue), 3.seconds)
+ assertEquals(initial, 2)
+
+ val futures: Seq[Future[Int]] = (0 until 500).map(_ => incrementValue)
+ val seq = Future.sequence(futures)
+ try {
+ Await.result(seq, 20.seconds)
+ fail("should throw exception")
+ } catch {
+ case ex: TimeoutException =>
+ assertEquals(ex.getMessage, "some-key")
+ }
+ }
+ }
+//
+// test("getAndTransform-concurrent-timeout") {
+// withCache("getAndTransform", opTimeout = Some(300.millis)) { cache =>
+// cache.awaitDelete("some-key")
+// assertEquals(cache.awaitGet[Value]("some-key"), None)
+//
+// def incrementValue =
+// cache.getAndTransform[Int]("some-key", 60.seconds) {
+// case None => 1
+// case Some(nr) => nr + 1
+// }
+//
+// val initial = Await.result(incrementValue.flatMap { case _ => incrementValue }, 3.seconds)
+// assertEquals(initial, Some(1))
+//
+// val seq = concurrent.Future.sequence((0 until 500).map(nr => incrementValue))
+//
+// try {
+// Await.result(seq, 20.seconds)
+// fail("should throw exception")
+// } catch {
+// case ex: TimeoutException =>
+// assertEquals(ex.key, "some-key")
+// }
+// }
+// }
+//
+// test("increment-decrement") {
+// withCache("increment-decrement") { cache =>
+// assertEquals(cache.awaitGet[Int]("hello"), None)
+//
+// cache.awaitSet("hello", "123", 1.second)(StringBinaryCodec)
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), Some("123"))
+//
+// cache.awaitIncrement("hello", 1, None, 1.second)
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), Some("124"))
+//
+// cache.awaitDecrement("hello", 1, None, 1.second)
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), Some("123"))
+//
+// Thread.sleep(3000)
+//
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), None)
+// }
+// }
+//
+// test("increment-decrement-delta") {
+// withCache("increment-decrement-delta") { cache =>
+// assertEquals(cache.awaitGet[Int]("hello"), None)
+//
+// cache.awaitSet("hello", "123", 1.second)(StringBinaryCodec)
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), Some("123"))
+//
+// cache.awaitIncrement("hello", 5, None, 1.second)
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), Some("128"))
+//
+// cache.awaitDecrement("hello", 5, None, 1.second)
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), Some("123"))
+//
+// Thread.sleep(3000)
+//
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), None)
+// }
+// }
+//
+// test("increment-default") {
+// withCache("increment-default") { cache =>
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), None)
+//
+// cache.awaitIncrement("hello", 1, Some(0), 1.second)
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), Some("0"))
+//
+// cache.awaitIncrement("hello", 1, Some(0), 1.second)
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), Some("1"))
+//
+// Thread.sleep(3000)
+//
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), None)
+// }
+// }
+//
+// test("increment-overflow") {
+// withCache("increment-overflow") { cache =>
+// assert(cache.awaitIncrement("hello", 1, Some(Long.MaxValue), 1.minute) == Long.MaxValue)
+//
+// assert(cache.awaitIncrement("hello", 1, None, 1.minute) == Long.MinValue)
+//
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), Some("9223372036854775808"))
+// }
+// }
+//
+// test("decrement-underflow") {
+// withCache("increment-underflow") { cache =>
+// assert(cache.awaitDecrement("hello", 1, Some(1), 1.minute) == 1)
+//
+// assert(cache.awaitDecrement("hello", 1, None, 1.minute) == 0)
+//
+// assert(cache.awaitDecrement("hello", 1, None, 1.minute) == 0)
+//
+// assertEquals(cache.awaitGet[String]("hello")(StringBinaryCodec), Some("0"))
+// }
+// }
+//
+// test("vector-inherited-case-classes") {
+// withCache("vector-inherited-case-classes") { cache =>
+// val content = shade.testModels.contentSeq
+// cache.awaitSet("blog-posts", content, 60.seconds)
+// assertEquals(cache.awaitGet[Vector[ContentPiece]]("blog-posts"), Some(content))
+// }
+// }
+//
+// test("big-instance-1") {
+// withCache("big-instance-1") { cache =>
+// val impression = shade.testModels.bigInstance
+// cache.awaitSet(impression.uuid, impression, 60.seconds)
+// assertEquals(cache.awaitGet[Impression](impression.uuid), Some(impression))
+// }
+// }
+//
+// test("big-instance-1-manual") {
+// withCache("big-instance-1-manual") { cache =>
+// val byteOut = new ByteArrayOutputStream()
+// val objectOut = new ObjectOutputStream(byteOut)
+//
+// val impression = shade.testModels.bigInstance
+// objectOut.writeObject(impression)
+// val byteArray = byteOut.toByteArray
+//
+// cache.awaitSet(impression.uuid, byteArray, 60.seconds)
+//
+// val inBytes = cache.awaitGet[Array[Byte]](impression.uuid)
+// assert(inBytes.isDefined)
+// assert(inBytes.get.length == byteArray.length)
+// }
+// }
+//
+// test("big-instance-2") {
+// withCache("big-instance-2") { cache =>
+// val impression = shade.testModels.bigInstance2
+// cache.awaitSet(impression.uuid, impression, 60.seconds)
+// assertEquals(cache.awaitGet[Impression](impression.uuid), Some(impression))
+// }
+// }
+//
+// test("big-instance-3") {
+// withCache("big-instance-3") { cache =>
+// val impression = shade.testModels.bigInstance
+// val result = cache.set(impression.uuid, impression, 60.seconds) flatMap { _ =>
+// cache.get[Impression](impression.uuid)
+// }
+//
+// assert(Await.result(result, Duration.Inf) == Some(impression))
+// }
+// }
+//
+// test("cancel-strategy simple test") {
+// withCache("cancel-strategy", failureMode = Some(FailureMode.Cancel)) { cache =>
+// Thread.sleep(100)
+// val impression = shade.testModels.bigInstance2
+// cache.awaitSet(impression.uuid, impression, 60.seconds)
+// assertEquals(cache.awaitGet[Impression](impression.uuid), Some(impression))
+// }
+// }
+//
+// test("infinite-duration") {
+// withCache("infinite-duration") { cache =>
+// assertEquals(cache.awaitGet[Value]("hello"), None)
+// try {
+// cache.awaitSet("hello", Value("world"), Duration.Inf)
+// assertEquals(cache.awaitGet[Value]("hello"), Some(Value("world")))
+//
+// Thread.sleep(5000)
+// assertEquals(cache.awaitGet[Value]("hello"), Some(Value("world")))
+// } finally {
+// cache.awaitDelete("hello")
+// }
+// }
+// }
+}
diff --git a/src/test/scala/shade/testModels/Advertiser.scala b/shade-memcached/src/test/scala/shade/memcached/testModels/Advertiser.scala
similarity index 80%
rename from src/test/scala/shade/testModels/Advertiser.scala
rename to shade-memcached/src/test/scala/shade/memcached/testModels/Advertiser.scala
index 07e0f5c..1d9812f 100644
--- a/src/test/scala/shade/testModels/Advertiser.scala
+++ b/shade-memcached/src/test/scala/shade/memcached/testModels/Advertiser.scala
@@ -9,9 +9,10 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade.testModels
+package shade.memcached.testModels
-case class Advertiser(
+@SerialVersionUID(389230490582490931L)
+final case class Advertiser(
id: Option[Int],
name: Option[String],
serviceID: String)
diff --git a/src/test/scala/shade/testModels/ContentPiece.scala b/shade-memcached/src/test/scala/shade/memcached/testModels/ContentPiece.scala
similarity index 95%
rename from src/test/scala/shade/testModels/ContentPiece.scala
rename to shade-memcached/src/test/scala/shade/memcached/testModels/ContentPiece.scala
index faa3f76..d83bde6 100644
--- a/src/test/scala/shade/testModels/ContentPiece.scala
+++ b/shade-memcached/src/test/scala/shade/memcached/testModels/ContentPiece.scala
@@ -9,7 +9,7 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade.testModels
+package shade.memcached.testModels
sealed trait ContentPiece extends Serializable {
import ContentPiece._
@@ -67,7 +67,7 @@ sealed trait ContentPiece extends Serializable {
object ContentPiece {
@SerialVersionUID(23904298512054925L)
- case class Image(
+ final case class Image(
id: Option[Int],
url: String,
creator: String,
@@ -77,7 +77,7 @@ object ContentPiece {
tags: Vector[String]) extends ContentPiece
@SerialVersionUID(9785234918758324L)
- case class Title(
+ final case class Title(
id: Option[Int],
url: String,
creator: String,
@@ -86,7 +86,7 @@ object ContentPiece {
tags: Vector[String]) extends ContentPiece
@SerialVersionUID(9348538729520853L)
- case class Article(
+ final case class Article(
id: Option[Int],
url: String,
creator: String,
diff --git a/src/test/scala/shade/testModels/GeoIPLocation.scala b/shade-memcached/src/test/scala/shade/memcached/testModels/GeoIPLocation.scala
similarity index 85%
rename from src/test/scala/shade/testModels/GeoIPLocation.scala
rename to shade-memcached/src/test/scala/shade/memcached/testModels/GeoIPLocation.scala
index 7894739..5444da3 100644
--- a/src/test/scala/shade/testModels/GeoIPLocation.scala
+++ b/shade-memcached/src/test/scala/shade/memcached/testModels/GeoIPLocation.scala
@@ -9,9 +9,10 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade.testModels
+package shade.memcached.testModels
-case class GeoIPLocation(
+@SerialVersionUID(389230490582490932L)
+final case class GeoIPLocation(
countryCode: String,
city: Option[String],
countryName: Option[String],
diff --git a/src/test/scala/shade/testModels/Impression.scala b/shade-memcached/src/test/scala/shade/memcached/testModels/Impression.scala
similarity index 78%
rename from src/test/scala/shade/testModels/Impression.scala
rename to shade-memcached/src/test/scala/shade/memcached/testModels/Impression.scala
index e71d61f..5715353 100644
--- a/src/test/scala/shade/testModels/Impression.scala
+++ b/shade-memcached/src/test/scala/shade/memcached/testModels/Impression.scala
@@ -9,12 +9,13 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade.testModels
+package shade.memcached.testModels
-case class Impression(
+@SerialVersionUID(389230490582490933L)
+final case class Impression(
uuid: String,
session: Session,
- servedOffers: Seq[Offer] = Seq.empty,
+ servedOffers: List[Offer] = Nil,
requestCount: Int = 0,
alreadyServed: Boolean = false,
clientVersion: Option[String] = None)
\ No newline at end of file
diff --git a/src/test/scala/shade/testModels/Offer.scala b/shade-memcached/src/test/scala/shade/memcached/testModels/Offer.scala
similarity index 63%
rename from src/test/scala/shade/testModels/Offer.scala
rename to shade-memcached/src/test/scala/shade/memcached/testModels/Offer.scala
index 7450a3f..3f380d3 100644
--- a/src/test/scala/shade/testModels/Offer.scala
+++ b/shade-memcached/src/test/scala/shade/memcached/testModels/Offer.scala
@@ -9,31 +9,32 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade.testModels
+package shade.memcached.testModels
import java.util.UUID
-case class Offer(
- id: Option[Int],
- name: String,
+@SerialVersionUID(389230490582490966L)
+final case class Offer(
+ id: Option[Int],
+ name: String,
- advertiser: Advertiser,
- offerType: String,
+ advertiser: Advertiser,
+ offerType: String,
- liveDeal: LiveDealInfo,
- creative: OfferCreative,
+ liveDeal: LiveDealInfo,
+ creative: OfferCreative,
- deliveryMechanisms: Seq[String],
+ deliveryMechanisms: Seq[String],
- servedURL: String,
- realURL: Option[String],
+ servedURL: String,
+ realURL: Option[String],
- // is_active and is_valid
- isRunning: Boolean,
- isDynamic: Boolean,
- isGlobal: Boolean,
+ // is_active and is_valid
+ isRunning: Boolean,
+ isDynamic: Boolean,
+ isGlobal: Boolean,
- countries: Seq[String]) {
+ countries: Seq[String]) {
def uniqueToken = {
val token = id.toString + "-" + advertiser.serviceID +
@@ -51,13 +52,15 @@ case class Offer(
}
}
-case class LiveDealInfo(
+@SerialVersionUID(389230490582490944L)
+final case class LiveDealInfo(
uid: Option[String],
expires: Option[Int],
refreshToken: Option[Int],
searchKeyword: Option[String])
-case class OfferCreative(
+@SerialVersionUID(389230490582490955L)
+final case class OfferCreative(
title: String,
description: String,
merchantName: Option[String],
diff --git a/src/test/scala/shade/testModels/Session.scala b/shade-memcached/src/test/scala/shade/memcached/testModels/Session.scala
similarity index 86%
rename from src/test/scala/shade/testModels/Session.scala
rename to shade-memcached/src/test/scala/shade/memcached/testModels/Session.scala
index 922d93b..0f55cc3 100644
--- a/src/test/scala/shade/testModels/Session.scala
+++ b/shade-memcached/src/test/scala/shade/memcached/testModels/Session.scala
@@ -9,9 +9,10 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade.testModels
+package shade.memcached.testModels
-case class Session(
+@SerialVersionUID(389230490582490977L)
+final case class Session(
uuid: String,
deviceID: String,
device: String,
diff --git a/src/test/scala/shade/testModels/UserInfo.scala b/shade-memcached/src/test/scala/shade/memcached/testModels/UserInfo.scala
similarity index 76%
rename from src/test/scala/shade/testModels/UserInfo.scala
rename to shade-memcached/src/test/scala/shade/memcached/testModels/UserInfo.scala
index 74a7dc4..915d5b5 100644
--- a/src/test/scala/shade/testModels/UserInfo.scala
+++ b/shade-memcached/src/test/scala/shade/memcached/testModels/UserInfo.scala
@@ -9,11 +9,12 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade.testModels
+package shade.memcached.testModels
-case class UserInfo(
+@SerialVersionUID(389230490582490988L)
+final case class UserInfo(
ip: String,
forwardedFor: String,
via: String,
agent: String,
- geoip: Option[GeoIPLocation])
\ No newline at end of file
+ geoIP: Option[GeoIPLocation])
\ No newline at end of file
diff --git a/src/test/scala/shade/tests/Value.scala b/shade-memcached/src/test/scala/shade/memcached/testModels/Value.scala
similarity index 76%
rename from src/test/scala/shade/tests/Value.scala
rename to shade-memcached/src/test/scala/shade/memcached/testModels/Value.scala
index a6dbccb..09eac70 100644
--- a/src/test/scala/shade/tests/Value.scala
+++ b/shade-memcached/src/test/scala/shade/memcached/testModels/Value.scala
@@ -9,7 +9,8 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade.tests
+package shade.memcached.testModels
-case class Value(str: String)
+@SerialVersionUID(389230490582490999L)
+final case class Value(str: String)
diff --git a/src/test/scala/shade/testModels/package.scala b/shade-memcached/src/test/scala/shade/memcached/testModels/package.scala
similarity index 96%
rename from src/test/scala/shade/testModels/package.scala
rename to shade-memcached/src/test/scala/shade/memcached/testModels/package.scala
index d235447..3758aaf 100644
--- a/src/test/scala/shade/testModels/package.scala
+++ b/shade-memcached/src/test/scala/shade/memcached/testModels/package.scala
@@ -9,7 +9,7 @@
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
-package shade
+package shade.memcached
import scala.collection.mutable.ArrayBuffer
@@ -76,14 +76,14 @@ package object testModels {
"http://something.com/track?clickID=242323&pubID=982345&something=219&subID=something",
None,
- true,
- false,
- false,
+ isRunning = true,
+ isDynamic = false,
+ isGlobal = false,
List("us")
)
),
112,
- true,
+ alreadyServed = true,
Some("light-fullscreen")
)
@@ -122,7 +122,7 @@ package object testModels {
),
List.empty,
112,
- true,
+ alreadyServed = true,
Some("light-fullscreen")
)
diff --git a/src/main/scala/shade/CacheException.scala b/src/main/scala/shade/CacheException.scala
deleted file mode 100644
index 1bf917a..0000000
--- a/src/main/scala/shade/CacheException.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade
-
-/**
- * Super-class for errors thrown when specific cache-store related
- * errors occur.
- */
-class CacheException(val msg: String) extends RuntimeException(msg)
-
-/**
- * Thrown in case a cache store related operation times out.
- */
-class TimeoutException(val key: String) extends CacheException(key)
-
-/**
- * Thrown in case a cache store related operation is cancelled
- * (like due to closed / broken connections)
- */
-class CancelledException(val key: String) extends CacheException(key)
-
-/**
- * Gets thrown in case the implementation is wrong and
- * mishandled a status. Should never get thrown and
- * if it does, then it's a bug.
- */
-class UnhandledStatusException(msg: String) extends CacheException(msg)
-
-/**
- * Gets thrown in case a key is not found in the cache store on #apply().
- */
-class KeyNotInCacheException(val key: String) extends CacheException(key)
diff --git a/src/main/scala/shade/inmemory/InMemoryCache.scala b/src/main/scala/shade/inmemory/InMemoryCache.scala
deleted file mode 100644
index 2d0d64e..0000000
--- a/src/main/scala/shade/inmemory/InMemoryCache.scala
+++ /dev/null
@@ -1,282 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.inmemory
-
-import monix.execution.Scheduler
-import monix.execution.atomic.AtomicAny
-
-import scala.annotation.tailrec
-import scala.concurrent.duration._
-import scala.concurrent.{ ExecutionContext, Future, Promise }
-import scala.util.Try
-
-trait InMemoryCache extends java.io.Closeable {
- def get[T](key: String): Option[T]
- def getOrElse[T](key: String, default: => T): T
- def add[T](key: String, value: T, expiry: Duration = Duration.Inf): Boolean
- def set[T](key: String, value: T, expiry: Duration = Duration.Inf): Unit
- def delete(key: String): Boolean
- def cachedFuture[T](key: String, expiry: Duration = Duration.Inf)(cb: => Future[T]): Future[T]
-
- def compareAndSet[T](key: String, expected: Option[T], update: T, expiry: Duration = Duration.Inf): Boolean
- def transformAndGet[T](key: String, expiry: Duration = Duration.Inf)(cb: Option[T] => T): T
- def getAndTransform[T](key: String, expiry: Duration = Duration.Inf)(cb: Option[T] => T): Option[T]
-
- def size: Int
-
- def realSize: Int
-
- /**
- * Future that completes when a maintenance window has run,
- * giving the number of items that were removed.
- * @return
- */
- def maintenance: Future[Int]
-
- def close(): Unit
-}
-
-object InMemoryCache {
- def apply(ec: ExecutionContext): InMemoryCache =
- new InMemoryCacheImpl()(ec)
-}
-
-private[inmemory] final class InMemoryCacheImpl(implicit ec: ExecutionContext) extends InMemoryCache {
- private[this] val scheduler = Scheduler(ec)
-
- def get[T](key: String): Option[T] = {
- val currentState = stateRef.get
-
- currentState.values.get(key) match {
- case Some(value) if value.expiresAt > System.currentTimeMillis() =>
- Some(value.value.asInstanceOf[T])
- case _ =>
- None
- }
- }
-
- def getOrElse[T](key: String, default: => T): T =
- get[T](key) match {
- case Some(value) => value
- case None => default
- }
-
- @tailrec
- def add[T](key: String, value: T, expiry: Duration = Duration.Inf): Boolean = {
- val ts = getExpiryTS(expiry)
- val currentTS = System.currentTimeMillis()
- val currentState = stateRef.get
-
- val itemExists = currentState.values.get(key) match {
- case Some(item) if item.expiresAt > currentTS =>
- true
- case _ =>
- false
- }
-
- if (itemExists || ts <= currentTS)
- false
- else {
- val firstExpiry = if (currentState.firstExpiry == 0) ts else math.min(currentState.firstExpiry, ts)
- val values = currentState.values.updated(key, CacheValue(value, ts))
- val newState = currentState.copy(values = values, firstExpiry = firstExpiry)
-
- if (stateRef.compareAndSet(currentState, newState))
- true
- else
- add(key, value, expiry)
- }
- }
-
- def set[T](key: String, value: T, expiry: Duration = Duration.Inf): Unit = {
- val ts = getExpiryTS(expiry)
-
- stateRef.transform { current =>
- val firstExpiry = if (current.firstExpiry == 0) ts else math.min(current.firstExpiry, ts)
- val values = current.values.updated(key, CacheValue(value, ts))
- current.copy(values = values, firstExpiry = firstExpiry)
- }
- }
-
- @tailrec
- def delete(key: String): Boolean = {
- val currentState = stateRef.get
-
- currentState.values.get(key) match {
- case Some(value) =>
- val values = currentState.values - key
- val newState = currentState.copy(values = values)
-
- if (stateRef.compareAndSet(currentState, newState))
- value.expiresAt > System.currentTimeMillis()
- else
- delete(key)
- case None =>
- false
- }
- }
-
- @tailrec
- def cachedFuture[T](key: String, expiry: Duration = Duration.Inf)(cb: => Future[T]): Future[T] = {
- val currentState = stateRef.get
-
- val currentValue = currentState.values.get(key) match {
- case Some(value) if value.expiresAt > System.currentTimeMillis() =>
- Some(value.value.asInstanceOf[Future[T]])
- case _ =>
- None
- }
-
- currentValue match {
- case Some(value) =>
- value
- case None =>
- val ts = getExpiryTS(expiry)
- val promise = Promise[T]()
- val future = promise.future
-
- val values = currentState.values.updated(key, CacheValue(future, ts))
- val firstExpiry = if (currentState.firstExpiry == 0) ts else math.min(currentState.firstExpiry, ts)
- val newState = currentState.copy(values, firstExpiry)
-
- if (stateRef.compareAndSet(currentState, newState)) {
- promise.completeWith(cb)
- future
- } else
- cachedFuture(key, expiry)(cb)
- }
- }
-
- def compareAndSet[T](key: String, expected: Option[T], update: T, expiry: Duration): Boolean = {
- val current = stateRef.get
- val ts = getExpiryTS(expiry)
-
- val currentValue = current.values.get(key) match {
- case Some(value) if value.expiresAt > System.currentTimeMillis() =>
- Some(value.value.asInstanceOf[T])
- case _ =>
- None
- }
-
- if (currentValue != expected)
- false
- else {
- val values = current.values.updated(key, CacheValue(update, ts))
- val firstExpiry = if (current.firstExpiry == 0) ts else math.min(current.firstExpiry, ts)
- val newState = current.copy(values, firstExpiry)
- stateRef.compareAndSet(current, newState)
- }
- }
-
- def transformAndGet[T](key: String, expiry: Duration)(cb: (Option[T]) => T): T =
- stateRef.transformAndExtract { current =>
- val ts = getExpiryTS(expiry)
-
- val currentValue = current.values.get(key) match {
- case Some(value) if value.expiresAt > System.currentTimeMillis() =>
- Some(value.value.asInstanceOf[T])
- case _ =>
- None
- }
-
- val newValue = cb(currentValue)
- val values = current.values.updated(key, CacheValue(newValue, ts))
- val firstExpiry = if (current.firstExpiry == 0) ts else math.min(current.firstExpiry, ts)
- (newValue, current.copy(values, firstExpiry))
- }
-
- def getAndTransform[T](key: String, expiry: Duration)(cb: (Option[T]) => T): Option[T] =
- stateRef.transformAndExtract { current =>
- val ts = getExpiryTS(expiry)
-
- val currentValue = current.values.get(key) match {
- case Some(value) if value.expiresAt > System.currentTimeMillis() =>
- Some(value.value.asInstanceOf[T])
- case _ =>
- None
- }
-
- val newValue = cb(currentValue)
- val values = current.values.updated(key, CacheValue(newValue, ts))
- val firstExpiry = if (current.firstExpiry == 0) ts else math.min(current.firstExpiry, ts)
- (currentValue, current.copy(values, firstExpiry))
- }
-
- def clean(): Boolean = {
- val (promise, difference) = stateRef.transformAndExtract { currentState =>
- val currentTS = System.currentTimeMillis()
-
- if (currentState.firstExpiry <= currentTS) {
- val values = currentState.values.filterNot(value => value._2.expiresAt <= currentTS)
- val difference = currentState.values.size - values.size
-
- val firstExpiry = values.foldLeft(0L) { (acc, elem) =>
- if (acc == 0 || acc < elem._2.expiresAt)
- elem._2.expiresAt
- else
- acc
- }
-
- val newState = CacheState(values, firstExpiry)
- ((currentState.maintenancePromise, difference), newState)
- } else {
- val newState = currentState.copy(maintenancePromise = Promise())
- ((currentState.maintenancePromise, 0), newState)
- }
- }
-
- promise.trySuccess(difference)
- }
-
- def size: Int = {
- val ts = System.currentTimeMillis()
- stateRef.get.values.count(_._2.expiresAt <= ts)
- }
-
- def realSize: Int = stateRef.get.values.size
-
- /**
- * Future that completes when a maintenance window has run,
- * giving the number of items that were removed.
- * @return
- */
- def maintenance: Future[Int] =
- stateRef.get.maintenancePromise.future
-
- def close(): Unit = {
- Try(task.cancel())
- val state = stateRef.getAndSet(CacheState())
- state.maintenancePromise.trySuccess(0)
- }
-
- protected def getExpiryTS(expiry: Duration): Long =
- if (expiry.isFinite())
- System.currentTimeMillis() + expiry.toMillis
- else
- System.currentTimeMillis() + 365.days.toMillis
-
- private[this] val task =
- scheduler.scheduleWithFixedDelay(3.seconds, 3.seconds) {
- clean()
- }
-
- private[this] case class CacheValue(
- value: Any,
- expiresAt: Long)
-
- private[this] case class CacheState(
- values: Map[String, CacheValue] = Map.empty,
- firstExpiry: Long = 0,
- maintenancePromise: Promise[Int] = Promise[Int]())
-
- private[this] val stateRef = AtomicAny(CacheState())
-}
diff --git a/src/main/scala/shade/memcached/Codec.scala b/src/main/scala/shade/memcached/Codec.scala
deleted file mode 100644
index 74994c2..0000000
--- a/src/main/scala/shade/memcached/Codec.scala
+++ /dev/null
@@ -1,181 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.memcached
-
-import java.io._
-
-import scala.annotation.implicitNotFound
-import scala.language.implicitConversions
-import scala.reflect.ClassTag
-import scala.util.control.NonFatal
-
-/**
- * Represents a type class that needs to be implemented
- * for serialization/deserialization to work.
- */
-@implicitNotFound("Could not find any Codec implementation for type ${T}. Please provide one or import shade.memcached.MemcachedCodecs._")
-trait Codec[T] {
- def serialize(value: T): Array[Byte]
- def deserialize(data: Array[Byte]): T
-}
-
-object Codec extends BaseCodecs
-
-trait BaseCodecs {
- implicit object IntBinaryCodec extends Codec[Int] {
- def serialize(value: Int): Array[Byte] =
- Array(
- (value >>> 24).asInstanceOf[Byte],
- (value >>> 16).asInstanceOf[Byte],
- (value >>> 8).asInstanceOf[Byte],
- value.asInstanceOf[Byte]
- )
-
- def deserialize(data: Array[Byte]): Int =
- (data(0).asInstanceOf[Int] & 255) << 24 |
- (data(1).asInstanceOf[Int] & 255) << 16 |
- (data(2).asInstanceOf[Int] & 255) << 8 |
- data(3).asInstanceOf[Int] & 255
- }
-
- implicit object DoubleBinaryCodec extends Codec[Double] {
- import java.lang.{ Double => JvmDouble }
- def serialize(value: Double): Array[Byte] = {
- val l = JvmDouble.doubleToLongBits(value)
- LongBinaryCodec.serialize(l)
- }
-
- def deserialize(data: Array[Byte]): Double = {
- val l = LongBinaryCodec.deserialize(data)
- JvmDouble.longBitsToDouble(l)
- }
- }
-
- implicit object FloatBinaryCodec extends Codec[Float] {
- import java.lang.{ Float => JvmFloat }
- def serialize(value: Float): Array[Byte] = {
- val i = JvmFloat.floatToIntBits(value)
- IntBinaryCodec.serialize(i)
- }
-
- def deserialize(data: Array[Byte]): Float = {
- val i = IntBinaryCodec.deserialize(data)
- JvmFloat.intBitsToFloat(i)
- }
- }
-
- implicit object LongBinaryCodec extends Codec[Long] {
- def serialize(value: Long): Array[Byte] =
- Array(
- (value >>> 56).asInstanceOf[Byte],
- (value >>> 48).asInstanceOf[Byte],
- (value >>> 40).asInstanceOf[Byte],
- (value >>> 32).asInstanceOf[Byte],
- (value >>> 24).asInstanceOf[Byte],
- (value >>> 16).asInstanceOf[Byte],
- (value >>> 8).asInstanceOf[Byte],
- value.asInstanceOf[Byte]
- )
-
- def deserialize(data: Array[Byte]): Long =
- (data(0).asInstanceOf[Long] & 255) << 56 |
- (data(1).asInstanceOf[Long] & 255) << 48 |
- (data(2).asInstanceOf[Long] & 255) << 40 |
- (data(3).asInstanceOf[Long] & 255) << 32 |
- (data(4).asInstanceOf[Long] & 255) << 24 |
- (data(5).asInstanceOf[Long] & 255) << 16 |
- (data(6).asInstanceOf[Long] & 255) << 8 |
- data(7).asInstanceOf[Long] & 255
- }
-
- implicit object BooleanBinaryCodec extends Codec[Boolean] {
- def serialize(value: Boolean): Array[Byte] =
- Array((if (value) 1 else 0).asInstanceOf[Byte])
-
- def deserialize(data: Array[Byte]): Boolean =
- data.isDefinedAt(0) && data(0) == 1
- }
-
- implicit object CharBinaryCodec extends Codec[Char] {
- def serialize(value: Char): Array[Byte] = Array(
- (value >>> 8).asInstanceOf[Byte],
- value.asInstanceOf[Byte]
- )
-
- def deserialize(data: Array[Byte]): Char =
- ((data(0).asInstanceOf[Int] & 255) << 8 |
- data(1).asInstanceOf[Int] & 255)
- .asInstanceOf[Char]
- }
-
- implicit object ShortBinaryCodec extends Codec[Short] {
- def serialize(value: Short): Array[Byte] = Array(
- (value >>> 8).asInstanceOf[Byte],
- value.asInstanceOf[Byte]
- )
-
- def deserialize(data: Array[Byte]): Short =
- ((data(0).asInstanceOf[Short] & 255) << 8 |
- data(1).asInstanceOf[Short] & 255)
- .asInstanceOf[Short]
- }
-
- implicit object StringBinaryCodec extends Codec[String] {
- def serialize(value: String): Array[Byte] = value.getBytes("UTF-8")
- def deserialize(data: Array[Byte]): String = new String(data, "UTF-8")
- }
-
- implicit object ArrayByteBinaryCodec extends Codec[Array[Byte]] {
- def serialize(value: Array[Byte]): Array[Byte] = value
- def deserialize(data: Array[Byte]): Array[Byte] = data
- }
-}
-
-trait GenericCodec {
-
- private[this] class GenericCodec[S <: Serializable](classTag: ClassTag[S]) extends Codec[S] {
-
- def using[T <: Closeable, R](obj: T)(f: T => R): R =
- try
- f(obj)
- finally
- try obj.close() catch {
- case NonFatal(_) => // does nothing
- }
-
- def serialize(value: S): Array[Byte] =
- using (new ByteArrayOutputStream()) { buf =>
- using (new ObjectOutputStream(buf)) { out =>
- out.writeObject(value)
- out.close()
- buf.toByteArray
- }
- }
-
- def deserialize(data: Array[Byte]): S =
- using (new ByteArrayInputStream(data)) { buf =>
- val in = new GenericCodecObjectInputStream(classTag, buf)
- using (in) { inp =>
- inp.readObject().asInstanceOf[S]
- }
- }
- }
-
- implicit def AnyRefBinaryCodec[S <: Serializable](implicit ev: ClassTag[S]): Codec[S] =
- new GenericCodec[S](ev)
-
-}
-
-trait MemcachedCodecs extends BaseCodecs with GenericCodec
-
-object MemcachedCodecs extends MemcachedCodecs
-
diff --git a/src/main/scala/shade/memcached/FakeMemcached.scala b/src/main/scala/shade/memcached/FakeMemcached.scala
deleted file mode 100644
index f15411a..0000000
--- a/src/main/scala/shade/memcached/FakeMemcached.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.memcached
-
-import monix.execution.CancelableFuture
-import shade.UnhandledStatusException
-import shade.inmemory.InMemoryCache
-
-import scala.concurrent.duration.Duration
-import scala.concurrent.{ ExecutionContext, Future }
-
-class FakeMemcached(context: ExecutionContext) extends Memcached {
- private[this] implicit val ec = context
-
- def add[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): CancelableFuture[Boolean] =
- value match {
- case null =>
- CancelableFuture.successful(false)
- case _ =>
- CancelableFuture.successful(cache.add(key, codec.serialize(value).toSeq, exp))
- }
-
- def set[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): CancelableFuture[Unit] =
- value match {
- case null =>
- CancelableFuture.successful(())
- case _ =>
- CancelableFuture.successful(cache.set(key, codec.serialize(value).toSeq, exp))
- }
-
- def delete(key: String): CancelableFuture[Boolean] =
- CancelableFuture.successful(cache.delete(key))
-
- def get[T](key: String)(implicit codec: Codec[T]): Future[Option[T]] =
- Future.successful(cache.get[Seq[Byte]](key)).map(_.map(x => codec.deserialize(x.toArray)))
-
- def compareAndSet[T](key: String, expecting: Option[T], newValue: T, exp: Duration)(implicit codec: Codec[T]): Future[Boolean] =
- Future.successful(cache.compareAndSet(key, expecting.map(x => codec.serialize(x).toSeq), codec.serialize(newValue).toSeq, exp))
-
- def transformAndGet[T](key: String, exp: Duration)(cb: (Option[T]) => T)(implicit codec: Codec[T]): Future[T] =
- Future.successful(cache.transformAndGet[Seq[Byte]](key: String, exp) { current =>
- val cValue = current.map(x => codec.deserialize(x.toArray))
- val update = cb(cValue)
- codec.serialize(update).toSeq
- }) map { update =>
- codec.deserialize(update.toArray)
- }
-
- def getAndTransform[T](key: String, exp: Duration)(cb: (Option[T]) => T)(implicit codec: Codec[T]): Future[Option[T]] =
- Future.successful(cache.getAndTransform[Seq[Byte]](key: String, exp) { current =>
- val cValue = current.map(x => codec.deserialize(x.toArray))
- val update = cb(cValue)
- codec.serialize(update).toSeq
- }) map { update =>
- update.map(x => codec.deserialize(x.toArray))
- }
-
- def increment(key: String, by: Long, default: Option[Long], exp: Duration): Future[Long] = {
- def toBigInt(bytes: Seq[Byte]): BigInt = BigInt(new String(bytes.toArray))
- Future.successful(cache.transformAndGet[Seq[Byte]](key, exp) {
- case Some(current) => (toBigInt(current) + by).toString.getBytes
- case None if default.isDefined => default.get.toString.getBytes
- case None => throw new UnhandledStatusException(s"For key $key - CASNotFoundStatus")
- }).map(toBigInt).map(_.toLong)
- }
-
- def decrement(key: String, by: Long, default: Option[Long], exp: Duration): Future[Long] = {
- def toBigInt(bytes: Seq[Byte]): BigInt = BigInt(new String(bytes.toArray))
- Future.successful(cache.transformAndGet[Seq[Byte]](key, exp) {
- case Some(current) => (toBigInt(current) - by).max(0).toString.getBytes
- case None if default.isDefined => default.get.toString.getBytes
- case None => throw new UnhandledStatusException(s"For key $key - CASNotFoundStatus")
- }).map(toBigInt).map(_.toLong)
- }
-
- def close(): Unit = {
- cache.close()
- }
-
- private[this] val cache = InMemoryCache(context)
-}
diff --git a/src/main/scala/shade/memcached/GenericCodecObjectInputStream.scala b/src/main/scala/shade/memcached/GenericCodecObjectInputStream.scala
deleted file mode 100644
index 6ed242b..0000000
--- a/src/main/scala/shade/memcached/GenericCodecObjectInputStream.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.memcached
-
-import java.io.{ InputStream, ObjectInputStream, ObjectStreamClass }
-
-import scala.reflect.ClassTag
-import scala.util.control.NonFatal
-
-/**
- * Object input stream which tries the thread local class loader.
- *
- * Thread Local class loader is used by SBT to avoid polluting system class loader when
- * running different tasks.
- *
- * This allows deserialization of classes from sub-projects during something like
- * Play's test/run modes.
- */
-class GenericCodecObjectInputStream(classTag: ClassTag[_], in: InputStream)
- extends ObjectInputStream(in) {
-
- private def classTagClassLoader =
- classTag.runtimeClass.getClassLoader
- private def threadLocalClassLoader =
- Thread.currentThread().getContextClassLoader
-
- override protected def resolveClass(desc: ObjectStreamClass): Class[_] = {
- try classTagClassLoader.loadClass(desc.getName) catch {
- case NonFatal(_) =>
- try super.resolveClass(desc) catch {
- case NonFatal(_) =>
- threadLocalClassLoader.loadClass(desc.getName)
- }
- }
- }
-}
diff --git a/src/main/scala/shade/memcached/Memcached.scala b/src/main/scala/shade/memcached/Memcached.scala
deleted file mode 100644
index 1ff4e21..0000000
--- a/src/main/scala/shade/memcached/Memcached.scala
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.memcached
-
-import monix.execution.CancelableFuture
-
-import scala.concurrent.duration.Duration
-import scala.concurrent.{ Await, ExecutionContext, Future }
-
-trait Memcached extends java.io.Closeable {
- /**
- * Adds a value for a given key, if the key doesn't already exist in the cache store.
- *
- * If the key already exists in the cache, the future returned result will be false and the
- * current value will not be overridden. If the key isn't there already, the value
- * will be set and the future returned result will be true.
- *
- * The expiry time can be Duration.Inf (infinite duration).
- *
- * @return either true, in case the value was set, or false otherwise
- */
- def add[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): CancelableFuture[Boolean]
-
- def awaitAdd[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): Boolean =
- Await.result(add(key, value, exp), Duration.Inf)
-
- /**
- * Sets a (key, value) in the cache store.
- *
- * The expiry time can be Duration.Inf (infinite duration).
- */
- def set[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): CancelableFuture[Unit]
-
- def awaitSet[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]) {
- Await.result(set(key, value, exp), Duration.Inf)
- }
-
- /**
- * Deletes a key from the cache store.
- *
- * @return true if a key was deleted or false if there was nothing there to delete
- */
- def delete(key: String): CancelableFuture[Boolean]
-
- def awaitDelete(key: String): Boolean =
- Await.result(delete(key), Duration.Inf)
-
- /**
- * Fetches a value from the cache store.
- *
- * @return Some(value) in case the key is available, or None otherwise (doesn't throw exception on key missing)
- */
- def get[T](key: String)(implicit codec: Codec[T]): Future[Option[T]]
-
- def awaitGet[T](key: String)(implicit codec: Codec[T]): Option[T] =
- Await.result(get[T](key), Duration.Inf)
-
- /**
- * Compare and set.
- *
- * @param expecting should be None in case the key is not expected, or Some(value) otherwise
- * @param exp can be Duration.Inf (infinite) for not setting an expiration
- * @return either true (in case the compare-and-set succeeded) or false otherwise
- */
- def compareAndSet[T](key: String, expecting: Option[T], newValue: T, exp: Duration)(implicit codec: Codec[T]): Future[Boolean]
-
- /**
- * Transforms the given key and returns the new value.
- *
- * The cb callback receives the current value
- * (None in case the key is missing or Some(value) otherwise)
- * and should return the new value to store.
- *
- * The method retries until the compare-and-set operation succeeds, so
- * the callback should have no side-effects.
- *
- * This function can be used for atomic increments and stuff like that.
- *
- * @return the new value
- */
- def transformAndGet[T](key: String, exp: Duration)(cb: Option[T] => T)(implicit codec: Codec[T]): Future[T]
-
- /**
- * Transforms the given key and returns the old value as an Option[T]
- * (None in case the key wasn't in the cache or Some(value) otherwise).
- *
- * The cb callback receives the current value
- * (None in case the key is missing or Some(value) otherwise)
- * and should return the new value to store.
- *
- * The method retries until the compare-and-set operation succeeds, so
- * the callback should have no side-effects.
- *
- * This function can be used for atomic increments and stuff like that.
- *
- * @return the old value
- */
- def getAndTransform[T](key: String, exp: Duration)(cb: Option[T] => T)(implicit codec: Codec[T]): Future[Option[T]]
-
- /**
- * Atomically increments the given key by a non-negative integer amount
- * and returns the new value.
- *
- * The value is stored as the ASCII decimal representation of a 64-bit
- * unsigned integer.
- *
- * If the key does not exist and a default is provided, sets the value of the
- * key to the provided default and expiry time.
- *
- * If the key does not exist and no default is provided, or if the key exists
- * with a value that does not conform to the expected representation, the
- * operation will fail.
- *
- * If the operation succeeds, it returns the new value of the key.
- *
- * Note that the default value is always treated as None when using the text
- * protocol.
- *
- * The expiry time can be Duration.Inf (infinite duration).
- */
- def increment(key: String, by: Long, default: Option[Long], exp: Duration): Future[Long]
-
- def awaitIncrement(key: String, by: Long, default: Option[Long], exp: Duration): Long =
- Await.result(increment(key, by, default, exp), Duration.Inf)
-
- /**
- * Atomically decrements the given key by a non-negative integer amount
- * and returns the new value.
- *
- * The value is stored as the ASCII decimal representation of a 64-bit
- * unsigned integer.
- *
- * If the key does not exist and a default is provided, sets the value of the
- * key to the provided default and expiry time.
- *
- * If the key does not exist and no default is provided, or if the key exists
- * with a value that does not conform to the expected representation, the
- * operation will fail.
- *
- * If the operation succeeds, it returns the new value of the key.
- *
- * Note that the default value is always treated as None when using the text
- * protocol.
- *
- * The expiry time can be Duration.Inf (infinite duration).
- */
- def decrement(key: String, by: Long, default: Option[Long], exp: Duration): Future[Long]
-
- def awaitDecrement(key: String, by: Long, default: Option[Long], exp: Duration): Long =
- Await.result(decrement(key, by, default, exp), Duration.Inf)
-
- /**
- * Shuts down the cache instance, performs any additional cleanups necessary.
- */
- def close(): Unit
-}
-
-object Memcached {
- /**
- * Builds a [[Memcached]] instance. Needs a [[Configuration]].
- */
- def apply(config: Configuration)(implicit ec: ExecutionContext): Memcached =
- new MemcachedImpl(config, ec)
-}
diff --git a/src/main/scala/shade/memcached/MemcachedImpl.scala b/src/main/scala/shade/memcached/MemcachedImpl.scala
deleted file mode 100644
index 7e1862a..0000000
--- a/src/main/scala/shade/memcached/MemcachedImpl.scala
+++ /dev/null
@@ -1,382 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.memcached
-
-import java.util.concurrent.TimeUnit
-
-import monix.execution.{ CancelableFuture, Scheduler }
-import net.spy.memcached.ConnectionFactoryBuilder.{ Protocol => SpyProtocol }
-import net.spy.memcached.auth.{ AuthDescriptor, PlainCallbackHandler }
-import net.spy.memcached.ops.Mutator
-import net.spy.memcached.{ FailureMode => SpyFailureMode, _ }
-import shade.memcached.internals.{ FailedResult, SuccessfulResult, _ }
-import shade.{ CancelledException, TimeoutException, UnhandledStatusException }
-
-import scala.concurrent.duration._
-import scala.concurrent.{ ExecutionContext, Future }
-
-/**
- * Memcached client implementation based on SpyMemcached.
- *
- * See the parent trait (Cache) for API docs.
- */
-class MemcachedImpl(config: Configuration, ec: ExecutionContext) extends Memcached {
- private[this] implicit val context = ec
-
- /**
- * Adds a value for a given key, if the key doesn't already exist in the cache store.
- *
- * If the key already exists in the cache, the future returned result will be false and the
- * current value will not be overridden. If the key isn't there already, the value
- * will be set and the future returned result will be true.
- *
- * The expiry time can be Duration.Inf (infinite duration).
- *
- * @return either true, in case the value was set, or false otherwise
- */
- def add[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): CancelableFuture[Boolean] =
- value match {
- case null =>
- CancelableFuture.successful(false)
- case _ =>
- instance.realAsyncAdd(withPrefix(key), codec.serialize(value), 0, exp, config.operationTimeout) map {
- case SuccessfulResult(givenKey, Some(_)) =>
- true
- case SuccessfulResult(givenKey, None) =>
- false
- case failure: FailedResult =>
- throwExceptionOn(failure)
- }
- }
-
- /**
- * Sets a (key, value) in the cache store.
- *
- * The expiry time can be Duration.Inf (infinite duration).
- */
- def set[T](key: String, value: T, exp: Duration)(implicit codec: Codec[T]): CancelableFuture[Unit] =
- value match {
- case null =>
- CancelableFuture.successful(())
- case _ =>
- instance.realAsyncSet(withPrefix(key), codec.serialize(value), 0, exp, config.operationTimeout) map {
- case SuccessfulResult(givenKey, _) =>
- ()
- case failure: FailedResult =>
- throwExceptionOn(failure)
- }
- }
-
- /**
- * Deletes a key from the cache store.
- *
- * @return true if a key was deleted or false if there was nothing there to delete
- */
- def delete(key: String): CancelableFuture[Boolean] =
- instance.realAsyncDelete(withPrefix(key), config.operationTimeout) map {
- case SuccessfulResult(givenKey, result) =>
- result
- case failure: FailedResult =>
- throwExceptionOn(failure)
- }
-
- /**
- * Fetches a value from the cache store.
- *
- * @return Some(value) in case the key is available, or None otherwise (doesn't throw exception on key missing)
- */
- def get[T](key: String)(implicit codec: Codec[T]): Future[Option[T]] =
- instance.realAsyncGet(withPrefix(key), config.operationTimeout) map {
- case SuccessfulResult(givenKey, option) =>
- option.map(codec.deserialize)
- case failure: FailedResult =>
- throwExceptionOn(failure)
- }
-
- def getOrElse[T](key: String, default: => T)(implicit codec: Codec[T]): Future[T] =
- get[T](key) map {
- case Some(value) => value
- case None => default
- }
-
- /**
- * Compare and set.
- *
- * @param expecting should be None in case the key is not expected, or Some(value) otherwise
- * @param exp can be Duration.Inf (infinite) for not setting an expiration
- * @return either true (in case the compare-and-set succeeded) or false otherwise
- */
- def compareAndSet[T](key: String, expecting: Option[T], newValue: T, exp: Duration)(implicit codec: Codec[T]): Future[Boolean] =
- expecting match {
- case None =>
- add[T](key, newValue, exp)
-
- case Some(expectingValue) =>
- instance.realAsyncGets(withPrefix(key), config.operationTimeout) flatMap {
- case SuccessfulResult(givenKey, None) =>
- Future.successful(false)
-
- case SuccessfulResult(givenKey, Some((currentData, casID))) =>
- if (codec.deserialize(currentData) == expectingValue)
- instance.realAsyncCAS(withPrefix(key), casID, 0, codec.serialize(newValue), exp, config.operationTimeout) map {
- case SuccessfulResult(_, bool) =>
- bool
- case failure: FailedResult =>
- throwExceptionOn(failure)
- }
- else
- Future.successful(false)
- case failure: FailedResult =>
- throwExceptionOn(failure)
- }
- }
-
- /**
- * Used by both transformAndGet and getAndTransform for code reusability.
- *
- * @param f is the function that dictates what gets returned (either the old or the new value)
- */
- private[this] def genericTransform[T, R](key: String, exp: Duration, cb: Option[T] => T)(f: (Option[T], T) => R)(implicit codec: Codec[T]): Future[R] = {
- val keyWithPrefix = withPrefix(key)
- val timeoutAt = System.currentTimeMillis() + config.operationTimeout.toMillis
-
- /*
- * Inner function used for retrying compare-and-set operations
- * with a maximum threshold of retries.
- *
- * @throws TransformOverflowException in case the maximum number of
- * retries is reached
- */
- def loop(retry: Int): Future[R] = {
- val remainingTime = timeoutAt - System.currentTimeMillis()
-
- if (remainingTime <= 0)
- throw new TimeoutException(key)
-
- instance.realAsyncGets(keyWithPrefix, remainingTime.millis) flatMap {
- case SuccessfulResult(_, None) =>
- val result = cb(None)
- add(key, result, exp) flatMap {
- case true =>
- Future.successful(f(None, result))
- case false =>
- loop(retry + 1)
- }
- case SuccessfulResult(_, Some((current, casID))) =>
- val currentOpt = Some(codec.deserialize(current))
- val result = cb(currentOpt)
-
- instance.realAsyncCAS(keyWithPrefix, casID, 0, codec.serialize(result), exp, remainingTime.millis) flatMap {
- case SuccessfulResult(_, true) =>
- Future.successful(f(currentOpt, result))
- case SuccessfulResult(_, false) =>
- loop(retry + 1)
- case failure: FailedResult =>
- throwExceptionOn(failure)
- }
-
- case failure: FailedResult =>
- throwExceptionOn(failure)
- }
- }
-
- loop(0)
- }
-
- /**
- * Transforms the given key and returns the new value.
- *
- * The cb callback receives the current value
- * (None in case the key is missing or Some(value) otherwise)
- * and should return the new value to store.
- *
- * The method retries until the compare-and-set operation succeeds, so
- * the callback should have no side-effects.
- *
- * This function can be used for atomic incrementers and stuff like that.
- *
- * @return the new value
- */
- def transformAndGet[T](key: String, exp: Duration)(cb: (Option[T]) => T)(implicit codec: Codec[T]): Future[T] =
- genericTransform(key, exp, cb) {
- case (oldValue, newValue) => newValue
- }
-
- /**
- * Transforms the given key and returns the old value as an Option[T]
- * (None in case the key wasn't in the cache or Some(value) otherwise).
- *
- * The cb callback receives the current value
- * (None in case the key is missing or Some(value) otherwise)
- * and should return the new value to store.
- *
- * The method retries until the compare-and-set operation succeeds, so
- * the callback should have no side-effects.
- *
- * This function can be used for atomic incrementers and stuff like that.
- *
- * @return the old value
- */
- def getAndTransform[T](key: String, exp: Duration)(cb: (Option[T]) => T)(implicit codec: Codec[T]): Future[Option[T]] =
- genericTransform(key, exp, cb) {
- case (oldValue, newValue) => oldValue
- }
-
- def close(): Unit = {
- instance.shutdown(3, TimeUnit.SECONDS)
- }
-
- /**
- * Atomically increments the given key by a non-negative integer amount
- * and returns the new value.
- *
- * The value is stored as the ASCII decimal representation of a 64-bit
- * unsigned integer.
- *
- * If the key does not exist and a default is provided, sets the value of the
- * key to the provided default and expiry time.
- *
- * If the key does not exist and no default is provided, or if the key exists
- * with a value that does not conform to the expected representation, the
- * operation will fail.
- *
- * If the operation succeeds, it returns the new value of the key.
- *
- * Note that the default value is always treated as None when using the text
- * protocol.
- *
- * The expiry time can be Duration.Inf (infinite duration).
- */
- def increment(key: String, by: Long, default: Option[Long], exp: Duration): Future[Long] =
- instance.realAsyncMutate(withPrefix(key), by, Mutator.incr, default, exp, config.operationTimeout) map {
- case SuccessfulResult(_, value) =>
- value
- case failure: FailedResult =>
- throwExceptionOn(failure)
- }
-
- /**
- * Atomically decrements the given key by a non-negative integer amount
- * and returns the new value.
- *
- * The value is stored as the ASCII decimal representation of a 64-bit
- * unsigned integer.
- *
- * If the key does not exist and a default is provided, sets the value of the
- * key to the provided default and expiry time.
- *
- * If the key does not exist and no default is provided, or if the key exists
- * with a value that does not conform to the expected representation, the
- * operation will fail.
- *
- * If the operation succeeds, it returns the new value of the key.
- *
- * Note that the default value is always treated as None when using the text
- * protocol.
- *
- * The expiry time can be Duration.Inf (infinite duration).
- */
- def decrement(key: String, by: Long, default: Option[Long], exp: Duration): Future[Long] =
- instance.realAsyncMutate(withPrefix(key), by, Mutator.decr, default, exp, config.operationTimeout) map {
- case SuccessfulResult(_, value) =>
- value
- case failure: FailedResult =>
- throwExceptionOn(failure)
- }
-
- private[this] def throwExceptionOn(failure: FailedResult) = failure match {
- case FailedResult(k, TimedOutStatus) =>
- throw new TimeoutException(withoutPrefix(k))
- case FailedResult(k, CancelledStatus) =>
- throw new CancelledException(withoutPrefix(k))
- case FailedResult(k, unhandled) =>
- throw new UnhandledStatusException(
- s"For key ${withoutPrefix(k)} - ${unhandled.getClass.getName}"
- )
- }
-
- @inline
- private[this] def withPrefix(key: String): String =
- if (prefix.isEmpty)
- key
- else
- prefix + "-" + key
-
- @inline
- private[this] def withoutPrefix[T](key: String): String = {
- if (!prefix.isEmpty && key.startsWith(prefix + "-"))
- key.substring(prefix.length + 1)
- else
- key
- }
-
- private[this] val prefix = config.keysPrefix.getOrElse("")
- private[this] val instance = {
- if (System.getProperty("net.spy.log.LoggerImpl") == null) {
- System.setProperty(
- "net.spy.log.LoggerImpl",
- "shade.memcached.internals.Slf4jLogger"
- )
- }
-
- val conn = {
- val builder = new ConnectionFactoryBuilder()
- .setProtocol(
- if (config.protocol == Protocol.Binary)
- SpyProtocol.BINARY
- else
- SpyProtocol.TEXT
- )
- .setDaemon(true)
- .setFailureMode(config.failureMode match {
- case FailureMode.Retry =>
- SpyFailureMode.Retry
- case FailureMode.Cancel =>
- SpyFailureMode.Cancel
- case FailureMode.Redistribute =>
- SpyFailureMode.Redistribute
- })
- .setOpQueueFactory(config.opQueueFactory.orNull)
- .setReadOpQueueFactory(config.readQueueFactory.orNull)
- .setWriteOpQueueFactory(config.writeQueueFactory.orNull)
- .setShouldOptimize(config.shouldOptimize)
- .setHashAlg(config.hashAlgorithm)
- .setLocatorType(config.locator)
-
- val withTimeout = config.operationTimeout match {
- case duration: FiniteDuration =>
- builder.setOpTimeout(config.operationTimeout.toMillis)
- case _ =>
- builder
- }
-
- val withAuth = config.authentication match {
- case Some(credentials) =>
- withTimeout.setAuthDescriptor(
- new AuthDescriptor(
- Array("PLAIN"),
- new PlainCallbackHandler(credentials.username, credentials.password)
- )
- )
- case None =>
- withTimeout
- }
-
- withAuth
- }
-
- import scala.collection.JavaConverters._
- val addresses = AddrUtil.getAddresses(config.addresses).asScala
- new SpyMemcachedIntegration(conn.build(), addresses, Scheduler(context))
- }
-}
-
diff --git a/src/main/scala/shade/memcached/internals/PartialResult.scala b/src/main/scala/shade/memcached/internals/PartialResult.scala
deleted file mode 100644
index ac36c55..0000000
--- a/src/main/scala/shade/memcached/internals/PartialResult.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.memcached.internals
-
-import monix.execution.atomic.AtomicAny
-
-import scala.concurrent.{ Future, Promise }
-import scala.util.{ Success, Try }
-
-sealed trait PartialResult[+T]
-case class FinishedResult[T](result: Try[Result[T]]) extends PartialResult[T]
-case class FutureResult[T](result: Future[Result[T]]) extends PartialResult[T]
-case object NoResultAvailable extends PartialResult[Nothing]
-
-final class MutablePartialResult[T] {
- def tryComplete(result: Try[Result[T]]): Boolean =
- _result.compareAndSet(NoResultAvailable, FinishedResult(result))
-
- def tryCompleteWith(result: Future[Result[T]]): Boolean =
- _result.compareAndSet(NoResultAvailable, FutureResult(result))
-
- def completePromise(key: String, promise: Promise[Result[T]]): Unit = {
- _result.get match {
- case FinishedResult(result) =>
- promise.tryComplete(result)
- case FutureResult(result) =>
- promise.tryCompleteWith(result)
- case NoResultAvailable =>
- promise.tryComplete(Success(FailedResult(key, IllegalCompleteStatus)))
- }
- }
-
- private[this] val _result =
- AtomicAny(NoResultAvailable: PartialResult[T])
-}
diff --git a/src/main/scala/shade/memcached/internals/SpyMemcachedIntegration.scala b/src/main/scala/shade/memcached/internals/SpyMemcachedIntegration.scala
deleted file mode 100644
index 41d49f9..0000000
--- a/src/main/scala/shade/memcached/internals/SpyMemcachedIntegration.scala
+++ /dev/null
@@ -1,486 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.memcached.internals
-
-import java.io.IOException
-import java.net.{ InetSocketAddress, SocketAddress }
-import java.util.concurrent.{ CountDownLatch, TimeUnit }
-
-import monix.execution.{ Cancelable, CancelableFuture, Scheduler }
-import monix.execution.atomic.{ Atomic, AtomicBoolean }
-import net.spy.memcached._
-import net.spy.memcached.auth.{ AuthDescriptor, AuthThreadMonitor }
-import net.spy.memcached.compat.SpyObject
-import net.spy.memcached.ops._
-import shade.UnhandledStatusException
-
-import scala.collection.JavaConverters._
-import scala.concurrent.duration.{ Duration, FiniteDuration }
-import scala.concurrent.{ ExecutionContext, Promise }
-import scala.util.control.NonFatal
-import scala.util.{ Failure, Success, Try }
-
-/**
- * Hooking in the SpyMemcached Internals.
- *
- * @param cf is Spy's Memcached connection factory
- * @param addrs is a list of addresses to connect to
- * @param scheduler is for making timeouts work
- */
-class SpyMemcachedIntegration(cf: ConnectionFactory, addrs: Seq[InetSocketAddress], scheduler: Scheduler)
- extends SpyObject with ConnectionObserver {
-
- require(cf != null, "Invalid connection factory")
- require(addrs != null && addrs.nonEmpty, "Invalid addresses list")
- assert(cf.getOperationTimeout > 0, "Operation timeout must be positive")
-
- protected final val opFact: OperationFactory = cf.getOperationFactory
- protected final val mconn: MemcachedConnection = cf.createConnection(addrs.asJava)
- protected final val authDescriptor: Option[AuthDescriptor] = Option(cf.getAuthDescriptor)
- protected final val authMonitor: AuthThreadMonitor = new AuthThreadMonitor
- protected final val shuttingDown: AtomicBoolean = Atomic(false)
-
- locally {
- if (authDescriptor.isDefined)
- addObserver(this)
- }
-
- /**
- * Add a connection observer.
- *
- * If connections are already established, your observer will be called with
- * the address and -1.
- *
- * @param obs the ConnectionObserver you wish to add
- * @return true if the observer was added.
- */
- def addObserver(obs: ConnectionObserver): Boolean = {
- val rv = mconn.addObserver(obs)
-
- if (rv)
- for (node <- mconn.getLocator.getAll.asScala)
- if (node.isActive)
- obs.connectionEstablished(node.getSocketAddress, -1)
- rv
- }
-
- def connectionLost(sa: SocketAddress): Unit = {
- // Don't care?
- }
-
- /**
- * A connection has just successfully been established on the given socket.
- *
- * @param sa the address of the node whose connection was established
- * @param reconnectCount the number of attempts before the connection was
- * established
- */
- def connectionEstablished(sa: SocketAddress, reconnectCount: Int): Unit = {
- for (authDescriptor <- this.authDescriptor) {
- if (authDescriptor.authThresholdReached)
- this.shutdown()
- authMonitor.authConnection(mconn, opFact, authDescriptor, findNode(sa))
- }
- }
-
- /**
- * Wait for the queues to die down.
- *
- * @param timeout the amount of time time for shutdown
- * @param unit the TimeUnit for the timeout
- * @return result of the request for the wait
- * @throws IllegalStateException in the rare circumstance where queue is too
- * full to accept any more requests
- */
- def waitForQueues(timeout: Long, unit: TimeUnit): Boolean = {
- val blatch: CountDownLatch = broadcastOp(new BroadcastOpFactory {
- def newOp(n: MemcachedNode, latch: CountDownLatch): Operation = {
- opFact.noop(new OperationCallback {
- def complete() {
- latch.countDown()
- }
-
- def receivedStatus(s: OperationStatus) {}
- })
- }
- }, mconn.getLocator.getAll, checkShuttingDown = false)
-
- try {
- blatch.await(timeout, unit)
- } catch {
- case e: InterruptedException =>
- throw new RuntimeException("Interrupted waiting for queues", e)
- }
- }
-
- def broadcastOp(of: BroadcastOpFactory): CountDownLatch =
- broadcastOp(of, mconn.getLocator.getAll, checkShuttingDown = true)
-
- def broadcastOp(of: BroadcastOpFactory, nodes: java.util.Collection[MemcachedNode]): CountDownLatch =
- broadcastOp(of, nodes, checkShuttingDown = true)
-
- /**
- * Broadcast an operation to a specific collection of nodes.
- */
- private def broadcastOp(of: BroadcastOpFactory, nodes: java.util.Collection[MemcachedNode], checkShuttingDown: Boolean): CountDownLatch = {
- if (checkShuttingDown && shuttingDown.get)
- throw new IllegalStateException("Shutting down")
- mconn.broadcastOperation(of, nodes)
- }
-
- private def findNode(sa: SocketAddress): MemcachedNode = {
- val node = mconn.getLocator.getAll.asScala.find(_.getSocketAddress == sa)
- assert(node.isDefined, s"Couldn't find node connected to $sa")
- node.get
- }
-
- /**
- * Shut down immediately.
- */
- def shutdown(): Unit = {
- shutdown(-1, TimeUnit.SECONDS)
- }
-
- def shutdown(timeout: Long, unit: TimeUnit): Boolean = {
- // Guard against double shutdowns (bug 8).
- if (!shuttingDown.compareAndSet(expect = false, update = true)) {
- getLogger.info("Suppressing duplicate attempt to shut down")
- false
- } else {
- val baseName: String = mconn.getName
- mconn.setName(s"$baseName - SHUTTING DOWN")
-
- try {
- if (timeout > 0) {
- mconn.setName(s"$baseName - SHUTTING DOWN (waiting)")
- waitForQueues(timeout, unit)
- } else
- true
- } finally {
- try {
- mconn.setName(s"$baseName - SHUTTING DOWN (telling client)")
- mconn.shutdown()
- mconn.setName(s"$baseName - SHUTTING DOWN (informed client)")
- } catch {
- case e: IOException =>
- getLogger.warn("exception while shutting down": Any, e: Throwable)
- }
- }
- }
- }
-
- def realAsyncGet(key: String, timeout: FiniteDuration)(implicit ec: ExecutionContext): CancelableFuture[Result[Option[Array[Byte]]]] = {
- val promise = Promise[Result[Option[Array[Byte]]]]()
- val result = new MutablePartialResult[Option[Array[Byte]]]
-
- val op: GetOperation = opFact.get(key, new GetOperation.Callback {
- def receivedStatus(opStatus: OperationStatus) {
- handleStatus(opStatus, key, result) {
- case CASNotFoundStatus =>
- result.tryComplete(Success(SuccessfulResult(key, None)))
- case CASSuccessStatus =>
- }
- }
-
- def gotData(k: String, flags: Int, data: Array[Byte]) {
- assert(key == k, "Wrong key returned")
- result.tryComplete(Success(SuccessfulResult(key, Option(data))))
- }
-
- def complete() {
- result.completePromise(key, promise)
- }
- })
-
- mconn.enqueueOperation(key, op)
- prepareFuture(key, op, promise, timeout)
- }
-
- def realAsyncSet(key: String, data: Array[Byte], flags: Int, exp: Duration, timeout: FiniteDuration)(implicit ec: ExecutionContext): CancelableFuture[Result[Long]] = {
- val promise = Promise[Result[Long]]()
- val result = new MutablePartialResult[Long]
-
- val op: Operation = opFact.store(StoreType.set, key, flags, expiryToSeconds(exp).toInt, data, new StoreOperation.Callback {
- def receivedStatus(opStatus: OperationStatus) {
- handleStatus(opStatus, key, result) {
- case CASSuccessStatus =>
- }
- }
-
- def gotData(key: String, cas: Long) {
- result.tryComplete(Success(SuccessfulResult(key, cas)))
- }
-
- def complete() {
- result.completePromise(key, promise)
- }
- })
-
- mconn.enqueueOperation(key, op)
- prepareFuture(key, op, promise, timeout)
- }
-
- def realAsyncAdd(key: String, data: Array[Byte], flags: Int, exp: Duration, timeout: FiniteDuration)(implicit ec: ExecutionContext): CancelableFuture[Result[Option[Long]]] = {
- val promise = Promise[Result[Option[Long]]]()
- val result = new MutablePartialResult[Option[Long]]
-
- val op: Operation = opFact.store(StoreType.add, key, flags, expiryToSeconds(exp).toInt, data, new StoreOperation.Callback {
- def receivedStatus(opStatus: OperationStatus) {
- handleStatus(opStatus, key, result) {
- case CASExistsStatus =>
- result.tryComplete(Success(SuccessfulResult(key, None)))
- case CASSuccessStatus =>
- }
- }
-
- def gotData(key: String, cas: Long) {
- result.tryComplete(Success(SuccessfulResult(key, Some(cas))))
- }
-
- def complete() {
- result.completePromise(key, promise)
- }
- })
-
- mconn.enqueueOperation(key, op)
- prepareFuture(key, op, promise, timeout)
- }
-
- def realAsyncDelete(key: String, timeout: FiniteDuration)(implicit ec: ExecutionContext): CancelableFuture[Result[Boolean]] = {
- val promise = Promise[Result[Boolean]]()
- val result = new MutablePartialResult[Boolean]
-
- val op = opFact.delete(key, new DeleteOperation.Callback {
- def gotData(cas: Long): Unit = ()
-
- def complete() {
- result.completePromise(key, promise)
- }
-
- def receivedStatus(opStatus: OperationStatus) {
- handleStatus(opStatus, key, result) {
- case CASSuccessStatus =>
- result.tryComplete(Success(SuccessfulResult(key, true)))
- case CASNotFoundStatus =>
- result.tryComplete(Success(SuccessfulResult(key, false)))
- }
- }
- })
-
- mconn.enqueueOperation(key, op)
- prepareFuture(key, op, promise, timeout)
- }
-
- def realAsyncGets(key: String, timeout: FiniteDuration)(implicit ec: ExecutionContext): CancelableFuture[Result[Option[(Array[Byte], Long)]]] = {
- val promise = Promise[Result[Option[(Array[Byte], Long)]]]()
- val result = new MutablePartialResult[Option[(Array[Byte], Long)]]
-
- val op: Operation = opFact.gets(key, new GetsOperation.Callback {
- def receivedStatus(opStatus: OperationStatus) {
- handleStatus(opStatus, key, result) {
- case CASNotFoundStatus =>
- result.tryComplete(Success(SuccessfulResult(key, None)))
- case CASSuccessStatus =>
- }
- }
-
- def gotData(receivedKey: String, flags: Int, cas: Long, data: Array[Byte]) {
- assert(key == receivedKey, "Wrong key returned")
- assert(cas > 0, s"CAS was less than zero: $cas")
-
- result.tryComplete(Try {
- SuccessfulResult(key, Option(data).map(d => (d, cas)))
- })
- }
-
- def complete() {
- result.completePromise(key, promise)
- }
- })
-
- mconn.enqueueOperation(key, op)
- prepareFuture(key, op, promise, timeout)
- }
-
- def realAsyncCAS(key: String, casID: Long, flags: Int, data: Array[Byte], exp: Duration, timeout: FiniteDuration)(implicit ec: ExecutionContext): CancelableFuture[Result[Boolean]] = {
- val promise = Promise[Result[Boolean]]()
- val result = new MutablePartialResult[Boolean]
-
- val op = opFact.cas(StoreType.set, key, casID, flags, expiryToSeconds(exp).toInt, data, new StoreOperation.Callback {
- def receivedStatus(opStatus: OperationStatus) {
- handleStatus(opStatus, key, result) {
- case CASSuccessStatus =>
- result.tryComplete(Success(SuccessfulResult(key, true)))
- case CASExistsStatus =>
- result.tryComplete(Success(SuccessfulResult(key, false)))
- case CASNotFoundStatus =>
- result.tryComplete(Success(SuccessfulResult(key, false)))
- }
- }
-
- def gotData(k: String, cas: Long) {
- assert(key == k, "Wrong key returned")
- }
-
- def complete() {
- result.completePromise(key, promise)
- }
- })
-
- mconn.enqueueOperation(key, op)
- prepareFuture(key, op, promise, timeout)
- }
-
- def realAsyncMutate(key: String, by: Long, mutator: Mutator, default: Option[Long], exp: Duration, timeout: FiniteDuration)(implicit ec: ExecutionContext): CancelableFuture[Result[Long]] = {
- val promise = Promise[Result[Long]]()
- val result = new MutablePartialResult[Long]
-
- val expiry = default match {
- case Some(_) => expiryToSeconds(exp).toInt
- case None => -1 // expiry of all 1-bits disables setting default in case of nonexistent key
- }
-
- val op: Operation = opFact.mutate(mutator, key, by, default.getOrElse(0L), expiry, new OperationCallback {
- def receivedStatus(opStatus: OperationStatus) {
- handleStatus(opStatus, key, result) {
- case CASSuccessStatus =>
- result.tryComplete(Success(SuccessfulResult(key, opStatus.getMessage.toLong)))
- }
- }
-
- def complete() {
- result.completePromise(key, promise)
- }
- })
-
- mconn.enqueueOperation(key, op)
- prepareFuture(key, op, promise, timeout)
- }
-
- protected final def prepareFuture[T](key: String, op: Operation, promise: Promise[Result[T]], atMost: FiniteDuration)(implicit ec: ExecutionContext): CancelableFuture[Result[T]] = {
- val operationCancelable = Cancelable(() => {
- try {
- if (!op.isCancelled)
- op.cancel()
- } catch {
- case NonFatal(ex) =>
- ec.reportFailure(ex)
- }
- })
-
- val timeout = scheduler.scheduleOnce(atMost) {
- promise.tryComplete {
- if (op.hasErrored)
- Failure(op.getException)
- else if (op.isCancelled)
- Success(FailedResult(key, CancelledStatus))
- else
- Success(FailedResult(key, TimedOutStatus))
- }
- }
-
- val future = promise.future
- val mainCancelable = Cancelable { () =>
- timeout.cancel()
- operationCancelable.cancel()
- }
-
- future.onComplete { msg =>
- try {
- timeout.cancel()
- } catch {
- case NonFatal(ex) =>
- ec.reportFailure(ex)
- }
-
- msg match {
- case Success(FailedResult(_, TimedOutStatus)) =>
- MemcachedConnection.opTimedOut(op)
- op.timeOut()
- if (!op.isCancelled) try op.cancel() catch {
- case NonFatal(_) =>
- }
- case Success(FailedResult(_, _)) =>
- if (!op.isCancelled) try op.cancel() catch {
- case NonFatal(_) =>
- }
- case _ =>
- MemcachedConnection.opSucceeded(op)
- }
- }
-
- CancelableFuture(future, mainCancelable)
- }
-
- protected final val statusTranslation: PartialFunction[OperationStatus, Status] = {
- case _: CancelledOperationStatus =>
- CancelledStatus
- case _: TimedOutOperationStatus =>
- TimedOutStatus
- case status: CASOperationStatus =>
- status.getCASResponse match {
- case CASResponse.EXISTS =>
- CASExistsStatus
- case CASResponse.NOT_FOUND =>
- CASNotFoundStatus
- case CASResponse.OK =>
- CASSuccessStatus
- case CASResponse.OBSERVE_ERROR_IN_ARGS =>
- CASObserveErrorInArgs
- case CASResponse.OBSERVE_MODIFIED =>
- CASObserveModified
- case CASResponse.OBSERVE_TIMEOUT =>
- CASObserveTimeout
- }
- case x if x.isSuccess =>
- CASSuccessStatus
- }
-
- protected final def expiryToSeconds(duration: Duration): Long = duration match {
- case finite: FiniteDuration =>
- val seconds = finite.toSeconds
- if (seconds < 60 * 60 * 24 * 30)
- seconds
- else
- System.currentTimeMillis() / 1000 + seconds
- case _ =>
- // infinite duration (set to 0)
- 0
- }
-
- /**
- * Handles OperationStatuses from SpyMemcached
- *
- * The first argument list takes the SpyMemcached operation status, and also the key and result so that this method
- * itself can attach sane failure handling.
- *
- * The second argument list is a simple PartialFunction that allows you to side effect for the translated [[Status]]s you care about,
- * typically by completing the result.
- *
- * @param spyMemcachedStatus SpyMemcached OperationStatus to be translated
- * @param key String key involved in the operation
- * @param result MutablePartialResult
- * @param handler a partial function that takes a translated [[Status]] and side-effects
- */
- private def handleStatus(
- spyMemcachedStatus: OperationStatus,
- key: String,
- result: MutablePartialResult[_])(handler: PartialFunction[Status, Unit]): Unit = {
- val status = statusTranslation.applyOrElse(spyMemcachedStatus, UnhandledStatus.fromSpyMemcachedStatus)
- handler.applyOrElse(status, {
- case UnhandledStatus(statusClass, statusMsg) => result.tryComplete(Failure(new UnhandledStatusException(s"$statusClass($statusMsg)")))
- // nothing
- case failure =>
- result.tryComplete(Success(FailedResult(key, failure)))
- }: Function[Status, Unit])
- }
-}
diff --git a/src/main/scala/shade/memcached/internals/Status.scala b/src/main/scala/shade/memcached/internals/Status.scala
deleted file mode 100644
index 5eb2bca..0000000
--- a/src/main/scala/shade/memcached/internals/Status.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.memcached.internals
-
-import net.spy.memcached.ops.OperationStatus
-import scala.language.existentials
-
-sealed trait Status extends Product with Serializable
-case object TimedOutStatus extends Status
-case object CancelledStatus extends Status
-case object CASExistsStatus extends Status
-case object CASNotFoundStatus extends Status
-case object CASSuccessStatus extends Status
-case object CASObserveErrorInArgs extends Status
-case object CASObserveModified extends Status
-case object CASObserveTimeout extends Status
-case object IllegalCompleteStatus extends Status
-
-object UnhandledStatus {
-
- /**
- * Builds a serialisable UnhandledStatus from a given [[OperationStatus]] from SpyMemcached
- */
- def fromSpyMemcachedStatus(spyStatus: OperationStatus): UnhandledStatus = UnhandledStatus(spyStatus.getClass, spyStatus.getMessage)
-}
-
-final case class UnhandledStatus(statusClass: Class[_], message: String) extends Status
\ No newline at end of file
diff --git a/src/test/scala/shade/memcached/internals/MutablePartialResultSuite.scala b/src/test/scala/shade/memcached/internals/MutablePartialResultSuite.scala
deleted file mode 100644
index dd24a2d..0000000
--- a/src/test/scala/shade/memcached/internals/MutablePartialResultSuite.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.memcached.internals
-
-import org.scalatest.FunSuite
-import org.scalatest.concurrent.{ IntegrationPatience, ScalaFutures }
-
-import scala.concurrent.{ Future, Promise }
-import scala.util.Success
-
-class MutablePartialResultSuite
- extends FunSuite
- with ScalaFutures
- with IntegrationPatience {
-
- def assertCompletePromise(toCheck: MutablePartialResult[Boolean], expected: Boolean): Unit = {
- val promise = Promise[Result[Boolean]]()
- toCheck.completePromise("key1", promise)
- whenReady(promise.future) {
- case SuccessfulResult(_, r) => assert(r == expected)
- case _ => fail("not successful")
- }
- }
-
- test("initial state") {
- val pResult = new MutablePartialResult[Boolean]
- val promise = Promise[Result[Boolean]]()
- pResult.completePromise("key1", promise)
- whenReady(promise.future) { r =>
- assert(r.isInstanceOf[FailedResult])
- }
- }
-
- test("#tryComplete on a fresh MutablePartialResult") {
- val pResult = new MutablePartialResult[Boolean]
- pResult.tryComplete(Success(SuccessfulResult("key1", false)))
- assertCompletePromise(toCheck = pResult, expected = false)
- }
-
- test("#tryComplete on a MutablePartialResult that has already been completed") {
- val pResult = new MutablePartialResult[Boolean]
- assert(pResult.tryComplete(Success(SuccessfulResult("key1", false))))
- assert(!pResult.tryComplete(Success(SuccessfulResult("key1", true))))
- assertCompletePromise(toCheck = pResult, expected = false)
- }
-
- test("#tryCompleteWith on a fresh MutablePartialResult") {
- val pResult = new MutablePartialResult[Boolean]
- pResult.tryCompleteWith(Future.successful(SuccessfulResult("key1", false)))
- assertCompletePromise(toCheck = pResult, expected = false)
- }
-
- test("#tryCompleteWith on a MutablePartialResult that has already been completed") {
- val pResult = new MutablePartialResult[Boolean]
- assert(pResult.tryCompleteWith(Future.successful(SuccessfulResult("key1", false))))
- assert(!pResult.tryCompleteWith(Future.successful(SuccessfulResult("key1", true))))
- assertCompletePromise(toCheck = pResult, expected = false)
- }
-
-}
\ No newline at end of file
diff --git a/src/test/scala/shade/tests/CodecsSuite.scala b/src/test/scala/shade/tests/CodecsSuite.scala
deleted file mode 100644
index 412aad8..0000000
--- a/src/test/scala/shade/tests/CodecsSuite.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.tests
-
-import org.scalacheck.Arbitrary
-import org.scalatest.FunSuite
-import org.scalatest.prop.GeneratorDrivenPropertyChecks
-import shade.memcached.{ Codec, MemcachedCodecs }
-
-class CodecsSuite extends FunSuite with MemcachedCodecs with GeneratorDrivenPropertyChecks {
-
- /**
- * Properties-based checking for a codec of type A
- */
- private def serdesCheck[A: Arbitrary](codec: Codec[A]): Unit = {
- forAll { n: A =>
- val serialised = codec.serialize(n)
- val deserialised = codec.deserialize(serialised)
- assert(deserialised == n)
- }
- }
-
- test("IntBinaryCodec") {
- serdesCheck(IntBinaryCodec)
- }
-
- test("DoubleBinaryCodec") {
- serdesCheck(DoubleBinaryCodec)
- }
-
- test("FloatBinaryCodec") {
- serdesCheck(FloatBinaryCodec)
- }
-
- test("LongBinaryCodec") {
- serdesCheck(LongBinaryCodec)
- }
-
- test("BooleanBinaryCodec") {
- serdesCheck(BooleanBinaryCodec)
- }
-
- test("CharBinaryCodec") {
- serdesCheck(CharBinaryCodec)
- }
-
- test("ShortBinaryCodec") {
- serdesCheck(ShortBinaryCodec)
- }
-
- test("StringBinaryCodec") {
- serdesCheck(StringBinaryCodec)
- }
-
- test("ArrayByteBinaryCodec") {
- serdesCheck(ArrayByteBinaryCodec)
- }
-
-}
\ No newline at end of file
diff --git a/src/test/scala/shade/tests/FakeMemcachedSuite.scala b/src/test/scala/shade/tests/FakeMemcachedSuite.scala
deleted file mode 100644
index 8a73244..0000000
--- a/src/test/scala/shade/tests/FakeMemcachedSuite.scala
+++ /dev/null
@@ -1,338 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.tests
-
-import java.io.{ ByteArrayOutputStream, ObjectOutputStream }
-
-import org.scalatest.FunSuite
-import shade.testModels.Impression
-
-import scala.concurrent.Await
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.duration._
-
-class FakeMemcachedSuite extends FunSuite with MemcachedTestHelpers {
- implicit val timeout = 5.second
-
- test("add") {
- withFakeMemcached { cache =>
- val op1 = cache.awaitAdd("hello", Value("world"), 5.seconds)
- assert(op1 === true)
-
- val stored = cache.awaitGet[Value]("hello")
- assert(stored === Some(Value("world")))
-
- val op2 = cache.awaitAdd("hello", Value("changed"), 5.seconds)
- assert(op2 === false)
-
- val changed = cache.awaitGet[Value]("hello")
- assert(changed === Some(Value("world")))
- }
- }
-
- test("add-null") {
- withFakeMemcached { cache =>
- val op1 = cache.awaitAdd("hello", null, 5.seconds)
- assert(op1 === false)
-
- val stored = cache.awaitGet[Value]("hello")
- assert(stored === None)
- }
- }
-
- test("get") {
- withFakeMemcached { cache =>
- val value = cache.awaitGet[Value]("missing")
- assert(value === None)
- }
- }
-
- test("set") {
- withFakeMemcached { cache =>
- assert(cache.awaitGet[Value]("hello") === None)
-
- cache.awaitSet("hello", Value("world"), 3.seconds)
- assert(cache.awaitGet[Value]("hello") === Some(Value("world")))
-
- cache.awaitSet("hello", Value("changed"), 3.seconds)
- assert(cache.awaitGet[Value]("hello") === Some(Value("changed")))
-
- Thread.sleep(3000)
-
- assert(cache.awaitGet[Value]("hello") === None)
- }
- }
-
- test("set-null") {
- withFakeMemcached { cache =>
- val op1 = cache.awaitAdd("hello", null, 5.seconds)
- assert(op1 === false)
-
- val stored = cache.awaitGet[Value]("hello")
- assert(stored === None)
- }
- }
-
- test("delete") {
- withFakeMemcached { cache =>
- cache.awaitDelete("hello")
- assert(cache.awaitGet[Value]("hello") === None)
-
- cache.awaitSet("hello", Value("world"), 1.minute)
- assert(cache.awaitGet[Value]("hello") === Some(Value("world")))
-
- assert(cache.awaitDelete("hello") === true)
- assert(cache.awaitGet[Value]("hello") === None)
-
- assert(cache.awaitDelete("hello") === false)
- }
- }
-
- test("compareAndSet") {
- withFakeMemcached { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- // no can do
- assert(Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf) === false)
- assert(cache.awaitGet[Value]("some-key") === None)
-
- // set to value1
- assert(Await.result(cache.compareAndSet("some-key", None, Value("value1"), 5.seconds), Duration.Inf) === true)
- assert(cache.awaitGet[Value]("some-key") === Some(Value("value1")))
-
- // no can do
- assert(Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf) === false)
- assert(cache.awaitGet[Value]("some-key") === Some(Value("value1")))
-
- // set to value2, from value1
- assert(Await.result(cache.compareAndSet("some-key", Some(Value("value1")), Value("value2"), 15.seconds), Duration.Inf) === true)
- assert(cache.awaitGet[Value]("some-key") === Some(Value("value2")))
-
- // no can do
- assert(Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf) === false)
- assert(cache.awaitGet[Value]("some-key") === Some(Value("value2")))
-
- // set to value3, from value2
- assert(Await.result(cache.compareAndSet("some-key", Some(Value("value2")), Value("value3"), 15.seconds), Duration.Inf) === true)
- assert(cache.awaitGet[Value]("some-key") === Some(Value("value3")))
- }
- }
-
- test("transformAndGet") {
- withFakeMemcached { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- def incrementValue =
- cache.transformAndGet[Int]("some-key", 5.seconds) {
- case None => 1
- case Some(nr) => nr + 1
- }
-
- assert(Await.result(incrementValue, Duration.Inf) === 1)
- assert(Await.result(incrementValue, Duration.Inf) === 2)
- assert(Await.result(incrementValue, Duration.Inf) === 3)
- assert(Await.result(incrementValue, Duration.Inf) === 4)
- assert(Await.result(incrementValue, Duration.Inf) === 5)
- assert(Await.result(incrementValue, Duration.Inf) === 6)
- }
- }
-
- test("getAndTransform") {
- withFakeMemcached { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- def incrementValue = Await.result(
- cache.getAndTransform[Int]("some-key", 5.seconds) {
- case None => 1
- case Some(nr) => nr + 1
- },
- Duration.Inf
- )
-
- assert(incrementValue === None)
- assert(incrementValue === Some(1))
- assert(incrementValue === Some(2))
- assert(incrementValue === Some(3))
- assert(incrementValue === Some(4))
- assert(incrementValue === Some(5))
- assert(incrementValue === Some(6))
- }
- }
-
- test("transformAndGet-concurrent") {
- withFakeMemcached { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- def incrementValue =
- cache.transformAndGet[Int]("some-key", 60.seconds) {
- case None => 1
- case Some(nr) => nr + 1
- }
-
- val seq = concurrent.Future.sequence((0 until 500).map(nr => incrementValue))
- Await.result(seq, 20.seconds)
-
- assert(cache.awaitGet[Int]("some-key") === Some(500))
- }
- }
-
- test("getAndTransform-concurrent") {
- withFakeMemcached { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- def incrementValue =
- cache.getAndTransform[Int]("some-key", 60.seconds) {
- case None => 1
- case Some(nr) => nr + 1
- }
-
- val seq = concurrent.Future.sequence((0 until 500).map(nr => incrementValue))
- Await.result(seq, 20.seconds)
-
- assert(cache.awaitGet[Int]("some-key") === Some(500))
- }
- }
-
- test("increment-decrement") {
- withFakeMemcached { cache =>
- assert(cache.awaitGet[Int]("hello") === None)
-
- cache.awaitSet("hello", "123", 1.second)(StringBinaryCodec)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("123"))
-
- cache.awaitIncrement("hello", 1, None, 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("124"))
-
- cache.awaitDecrement("hello", 1, None, 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("123"))
-
- Thread.sleep(3000)
-
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === None)
- }
- }
-
- test("increment-decrement-delta") {
- withFakeMemcached { cache =>
- assert(cache.awaitGet[Int]("hello") === None)
-
- cache.awaitSet("hello", "123", 1.second)(StringBinaryCodec)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("123"))
-
- cache.awaitIncrement("hello", 5, None, 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("128"))
-
- cache.awaitDecrement("hello", 5, None, 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("123"))
-
- Thread.sleep(3000)
-
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === None)
- }
- }
-
- test("increment-default") {
- withFakeMemcached { cache =>
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === None)
-
- cache.awaitIncrement("hello", 1, Some(0), 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("0"))
-
- cache.awaitIncrement("hello", 1, Some(0), 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("1"))
-
- Thread.sleep(3000)
-
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === None)
- }
- }
-
- test("increment-overflow") {
- withFakeMemcached { cache =>
- assert(cache.awaitIncrement("hello", 1, Some(Long.MaxValue), 1.minute) === Long.MaxValue)
-
- assert(cache.awaitIncrement("hello", 1, None, 1.minute) === Long.MinValue)
-
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("9223372036854775808"))
- }
- }
-
- test("decrement-underflow") {
- withFakeMemcached { cache =>
- assert(cache.awaitDecrement("hello", 1, Some(1), 1.minute) === 1)
-
- assert(cache.awaitDecrement("hello", 1, None, 1.minute) === 0)
-
- assert(cache.awaitDecrement("hello", 1, None, 1.minute) === 0)
-
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("0"))
- }
- }
-
- test("big-instance-1") {
- withFakeMemcached { cache =>
- val impression = shade.testModels.bigInstance
- cache.awaitSet(impression.uuid, impression, 60.seconds)
- assert(cache.awaitGet[Impression](impression.uuid) === Some(impression))
- }
- }
-
- test("big-instance-1-manual") {
- withFakeMemcached { cache =>
- val byteOut = new ByteArrayOutputStream()
- val objectOut = new ObjectOutputStream(byteOut)
-
- val impression = shade.testModels.bigInstance
- objectOut.writeObject(impression)
- val byteArray = byteOut.toByteArray
-
- cache.awaitSet(impression.uuid, byteArray, 60.seconds)
-
- val inBytes = cache.awaitGet[Array[Byte]](impression.uuid)
- assert(inBytes.isDefined)
- assert(inBytes.get.length == byteArray.length)
- }
- }
-
- test("big-instance-2") {
- withFakeMemcached { cache =>
- val impression = shade.testModels.bigInstance2
- cache.awaitSet(impression.uuid, impression, 60.seconds)
- assert(cache.awaitGet[Impression](impression.uuid) === Some(impression))
- }
- }
-
- test("big-instance-3") {
- withFakeMemcached { cache =>
- val impression = shade.testModels.bigInstance
- val result = cache.set(impression.uuid, impression, 60.seconds) flatMap { _ =>
- cache.get[Impression](impression.uuid)
- }
-
- assert(Await.result(result, Duration.Inf) === Some(impression))
- }
- }
-
- test("cancel-strategy simple test") {
- withFakeMemcached { cache =>
- Thread.sleep(100)
- val impression = shade.testModels.bigInstance2
- cache.awaitSet(impression.uuid, impression, 60.seconds)
- assert(cache.awaitGet[Impression](impression.uuid) === Some(impression))
- }
- }
-}
diff --git a/src/test/scala/shade/tests/InMemoryCacheVer2Suite.scala b/src/test/scala/shade/tests/InMemoryCacheVer2Suite.scala
deleted file mode 100644
index 56da20d..0000000
--- a/src/test/scala/shade/tests/InMemoryCacheVer2Suite.scala
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.tests
-
-import org.scalatest.FunSuite
-import shade.inmemory.InMemoryCache
-
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.duration._
-import scala.concurrent.{ Await, Future }
-
-class InMemoryCacheVer2Suite extends FunSuite {
- test("get(), set()") {
- withInstance { cache =>
- assert(cache.get[String]("hello") === None)
-
- cache.set("hello", "world")
- assert(cache.get[String]("hello") === Some("world"))
- }
- }
-
- test("add()") {
- withInstance { cache =>
- assert(cache.get[String]("hello") === None)
-
- assert(cache.add("hello", "world"), "value should be added successfully")
- assert(cache.get[String]("hello") === Some("world"))
-
- assert(!cache.add("hello", "world version 2"), "value already exists")
- assert(cache.get[String]("hello") === Some("world"))
-
- cache.set("hello", "world version 2")
- assert(cache.get[String]("hello") === Some("world version 2"))
- }
- }
-
- test("getOrElse()") {
- withInstance { cache =>
- assert(cache.getOrElse("hello", "default") === "default")
- cache.set("hello", "world")
- assert(cache.getOrElse("hello", "world") === "world")
- }
- }
-
- test("delete()") {
- withInstance { cache =>
- assert(cache.get[String]("hello") === None)
- cache.set("hello", "world")
- assert(cache.get[String]("hello") === Some("world"))
-
- assert(cache.delete("hello"), "item should be deleted")
- assert(cache.get[String]("hello") === None)
- assert(!cache.delete("hello"), "item should not be there anymore")
- }
- }
-
- test("cachedFuture()") {
- withInstance { cache =>
- assert(cache.get[String]("hello") === None)
-
- def future() = cache.cachedFuture("hello", 1.minute) {
- Future {
- Thread.sleep(1000)
- "world"
- }
- }
-
- for (idx <- 0 until 10000)
- assert(Await.result(future(), 4.seconds) === "world")
- }
- }
-
- test("compareAndSet()") {
- withInstance { cache =>
- assert(cache.compareAndSet("hello", None, "world"), "first CAS should succeed")
- assert(cache.compareAndSet("hello", Some("world"), "world updated"), "second CAS should succeed")
- assert(cache.get[String]("hello") === Some("world updated"))
- assert(!cache.compareAndSet("hello", Some("bollocks"), "world"), "third CAS should fail")
- }
- }
-
- test("transformAndGet() (with expiry)") {
- withInstance { cache =>
- def incr() = cache.transformAndGet[Int]("number", 1.second) {
- case Some(nr) => nr + 1
- case None => 0
- }
-
- for (idx <- 0 until 100)
- assert(incr() === idx)
-
- Thread.sleep(1000)
- assert(incr() === 0)
- }
- }
-
- test("getAndTransform() (with expiry)") {
- withInstance { cache =>
- def incr() = cache.getAndTransform[Int]("number", 1.second) {
- case Some(nr) => nr + 1
- case None => 1
- }
-
- for (idx <- 0 until 100)
- if (idx == 0)
- assert(incr() === None)
- else
- assert(incr() === Some(idx))
-
- Thread.sleep(1000)
- assert(incr() === None)
- }
- }
-
- test("add() expiration") {
- withInstance { cache =>
- assert(cache.add("hello", "world", 1.second), "add() should work")
- assert(cache.get[String]("hello") === Some("world"))
-
- Thread.sleep(1000)
- assert(cache.get[String]("hello") === None)
- }
- }
-
- test("set() expiration") {
- withInstance { cache =>
- cache.set("hello", "world", 1.second)
- assert(cache.get[String]("hello") === Some("world"))
-
- Thread.sleep(1000)
- assert(cache.get[String]("hello") === None)
- }
- }
-
- test("delete() expiration") {
- withInstance { cache =>
- cache.set("hello", "world", 1.second)
- assert(cache.get[String]("hello") === Some("world"))
-
- Thread.sleep(1000)
- assert(!cache.delete("hello"), "delete() should return false")
- }
- }
-
- test("cachedFuture() expiration") {
- withInstance { cache =>
- val result = Await.result(cache.cachedFuture("hello", 1.second) { Future("world") }, 1.second)
- assert(result === "world")
-
- val size = cache.realSize
- assert(size === 1)
-
- Thread.sleep(1000)
- assert(cache.get[String]("hello") === None)
- }
- }
-
- test("compareAndSet() expiration") {
- withInstance { cache =>
- assert(cache.compareAndSet("hello", None, "world", 1.second), "CAS should succeed")
- assert(cache.get[String]("hello") === Some("world"))
-
- Thread.sleep(1000)
- assert(cache.get[String]("hello") === None)
- }
- }
-
- test("maintenance / scheduler") {
- withInstance { cache =>
- val startTS = System.currentTimeMillis()
-
- cache.set("hello", "world", 1.second)
- cache.set("hello2", "world2")
-
- assert(cache.realSize === 2)
-
- val diff = Await.result(cache.maintenance, 20.seconds)
- val m1ts = System.currentTimeMillis()
-
- assert(diff === 1)
- assert(cache.realSize === 1)
-
- val timeWindow1 = math.round((m1ts - startTS) / 1000.0)
- assert(timeWindow1 >= 3 && timeWindow1 <= 7, "scheduler should run at no less than 3 secs and no more than 7 secs")
-
- val diff2 = Await.result(cache.maintenance, 20.seconds)
- val m2ts = System.currentTimeMillis()
-
- assert(diff2 === 0)
- assert(cache.realSize === 1)
-
- val timeWindow2 = math.round((m2ts - m1ts) / 1000.0)
- assert(timeWindow2 >= 3 && timeWindow2 <= 7, "scheduler should run at no less than 3 secs and no more than 7 secs")
- }
- }
-
- def withInstance[T](cb: InMemoryCache => T) = {
- val instance = InMemoryCache(global)
- try cb(instance) finally {
- instance.close()
- }
- }
-}
diff --git a/src/test/scala/shade/tests/MemcachedSuite.scala b/src/test/scala/shade/tests/MemcachedSuite.scala
deleted file mode 100644
index 5f187ec..0000000
--- a/src/test/scala/shade/tests/MemcachedSuite.scala
+++ /dev/null
@@ -1,414 +0,0 @@
-/*
- * Copyright (c) 2012-2017 by its authors. Some rights reserved.
- * See the project homepage at: https://github.com/monix/shade
- *
- * Licensed under the MIT License (the "License"); you may not use this
- * file except in compliance with the License. You may obtain a copy
- * of the License at:
- *
- * https://github.com/monix/shade/blob/master/LICENSE.txt
- */
-
-package shade.tests
-
-import java.io.{ ByteArrayOutputStream, ObjectOutputStream }
-
-import org.scalatest.FunSuite
-import shade.TimeoutException
-import shade.memcached.FailureMode
-import shade.testModels.{ ContentPiece, Impression }
-
-import scala.concurrent.Await
-import scala.concurrent.ExecutionContext.Implicits.global
-import scala.concurrent.duration._
-
-class MemcachedSuite extends FunSuite with MemcachedTestHelpers {
- implicit val timeout = 5.second
-
- test("add") {
- withCache("add") { cache =>
- val op1 = cache.awaitAdd("hello", Value("world"), 5.seconds)
- assert(op1 === true)
-
- val stored = cache.awaitGet[Value]("hello")
- assert(stored === Some(Value("world")))
-
- val op2 = cache.awaitAdd("hello", Value("changed"), 5.seconds)
- assert(op2 === false)
-
- val changed = cache.awaitGet[Value]("hello")
- assert(changed === Some(Value("world")))
- }
- }
-
- test("add-null") {
- withCache("add-null") { cache =>
- val op1 = cache.awaitAdd("hello", null, 5.seconds)
- assert(op1 === false)
-
- val stored = cache.awaitGet[Value]("hello")
- assert(stored === None)
- }
- }
-
- test("get") {
- withCache("get") { cache =>
- val value = cache.awaitGet[Value]("missing")
- assert(value === None)
- }
- }
-
- test("set") {
- withCache("set") { cache =>
- assert(cache.awaitGet[Value]("hello") === None)
-
- cache.awaitSet("hello", Value("world"), 1.seconds)
- assert(cache.awaitGet[Value]("hello") === Some(Value("world")))
-
- cache.awaitSet("hello", Value("changed"), 1.second)
- assert(cache.awaitGet[Value]("hello") === Some(Value("changed")))
-
- Thread.sleep(3000)
-
- assert(cache.awaitGet[Value]("hello") === None)
- }
- }
-
- test("set-null") {
- withCache("set-null") { cache =>
- val op1 = cache.awaitAdd("hello", null, 5.seconds)
- assert(op1 === false)
-
- val stored = cache.awaitGet[Value]("hello")
- assert(stored === None)
- }
- }
-
- test("delete") {
- withCache("delete") { cache =>
- cache.awaitDelete("hello")
- assert(cache.awaitGet[Value]("hello") === None)
-
- cache.awaitSet("hello", Value("world"), 1.minute)
- assert(cache.awaitGet[Value]("hello") === Some(Value("world")))
-
- assert(cache.awaitDelete("hello") === true)
- assert(cache.awaitGet[Value]("hello") === None)
-
- assert(cache.awaitDelete("hello") === false)
- }
- }
-
- test("compareAndSet") {
- withCache("compareAndSet") { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- // no can do
- assert(Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf) === false)
- assert(cache.awaitGet[Value]("some-key") === None)
-
- // set to value1
- assert(Await.result(cache.compareAndSet("some-key", None, Value("value1"), 5.seconds), Duration.Inf) === true)
- assert(cache.awaitGet[Value]("some-key") === Some(Value("value1")))
-
- // no can do
- assert(Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf) === false)
- assert(cache.awaitGet[Value]("some-key") === Some(Value("value1")))
-
- // set to value2, from value1
- assert(Await.result(cache.compareAndSet("some-key", Some(Value("value1")), Value("value2"), 15.seconds), Duration.Inf) === true)
- assert(cache.awaitGet[Value]("some-key") === Some(Value("value2")))
-
- // no can do
- assert(Await.result(cache.compareAndSet("some-key", Some(Value("invalid")), Value("value1"), 15.seconds), Duration.Inf) === false)
- assert(cache.awaitGet[Value]("some-key") === Some(Value("value2")))
-
- // set to value3, from value2
- assert(Await.result(cache.compareAndSet("some-key", Some(Value("value2")), Value("value3"), 15.seconds), Duration.Inf) === true)
- assert(cache.awaitGet[Value]("some-key") === Some(Value("value3")))
- }
- }
-
- test("transformAndGet") {
- withCache("transformAndGet") { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- def incrementValue =
- cache.transformAndGet[Int]("some-key", 5.seconds) {
- case None => 1
- case Some(nr) => nr + 1
- }
-
- assert(Await.result(incrementValue, Duration.Inf) === 1)
- assert(Await.result(incrementValue, Duration.Inf) === 2)
- assert(Await.result(incrementValue, Duration.Inf) === 3)
- assert(Await.result(incrementValue, Duration.Inf) === 4)
- assert(Await.result(incrementValue, Duration.Inf) === 5)
- assert(Await.result(incrementValue, Duration.Inf) === 6)
- }
- }
-
- test("getAndTransform") {
- withCache("getAndTransform") { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- def incrementValue = Await.result(
- cache.getAndTransform[Int]("some-key", 5.seconds) {
- case None => 1
- case Some(nr) => nr + 1
- },
- Duration.Inf
- )
-
- assert(incrementValue === None)
- assert(incrementValue === Some(1))
- assert(incrementValue === Some(2))
- assert(incrementValue === Some(3))
- assert(incrementValue === Some(4))
- assert(incrementValue === Some(5))
- assert(incrementValue === Some(6))
- }
- }
-
- test("transformAndGet-concurrent") {
- withCache("transformAndGet", opTimeout = Some(10.seconds)) { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- def incrementValue =
- cache.transformAndGet[Int]("some-key", 60.seconds) {
- case None => 1
- case Some(nr) => nr + 1
- }
-
- val seq = concurrent.Future.sequence((0 until 100).map(nr => incrementValue))
- Await.result(seq, 20.seconds)
-
- assert(cache.awaitGet[Int]("some-key") === Some(100))
- }
- }
-
- test("getAndTransform-concurrent") {
- withCache("getAndTransform", opTimeout = Some(10.seconds)) { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- def incrementValue =
- cache.getAndTransform[Int]("some-key", 60.seconds) {
- case None => 1
- case Some(nr) => nr + 1
- }
-
- val seq = concurrent.Future.sequence((0 until 100).map(nr => incrementValue))
- Await.result(seq, 20.seconds)
-
- assert(cache.awaitGet[Int]("some-key") === Some(100))
- }
- }
-
- test("transformAndGet-concurrent-timeout") {
- withCache("transformAndGet", opTimeout = Some(300.millis)) { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- def incrementValue =
- cache.transformAndGet[Int]("some-key", 60.seconds) {
- case None => 1
- case Some(nr) => nr + 1
- }
-
- val initial = Await.result(incrementValue.flatMap { case _ => incrementValue }, 3.seconds)
- assert(initial === 2)
-
- val seq = concurrent.Future.sequence((0 until 500).map(nr => incrementValue))
- try {
- Await.result(seq, 20.seconds)
- fail("should throw exception")
- } catch {
- case ex: TimeoutException =>
- assert(ex.getMessage === "some-key")
- }
- }
- }
-
- test("getAndTransform-concurrent-timeout") {
- withCache("getAndTransform", opTimeout = Some(300.millis)) { cache =>
- cache.awaitDelete("some-key")
- assert(cache.awaitGet[Value]("some-key") === None)
-
- def incrementValue =
- cache.getAndTransform[Int]("some-key", 60.seconds) {
- case None => 1
- case Some(nr) => nr + 1
- }
-
- val initial = Await.result(incrementValue.flatMap { case _ => incrementValue }, 3.seconds)
- assert(initial === Some(1))
-
- val seq = concurrent.Future.sequence((0 until 500).map(nr => incrementValue))
-
- try {
- Await.result(seq, 20.seconds)
- fail("should throw exception")
- } catch {
- case ex: TimeoutException =>
- assert(ex.key === "some-key")
- }
- }
- }
-
- test("increment-decrement") {
- withCache("increment-decrement") { cache =>
- assert(cache.awaitGet[Int]("hello") === None)
-
- cache.awaitSet("hello", "123", 1.second)(StringBinaryCodec)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("123"))
-
- cache.awaitIncrement("hello", 1, None, 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("124"))
-
- cache.awaitDecrement("hello", 1, None, 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("123"))
-
- Thread.sleep(3000)
-
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === None)
- }
- }
-
- test("increment-decrement-delta") {
- withCache("increment-decrement-delta") { cache =>
- assert(cache.awaitGet[Int]("hello") === None)
-
- cache.awaitSet("hello", "123", 1.second)(StringBinaryCodec)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("123"))
-
- cache.awaitIncrement("hello", 5, None, 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("128"))
-
- cache.awaitDecrement("hello", 5, None, 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("123"))
-
- Thread.sleep(3000)
-
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === None)
- }
- }
-
- test("increment-default") {
- withCache("increment-default") { cache =>
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === None)
-
- cache.awaitIncrement("hello", 1, Some(0), 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("0"))
-
- cache.awaitIncrement("hello", 1, Some(0), 1.second)
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("1"))
-
- Thread.sleep(3000)
-
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === None)
- }
- }
-
- test("increment-overflow") {
- withCache("increment-overflow") { cache =>
- assert(cache.awaitIncrement("hello", 1, Some(Long.MaxValue), 1.minute) === Long.MaxValue)
-
- assert(cache.awaitIncrement("hello", 1, None, 1.minute) === Long.MinValue)
-
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("9223372036854775808"))
- }
- }
-
- test("decrement-underflow") {
- withCache("increment-underflow") { cache =>
- assert(cache.awaitDecrement("hello", 1, Some(1), 1.minute) === 1)
-
- assert(cache.awaitDecrement("hello", 1, None, 1.minute) === 0)
-
- assert(cache.awaitDecrement("hello", 1, None, 1.minute) === 0)
-
- assert(cache.awaitGet[String]("hello")(StringBinaryCodec) === Some("0"))
- }
- }
-
- test("vector-inherited-case-classes") {
- withCache("vector-inherited-case-classes") { cache =>
- val content = shade.testModels.contentSeq
- cache.awaitSet("blog-posts", content, 60.seconds)
- assert(cache.awaitGet[Vector[ContentPiece]]("blog-posts") === Some(content))
- }
- }
-
- test("big-instance-1") {
- withCache("big-instance-1") { cache =>
- val impression = shade.testModels.bigInstance
- cache.awaitSet(impression.uuid, impression, 60.seconds)
- assert(cache.awaitGet[Impression](impression.uuid) === Some(impression))
- }
- }
-
- test("big-instance-1-manual") {
- withCache("big-instance-1-manual") { cache =>
- val byteOut = new ByteArrayOutputStream()
- val objectOut = new ObjectOutputStream(byteOut)
-
- val impression = shade.testModels.bigInstance
- objectOut.writeObject(impression)
- val byteArray = byteOut.toByteArray
-
- cache.awaitSet(impression.uuid, byteArray, 60.seconds)
-
- val inBytes = cache.awaitGet[Array[Byte]](impression.uuid)
- assert(inBytes.isDefined)
- assert(inBytes.get.length == byteArray.length)
- }
- }
-
- test("big-instance-2") {
- withCache("big-instance-2") { cache =>
- val impression = shade.testModels.bigInstance2
- cache.awaitSet(impression.uuid, impression, 60.seconds)
- assert(cache.awaitGet[Impression](impression.uuid) === Some(impression))
- }
- }
-
- test("big-instance-3") {
- withCache("big-instance-3") { cache =>
- val impression = shade.testModels.bigInstance
- val result = cache.set(impression.uuid, impression, 60.seconds) flatMap { _ =>
- cache.get[Impression](impression.uuid)
- }
-
- assert(Await.result(result, Duration.Inf) === Some(impression))
- }
- }
-
- test("cancel-strategy simple test") {
- withCache("cancel-strategy", failureMode = Some(FailureMode.Cancel)) { cache =>
- Thread.sleep(100)
- val impression = shade.testModels.bigInstance2
- cache.awaitSet(impression.uuid, impression, 60.seconds)
- assert(cache.awaitGet[Impression](impression.uuid) === Some(impression))
- }
- }
-
- test("infinite-duration") {
- withCache("infinite-duration") { cache =>
- assert(cache.awaitGet[Value]("hello") === None)
- try {
- cache.awaitSet("hello", Value("world"), Duration.Inf)
- assert(cache.awaitGet[Value]("hello") === Some(Value("world")))
-
- Thread.sleep(5000)
- assert(cache.awaitGet[Value]("hello") === Some(Value("world")))
- } finally {
- cache.awaitDelete("hello")
- }
- }
- }
-}
diff --git a/version.sbt b/version.sbt
new file mode 100644
index 0000000..a25483b
--- /dev/null
+++ b/version.sbt
@@ -0,0 +1 @@
+version in ThisBuild := "2.0.0-SNAPSHOT"
\ No newline at end of file