diff --git a/README.md b/README.md
index 8b6a6f4..d11a53f 100644
--- a/README.md
+++ b/README.md
@@ -15,6 +15,7 @@
Click to expand
+- [Install](#install)
- [Usage](#usage)
- [Contributing](#contributing)
- [Donate](#donate)
@@ -22,6 +23,15 @@
+## Install
+With [npm](https://npmjs.org) do:
+
+```
+npm install level-ttl
+```
+
+Usage from TypeScript also requires `npm install @types/readable-stream`.
+
## Usage
**If you are upgrading:** please see [`UPGRADING.md`](UPGRADING.md).
@@ -31,22 +41,20 @@ Augment `levelup` to handle a new `ttl` option on `put()` and `batch()` that spe
Requires [`levelup`][levelup], [`level`][level] or one of its variants like [`level-rocksdb`][level-rocksdb] to be installed separately.
```js
-const level = require('level')
-const ttl = require('level-ttl')
+import { ClassicLevel } from 'classic-level'
+import ttl from 'level-ttl'
-const db = ttl(level('./db'))
+const db = ttl(new ClassicLevel('./db'))
// This entry will only stay in the store for 1 hour
-db.put('foo', 'bar', { ttl: 1000 * 60 * 60 }, (err) => {
- // ..
-})
+await db.put('foo', 'bar', { ttl: 1000 * 60 * 60 })
-db.batch([
+await db.batch([
// Same for these two entries
{ type: 'put', key: 'foo', value: 'bar' },
{ type: 'put', key: 'bam', value: 'boom' },
{ type: 'del', key: 'w00t' }
-], { ttl: 1000 * 60 * 5 }, (err) => {})
+], { ttl: 1000 * 60 * 5 })
```
If you put the same entry twice, you **refresh** the TTL to the _last_ put operation. In this way you can build utilities like [session managers](https://github.com/rvagg/node-level-session/) for your web application where the user's session is refreshed with each visit but expires after a set period of time since their last visit.
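+
+For example (a minimal sketch, assuming the same `db` as above; `sessionData` and the `session:123` key are placeholders), re-putting a key restarts its TTL:
+
+```js
+// keep the session alive for 30 minutes after the most recent request
+const sessionTTL = 30 * 60 * 1000
+await db.put('session:123', sessionData, { ttl: sessionTTL })
+
+// ...on a later request from the same user, the same put restarts the clock
+await db.put('session:123', sessionData, { ttl: sessionTTL })
+```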
@@ -54,8 +62,8 @@ If you put the same entry twice, you **refresh** the TTL to the _last_ put opera
Alternatively, for a lower write-footprint you can use the `ttl()` method that is added to your `levelup` instance which can serve to insert or update a ttl for any given key in the database - even if that key doesn't exist but may in the future!
```js
-db.put('foo', 'bar', (err) => {})
-db.ttl('foo', 1000 * 60 * 60, (err) => {})
+await db.put('foo', 'bar')
+await db.ttl('foo', 1000 * 60 * 60)
```
`level-ttl` uses an internal scan every 10 seconds by default, this limits the available resolution of your TTL values, possibly delaying a delete for up to 10 seconds. The resolution can be tuned by passing the `checkFrequency` option to the `ttl()` initialiser.
@@ -78,8 +86,8 @@ const db = ttl(level('./db'), {
defaultTTL: 15 * 60 * 1000
})
-db.put('A', 'beep', (err) => {})
-db.put('B', 'boop', { ttl: 60 * 1000 }, (err) => {})
+await db.put('A', 'beep')
+await db.put('B', 'boop', { ttl: 60 * 1000 })
```
### `opts.sub`
@@ -89,28 +97,32 @@ You can provide a custom storage for the meta data by using the `opts.sub` prope
A db for the data and a separate to store the meta data:
```js
-const level = require('level')
-const ttl = require('level-ttl')
-const meta = level('./meta')
+import { ClassicLevel } from 'classic-level'
+import ttl from 'level-ttl'
+import { EntryStream } from 'level-read-stream'
+
+const rootDb = new ClassicLevel('./db')
+const meta = rootDb.sublevel('meta')
-const db = ttl(level('./db'), { sub: meta })
+const db = ttl(rootDb, { sub: meta })
const batch = [
{ type: 'put', key: 'foo', value: 'foo value' },
{ type: 'put', key: 'bar', value: 'bar value' }
]
-db.batch(batch, { ttl: 100 }, function (err) {
- db.createReadStream()
- .on('data', function (data) {
- console.log('data', data)
- })
- .on('end', function () {
- meta.createReadStream()
- .on('data', function (data) {
- console.log('meta', data)
- })
- })
+await db.batch(batch, { ttl: 100 })
+
+new EntryStream(db)
+ .on('data', function (data) {
+ console.log('data', data)
+ })
+ .on('end', function () {
+ new EntryStream(meta)
+ .on('data', function (data) {
+ console.log('meta', data)
+ })
+ })
-})
```
diff --git a/encoding.js b/encoding.js
index b6f3976..92f0fa6 100644
--- a/encoding.js
+++ b/encoding.js
@@ -1,8 +1,4 @@
-'use strict'
-
-exports.create = function createEncoding (options) {
- options || (options = {})
-
+export function createEncoding (options = {}) {
if (options.ttlEncoding) return options.ttlEncoding
const PATH_SEP = options.separator
@@ -14,7 +10,7 @@ exports.create = function createEncoding (options) {
}
return {
- buffer: false,
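+    // 'utf8' is the level-transcoder format; it replaces the old 'buffer: false' codec flag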
+ format: 'utf8',
encode: function (e) {
// TODO: reexamine this with respect to level-sublevel@6's native codecs
if (Array.isArray(e)) {
diff --git a/level-ttl.d.ts b/level-ttl.d.ts
new file mode 100644
index 0000000..5beeaf7
--- /dev/null
+++ b/level-ttl.d.ts
@@ -0,0 +1,41 @@
+import type { AbstractLevel, AbstractPutOptions, AbstractBatchOptions, AbstractBatchOperation } from 'abstract-level'
+import type { Encoding } from 'level-transcoder'
+import type AsyncLock from 'async-lock'
+
+export interface LevelTtlOptions {
+  defaultTTL: number
+  checkFrequency: number
+  ttlEncoding?: Encoding<any, any, any>
+  sub?: AbstractLevel<any, any, any>
+  namespace: string
+  methodPrefix: string
+  expiryNamespace: string
+  separator: string
+}
+
+export interface LevelTtlOpsExtraOptions {
+  ttl?: number
+}
+
+export interface LevelTtlPutOptions<K, V> extends AbstractPutOptions<K, V>, LevelTtlOpsExtraOptions {}
+
+export interface LevelTtlBatchOptions<K, V> extends AbstractBatchOptions<K, V>, LevelTtlOpsExtraOptions {}
+
+export interface _TTL extends Pick<AbstractLevel<any, any, any>, 'put' | 'del' | 'batch' | 'close'> {
+  sub?: AbstractLevel<any, any, any>
+  options: LevelTtlOptions
+  encoding: Encoding<any, any, any>
+  _prefixNs: string[]
+  _expiryNs: string[]
+  _lock: AsyncLock
+}
+
+declare function LevelTTL<DB extends AbstractLevel<any, K, V>, K = string, V = string> (db: DB, options?: Partial<LevelTtlOptions>): DB & {
+  put: (key: K, value: V, options?: LevelTtlPutOptions<K, V>) => Promise<void>
+  batch: (operations: Array<AbstractBatchOperation<DB, K, V>>, options?: LevelTtlBatchOptions<K, V>) => Promise<void>
+  ttl: (key: K, delay: number) => Promise<void>
+  stop: () => void
+  _ttl: _TTL
+}
+
+export default LevelTTL
diff --git a/level-ttl.js b/level-ttl.js
index 577d1d3..b3df593 100644
--- a/level-ttl.js
+++ b/level-ttl.js
@@ -1,16 +1,13 @@
-'use strict'
-
-const after = require('after')
-const xtend = require('xtend')
-const encoding = require('./encoding')
-const Lock = require('lock').Lock
+import { createEncoding } from './encoding.js'
+import AsyncLock from 'async-lock'
+import { EntryStream } from 'level-read-stream'
function prefixKey (db, key) {
return db._ttl.encoding.encode(db._ttl._prefixNs.concat(key))
}
-function expiryKey (db, exp, key) {
- return db._ttl.encoding.encode(db._ttl._expiryNs.concat(exp, key))
+function expiryKey (db, expiryDate, key) {
+ return db._ttl.encoding.encode(db._ttl._expiryNs.concat(expiryDate, key))
}
function buildQuery (db) {
@@ -31,17 +28,11 @@ function startTtl (db, checkFrequency) {
const sub = db._ttl.sub
const query = buildQuery(db)
const decode = db._ttl.encoding.decode
- var createReadStream
db._ttl._checkInProgress = true
+ const emitError = db.emit.bind(db, 'error')
- if (sub) {
- createReadStream = sub.createReadStream.bind(sub)
- } else {
- createReadStream = db.createReadStream.bind(db)
- }
-
- createReadStream(query)
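+    // level-read-stream's EntryStream replaces the previous createReadStream() calls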
+ new EntryStream(sub || db, query)
.on('data', function (data) {
// the value is the key!
const key = decode(data.value)
@@ -51,29 +42,22 @@ function startTtl (db, checkFrequency) {
// the actual data that should expire now!
batch.push({ type: 'del', key: key })
})
- .on('error', db.emit.bind(db, 'error'))
+ .on('error', emitError)
.on('end', function () {
if (!batch.length) return
if (sub) {
- sub.batch(subBatch, { keyEncoding: 'binary' }, function (err) {
- if (err) db.emit('error', err)
- })
-
- db._ttl.batch(batch, { keyEncoding: 'binary' }, function (err) {
- if (err) db.emit('error', err)
- })
+ sub.batch(subBatch, { keyEncoding: 'binary' }).catch(emitError)
+ db._ttl.batch(batch, { keyEncoding: 'binary' }).catch(emitError)
} else {
- db._ttl.batch(subBatch.concat(batch), { keyEncoding: 'binary' }, function (err) {
- if (err) db.emit('error', err)
- })
+ db._ttl.batch(subBatch.concat(batch), { keyEncoding: 'binary' }).catch(emitError)
}
})
.on('close', function () {
db._ttl._checkInProgress = false
if (db._ttl._stopAfterCheck) {
- stopTtl(db, db._ttl._stopAfterCheck)
- db._ttl._stopAfterCheck = null
+ stopTtl(db)
+ db._ttl._stopAfterCheck = false
}
})
}, checkFrequency)
@@ -83,210 +67,163 @@ function startTtl (db, checkFrequency) {
}
}
-function stopTtl (db, callback) {
+function stopTtl (db) {
// can't close a db while an interator is in progress
// so if one is, defer
if (db._ttl._checkInProgress) {
- db._ttl._stopAfterCheck = callback
- // TODO do we really need to return the callback here?
- return db._ttl._stopAfterCheck
+ db._ttl._stopAfterCheck = true
+ } else {
+ clearInterval(db._ttl.intervalId)
}
- clearInterval(db._ttl.intervalId)
- callback && callback()
}
-function ttlon (db, keys, ttl, callback) {
- const exp = new Date(Date.now() + ttl)
+async function ttlon (db, keys, ttl) {
+ const expiryTime = new Date(Date.now() + ttl)
const batch = []
const sub = db._ttl.sub
const batchFn = (sub ? sub.batch.bind(sub) : db._ttl.batch)
const encode = db._ttl.encoding.encode
- db._ttl._lock(keys, function (release) {
- callback = release(callback || function () {})
- ttloff(db, keys, function () {
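+  // async-lock passes a release callback to the handler; the keys stay locked
+  // until release() is called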
+ await db._ttl._lock.acquire(keys, async function (release) {
+ try {
+ await ttloff(db, keys)
keys.forEach(function (key) {
- batch.push({ type: 'put', key: expiryKey(db, exp, key), value: encode(key) })
- batch.push({ type: 'put', key: prefixKey(db, key), value: encode(exp) })
+ batch.push({ type: 'put', key: expiryKey(db, expiryTime, key), value: encode(key) })
+ batch.push({ type: 'put', key: prefixKey(db, key), value: encode(expiryTime) })
})
+ if (!batch.length) return release()
- if (!batch.length) return callback()
-
- batchFn(batch, { keyEncoding: 'binary', valueEncoding: 'binary' }, function (err) {
- if (err) { db.emit('error', err) }
- callback()
- })
- })
+ await batchFn(batch, { keyEncoding: 'binary', valueEncoding: 'binary' })
+ } catch (err) {
+ db.emit('error', err)
+ }
+ release()
})
}
-function ttloff (db, keys, callback) {
+async function ttloff (db, keys) {
const batch = []
const sub = db._ttl.sub
const getFn = (sub ? sub.get.bind(sub) : db.get.bind(db))
const batchFn = (sub ? sub.batch.bind(sub) : db._ttl.batch)
const decode = db._ttl.encoding.decode
- const done = after(keys.length, function (err) {
- if (err) db.emit('error', err)
-
- if (!batch.length) return callback && callback()
-
- batchFn(batch, { keyEncoding: 'binary', valueEncoding: 'binary' }, function (err) {
- if (err) { db.emit('error', err) }
- callback && callback()
- })
- })
-
- keys.forEach(function (key) {
- const prefixedKey = prefixKey(db, key)
- getFn(prefixedKey, { keyEncoding: 'binary', valueEncoding: 'binary' }, function (err, exp) {
- if (!err && exp) {
- batch.push({ type: 'del', key: expiryKey(db, decode(exp), key) })
- batch.push({ type: 'del', key: prefixedKey })
+ try {
+ await Promise.all(keys.map(async key => {
+ const prefixedKey = prefixKey(db, key)
+ try {
+ // TODO: refactor with getMany
+ const exp = await getFn(prefixedKey, { keyEncoding: 'binary', valueEncoding: 'binary' })
+ if (exp) {
+ batch.push({ type: 'del', key: expiryKey(db, decode(exp), key) })
+ batch.push({ type: 'del', key: prefixedKey })
+ }
+ } catch (err) {
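+        // older abstract-level versions reject with LEVEL_NOT_FOUND for a missing
+        // key instead of resolving to undefined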
+ if (err.code !== 'LEVEL_NOT_FOUND') throw err
}
- done(err && err.name !== 'NotFoundError' && err)
- })
- })
-}
-
-function put (db, key, value, options, callback) {
- if (typeof options === 'function') {
- callback = options
- options = {}
+ }))
+ if (!batch.length) return
+ await batchFn(batch, { keyEncoding: 'binary', valueEncoding: 'binary' })
+ } catch (err) {
+ db.emit('error', err)
}
+}
- options || (options = {})
-
+function put (db, key, value, options = {}) {
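+  // apply the default TTL unless the caller set one explicitly (ttl: 0 opts out)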
if (db._ttl.options.defaultTTL > 0 && !options.ttl && options.ttl !== 0) {
options.ttl = db._ttl.options.defaultTTL
}
- var done
- var _callback = callback
-
if (options.ttl > 0 && key != null && value != null) {
- done = after(2, _callback || function () {})
- callback = done
- ttlon(db, [key], options.ttl, done)
+ return Promise.all([
+ db._ttl.put.call(db, key, value, options),
+ ttlon(db, [key], options.ttl)
+ ])
+ } else {
+ return db._ttl.put.call(db, key, value, options)
}
-
- db._ttl.put.call(db, key, value, options, callback)
}
-function setTtl (db, key, ttl, callback) {
+function setTtl (db, key, ttl) {
if (ttl > 0 && key != null) {
- ttlon(db, [key], ttl, callback)
+    return ttlon(db, [key], ttl)
}
}
-function del (db, key, options, callback) {
- var done
- var _callback = callback
-
+async function del (db, key, options) {
if (key != null) {
- done = after(2, _callback || function () {})
- callback = done
- ttloff(db, [key], done)
+ await ttloff(db, [key])
}
-
- db._ttl.del.call(db, key, options, callback)
+ await db._ttl.del.call(db, key, options)
}
-function batch (db, arr, options, callback) {
- if (typeof options === 'function') {
- callback = options
- options = {}
- }
-
- options || (options = {})
-
+async function batch (db, arr, options = {}) {
if (db._ttl.options.defaultTTL > 0 && !options.ttl && options.ttl !== 0) {
options.ttl = db._ttl.options.defaultTTL
}
- var done
- var on
- var off
- var _callback = callback
-
if (options.ttl > 0 && Array.isArray(arr)) {
- done = after(3, _callback || function () {})
- callback = done
-
- on = []
- off = []
+ const on = []
+ const off = []
arr.forEach(function (entry) {
if (!entry || entry.key == null) { return }
-
if (entry.type === 'put' && entry.value != null) on.push(entry.key)
if (entry.type === 'del') off.push(entry.key)
})
-
- if (on.length) {
- ttlon(db, on, options.ttl, done)
- } else {
- done()
- }
-
- if (off.length) {
- ttloff(db, off, done)
- } else {
- done()
- }
+ await Promise.all([
+ on.length ? ttlon(db, on, options.ttl) : null,
+ off.length ? ttloff(db, off) : null
+ ])
}
- db._ttl.batch.call(db, arr, options, callback)
+ return db._ttl.batch.call(db, arr, options)
}
-function close (db, callback) {
- stopTtl(db, function () {
- if (db._ttl && typeof db._ttl.close === 'function') {
- return db._ttl.close.call(db, callback)
- }
- callback && callback()
- })
+async function close (db) {
+ stopTtl(db)
+ if (db._ttl && typeof db._ttl.close === 'function') {
+ await db._ttl.close.call(db)
+ }
}
-function setup (db, options) {
- if (db._ttl) return
+function setup (db, options = {}) {
+ if ('_ttl' in db) return db
- options || (options = {})
-
- options = xtend({
+ const opts = {
methodPrefix: '',
namespace: options.sub ? '' : 'ttl',
expiryNamespace: 'x',
separator: '!',
checkFrequency: 10000,
- defaultTTL: 0
- }, options)
+ defaultTTL: 0,
+ ...options
+ }
- const _prefixNs = options.namespace ? [options.namespace] : []
+ const _prefixNs = opts.namespace ? [opts.namespace] : []
db._ttl = {
put: db.put.bind(db),
del: db.del.bind(db),
batch: db.batch.bind(db),
close: db.close.bind(db),
- sub: options.sub,
- options: options,
- encoding: encoding.create(options),
+ sub: 'sub' in opts ? opts.sub : undefined,
+ options: opts,
+ encoding: createEncoding(opts),
_prefixNs: _prefixNs,
- _expiryNs: _prefixNs.concat(options.expiryNamespace),
- _lock: new Lock()
+ _expiryNs: _prefixNs.concat(opts.expiryNamespace),
+ _lock: new AsyncLock()
}
- db[options.methodPrefix + 'put'] = put.bind(null, db)
- db[options.methodPrefix + 'del'] = del.bind(null, db)
- db[options.methodPrefix + 'batch'] = batch.bind(null, db)
- db[options.methodPrefix + 'ttl'] = setTtl.bind(null, db)
- db[options.methodPrefix + 'stop'] = stopTtl.bind(null, db)
+ db[opts.methodPrefix + 'put'] = put.bind(null, db)
+ db[opts.methodPrefix + 'del'] = del.bind(null, db)
+ db[opts.methodPrefix + 'batch'] = batch.bind(null, db)
+ db[opts.methodPrefix + 'ttl'] = setTtl.bind(null, db)
+ db[opts.methodPrefix + 'stop'] = stopTtl.bind(null, db)
// we must intercept close()
db.close = close.bind(null, db)
- startTtl(db, options.checkFrequency)
+ startTtl(db, opts.checkFrequency)
return db
}
-module.exports = setup
+export default setup
diff --git a/package.json b/package.json
index b658478..dc83124 100644
--- a/package.json
+++ b/package.json
@@ -5,36 +5,37 @@
"author": "Rod Vagg (https://github.com/rvagg)",
"license": "MIT",
"main": "./level-ttl.js",
+ "type": "module",
"scripts": {
- "test": "standard && hallmark && (nyc -s node test.js | faucet) && nyc report",
- "coverage": "nyc report -r lcovonly",
+ "test": "standard && hallmark && c8 mocha",
"hallmark": "hallmark --fix",
"dependency-check": "dependency-check . test.js",
"prepublishOnly": "npm run dependency-check"
},
"files": [
"encoding.js",
- "level-ttl.js"
+ "level-ttl.js",
+ "level-ttl.d.ts"
],
"dependencies": {
- "after": "~0.8.2",
- "lock": "~1.1.0",
- "xtend": "~4.0.1"
+ "async-lock": "^1.4.1",
+ "level-read-stream": "^2.0.0"
},
"devDependencies": {
+ "@types/async-lock": "^1.4.2",
+ "@types/bytewise": "^1.1.2",
+ "@types/mocha": "^10.0.10",
+ "@types/readable-stream": "^4.0.22",
"bytewise": ">=0.8",
+ "c8": "^10.1.3",
"dependency-check": "^3.3.0",
- "faucet": "^0.0.3",
"hallmark": "^3.1.0",
- "level-concat-iterator": "^3.0.0",
- "level-test": "^9.0.0",
- "nyc": "^15.1.0",
+ "memory-level": "^3.1.0",
+ "mocha": "^11.7.4",
+ "should": "^13.2.3",
"slump": "^3.0.0",
- "standard": "^16.0.3",
- "subleveldown": "^5.0.1",
- "tape": "^5.3.1"
+ "standard": "^16.0.3"
},
- "peerDependencies": {},
"repository": {
"type": "git",
"url": "https://github.com/Level/level-ttl.git"
@@ -47,6 +48,6 @@
"ttl"
],
"engines": {
- "node": ">=10"
+ "node": ">=18"
}
}
diff --git a/test.js b/test.js
index 373cb09..4bd9d74 100644
--- a/test.js
+++ b/test.js
@@ -1,717 +1,485 @@
-'use strict'
-
-const tape = require('tape')
-const level = require('level-test')()
-const concat = require('level-concat-iterator')
-const ttl = require('./')
-const xtend = require('xtend')
-const sublevel = require('subleveldown')
-const random = require('slump')
-const bytewise = require('bytewise')
-const bwEncode = bytewise.encode
-
-function ltest (desc, opts, cb) {
- if (typeof opts === 'function') {
- cb = opts
- opts = {}
- }
+/* eslint-env mocha */
+import should from 'should'
+import { MemoryLevel } from 'memory-level'
+import ttl from './level-ttl.js'
+import bytewise from 'bytewise'
+import { bwRange, contains, getDbEntries, getDbEntriesAfterDelay, numberRange, randomPutBatch, shouldNotBeCalled, wait } from './tests_helpers.js'
- tape(desc, function (t) {
- level(opts, function (err, db) {
- t.error(err, 'no error on open()')
- t.ok(db, 'valid db object')
-
- var end = t.end.bind(t)
+const bwEncode = bytewise.encode
+const level = opts => new MemoryLevel(opts)
+const levelTtl = opts => ttl(level(opts), opts)
- t.end = function () {
- db.close(function (err) {
- t.error(err, 'no error on close()')
- end()
- })
- }
+describe('level-ttl', () => {
+ it('should work without options', () => {
+ levelTtl()
+ })
- cb(t, db)
+ it('should separate data and sublevel ttl meta data', async () => {
+ const db = new MemoryLevel()
+ const sub = db.sublevel('meta')
+ const ttldb = ttl(db, { sub })
+ const batch = randomPutBatch(5)
+ await ttldb.batch(batch, { ttl: 10000 })
+ const entries = await getDbEntries(db)
+ batch.forEach(item => {
+ contains(entries, '!meta!' + item.key, /\d{13}/)
+ contains(entries, new RegExp('!meta!x!\\d{13}!' + item.key), item.key)
})
})
-}
-function test (name, fn, opts) {
- ltest(name, opts, function (t, db) {
- var ttlDb = ttl(db, xtend({ checkFrequency: 50 }, opts))
- fn(t, ttlDb)
+ it('should separate data and sublevel ttl meta data (custom ttlEncoding)', async () => {
+ const db = new MemoryLevel({ keyEncoding: 'binary', valueEncoding: 'binary' })
+ const sub = db.sublevel('meta')
+ const ttldb = ttl(db, { sub, ttlEncoding: bytewise })
+ const batch = randomPutBatch(5)
+ function prefix (buf) {
+ return Buffer.concat([Buffer.from('!meta!'), buf])
+ }
+ await ttldb.batch(batch, { ttl: 10000 })
+ const entries = await getDbEntries(db)
+ batch.forEach(item => {
+ contains(entries, prefix(bwEncode([item.key])), bwRange())
+ contains(entries, {
+ gt: prefix(bwEncode(['x', new Date(0), item.key])),
+ lt: prefix(bwEncode(['x', new Date(9999999999999), item.key]))
+ }, bwEncode(item.key))
+ })
})
-}
-function db2arr (t, db, callback, opts) {
- concat(db.iterator(opts), function (err, arr) {
- if (err) return t.fail(err)
- callback(arr)
+ it('should expire sublevel data properly', async () => {
+ const db = new MemoryLevel()
+ const sub = db.sublevel('meta')
+ const ttldb = ttl(db, { checkFrequency: 25, sub })
+ const batch = randomPutBatch(50)
+ await ttldb.batch(batch, { ttl: 100 })
+ const entries = await getDbEntriesAfterDelay(db, 200)
+ entries.length.should.equal(0)
})
-}
-
-function bufferEq (a, b) {
- if (a instanceof Buffer && b instanceof Buffer) {
- return a.toString('hex') === b.toString('hex')
- }
-}
-
-function isRange (range) {
- return range && (range.gt || range.lt || range.gte || range.lte)
-}
-
-function matchRange (range, buffer) {
- var target = buffer.toString('hex')
- var match = true
-
- if (range.gt) {
- match = match && target > range.gt.toString('hex')
- } else if (range.gte) {
- match = match && target >= range.gte.toString('hex')
- }
-
- if (range.lt) {
- match = match && target < range.lt.toString('hex')
- } else if (range.lte) {
- match = match && target <= range.lte.toString('hex')
- }
-
- return match
-}
-
-function bwRange (prefix, resolution) {
- const now = Date.now()
- const min = new Date(resolution ? now - resolution : 0)
- const max = new Date(resolution ? now + resolution : 9999999999999)
- return {
- gte: bwEncode(prefix ? prefix.concat(min) : min),
- lte: bwEncode(prefix ? prefix.concat(max) : max)
- }
-}
-
-function formatRecord (key, value) {
- if (isRange(key)) {
- key.source = '[object KeyRange]'
- }
- if (isRange(value)) {
- value.source = '[object ValueRange]'
- }
- return '{' + (key.source || key) + ', ' + (value.source || value) + '}'
-}
-
-function contains (t, arr, key, value) {
- for (var i = 0; i < arr.length; i++) {
- if (typeof key === 'string' && arr[i].key !== key) continue
- if (typeof value === 'string' && arr[i].value !== value) continue
- if (key instanceof RegExp && !key.test(arr[i].key)) continue
- if (value instanceof RegExp && !value.test(arr[i].value)) continue
- if (key instanceof Buffer && !bufferEq(key, arr[i].key)) continue
- if (value instanceof Buffer && !bufferEq(value, arr[i].value)) continue
- if (isRange(key) && !matchRange(key, arr[i].key)) continue
- if (isRange(value) && !matchRange(value, arr[i].value)) continue
- return t.pass('contains ' + formatRecord(key, value))
- }
- return t.fail('does not contain ' + formatRecord(key, value))
-}
-
-function randomPutBatch (length) {
- var batch = []
- var randomize = function () {
- return random.string({ enc: 'base58', length: 10 })
- }
- for (var i = 0; i < length; ++i) {
- batch.push({ type: 'put', key: randomize(), value: randomize() })
- }
- return batch
-}
-
-function verifyIn (t, db, delay, cb, opts) {
- setTimeout(function () {
- db2arr(t, db, cb, opts)
- }, delay)
-}
-test('single ttl entry', function (t, db) {
- t.throws(db.put.bind(db), { name: 'WriteError', message: 'put() requires key and value arguments' })
- t.throws(db.del.bind(db), { name: 'WriteError', message: 'del() requires a key argument' })
- t.end()
-})
-
-test('single ttl entry with put', function (t, db) {
- db.put('foo', 'foovalue', function (err) {
- t.notOk(err, 'no error')
- db.put('bar', 'barvalue', { ttl: 100 }, function (err) {
- t.notOk(err, 'no error')
- db2arr(t, db, function (arr) {
- contains(t, arr, /!ttl!x!\d{13}!bar/, 'bar')
- contains(t, arr, '!ttl!bar', /\d{13}/)
- contains(t, arr, 'bar', 'barvalue')
- contains(t, arr, 'foo', 'foovalue')
- verifyIn(t, db, 150, function (arr) {
- t.deepEqual(arr, [
- { key: 'foo', value: 'foovalue' }
- ])
- t.end()
- })
- })
- })
+ it('should expire sublevel data properly (custom ttlEncoding)', async () => {
+ const db = new MemoryLevel()
+ const sub = db.sublevel('meta')
+ const ttldb = ttl(db, { checkFrequency: 25, sub, ttlEncoding: bytewise })
+ const batch = randomPutBatch(50)
+ await ttldb.batch(batch, { ttl: 100 })
+ const entries = await getDbEntriesAfterDelay(db, 200)
+ entries.length.should.equal(0)
})
})
-test('single ttl entry with put (custom ttlEncoding)', function (t, db) {
- db.put('foo', 'foovalue', function (err) {
- t.notOk(err, 'no error')
- db.put('bar', 'barvalue', { ttl: 100 }, function (err) {
- t.notOk(err, 'no error')
- db2arr(t, db, function (arr) {
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar'))
- contains(t, arr, bwEncode(['ttl', 'bar']), bwRange())
- contains(t, arr, Buffer.from('bar'), Buffer.from('barvalue'))
- contains(t, arr, Buffer.from('foo'), Buffer.from('foovalue'))
- verifyIn(t, db, 150, function (arr) {
- t.deepEqual(arr, [
- { key: 'foo', value: 'foovalue' }
- ])
- t.end()
- })
- }, { keyEncoding: 'binary', valueEncoding: 'binary' })
- })
+describe('put', () => {
+ it('should throw on missing key', async () => {
+ const db = levelTtl({ checkFrequency: 50 })
+ try {
+ // @ts-expect-error
+ await db.put()
+ shouldNotBeCalled()
+ } catch (err) {
+ err.message.should.equal('Key cannot be null or undefined')
+ err.code.should.equal('LEVEL_INVALID_KEY')
+ }
})
-}, { ttlEncoding: bytewise })
-
-test('multiple ttl entries with put', function (t, db) {
- var expect = function (delay, keys, cb) {
- verifyIn(t, db, delay, function (arr) {
- t.equal(arr.length, 1 + keys * 3, 'correct number of entries in db')
- contains(t, arr, 'afoo', 'foovalue')
- if (keys >= 1) {
- contains(t, arr, 'bar1', 'barvalue1')
- contains(t, arr, /^!ttl!x!\d{13}!bar1$/, 'bar1')
- contains(t, arr, '!ttl!bar1', /^\d{13}$/)
- }
- if (keys >= 2) {
- contains(t, arr, 'bar2', 'barvalue2')
- contains(t, arr, /^!ttl!x!\d{13}!bar2$/, 'bar2')
- contains(t, arr, '!ttl!bar2', /^\d{13}$/)
- }
- if (keys >= 3) {
- contains(t, arr, 'bar3', 'barvalue3')
- contains(t, arr, /^!ttl!x!\d{13}!bar3$/, 'bar3')
- contains(t, arr, '!ttl!bar3', /^\d{13}$/)
- }
- cb && cb()
- })
- }
- db.put('afoo', 'foovalue')
- db.put('bar1', 'barvalue1', { ttl: 400 })
- db.put('bar2', 'barvalue2', { ttl: 250 })
- db.put('bar3', 'barvalue3', { ttl: 100 })
+ it('should put a single ttl entry', async () => {
+ const db = levelTtl({ checkFrequency: 50 })
+ await db.put('foo', 'foovalue')
+ await db.put('bar', 'barvalue', { ttl: 100 })
+ const entries = await getDbEntries(db)
+ contains(entries, /!ttl!x!\d{13}!bar/, 'bar')
+ contains(entries, '!ttl!bar', /\d{13}/)
+ contains(entries, 'bar', 'barvalue')
+ contains(entries, 'foo', 'foovalue')
+ })
- expect(25, 3)
- expect(200, 2)
- expect(350, 1)
- expect(500, 0, t.end.bind(t))
-})
+ it('should put a single ttl entry (custom ttlEncoding)', async () => {
+ const db = levelTtl({ checkFrequency: 50, ttlEncoding: bytewise })
+ await db.put('foo', 'foovalue')
+ await db.put('bar', 'barvalue', { ttl: 100 })
+ const entries = await getDbEntries(db, { keyEncoding: 'binary', valueEncoding: 'binary' })
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar'))
+ contains(entries, bwEncode(['ttl', 'bar']), bwRange())
+ contains(entries, Buffer.from('bar'), Buffer.from('barvalue'))
+ contains(entries, Buffer.from('foo'), Buffer.from('foovalue'))
+ const updatedEntries = await getDbEntriesAfterDelay(db, 150)
+ updatedEntries.should.deepEqual([{ key: 'foo', value: 'foovalue' }])
+ })
-test('multiple ttl entries with put (custom ttlEncoding)', function (t, db) {
- var expect = function (delay, keys, cb) {
- verifyIn(t, db, delay, function (arr) {
- t.equal(arr.length, 1 + keys * 3, 'correct number of entries in db')
- contains(t, arr, Buffer.from('afoo'), Buffer.from('foovalue'))
- if (keys >= 1) {
- contains(t, arr, Buffer.from('bar1'), Buffer.from('barvalue1'))
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar1'))
- contains(t, arr, bwEncode(['ttl', 'bar1']), bwRange())
- }
- if (keys >= 2) {
- contains(t, arr, Buffer.from('bar2'), Buffer.from('barvalue2'))
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar2'))
- contains(t, arr, bwEncode(['ttl', 'bar2']), bwRange())
- }
- if (keys >= 3) {
- contains(t, arr, Buffer.from('bar3'), Buffer.from('barvalue3'))
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar3'))
- contains(t, arr, bwEncode(['ttl', 'bar3']), bwRange())
- }
- cb && cb()
- }, { keyEncoding: 'binary', valueEncoding: 'binary' })
- }
-
- db.put('afoo', 'foovalue')
- db.put('bar1', 'barvalue1', { ttl: 400 })
- db.put('bar2', 'barvalue2', { ttl: 250 })
- db.put('bar3', 'barvalue3', { ttl: 100 })
-
- expect(25, 3)
- expect(200, 2)
- expect(350, 1)
- expect(500, 0, t.end.bind(t))
-}, { ttlEncoding: bytewise })
-
-test('multiple ttl entries with batch-put', function (t, db) {
- var expect = function (delay, keys, cb) {
- verifyIn(t, db, delay, function (arr) {
- t.equal(arr.length, 1 + keys * 3, 'correct number of entries in db')
- contains(t, arr, 'afoo', 'foovalue')
- if (keys >= 1) {
- contains(t, arr, 'bar1', 'barvalue1')
- contains(t, arr, /^!ttl!x!\d{13}!bar1$/, 'bar1')
- contains(t, arr, '!ttl!bar1', /^\d{13}$/)
+ it('should put multiple ttl entries', async () => {
+ const db = levelTtl({ checkFrequency: 50 })
+ async function expect (delay, keysCount) {
+ const entries = await getDbEntriesAfterDelay(db, delay)
+ entries.length.should.equal(1 + keysCount * 3)
+ contains(entries, 'afoo', 'foovalue')
+ if (keysCount >= 1) {
+ contains(entries, 'bar1', 'barvalue1')
+ contains(entries, /^!ttl!x!\d{13}!bar1$/, 'bar1')
+ contains(entries, '!ttl!bar1', /^\d{13}$/)
}
- if (keys >= 2) {
- contains(t, arr, 'bar2', 'barvalue2')
- contains(t, arr, /^!ttl!x!\d{13}!bar2$/, 'bar2')
- contains(t, arr, '!ttl!bar2', /^\d{13}$/)
+ if (keysCount >= 2) {
+ contains(entries, 'bar2', 'barvalue2')
+ contains(entries, /^!ttl!x!\d{13}!bar2$/, 'bar2')
+ contains(entries, '!ttl!bar2', /^\d{13}$/)
}
- if (keys >= 3) {
- contains(t, arr, 'bar3', 'barvalue3')
- contains(t, arr, /^!ttl!x!\d{13}!bar3$/, 'bar3')
- contains(t, arr, '!ttl!bar3', /^\d{13}$/)
+ if (keysCount >= 3) {
+ contains(entries, 'bar3', 'barvalue3')
+ contains(entries, /^!ttl!x!\d{13}!bar3$/, 'bar3')
+ contains(entries, '!ttl!bar3', /^\d{13}$/)
}
- if (keys >= 3) {
- contains(t, arr, 'bar4', 'barvalue4')
- contains(t, arr, /^!ttl!x!\d{13}!bar4$/, 'bar4')
- contains(t, arr, '!ttl!bar4', /^\d{13}$/)
- }
- cb && cb()
- })
- }
-
- db.put('afoo', 'foovalue')
- db.batch([
- { type: 'put', key: 'bar1', value: 'barvalue1' },
- { type: 'put', key: 'bar2', value: 'barvalue2' }
- ], { ttl: 60 })
- db.batch([
- { type: 'put', key: 'bar3', value: 'barvalue3' },
- { type: 'put', key: 'bar4', value: 'barvalue4' }
- ], { ttl: 120 })
-
- expect(20, 4, t.end.bind(t))
-})
+ }
+
+ db.put('afoo', 'foovalue')
+ db.put('bar1', 'barvalue1', { ttl: 400 })
+ db.put('bar2', 'barvalue2', { ttl: 250 })
+ db.put('bar3', 'barvalue3', { ttl: 100 })
+
+ await Promise.all([
+ expect(25, 3),
+ expect(200, 2),
+ expect(350, 1),
+ expect(500, 0)
+ ])
+ })
-test('multiple ttl entries with batch-put (custom ttlEncoding)', function (t, db) {
- var expect = function (delay, keys, cb) {
- verifyIn(t, db, delay, function (arr) {
- t.equal(arr.length, 1 + keys * 3, 'correct number of entries in db')
- contains(t, arr, Buffer.from('afoo'), Buffer.from('foovalue'))
- if (keys >= 1) {
- contains(t, arr, Buffer.from('bar1'), Buffer.from('barvalue1'))
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar1'))
- contains(t, arr, bwEncode(['ttl', 'bar1']), bwRange())
- }
- if (keys >= 2) {
- contains(t, arr, Buffer.from('bar2'), Buffer.from('barvalue2'))
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar2'))
- contains(t, arr, bwEncode(['ttl', 'bar2']), bwRange())
+ it('should put multiple ttl entries (custom ttlEncoding)', async () => {
+ const db = levelTtl({ checkFrequency: 50, ttlEncoding: bytewise })
+ async function expect (delay, keysCount) {
+ const entries = await getDbEntriesAfterDelay(db, delay, { keyEncoding: 'binary', valueEncoding: 'binary' })
+ entries.length.should.equal(1 + keysCount * 3)
+ contains(entries, Buffer.from('afoo'), Buffer.from('foovalue'))
+ if (keysCount >= 1) {
+ contains(entries, Buffer.from('bar1'), Buffer.from('barvalue1'))
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar1'))
+ contains(entries, bwEncode(['ttl', 'bar1']), bwRange())
}
- if (keys >= 3) {
- contains(t, arr, Buffer.from('bar3'), Buffer.from('barvalue3'))
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar3'))
- contains(t, arr, bwEncode(['ttl', 'bar3']), bwRange())
+ if (keysCount >= 2) {
+ contains(entries, Buffer.from('bar2'), Buffer.from('barvalue2'))
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar2'))
+ contains(entries, bwEncode(['ttl', 'bar2']), bwRange())
}
- if (keys >= 3) {
- contains(t, arr, Buffer.from('bar4'), Buffer.from('barvalue4'))
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar4'))
- contains(t, arr, bwEncode(['ttl', 'bar4']), bwRange())
+ if (keysCount >= 3) {
+ contains(entries, Buffer.from('bar3'), Buffer.from('barvalue3'))
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar3'))
+ contains(entries, bwEncode(['ttl', 'bar3']), bwRange())
}
- cb && cb()
- }, { keyEncoding: 'binary', valueEncoding: 'binary' })
- }
-
- db.put('afoo', 'foovalue')
- db.batch([
- { type: 'put', key: 'bar1', value: 'barvalue1' },
- { type: 'put', key: 'bar2', value: 'barvalue2' }
- ], { ttl: 60 })
- db.batch([
- { type: 'put', key: 'bar3', value: 'barvalue3' },
- { type: 'put', key: 'bar4', value: 'barvalue4' }
- ], { ttl: 120 })
-
- expect(20, 4, t.end.bind(t))
-}, { ttlEncoding: bytewise })
-
-test('prolong entry life with additional put', function (t, db) {
- var retest = function (delay, cb) {
- setTimeout(function () {
- db.put('bar', 'barvalue', { ttl: 250 })
- verifyIn(t, db, 50, function (arr) {
- contains(t, arr, 'foo', 'foovalue')
- contains(t, arr, 'bar', 'barvalue')
- contains(t, arr, /!ttl!x!\d{13}!bar/, 'bar')
- contains(t, arr, '!ttl!bar', /\d{13}/)
- cb && cb()
- })
- }, delay)
- }
- var i
-
- db.put('foo', 'foovalue')
- for (i = 0; i < 180; i += 20) retest(i)
- retest(180, t.end.bind(t))
-})
+ }
+
+ db.put('afoo', 'foovalue')
+ db.put('bar1', 'barvalue1', { ttl: 400 })
+ db.put('bar2', 'barvalue2', { ttl: 250 })
+ db.put('bar3', 'barvalue3', { ttl: 100 })
+
+ await Promise.all([
+ expect(25, 3),
+ expect(200, 2),
+ expect(350, 1),
+ expect(500, 0)
+ ])
+ })
-test('prolong entry life with additional put (custom ttlEncoding)', function (t, db) {
- var retest = function (delay, cb) {
- setTimeout(function () {
- db.put('bar', 'barvalue', { ttl: 250 })
- verifyIn(t, db, 50, function (arr) {
- contains(t, arr, Buffer.from('foo'), Buffer.from('foovalue'))
- contains(t, arr, Buffer.from('bar'), Buffer.from('barvalue'))
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar'))
- contains(t, arr, bwEncode(['ttl', 'bar']), bwRange())
- cb && cb()
- }, { keyEncoding: 'binary', valueEncoding: 'binary' })
- }, delay)
- }
-
- db.put('foo', 'foovalue')
- for (var i = 0; i < 180; i += 20) retest(i)
- retest(180, t.end.bind(t))
-}, { ttlEncoding: bytewise })
-
-test('prolong entry life with ttl(key, ttl)', function (t, db) {
- var retest = function (delay, cb) {
- setTimeout(function () {
- db.ttl('bar', 250)
- verifyIn(t, db, 25, function (arr) {
- contains(t, arr, 'bar', 'barvalue')
- contains(t, arr, 'foo', 'foovalue')
- contains(t, arr, /!ttl!x!\d{13}!bar/, 'bar')
- contains(t, arr, '!ttl!bar', /\d{13}/)
- cb && cb()
- })
- }, delay)
- }
-
- db.put('foo', 'foovalue')
- db.put('bar', 'barvalue')
- for (var i = 0; i < 180; i += 20) retest(i)
- retest(180, t.end.bind(t))
-})
+ it('should prolong entry life with additional put', async () => {
+ const db = levelTtl({ checkFrequency: 50 })
+ await db.put('foo', 'foovalue')
+ for (let i = 0; i <= 180; i += 20) {
+ await db.put('bar', 'barvalue', { ttl: 250 })
+ const entries = await getDbEntriesAfterDelay(db, 50)
+ contains(entries, 'foo', 'foovalue')
+ contains(entries, 'bar', 'barvalue')
+ contains(entries, /!ttl!x!\d{13}!bar/, 'bar')
+ contains(entries, '!ttl!bar', /\d{13}/)
+ }
+ })
-test('prolong entry life with ttl(key, ttl) (custom ttlEncoding)', function (t, db) {
- var retest = function (delay, cb) {
- setTimeout(function () {
- db.ttl('bar', 250)
- verifyIn(t, db, 25, function (arr) {
- contains(t, arr, Buffer.from('bar'), Buffer.from('barvalue'))
- contains(t, arr, Buffer.from('foo'), Buffer.from('foovalue'))
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar'))
- contains(t, arr, bwEncode(['ttl', 'bar']), bwRange())
- cb && cb()
- }, { keyEncoding: 'binary', valueEncoding: 'binary' })
- }, delay)
- }
-
- db.put('foo', 'foovalue')
- db.put('bar', 'barvalue')
- for (var i = 0; i < 180; i += 20) retest(i)
- retest(180, t.end.bind(t))
-}, { ttlEncoding: bytewise })
-
-test('del removes both key and its ttl meta data', function (t, db) {
- db.put('foo', 'foovalue')
- db.put('bar', 'barvalue', { ttl: 250 })
-
- verifyIn(t, db, 150, function (arr) {
- contains(t, arr, 'foo', 'foovalue')
- contains(t, arr, 'bar', 'barvalue')
- contains(t, arr, /!ttl!x!\d{13}!bar/, 'bar')
- contains(t, arr, '!ttl!bar', /\d{13}/)
+ it('should prolong entry life with additional put (custom ttlEncoding)', async () => {
+ const db = levelTtl({ checkFrequency: 50, ttlEncoding: bytewise })
+ await db.put('foo', 'foovalue')
+ for (let i = 0; i <= 180; i += 20) {
+ await db.put('bar', 'barvalue', { ttl: 250 })
+ const entries = await getDbEntriesAfterDelay(db, 50, { keyEncoding: 'binary', valueEncoding: 'binary' })
+ contains(entries, Buffer.from('foo'), Buffer.from('foovalue'))
+ contains(entries, Buffer.from('bar'), Buffer.from('barvalue'))
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar'))
+ contains(entries, bwEncode(['ttl', 'bar']), bwRange())
+ }
})
- setTimeout(function () {
- db.del('bar')
- }, 250)
+ it('should not duplicate the TTL key when prolonging entry', async () => {
+ const db = levelTtl({ checkFrequency: 50 })
+ async function retest (delay) {
+ await wait(delay)
+ db.put('bar', 'barvalue', { ttl: 20 })
+ const entries = await getDbEntriesAfterDelay(db, 50)
+ const count = entries.filter(entry => {
+ return /!ttl!x!\d{13}!bar/.exec(entry.key)
+ }).length
+ count.should.be.belowOrEqual(1)
+ }
+ db.put('foo', 'foovalue')
+ await Promise.all(numberRange(0, 50).map(retest))
+ })
- verifyIn(t, db, 350, function (arr) {
- t.deepEqual(arr, [
- { key: 'foo', value: 'foovalue' }
- ])
- t.end()
+ it('should put a single entry with default ttl set', async () => {
+ const db = levelTtl({ checkFrequency: 50, defaultTTL: 75 })
+ await basicPutTest(db, 175)
+ })
+
+ it('should put a single entry with default ttl set (custom ttlEncoding)', async () => {
+ const db = levelTtl({ checkFrequency: 50, defaultTTL: 75, ttlEncoding: bytewise })
+ await basicPutTest(db, 175)
+ })
+
+ it('should put a single entry with overridden ttl set', async () => {
+ const db = levelTtl({ checkFrequency: 50, defaultTTL: 75 })
+ await basicPutTest(db, 200, { ttl: 99 })
+ })
+
+ it('should put a single entry with overridden ttl set (custom ttlEncoding)', async () => {
+ const db = levelTtl({ checkFrequency: 50, defaultTTL: 75, ttlEncoding: bytewise })
+ await basicPutTest(db, 200, { ttl: 99 })
})
})
-test('del removes both key and its ttl meta data (value encoding)', function (t, db) {
- db.put('foo', { v: 'foovalue' })
- db.put('bar', { v: 'barvalue' }, { ttl: 250 })
+async function basicPutTest (db, timeout, opts) {
+ await db.put('foo', 'foovalue', opts)
+ await wait(50)
+ const res = await db.get('foo')
+ res.should.equal('foovalue')
+ await wait(timeout - 50)
+ const res2 = await db.get('foo')
+ should(res2).not.be.ok()
+}
- verifyIn(t, db, 50, function (arr) {
- contains(t, arr, 'foo', '{"v":"foovalue"}')
- contains(t, arr, 'bar', '{"v":"barvalue"}')
- contains(t, arr, /!ttl!x!\d{13}!bar/, 'bar')
- contains(t, arr, '!ttl!bar', /\d{13}/)
- }, { valueEncoding: 'utf8' })
+describe('del', () => {
+ it('should throw on missing key', async () => {
+ const db = levelTtl({ checkFrequency: 50 })
+ try {
+ // @ts-expect-error
+ await db.del()
+ shouldNotBeCalled()
+ } catch (err) {
+ err.message.should.equal('Key cannot be null or undefined')
+ err.code.should.equal('LEVEL_INVALID_KEY')
+ }
+ })
- setTimeout(function () {
- db.del('bar')
- }, 175)
+ it('should remove both key and its ttl meta data', async () => {
+ const db = levelTtl({ checkFrequency: 50 })
+ db.put('foo', 'foovalue')
+ db.put('bar', 'barvalue', { ttl: 10000 })
- verifyIn(t, db, 350, function (arr) {
- t.deepEqual(arr, [
- { key: 'foo', value: '{"v":"foovalue"}' }
- ])
- t.end()
- }, { valueEncoding: 'utf8' })
-}, { keyEncoding: 'utf8', valueEncoding: 'json' })
-
-test('del removes both key and its ttl meta data (custom ttlEncoding)', function (t, db) {
- db.put('foo', { v: 'foovalue' })
- db.put('bar', { v: 'barvalue' }, { ttl: 250 })
-
- verifyIn(t, db, 50, function (arr) {
- contains(t, arr, Buffer.from('foo'), Buffer.from('{"v":"foovalue"}'))
- contains(t, arr, Buffer.from('bar'), Buffer.from('{"v":"barvalue"}'))
- contains(t, arr, bwRange(['ttl', 'x']), bwEncode('bar'))
- contains(t, arr, bwEncode(['ttl', 'bar']), bwRange())
- }, { keyEncoding: 'binary', valueEncoding: 'binary' })
-
- setTimeout(function () {
- db.del('bar')
- }, 175)
-
- verifyIn(t, db, 350, function (arr) {
- t.deepEqual(arr, [
- { key: 'foo', value: '{"v":"foovalue"}' }
+ const entries = await getDbEntriesAfterDelay(db, 150)
+ contains(entries, 'foo', 'foovalue')
+ contains(entries, 'bar', 'barvalue')
+ contains(entries, /!ttl!x!\d{13}!bar/, 'bar')
+ contains(entries, '!ttl!bar', /\d{13}/)
+
+ setTimeout(() => db.del('bar'), 250)
+
+ const updatedEntries = await getDbEntriesAfterDelay(db, 350)
+ updatedEntries.should.deepEqual([
+ { key: 'foo', value: 'foovalue' }
])
- t.end()
- }, { valueEncoding: 'utf8' })
-}, { keyEncoding: 'utf8', valueEncoding: 'json', ttlEncoding: bytewise })
-
-function wrappedTest () {
- var intervals = 0
- var _setInterval = global.setInterval
- var _clearInterval = global.clearInterval
-
- global.setInterval = function () {
- intervals++
- return _setInterval.apply(global, arguments)
- }
-
- global.clearInterval = function () {
- intervals--
- return _clearInterval.apply(global, arguments)
- }
-
- test('test stop() method stops interval and doesn\'t hold process up', function (t, db) {
- t.equals(intervals, 1, '1 interval timer')
- db.put('foo', 'bar1', { ttl: 25 })
-
- setTimeout(function () {
- db.get('foo', function (err, value) {
- t.notOk(err, 'no error')
- t.equal('bar1', value)
- })
- }, 40)
-
- setTimeout(function () {
- db.get('foo', function (err, value) {
- t.ok(err && err.notFound, 'not found error')
- t.notOk(value, 'no value')
- })
- }, 80)
-
- setTimeout(function () {
- db.stop(function () {
- db._ttl.close(function () {
- global.setInterval = _setInterval
- global.clearInterval = _clearInterval
- t.equals(0, intervals, 'all interval timers cleared')
- t.end()
- })
- })
- }, 120)
})
-}
-wrappedTest()
-
-function put (timeout, opts) {
- return function (t, db) {
- db.put('foo', 'foovalue', opts, function (err) {
- t.ok(!err, 'no error')
-
- setTimeout(function () {
- db.get('foo', function (err, value) {
- t.notOk(err, 'no error')
- t.equal('foovalue', value)
- })
- }, 50)
-
- setTimeout(function () {
- db.get('foo', function (err, value) {
- t.ok(err && err.notFound, 'not found error')
- t.notOk(value, 'no value')
- t.end()
- })
- }, timeout)
- })
- }
-}
+ it('should remove both key and its ttl meta data (custom ttlEncoding)', async () => {
+ const db = levelTtl({ checkFrequency: 50, keyEncoding: 'utf8', valueEncoding: 'json', ttlEncoding: bytewise })
+ // @ts-expect-error
+ db.put('foo', { v: 'foovalue' })
+ db.put('bar', { v: 'barvalue' }, { ttl: 250 })
-test('single put with default ttl set', put(175), {
- defaultTTL: 75
-})
+ const entries = await getDbEntriesAfterDelay(db, 50, { keyEncoding: 'binary', valueEncoding: 'binary' })
+ contains(entries, Buffer.from('foo'), Buffer.from('{"v":"foovalue"}'))
+ contains(entries, Buffer.from('bar'), Buffer.from('{"v":"barvalue"}'))
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar'))
+ contains(entries, bwEncode(['ttl', 'bar']), bwRange())
-test('single put with default ttl set (custom ttlEncoding)', put(175), {
- defaultTTL: 75,
- ttlEncoding: bytewise
-})
+ setTimeout(() => db.del('bar'), 175)
-test('single put with overridden ttl set', put(200, { ttl: 99 }), {
- defaultTTL: 75
+ const updatedEntries = await getDbEntriesAfterDelay(db, 350, { valueEncoding: 'utf8' })
+ updatedEntries.should.deepEqual([
+ { key: 'foo', value: '{"v":"foovalue"}' }
+ ])
+ })
})
-test('single put with overridden ttl set (custom ttlEncoding)', put(200, { ttl: 99 }), {
- defaultTTL: 75,
- ttlEncoding: bytewise
-})
+describe('batch', () => {
+ it('should batch-put multiple ttl entries', async () => {
+ const db = levelTtl({ checkFrequency: 50 })
+ async function expect (delay, keysCount) {
+ const entries = await getDbEntriesAfterDelay(db, delay)
+ entries.length.should.equal(1 + keysCount * 3)
+ contains(entries, 'afoo', 'foovalue')
+ if (keysCount >= 1) {
+ contains(entries, 'bar1', 'barvalue1')
+ contains(entries, /^!ttl!x!\d{13}!bar1$/, 'bar1')
+ contains(entries, '!ttl!bar1', /^\d{13}$/)
+ }
+ if (keysCount >= 2) {
+ contains(entries, 'bar2', 'barvalue2')
+ contains(entries, /^!ttl!x!\d{13}!bar2$/, 'bar2')
+ contains(entries, '!ttl!bar2', /^\d{13}$/)
+ }
+ if (keysCount >= 3) {
+ contains(entries, 'bar3', 'barvalue3')
+ contains(entries, /^!ttl!x!\d{13}!bar3$/, 'bar3')
+ contains(entries, '!ttl!bar3', /^\d{13}$/)
+ }
+ if (keysCount >= 3) {
+ contains(entries, 'bar4', 'barvalue4')
+ contains(entries, /^!ttl!x!\d{13}!bar4$/, 'bar4')
+ contains(entries, '!ttl!bar4', /^\d{13}$/)
+ }
+ }
-function batch (timeout, opts) {
- return function (t, db) {
+ db.put('afoo', 'foovalue')
db.batch([
- { type: 'put', key: 'foo', value: 'foovalue' },
- { type: 'put', key: 'bar', value: 'barvalue' }
- ], opts, function (err) {
- t.ok(!err, 'no error')
- setTimeout(function () {
- db.get('foo', function (err, value) {
- t.notOk(err, 'no error')
- t.equal('foovalue', value)
- db.get('bar', function (err, value) {
- t.notOk(err, 'no error')
- t.equal('barvalue', value)
- })
- })
- }, 50)
-
- setTimeout(function () {
- db.get('foo', function (err, value) {
- t.ok(err && err.notFound, 'not found error')
- t.notOk(value, 'no value')
- db.get('bar', function (err, value) {
- t.ok(err && err.notFound, 'not found error')
- t.notOk(value, 'no value')
- t.end()
- })
- })
- }, timeout)
- })
- }
-}
-
-test('batch put with default ttl set', batch(175), {
- defaultTTL: 75
-})
+ { type: 'put', key: 'bar1', value: 'barvalue1' },
+ { type: 'put', key: 'bar2', value: 'barvalue2' }
+ ], { ttl: 60 })
+ db.batch([
+ { type: 'put', key: 'bar3', value: 'barvalue3' },
+ { type: 'put', key: 'bar4', value: 'barvalue4' }
+ ], { ttl: 120 })
-test('batch put with default ttl set (custom ttlEncoding)', batch(175), {
- defaultTTL: 75,
- ttlEncoding: bytewise
-})
+ await expect(20, 4)
+ })
-test('batch put with overriden ttl set', batch(200, { ttl: 99 }), {
- defaultTTL: 75
-})
+ it('should batch-put multiple ttl entries (custom ttlEncoding)', async () => {
+ const db = levelTtl({ checkFrequency: 50, ttlEncoding: bytewise })
+ async function expect (delay, keysCount) {
+ const entries = await getDbEntriesAfterDelay(db, delay, { keyEncoding: 'binary', valueEncoding: 'binary' })
+ entries.length.should.equal(1 + keysCount * 3)
+ contains(entries, Buffer.from('afoo'), Buffer.from('foovalue'))
+ if (keysCount >= 1) {
+ contains(entries, Buffer.from('bar1'), Buffer.from('barvalue1'))
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar1'))
+ contains(entries, bwEncode(['ttl', 'bar1']), bwRange())
+ }
+ if (keysCount >= 2) {
+ contains(entries, Buffer.from('bar2'), Buffer.from('barvalue2'))
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar2'))
+ contains(entries, bwEncode(['ttl', 'bar2']), bwRange())
+ }
+ if (keysCount >= 3) {
+ contains(entries, Buffer.from('bar3'), Buffer.from('barvalue3'))
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar3'))
+ contains(entries, bwEncode(['ttl', 'bar3']), bwRange())
+ }
+ if (keysCount >= 3) {
+ contains(entries, Buffer.from('bar4'), Buffer.from('barvalue4'))
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar4'))
+ contains(entries, bwEncode(['ttl', 'bar4']), bwRange())
+ }
+ }
-test('batch put with overriden ttl set (custom ttlEncoding)', batch(200, { ttl: 99 }), {
- defaultTTL: 75,
- ttlEncoding: bytewise
-})
+ db.put('afoo', 'foovalue')
+ db.batch([
+ { type: 'put', key: 'bar1', value: 'barvalue1' },
+ { type: 'put', key: 'bar2', value: 'barvalue2' }
+ ], { ttl: 60 })
+ db.batch([
+ { type: 'put', key: 'bar3', value: 'barvalue3' },
+ { type: 'put', key: 'bar4', value: 'barvalue4' }
+ ], { ttl: 120 })
-ltest('without options', function (t, db) {
- try {
- ttl(db)
- } catch (err) {
- t.notOk(err, 'no error on ttl()')
- }
- t.end()
-})
+ await expect(20, 4)
+ })
-ltest('data and subleveldown ttl meta data separation', function (t, db) {
- var meta = sublevel(db, 'meta')
- var ttldb = ttl(db, { sub: meta })
- var batch = randomPutBatch(5)
-
- ttldb.batch(batch, { ttl: 10000 }, function (err) {
- t.ok(!err, 'no error')
- db2arr(t, db, function (arr) {
- batch.forEach(function (item) {
- contains(t, arr, '!meta!' + item.key, /\d{13}/)
- contains(t, arr, new RegExp('!meta!x!\\d{13}!' + item.key), item.key)
- })
- t.end()
- })
+ it('should batch put with default ttl set', async () => {
+ const db = levelTtl({ checkFrequency: 50, defaultTTL: 75 })
+ await basicBatchPutTest(db, 175)
})
-})
-ltest('data and subleveldown ttl meta data separation (custom ttlEncoding)', function (t, db) {
- var meta = sublevel(db, 'meta')
- var ttldb = ttl(db, { sub: meta, ttlEncoding: bytewise })
- var batch = randomPutBatch(5)
-
- function prefix (buf) {
- return Buffer.concat([Buffer.from('!meta!'), buf])
- }
-
- ttldb.batch(batch, { ttl: 10000 }, function (err) {
- t.ok(!err, 'no error')
- db2arr(t, db, function (arr) {
- batch.forEach(function (item) {
- contains(t, arr, prefix(bwEncode([item.key])), bwRange())
- contains(t, arr, {
- gt: prefix(bwEncode(['x', new Date(0), item.key])),
- lt: prefix(bwEncode(['x', new Date(9999999999999), item.key]))
- }, bwEncode(item.key))
- })
- t.end()
- }, { keyEncoding: 'binary', valueEncoding: 'binary' })
+ it('should batch put with default ttl set (custom ttlEncoding)', async () => {
+ const db = levelTtl({ checkFrequency: 50, defaultTTL: 75, ttlEncoding: bytewise })
+ await basicBatchPutTest(db, 175)
})
-})
-ltest('that subleveldown data expires properly', function (t, db) {
- var meta = sublevel(db, 'meta')
- var ttldb = ttl(db, { checkFrequency: 25, sub: meta })
+  it('should batch put with overridden ttl set', async () => {
+ const db = levelTtl({ checkFrequency: 50, defaultTTL: 75 })
+ await basicBatchPutTest(db, 200, { ttl: 99 })
+ })
- ttldb.batch(randomPutBatch(50), { ttl: 100 }, function (err) {
- t.ok(!err, 'no error')
- verifyIn(t, db, 200, function (arr) {
- t.equal(arr.length, 0, 'should be empty array')
- t.end()
- })
+  it('should batch put with overridden ttl set (custom ttlEncoding)', async () => {
+ const db = levelTtl({ checkFrequency: 50, defaultTTL: 75, ttlEncoding: bytewise })
+ await basicBatchPutTest(db, 200, { ttl: 99 })
})
})
-ltest('that subleveldown data expires properly (custom ttlEncoding)', function (t, db) {
- var meta = sublevel(db, 'meta')
- var ttldb = ttl(db, { checkFrequency: 25, sub: meta, ttlEncoding: bytewise })
+async function basicBatchPutTest (db, timeout, opts) {
+ await db.batch([
+ { type: 'put', key: 'foo', value: 'foovalue' },
+ { type: 'put', key: 'bar', value: 'barvalue' }
+ ], opts)
+ await wait(50)
+ const res = await db.getMany(['foo', 'bar'])
+ res.should.deepEqual(['foovalue', 'barvalue'])
+ await wait(timeout - 50)
+ const res2 = await db.getMany(['foo', 'bar'])
+ res2.should.deepEqual([undefined, undefined])
+}
- ttldb.batch(randomPutBatch(50), { ttl: 100 }, function (err) {
- t.ok(!err, 'no error')
- verifyIn(t, db, 200, function (arr) {
- t.equal(arr.length, 0, 'should be empty array')
- t.end()
- })
+describe('ttl', () => {
+ it('should prolong entry life', async () => {
+ const db = levelTtl({ checkFrequency: 50 })
+ db.put('foo', 'foovalue')
+ db.put('bar', 'barvalue')
+ for (let i = 0; i <= 180; i += 20) {
+ await db.ttl('bar', 250)
+ const entries = await getDbEntriesAfterDelay(db, 25)
+ contains(entries, 'foo', 'foovalue')
+ contains(entries, 'bar', 'barvalue')
+ contains(entries, /!ttl!x!\d{13}!bar/, 'bar')
+ contains(entries, '!ttl!bar', /\d{13}/)
+ }
+ })
+
+ it('should prolong entry life (custom ttlEncoding)', async () => {
+ const db = levelTtl({ checkFrequency: 50, ttlEncoding: bytewise })
+ db.put('foo', 'foovalue')
+ db.put('bar', 'barvalue')
+ for (let i = 0; i <= 180; i += 20) {
+ await db.ttl('bar', 250)
+ const entries = await getDbEntriesAfterDelay(db, 25, { keyEncoding: 'binary', valueEncoding: 'binary' })
+ contains(entries, Buffer.from('bar'), Buffer.from('barvalue'))
+ contains(entries, Buffer.from('foo'), Buffer.from('foovalue'))
+ contains(entries, bwRange(['ttl', 'x']), bwEncode('bar'))
+ contains(entries, bwEncode(['ttl', 'bar']), bwRange())
+ }
})
})
-test('prolong entry with PUT should not duplicate the TTL key', function (t, db) {
- var retest = function (delay, cb) {
- setTimeout(function () {
- db.put('bar', 'barvalue', { ttl: 20 })
- verifyIn(t, db, 50, function (arr) {
- var count = arr.filter(function (kv) {
- return /!ttl!x!\d{13}!bar/.exec(kv.key)
- }).length
-
- t.ok(count <= 1, 'contains one or zero TTL entry')
- cb && cb()
- })
- }, delay)
- }
-
- db.put('foo', 'foovalue')
- for (var i = 0; i < 50; i++) retest(i)
- retest(50, t.end.bind(t))
-}, { checkFrequency: 5 })
+describe('stop', () => {
+ it('should stop interval and not hold process up', async () => {
+ let intervals = 0
+ const _setInterval = global.setInterval
+ const _clearInterval = global.clearInterval
+
+ global.setInterval = function () {
+ intervals++
+ return _setInterval.apply(global, arguments)
+ }
+
+ global.clearInterval = function () {
+ intervals--
+ return _clearInterval.apply(global, arguments)
+ }
+
+ const db = levelTtl({ checkFrequency: 50 })
+ intervals.should.equal(1)
+ await db.put('foo', 'bar1', { ttl: 25 })
+ await wait(40)
+ const res = await db.get('foo')
+ should(res).equal('bar1')
+ await wait(40)
+ const res2 = await db.get('foo')
+ // Getting a missing key doesn't throw an error anymore,
+ // see https://github.com/Level/abstract-level/blob/main/UPGRADING.md#12-not-found
+ should(res2).not.be.ok()
+ await wait(40)
+ await db.stop()
+ await db._ttl.close()
+ global.setInterval = _setInterval
+ global.clearInterval = _clearInterval
+ intervals.should.equal(0)
+ })
+})
diff --git a/tests_helpers.js b/tests_helpers.js
new file mode 100644
index 0000000..69f9e49
--- /dev/null
+++ b/tests_helpers.js
@@ -0,0 +1,124 @@
+import random from 'slump'
+import { EntryStream } from 'level-read-stream'
+import bytewise from 'bytewise'
+
+const bwEncode = bytewise.encode
+
+// Reimplemented with EntryStream because the previous `level-concat-iterator`
+// approach (`concat(db.iterator())`) was not returning anything
+export async function getDbEntries (db, opts) {
+ const entries = []
+ return new Promise((resolve, reject) => {
+ new EntryStream(db, opts)
+ .on('data', function (data) {
+ entries.push(data)
+ })
+ .on('close', function () {
+ resolve(entries)
+ })
+ .on('error', reject)
+ })
+}
+
+export async function getDbEntriesAfterDelay (db, delay, opts) {
+ await wait(delay)
+ return getDbEntries(db, opts)
+}
+
+function bufferEq (a, b) {
+ if (a instanceof Buffer && b instanceof Buffer) {
+ return a.toString('hex') === b.toString('hex')
+ }
+}
+
+function isRange (range) {
+ return range && (range.gt || range.lt || range.gte || range.lte)
+}
+
+function matchRange (range, buffer) {
+ const target = buffer.toString('hex')
+ let match = true
+
+ if (range.gt) {
+ match = match && target > range.gt.toString('hex')
+ } else if (range.gte) {
+ match = match && target >= range.gte.toString('hex')
+ }
+
+ if (range.lt) {
+ match = match && target < range.lt.toString('hex')
+ } else if (range.lte) {
+ match = match && target <= range.lte.toString('hex')
+ }
+
+ return match
+}
+
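+// Builds a bytewise-encoded key range around "now" (± resolution ms, or the
+// full date range when no resolution is given), optionally under a prefix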
+export function bwRange (prefix, resolution) {
+ const now = Date.now()
+ const min = new Date(resolution ? now - resolution : 0)
+ const max = new Date(resolution ? now + resolution : 9999999999999)
+ return {
+ gte: bwEncode(prefix ? prefix.concat(min) : min),
+ lte: bwEncode(prefix ? prefix.concat(max) : max)
+ }
+}
+
+function formatRecord (key, value) {
+ if (isRange(key)) {
+ key.source = '[object KeyRange]'
+ }
+ if (isRange(value)) {
+ value.source = '[object ValueRange]'
+ }
+ return '{' + (key.source || key) + ', ' + (value.source || value) + '}'
+}
+
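+// Asserts that `entries` contains at least one entry whose key and value match
+// the given matchers (string, RegExp, Buffer or encoded range); throws if not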
+export function contains (entries, key, value) {
+ for (let i = 0; i < entries.length; i++) {
+ if (typeof key === 'string' && entries[i].key !== key) continue
+ if (typeof value === 'string' && entries[i].value !== value) continue
+ if (key instanceof RegExp && !key.test(entries[i].key)) continue
+ if (value instanceof RegExp && !value.test(entries[i].value)) continue
+ if (key instanceof Buffer && !bufferEq(key, entries[i].key)) continue
+ if (value instanceof Buffer && !bufferEq(value, entries[i].value)) continue
+ if (isRange(key) && !matchRange(key, entries[i].key)) continue
+ if (isRange(value) && !matchRange(value, entries[i].value)) continue
+ return true
+ }
+ throw new Error('does not contain ' + formatRecord(key, value))
+}
+
+/**
+ * @typedef {Object} BatchOp
+ * @property {'put'} type
+ * @property {string} key
+ * @property {string} value
+ */
+
+/**
+ * @param {number} length
+ * @return {BatchOp[]}
+ */
+export function randomPutBatch (length) {
+ const batch = []
+ const randomize = () => random.string({ enc: 'base58', length: 10 })
+ for (let i = 0; i < length; ++i) {
+ batch.push({ type: 'put', key: randomize(), value: randomize() })
+ }
+ // @ts-expect-error
+ return batch
+}
+
+export const wait = ms => new Promise(resolve => setTimeout(resolve, ms))
+
+export function shouldNotBeCalled (res) {
+ const err = new Error('function was expected not to be called')
+ err.name = 'shouldNotBeCalled'
+ err.message += ` (got: ${JSON.stringify(res)})`
+ throw err
+}
+
+// Returns the inclusive integer range [min, min + 1, ..., max]
+export function numberRange (min, max) {
+  return Array.from({ length: max - min + 1 }, (_, i) => min + i)
+}