shared-memory
A k-v database that uses /dev/shm/ as shared memory
path = require 'path'
fs = require 'fs'
os = require 'options-stream'
ep = require 'event-pipe'
lockfile = require './filelock'
accpool = require './pool'
###
# properties:
# options
# indexPath
# dataPath
# indexHandle
# dataHandle
#
# indexCache
#
# usage:
# new
# ready
# set
# get
#
###
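###
# Example usage (a sketch based on the method list above; the require path and
# the key names are illustrative, not taken from the package docs):
#
#   db = require('shared-memory')(dir: 'my-app')
#   db.ready (err) ->
#     return console.error err if err
#     db.set 'greeting', 'hello', ->
#       db.get 'greeting', (err, val) ->
#         console.log val        # -> 'hello'
#     db.increase 'counter', 5, ->
#       db.get 'counter', (err, n) ->
#         console.log n          # -> 5 (numeric strings are parsed back to numbers)
###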
mkdirpSync = (uri) ->
  try
    unless fs.existsSync uri
      mkdirpSync uri.split('/').slice(0, -1).join('/')
      console.log 'mkdir', uri
      fs.mkdirSync uri
class File_DB
  constructor: (options) ->
    @options = os
      dir: '/tmp/'
      index_file: 'index.fd'
      data_file: 'data.fd'
      lock_dir: 'locks'
      min_length: 8
      lock_opt:
        pollPeriod: 10
        wait: 50000
    , options
    # prefer /dev/shm so the index and data files live in shared memory
    if fs.existsSync '/dev/shm/'
      @options.dir = path.join '/dev/shm', @options.dir
    unless fs.existsSync @options.dir
      mkdirpSync @options.dir
    # recreate the lock directory so stale locks from a previous run are dropped
    lock_dir = path.join @options.dir, @options.lock_dir
    if fs.existsSync lock_dir
      for file in fs.readdirSync lock_dir
        fs.unlinkSync path.join lock_dir, file
      fs.rmdirSync lock_dir
    fs.mkdirSync lock_dir
    @indexPath = path.join @options.dir, @options.index_file
    @dataPath = path.join @options.dir, @options.data_file
    # NOTE: the property names on the next five lines are reconstructed; the
    # damaged source only shows the assignments. @indexCache is taken from the
    # header comment, @dataCache is an assumed name, and the ready-state names
    # (@isReady, @error, @callbacks) are likewise reconstructed.
    @indexCache = {}
    @dataCache = {}
    @isReady = false
    cb = (err) =>
      if err
        @error = err
      else
        @isReady = true
      callback(@error, @) for callback in @callbacks
    _db = @
    flow = ep()
    flow.on 'error', cb
    .lazy ->
      # opening with 'a' creates the data file if it does not exist yet
      fs.open _db.dataPath, 'a', @
    .lazy (fd) ->
      fs.close fd, @
    .lazy ->
      ws = fs.createWriteStream _db.dataPath, {flags: 'r+', encoding: 'utf8', mode: '0666'}
      ws.on 'open', (fd) => @ null, fd
    .lazy (fd) ->
      _db.dataHandle = fd
      _db.index = new Index _db.options, fd, @
    .lazy ->
      cb()
    .run()
    @callbacks = []
  ready: (callback) ->
    unless @isReady
      return @callbacks.push callback
    else
      callback @error, @
  get: (key, cb) ->
    unless cb then cb = ->
    @index.get key, (err, pos) =>
      return cb new Error 'key not found!' unless pos
      @_getData pos, cb
  getAll: (cb) ->
    @index.getAll (err, data) =>
      func = []
      result = {}
      for key, position of data
        func.push @_getdataMaker key, position
      unless func.length
        return cb null, result
      flow = ep()
      flow.lazy func
      flow.lazy ->
        for [key, num] in arguments
          result[key] = num if num
        cb null, result
      flow.run()
  pop: (key, cb) ->
    unless cb then cb = ->
    @index.get key, (err, pos) =>
      return cb new Error 'key not found!' unless pos
      @_popData pos, cb
  popAll: (cb) ->
    @index.getAll (err, data) =>
      func = []
      result = {}
      for key, position of data
        func.push @_popdataMaker key, position
      unless func.length
        return cb null, result
      flow = ep()
      flow.lazy func
      flow.lazy ->
        for [key, num] in arguments
          result[key] = num if num
        cb null, result
      flow.run()
  _getdataMaker: (key, position) ->
    that = @
    ->
      flow = @
      that._getData position, (err, num) ->
        flow err, key, num
  _popdataMaker: (key, position) ->
    that = @
    ->
      flow = @
      that._popData position, (err, num) ->
        flow err, key, num
  increase: (key, num, cb) ->
    unless cb then cb = ->
    return cb() if num is 0
    if typeof num is 'function'
      cb = num
      num = 1
    unless num then num = 1
    @_accumulate key, num, cb
  decrease: (key, num, cb) ->
    unless cb then cb = ->
    return cb() if num is 0
    if typeof num is 'function'
      cb = num
      num = -1
    unless num then num = -1
    if num > 0 then num = -num
    @_accumulate key, num, cb
  _accumulate: (key, acc, cb) ->
    accpool key, acc, cb, (acc, cb) =>
      # lock the key while its slot is read and rewritten
      @_process key, cb, (done) =>
        @index.get key, (err, pos) =>
          # create if not exists
          unless pos
            @_write key, acc.toString(), done
          else
            # get data
            @_getData pos, (err, num) =>
              num += acc
              num = num.toString()
              # set data
              if num.length > pos[1]
                # extend space
                @_write key, num, done
              else
                @_saveData num, pos, done
  _process: (key, done, cb) ->
    lock_file = path.join @options.dir, @options.lock_dir, key + '.lock'
    lockfile.lock lock_file, ->
      cb -> lockfile.unlock lock_file, done
  _getData: (pos, cb) ->
    _buffer = new Buffer pos[1]
    fs.read @dataHandle, _buffer, 0, pos[1], pos[0], (err) =>
      return cb err if err
      @_parseData _buffer, cb
  _popData: (pos, cb) ->
    _buffer = new Buffer pos[1]
    fs.read @dataHandle, _buffer, 0, pos[1], pos[0], (err) =>
      return cb err if err
      # blank the slot, then return the value that was read
      @_saveData '', pos, (err) =>
        return cb err if err
        @_parseData _buffer, cb
  _parseData: (_buffer, cb) ->
    data = _buffer.toString().trim()
    unless isNaN(data)
      cb null, 1 * data
    else
      # non-numeric values may be JSON written by set(); the @_try_object call is
      # reconstructed here, since the helper is otherwise never used
      cb null, @_try_object data
  set: (key, val, cb) ->
    unless cb then cb = ->
    if typeof val isnt 'string'
      val = JSON.stringify val
    @_write key, val, cb
  _write: (key, val, cb) ->
    @index.ensure key, val.length, (err, position) =>
      return cb err if err
      @_saveData val, position, cb
  _saveData: (val, [start, length], cb) ->
    val = val.toString()
    # left-pad with spaces so the value always fills its reserved slot
    if val.length < length
      val = (new Array(length - val.length + 1)).join(' ') + val
    fs.write @dataHandle, new Buffer(val), 0, length, start, cb
  _try_object: (data) ->
    try
      return JSON.parse data
    catch e
      return data
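# The Index class maps each key to a [offset, length] slot in the data file.
# The index file itself holds comma-terminated JSON fragments of the form
# "key":[offset,length], which _update wraps in braces and JSON.parses back
# into @cache. New keys are appended to the index file; when an existing key
# needs a larger slot the whole index is rewritten from offset 0. Writers are
# serialized with one lock file per key plus 'index.lock' for the index itself.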
class Index
  # the constructor parameter names are reconstructed: the first argument is the
  # shared options object, the second the already-open data-file descriptor
  constructor: (@options, @dataHandle, cb) ->
    @path = path.join @options.dir, @options.index_file
    @lock_file = path.join @options.dir, @options.lock_dir, 'index.lock'
    @cache = {}
    @tasks = {}
    @cbs = []
    @mtime = 0
    _index = @
    flow = ep()
    flow.on 'error', cb
    .lazy ->
      # opening with 'a' creates the index file if it does not exist yet
      fs.open _index.path, 'a', @
    .lazy (fd) ->
      fs.close fd, @
    .lazy ->
      ws = fs.createWriteStream _index.path, {flags: 'r+', encoding: 'utf8', mode: '0666'}
      ws.on 'open', (fd) => @ null, fd
    .lazy (fd) ->
      _index.handle = fd
      _index._update cb
    .lazy ->
      cb()
    .run()
  getAll: (cb) ->
    @_update => cb null, @cache
  get: (key, cb) ->
    position = @cache[key]
    unless position
      # re-read the index file in case another process added the key
      @_update => cb null, @cache[key]
    else
      cb null, position
  ensure: (key, length, cb) ->
    @get key, (err, position) =>
      unless position and length <= position[1]
        @_add key, length, cb
      else
        cb null, position
  _add: (key, length, cb) ->
    length = @options.min_length if length < @options.min_length
    @tasks[key] = length if not @tasks[key] or @tasks[key] < length
    @cbs.push [key, cb]
    # this call does not survive in the damaged source, but nothing else
    # triggers processing of the queued tasks, so it is restored here
    @_clearup()
  _clearup: ->
    return if @processing
    @processing = true
    # By default the lock is retried for 5 s; if the lock file has not been
    # updated for 1000 ms the lock is stolen, to prevent an avalanche of waiters.
    lockfile.lock @lock_file, (err) =>
      if err
        setTimeout =>
          @processing = false
        , 10
      else
        @_process =>
          lockfile.unlock @lock_file, =>
            @processing = false
            if @cbs.length then @_clearup()
  _process: (callback) ->
    # drain the queued tasks and callbacks before doing any I/O
    tasks = @tasks
    cbs = @cbs
    @tasks = {}
    @cbs = []
    @_getDataLength (err, start) =>
      return callback err if err
      exists = false
      for key, length of tasks
        if @cache[key]
          continue if @cache[key][1] >= length
          exists = true
        # new slots are placed at the current end of the data file
        @cache[key] = [start, length]
        start += length
      if exists
        # an existing key outgrew its slot: rewrite the whole index from offset 0
        data = JSON.stringify(@cache).replace(/^{|}$/g, "") + ','
        fs.write @handle, new Buffer(data), 0, data.length, 0, (err, written) =>
          for [key, cb] in cbs
            cb null, @cache[key]
          callback()
      else
        # only new keys: append their entries to the end of the index file
        data = ""
        for key, length of tasks
          data += "\"#{key}\":#{JSON.stringify(@cache[key])},"
        @_getLength (err, size) =>
          fs.write @handle, new Buffer(data), 0, data.length, size, (err, written) =>
            for [key, cb] in cbs
              cb null, @cache[key]
            callback()
  _update: (cb) ->
    fs.fstat @handle, (err, stat) =>
      # skip re-reading when the index file has not changed since the last read
      if stat.mtime.getTime() is @mtime
        cb()
      else
        @_getLength (err, size, stat) =>
          @mtime = stat.mtime.getTime()
          unless size
            @cache = {}
            return cb()
          buffer = new Buffer size
          fs.read @handle, buffer, 0, size, 0, (err) =>
            @cache = JSON.parse '{' + buffer.toString().trim().replace(/,$/, '') + '}'
            cb()
  _getLength: (cb) ->
    fs.fstat @handle, (err, stat) ->
      cb err, stat.size, stat
  _getDataLength: (cb) ->
    fs.fstat @dataHandle, (err, stat) ->
      cb err, stat.size
module.exports = (options) ->
  new File_DB options
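###
# Further sketch of the counter-style API built on _accumulate (key names are
# illustrative, and the calls assume the db instance from the example above):
#
#   db.increase 'hits', ->        # amount defaults to 1 when omitted
#   db.decrease 'hits', 2, ->     # positive amounts are negated internally
#   db.getAll (err, all) ->       # { key: value } for every indexed key
#     console.log all
#   db.pop 'hits', (err, val) ->  # read the value, then blank its slot
#     console.log val
###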