simplify rpc function signatures

ansuz 2017-05-16 17:07:53 +02:00
parent a97a6caf9b
commit 24eb928ef8
1 changed file with 54 additions and 43 deletions
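In short: helpers in rpc.js that used to take their shared dependencies as separate positional arguments (paths, pinStore, and so on) now take a single Env object and read the fields they need from it, while RPC.create assembles Env once up front. A minimal sketch of the before/after shape, using getTotalSize as the example; the _before/_after names, the stub bodies, and the trailing usage call are illustrative only and not part of the commit, but the Env field names (paths, pinStore) match the diff below.

    // Before this commit: every shared resource is its own parameter.
    var getTotalSize_before = function (paths, pinStore, msgStore, Sessions, publicKey, cb) {
        cb(void 0, 0); // stub body, for illustration only
    };

    // After this commit: shared state travels in one Env object.
    var getTotalSize_after = function (Env, msgStore, Sessions, publicKey, cb) {
        var paths = Env.paths;       // same values as before, now looked up from Env
        var pinStore = Env.pinStore; // attached by RPC.create once the pin store is ready
        cb(void 0, 0); // stub body, for illustration only
    };

    // RPC.create builds Env once and threads it through every helper call.
    var Env = {
        paths: { pin: './pins', blob: './blob', staging: './blobstage' }
    };
    getTotalSize_after(Env, null, {}, 'someUserPublicKey', function (e, bytes) {
        console.log(e, bytes);
    });

The practical payoff shows up at the call sites below: adding another shared resource later means adding a field to Env instead of growing every signature in the chain.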

rpc.js

@@ -192,7 +192,8 @@ var checkSignature = function (signedMsg, signature, publicKey) {
return Nacl.sign.detached.verify(signedBuffer, signatureBuffer, pubBuffer);
};
var loadUserPins = function (store, Sessions, publicKey, cb) {
var loadUserPins = function (Env, Sessions, publicKey, cb) {
var pinStore = Env.pinStore;
var session = beginSession(Sessions, publicKey);
if (session.channels) {
@@ -210,7 +211,7 @@ var loadUserPins = function (store, Sessions, publicKey, cb) {
pins[channel] = false;
};
store.getMessages(publicKey, function (msg) {
pinStore.getMessages(publicKey, function (msg) {
// handle messages...
var parsed;
try {
@@ -252,8 +253,8 @@ var truthyKeys = function (O) {
});
};
var getChannelList = function (store, Sessions, publicKey, cb) {
loadUserPins(store, Sessions, publicKey, function (pins) {
var getChannelList = function (Env, Sessions, publicKey, cb) {
loadUserPins(Env, Sessions, publicKey, function (pins) {
cb(truthyKeys(pins));
});
};
@@ -263,7 +264,8 @@ var makeFilePath = function (root, id) {
return Path.join(root, id.slice(0, 2), id);
};
var getUploadSize = function (paths, channel, cb) {
var getUploadSize = function (Env, channel, cb) {
var paths = Env.paths;
var path = makeFilePath(paths.blob, channel);
if (!path) {
return cb('INVALID_UPLOAD_ID');
@@ -275,7 +277,7 @@ var getUploadSize = function (paths, channel, cb) {
});
};
var getFileSize = function (paths, msgStore, channel, cb) {
var getFileSize = function (Env, msgStore, channel, cb) {
if (!isValidId(channel)) { return void cb('INVALID_CHAN'); }
if (channel.length === 32) {
@@ -290,13 +292,13 @@ var getFileSize = function (paths, msgStore, channel, cb) {
}
// 'channel' refers to a file, so you need another API
getUploadSize(paths, channel, function (e, size) {
getUploadSize(Env, channel, function (e, size) {
if (e) { return void cb(e); }
cb(void 0, size);
});
};
var getMultipleFileSize = function (paths, msgStore, channels, cb) {
var getMultipleFileSize = function (Env, msgStore, channels, cb) {
if (!Array.isArray(channels)) { return cb('INVALID_LIST'); }
if (typeof(msgStore.getChannelSize) !== 'function') {
return cb('GET_CHANNEL_SIZE_UNSUPPORTED');
@@ -311,7 +313,7 @@ var getMultipleFileSize = function (paths, msgStore, channels, cb) {
};
channels.forEach(function (channel) {
getFileSize(paths, msgStore, channel, function (e, size) {
getFileSize(Env, msgStore, channel, function (e, size) {
if (e) {
console.error(e);
counts[channel] = -1;
@@ -323,17 +325,17 @@ var getMultipleFileSize = function (paths, msgStore, channels, cb) {
});
};
var getTotalSize = function (paths, pinStore, msgStore, Sessions, publicKey, cb) {
var getTotalSize = function (Env, msgStore, Sessions, publicKey, cb) {
var bytes = 0;
return void getChannelList(pinStore, Sessions, publicKey, function (channels) {
return void getChannelList(Env, Sessions, publicKey, function (channels) {
if (!channels) { cb('NO_ARRAY'); } // unexpected
var count = channels.length;
if (!count) { cb(void 0, 0); }
channels.forEach(function (channel) {
getFileSize(paths, msgStore, channel, function (e, size) {
getFileSize(Env, msgStore, channel, function (e, size) {
count--;
if (!e) { bytes += size; }
if (count === 0) { return cb(void 0, bytes); }
@@ -356,20 +358,22 @@ var hashChannelList = function (A) {
return hash;
};
var getHash = function (store, Sessions, publicKey, cb) {
getChannelList(store, Sessions, publicKey, function (channels) {
var getHash = function (Env, Sessions, publicKey, cb) {
getChannelList(Env, Sessions, publicKey, function (channels) {
cb(void 0, hashChannelList(channels));
});
};
// TODO check if new pinned size exceeds user quota
var pinChannel = function (pinStore, Sessions, publicKey, channels, cb) {
var pinChannel = function (Env, Sessions, publicKey, channels, cb) {
var pinStore = Env.pinStore;
if (!channels || !channels.filter) {
// expected array
return void cb('[TYPE_ERROR] pin expects channel list argument');
}
getChannelList(pinStore, Sessions, publicKey, function (pinned) {
getChannelList(Env, Sessions, publicKey, function (pinned) {
var session = beginSession(Sessions, publicKey);
// only pin channels which are not already pinned
@@ -378,7 +382,7 @@ var pinChannel = function (pinStore, Sessions, publicKey, channels, cb) {
});
if (toStore.length === 0) {
return void getHash(pinStore, Sessions, publicKey, cb);
return void getHash(Env, Sessions, publicKey, cb);
}
pinStore.message(publicKey, JSON.stringify(['PIN', toStore]),
@@ -387,12 +391,13 @@ var pinChannel = function (pinStore, Sessions, publicKey, channels, cb) {
toStore.forEach(function (channel) {
session.channels[channel] = true;
});
getHash(pinStore, Sessions, publicKey, cb);
getHash(Env, Sessions, publicKey, cb);
});
});
};
var unpinChannel = function (pinStore, Sessions, publicKey, channels, cb) {
var unpinChannel = function (Env, Sessions, publicKey, channels, cb) {
var pinStore = Env.pinStore;
if (!channels || !channels.filter) {
// expected array
return void cb('[TYPE_ERROR] unpin expects channel list argument');
@@ -407,7 +412,7 @@ var unpinChannel = function (pinStore, Sessions, publicKey, channels, cb) {
});
if (toStore.length === 0) {
return void getHash(pinStore, Sessions, publicKey, cb);
return void getHash(Env, Sessions, publicKey, cb);
}
pinStore.message(publicKey, JSON.stringify(['UNPIN', toStore]),
@@ -417,13 +422,14 @@ var unpinChannel = function (pinStore, Sessions, publicKey, channels, cb) {
delete session.channels[channel];
});
getHash(pinStore, Sessions, publicKey, cb);
getHash(Env, Sessions, publicKey, cb);
});
});
};
// TODO check if new pinned size exceeds user quota
var resetUserPins = function (pinStore, Sessions, publicKey, channelList, cb) {
var resetUserPins = function (Env, Sessions, publicKey, channelList, cb) {
var pinStore = Env.pinStore;
var session = beginSession(Sessions, publicKey);
var pins = session.channels = {};
@@ -435,7 +441,7 @@ var resetUserPins = function (pinStore, Sessions, publicKey, channelList, cb) {
pins[channel] = true;
});
getHash(pinStore, Sessions, publicKey, function (e, hash) {
getHash(Env, Sessions, publicKey, function (e, hash) {
cb(e, hash);
});
});
@@ -556,7 +562,8 @@ var makeFileStream = function (root, id, cb) {
});
};
var upload = function (paths, Sessions, publicKey, content, cb) {
var upload = function (Env, Sessions, publicKey, content, cb) {
var paths = Env.paths;
var dec = new Buffer(Nacl.util.decodeBase64(content)); // jshint ignore:line
var len = dec.length;
@@ -588,7 +595,8 @@ var upload = function (paths, Sessions, publicKey, content, cb) {
}
};
var upload_cancel = function (paths, Sessions, publicKey, cb) {
var upload_cancel = function (Env, Sessions, publicKey, cb) {
var paths = Env.paths;
var path = makeFilePath(paths.staging, publicKey);
if (!path) {
console.log(paths.staging, publicKey);
@@ -612,7 +620,8 @@ var isFile = function (filePath, cb) {
});
};
var upload_complete = function (paths, Sessions, publicKey, cb) {
var upload_complete = function (Env, Sessions, publicKey, cb) {
var paths = Env.paths;
var session = beginSession(Sessions, publicKey);
if (session.blobstage && session.blobstage.close) {
@@ -657,7 +666,9 @@ var upload_complete = function (paths, Sessions, publicKey, cb) {
});
};
var upload_status = function (paths, pinStore, msgStore, Sessions, publicKey, filesize, cb) {
var upload_status = function (Env, msgStore, Sessions, publicKey, filesize, cb) {
var paths = Env.paths;
// validate that the provided size is actually a positive number
if (typeof(filesize) !== 'number' ||
filesize < 0) { return void cb('E_INVALID_SIZE'); }
@@ -668,7 +679,7 @@ var upload_status = function (paths, pinStore, msgStore, Sessions, publicKey, fi
getLimit(publicKey, function (e, limit) {
if (e) { return void cb(e); }
getTotalSize(paths, pinStore, msgStore, Sessions, publicKey, function (e, size) {
getTotalSize(Env, msgStore, Sessions, publicKey, function (e, size) {
if ((filesize + size) >= limit) { return cb('TOO_LARGE'); }
isFile(filePath, function (e, yes) {
if (e) {
@@ -692,13 +703,13 @@ RPC.create = function (config /*:typeof(ConfigType)*/, cb /*:(?Error, ?Function)
return typeof(config[key]) === 'string'? config[key]: def;
};
var paths = {};
var Env = {};
var paths = Env.paths = {};
var pinPath = paths.pin = keyOrDefaultString('pinPath', './pins');
var blobPath = paths.blob = keyOrDefaultString('blobPath', './blob');
var blobStagingPath = paths.staging = keyOrDefaultString('blobStagingPath', './blobstage');
var pinStore;
var rpc = function (
ctx /*:{ store: Object }*/,
data /*:Array<Array<any>>*/,
@@ -775,29 +786,29 @@ RPC.create = function (config /*:typeof(ConfigType)*/, cb /*:(?Error, ?Function)
switch (msg[0]) {
case 'COOKIE': return void Respond(void 0);
case 'RESET':
return resetUserPins(pinStore, Sessions, safeKey, msg[1], function (e, hash) {
return resetUserPins(Env, Sessions, safeKey, msg[1], function (e, hash) {
return void Respond(e, hash);
});
case 'PIN': // TODO don't pin if over the limit
// if over, send error E_OVER_LIMIT
return pinChannel(pinStore, Sessions, safeKey, msg[1], function (e, hash) {
return pinChannel(Env, Sessions, safeKey, msg[1], function (e, hash) {
Respond(e, hash);
});
case 'UNPIN':
return unpinChannel(pinStore, Sessions, safeKey, msg[1], function (e, hash) {
return unpinChannel(Env, Sessions, safeKey, msg[1], function (e, hash) {
Respond(e, hash);
});
case 'GET_HASH':
return void getHash(pinStore, Sessions, safeKey, function (e, hash) {
return void getHash(Env, Sessions, safeKey, function (e, hash) {
Respond(e, hash);
});
case 'GET_TOTAL_SIZE': // TODO cache this, since it will get called quite a bit
return getTotalSize(paths, pinStore, msgStore, Sessions, safeKey, function (e, size) {
return getTotalSize(Env, msgStore, Sessions, safeKey, function (e, size) {
if (e) { return void Respond(e); }
Respond(e, size);
});
case 'GET_FILE_SIZE':
return void getFileSize(paths, msgStore, msg[1], Respond);
return void getFileSize(Env, msgStore, msg[1], Respond);
case 'UPDATE_LIMITS':
return void updateLimits(config, safeKey, function (e, limit) {
if (e) { return void Respond(e); }
@@ -809,7 +820,7 @@ RPC.create = function (config /*:typeof(ConfigType)*/, cb /*:(?Error, ?Function)
Respond(void 0, limit);
});
case 'GET_MULTIPLE_FILE_SIZE':
return void getMultipleFileSize(paths, msgStore, msg[1], function (e, dict) {
return void getMultipleFileSize(Env, msgStore, msg[1], function (e, dict) {
if (e) { return void Respond(e); }
Respond(void 0, dict);
});
@@ -817,13 +828,13 @@ RPC.create = function (config /*:typeof(ConfigType)*/, cb /*:(?Error, ?Function)
// restricted to privileged users...
case 'UPLOAD':
if (!privileged) { return deny(); }
return void upload(paths, Sessions, safeKey, msg[1], function (e, len) {
return void upload(Env, Sessions, safeKey, msg[1], function (e, len) {
Respond(e, len);
});
case 'UPLOAD_STATUS':
if (!privileged) { return deny(); }
var filesize = msg[1];
return void upload_status(paths, pinStore, msgStore, Sessions, safeKey, msg[1], function (e, yes) {
return void upload_status(Env, msgStore, Sessions, safeKey, msg[1], function (e, yes) {
if (!e && !yes) {
// no pending uploads, set the new size
var user = beginSession(Sessions, safeKey);
@@ -834,12 +845,12 @@ RPC.create = function (config /*:typeof(ConfigType)*/, cb /*:(?Error, ?Function)
});
case 'UPLOAD_COMPLETE':
if (!privileged) { return deny(); }
return void upload_complete(paths, Sessions, safeKey, function (e, hash) {
return void upload_complete(Env, Sessions, safeKey, function (e, hash) {
Respond(e, hash);
});
case 'UPLOAD_CANCEL':
if (!privileged) { return deny(); }
return void upload_cancel(paths, Sessions, safeKey, function (e) {
return void upload_cancel(Env, Sessions, safeKey, function (e) {
Respond(e);
});
default:
@@ -881,7 +892,7 @@ RPC.create = function (config /*:typeof(ConfigType)*/, cb /*:(?Error, ?Function)
Store.create({
filePath: pinPath,
}, function (s) {
pinStore = s;
Env.pinStore = s;
safeMkdir(blobPath, function (e) {
if (e) { throw e; }
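One sequencing detail from the last hunk: Env.paths is filled in synchronously inside RPC.create, while Env.pinStore is only attached once Store.create calls back. A minimal sketch of that ordering; the stubbed Store object is an illustrative stand-in for the storage module rpc.js already uses, and only the Store.create({ filePath: ... }, function (s) { Env.pinStore = s; }) shape is taken from the diff.

    // Illustrative stand-in for the real storage module (not part of the commit).
    var Store = {
        create: function (conf, cb) {
            cb({ getMessages: function () {}, message: function () {} });
        }
    };

    var Env = {};
    Env.paths = { pin: './pins', blob: './blob', staging: './blobstage' };

    Store.create({ filePath: Env.paths.pin }, function (s) {
        Env.pinStore = s; // helpers that read Env.pinStore are safe from this point on
        // ... continue wiring up the rpc function and return it via the module callback
    });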