Merge branch 'release-5.1' of github.com:apple/foundationdb into release-5.1
Commit: 8f300fdec9
@@ -6,8 +6,6 @@ bindings/flow/bin/
 bindings/java/foundationdb-client*.jar
 bindings/java/foundationdb-tests*.jar
 bindings/java/fdb-java-*-sources.jar
-bindings/nodejs/build/
-bindings/nodejs/modules/
 packaging/msi/FDBInstaller.msi
 
 # Generated source, build, and packaging files
@@ -24,7 +22,6 @@ bindings/java/src*/main/com/apple/foundationdb/StreamingMode.java
 bindings/java/src*/main/com/apple/foundationdb/MutationType.java
 bindings/java/src*/main/com/apple/foundationdb/ConflictRangeType.java
 bindings/java/src*/main/com/apple/foundationdb/FDBException.java
-bindings/nodejs/package.json
 bindings/python/fdb/fdb*options.py
 bindings/python/dist/
 bindings/python/setup.py
@@ -52,12 +49,8 @@ bindings/go/godoc
 bindings/java/.classstamp*
 bindings/java/classes*/
 bindings/java/javadoc*/
-bindings/nodejs/fdb_node.stamp
-bindings/nodejs/node_modules/
 
 # Testing and logging
-bindings/nodejs/fdb_node*.log
-bindings/nodejs/npm-debug.log
 packaging/msi/*.log
 packaging/msi/obj
 simfdb

Makefile (4 changed lines)

@@ -93,7 +93,7 @@ VPATH += $(addprefix :,$(filter-out lib,$(patsubst -L%,%,$(filter -L%,$(LDFLAGS)
 CS_PROJECTS := flow/actorcompiler flow/coveragetool fdbclient/vexillographer
 CPP_PROJECTS := flow fdbrpc fdbclient fdbbackup fdbserver fdbcli bindings/c bindings/java fdbmonitor bindings/flow/tester bindings/flow
-OTHER_PROJECTS := bindings/python bindings/ruby bindings/nodejs bindings/go
+OTHER_PROJECTS := bindings/python bindings/ruby bindings/go
 
 CS_MK_GENERATED := $(CS_PROJECTS:=/generated.mk)
 CPP_MK_GENERATED := $(CPP_PROJECTS:=/generated.mk)
@@ -127,7 +127,7 @@ else
 endif
 	@echo "#define FDB_VT_PACKAGE_NAME \"$(PACKAGE_NAME)\"" >> $@
 
-bindings: fdb_c fdb_python fdb_ruby fdb_java fdb_node fdb_flow fdb_flow_tester fdb_go fdb_go_tester
+bindings: fdb_c fdb_python fdb_ruby fdb_java fdb_flow fdb_flow_tester fdb_go fdb_go_tester
 
 Makefiles: $(MK_GENERATED)

README.md (18 changed lines)

@@ -1,3 +1,21 @@
# FoundationDB

FoundationDB is a distributed database designed to handle large volumes of structured data across clusters of commodity servers. It organizes data as an ordered key-value store and employs ACID transactions for all operations. It is especially well-suited for read/write workloads but also has excellent performance for write-intensive workloads. Users interact with the database using API language bindings.

# Building Locally

## macOS

1. Check out this repo on your Mac.
1. Install the Xcode command-line tools.
1. Download version 1.52 of [Boost](https://sourceforge.net/projects/boost/files/boost/1.52.0/).
1. Set the BOOSTDIR environment variable to the location containing this boost installation.
1. Install [Mono](http://www.mono-project.com/download/stable/).
1. Install a [JDK](http://www.oracle.com/technetwork/java/javase/downloads/index.html). FoundationDB currently builds with Java 8.
1. Navigate to the directory where you checked out the foundationdb repo.
1. Run `make`.

This will build the fdbserver binary and the python bindings. If you want to build our other bindings, you will need to install a runtime for the language whose binding you want to build. Each binding has an `.mk` file which provides specific targets for that binding.

@@ -58,8 +58,6 @@ _java_cmd = 'java -ea -cp %s:%s com.apple.foundationdb.test.' % (
 testers = {
     'python': Tester('python', 'python ' + _absolute_path('python/tests/tester.py'), 2040, 23, MAX_API_VERSION, types=ALL_TYPES),
     'python3': Tester('python3', 'python3 ' + _absolute_path('python/tests/tester.py'), 2040, 23, MAX_API_VERSION, types=ALL_TYPES),
-    'node': Tester('node', _absolute_path('nodejs/tests/tester.js'), 53, 500, MAX_API_VERSION),
-    'streamline': Tester('streamline', _absolute_path('nodejs/tests/streamline_tester._js'), 53, 500, MAX_API_VERSION),
     'ruby': Tester('ruby', _absolute_path('ruby/tests/tester.rb'), 64, 23, MAX_API_VERSION),
     'java': Tester('java', _java_cmd + 'StackTester', 2040, 510, MAX_API_VERSION, types=ALL_TYPES),
     'java_async': Tester('java', _java_cmd + 'AsyncStackTester', 2040, 510, MAX_API_VERSION, types=ALL_TYPES),

@@ -25,7 +25,7 @@ BREAKONERROR="${BREAKONERROR:-0}"
 RUNSCRIPTS="${RUNSCRIPTS:-1}"
 RUNTESTS="${RUNTESTS:-1}"
 RANDOMTEST="${RANDOMTEST:-0}"
-BINDINGTESTS="${BINDINGTESTS:-python python3 java java_async ruby node go flow}"
+BINDINGTESTS="${BINDINGTESTS:-python python3 java java_async ruby go flow}"
 LOGLEVEL="${LOGLEVEL:-INFO}"
 _BINDINGTESTS=(${BINDINGTESTS})
 DISABLEDTESTS=()

@@ -24,7 +24,6 @@ function run_scripted() {
     scripted ruby
     scripted java
     scripted java_async
-    scripted node
     scripted go
     scripted flow
 }
@@ -40,8 +39,6 @@ while `true`; do
     run ruby
     run java
     run java_async
-    run node
-    #run streamline
     run go
     run flow
 done

@@ -102,5 +102,3 @@ fdb_c_tests: packages/fdb-c-tests-$(VERSION)-$(PLATFORM).tar.gz

fdb_c_tests_clean:
    @rm -f packages/fdb-c-tests-$(VERSION)-$(PLATFORM).tar.gz

packages: fdb_c_tests

@@ -30,11 +30,6 @@
         <property name="option" value="eol"/>
         <property name="ignoreEnums" value="false"/>
     </module>
-    <!-- We have about 76 errors with value = "alone" and 27 with value = "same". We should pick one.
-    <module name="RightCurly">
-        <property name="option" value="same"/>
-    </module>
-    -->
 
     <!-- Design -->
     <!-- We might get some helpful errors if we turned this on, but not right now.

@@ -42,12 +42,12 @@ class JNIUtil {
    OSX("osx", "x86_64", true);

    private final String name;
    private final String arch;
    private final boolean canDeleteEager;

    OS(String name, String arch, boolean canDeleteEager) {
        this.name = name;
        this.arch = arch;
        this.canDeleteEager = canDeleteEager;
    }
@@ -55,9 +55,9 @@ class JNIUtil {
        return this.name;
    }

    public String getArch() {
        return this.arch;
    }
}

/**
@@ -89,18 +89,18 @@ class JNIUtil {
        OS os = getRunningOS();
        String path = getPath(os, libName);

        if ((os.getName().equals("linux") && !path.endsWith(".so")) || (os.getName().equals("windows") && !path.endsWith(".dll")) || (os.getName().equals("osx") && !path.endsWith(".jnilib") && !path.endsWith(".dylib"))) {
            throw new IllegalStateException("OS sanity check failed. System property os.name reports " + os.getName() + " but System.mapLibraryName is looking for " + getLibName(libName));
        }

        File exported;

        try {
            exported = exportResource(path);
        }
        catch (IOException e) {
            throw new UnsatisfiedLinkError(e.getMessage());
        }
        String filename = exported.getAbsolutePath();

        System.load(filename);

@@ -21,17 +21,17 @@
package com.apple.foundationdb;

class RangeResultInfo {
    RangeResultSummary getSummary() {
        return f.getSummary();
    }

    RangeResult get() {
        return f.getResults();
    }

    RangeResultInfo(FutureResults f) {
        this.f = f;
    }

    private FutureResults f;
}

@@ -526,7 +526,7 @@ public class DirectoryLayer implements Directory {

        return AsyncUtil.collect(
            AsyncUtil.mapIterable(tr.getRange(subdir.range()),
                kv -> subdir.unpack(kv.getKey()).getString(0)
            ),
            tr.getExecutor()
        );

@@ -46,231 +46,231 @@ import com.apple.foundationdb.tuple.Versionstamp;
 * </p>
 */
public class Subspace {
    static final Tuple EMPTY_TUPLE = Tuple.from();
    static final byte[] EMPTY_BYTES = new byte[0];

    private final byte[] rawPrefix;

    /**
     * Constructor for a subspace formed with an empty prefix {@link Tuple}.
     */
    public Subspace() {
        this(EMPTY_TUPLE, EMPTY_BYTES);
    }

    /**
     * Constructor for a subspace formed with the specified prefix {@link Tuple}.
     * Note that the {@link Tuple} {@code prefix} should not contain any incomplete
     * {@link Versionstamp}s as any of its entries.
     *
     * @param prefix a {@link Tuple} used to form the subspace
     * @throws IllegalArgumentException if {@code prefix} contains any incomplete {@link Versionstamp}s
     */
    public Subspace(Tuple prefix) {
        this(prefix, EMPTY_BYTES);
    }

    /**
     * Constructor for a subspace formed with the specified byte string, which will
     * be prepended to all packed keys.
     *
     * @param rawPrefix a byte array used as the prefix for all packed keys
     */
    public Subspace(byte[] rawPrefix) {
        this(EMPTY_TUPLE, rawPrefix);
    }

    /**
     * Constructor for a subspace formed with both a prefix {@link Tuple} and a
     * prefix byte string. The prefix {@code Tuple} will be prepended to all
     * {@code Tuples} packed by the {@code Subspace}, and the byte string prefix
     * will be prepended to the packed result. Note that the {@link Tuple} {@code prefix}
     * should not contain any incomplete {@link Versionstamp}s as any of its entries.
     *
     * @param prefix a {@code Tuple} used to form the subspace
     * @param rawPrefix a byte array used as the prefix for all packed keys
     * @throws IllegalArgumentException if {@code prefix} contains any incomplete {@link Versionstamp}s
     */
    public Subspace(Tuple prefix, byte[] rawPrefix) {
        this.rawPrefix = join(rawPrefix, prefix.pack());
    }

    /**
     * Returns true if this {@code Subspace} is equal to {@code rhs}.
     * Two {@code Subspace}s are equal if they have the same prefix.
     *
     * @param rhs the object to check for equality
     * @return {@code true} if this {@code Subspace} and {@code rhs} have equal prefixes
     */
    @Override
    public boolean equals(Object rhs) {
        if(this == rhs) {
            return true;
        }
        if(rhs == null || getClass() != rhs.getClass()) {
            return false;
        }
        Subspace other = (Subspace)rhs;
        return Arrays.equals(rawPrefix, other.rawPrefix);
    }

    /**
     * Create a human-readable string representation of this subspace. This is
     * really only useful for debugging purposes, but it includes information
     * on what raw prefix the subspace is using.
     * @return a printable representation of the subspace
     */
    @Override
    public String toString() {
        return "Subspace(rawPrefix=" + printable(rawPrefix) + ")";
    }

    /**
     * Returns a hash-table compatible hash of this subspace. This is based off
     * of the hash of the underlying byte-array prefix.
     * @return a hash of this subspace
     */
    @Override
    public int hashCode() {
        return Arrays.hashCode(rawPrefix);
    }

    /**
     * Gets a new subspace which is equivalent to this subspace with its prefix {@link Tuple} extended by
     * the specified {@code Object}. The object will be inserted into a {@link Tuple} and passed to {@link #get(Tuple)}.
     *
     * @param obj an {@code Object} compatible with {@code Tuple}s
     * @return a new subspace formed by joining this {@code Subspace}'s prefix to {@code obj}
     */
    public Subspace get(Object obj) {
        return get(Tuple.from(obj));
    }

    /**
     * Gets a new subspace which is equivalent to this subspace with its prefix {@link Tuple} extended by
     * the specified {@link Tuple}.
     *
     * @param tuple the {@link Tuple} used to form the new {@code Subspace}
     * @return a new subspace formed by joining this {@code Subspace}'s prefix to {@code tuple}
     */
    public Subspace get(Tuple tuple) {
        return subspace(tuple);
    }

    /**
     * Gets the key encoding the prefix used for this {@code Subspace}. This is equivalent to
     * {@link #pack}ing the empty {@link Tuple}.
     *
     * @return the key encoding the prefix used for this {@code Subspace}
     */
    public byte[] getKey() {
        return pack();
    }

    /**
     * Gets the key encoding the prefix used for this {@code Subspace}.
     *
     * @return the key encoding the prefix used for this {@code Subspace}
     */
    public byte[] pack() {
        return Arrays.copyOf(rawPrefix, rawPrefix.length);
    }

    /**
     * Gets the key encoding the specified {@code Object} in this {@code Subspace}. {@code obj} is
     * inserted into a {@link Tuple} and packed with {@link #pack(Tuple)}.
     *
     * @param obj an {@code Object} to be packed that is compatible with {@link Tuple}s
     * @return the key encoding the tuple derived from {@code obj}
     */
    public byte[] pack(Object obj) {
        return pack(Tuple.from(obj));
    }

    /**
     * Gets the key encoding the specified tuple in this {@code Subspace}. For example, if you have a {@code Subspace}
     * with prefix {@link Tuple} {@code ("users")} and you use it to pack the {@link Tuple} {@code ("Smith")},
     * the result is the same as if you packed the {@link Tuple} {@code ("users", "Smith")}.
     *
     * @param tuple the {@code Tuple} to be packed
     * @return the key encoding the specified tuple in this {@code Subspace}
     */
    public byte[] pack(Tuple tuple) {
        return tuple.pack(rawPrefix);
    }

    /**
     * Gets the key encoding the specified tuple in this {@code Subspace} for use with
     * {@link com.apple.foundationdb.MutationType#SET_VERSIONSTAMPED_KEY MutationType.SET_VERSIONSTAMPED_KEY}.
     * There must be exactly one incomplete {@link Versionstamp} included in the given {@link Tuple}. It will
     * create a key that is within this {@code Subspace} that can be provided as the {@code key} argument to
     * {@link com.apple.foundationdb.Transaction#mutate(com.apple.foundationdb.MutationType, byte[], byte[]) Transaction.mutate()}
     * with the {@link com.apple.foundationdb.MutationType#SET_VERSIONSTAMPED_KEY SET_VERSIONSTAMPED_KEY}
     * mutation. This will throw an {@link IllegalArgumentException} if the {@link Tuple} does not
     * contain an incomplete {@link Versionstamp} or if it contains multiple.
     *
     * @param tuple the {@code Tuple} to be packed
     * @return the key encoding the specified tuple in this {@code Subspace}
     * @throws IllegalArgumentException if {@code tuple} does not contain exactly one incomplete {@link Versionstamp}
     */
    public byte[] packWithVersionstamp(Tuple tuple) {
        return tuple.packWithVersionstamp(rawPrefix);
    }

    /**
     * Gets the {@link Tuple} encoded by the given key, with this {@code Subspace}'s prefix {@link Tuple} and
     * {@code raw prefix} removed.
     *
     * @param key The key being decoded
     * @return the {@link Tuple} encoded by {@code key} with the prefix removed
     */
    public Tuple unpack(byte[] key) {
        if(!contains(key))
            throw new IllegalArgumentException("Cannot unpack key that is not contained in subspace.");

        return Tuple.fromBytes(Arrays.copyOfRange(key, rawPrefix.length, key.length));
    }

    /**
     * Gets a {@link Range} representing all keys strictly in the {@code Subspace}.
     *
     * @return the {@link Range} of keyspace corresponding to this {@code Subspace}
     */
    public Range range() {
        return range(EMPTY_TUPLE);
    }

    /**
     * Gets a {@link Range} representing all keys in the {@code Subspace} strictly starting with
     * the specified {@link Tuple}.
     *
     * @param tuple the {@code Tuple} whose sub-keys we are searching for
     * @return the {@link Range} of keyspace corresponding to {@code tuple}
     */
    public Range range(Tuple tuple) {
        Range p = tuple.range();
        return new Range(join(rawPrefix, p.begin), join(rawPrefix, p.end));
    }

    /**
     * Tests whether the specified key starts with this {@code Subspace}'s prefix, indicating that
     * the {@code Subspace} logically contains key.
     *
     * @param key the key to be tested
     * @return {@code true} if {@code key} starts with {@code Subspace.key()}
     */
    public boolean contains(byte[] key) {
        return ByteArrayUtil.startsWith(key, rawPrefix);
    }

    /**
     * Gets a new subspace which is equivalent to this subspace with its prefix {@link Tuple} extended by
     * the specified {@link Tuple}.
     *
     * @param tuple the {@link Tuple} used to form the new {@code Subspace}
     * @return a new subspace formed by joining this {@code Subspace}'s prefix to {@code tuple}
     */
    public Subspace subspace(Tuple tuple) {
        return new Subspace(tuple, rawPrefix);
    }
}
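The Subspace methods in this hunk are only touched for whitespace, but they are the heart of tuple-layer keyspace management. As a quick orientation for readers of the diff, a minimal usage sketch (not part of the commit; the "users"/"Smith" values echo the javadoc example above) could look like this:

```java
import com.apple.foundationdb.Range;
import com.apple.foundationdb.subspace.Subspace;
import com.apple.foundationdb.tuple.Tuple;

public class SubspaceExample {
    public static void main(String[] args) {
        // A subspace whose keys all start with the packed tuple ("users").
        Subspace users = new Subspace(Tuple.from("users"));

        // pack() prepends the subspace prefix, so this produces the same key as
        // packing the tuple ("users", "Smith") directly.
        byte[] key = users.pack(Tuple.from("Smith"));

        // unpack() strips the prefix and decodes the remainder back into a tuple.
        Tuple decoded = users.unpack(key);
        System.out.println(decoded.getString(0)); // "Smith"

        // range() covers every key strictly inside the subspace; this is what
        // DirectoryLayer uses above when listing a subdirectory.
        Range all = users.range();
        System.out.println(users.contains(key)); // true
        System.out.println(all);
    }
}
```

Packing through a subspace and unpacking the result round-trips the tuple, and only tuple encoding is involved, so the sketch does not need a running cluster.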

@@ -63,45 +63,45 @@ import java.util.Iterator;
 * </ul>
 */
public class IterableComparator implements Comparator<Iterable<?>> {
    /**
     * Creates a new {@code IterableComparator}. This {@link Comparator} has
     * no internal state.
     */
    public IterableComparator() {}

    /**
     * Compare two {@link Iterable}s in a way consistent with their
     * byte representation. This is done element-wise and is consistent
     * with a number of other ways of sorting {@link Tuple}s. This will
     * raise an {@link IllegalArgumentException} if any of the items
     * of either {@link Iterable} cannot be serialized by a {@link Tuple}.
     *
     * @param iterable1 the first {@link Iterable} of items
     * @param iterable2 the second {@link Iterable} of items
     * @return a negative number if the first iterable would sort before the second
     *     when serialized, a positive number if the opposite is true, and zero
     *     if the two are equal
     */
    @Override
    public int compare(Iterable<?> iterable1, Iterable<?> iterable2) {
        Iterator<?> i1 = iterable1.iterator();
        Iterator<?> i2 = iterable2.iterator();

        while(i1.hasNext() && i2.hasNext()) {
            int itemComp = TupleUtil.compareItems(i1.next(), i2.next());
            if(itemComp != 0) {
                return itemComp;
            }
        }

        if(i1.hasNext()) {
            // iterable2 is a prefix of iterable1.
            return 1;
        }
        if(i2.hasNext()) {
            // iterable1 is a prefix of iterable2.
            return -1;
        }
        return 0;
    }
}
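As a side note, the comparator above orders any Iterables of tuple-encodable items the same way their packed keys would sort. A small standalone sketch (not part of the commit; the sample values are illustrative) follows:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import com.apple.foundationdb.tuple.IterableComparator;

public class IterableComparatorExample {
    public static void main(String[] args) {
        // Any Iterable of tuple-encodable items can be compared; plain lists are used here.
        List<List<?>> items = new ArrayList<>();
        items.add(Arrays.asList("users", "Smith"));
        items.add(Arrays.asList("users"));
        items.add(Arrays.asList("admins", 42L));

        // Sorts in the same order as the packed byte representations of the
        // corresponding tuples, so ("users") lands just before ("users", "Smith").
        items.sort(new IterableComparator());

        for (List<?> item : items) {
            System.out.println(item);
        }
    }
}
```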

@@ -27,89 +27,89 @@ import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;

public abstract class AbstractTester {
    public static final int API_VERSION = 510;
    protected static final int NUM_RUNS = 25;
    protected static final Charset ASCII = Charset.forName("ASCII");

    protected TesterArgs args;
    protected Random random;
    protected TestResult result;
    protected FDB fdb;

    public AbstractTester() {
        args = null;
        random = new Random();
        result = new TestResult(random);
    }

    public void runTest() {
        try(Database db = fdb.open()) {
            try {
                testPerformance(db);
            }
            catch (Exception e) {
                result.addError(wrapAndPrintError(e, "Failed to complete all tests"));
            }
        }
        catch (Exception e) {
            result.addError(wrapAndPrintError(e, "fdb.open failed"));
            return;
        }
    }

    public abstract void testPerformance(Database db);

    public String multiVersionDescription() {
        if (args == null) return "";

        if (!args.useMultiversionApi()) {
            return "multi-version API disabled";
        } else if (args.useExternalClient()) {
            if (args.putCallbacksOnExternalThread()) {
                return "external client on external thread";
            } else {
                return "external client on main thread";
            }
        } else {
            return "local client";
        }
    }

    public void run(String[] argStrings) {
        args = TesterArgs.parseArgs(argStrings);
        if (args == null) return;

        fdb = FDB.selectAPIVersion(API_VERSION);

        // Validate argument combinations and set options.
        if (!args.useMultiversionApi()) {
            if (args.putCallbacksOnExternalThread() || args.useExternalClient()) {
                throw new IllegalArgumentException("Invalid multi-version API argument combination");
            }
            fdb.options().setDisableMultiVersionClientApi();
        }
        if (args.putCallbacksOnExternalThread()) {
            if (!args.useExternalClient()) {
                throw new IllegalArgumentException("Cannot enable callbacks on external thread without using external client");
            }
            fdb.options().setCallbacksOnExternalThreads();
        }
        if (args.useExternalClient()) {
            fdb.options().setDisableLocalClient();
        }

        try {
            runTest();
        } catch (Exception e) {
            result.addError(e);
        }

        result.save(args.getOutputDirectory());
    }

    public RuntimeException wrapAndPrintError(Throwable t, String message) {
        String errorMessage = message + ": " + t.getClass() + ": " + t.getMessage() + "\n";
        t.printStackTrace();
        return new RuntimeException(errorMessage, t);
    }
}
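For orientation, a concrete tester plugs into this class by implementing testPerformance() and reporting through the shared TestResult. The sketch below is hypothetical (a made-up NoopTester class and KPI name, assumed to sit alongside AbstractTester in the com.apple.foundationdb.test sources) and is not part of the commit:

```java
package com.apple.foundationdb.test;

import com.apple.foundationdb.Database;

public class NoopTester extends AbstractTester {
    @Override
    public void testPerformance(Database db) {
        // Time a trivial read-version loop and report it through the shared TestResult,
        // mirroring how PerformanceTester (below) reports its KPIs.
        long start = System.nanoTime();
        for (int i = 0; i < NUM_RUNS; i++) {
            db.run(tr -> tr.getReadVersion().join());
        }
        long end = System.nanoTime();
        double perSec = NUM_RUNS * 1_000_000_000.0 / (end - start);
        result.addKpi("Noop transactions (" + multiVersionDescription() + ")", (int) perSec, "transactions/s");
    }

    public static void main(String[] args) {
        new NoopTester().run(args);
    }
}
```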

@@ -24,7 +24,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import com.apple.foundationdb.Cluster;
import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;
import com.apple.foundationdb.TransactionContext;

@@ -22,7 +22,6 @@ package com.apple.foundationdb.test;

import java.util.concurrent.atomic.AtomicInteger;

import com.apple.foundationdb.Cluster;
import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;
import com.apple.foundationdb.KeyValue;

@@ -53,15 +53,14 @@ public class ParallelRandomScan {
     }
 
     private static void runTest(Database database,
-            int parallelism, int rows, int duration) throws InterruptedException
-    {
+            int parallelism, int rows, int duration) throws InterruptedException {
         final Random r = new Random();
         final AtomicInteger readsCompleted = new AtomicInteger(0);
         final AtomicInteger errors = new AtomicInteger(0);
         final Semaphore coordinator = new Semaphore(parallelism);
         final ContinuousSample<Long> latencies = new ContinuousSample<>(1000);
 
-        try(final Transaction tr = database.createTransaction()) {
+        try(Transaction tr = database.createTransaction()) {
         tr.options().setReadYourWritesDisable();
 
         // Clearing the whole database before starting means all reads are local

@@ -41,370 +41,370 @@ import com.apple.foundationdb.async.AsyncUtil;
import com.apple.foundationdb.tuple.ByteArrayUtil;

public class PerformanceTester extends AbstractTester {
    private final int keyCount;
    private final int keySize;
    private final int valueSize;

    private final String keyFormat;
    private final byte[] valueBytes;

    public static final int DEFAULT_KEY_COUNT = 10_000;
    public static final int DEFAULT_KEY_SIZE = 16;
    public static final int DEFAULT_VALUE_SIZE = 100;

    private enum Tests {
        FUTURE_LATENCY("Java Completable API future throughput"),
        SET("Java Completable API set throughput"),
        CLEAR("Java Completable API clear throughput"),
        CLEAR_RANGE("Java Completable API clear_range throughput"),
        PARALLEL_GET("Java Completable API parallel get throughput"),
        SERIAL_GET("Java Completable API serial get throughput"),
        GET_RANGE("Java Completable API get_range throughput"),
        GET_KEY("Java Completable API get_key throughput"),
        GET_SINGLE_KEY_RANGE("Java Completable API get_single_key_range throughput"),
        ALTERNATING_GET_SET("Java Completable API alternating get and set throughput"),
        WRITE_TRANSACTION("Java Completable API single-key transaction throughput");

        private String kpi;
        private Function<? super Database, ? extends Double> function;

        Tests(String kpi) {
            this.kpi = kpi;
        }

        public void setFunction(Function<? super Database, ? extends Double> function) {
            this.function = function;
        }

        public Function<? super Database, ? extends Double> getFunction() {
            return function;
        }

        public String getKpi() {
            return kpi;
        }
    }

    public PerformanceTester() {
        this(DEFAULT_KEY_COUNT, DEFAULT_KEY_SIZE, DEFAULT_VALUE_SIZE);
    }

    public PerformanceTester(int keyCount, int keySize, int valueSize) {
        super();
        this.keyCount = keyCount;
        this.keySize = keySize;
        this.valueSize = valueSize;

        keyFormat = "%0" + keySize + "d";

        valueBytes = new byte[valueSize];
        Arrays.fill(valueBytes, (byte)'x');

        // Initialize tests.
        Tests.FUTURE_LATENCY.setFunction(db -> futureLatency(db, 100_000));
        Tests.SET.setFunction(db -> set(db, 100_000));
        Tests.CLEAR.setFunction(db -> clear(db, 100_000));
        Tests.CLEAR_RANGE.setFunction(db -> clearRange(db, 100_000));
        Tests.PARALLEL_GET.setFunction(db -> parallelGet(db, 10_000));
        Tests.SERIAL_GET.setFunction(db -> serialGet(db, 2_000));
        Tests.GET_RANGE.setFunction(db -> getRange(db, 1_000));
        Tests.GET_KEY.setFunction(db -> getKey(db, 2_000));
        Tests.GET_SINGLE_KEY_RANGE.setFunction(db -> getSingleKeyRange(db, 2_000));
        Tests.ALTERNATING_GET_SET.setFunction(db -> alternatingGetSet(db, 2_000));
        Tests.WRITE_TRANSACTION.setFunction(db -> writeTransaction(db, 1_000));
    }

    @Override
    public void testPerformance(Database db) {
        insertData(db);

        List<String> testsToRun;
        if (args.getTestsToRun().isEmpty()) {
            testsToRun = Arrays.stream(Tests.values()).map(Tests::name).map(String::toLowerCase).sorted().collect(Collectors.toList());
        } else {
            testsToRun = args.getTestsToRun();
        }

        for (String test : testsToRun) {
            Tests testObj;
            try {
                testObj = Tests.valueOf(test.toUpperCase());
            } catch (IllegalArgumentException e) {
                result.addError(new IllegalArgumentException("Test " + test + " not implemented"));
                continue;
            }

            Function<? super Database, ? extends Double> function = testObj.getFunction();

            try {
                Thread.sleep(5_000);
            } catch (InterruptedException e) {
                result.addError(wrapAndPrintError(e, "Interrupted while sleeping"));
            }

            System.out.println("Running test " + test);

            List<Double> results = new ArrayList<>(NUM_RUNS);

            for (int i = 0; i < NUM_RUNS; i++) {
                try {
                    results.add(function.apply(db));
                } catch (Exception e) {
                    result.addError(wrapAndPrintError(e, "Performance test failed: " + test));
                    break;
                }
            }

            if (results.size() == NUM_RUNS) {
                Collections.sort(results);
                result.addKpi(String.format("%s (%s)", testObj.getKpi(), multiVersionDescription()), results.get(results.size()/2).intValue(), "keys/s");
            }
        }
    }

    public void insertData(Database db) {
        System.out.println("Loading database");

        db.run(tr -> {
            byte[] subspacePrefix = args.getSubspace().pack();
            if (subspacePrefix.length == 0) {
                // Clear user space.
                tr.clear(new byte[0], new byte[]{(byte)0xff});
            } else {
                tr.clear(args.getSubspace().range());
            }
            return null;
        });

        int keysPerActor = 100_000 / (keySize + valueSize);
        int numActors = (int)Math.ceil(keyCount*1.0/keysPerActor);

        List<CompletableFuture<Void>> futures = IntStream.range(0, numActors).mapToObj(i -> {
            int startKey = keysPerActor * i;
            int endKey = (i + 1 == numActors) ? (keyCount) : (keysPerActor * (i+1));
            return db.runAsync(tr -> {
                IntStream.range(startKey, endKey).forEach(keyIndex -> tr.set(key(keyIndex), value(keyIndex)));
                return CompletableFuture.completedFuture((Void)null);
            });
        }).collect(Collectors.toList());

        try {
            AsyncUtil.whenAll(futures).get();
        } catch (InterruptedException | ExecutionException e) {
            result.addError(wrapAndPrintError(e, "Data insertion failed"));
        }

        // Give the database time to re-balance
        try {
            Thread.sleep(15_000);
        } catch (InterruptedException e) {
            result.addError(wrapAndPrintError(e, "Interrupted while waiting for quiescence"));
        }
    }

    public Double futureLatency(Database db, int count) {
        return db.run(tr -> {
            tr.options().setRetryLimit(5);
            tr.getReadVersion().join();

            long start = System.nanoTime();

            for (int i = 0; i < count; i++) {
                tr.getReadVersion().join();
            }

            long end = System.nanoTime();

            return count*1_000_000_000.0/(end - start);
        });
    }

    public Double clear(Database db, int count) {
        try(Transaction tr = db.createTransaction()) {
            long start = System.nanoTime();
            for(int i = 0; i < count; i++) {
                tr.clear(randomKey());
            }
            long end = System.nanoTime();
            tr.cancel();

            return count * 1_000_000_000.0 / (end - start);
        }
    }

    public Double clearRange(Database db, int count) {
        try(Transaction tr = db.createTransaction()) {
            long start = System.nanoTime();
            for(int i = 0; i < count; i++) {
                int keyIndex = randomKeyIndex();
                tr.clear(key(keyIndex), key(keyIndex + 1));
            }
            long end = System.nanoTime();
            tr.cancel();

            return count * 1_000_000_000.0 / (end - start);
        }
    }

    public Double set(Database db, int count) {
        try(Transaction tr = db.createTransaction()) {
            long start = System.nanoTime();
            for(int i = 0; i < count; i++) {
                int keyIndex = randomKeyIndex();
                tr.set(key(keyIndex), value(keyIndex));
            }
            long end = System.nanoTime();
            tr.cancel();

            return count * 1_000_000_000.0 / (end - start);
        }
    }

    public Double parallelGet(TransactionContext tcx, int count) {
        return tcx.run(tr -> {
            tr.options().setRetryLimit(5);
            long start = System.nanoTime();

            List<CompletableFuture<byte[]>> futures = IntStream.range(0, count)
                    .mapToObj(ignore -> tr.get(randomKey()))
                    .collect(Collectors.toList());
            AsyncUtil.whenAll(futures).join();
            long end = System.nanoTime();

            return count*1_000_000_000.0/(end - start);
        });
    }

    public Double alternatingGetSet(TransactionContext tcx, int count) {
        return tcx.run(tr -> {
            tr.options().setRetryLimit(5);
            long start = System.nanoTime();

            List<CompletableFuture<byte[]>> futures = IntStream.range(0, count)
                    .mapToObj(ignore -> {
                        int keyIndex = randomKeyIndex();
                        byte[] keyBytes = key(keyIndex);
                        byte[] valBytes = value(keyIndex);

                        tr.set(keyBytes, valBytes);
                        return tr.get(keyBytes);
                    }).collect(Collectors.toList());
            AsyncUtil.whenAll(futures).join();
            long end = System.nanoTime();

            return count*1_000_000_000.0/(end - start);
        });
    }

    public Double serialGet(TransactionContext tcx, int count) {
        return tcx.run(tr -> {
            tr.options().setRetryLimit(5);

            List<byte[]> keys;
            if (count > keyCount/2) {
                keys = Stream.generate(this::randomKey).limit(count).collect(Collectors.toList());
            } else {
                Set<Integer> keySet = new HashSet<>();
                while (keySet.size() < count) {
                    keySet.add(randomKeyIndex());
                }
                keys = keySet.stream().map(this::key).collect(Collectors.toList());
            }

            long start = System.nanoTime();
            for (byte[] key : keys) {
                tr.get(key).join();
            }
            long end = System.nanoTime();

            return count*1_000_000_000.0/(end - start);
        });
    }

    public Double getRange(TransactionContext tcx, int count) {
        return tcx.run(tr -> {
            tr.options().setRetryLimit(5);
            int startIndex = random.nextInt(keyCount - count);

            long start = System.nanoTime();
            tr.getRange(key(startIndex), key(startIndex+count)).asList().join();
            long end = System.nanoTime();

            return count*1_000_000_000.0/(end - start);
        });
    }

    public Double getKey(TransactionContext tcx, int count) {
        return tcx.run(tr -> {
            tr.options().setRetryLimit(5);

            long start = System.nanoTime();
            for (int i = 0; i < count; i++) {
                tr.getKey(new KeySelector(randomKey(), true, random.nextInt(20) - 10)).join();
            }
            long end = System.nanoTime();

            return count*1_000_000_000.0/(end - start);
        });
    }

    public Double getSingleKeyRange(TransactionContext tcx, int count) {
        return tcx.run(tr -> {
            tr.options().setRetryLimit(5);
            long start = System.nanoTime();
            for (int i = 0; i < count; i++) {
                int keyIndex = randomKeyIndex();
                tr.getRange(key(keyIndex), key(keyIndex + 1), 2).asList().join();
            }
            long end = System.nanoTime();

            return count*1_000_000_000.0/(end - start);
        });
    }

    public Double writeTransaction(TransactionContext tcx, int count) {
        long start = System.nanoTime();
        for (int i = 0; i < count; i++) {
            tcx.run(tr -> {
                int keyIndex = randomKeyIndex();
                tr.set(key(keyIndex), value(keyIndex));
                return null;
            });
        }
        long end = System.nanoTime();

        return count*1_000_000_000.0/(end - start);
    }

    public byte[] key(int i) {
        return ByteArrayUtil.join(args.getSubspace().pack(), String.format(keyFormat, i).getBytes(ASCII));
    }

    public int randomKeyIndex() {
        return random.nextInt(keyCount);
    }

    public byte[] randomKey() {
        return key(randomKeyIndex());
    }

    public byte[] value(int key) {
        return valueBytes;
    }

    public static void main(String[] args) {
        System.out.println("Running Java performance test on Java version " + System.getProperty("java.version"));
        try {
            new PerformanceTester().run(args);
        } catch (IllegalArgumentException e) {
            System.out.println("Could not run test due to malformed arguments.");
            System.out.println(e.getMessage());
            System.exit(1);
        } catch (Exception e) {
            System.out.println("Fatal error encountered during run: " + e);
            e.printStackTrace();
            System.exit(2);
        }
    }
||||
|
||||
public Double getRange(TransactionContext tcx, int count) {
|
||||
return tcx.run(tr -> {
|
||||
tr.options().setRetryLimit(5);
|
||||
int startIndex = random.nextInt(keyCount - count);
|
||||
|
||||
long start = System.nanoTime();
|
||||
tr.getRange(key(startIndex), key(startIndex+count)).asList().join();
|
||||
long end = System.nanoTime();
|
||||
|
||||
return count*1_000_000_000.0/(end - start);
|
||||
});
|
||||
}
|
||||
|
||||
public Double getKey(TransactionContext tcx, int count) {
|
||||
return tcx.run(tr -> {
|
||||
tr.options().setRetryLimit(5);
|
||||
|
||||
long start = System.nanoTime();
|
||||
for (int i = 0; i < count; i++) {
|
||||
tr.getKey(new KeySelector(randomKey(), true, random.nextInt(20) - 10)).join();
|
||||
}
|
||||
long end = System.nanoTime();
|
||||
|
||||
return count*1_000_000_000.0/(end - start);
|
||||
});
|
||||
}
|
||||
|
||||
public Double getSingleKeyRange(TransactionContext tcx, int count) {
|
||||
return tcx.run(tr -> {
|
||||
tr.options().setRetryLimit(5);
|
||||
long start = System.nanoTime();
|
||||
for (int i = 0; i < count; i++) {
|
||||
int keyIndex = randomKeyIndex();
|
||||
tr.getRange(key(keyIndex), key(keyIndex + 1), 2).asList().join();
|
||||
}
|
||||
long end = System.nanoTime();
|
||||
|
||||
return count*1_000_000_000.0/(end - start);
|
||||
});
|
||||
}
|
||||
|
||||
public Double writeTransaction(TransactionContext tcx, int count) {
|
||||
long start = System.nanoTime();
|
||||
for (int i = 0; i < count; i++) {
|
||||
tcx.run(tr -> {
|
||||
int keyIndex = randomKeyIndex();
|
||||
tr.set(key(keyIndex), value(keyIndex));
|
||||
return null;
|
||||
});
|
||||
}
|
||||
long end = System.nanoTime();
|
||||
|
||||
return count*1_000_000_000.0/(end - start);
|
||||
}
|
||||
|
||||
public byte[] key(int i) {
|
||||
return ByteArrayUtil.join(args.getSubspace().pack(), String.format(keyFormat, i).getBytes(ASCII));
|
||||
}
|
||||
|
||||
public int randomKeyIndex() {
|
||||
return random.nextInt(keyCount);
|
||||
}
|
||||
|
||||
public byte[] randomKey() {
|
||||
return key(randomKeyIndex());
|
||||
}
|
||||
|
||||
public byte[] value(int key) {
|
||||
return valueBytes;
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
System.out.println("Running Java performance test on Java version " + System.getProperty("java.version"));
|
||||
try {
|
||||
new PerformanceTester().run(args);
|
||||
} catch (IllegalArgumentException e) {
|
||||
System.out.println("Could not run test due to malformed arguments.");
|
||||
System.out.println(e.getMessage());
|
||||
System.exit(1);
|
||||
} catch (Exception e) {
|
||||
System.out.println("Fatal error encountered during run: " + e);
|
||||
e.printStackTrace();
|
||||
System.exit(2);
|
||||
}
|
||||
}
|
||||
|
||||
}
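
All of the micro-benchmarks above share one timing idiom: run an operation `count` times between two System.nanoTime() calls and report count * 1_000_000_000.0 / (end - start) as operations per second. Below is a minimal standalone sketch of that idiom outside the tester harness; the class and helper names are illustrative only and are not part of the bindings or of this commit.

import java.util.function.IntConsumer;

public class ThroughputSketch {
	// Runs `op` `count` times and returns operations per second,
	// mirroring the count * 1_000_000_000.0 / (end - start) pattern used above.
	static double measure(int count, IntConsumer op) {
		long start = System.nanoTime();
		for (int i = 0; i < count; i++) {
			op.accept(i);
		}
		long end = System.nanoTime();
		return count * 1_000_000_000.0 / (end - start);
	}

	public static void main(String[] args) {
		double opsPerSec = measure(10_000, i -> Math.sqrt(i)); // stand-in workload
		System.out.println(String.format("%.0f ops/s", opsPerSec));
	}
}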
@@ -32,62 +32,62 @@ import com.apple.foundationdb.Transaction;
import com.apple.foundationdb.tuple.ByteArrayUtil;

public class RYWBenchmark extends AbstractTester {
	private int keyCount;

	public static final int DEFAULT_KEY_COUNT = 10_000;
	public static final int DEFAULT_KEY_SIZE = 16;

	private final String keyFormat;

	private enum Tests {
		GET_SINGLE("RYW Java Completable: get single cached value throughput"),
		GET_MANY_SEQUENTIAL("RYW Java Completable: get sequential cached value throughput"),
		GET_RANGE_BASIC("RYW Java Completable: get range cached values throughput"),
		SINGLE_CLEAR_GET_RANGE("RYW Java Completable: get range cached values with clears throughput"),
		CLEAR_RANGE_GET_RANGE("RYW Java Completable: get range cached values with clear ranges throughput"),
		INTERLEAVED_SETS_GETS("RYW Java Completable: interleaved sets and gets on a single key throughput");

		private String kpi;
		private Function<? super Transaction, ? extends Double> function;

		Tests(String kpi) {
			this.kpi = kpi;
		}

		public void setFunction(Function<? super Transaction, ? extends Double> function) {
			this.function = function;
		}

		public Function<? super Transaction, ? extends Double> getFunction() {
			return function;
		}

		public String getKpi() {
			return kpi;
		}
	}

	public RYWBenchmark() {
		this(DEFAULT_KEY_COUNT, DEFAULT_KEY_SIZE);
	}

	public RYWBenchmark(int keyCount, int keySize) {
		super();
		this.keyCount = keyCount;

		keyFormat = "%0" + keySize + "d";

		Tests.GET_SINGLE.setFunction(tr -> getSingle(tr, 10_000));
		Tests.GET_MANY_SEQUENTIAL.setFunction(tr -> getManySequential(tr, 10_000));
		Tests.GET_RANGE_BASIC.setFunction(tr -> getRangeBasic(tr, 1_000));
		Tests.SINGLE_CLEAR_GET_RANGE.setFunction(tr -> singleClearGetRange(tr, 1_000));
		Tests.CLEAR_RANGE_GET_RANGE.setFunction(tr -> clearRangeGetRange(tr, 1_000));
		Tests.INTERLEAVED_SETS_GETS.setFunction(tr -> interleavedSetsGets(tr, 10_000));
	}

	@Override
	public void testPerformance(Database db) {
		try(Transaction tr = db.createTransaction()) {
			insertData(tr);

			List<String> testsToRun;
@@ -139,103 +139,103 @@ public class RYWBenchmark extends AbstractTester {

			tr.cancel();
		}
	}

	public Double getSingle(Transaction tr, int count) {
		long start = System.nanoTime();
		for (int i = 0; i < count; i++) {
			tr.get(key(5001)).join();
		}
		long end = System.nanoTime();

		return count*1_000_000_000.0/(end - start);
	}

	public Double getManySequential(Transaction tr, int count) {
		long start = System.nanoTime();
		for (int i = 0; i < count; i++) {
			tr.get(key(i)).join();
		}
		long end = System.nanoTime();

		return count*1_000_000_000.0/(end - start);
	}

	public Double getRangeBasic(Transaction tr, int count) {
		long start = System.nanoTime();
		for (int i = 0; i < count; i++) {
			tr.getRange(key(0), key(keyCount)).asList().join();
		}
		long end = System.nanoTime();

		return count * 1_000_000_000.0 * keyCount/(end - start);
	}

	public Double singleClearGetRange(Transaction tr, int count) {
		for (int i = 0; i < keyCount; i += 2) {
			tr.clear(("" + i).getBytes(ASCII));
		}
		long start = System.nanoTime();
		for (int i = 0; i < count; i++) {
			tr.getRange(key(0), key(keyCount)).asList().join();
		}
		long end = System.nanoTime();

		Double kpi = count * 1_000_000_000.0 * keyCount / 2 / (end - start);
		insertData(tr);
		return kpi;
	}

	public Double clearRangeGetRange(Transaction tr, int count) {
		for (int i = 0; i < keyCount; i += 4) {
			tr.clear(key(i), key(i+1));
		}
		long start = System.nanoTime();
		for (int i = 0; i < count; i++) {
			tr.getRange(key(0), key(keyCount)).asList().join();
		}
		long end = System.nanoTime();

		Double kpi = count * 1_000_000_000.0 * keyCount * 3 / 4 / (end - start);
		insertData(tr);
		return kpi;
	}

	public Double interleavedSetsGets(Transaction tr, int count) {
		long start = System.nanoTime();
		byte[] keyBytes = "foo".getBytes(ASCII);
		tr.set(keyBytes, "1".getBytes(ASCII));
		for (int i = 0; i < count; i++) {
			int old = Integer.parseInt(new String(tr.get(keyBytes).join(), ASCII));
			tr.set(keyBytes, ("" + (old + 1)).getBytes(ASCII));
		}
		long end = System.nanoTime();

		return count * 1_000_000_000.0/(end - start);
	}

	public void insertData(Transaction tr) {
		tr.clear(new byte[0], new byte[]{(byte)0xff}); // Clear user space.
		for (int i = 0; i < keyCount; i++) {
			tr.set(key(i), "foo".getBytes(ASCII));
		}
	}

	public byte[] key(int i) {
		return ByteArrayUtil.join(args.getSubspace().pack(), String.format(keyFormat, i).getBytes(ASCII));
	}

	public static void main(String[] args) {
		System.out.println("Running Java RYW benchmark on Java version " + System.getProperty("java.version"));
		try {
			new RYWBenchmark().run(args);
		} catch (IllegalArgumentException e) {
			System.out.println("Could not run test due to malformed arguments.");
			System.exit(1);
		} catch (Exception e) {
			System.out.println("Fatal error encountered during run: " + e);
			e.printStackTrace();
			System.exit(2);
		}
	}
}
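
The RYW KPIs above time reads that are answered from a transaction's read-your-writes cache rather than from the cluster. The following sketch shows that behaviour in isolation, using only calls that already appear in these testers (selectAPIVersion, open, run, set, get); it is an illustration, not part of this commit.

import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;

public class RywCacheSketch {
	public static void main(String[] args) {
		FDB fdb = FDB.selectAPIVersion(510);
		try(Database db = fdb.open()) {
			byte[] value = db.run(tr -> {
				byte[] key = "foo".getBytes();
				tr.set(key, "1".getBytes());
				// Read-your-writes: this get is served from the transaction's
				// local cache, which is what the RYW benchmark is timing.
				return tr.get(key).join();
			});
			System.out.println(new String(value));
		}
	}
}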
@@ -22,7 +22,6 @@ package com.apple.foundationdb.test;

import java.util.concurrent.atomic.AtomicInteger;

import com.apple.foundationdb.Cluster;
import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;
import com.apple.foundationdb.TransactionContext;

@@ -439,15 +439,15 @@ public class StackTester {
				byte[] prefix = (byte[]) params.get(0);

				Map<Integer, StackEntry> entries = new HashMap<>();
				while(inst.size() > 0) {
					entries.put(inst.size()-1, inst.pop());
					if(entries.size() == 100) {
						logStack(inst.context.db, entries, prefix);
						entries.clear();
					}
				}

				logStack(inst.context.db, entries, prefix);
			}
			else {
				throw new IllegalArgumentException("Unrecognized (or unimplemented) operation");

@@ -576,7 +576,7 @@ public class StackTester {
	}

	private static void logStack(Database db, Map<Integer, StackEntry> entries, byte[] prefix) {
		db.run(tr -> {
			for(Map.Entry<Integer, StackEntry> it : entries.entrySet()) {
				byte[] pk = Tuple.from(it.getKey(), it.getValue().idx).pack(prefix);
				byte[] pv = Tuple.from(StackUtils.serializeFuture(it.getValue().value)).pack();

@@ -588,7 +588,7 @@ public class StackTester {
	}

	private static boolean checkWatches(List<CompletableFuture<Void>> watches, Database db, boolean expected) {
		for(CompletableFuture<Void> w : watches) {
			if(w.isDone() || expected) {
				try {
					w.join();

@@ -647,8 +647,8 @@ public class StackTester {
				return null;
			});

			if(checkWatches(watches, db, true)) {
				return;
			}
		}
	}
@@ -62,7 +62,7 @@ public class StackUtils {
				throw e;
			}

			item = getErrorBytes(ex);
		}
		return item;
	}
@@ -30,119 +30,119 @@ import java.util.Random;
import java.util.TreeMap;

public class TestResult {
	private long id;
	private Map<String,Map<String,Object>> kpis;
	private List<Throwable> errors;

	public TestResult(Random r) {
		id = Math.abs(r.nextLong());
		kpis = new TreeMap<String,Map<String,Object>>(); // Tree map because we will have to print this out.
		errors = new ArrayList<Throwable>();
	}

	public void addKpi(String name, Number value, String units) {
		TreeMap<String,Object> kpi = new TreeMap<String,Object>();
		kpi.put("value", value);
		kpi.put("units", units);
		kpis.put(name, kpi);
	}

	public void addError(Throwable t) {
		errors.add(t);
	}

	public void save(String directory) {
		String file = "javaresult-" + id + ".json";
		if(directory.length() > 0) {
			file = directory + "/" + file;
		}

		// TODO: Should we use a real JSON library?

		StringBuilder outputBuilder = new StringBuilder();
		outputBuilder.append('{');

		// Add KPIs:
		outputBuilder.append("\"kpis\": {");
		boolean firstKpi = true;
		for (Map.Entry<String,Map<String,Object>> kpi : kpis.entrySet()) {
			if (firstKpi) {
				firstKpi = false;
			} else {
				outputBuilder.append(", ");
			}

			outputBuilder.append("\"");
			outputBuilder.append(kpi.getKey());
			outputBuilder.append("\": {");

			boolean firstEntry = true;

			for (Map.Entry<String,Object> entry : kpi.getValue().entrySet()) {
				if (firstEntry) {
					firstEntry = false;
				} else {
					outputBuilder.append(", ");
				}

				outputBuilder.append("\"");
				outputBuilder.append(entry.getKey());
				outputBuilder.append("\": ");

				Object value = entry.getValue();
				if (value instanceof String) {
					outputBuilder.append("\"");
					outputBuilder.append((String)value);
					outputBuilder.append("\"");
				} else {
					outputBuilder.append(value.toString());
				}
			}

			outputBuilder.append("}");
		}
		outputBuilder.append("}, ");

		// Add errors:
		outputBuilder.append("\"errors\":[");
		boolean firstError = true;
		for (Throwable t : errors) {
			if (firstError) {
				firstError = false;
			} else {
				outputBuilder.append(", ");
			}

			StringBuilder msgBuilder = new StringBuilder();
			msgBuilder.append(t.getClass().toString());
			msgBuilder.append(": ");
			msgBuilder.append(t.getMessage()); // Escaping quotes. Yeah, this won't work in the general case....
			StackTraceElement[] stackTraceElements = t.getStackTrace();
			for (StackTraceElement element : stackTraceElements) {
				msgBuilder.append("\n ");
				msgBuilder.append(element.toString());
			}
			outputBuilder.append('"');
			outputBuilder.append(msgBuilder.toString()
					.replace("\\", "\\\\")
					.replace("\"", "\\\"")
					.replace("\t", "\\t")
					.replace("\r", "\\r")
					.replace("\n", "\\n")
					.replace("\f", "\\f")
					.replace("\b", "\\b")
			);
			outputBuilder.append('"');
		}
		outputBuilder.append("]");

		outputBuilder.append('}');

		try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
			writer.write(outputBuilder.toString());
		} catch (IOException e) {
			System.out.println("Could not write results to file " + file);
			throw new RuntimeException("Could not save results: " + e.getMessage(), e);
		}
	}
}
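
For context, this is how the TestResult class above is driven by the testers: KPIs and errors are accumulated and then serialized to a javaresult-<id>.json file. A usage sketch with made-up values, assumed to live in the same package as TestResult:

import java.util.Random;

public class TestResultUsageSketch {
	public static void main(String[] args) {
		TestResult result = new TestResult(new Random());
		// KPI name and value are illustrative, not real measurements.
		result.addKpi("Java Completable API serial get throughput (local client)", 52_000, "keys/s");
		result.addError(new IllegalArgumentException("Test bad_test not implemented"));
		result.save(""); // empty directory -> javaresult-<id>.json in the working directory
	}
}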
@@ -27,126 +27,126 @@ import com.apple.foundationdb.subspace.Subspace;
import com.apple.foundationdb.tuple.Tuple;

public class TesterArgs {
	private String outputDirectory;
	private boolean multiversionApi;
	private boolean callbacksOnExternalThread;
	private boolean externalClient;
	private Subspace subspace;
	private List<String> testsToRun;

	private TesterArgs(String outputDirectory, boolean multiversionApi, boolean callbacksOnExternalThread, boolean externalClient, Subspace subspace, List<String> testsToRun) {
		this.outputDirectory = outputDirectory;
		this.multiversionApi = multiversionApi;
		this.callbacksOnExternalThread = callbacksOnExternalThread;
		this.externalClient = externalClient;
		this.subspace = subspace;
		this.testsToRun = testsToRun;
	}

	public static void printUsage() {
		String usage = "Arguments: [-o/--output-directory DIR] [--disable-multiversion-api] [--enable-callbacks-on-external-threads] [--use-external-client] [--tests-to-run TEST [TEST ...]] [-h/--help]\n" +
				"\n" +
				"Arguments:\n" +
				" -o/--output-directory DIR Directory to store JSON output. If not set, the current directory is used.\n" +
				" --disable-multiversion-api Disables the multi-version client API\n" +
				" --enable-callbacks-on-external-threads Allows callbacks to be called on threads created by the client library.\n" +
				" --use-external-client Connect to the server using an external client.\n" +
				" --tests-to-run TEST [TEST ...] List of test names to run.\n" +
				" -h/--help Print this help message and then quit.\n";

		System.out.print(usage);
	}

	/**
	 * Parses the argument strings into a <code>TesterArgs</code> instance.
	 * This will return <code>null</code> if the args include an argument telling
	 * it to print the help message and it will throw an {@link IllegalArgumentException}
	 * if it can't parse the arguments.
	 *
	 * @param args command-line args
	 * @return built instance or <code>null</code>
	 * @throws IllegalArgumentException if the arguments can't be parsed
	 */
	public static TesterArgs parseArgs(String[] args) {
		String outputDirectory = "";
		boolean multiversionApi = true;
		boolean callbacksOnExternalThread = false;
		boolean externalClient = false;
		Subspace subspace = new Subspace();
		List<String> testsToRun = new ArrayList<String>();

		for (int i = 0; i < args.length; i++) {
			String arg = args[i];
			if (arg.equals("-o") || arg.equals("--output-directory")) {
				if (i + 1 < args.length) {
					outputDirectory = args[++i];
				} else {
					System.out.println("No output directory specified for argument " + arg + "\n");
					printUsage();
					throw new IllegalArgumentException("No output directory specified for argument " + arg);
				}
			} else if (arg.equals("--subspace")) {
				if (i + 1 < args.length) {
					subspace = new Subspace(Tuple.from(args[++i]));
				} else {
					System.out.println("No subspace specified for argument " + arg + "\n");
					printUsage();
					throw new IllegalArgumentException("No subspace specified for argument " + arg);
				}
			} else if (arg.equals("--disable-multiversion-api")) {
				multiversionApi = false;
			} else if (arg.equals("--enable-callbacks-on-external-threads")) {
				callbacksOnExternalThread = true;
			} else if (arg.equals("--use-external-client")) {
				externalClient = true;
			} else if (arg.equals("--tests-to-run")) {
				if (i + 1 < args.length && args[i + 1].charAt(0) != '-') {
					int j;
					for (j = i + 1; j < args.length && args[j].charAt(0) != '-'; j++) {
						testsToRun.add(args[j]);
					}
					i = j - 1;
				} else {
					System.out.println("No tests specified with argument " + arg + "\n");
					printUsage();
					throw new IllegalArgumentException("No tests specified with argument " + arg);
				}
			} else if (arg.equals("-h") || arg.equals("--help")) {
				printUsage();
				return null;
			} else {
				System.out.println("Unknown argument " + arg + "\n");
				printUsage();
				throw new IllegalArgumentException("Unknown argument " + arg);
			}
		}

		return new TesterArgs(outputDirectory, multiversionApi, callbacksOnExternalThread, externalClient, subspace, testsToRun);
	}

	// Accessors.

	public String getOutputDirectory() {
		return outputDirectory;
	}

	public boolean useMultiversionApi() {
		return multiversionApi;
	}

	public boolean putCallbacksOnExternalThread() {
		return callbacksOnExternalThread;
	}

	public boolean useExternalClient() {
		return externalClient;
	}

	public Subspace getSubspace() {
		return subspace;
	}

	public List<String> getTestsToRun() {
		return testsToRun;
	}
}
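
A usage sketch for the argument parser above; the flag spellings come from parseArgs itself and the test names from the PerformanceTester Tests enum, while the output path is made up for illustration:

public class TesterArgsUsageSketch {
	public static void main(String[] args) {
		String[] sample = {
			"-o", "/tmp/results",            // hypothetical output directory
			"--use-external-client",
			"--tests-to-run", "serial_get", "get_range"
		};
		TesterArgs parsed = TesterArgs.parseArgs(sample);
		if (parsed != null) { // null means --help was requested
			System.out.println("output dir: " + parsed.getOutputDirectory());
			System.out.println("tests: " + parsed.getTestsToRun());
			System.out.println("external client: " + parsed.useExternalClient());
		}
	}
}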
@@ -20,7 +20,6 @@

package com.apple.foundationdb.test;

import com.apple.foundationdb.Cluster;
import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;
import com.apple.foundationdb.TransactionContext;

@@ -31,9 +31,9 @@ import com.apple.foundationdb.tuple.Tuple;
import com.apple.foundationdb.tuple.Versionstamp;

public class VersionstampSmokeTest {
	public static void main(String[] args) {
		FDB fdb = FDB.selectAPIVersion(510);
		try(Database db = fdb.open()) {
			db.run(tr -> {
				tr.clear(Tuple.from("prefix").range());
				return null;

@@ -59,7 +59,7 @@ public class VersionstampSmokeTest {
			System.out.println(Versionstamp.complete(trVersion));
			assert v.equals(Versionstamp.complete(trVersion));
		}
	}

	private VersionstampSmokeTest() {}
}
@@ -26,13 +26,13 @@ import java.util.concurrent.atomic.AtomicInteger;
import com.apple.foundationdb.async.AsyncUtil;

public class WhileTrueTest {
	public static void main(String[] args) {
		// This should cause memory issues using the old implementation but not the new one.
		// Pro tip: Run with options -Xms16m -Xmx16m -XX:+HeapDumpOnOutOfMemoryError
		AtomicInteger count = new AtomicInteger(1000000);
		AsyncUtil.whileTrue(() -> CompletableFuture.completedFuture(count.decrementAndGet()).thenApplyAsync(c -> c > 0)).join();
		System.out.println("Final value: " + count.get());
	}

	private WhileTrueTest() {}
}
@@ -1,3 +0,0 @@
Complete documentation of the FoundationDB Node.js API can be found at [https://www.foundationdb.org/documentation/api-node.html](https://www.foundationdb.org/documentation/api-node.html).

These bindings require the FoundationDB client. The client can be obtained from [https://www.foundationdb.org/downloads/fdb-c/](https://www.foundationdb.org/downloads/fdb-c/).
@ -1,25 +0,0 @@
|
|||
{
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'fdblib',
|
||||
'sources': [ 'src/FdbV8Wrapper.cpp', 'src/Database.cpp', 'src/Transaction.cpp', 'src/Cluster.cpp', 'src/FdbError.cpp', 'src/FdbOptions.cpp', 'src/FdbOptions.g.cpp', 'src/FdbUtil.cpp' ],
|
||||
'include_dirs': ['../c'],
|
||||
'conditions': [
|
||||
['OS=="linux"', {
|
||||
'link_settings': { 'libraries': ['-lfdb_c', '-L../../../lib'] },
|
||||
}],
|
||||
['OS=="mac"', {
|
||||
'xcode_settings': {
|
||||
'MACOSX_DEPLOYMENT_TARGET': '10.7', # -mmacosx-version-min=10.7
|
||||
'OTHER_CFLAGS': ['-std=c++0x']
|
||||
},
|
||||
'link_settings': { 'libraries': ['-lfdb_c', '-L../../../lib'] },
|
||||
}],
|
||||
['OS=="win"', {
|
||||
'link_settings': { 'libraries': ['../../../bin/Release/fdb_c.lib'] },
|
||||
}],
|
||||
],
|
||||
'cflags': ['-std=c++0x'],
|
||||
}
|
||||
]
|
||||
}
|
|
@ -1,23 +0,0 @@
|
|||
{
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'fdblib',
|
||||
'sources': [ 'src/FdbV8Wrapper.cpp', 'src/Database.cpp', 'src/Transaction.cpp', 'src/Cluster.cpp', 'src/FdbError.cpp', 'src/FdbOptions.cpp', 'src/FdbOptions.g.cpp', 'src/FdbUtil.cpp' ],
|
||||
'conditions': [
|
||||
['OS=="linux"', {
|
||||
'link_settings': { 'libraries': ['-lfdb_c'] },
|
||||
}],
|
||||
['OS=="mac"', {
|
||||
'xcode_settings': { 'OTHER_CFLAGS': ['-std=c++0x'] },
|
||||
'include_dirs': ['/usr/local/include'],
|
||||
'link_settings': { 'libraries': ['-lfdb_c', '-L/usr/local/lib'] },
|
||||
}],
|
||||
['OS=="win"', {
|
||||
'link_settings': { 'libraries': ['<!(echo %FOUNDATIONDB_INSTALL_PATH%)\\lib\\foundationdb\\fdb_c.lib'] },
|
||||
'include_dirs': ['<!(echo %FOUNDATIONDB_INSTALL_PATH%)\\include'],
|
||||
}],
|
||||
],
|
||||
'cflags': ['-std=c++0x'],
|
||||
}
|
||||
]
|
||||
}
|
|
@ -1,28 +0,0 @@
|
|||
/*
|
||||
* FoundationDB Node.js API
|
||||
* Copyright (c) 2012 FoundationDB, LLC
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var fdb = require('bindings')('fdblib.node');
|
||||
|
||||
module.exports = fdb;
|
|
@ -1,157 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<Import Project="$(SolutionDir)versions.target" />
|
||||
<ItemGroup Label="ProjectConfigurations">
|
||||
<ProjectConfiguration Include="Debug|x64">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|x64">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
</ItemGroup>
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{9463CB25-DCA0-9D45-C46E-0A8E68EE7FAE}</ProjectGuid>
|
||||
<Keyword>Win32Proj</Keyword>
|
||||
<RootNamespace>fdblib</RootNamespace>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||
<PropertyGroup Label="Configuration">
|
||||
<ConfigurationType>DynamicLibrary</ConfigurationType>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||
<ImportGroup Label="ExtensionSettings" />
|
||||
<ImportGroup Label="PropertySheets">
|
||||
<Import Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" />
|
||||
</ImportGroup>
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup>
|
||||
<IgnoreImportLibrary>true</IgnoreImportLibrary>
|
||||
<IntDir>$(SystemDrive)\temp\msvcfdb\$(Platform)$(Configuration)\$(MSBuildProjectName)\</IntDir>
|
||||
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</LinkIncremental>
|
||||
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">false</LinkIncremental>
|
||||
<OutDir>$(SolutionDir)bin\$(Configuration)\</OutDir>
|
||||
<TargetName>$(ProjectName)</TargetName>
|
||||
<TargetPath>$(OutDir)$(ProjectName).node</TargetPath>
|
||||
<TargetExt>.node</TargetExt>
|
||||
</PropertyGroup>
|
||||
<ItemDefinitionGroup>
|
||||
<PreBuildEvent>
|
||||
<Command>"$(ProgramW6432)\nodejs\node_modules\npm\bin\node-gyp-bin\node-gyp.cmd" install --ensure -v --target=$(NodeVersion)</Command>
|
||||
</PreBuildEvent>
|
||||
<PreBuildEvent>
|
||||
<Message>Download and install node-gyp header and lib files</Message>
|
||||
</PreBuildEvent>
|
||||
<PostBuildEvent>
|
||||
<Command>
|
||||
mkdir modules
|
||||
mkdir "modules\$(NodeVersionDir)"
|
||||
copy "$(TargetPath)" "modules\$(NodeVersionDir)\fdblib.node"
|
||||
</Command>
|
||||
</PostBuildEvent>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<ClCompile>
|
||||
<AdditionalIncludeDirectories>$(USERPROFILE)\.node-gyp\$(NodeVersion)\src;$(USERPROFILE)\.node-gyp\$(NodeVersion)\deps\uv\include;$(USERPROFILE)\.node-gyp\$(NodeVersion)\deps\v8\include;..\c;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
|
||||
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
|
||||
<BufferSecurityCheck>true</BufferSecurityCheck>
|
||||
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
|
||||
<DisableSpecificWarnings>4251;%(DisableSpecificWarnings)</DisableSpecificWarnings>
|
||||
<ExceptionHandling>Sync</ExceptionHandling>
|
||||
<MinimalRebuild>false</MinimalRebuild>
|
||||
<OmitFramePointers>false</OmitFramePointers>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;BUILDING_V8_SHARED=1;BUILDING_UV_SHARED=1;DEBUG;_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
|
||||
<StringPooling>true</StringPooling>
|
||||
<SuppressStartupBanner>true</SuppressStartupBanner>
|
||||
<TreatWarningAsError>false</TreatWarningAsError>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<AdditionalDependencies>$(USERPROFILE)\.node-gyp\$(NodeVersion)\$(Platform)\node.lib;..\..\bin\$(Configuration)\fdb_c.lib;%(AdditionalDependencies)</AdditionalDependencies>
|
||||
<AllowIsolation>true</AllowIsolation>
|
||||
<DataExecutionPrevention>true</DataExecutionPrevention>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<OutputFile>$(OutDir)$(ProjectName).node</OutputFile>
|
||||
<RandomizedBaseAddress>true</RandomizedBaseAddress>
|
||||
<SuppressStartupBanner>true</SuppressStartupBanner>
|
||||
<TargetMachine>MachineX64</TargetMachine>
|
||||
</Link>
|
||||
<ResourceCompile>
|
||||
<AdditionalIncludeDirectories>$(USERPROFILE)\.node-gyp\$(NodeVersion)\src;$(USERPROFILE)\.node-gyp\$(NodeVersion)\deps\uv\include;$(USERPROFILE)\.node-gyp\$(NodeVersion)\deps\v8\include;..\..\c;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
|
||||
<PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;BUILDING_V8_SHARED=1;BUILDING_UV_SHARED=1;DEBUG;_DEBUG;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
</ResourceCompile>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<ClCompile>
|
||||
<AdditionalIncludeDirectories>$(USERPROFILE)\.node-gyp\$(NodeVersion)\src;$(USERPROFILE)\.node-gyp\$(NodeVersion)\deps\uv\include;$(USERPROFILE)\.node-gyp\$(NodeVersion)\deps\v8\include;..\c;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
|
||||
<AdditionalOptions>/MP %(AdditionalOptions)</AdditionalOptions>
|
||||
<BufferSecurityCheck>true</BufferSecurityCheck>
|
||||
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
|
||||
<DisableSpecificWarnings>4251;%(DisableSpecificWarnings)</DisableSpecificWarnings>
|
||||
<ExceptionHandling>false</ExceptionHandling>
|
||||
<FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<InlineFunctionExpansion>AnySuitable</InlineFunctionExpansion>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<OmitFramePointers>true</OmitFramePointers>
|
||||
<Optimization>Full</Optimization>
|
||||
<PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;BUILDING_V8_SHARED=1;BUILDING_UV_SHARED=1;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
|
||||
<RuntimeTypeInfo>false</RuntimeTypeInfo>
|
||||
<StringPooling>true</StringPooling>
|
||||
<SuppressStartupBanner>true</SuppressStartupBanner>
|
||||
<TreatWarningAsError>false</TreatWarningAsError>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
</ClCompile>
|
||||
<Lib>
|
||||
<AdditionalOptions>/LTCG %(AdditionalOptions)</AdditionalOptions>
|
||||
</Lib>
|
||||
<Link>
|
||||
<AdditionalDependencies>$(USERPROFILE)\.node-gyp\$(NodeVersion)\$(Platform)\node.lib;..\..\bin\$(Configuration)\fdb_c.lib;%(AdditionalDependencies)</AdditionalDependencies>
|
||||
<AllowIsolation>true</AllowIsolation>
|
||||
<DataExecutionPrevention>true</DataExecutionPrevention>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
<LinkTimeCodeGeneration>UseLinkTimeCodeGeneration</LinkTimeCodeGeneration>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<OutputFile>$(OutDir)$(ProjectName).node</OutputFile>
|
||||
<RandomizedBaseAddress>true</RandomizedBaseAddress>
|
||||
<SuppressStartupBanner>true</SuppressStartupBanner>
|
||||
<TargetMachine>MachineX64</TargetMachine>
|
||||
</Link>
|
||||
<ResourceCompile>
|
||||
<AdditionalIncludeDirectories>$(USERPROFILE)\.node-gyp\$(NodeVersion)\src;$(USERPROFILE)\.node-gyp\$(NodeVersion)\deps\uv\include;$(USERPROFILE)\.node-gyp\$(NodeVersion)\deps\v8\include;..\..\c;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
|
||||
<PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;BUILDING_V8_SHARED=1;BUILDING_UV_SHARED=1;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
</ResourceCompile>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="src\FdbOptions.g.cpp" />
|
||||
<ClCompile Include="src\Database.cpp" />
|
||||
<ClCompile Include="src\Transaction.cpp" />
|
||||
<ClCompile Include="src\FdbError.cpp" />
|
||||
<ClCompile Include="src\Cluster.cpp" />
|
||||
<ClCompile Include="src\FdbV8Wrapper.cpp" />
|
||||
<ClCompile Include="src\FdbOptions.cpp" />
|
||||
<ClCompile Include="src\FdbUtil.cpp" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ClInclude Include="src\Cluster.h" />
|
||||
<ClInclude Include="src\Database.h" />
|
||||
<ClInclude Include="src\FdbError.h" />
|
||||
<ClInclude Include="src\FdbOptions.h" />
|
||||
<ClInclude Include="src\FdbUtil.h" />
|
||||
<ClInclude Include="src\NodeCallback.h" />
|
||||
<ClInclude Include="src\Transaction.h" />
|
||||
<ClInclude Include="src\Version.h" />
|
||||
</ItemGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||
<ImportGroup Label="ExtensionTargets" />
|
||||
<Target Name="AfterClean">
|
||||
<Message Text="Cleaning $(NodeVersionDir)\fdblib module" Importance="high" />
|
||||
<Delete Files="modules\$(NodeVersionDir)\fdblib.node" />
|
||||
</Target>
|
||||
</Project>
|
|
@ -1,64 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<!-- Load version information from the central versions file, creating -PRERELEASE if needed -->
|
||||
<Import Project="$(SolutionDir)versions.target" />
|
||||
<PropertyGroup Condition="'$(Release)' != 'true' ">
|
||||
<PreReleaseDecoration>-PRERELEASE</PreReleaseDecoration>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Release)' == 'true' ">
|
||||
<PreReleaseDecoration>
|
||||
</PreReleaseDecoration>
|
||||
</PropertyGroup>
|
||||
<!-- Set up VS minimal configuation -->
|
||||
<ItemGroup Label="ProjectConfigurations">
|
||||
<ProjectConfiguration Include="Debug|x64">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|x64">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
</ItemGroup>
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{9463CB25-DCA0-9D45-C46E-0A8E68EE7FAE}</ProjectGuid>
|
||||
<Keyword>Win32Proj</Keyword>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||
<PropertyGroup Label="Configuration">
|
||||
<PlatformToolset>v140_xp</PlatformToolset>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||
<PropertyGroup>
|
||||
<IntDir>$(SystemDrive)\temp\msvcfdb\$(Platform)$(Configuration)\$(MSBuildProjectName)\</IntDir>
|
||||
</PropertyGroup>
|
||||
<!-- Meat of the work to create proper node output. -->
|
||||
<ItemDefinitionGroup>
|
||||
<!-- Clean up before a build -->
|
||||
<PreBuildEvent>
|
||||
<Command>del "$(SolutionDir)bin\$(Configuration)\fdb-node*.tar.gz"</Command>
|
||||
</PreBuildEvent>
|
||||
<PostBuildEvent>
|
||||
<!-- After build, create package.json from input -->
|
||||
<Command>
|
||||
c:\python27\python -c "print open(\"package.json.in\").read().replace(\"VERSION\",\"$(Version)$(PreReleaseDecoration)\")" > package.json
|
||||
copy ..\..\LICENSE
|
||||
c:\python27\python "$(SolutionDir)build/tarball.py" -r nodejs "$(SolutionDir)bin\$(Configuration)\fdb-node-$(Version)$(PreReleaseDecoration)-windows-$(Platform).tar.gz" lib modules package.json README.md LICENSE
|
||||
del LICENSE</Command>
|
||||
</PostBuildEvent>
|
||||
</ItemDefinitionGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||
<Target Name="AfterClean">
|
||||
<!-- Add output of which VS was not aware to "clean" step -->
|
||||
<ItemGroup>
|
||||
<FilesToDelete Include="$(ProjectDir)package.json">
|
||||
<Visible>false</Visible>
|
||||
</FilesToDelete>
|
||||
<FilesToDelete Include="$(SolutionDir)bin\$(Configuration)\fdb-node-$(Version)$(PreReleaseDecoration)-windows-$(Platform).tar.gz">
|
||||
<Visible>false</Visible>
|
||||
</FilesToDelete>
|
||||
</ItemGroup>
|
||||
<Message Text="Cleaning package.json and tar.gz file" Importance="high" />
|
||||
<Delete Files="@(FilesToDelete)" />
|
||||
</Target>
|
||||
</Project>
|
|
@ -1,14 +0,0 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{E22D4EF8-E75D-4281-93F9-A9F73936DE54}</ProjectGuid>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Label="Configuration">
|
||||
<PlatformToolset>v140_xp</PlatformToolset>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<NodeVersion>0.10.0</NodeVersion>
|
||||
<NodeVersionDir>0.10</NodeVersionDir>
|
||||
</PropertyGroup>
|
||||
<Import Project="fdb_node.target" />
|
||||
</Project>
|
|
@@ -1,14 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <PropertyGroup Label="Globals">
    <ProjectGuid>{E936E200-689E-49FD-8463-32FE763F1860}</ProjectGuid>
  </PropertyGroup>
  <PropertyGroup Label="Configuration">
    <PlatformToolset>v140_xp</PlatformToolset>
  </PropertyGroup>
  <PropertyGroup>
    <NodeVersion>0.8.22</NodeVersion>
    <NodeVersionDir>0.8</NodeVersionDir>
  </PropertyGroup>
  <Import Project="fdb_node.target" />
</Project>
@@ -1,99 +0,0 @@
#
# include.mk
#
# This source file is part of the FoundationDB open source project
#
# Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# -*- mode: makefile-gmake; -*-

TARGETS += fdb_node fdb_node_npm
CLEAN_TARGETS += fdb_node_clean fdb_node_npm_clean

NODE_VERSIONS := 0.8.22 0.10.0

NODE_DIST_URL ?= https://nodejs.org/dist
NODE_REGISTRY_URL ?= https://registry.npmjs.org/

ifeq ($(RELEASE),true)
NPMVER = $(VERSION)
else
NPMVER = $(VERSION)-PRERELEASE
endif

packages: fdb_node_npm

packages_clean: fdb_node_npm_clean

fdb_node: fdb_c bindings/nodejs/fdb_node.stamp

bindings/nodejs/fdb_node.stamp: bindings/nodejs/src/FdbOptions.g.cpp bindings/nodejs/src/*.cpp bindings/nodejs/src/*.h bindings/nodejs/binding.gyp lib/libfdb_c.$(DLEXT) bindings/nodejs/package.json
	@echo "Building $@"
	@rm -f $@
	@cd bindings/nodejs && \
	mkdir -p modules && \
	rm -rf modules/* && \
	for ver in $(NODE_VERSIONS); do \
		MMVER=`echo $$ver | sed -e 's,\., ,g' | awk '{print $$1 "." $$2}'` && \
		mkdir modules/$$MMVER && \
		node-gyp configure --dist-url=$(NODE_DIST_URL) --target=$$ver && \
		node-gyp -v build && \
		cp build/Release/fdblib.node modules/$${MMVER} ; \
	done
	@touch $@

bindings/nodejs/src/FdbOptions.g.cpp: bin/vexillographer.exe fdbclient/vexillographer/fdb.options
	@echo "Building $@"
	@$(MONO) bin/vexillographer.exe fdbclient/vexillographer/fdb.options nodejs $@

fdb_node_clean:
	@echo "Cleaning fdb_node"
	@rm -f bindings/nodejs/src/FdbOptions.g.cpp
	@rm -rf bindings/nodejs/modules
	@(cd bindings/nodejs && node-gyp clean)
	@rm -f bindings/nodejs/fdb_node.stamp

bindings/nodejs/package.json: bindings/nodejs/package.json.in $(ALL_MAKEFILES) versions.target
	@m4 -DVERSION=$(NPMVER) $< > $@
	@echo "Updating Node dependencies"
	@cd bindings/nodejs && \
	npm config set registry "$(NODE_REGISTRY_URL)" && \
	npm update

fdb_node_npm: fdb_node versions.target bindings/nodejs/README.md bindings/nodejs/lib/*.js bindings/nodejs/src/* bindings/nodejs/binding.gyp LICENSE
	@echo "Packaging NPM"
	@mkdir -p packages
	@rm -f packages/fdb-node-*
	@rm -rf packages/nodejs.tmp
	@mkdir -p packages/nodejs.tmp/nodejs
	@cp LICENSE packages/nodejs.tmp/nodejs/LICENSE
	@tar -C bindings -czf packages/fdb-node-$(NPMVER)-$(PLATFORM)-$(ARCH).tar.gz nodejs/lib nodejs/modules nodejs/package.json nodejs/README.md -C ../packages/nodejs.tmp nodejs/LICENSE
	@rm -rf packages/nodejs.tmp
ifeq ($(PLATFORM),linux)
	@echo "Packaging NPM (unbuilt)"
	@rm -rf packages/nodejs.tmp
	@mkdir -p packages/nodejs.tmp/npmsrc/nodejs
	@cat bindings/nodejs/package.json | grep -v private | grep -v engineStrict | awk '/"semver"/ {print " \"bindings\": \"*\""; next} {print}' > packages/nodejs.tmp/npmsrc/nodejs/package.json
	@cp -r bindings/nodejs/lib bindings/nodejs/src bindings/nodejs/README.md LICENSE packages/nodejs.tmp/npmsrc/nodejs
	@cp bindings/nodejs/binding.gyp.npmsrc packages/nodejs.tmp/npmsrc/nodejs/binding.gyp
	@cp bindings/nodejs/fdbModule.js.npmsrc packages/nodejs.tmp/npmsrc/nodejs/lib/fdbModule.js
	@tar -C packages/nodejs.tmp/npmsrc -czf packages/fdb-node-$(NPMVER).tar.gz nodejs
	@rm -rf packages/nodejs.tmp
endif

fdb_node_npm_clean:
	@echo "Cleaning NPM"
	@rm -f packages/fdb-node-* bindings/nodejs/package.json
@@ -1,26 +0,0 @@
/*
 * apiVersion.js
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

"use strict";

var apiVersion;
module.exports = { value: apiVersion };
@@ -1,86 +0,0 @@
/*
 * bufferConversion.js
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

"use strict";

var toBuffer = function(obj) {
	if(Buffer.isBuffer(obj))
		return obj;

	if(obj instanceof ArrayBuffer)
		obj = new Uint8Array(obj);

	if(obj instanceof Uint8Array) {
		var buf = new Buffer(obj.length);
		for(var i = 0; i < obj.length; ++i)
			buf[i] = obj[i];

		return buf;
	}

	if(typeof obj === 'string')
		return new Buffer(obj, 'utf8');

	throw new TypeError('toBuffer function expects a string, buffer, ArrayBuffer, or Uint8Array');
};

toBuffer.fromByteLiteral = function(str) {
	if(typeof str === 'string') {
		var buf = new Buffer(str.length);
		for(var i = 0; i < str.length; ++i) {
			if(str[i] > 255)
				throw new RangeError('fromByteLiteral string argument cannot have codepoints larger than 1 byte');
			buf[i] = str.charCodeAt(i);
		}
		return buf;
	}
	else
		throw new TypeError('fromByteLiteral function expects a string');
};

toBuffer.toByteLiteral = function(buf) {
	if(Buffer.isBuffer(buf))
		return String.fromCharCode.apply(null, buf);
	else
		throw new TypeError('toByteLiteral function expects a buffer');
};

toBuffer.printable = function(buf) {
	buf = toBuffer(buf);
	var out = '';
	for(var i = 0; i < buf.length; ++i) {
		if(buf[i] >= 32 && buf[i] < 127 && buf[i] !== 92)
			out += String.fromCharCode(buf[i]);
		else if(buf[i] === 92)
			out += '\\\\';
		else {
			var str = buf[i].toString(16);
			out += '\\x';
			if(str.length == 1)
				out += '0';
			out += str;
		}
	}

	return out;
};

module.exports = toBuffer;
@@ -1,38 +0,0 @@
/*
 * cluster.js
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

"use strict";

var future = require('./future');
var Database = require('./database');

var Cluster = function(_cluster) {
	this._cluster = _cluster;
	this.options = _cluster.options;

};

Cluster.prototype.openDatabase = function() {
	return new Database(this._cluster.openDatabase());
};

module.exports = Cluster;
@@ -1,172 +0,0 @@
/*
 * database.js
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

"use strict";

var Transaction = require('./transaction');
var future = require('./future');
var fdb = require('./fdbModule');
var fdbUtil = require('./fdbUtil');

var onError = function(tr, err, func, cb) {
	tr.onError(err, function(retryErr, retryRes) {
		if(retryErr)
			cb(retryErr, retryRes);
		else
			retryLoop(tr, func, cb);
	});
};

var retryLoop = function(tr, func, cb) {
	func(tr, function(err, res) {
		if(err) {
			onError(tr, err, func, cb);
		}
		else {
			tr.commit(function(commitErr, commitRes) {
				if(commitErr)
					onError(tr, commitErr, func, cb);
				else
					cb(commitErr, res);
			});
		}
	});
};

var atomic = function(db, op) {
	return function(key, value, cb) {
		return db.doTransaction(function(tr, innerCb) {
			fdb.atomic[op].call(tr.tr, fdbUtil.keyToBuffer(key), fdbUtil.valueToBuffer(value));
			innerCb();
		}, cb);
	};
};

var Database = function(_db) {
	this._db = _db;
	this.options = _db.options;

	for(var op in fdb.atomic)
		this[op] = atomic(this, op);
};

Database.prototype.createTransaction = function() {
	return new Transaction(this, this._db.createTransaction());
};

Database.prototype.doTransaction = function(func, cb) {
	var tr = this.createTransaction();

	return future.create(function(futureCb) {
		retryLoop(tr, func, futureCb);
	}, cb);
};

Database.prototype.get = function(key, cb) {
	return this.doTransaction(function(tr, innerCb) {
		tr.get(key, innerCb);
	}, cb);
};

Database.prototype.getKey = function(keySelector, cb) {
	return this.doTransaction(function(tr, innerCb) {
		tr.getKey(keySelector, innerCb);
	}, cb);
};

Database.prototype.getRange = function(start, end, options, cb) {
	return this.doTransaction(function(tr, innerCb) {
		tr.getRange(start, end, options).toArray(innerCb);
	}, cb);
};

Database.prototype.getRangeStartsWith = function(prefix, options, cb) {
	return this.doTransaction(function(tr, innerCb) {
		try {
			tr.getRangeStartsWith(prefix, options).toArray(innerCb);
		}
		catch(e) {
			innerCb(e);
		}
	}, cb);
};

Database.prototype.getAndWatch = function(key, cb) {
	return this.doTransaction(function(tr, innerCb) {
		tr.get(key, function(err, val) {
			if(err)
				innerCb(err);
			else
				innerCb(undefined, { value: val, watch: tr.watch(key) });
		});
	}, cb);
};

Database.prototype.setAndWatch = function(key, value, cb) {
	return this.doTransaction(function(tr, innerCb) {
		tr.set(key, value);
		var watchObj = tr.watch(key);
		innerCb(undefined, { watch: watchObj });
	}, cb);
};

Database.prototype.clearAndWatch = function(key, cb) {
	return this.doTransaction(function(tr, innerCb) {
		tr.clear(key);
		var watchObj = tr.watch(key);
		innerCb(undefined, { watch: watchObj });
	}, cb);
};

Database.prototype.set = function(key, value, cb) {
	return this.doTransaction(function(tr, innerCb) {
		tr.set(key, value);
		innerCb();
	}, cb);
};

Database.prototype.clear = function(key, cb) {
	return this.doTransaction(function(tr, innerCb) {
		tr.clear(key);
		innerCb();
	}, cb);
};

Database.prototype.clearRange = function(start, end, cb) {
	return this.doTransaction(function(tr, innerCb) {
		tr.clearRange(start, end);
		innerCb();
	}, cb);
};

Database.prototype.clearRangeStartsWith = function(prefix, cb) {
	return this.doTransaction(function(tr, innerCb) {
		try {
			tr.clearRangeStartsWith(prefix);
			innerCb();
		}
		catch(e) {
			innerCb(e);
		}
	}, cb);
};

module.exports = Database;
@ -1,850 +0,0 @@
|
|||
/*
|
||||
* directory.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
|
||||
var util = require('util');
|
||||
|
||||
var buffer = require('./bufferConversion');
|
||||
var future = require('./future');
|
||||
var transactional = require('./retryDecorator');
|
||||
var tuple = require('./tuple');
|
||||
var Subspace = require('./subspace');
|
||||
var fdbUtil = require('./fdbUtil');
|
||||
|
||||
/*************
|
||||
* Utilities *
|
||||
*************/
|
||||
|
||||
function whileLoop(func, cb) {
|
||||
return future.create(function(futureCb) {
|
||||
fdbUtil.whileLoop(function(f) {
|
||||
func()(f);
|
||||
}, futureCb);
|
||||
}, cb);
|
||||
}
|
||||
|
||||
function startsWith(str, prefix) {
|
||||
return str.length >= prefix.length && fdbUtil.buffersEqual(str.slice(0, prefix.length), prefix);
|
||||
}
|
||||
|
||||
function valueOrDefault(value, defaultValue) {
|
||||
if(typeof value === 'undefined')
|
||||
return defaultValue;
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function pathsEqual(path1, path2) {
|
||||
if(path1.length !== path2.length)
|
||||
return false;
|
||||
|
||||
for(var i = 0; i < path1.length; ++i)
|
||||
if(path1[i] !== path2[i])
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function tuplifyPath(path) {
|
||||
if(!(path instanceof Array))
|
||||
path = [path];
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
function checkLayer(layer, required) {
|
||||
if(layer && layer.length > 0 && !fdbUtil.buffersEqual(layer, required))
|
||||
throw new Error('The directory was created with an incompatible layer.');
|
||||
}
|
||||
|
||||
/***************************
|
||||
* HighContentionAllocator *
|
||||
***************************/
|
||||
|
||||
var HighContentionAllocator = function(subspace) {
|
||||
this.counters = subspace.subspace([0]);
|
||||
this.recent = subspace.subspace([1]);
|
||||
};
|
||||
|
||||
HighContentionAllocator.prototype.allocate = transactional(function(tr) {
|
||||
var self = this;
|
||||
var windowStart = 0;
|
||||
return whileLoop(function() {
|
||||
return tr.snapshot.getRange(self.counters.range().begin, self.counters.range().end, { limit: 1, reverse: true })
|
||||
.toArray()
|
||||
.then(function(arr) {
|
||||
if(arr.length > 0) {
|
||||
windowStart = self.counters.unpack(arr[0].key)[0];
|
||||
}
|
||||
})
|
||||
.then(function() {
|
||||
return self.chooseWindow(tr, windowStart);
|
||||
})
|
||||
.then(function(window) {
|
||||
return self.choosePrefix(tr, window);
|
||||
})
|
||||
.then(function(prefix) {
|
||||
if(prefix !== null) {
|
||||
prefix = tuple.pack([prefix]); // exit the loop
|
||||
return prefix;
|
||||
}
|
||||
});
|
||||
})
|
||||
});
|
||||
|
||||
HighContentionAllocator.prototype.chooseWindow = function(tr, windowStart) {
|
||||
var self = this;
|
||||
|
||||
var increment = new Buffer(8);
|
||||
increment.fill(0);
|
||||
increment.writeUInt32LE(1, 0);
|
||||
|
||||
var window = { start: windowStart, size: 0 };
|
||||
|
||||
return whileLoop(function() {
|
||||
// Cannot yield to event loop in this block {
|
||||
if(window.start > windowStart) {
|
||||
tr.clearRange(self.counters, self.counters.get(window.start));
|
||||
tr.options.setNextWriteNoWriteConflictRange();
|
||||
tr.clearRange(self.recent, self.recent.get(window.start));
|
||||
}
|
||||
|
||||
tr.add(self.counters.pack([window.start]), increment);
|
||||
return tr.snapshot.get(self.counters.get(window.start))
|
||||
// }
|
||||
.then(function(newCountBuffer) {
|
||||
var newCount = (newCountBuffer === null) ? 0 : newCountBuffer.readUInt32LE(0);
|
||||
window.size = windowSize(window.start);
|
||||
if(newCount * 2 < window.size) {
|
||||
return window; // exit the loop
|
||||
}
|
||||
|
||||
window.start += window.size;
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
HighContentionAllocator.prototype.choosePrefix = function(tr, window) {
|
||||
var self = this;
|
||||
|
||||
return whileLoop(function() {
|
||||
var candidate = Math.floor(Math.random() * window.size) + window.start;
|
||||
var allocationKey = self.recent.pack([candidate]);
|
||||
|
||||
// Cannot yield to event loop in this block {
|
||||
var counterRange = tr.snapshot.getRange(self.counters.range().begin, self.counters.range().end, { limit: 1, reverse: true }).toArray();
|
||||
var allocation = tr.get(allocationKey);
|
||||
tr.options.setNextWriteNoWriteConflictRange();
|
||||
tr.set(allocationKey, buffer(''));
|
||||
// }
|
||||
|
||||
return future.all([counterRange, allocation])
|
||||
.then(function(vals) {
|
||||
var currentWindowStart = vals[0].length > 0 ? self.counters.unpack(vals[0][0].key)[0] : 0;
|
||||
if(currentWindowStart > window.start) {
|
||||
return null; // exit the loop and force find() to retry
|
||||
}
|
||||
if(vals[1] === null) {
|
||||
tr.addWriteConflictKey(allocationKey);
|
||||
return candidate; // exit the loop
|
||||
}
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
function windowSize(start) {
|
||||
if(start < 255)
|
||||
return 64;
|
||||
if(start < 65535)
|
||||
return 1024;
|
||||
|
||||
return 8192;
|
||||
}
|
||||
|
||||
/******************
|
||||
* DirectoryLayer *
|
||||
******************/
|
||||
|
||||
var VERSION = [1, 0, 0];
|
||||
var SUBDIRS = 0;
|
||||
|
||||
var DirectoryLayer = function(options) {
|
||||
options = valueOrDefault(options, {});
|
||||
|
||||
this._nodeSubspace = valueOrDefault(options.nodeSubspace, new Subspace([], buffer.fromByteLiteral('\xfe')));
|
||||
|
||||
// If specified, new automatically allocated prefixes will all fall within the contentSubspace
|
||||
this._contentSubspace = valueOrDefault(options.contentSubspace, new Subspace());
|
||||
this._allowManualPrefixes = valueOrDefault(options.allowManualPrefixes, false);
|
||||
|
||||
this._rootNode = this._nodeSubspace.subspace([this._nodeSubspace.key()]);
|
||||
this._allocator = new HighContentionAllocator(this._rootNode.subspace([buffer('hca')]));
|
||||
|
||||
this._path = [];
|
||||
};
|
||||
|
||||
var createOrOpen = transactional(function(tr, self, path, options, allowCreate, allowOpen, cb) {
|
||||
options = valueOrDefault(options, {});
|
||||
var layer = valueOrDefault(options.layer, buffer(''));
|
||||
var prefix = options.prefix;
|
||||
|
||||
allowCreate = valueOrDefault(allowCreate, true);
|
||||
allowOpen = valueOrDefault(allowOpen, true);
|
||||
|
||||
return checkVersion(self, tr, false)
|
||||
.then(function() {
|
||||
if(typeof prefix !== 'undefined') {
|
||||
if(allowCreate && allowOpen)
|
||||
throw new Error('Cannot specify a prefix when calling create_or_open.');
|
||||
else if(!self._allowManualPrefixes) {
|
||||
if(self._path.length === 0)
|
||||
throw new Error('Cannot specify a prefix unless manual prefixes are enabled.');
|
||||
else
|
||||
throw new Error('Cannot specify a prefix in a partition.');
|
||||
}
|
||||
}
|
||||
|
||||
path = toUnicodePath(path);
|
||||
if(path.length === 0)
|
||||
throw new Error('The root directory cannot be opened.');
|
||||
|
||||
return find(self, tr, path).then(loadMetadata(tr));
|
||||
})
|
||||
.then(function(existingNode) {
|
||||
if(existingNode.exists()) {
|
||||
if(existingNode.isInPartition(false)) {
|
||||
var subpath = existingNode.getPartitionSubpath();
|
||||
var directoryLayer = existingNode.getContents(self)._directoryLayer;
|
||||
return createOrOpen(tr,
|
||||
existingNode.getContents(self)._directoryLayer,
|
||||
subpath,
|
||||
options,
|
||||
allowCreate,
|
||||
allowOpen);
|
||||
}
|
||||
|
||||
return openDirectory(tr, self, path, layer, existingNode, allowOpen);
|
||||
}
|
||||
else
|
||||
return createDirectory(tr, self, path, layer, prefix, allowCreate);
|
||||
})(cb);
|
||||
});
|
||||
|
||||
var openDirectory = function(tr, self, path, layer, existingNode, allowOpen) {
|
||||
if(!allowOpen)
|
||||
throw new Error('The directory already exists.');
|
||||
|
||||
checkLayer(layer, existingNode.layer);
|
||||
|
||||
return existingNode.getContents(self);
|
||||
};
|
||||
|
||||
var createDirectory = function(tr, self, path, layer, prefix, allowCreate) {
|
||||
if(!allowCreate)
|
||||
throw new Error('The directory does not exist.');
|
||||
|
||||
var prefixIsAllocated = typeof(prefix) === 'undefined';
|
||||
return checkVersion(self, tr, true)
|
||||
.then(function() {
|
||||
return getPrefix(self, tr, prefix);
|
||||
})
|
||||
.then(function(prefix) {
|
||||
return isPrefixFree(self, prefixIsAllocated ? tr.snapshot : tr, prefix)
|
||||
.then(function(isFree) {
|
||||
if(!isFree) {
|
||||
if(prefixIsAllocated)
|
||||
throw new Error('The directory layer has manually allocated prefixes that conflict with the automatic prefix allocator.');
|
||||
else
|
||||
throw new Error('The given prefix is already in use.');
|
||||
}
|
||||
|
||||
return getParentNode(self, tr, path);
|
||||
})
|
||||
.then(function(parentNode) {
|
||||
if(!parentNode)
|
||||
throw new Error('The parent directory doesn\'t exist.');
|
||||
|
||||
var node = nodeWithPrefix(self, prefix);
|
||||
tr.set(parentNode.subspace([SUBDIRS]).pack([path[path.length-1]]), prefix);
|
||||
tr.set(node.pack([buffer('layer')]), layer);
|
||||
|
||||
return contentsOfNode(self, node, path, layer);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
DirectoryLayer.prototype.getLayer = function() {
|
||||
return new Buffer(0);
|
||||
};
|
||||
|
||||
DirectoryLayer.prototype.getPath = function() {
|
||||
return this._path.slice(0);
|
||||
};
|
||||
|
||||
DirectoryLayer.prototype.createOrOpen = function(databaseOrTransaction, path, options, cb) {
|
||||
return createOrOpen(databaseOrTransaction, this, path, options, true, true, cb);
|
||||
};
|
||||
|
||||
DirectoryLayer.prototype.open = function(databaseOrTransaction, path, options, cb) {
|
||||
return createOrOpen(databaseOrTransaction, this, path, options, false, true, cb);
|
||||
};
|
||||
|
||||
DirectoryLayer.prototype.create = function(databaseOrTransaction, path, options, cb) {
|
||||
return createOrOpen(databaseOrTransaction, this, path, options, true, false, cb);
|
||||
};
|
||||
|
||||
DirectoryLayer.prototype.moveTo = function(databaseOrTransaction, newAbsolutePath, cb) {
|
||||
return future.reject(new Error('The root directory cannot be moved.'))(cb);
|
||||
};
|
||||
|
||||
DirectoryLayer.prototype.move = transactional(function(tr, oldPath, newPath, cb) {
|
||||
var self = this;
|
||||
var oldNode, newNode;
|
||||
|
||||
return checkVersion(self, tr, true)
|
||||
.then(function() {
|
||||
oldPath = toUnicodePath(oldPath);
|
||||
newPath = toUnicodePath(newPath);
|
||||
|
||||
if(pathsEqual(oldPath, newPath.slice(0, oldPath.length)))
|
||||
throw new Error('The destination directory cannot be a subdirectory of the source directory.');
|
||||
|
||||
var oldNodeFuture = find(self, tr, oldPath).then(loadMetadata(tr));
|
||||
var newNodeFuture = find(self, tr, newPath).then(loadMetadata(tr));
|
||||
return future.all([oldNodeFuture, newNodeFuture]);
|
||||
})
|
||||
.then(function(nodes) {
|
||||
oldNode = nodes[0];
|
||||
newNode = nodes[1];
|
||||
|
||||
if(!oldNode.exists())
|
||||
throw new Error('The source directory does not exist.');
|
||||
|
||||
if(oldNode.isInPartition(false) || newNode.isInPartition(false)) {
|
||||
if(!oldNode.isInPartition(false) || !newNode.isInPartition(false) || !pathsEqual(oldNode.path, newNode.path))
|
||||
throw new Error('Cannot move between partitions.');
|
||||
|
||||
return newNode.getContents(self).move(tr, oldNode.getPartitionSubpath(), newNode.getPartitionSubpath());
|
||||
}
|
||||
|
||||
if(newNode.exists())
|
||||
throw new Error('The destination directory already exists. Remove it first.');
|
||||
|
||||
return find(self, tr, newPath.slice(0, newPath.length-1))
|
||||
.then(function(parentNode) {
|
||||
if(!parentNode.exists())
|
||||
throw new Error('The parent of the destination directory does not exist. Create it first.');
|
||||
|
||||
tr.set(parentNode.subspace.subspace([SUBDIRS]).pack([newPath[newPath.length-1]]),
|
||||
self._nodeSubspace.unpack(oldNode.subspace.key())[0]);
|
||||
|
||||
return removeFromParent(self, tr, oldPath);
|
||||
})
|
||||
.then(function() {
|
||||
return contentsOfNode(self, oldNode.subspace, newPath, oldNode.layer);
|
||||
});
|
||||
})(cb);
|
||||
});
|
||||
|
||||
DirectoryLayer.prototype.remove = transactional(function(tr, path, cb) {
|
||||
return removeInternal(this, tr, path, true)(cb);
|
||||
});
|
||||
|
||||
DirectoryLayer.prototype.removeIfExists = transactional(function(tr, path, cb) {
|
||||
return removeInternal(this, tr, path, false)(cb);
|
||||
});
|
||||
|
||||
function removeInternal(self, tr, path, failOnNonexistent) {
|
||||
return checkVersion(self, tr, true)
|
||||
.then(function() {
|
||||
path = valueOrDefault(path, []);
|
||||
if(path.length === 0)
|
||||
return future.reject(new Error('The root directory cannot be removed.'));
|
||||
|
||||
path = toUnicodePath(path);
|
||||
return find(self, tr, path).then(loadMetadata(tr));
|
||||
})
|
||||
.then(function(node) {
|
||||
if(!node.exists()) {
|
||||
if(failOnNonexistent)
|
||||
throw new Error('The directory doesn\'t exist');
|
||||
else
|
||||
return false;
|
||||
}
|
||||
|
||||
if(node.isInPartition(false)) {
|
||||
return removeInternal(node.getContents(self)._directoryLayer,
|
||||
tr,
|
||||
node.getPartitionSubpath(),
|
||||
failOnNonexistent);
|
||||
}
|
||||
|
||||
return removeRecursive(self, tr, node.subspace)
|
||||
.then(function() {
|
||||
return removeFromParent(self, tr, path);
|
||||
}).
|
||||
then(function() {
|
||||
return true;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
DirectoryLayer.prototype.list = transactional(function(tr, path, cb) {
|
||||
var self = this;
|
||||
return checkVersion(self, tr, false)
|
||||
.then(function() {
|
||||
path = valueOrDefault(path, []);
|
||||
path = toUnicodePath(path);
|
||||
|
||||
return find(self, tr, path).then(loadMetadata(tr));
|
||||
})
|
||||
.then(function(node) {
|
||||
if(!node.exists())
|
||||
throw new Error('The given directory does not exist');
|
||||
|
||||
if(node.isInPartition(true))
|
||||
return node.getContents(self).list(tr, node.getPartitionSubpath());
|
||||
|
||||
var subdir = node.subspace.subspace([SUBDIRS]);
|
||||
|
||||
return tr.getRange(subdir.range().begin, subdir.range().end).toArray()
|
||||
.then(function(arr) {
|
||||
return arr.map(function(kv) { return subdir.unpack(kv.key)[0].toString('utf8'); });
|
||||
});
|
||||
})(cb);
|
||||
});
|
||||
|
||||
DirectoryLayer.prototype.exists = transactional(function(tr, path, cb) {
|
||||
var self = this;
|
||||
return checkVersion(self, tr, false)
|
||||
.then(function() {
|
||||
path = valueOrDefault(path, []);
|
||||
path = toUnicodePath(path);
|
||||
return find(self, tr, path).then(loadMetadata(tr));
|
||||
})
|
||||
.then(function(node) {
|
||||
if(!node.exists())
|
||||
return false;
|
||||
|
||||
if(node.isInPartition(false))
|
||||
return node.getContents(self).exists(tr, node.getPartitionSubpath());
|
||||
|
||||
return true;
|
||||
})(cb);
|
||||
});
|
||||
|
||||
// Private functions:
|
||||
|
||||
function checkVersion(self, tr, writeAccess) {
|
||||
return tr.get(self._rootNode.pack([buffer('version')]))
|
||||
.then(function(versionBuf) {
|
||||
if(!versionBuf) {
|
||||
if(writeAccess)
|
||||
initializeDirectory(self, tr);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
var version = [];
|
||||
for(var i = 0; i < 3; ++i)
|
||||
version.push(versionBuf.readInt32LE(4*i));
|
||||
|
||||
var dirVersion = util.format('%d.%d.%d', version[0], version[1], version[2]);
|
||||
var layerVersion = util.format('%d.%d.%d', VERSION[0], VERSION[1], VERSION[2]);
|
||||
|
||||
if(version[0] > VERSION[0]) {
|
||||
throw new Error(util.format('Cannot load directory with version %s using directory layer %s',
|
||||
dirVersion,
|
||||
layerVersion));
|
||||
}
|
||||
|
||||
if(version[1] > VERSION[1]) {
|
||||
throw new Error(util.format('Directory with version %s is read-only when opened using directory layer %s',
|
||||
dirVersion,
|
||||
layerVersion));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function initializeDirectory(self, tr) {
|
||||
var versionBuf = new Buffer(12);
|
||||
for(var i = 0; i < 3; ++i)
|
||||
versionBuf.writeUInt32LE(VERSION[i], i*4);
|
||||
|
||||
tr.set(self._rootNode.pack([buffer('version')]), versionBuf);
|
||||
}
|
||||
|
||||
function nodeWithPrefix(self, prefix) {
|
||||
if(prefix === null)
|
||||
return null;
|
||||
|
||||
return self._nodeSubspace.subspace([prefix]);
|
||||
}
|
||||
|
||||
function find(self, tr, path) {
|
||||
var pathIndex = 0;
|
||||
var node = new Node(self._rootNode, [], path);
|
||||
|
||||
return whileLoop(function() {
|
||||
if(pathIndex === path.length)
|
||||
return future.resolve(node);
|
||||
|
||||
return tr.get(node.subspace.subspace([SUBDIRS]).pack([path[pathIndex++]]))
|
||||
.then(function(val) {
|
||||
node = new Node(nodeWithPrefix(self, val), path.slice(0, pathIndex), path);
|
||||
if(!node.exists())
|
||||
return node;
|
||||
return node.loadMetadata(tr)
|
||||
.then(function() {
|
||||
if(fdbUtil.buffersEqual(node.layer, buffer('partition')))
|
||||
return node;
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function contentsOfNode(self, node, path, layer) {
|
||||
var prefix = self._nodeSubspace.unpack(node.key())[0];
|
||||
|
||||
if(fdbUtil.buffersEqual(layer, buffer('partition')))
|
||||
return new DirectoryPartition(self._path.concat(path), prefix, self);
|
||||
else
|
||||
return new DirectorySubspace(self._path.concat(path), prefix, self, layer);
|
||||
}
|
||||
|
||||
function getPrefix(self, tr, prefix) {
|
||||
if(typeof prefix === 'undefined') {
|
||||
return self._allocator.allocate(tr)
|
||||
.then(function(prefix) {
|
||||
var allocated = Buffer.concat([self._contentSubspace.key(), prefix], self._contentSubspace.key().length + prefix.length);
|
||||
return tr.getRangeStartsWith(allocated, { limit: 1 })
|
||||
.toArray()
|
||||
.then(function(arr) {
|
||||
if(arr.length > 0)
|
||||
throw new Error('The database has keys stored at the prefix chosen by the automatic prefix allocator: ' + prefix);
|
||||
|
||||
return allocated;
|
||||
});
|
||||
});
|
||||
}
|
||||
else
|
||||
return future.resolve(buffer(prefix));
|
||||
}
|
||||
|
||||
function getNodeContainingKey(self, tr, key) {
|
||||
if(self._nodeSubspace.contains(key))
|
||||
return future.resolve(self._rootNode);
|
||||
|
||||
return tr.getRange(self._nodeSubspace.range([]).begin,
|
||||
self._nodeSubspace.subspace([key]).range().begin,
|
||||
{ limit: 1, reverse: true })
|
||||
.toArray()
|
||||
.then(function(arr) {
|
||||
if(arr.length > 0) {
|
||||
var prevPrefix = self._nodeSubspace.unpack(arr[0].key)[0];
|
||||
if(startsWith(key, prevPrefix))
|
||||
return nodeWithPrefix(self, prevPrefix);
|
||||
}
|
||||
|
||||
return null;
|
||||
});
|
||||
}
|
||||
|
||||
function isPrefixFree(self, tr, prefix) {
|
||||
if(!prefix || prefix.length === 0)
|
||||
return future.resolve(false);
|
||||
|
||||
return getNodeContainingKey(self, tr, prefix)
|
||||
.then(function(node) {
|
||||
if(node)
|
||||
return false;
|
||||
|
||||
return tr.getRange(self._nodeSubspace.pack([prefix]),
|
||||
self._nodeSubspace.pack([fdbUtil.strinc(prefix)]),
|
||||
{ limit: 1 })
|
||||
.toArray()
|
||||
.then(function(arr) {
|
||||
return arr.length === 0;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function getParentNode(self, tr, path) {
|
||||
if(path.length > 1) {
|
||||
return self.createOrOpen(tr, path.slice(0, path.length-1))
|
||||
.then(function(dir) {
|
||||
return nodeWithPrefix(self, dir.key());
|
||||
});
|
||||
}
|
||||
else
|
||||
return future.resolve(self._rootNode);
|
||||
}
|
||||
|
||||
function removeFromParent(self, tr, path) {
|
||||
return find(self, tr, path.slice(0, path.length-1))
|
||||
.then(function(parentNode) {
|
||||
tr.clear(parentNode.subspace.subspace([SUBDIRS]).pack([path[path.length-1]]));
|
||||
});
|
||||
}
|
||||
|
||||
function removeRecursive(self, tr, node) {
|
||||
var subdir = node.subspace([SUBDIRS]);
|
||||
return tr.getRange(subdir.range().begin, subdir.range().end)
|
||||
.forEach(function(kv, loopCb) {
|
||||
removeRecursive(self, tr, nodeWithPrefix(self, kv.value))(loopCb);
|
||||
})
|
||||
.then(function() {
|
||||
tr.clearRangeStartsWith(self._nodeSubspace.unpack(node.key())[0]);
|
||||
tr.clearRange(node.range().begin, node.range().end);
|
||||
});
|
||||
}
|
||||
|
||||
function toUnicodePath(path) {
|
||||
if(Buffer.isBuffer(path) || path instanceof ArrayBuffer || path instanceof Uint8Array)
|
||||
path = buffer(path).toString('utf8');
|
||||
|
||||
if(typeof path === 'string')
|
||||
return [path];
|
||||
|
||||
if(path instanceof Array) {
|
||||
for(var i = 0; i < path.length; ++i) {
|
||||
if(Buffer.isBuffer(path[i]) || path[i] instanceof ArrayBuffer || path[i] instanceof Uint8Array)
|
||||
path[i] = buffer(path[i]).toString('utf8');
|
||||
if(typeof path[i] !== 'string')
|
||||
throw new TypeError('Invalid path: must be a string, Buffer, ArrayBuffer, Uint8Array, or an array of such items');
|
||||
}
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
throw new TypeError('Invalid path: must be a string, Buffer, ArrayBuffer, Uint8Array, or an array of such items');
|
||||
}
|
||||
|
||||
/*********************
|
||||
* DirectorySubspace *
|
||||
*********************/
|
||||
|
||||
var DirectorySubspace = function(path, prefix, directoryLayer, layer) {
|
||||
Subspace.call(this, undefined, prefix);
|
||||
this._path = path;
|
||||
this._directoryLayer = directoryLayer;
|
||||
this._layer = layer;
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype = new Subspace();
|
||||
DirectorySubspace.constructor = DirectorySubspace;
|
||||
|
||||
DirectorySubspace.prototype.getLayer = function() {
|
||||
return this._layer;
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype.getPath = function() {
|
||||
return this._path.slice(0);
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype.createOrOpen = function(databaseOrTransaction, nameOrPath, options, cb) {
|
||||
var path = tuplifyPath(nameOrPath);
|
||||
return this._directoryLayer.createOrOpen(databaseOrTransaction, partitionSubpath(this, path), options, cb);
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype.open = function(databaseOrTransaction, nameOrPath, options, cb) {
|
||||
var path = tuplifyPath(nameOrPath);
|
||||
return this._directoryLayer.open(databaseOrTransaction, partitionSubpath(this, path), options, cb);
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype.create = function(databaseOrTransaction, nameOrPath, options, cb) {
|
||||
var path = tuplifyPath(nameOrPath);
|
||||
return this._directoryLayer.create(databaseOrTransaction, partitionSubpath(this, path), options, cb);
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype.list = function(databaseOrTransaction, nameOrPath, cb) {
|
||||
var path = tuplifyPath(valueOrDefault(nameOrPath, []));
|
||||
return this._directoryLayer.list(databaseOrTransaction, partitionSubpath(this, path), cb);
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype.move = function(databaseOrTransaction, oldNameOrPath, newNameOrPath, cb) {
|
||||
var oldPath = tuplifyPath(oldNameOrPath);
|
||||
var newPath = tuplifyPath(newNameOrPath);
|
||||
return this._directoryLayer.move(databaseOrTransaction,
|
||||
partitionSubpath(this, oldPath),
|
||||
partitionSubpath(this, newPath),
|
||||
cb);
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype.moveTo = function(databaseOrTransaction, newAbsoluteNameOrPath, cb) {
|
||||
var directoryLayer;
|
||||
var newAbsolutePath;
|
||||
try {
|
||||
directoryLayer = getLayerForPath(this, []);
|
||||
newAbsolutePath = toUnicodePath(newAbsoluteNameOrPath);
|
||||
var partitionPath = newAbsolutePath.slice(0, directoryLayer._path.length);
|
||||
if(!pathsEqual(partitionPath, directoryLayer._path))
|
||||
throw new Error('Cannot move between partitions.');
|
||||
}
|
||||
catch(err) {
|
||||
return future.reject(err)(cb);
|
||||
}
|
||||
|
||||
return directoryLayer.move(databaseOrTransaction,
|
||||
this._path.slice(directoryLayer._path.length),
|
||||
newAbsolutePath.slice(directoryLayer._path.length),
|
||||
cb);
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype.remove = function(databaseOrTransaction, nameOrPath, cb) {
|
||||
var path = tuplifyPath(valueOrDefault(nameOrPath, []));
|
||||
var directoryLayer = getLayerForPath(this, path);
|
||||
return directoryLayer.remove(databaseOrTransaction, partitionSubpath(this, path, directoryLayer), cb);
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype.removeIfExists = function(databaseOrTransaction, nameOrPath, cb) {
|
||||
var path = tuplifyPath(valueOrDefault(nameOrPath, []));
|
||||
var directoryLayer = getLayerForPath(this, path);
|
||||
return directoryLayer.removeIfExists(databaseOrTransaction, partitionSubpath(this, path, directoryLayer), cb);
|
||||
};
|
||||
|
||||
DirectorySubspace.prototype.exists = function(databaseOrTransaction, nameOrPath, cb) {
|
||||
var path = tuplifyPath(valueOrDefault(nameOrPath, []));
|
||||
var directoryLayer = getLayerForPath(this, path);
|
||||
return directoryLayer.exists(databaseOrTransaction, partitionSubpath(this, path, directoryLayer), cb);
|
||||
};
|
||||
|
||||
var partitionSubpath = function(directorySubspace, path, directoryLayer) {
|
||||
directoryLayer = valueOrDefault(directoryLayer, directorySubspace._directoryLayer);
|
||||
return directorySubspace._path.slice(directoryLayer._path.length).concat(path);
|
||||
};
|
||||
|
||||
/**********************
|
||||
* DirectoryPartition *
|
||||
**********************/
|
||||
|
||||
var DirectoryPartition = function(path, prefix, parentDirectoryLayer) {
|
||||
var directoryLayer = new DirectoryLayer({
|
||||
nodeSubspace: new Subspace(undefined, Buffer.concat([prefix, buffer.fromByteLiteral('\xfe')], prefix.length+1)),
|
||||
contentSubspace: new Subspace(undefined, prefix)
|
||||
});
|
||||
|
||||
directoryLayer._path = path;
|
||||
DirectorySubspace.call(this, path, prefix, directoryLayer, buffer('partition'));
|
||||
this._parentDirectoryLayer = parentDirectoryLayer;
|
||||
};
|
||||
|
||||
DirectoryPartition.prototype = new DirectorySubspace();
|
||||
DirectoryPartition.constructor = DirectoryPartition;
|
||||
|
||||
DirectoryPartition.prototype.key = function() {
|
||||
throw new Error('Cannot get key for the root of a directory partition.');
|
||||
};
|
||||
|
||||
DirectoryPartition.prototype.pack = function(arr) {
|
||||
throw new Error('Cannot pack keys using the root of a directory partition.');
|
||||
};
|
||||
|
||||
DirectoryPartition.prototype.unpack = function(arr) {
|
||||
throw new Error('Cannot unpack keys using the root of a directory partition.');
|
||||
};
|
||||
|
||||
DirectoryPartition.prototype.range = function(arr) {
|
||||
throw new Error('Cannot get range for the root of a directory partition.');
|
||||
};
|
||||
|
||||
DirectoryPartition.prototype.contains = function(key) {
|
||||
throw new Error('Cannot check whether a key belongs to the root of a directory partition.');
|
||||
};
|
||||
|
||||
DirectoryPartition.prototype.get = function(name) {
|
||||
throw new Error('Cannot open subspace in the root of a directory partition.');
|
||||
};
|
||||
|
||||
DirectoryPartition.prototype.subspace = function(arr) {
|
||||
throw new Error('Cannot open subspace in the root of a directory partition.');
|
||||
};
|
||||
|
||||
DirectoryPartition.prototype.asFoundationDBKey = function() {
|
||||
throw new Error('Cannot use the root of a directory partition as a key.');
|
||||
};
|
||||
|
||||
var getLayerForPath = function(directorySubspace, path) {
|
||||
if(directorySubspace instanceof DirectoryPartition && path.length === 0)
|
||||
return directorySubspace._parentDirectoryLayer;
|
||||
else
|
||||
return directorySubspace._directoryLayer;
|
||||
};
|
||||
|
||||
/********
|
||||
* Node *
|
||||
********/
|
||||
|
||||
var Node = function(subspace, path, targetPath) {
|
||||
this.subspace = subspace;
|
||||
this.path = path;
|
||||
this.targetPath = targetPath;
|
||||
};
|
||||
|
||||
Node.prototype.exists = function() {
|
||||
return typeof(this.subspace) !== 'undefined' && this.subspace !== null;
|
||||
};
|
||||
|
||||
Node.prototype.loadMetadata = function(tr) {
|
||||
var self = this;
|
||||
if(!self.exists()) {
|
||||
self.loadedMetadata = true;
|
||||
return future.resolve(self);
|
||||
}
|
||||
|
||||
return tr.get(self.subspace.pack([buffer('layer')]))
|
||||
.then(function(layer) {
|
||||
self.loadedMetadata = true;
|
||||
self.layer = layer;
|
||||
return self;
|
||||
});
|
||||
};
|
||||
|
||||
Node.prototype.ensureMetadataLoaded = function() {
|
||||
if(!this.loadedMetadata)
|
||||
throw new Error('Metadata for node has not been loaded');
|
||||
};
|
||||
|
||||
Node.prototype.isInPartition = function(includeEmptySubpath) {
|
||||
this.ensureMetadataLoaded();
|
||||
return this.exists() &&
|
||||
fdbUtil.buffersEqual(this.layer, buffer('partition')) &&
|
||||
(includeEmptySubpath || this.targetPath.length > this.path.length);
|
||||
};
|
||||
|
||||
Node.prototype.getPartitionSubpath = function() {
|
||||
this.ensureMetadataLoaded();
|
||||
return this.targetPath.slice(this.path.length);
|
||||
};
|
||||
|
||||
Node.prototype.getContents = function(directoryLayer) {
|
||||
this.ensureMetadataLoaded();
|
||||
return contentsOfNode(directoryLayer, this.subspace, this.path, this.layer);
|
||||
};
|
||||
|
||||
var loadMetadata = function(tr) {
|
||||
return function(node) {
|
||||
return node.loadMetadata(tr);
|
||||
};
|
||||
};
|
||||
|
||||
module.exports = { directory: new DirectoryLayer(), DirectoryLayer: DirectoryLayer };
|
|
@ -1,34 +0,0 @@
|
|||
/*
|
||||
* error.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
|
||||
function FDBError(description, code) {
|
||||
Error.captureStackTrace(this, this.constructor);
|
||||
this.message = description;
|
||||
this.code = code;
|
||||
}
|
||||
|
||||
FDBError.prototype = new Error();
|
||||
FDBError.constructor = FDBError;
|
||||
FDBError.prototype.name = "FDBError"; // affects error messages, also needed for compatibility with older bindings
|
||||
|
||||
module.exports = FDBError;
|
|
@ -1,115 +0,0 @@
|
|||
/*
|
||||
* fdb.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
|
||||
var KeySelector = require('./keySelector');
|
||||
var Cluster = require('./cluster');
|
||||
var future = require('./future');
|
||||
var Transactional = require('./retryDecorator');
|
||||
var tuple = require('./tuple');
|
||||
var buffer = require('./bufferConversion');
|
||||
var fdb = require('./fdbModule');
|
||||
var FDBError = require('./error');
|
||||
var locality = require('./locality');
|
||||
var directory = require('./directory');
|
||||
var Subspace = require('./subspace');
|
||||
var selectedApiVersion = require('./apiVersion');
|
||||
|
||||
var fdbModule = {};
|
||||
|
||||
module.exports = {
|
||||
FDBError: FDBError,
|
||||
apiVersion: function(version) {
|
||||
if(selectedApiVersion.value && version !== selectedApiVersion.value)
|
||||
throw new Error('Cannot select multiple different FDB API versions');
|
||||
if(version < 500)
|
||||
throw new RangeError('FDB API versions before 500 are not supported');
|
||||
if(version > 510)
|
||||
throw new RangeError('Latest known FDB API version is 510');
|
||||
|
||||
if(!selectedApiVersion.value) {
|
||||
fdb.apiVersion(version);
|
||||
|
||||
fdbModule.FDBError = this.FDBError;
|
||||
fdbModule.KeySelector = KeySelector;
|
||||
fdbModule.future = future;
|
||||
fdbModule.transactional = Transactional;
|
||||
fdbModule.tuple = tuple;
|
||||
fdbModule.buffer = buffer;
|
||||
fdbModule.locality = locality;
|
||||
fdbModule.directory = directory.directory;
|
||||
fdbModule.DirectoryLayer = directory.DirectoryLayer;
|
||||
fdbModule.Subspace = Subspace;
|
||||
|
||||
fdbModule.options = fdb.options;
|
||||
fdbModule.streamingMode = fdb.streamingMode;
|
||||
|
||||
var dbCache = {};
|
||||
|
||||
var doInit = function() {
|
||||
fdb.startNetwork();
|
||||
|
||||
process.on('exit', function() {
|
||||
//Clearing out the caches makes memory debugging a little easier
|
||||
dbCache = null;
|
||||
|
||||
fdb.stopNetwork();
|
||||
});
|
||||
|
||||
//Subsequent calls do nothing
|
||||
doInit = function() { };
|
||||
};
|
||||
|
||||
fdbModule.init = function() {
|
||||
doInit();
|
||||
};
|
||||
|
||||
fdbModule.createCluster = function(clusterFile, cb) {
|
||||
if(!clusterFile)
|
||||
clusterFile = '';
|
||||
|
||||
return new Cluster(fdb.createCluster(clusterFile));
|
||||
};
|
||||
|
||||
fdbModule.open = function(clusterFile, cb) {
|
||||
if(clusterFile)
|
||||
fdb.options.setClusterFile(clusterFile);
|
||||
|
||||
this.init();
|
||||
|
||||
var database = dbCache[clusterFile];
|
||||
if(!database) {
|
||||
var cluster = fdbModule.createCluster(clusterFile);
|
||||
database = cluster.openDatabase();
|
||||
dbCache[clusterFile] = database;
|
||||
}
|
||||
|
||||
return database;
|
||||
};
|
||||
}
|
||||
|
||||
selectedApiVersion.value = version;
|
||||
return fdbModule;
|
||||
}
|
||||
};
|
||||
|
||||
fdb.FDBError = module.exports.FDBError;
|
|
@ -1,33 +0,0 @@
|
|||
/*
|
||||
* fdbModule.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
|
||||
var semver = require('semver');
|
||||
|
||||
if(semver.satisfies(process.version, '0.10.x'))
|
||||
var fdb = require('../modules/0.10/fdblib');
|
||||
else if(semver.satisfies(process.version, '0.8.x'))
|
||||
var fdb = require('../modules/0.8/fdblib');
|
||||
else
|
||||
throw new Error('FoundationDB binary NPM does not support Node.js ' + process.version + '; only v0.8.x and v0.10.x are supported');
|
||||
|
||||
module.exports = fdb;
|
|
@ -1,104 +0,0 @@
|
|||
/*
|
||||
* fdbUtil.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
|
||||
var buffer = require('./bufferConversion');
|
||||
var future = require('./future');
|
||||
|
||||
var strinc = function(str) {
|
||||
var buf = buffer(str);
|
||||
|
||||
var lastNonFFByte;
|
||||
for(lastNonFFByte = buf.length-1; lastNonFFByte >= 0; --lastNonFFByte)
|
||||
if(buf[lastNonFFByte] != 0xFF)
|
||||
break;
|
||||
|
||||
if(lastNonFFByte < 0)
|
||||
throw new Error('invalid argument \'' + str + '\': prefix must have at least one byte not equal to 0xFF');
|
||||
|
||||
var copy = new Buffer(lastNonFFByte + 1);
|
||||
str.copy(copy, 0, 0, copy.length);
|
||||
++copy[lastNonFFByte];
|
||||
|
||||
return copy;
|
||||
};
|
||||
|
||||
var whileLoop = function(func, cb) {
|
||||
return future.create(function(futureCb) {
|
||||
var calledCallback = true;
|
||||
function outer(err, res) {
|
||||
if(err || typeof(res) !== 'undefined') {
|
||||
futureCb(err, res);
|
||||
}
|
||||
else if(!calledCallback) {
|
||||
calledCallback = true;
|
||||
}
|
||||
else {
|
||||
while(calledCallback) {
|
||||
calledCallback = false;
|
||||
func(outer);
|
||||
}
|
||||
|
||||
calledCallback = true;
|
||||
}
|
||||
}
|
||||
|
||||
outer();
|
||||
}, cb);
|
||||
};
|
||||
|
||||
var keyToBuffer = function(key) {
|
||||
if(typeof(key.asFoundationDBKey) == 'function')
|
||||
return buffer(key.asFoundationDBKey());
|
||||
|
||||
return buffer(key);
|
||||
};
|
||||
|
||||
var valueToBuffer = function(val) {
|
||||
if(typeof(val.asFoundationDBValue) == 'function')
|
||||
return buffer(val.asFoundationDBValue());
|
||||
|
||||
return buffer(val);
|
||||
};
|
||||
|
||||
var buffersEqual = function(buf1, buf2) {
|
||||
if(!buf1 || !buf2)
|
||||
return buf1 === buf2;
|
||||
|
||||
if(buf1.length !== buf2.length)
|
||||
return false;
|
||||
|
||||
for(var i = 0; i < buf1.length; ++i)
|
||||
if(buf1[i] !== buf2[i])
|
||||
return false;
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
strinc: strinc,
|
||||
whileLoop: whileLoop,
|
||||
keyToBuffer: keyToBuffer,
|
||||
valueToBuffer: valueToBuffer,
|
||||
buffersEqual: buffersEqual
|
||||
};
|
||||
|
|
@ -1,244 +0,0 @@
|
|||
/*
|
||||
* future.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
var semver = require('semver');
|
||||
|
||||
function isFunction(f) {
|
||||
return typeof(f) == 'function';
|
||||
}
|
||||
|
||||
function isObject(o) {
|
||||
return o === Object(o);
|
||||
}
|
||||
|
||||
var resolvePromise = function(promise, value) {
|
||||
var called = false;
|
||||
try {
|
||||
if(promise === value)
|
||||
promise._state.reject(new TypeError('promise.then cannot be fulfilled with itself as the argument.'));
|
||||
|
||||
if(isObject(value)) {
|
||||
var then = value.then;
|
||||
if(isFunction(then)) {
|
||||
then.call(value, function(res) {
|
||||
if(!called) {
|
||||
called = true;
|
||||
resolvePromise(promise, res, promise);
|
||||
}
|
||||
}, function(err) {
|
||||
if(!called) {
|
||||
called = true;
|
||||
promise._state.reject(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
else
|
||||
promise._state.fulfill(value);
|
||||
}
|
||||
else
|
||||
promise._state.fulfill(value);
|
||||
}
|
||||
catch(error) {
|
||||
if(!called)
|
||||
promise._state.reject(error);
|
||||
}
|
||||
};
|
||||
|
||||
var FuturePrototype = {
|
||||
cancel: function() {
|
||||
//cancel is not implemented for most futures
|
||||
},
|
||||
|
||||
then: function(onFulfilled, onRejected) {
|
||||
var self = this;
|
||||
var future = create();
|
||||
this._state.addCallback(function(err, res) {
|
||||
var setImmediateFunc;
|
||||
if(semver.satisfies(process.version, '>=0.10.0')) {
|
||||
setImmediateFunc = setImmediate;
|
||||
}
|
||||
else {
|
||||
setImmediateFunc = process.nextTick;
|
||||
}
|
||||
setImmediateFunc(function() {
|
||||
try {
|
||||
if(self._state.rejected) {
|
||||
if(isFunction(onRejected))
|
||||
res = onRejected(err);
|
||||
else {
|
||||
future._state.reject(err);
|
||||
return;
|
||||
}
|
||||
}
|
||||
else if(isFunction(onFulfilled))
|
||||
res = onFulfilled(res);
|
||||
|
||||
resolvePromise(future, res);
|
||||
}
|
||||
catch(error) {
|
||||
future._state.reject(error);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return future;
|
||||
},
|
||||
|
||||
"catch": function(onRejected) {
|
||||
this.then(undefined, onRejected);
|
||||
}
|
||||
};
|
||||
|
||||
FuturePrototype.__proto__ = Function.__proto__;
|
||||
|
||||
var FutureState = function() {
|
||||
this.callbacks = [];
|
||||
this.fulfilled = false;
|
||||
this.rejected = false;
|
||||
};
|
||||
|
||||
FutureState.prototype.triggerCallbacks = function() {
|
||||
for(var i = 0; i < this.callbacks.length; ++i)
|
||||
this.callbacks[i](this.error, this.value);
|
||||
|
||||
this.callbacks = [];
|
||||
};
|
||||
|
||||
FutureState.prototype.addCallback = function(cb) {
|
||||
if(!this.rejected && !this.fulfilled)
|
||||
this.callbacks.push(cb);
|
||||
else
|
||||
cb(this.error, this.value);
|
||||
};
|
||||
|
||||
FutureState.prototype.fulfill = function(value) {
|
||||
if(!this.fulfilled && !this.rejected) {
|
||||
this.fulfilled = true;
|
||||
this.value = value;
|
||||
this.triggerCallbacks();
|
||||
}
|
||||
};
|
||||
|
||||
FutureState.prototype.reject = function(reason) {
|
||||
if(!this.fulfilled && !this.rejected) {
|
||||
this.rejected = true;
|
||||
this.error = reason;
|
||||
this.triggerCallbacks();
|
||||
}
|
||||
};
|
||||
|
||||
var getFutureCallback = function(futureState) {
|
||||
return function(err, val) {
|
||||
if(err)
|
||||
futureState.reject(err);
|
||||
else
|
||||
futureState.fulfill(val);
|
||||
};
|
||||
};
|
||||
|
||||
var create = function(func, cb) {
|
||||
if(cb)
|
||||
func(cb);
|
||||
else {
|
||||
// This object is used to break a reference cycle with C++ objects
|
||||
var futureState = new FutureState();
|
||||
|
||||
var future = function(callback) {
|
||||
if(typeof callback === 'undefined')
|
||||
return future;
|
||||
|
||||
future.then(function(val) { callback(undefined, val); }, callback);
|
||||
};
|
||||
|
||||
future._state = futureState;
|
||||
future.__proto__ = FuturePrototype;
|
||||
|
||||
if(func)
|
||||
func.call(future, getFutureCallback(futureState));
|
||||
|
||||
return future;
|
||||
}
|
||||
};
|
||||
|
||||
var resolve = function(value) {
|
||||
var f = create();
|
||||
f._state.fulfill(value);
|
||||
return f;
|
||||
};
|
||||
|
||||
var reject = function(reason) {
|
||||
var f = create();
|
||||
f._state.reject(reason);
|
||||
return f;
|
||||
};
|
||||
|
||||
var all = function(futures) {
|
||||
var future = create(function(futureCb) {
|
||||
var count = futures.length;
|
||||
|
||||
if(count === 0)
|
||||
futureCb(undefined, []);
|
||||
|
||||
var successCallback = function() {
|
||||
if(--count === 0)
|
||||
futureCb(undefined, futures.map(function(f) { return f._state.value; }));
|
||||
};
|
||||
|
||||
for(var i = 0; i < futures.length; ++i) {
|
||||
if(futures[i] && isFunction(futures[i].then))
|
||||
futures[i].then(successCallback, futureCb);
|
||||
else
|
||||
successCallback();
|
||||
}
|
||||
});
|
||||
|
||||
return future;
|
||||
};
|
||||
|
||||
var race = function(futures) {
|
||||
var future = create(function(futureCb) {
|
||||
var successCallback = function(val) {
|
||||
futureCb(undefined, val);
|
||||
};
|
||||
|
||||
for(var i = 0; i < futures.length; ++i) {
|
||||
if(futures[i] && isFunction(futures[i].then))
|
||||
futures[i].then(successCallback, futureCb);
|
||||
else {
|
||||
futureCb(undefined, futures[i]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return future;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
FuturePrototype: FuturePrototype,
|
||||
create: create,
|
||||
resolve: resolve,
|
||||
reject: reject,
|
||||
all: all,
|
||||
race: race
|
||||
};
|
||||
|
|
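For context, a brief sketch of how the future module above was consumed: create() wraps a callback-style producer in a then-able future that can also be invoked directly with a node-style callback. The setTimeout producer is illustrative only.

var future = require('./future');

var f = future.create(function(futureCb) {
	setTimeout(function() { futureCb(undefined, 42); }, 10);
});

// Promises-style consumption...
f.then(function(val) { console.log('value:', val); },
       function(err) { console.error('error:', err); });

// ...or node-style, by calling the future with a callback.
f(function(err, val) { console.log(err, val); });

// Combinators exported above.
future.all([future.resolve(1), future.resolve(2)]).then(function(vals) {
	console.log(vals); // [ 1, 2 ]
});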
@@ -1,65 +0,0 @@
/*
 * keySelector.js
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


"use strict";

var fdbUtil = require('./fdbUtil');

var KeySelector = function(key, orEqual, offset) {
	this.key = fdbUtil.keyToBuffer(key);
	this.orEqual = orEqual;
	this.offset = offset;
};

KeySelector.prototype.next = function() {
	return this.add(1);
};

KeySelector.prototype.prev = function() {
	return this.add(-1);
};

KeySelector.prototype.add = function(addOffset) {
	return new KeySelector(this.key, this.orEqual, this.offset + addOffset);
};

KeySelector.isKeySelector = function(sel) {
	return sel instanceof KeySelector;
};

KeySelector.lastLessThan = function(key) {
	return new KeySelector(key, false, 0);
};

KeySelector.lastLessOrEqual = function(key) {
	return new KeySelector(key, true, 0);
};

KeySelector.firstGreaterThan = function(key) {
	return new KeySelector(key, true, 1);
};

KeySelector.firstGreaterOrEqual = function(key) {
	return new KeySelector(key, false, 1);
};

module.exports = KeySelector;
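A short sketch of how these selectors were combined with offsets; the keys are illustrative.

var KeySelector = require('./keySelector');

var begin = KeySelector.firstGreaterOrEqual('apple');
var end = KeySelector.firstGreaterThan('cherry');

// Selectors can be shifted, e.g. the key just after the last key < 'cherry'.
var afterLast = KeySelector.lastLessThan('cherry').next();
console.log(afterLast.offset); // 1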
@@ -1,182 +0,0 @@
|
|||
/*
|
||||
* lazyIterator.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
|
||||
var fdbUtil = require('./fdbUtil');
|
||||
var future = require('./future');
|
||||
|
||||
function fetch(state, cb) {
|
||||
if(cb)
|
||||
state.fetchCallbacks.push(cb);
|
||||
if(!state.fetching) {
|
||||
state.fetching = true;
|
||||
state.fetcher.fetch(function(err, res) {
|
||||
var cbs = state.fetchCallbacks;
|
||||
state.fetching = false;
|
||||
state.fetchCallbacks = [];
|
||||
state.results = res;
|
||||
state.index = -1;
|
||||
state.finished = !res || res.length === 0;
|
||||
|
||||
for(var i = 0; i < cbs.length; ++i)
|
||||
cbs[i](err);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function iterState(fetcher) {
|
||||
return {
|
||||
index: -1,
|
||||
results: undefined,
|
||||
|
||||
fetching: false,
|
||||
fetchCallbacks: [],
|
||||
fetcher: fetcher
|
||||
};
|
||||
}
|
||||
|
||||
var LazyIterator = function(Fetcher) {
|
||||
this.Fetcher = Fetcher;
|
||||
this.stateForNext = undefined;
|
||||
|
||||
this.startState = iterState(new Fetcher());
|
||||
|
||||
var startState = this.startState;
|
||||
fetch(this.startState);
|
||||
};
|
||||
|
||||
function copyState(state, wantAll) {
|
||||
var newState = iterState();
|
||||
newState.index = state.index;
|
||||
newState.results = state.results;
|
||||
newState.fetching = state.fetching;
|
||||
|
||||
if(state.fetching) {
|
||||
state.fetchCallbacks.push(function(err) {
|
||||
var cbs = newState.fetchCallbacks;
|
||||
newState.index = state.index;
|
||||
newState.results = state.results;
|
||||
newState.fetching = false;
|
||||
newState.fetchCallbacks = [];
|
||||
newState.finished = state.finished;
|
||||
newState.fetcher = state.fetcher.clone(wantAll);
|
||||
for(var i = 0; i < cbs.length; ++i)
|
||||
cbs[i](err);
|
||||
});
|
||||
}
|
||||
else {
|
||||
newState.fetcher = state.fetcher.clone(wantAll);
|
||||
}
|
||||
|
||||
return newState;
|
||||
}
|
||||
|
||||
function nextImpl(state, cb) {
|
||||
if(state.finished)
|
||||
cb();
|
||||
else if(state.results && (state.index + 1) < state.results.length)
|
||||
cb(null, state.results[++state.index]);
|
||||
else {
|
||||
fetch(state, function(err) {
|
||||
if(err)
|
||||
cb(err);
|
||||
else if(state.finished)
|
||||
cb();
|
||||
else
|
||||
nextImpl(state, cb);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
LazyIterator.prototype.next = function(cb) {
|
||||
var itr = this;
|
||||
return future.create(function(futureCb) {
|
||||
if(!itr.stateForNext)
|
||||
itr.stateForNext = copyState(itr.startState);
|
||||
|
||||
nextImpl(itr.stateForNext, futureCb);
|
||||
}, cb);
|
||||
};
|
||||
|
||||
LazyIterator.prototype.forEach = function(func, cb) {
|
||||
var itr = this;
|
||||
return future.create(function(futureCb) {
|
||||
var state = copyState(itr.startState);
|
||||
|
||||
fdbUtil.whileLoop(function(loopCb) {
|
||||
nextImpl(state, function(err, res) {
|
||||
if(err || !res)
|
||||
loopCb(err, null);
|
||||
else
|
||||
func(res, loopCb);
|
||||
});
|
||||
}, futureCb);
|
||||
|
||||
}, cb);
|
||||
};
|
||||
|
||||
function forEachBatchImpl(state, func, cb) {
|
||||
function loopBody(loopCb) {
|
||||
function processBatch(err) {
|
||||
if(err || state.finished)
|
||||
loopCb(err, null);
|
||||
else {
|
||||
state.index = state.results.length;
|
||||
func(state.results, loopCb);
|
||||
}
|
||||
}
|
||||
|
||||
if(!state.results || state.index === state.results.length)
|
||||
fetch(state, processBatch);
|
||||
else
|
||||
processBatch();
|
||||
}
|
||||
|
||||
fdbUtil.whileLoop(loopBody, cb);
|
||||
}
|
||||
|
||||
LazyIterator.prototype.forEachBatch = function(func, cb) {
|
||||
var itr = this;
|
||||
return future.create(function(futureCb) {
|
||||
forEachBatchImpl(copyState(itr.startState), func, futureCb);
|
||||
}, cb);
|
||||
};
|
||||
|
||||
LazyIterator.prototype.toArray = function(cb) {
|
||||
var itr = this;
|
||||
return future.create(function(futureCb) {
|
||||
var state = copyState(itr.startState, true);
|
||||
var result = [];
|
||||
|
||||
forEachBatchImpl(state, function(arr, itrCb) {
|
||||
result = result.concat(arr);
|
||||
itrCb();
|
||||
}, function(err, res) {
|
||||
if(err)
|
||||
futureCb(err);
|
||||
else
|
||||
futureCb(null, result);
|
||||
});
|
||||
}, cb);
|
||||
};
|
||||
|
||||
module.exports = LazyIterator;
|
|
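A usage sketch of the iterator above; `tr` is assumed to be a Transaction from this binding, whose getRange() returns a LazyIterator.

var itr = tr.getRange('a', 'z');

// Streaming consumption: the inner callback must be invoked to request the next item.
itr.forEach(function(kv, next) {
	console.log(kv.key.toString(), '=>', kv.value.toString());
	next();
}, function(err) {
	if(err) console.error(err);
});

// Or buffer everything at once, callback- or future-style.
itr.toArray(function(err, kvs) {
	if(!err) console.log('got', kvs.length, 'pairs');
});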
@@ -1,131 +0,0 @@
|
|||
/*
|
||||
* locality.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
|
||||
var buffer = require('./bufferConversion');
|
||||
var transactional = require('./retryDecorator');
|
||||
var Database = require('./database');
|
||||
var LazyIterator = require('./lazyIterator');
|
||||
var fdb = require('./fdbModule');
|
||||
var fdbUtil = require('./fdbUtil');
|
||||
|
||||
var KEY_SERVERS_PREFIX = buffer.fromByteLiteral('\xff/keyServers/');
|
||||
var PAST_VERSION_ERROR_CODE = 1007;
|
||||
|
||||
function getBoundaryKeysImpl(tr, begin, end, callback) {
|
||||
function BoundaryFetcher(wantAll) {
|
||||
this.tr = tr;
|
||||
this.begin = begin;
|
||||
this.end = end;
|
||||
this.lastBegin = undefined;
|
||||
|
||||
var fetcher = this;
|
||||
|
||||
if(wantAll)
|
||||
this.streamingMode = fdb.streamingMode.wantAll;
|
||||
else
|
||||
this.streamingMode = fdb.streamingMode.iterator;
|
||||
|
||||
function iteratorCb(err, res) {
|
||||
if(err) {
|
||||
if(err.code === PAST_VERSION_ERROR_CODE && fetcher.begin !== fetcher.lastBegin) {
|
||||
fetcher.tr = fetcher.tr.db.createTransaction();
|
||||
readKeys();
|
||||
}
|
||||
else {
|
||||
fetcher.tr.onError(err, function(e) {
|
||||
if(e)
|
||||
fetcher.fetchCb(e);
|
||||
else
|
||||
readKeys();
|
||||
});
|
||||
}
|
||||
}
|
||||
else
|
||||
fetcher.fetchCb();
|
||||
}
|
||||
|
||||
function readKeys() {
|
||||
fetcher.lastBegin = fetcher.begin;
|
||||
fetcher.tr.options.setReadSystemKeys();
|
||||
fetcher.tr.options.setLockAware();
|
||||
fetcher.tr.snapshot.getRange(fetcher.begin, fetcher.end, {streamingMode: fetcher.streamingMode}).forEachBatch(function(kvs, innerCb) {
|
||||
fetcher.forEachCb = innerCb;
|
||||
var keys = kvs.map(function(kv) { return kv.key.slice(13); });
|
||||
var last = kvs[kvs.length-1].key;
|
||||
fetcher.begin = Buffer.concat([last, buffer.fromByteLiteral('\x00')], last.length + 1);
|
||||
fetcher.fetchCb(undefined, keys);
|
||||
}, iteratorCb);
|
||||
|
||||
fetcher.streamingMode = fdb.streamingMode.wantAll;
|
||||
}
|
||||
|
||||
this.fetch = function(cb) {
|
||||
this.fetchCb = cb;
|
||||
if(this.read)
|
||||
this.forEachCb();
|
||||
else {
|
||||
this.read = true;
|
||||
readKeys();
|
||||
}
|
||||
};
|
||||
|
||||
this.clone = function(wantAll) {
|
||||
var clone = new BoundaryFetcher(wantAll);
|
||||
|
||||
clone.tr = this.tr.db.createTransaction();
|
||||
clone.begin = this.begin;
|
||||
clone.end = this.end;
|
||||
clone.lastBegin = this.lastBegin;
|
||||
|
||||
return clone;
|
||||
};
|
||||
}
|
||||
|
||||
callback(null, new LazyIterator(BoundaryFetcher));
|
||||
}
|
||||
|
||||
function getBoundaryKeys(databaseOrTransaction, begin, end, callback) {
|
||||
begin = fdbUtil.keyToBuffer(begin);
|
||||
end = fdbUtil.keyToBuffer(end);
|
||||
|
||||
begin = Buffer.concat([KEY_SERVERS_PREFIX, begin], KEY_SERVERS_PREFIX.length + begin.length);
|
||||
end = Buffer.concat([KEY_SERVERS_PREFIX, end], KEY_SERVERS_PREFIX.length + end.length);
|
||||
|
||||
if(databaseOrTransaction instanceof Database) {
|
||||
getBoundaryKeysImpl(databaseOrTransaction.createTransaction(), begin, end, callback);
|
||||
}
|
||||
else {
|
||||
var tr = databaseOrTransaction.db.createTransaction();
|
||||
databaseOrTransaction.getReadVersion(function(err, ver) {
|
||||
tr.setReadVersion(ver);
|
||||
getBoundaryKeysImpl(tr, begin, end, callback);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
var getAddressesForKey = transactional(function (tr, key, cb) {
|
||||
key = fdbUtil.keyToBuffer(key);
|
||||
tr.tr.getAddressesForKey(key, cb);
|
||||
});
|
||||
|
||||
module.exports = {getBoundaryKeys: getBoundaryKeys, getAddressesForKey: getAddressesForKey};
|
|
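A sketch of the locality helpers above; `db` is assumed to be an open Database from this binding.

var locality = require('./locality');

// Shard boundary keys between two keys, delivered as a LazyIterator.
locality.getBoundaryKeys(db, '', '\xff', function(err, itr) {
	if(err) return console.error(err);
	itr.forEach(function(boundaryKey, next) {
		console.log('shard begins at', boundaryKey.toString('hex'));
		next();
	}, function() {});
});

// Storage server addresses responsible for a single key.
locality.getAddressesForKey(db, 'some-key', function(err, addresses) {
	if(!err) console.log(addresses);
});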
@@ -1,106 +0,0 @@
|
|||
/*
|
||||
* rangeIterator.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
|
||||
var KeySelector = require('./keySelector');
|
||||
var Future = require('./future');
|
||||
var fdb = require('./fdbModule');
|
||||
var LazyIterator = require('./lazyIterator');
|
||||
|
||||
function getStreamingMode(requestedMode, limit, wantAll) {
|
||||
if(wantAll && requestedMode === fdb.streamingMode.iterator) {
|
||||
if(limit)
|
||||
return fdb.streamingMode.exact;
|
||||
else
|
||||
return fdb.streamingMode.wantAll;
|
||||
}
|
||||
|
||||
return requestedMode;
|
||||
}
|
||||
|
||||
module.exports = function(tr, start, end, options, snapshot) {
|
||||
if(!options)
|
||||
options = {};
|
||||
|
||||
if(!options.limit)
|
||||
options.limit = 0;
|
||||
if(!options.reverse)
|
||||
options.reverse = false;
|
||||
if(!options.streamingMode && options.streamingMode !== 0)
|
||||
options.streamingMode = fdb.streamingMode.iterator;
|
||||
|
||||
var RangeFetcher = function(wantAll) {
|
||||
this.finished = false;
|
||||
this.limit = options.limit;
|
||||
this.iterStart = start;
|
||||
this.iterEnd = end;
|
||||
this.iterationCount = 1;
|
||||
this.streamingMode = getStreamingMode(options.streamingMode, this.limit, wantAll);
|
||||
};
|
||||
|
||||
RangeFetcher.prototype.clone = function(wantAll) {
|
||||
var clone = new RangeFetcher(wantAll);
|
||||
|
||||
clone.finished = this.finished;
|
||||
clone.limit = this.limit;
|
||||
clone.iterStart = this.iterStart;
|
||||
clone.iterEnd = this.iterEnd;
|
||||
clone.iterationCount = this.iterationCount;
|
||||
|
||||
return clone;
|
||||
};
|
||||
|
||||
RangeFetcher.prototype.fetch = function(cb) {
|
||||
var fetcher = this;
|
||||
if(fetcher.finished) {
|
||||
cb();
|
||||
}
|
||||
else {
|
||||
tr.getRange(fetcher.iterStart.key, fetcher.iterStart.orEqual, fetcher.iterStart.offset, fetcher.iterEnd.key, fetcher.iterEnd.orEqual, fetcher.iterEnd.offset, fetcher.limit, fetcher.streamingMode, fetcher.iterationCount++, snapshot, options.reverse, function(err, res)
|
||||
{
|
||||
if(!err) {
|
||||
var results = res.array;
|
||||
if(results.length > 0) {
|
||||
if(!options.reverse)
|
||||
fetcher.iterStart = KeySelector.firstGreaterThan(results[results.length-1].key);
|
||||
else
|
||||
fetcher.iterEnd = KeySelector.firstGreaterOrEqual(results[results.length-1].key);
|
||||
}
|
||||
|
||||
if(fetcher.limit !== 0) {
|
||||
fetcher.limit -= results.length;
|
||||
if(fetcher.limit <= 0)
|
||||
fetcher.finished = true;
|
||||
}
|
||||
if(!res.more)
|
||||
fetcher.finished = true;
|
||||
cb(undefined, results);
|
||||
}
|
||||
else {
|
||||
cb(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
return new LazyIterator(RangeFetcher);
|
||||
};
|
|
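The options object consumed above defaults limit to 0 (unlimited), reverse to false, and streamingMode to iterator. A sketch of overriding them; `tr` is assumed to be a Transaction, and the streaming-mode constants come from the native fdbModule used throughout this file (in practice they are reached through the binding's public entry point).

var fdb = require('./fdbModule');

var itr = tr.getRange('a', 'z', {
	limit: 100,                              // stop after 100 key-value pairs
	reverse: true,                           // scan from the end of the range
	streamingMode: fdb.streamingMode.wantAll // fetch everything up front
});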
@@ -1,50 +0,0 @@
/*
 * retryDecorator.js
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


"use strict";

var Database = require('./database');
var Transaction = require('./transaction');

module.exports = function(func) {
	return function(db) {
		var self = this;

		if(db instanceof Database) {
			var cb = arguments[func.length - 1];

			if(typeof cb !== "undefined" && !(cb instanceof Function))
				throw new TypeError("fdb.transactional function must declare a callback function as last argument");
			else {
				var args = Array.prototype.slice.call(arguments);
				return db.doTransaction(function(tr, innerCb) {
					args[0] = tr;
					args[func.length - 1] = innerCb;
					func.apply(self, args);
				})(cb);
			}
		}
		else if(db instanceof Transaction || db instanceof Transaction.SnapshotTransaction)
			return func.apply(self, arguments);
		else
			throw new TypeError("fdb.transactional function must pass a Database, Transaction, or SnapshotTransaction as first argument");
	};
};
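The decorator above is what the binding exposes for writing retry-looped functions; a sketch of defining one. `db` is assumed to be an open Database, and Database.doTransaction is defined in database.js, outside this hunk.

var transactional = require('./retryDecorator');

// First parameter must be a transaction; last must be a callback.
var setAndGet = transactional(function(tr, key, value, cb) {
	tr.set(key, value);
	tr.get(key)(cb);
});

// Callable with a Database (runs the retry loop) or an existing Transaction.
setAndGet(db, 'hello', 'world', function(err, val) {
	if(!err) console.log(val.toString());
});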
@@ -1,85 +0,0 @@
/*
 * subspace.js
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


"use strict";

var buffer = require('./bufferConversion');
var fdbUtil = require('./fdbUtil');
var tuple = require('./tuple');

var Subspace = function(prefixArray, rawPrefix) {
	if(typeof rawPrefix === 'undefined')
		rawPrefix = new Buffer(0);
	if(typeof prefixArray === 'undefined')
		prefixArray = [];

	rawPrefix = fdbUtil.keyToBuffer(rawPrefix);
	var packed = tuple.pack(prefixArray);

	this.rawPrefix = Buffer.concat([rawPrefix, packed], rawPrefix.length + packed.length);
};

Subspace.prototype.key = function() {
	return this.rawPrefix;
};

Subspace.prototype.pack = function(arr) {
	var packed = tuple.pack(arr);
	return Buffer.concat([this.rawPrefix, packed], this.rawPrefix.length + packed.length);
};

Subspace.prototype.unpack = function(key) {
	key = fdbUtil.keyToBuffer(key);
	if(!this.contains(key))
		throw new Error('Cannot unpack key that is not in subspace.');

	return tuple.unpack(key.slice(this.rawPrefix.length));
};

Subspace.prototype.range = function(arr) {
	if(typeof arr === 'undefined')
		arr = [];

	var range = tuple.range(arr);
	return {
		begin: Buffer.concat([this.rawPrefix, range.begin], this.rawPrefix.length + range.begin.length),
		end: Buffer.concat([this.rawPrefix, range.end], this.rawPrefix.length + range.end.length)
	};
};

Subspace.prototype.contains = function(key) {
	key = fdbUtil.keyToBuffer(key);
	return key.length >= this.rawPrefix.length && fdbUtil.buffersEqual(key.slice(0, this.rawPrefix.length), this.rawPrefix);
};

Subspace.prototype.get = function(item) {
	return this.subspace([item]);
};

Subspace.prototype.subspace = function(arr) {
	return new Subspace(arr, this.rawPrefix);
};

Subspace.prototype.asFoundationDBKey = function() {
	return this.key();
};

module.exports = Subspace;
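A sketch of the subspace above in use: keys are namespaced by a packed tuple prefix and round-trip through pack/unpack.

var Subspace = require('./subspace');

var users = new Subspace(['users']);

var key = users.pack([1234, 'email']);
console.log(users.unpack(key));   // [ 1234, 'email' ]
console.log(users.contains(key)); // true

// Nested namespaces and a range covering everything under the prefix.
var settings = users.get('settings');
var r = users.range();            // { begin: ..., end: ... } suitable for getRange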
@@ -1,203 +0,0 @@
|
|||
/*
|
||||
* transaction.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
|
||||
var future = require('./future');
|
||||
var KeySelector = require('./keySelector');
|
||||
var rangeIterator = require('./rangeIterator');
|
||||
var buffer = require('./bufferConversion');
|
||||
var FDBError = require('./error');
|
||||
var fdb = require('./fdbModule');
|
||||
var fdbUtil = require('./fdbUtil');
|
||||
|
||||
function addReadOperations(object, snapshot) {
|
||||
object.prototype.get = function(key, cb) {
|
||||
var tr = this.tr;
|
||||
key = fdbUtil.keyToBuffer(key);
|
||||
|
||||
return future.create(function(futureCb) {
|
||||
tr.get(key, snapshot, futureCb);
|
||||
}, cb);
|
||||
};
|
||||
|
||||
object.prototype.getKey = function(keySelector, cb) {
|
||||
var tr = this.tr;
|
||||
return future.create(function(futureCb) {
|
||||
tr.getKey(keySelector.key, keySelector.orEqual, keySelector.offset, snapshot, futureCb);
|
||||
}, cb);
|
||||
};
|
||||
|
||||
object.prototype.getRange = function(start, end, options) {
|
||||
if(!KeySelector.isKeySelector(start))
|
||||
start = KeySelector.firstGreaterOrEqual(start);
|
||||
if(!KeySelector.isKeySelector(end))
|
||||
end = KeySelector.firstGreaterOrEqual(end);
|
||||
|
||||
return rangeIterator(this.tr, start, end, options, snapshot);
|
||||
};
|
||||
|
||||
object.prototype.getRangeStartsWith = function(prefix, options) {
|
||||
prefix = fdbUtil.keyToBuffer(prefix);
|
||||
return this.getRange(prefix, fdbUtil.strinc(prefix), options, snapshot);
|
||||
};
|
||||
|
||||
object.prototype.getReadVersion = function(cb) {
|
||||
var tr = this.tr;
|
||||
return future.create(function(futureCb) {
|
||||
tr.getReadVersion(futureCb, snapshot);
|
||||
}, cb);
|
||||
};
|
||||
}
|
||||
|
||||
var atomic = function(tr, op) {
|
||||
return function(key, value) { fdb.atomic[op].call(tr, fdbUtil.keyToBuffer(key), fdbUtil.valueToBuffer(value)); };
|
||||
};
|
||||
|
||||
var Transaction = function(db, tr) {
|
||||
this.db = db;
|
||||
this.tr = tr;
|
||||
|
||||
this.options = tr.options;
|
||||
this.snapshot = new Transaction.SnapshotTransaction(tr);
|
||||
|
||||
for(var op in fdb.atomic)
|
||||
this[op] = atomic(tr, op);
|
||||
};
|
||||
|
||||
Transaction.SnapshotTransaction = function(tr) {
|
||||
this.tr = tr;
|
||||
};
|
||||
|
||||
addReadOperations(Transaction, false);
|
||||
addReadOperations(Transaction.SnapshotTransaction, true);
|
||||
|
||||
Transaction.prototype.doTransaction = function(func, cb) {
|
||||
var self = this;
|
||||
return future.create(function(futureCb) {
|
||||
func(self, futureCb);
|
||||
}, cb);
|
||||
};
|
||||
|
||||
Transaction.prototype.set = function(key, value) {
|
||||
key = fdbUtil.keyToBuffer(key);
|
||||
value = fdbUtil.valueToBuffer(value);
|
||||
|
||||
this.tr.set(key, value);
|
||||
};
|
||||
|
||||
Transaction.prototype.clear = function(key) {
|
||||
key = fdbUtil.keyToBuffer(key);
|
||||
|
||||
this.tr.clear(key);
|
||||
};
|
||||
|
||||
Transaction.prototype.clearRange = function(start, end) {
|
||||
start = fdbUtil.keyToBuffer(start);
|
||||
end = fdbUtil.keyToBuffer(end);
|
||||
|
||||
this.tr.clearRange(start, end);
|
||||
};
|
||||
|
||||
Transaction.prototype.clearRangeStartsWith = function(prefix) {
|
||||
prefix = fdbUtil.keyToBuffer(prefix);
|
||||
this.clearRange(prefix, fdbUtil.strinc(prefix));
|
||||
};
|
||||
|
||||
Transaction.prototype.watch = function(key) {
|
||||
key = fdbUtil.keyToBuffer(key);
|
||||
|
||||
var self = this;
|
||||
var watchFuture = future.create(function(futureCb) {
|
||||
// 'this' is the future that is being created.
|
||||
// We set its cancel method to cancel the watch.
|
||||
this._watch = self.tr.watch(key, futureCb);
|
||||
this.cancel = function() { this._watch.cancel(); };
|
||||
});
|
||||
|
||||
return watchFuture;
|
||||
};
|
||||
|
||||
Transaction.prototype.addReadConflictRange = function(start, end) {
|
||||
start = fdbUtil.keyToBuffer(start);
|
||||
end = fdbUtil.keyToBuffer(end);
|
||||
this.tr.addReadConflictRange(start, end);
|
||||
};
|
||||
|
||||
Transaction.prototype.addReadConflictKey = function(key) {
|
||||
key = fdbUtil.keyToBuffer(key);
|
||||
this.tr.addReadConflictRange(key, Buffer.concat([key, buffer.fromByteLiteral('\x00')], key.length + 1));
|
||||
};
|
||||
|
||||
Transaction.prototype.addWriteConflictRange = function(start, end) {
|
||||
start = fdbUtil.keyToBuffer(start);
|
||||
end = fdbUtil.keyToBuffer(end);
|
||||
|
||||
this.tr.addWriteConflictRange(start, end);
|
||||
};
|
||||
|
||||
Transaction.prototype.addWriteConflictKey = function(key) {
|
||||
key = fdbUtil.keyToBuffer(key);
|
||||
this.tr.addWriteConflictRange(key, Buffer.concat([key, buffer.fromByteLiteral('\x00')], key.length + 1));
|
||||
};
|
||||
|
||||
Transaction.prototype.commit = function(cb) {
|
||||
var tr = this.tr;
|
||||
return future.create(function(futureCb) {
|
||||
tr.commit(futureCb);
|
||||
}, cb);
|
||||
};
|
||||
|
||||
Transaction.prototype.onError = function(fdbError, cb) {
|
||||
var tr = this.tr;
|
||||
return future.create(function(futureCb) {
|
||||
if(fdbError instanceof FDBError)
|
||||
tr.onError(fdbError.code, futureCb);
|
||||
else
|
||||
futureCb(fdbError, null);
|
||||
}, cb);
|
||||
};
|
||||
|
||||
Transaction.prototype.reset = function() {
|
||||
this.tr.reset();
|
||||
};
|
||||
|
||||
Transaction.prototype.setReadVersion = function(version) {
|
||||
this.tr.setReadVersion(version);
|
||||
};
|
||||
|
||||
Transaction.prototype.getCommittedVersion = function() {
|
||||
return this.tr.getCommittedVersion();
|
||||
};
|
||||
|
||||
Transaction.prototype.getVersionstamp = function(cb) {
|
||||
var tr = this.tr;
|
||||
return future.create(function(futureCb) {
|
||||
tr.getVersionstamp(futureCb);
|
||||
}, cb);
|
||||
};
|
||||
|
||||
Transaction.prototype.cancel = function() {
|
||||
this.tr.cancel();
|
||||
};
|
||||
|
||||
module.exports = Transaction;
|
||||
|
|
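A usage sketch of the transaction API above, combined with the future module from earlier in this diff; `tr` is assumed to be a Transaction created by a Database of this binding, and the decision to retry after onError is left to the caller.

tr.set('foo', 'bar');

tr.get('foo').then(function(val) {
	console.log('foo =', val.toString());
	return tr.commit();
}).then(function() {
	console.log('committed at version', tr.getCommittedVersion());
}, function(err) {
	// onError resolves only if the error is retryable, after the appropriate backoff.
	return tr.onError(err).then(function() { /* retry the transaction body */ });
});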
@@ -1,481 +0,0 @@
|
|||
/*
|
||||
* tuple.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
"use strict";
|
||||
|
||||
var assert = require('assert');
|
||||
var buffer = require('./bufferConversion');
|
||||
var fdbUtil = require('./fdbUtil');
|
||||
var fdb = require('./fdbModule');
|
||||
var FDBError = require('./error');
|
||||
|
||||
var sizeLimits = new Array(8);
|
||||
|
||||
function setupSizeLimits() {
|
||||
sizeLimits[0] = 1;
|
||||
for(var i = 1; i < sizeLimits.length; i++) {
|
||||
sizeLimits[i] = sizeLimits[i-1] * 256;
|
||||
sizeLimits[i-1] -= 1;
|
||||
}
|
||||
sizeLimits[7] -= 1;
|
||||
}
|
||||
|
||||
setupSizeLimits();
|
||||
|
||||
var maxInt = Math.pow(2, 53) - 1;
|
||||
var minInt = -Math.pow(2, 53);
|
||||
|
||||
var nullByte = new Buffer('00', 'hex');
|
||||
|
||||
var BYTES_CODE = 0x01;
|
||||
var STRING_CODE = 0x02;
|
||||
var NESTED_CODE = 0x05;
|
||||
var INT_ZERO_CODE = 0x14;
|
||||
var POS_INT_END = 0x1d;
|
||||
var NEG_INT_END = 0x0b;
|
||||
var FLOAT_CODE = 0x20;
|
||||
var DOUBLE_CODE = 0x21;
|
||||
var FALSE_CODE = 0x26;
|
||||
var TRUE_CODE = 0x27;
|
||||
var UUID_CODE = 0x30;
|
||||
|
||||
function validateData(data, length) {
|
||||
if(!(data instanceof Buffer) && !(data instanceof Array)) {
|
||||
throw new TypeError('Data for FDB tuple type not array or buffer.');
|
||||
} else if(data.length != length) {
|
||||
throw new RangeError('Data for FDB tuple type has length ' + data.length + ' instead of expected length ' + length);
|
||||
}
|
||||
}
|
||||
|
||||
// If encoding and sign bit is 1 (negative), flip all of the bits. Otherwise, just flip sign.
|
||||
// If decoding and sign bit is 0 (negative), flip all of the bits. Otherwise, just flip sign.
|
||||
function adjustFloat(data, start, encode) {
|
||||
if((encode && (data[start] & 0x80) === 0x80) || (!encode && (data[start] & 0x80) === 0x00)) {
|
||||
for(var i = start; i < data.length; i++) {
|
||||
data[i] = data[i] ^ 0xff;
|
||||
}
|
||||
} else {
|
||||
data[start] = data[start] ^ 0x80;
|
||||
}
|
||||
}
|
||||
|
||||
function Float(value) {
|
||||
this.value = value;
|
||||
this.toBytes = function () {
|
||||
if (this.rawData !== undefined) {
|
||||
return this.rawData;
|
||||
} else {
|
||||
var buf = new Buffer(4);
|
||||
buf.writeFloatBE(fdb.toFloat(this.value), 0);
|
||||
return buf;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Float.fromBytes = function (buf) {
|
||||
validateData(buf, 4);
|
||||
var f = new Float(buf.readFloatBE(0));
|
||||
if(isNaN(f.value)) {
|
||||
f.rawData = buf;
|
||||
}
|
||||
return f;
|
||||
}
|
||||
|
||||
function Double(value) {
|
||||
this.value = value;
|
||||
this.toBytes = function () {
|
||||
if (this.rawData !== undefined) {
|
||||
return this.rawData;
|
||||
} else {
|
||||
var buf = new Buffer(8);
|
||||
buf.writeDoubleBE(this.value, 0);
|
||||
return buf;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Double.fromBytes = function (buf) {
|
||||
validateData(buf, 8);
|
||||
var d = new Double(buf.readDoubleBE(0));
|
||||
if(isNaN(d.value)) {
|
||||
d.rawData = buf;
|
||||
}
|
||||
return d;
|
||||
}
|
||||
|
||||
function UUID(data) {
|
||||
if (data.length != 16) {
|
||||
// There's a special code for this, so we check it first and throw the error if appropriate.
|
||||
throw new FDBError("invalid_uuid_size", 2268);
|
||||
}
|
||||
validateData(data, 16);
|
||||
this.data = new Buffer(data);
|
||||
}
|
||||
|
||||
function findNullBytes(buf, pos, searchForTerminators) {
|
||||
var nullBytes = [];
|
||||
|
||||
var found;
|
||||
for(pos; pos < buf.length; ++pos) {
|
||||
if(searchForTerminators && found && buf[pos] !== 255) {
|
||||
break;
|
||||
}
|
||||
|
||||
found = false;
|
||||
if(buf[pos] === 0) {
|
||||
found = true;
|
||||
nullBytes.push(pos);
|
||||
}
|
||||
}
|
||||
|
||||
if(!found && searchForTerminators) {
|
||||
nullBytes.push(buf.length);
|
||||
}
|
||||
|
||||
return nullBytes;
|
||||
}
|
||||
|
||||
function encode(item, buf, pos) {
|
||||
var encodedString;
|
||||
if(typeof item === 'undefined')
|
||||
throw new TypeError('Packed element cannot be undefined');
|
||||
|
||||
else if(item === null)
|
||||
return nullByte;
|
||||
|
||||
//byte string or unicode
|
||||
else if(Buffer.isBuffer(item) || item instanceof ArrayBuffer || item instanceof Uint8Array || typeof item === 'string') {
|
||||
var unicode = typeof item === 'string';
|
||||
|
||||
if(unicode) {
|
||||
item = new Buffer(item, 'utf8');
|
||||
}
|
||||
else {
|
||||
item = buffer(item);
|
||||
}
|
||||
|
||||
var nullBytes = findNullBytes(item, 0);
|
||||
|
||||
encodedString = new Buffer(2 + item.length + nullBytes.length);
|
||||
encodedString[0] = unicode ? STRING_CODE : BYTES_CODE;
|
||||
|
||||
var srcPos = 0;
|
||||
var targetPos = 1;
|
||||
for(var i = 0; i < nullBytes.length; ++i) {
|
||||
item.copy(encodedString, targetPos, srcPos, nullBytes[i]+1);
|
||||
targetPos += nullBytes[i]+1 - srcPos;
|
||||
srcPos = nullBytes[i]+1;
|
||||
encodedString[targetPos++] = 255;
|
||||
}
|
||||
|
||||
item.copy(encodedString, targetPos, srcPos);
|
||||
encodedString[encodedString.length-1] = 0;
|
||||
|
||||
return encodedString;
|
||||
}
|
||||
|
||||
//64-bit integer
|
||||
else if((typeof item === 'number' || item instanceof Number) && item % 1 === 0) {
|
||||
var negative = item < 0;
|
||||
var posItem = Math.abs(item);
|
||||
|
||||
var length = 0;
|
||||
for(; length < sizeLimits.length; ++length) {
|
||||
if(posItem <= sizeLimits[length])
|
||||
break;
|
||||
}
|
||||
|
||||
if(item > maxInt || item < minInt)
|
||||
throw new RangeError('Cannot pack signed integer larger than 54 bits');
|
||||
|
||||
var prefix = negative ? INT_ZERO_CODE - length : INT_ZERO_CODE + length;
|
||||
|
||||
var outBuf = new Buffer(length+1);
|
||||
outBuf[0] = prefix;
|
||||
for(var byteIdx = length-1; byteIdx >= 0; --byteIdx) {
|
||||
var b = posItem & 0xff;
|
||||
if(negative)
|
||||
outBuf[byteIdx+1] = ~b;
|
||||
else {
|
||||
outBuf[byteIdx+1] = b;
|
||||
}
|
||||
|
||||
posItem = (posItem - b) / 0x100;
|
||||
}
|
||||
|
||||
return outBuf;
|
||||
}
|
||||
|
||||
// Floats
|
||||
else if(item instanceof Float) {
|
||||
var outBuf = new Buffer(5);
|
||||
outBuf[0] = FLOAT_CODE;
|
||||
if (isNaN(item.value) && item.rawData !== undefined) {
|
||||
item.rawData.copy(outBuf, 1, 0, 4);
|
||||
} else {
|
||||
outBuf.writeFloatBE(fdb.toFloat(item.value), 1);
|
||||
}
|
||||
adjustFloat(outBuf, 1, true);
|
||||
return outBuf;
|
||||
}
|
||||
|
||||
// Doubles
|
||||
else if(item instanceof Double) {
|
||||
var outBuf = new Buffer(9);
|
||||
outBuf[0] = DOUBLE_CODE;
|
||||
if (isNaN(item.value) && item.rawData !== undefined) {
|
||||
item.rawData.copy(outBuf, 1, 0, 8);
|
||||
} else {
|
||||
outBuf.writeDoubleBE(item.value, 1);
|
||||
}
|
||||
adjustFloat(outBuf, 1, true);
|
||||
return outBuf;
|
||||
}
|
||||
|
||||
// UUIDs
|
||||
else if(item instanceof UUID) {
|
||||
var outBuf = new Buffer(17);
|
||||
outBuf[0] = UUID_CODE;
|
||||
item.data.copy(outBuf, 1);
|
||||
return outBuf;
|
||||
}
|
||||
|
||||
// booleans
|
||||
else if(item instanceof Boolean || typeof item === 'boolean') {
|
||||
var outBuf = new Buffer(1);
|
||||
var boolItem;
|
||||
if(item instanceof Boolean) {
|
||||
boolItem = item.valueOf();
|
||||
} else {
|
||||
boolItem = item;
|
||||
}
|
||||
if(boolItem) {
|
||||
outBuf[0] = TRUE_CODE;
|
||||
} else {
|
||||
outBuf[0] = FALSE_CODE;
|
||||
}
|
||||
return outBuf;
|
||||
}
|
||||
|
||||
// nested tuples
|
||||
else if(item instanceof Array) {
|
||||
var totalLength = 2;
|
||||
var outArr = [new Buffer('05', 'hex')];
|
||||
for(var i = 0; i < item.length; ++i) {
|
||||
if(item[i] === null) {
|
||||
outArr.push(new Buffer('00ff', 'hex'));
|
||||
totalLength += 2;
|
||||
} else {
|
||||
outArr.push(encode(item[i]));
|
||||
totalLength += outArr[i+1].length;
|
||||
}
|
||||
}
|
||||
outArr.push(new Buffer('00', 'hex'))
|
||||
return Buffer.concat(outArr, totalLength);
|
||||
}
|
||||
|
||||
else
|
||||
throw new TypeError('Packed element must either be a string, a buffer, an integer, or null');
|
||||
}
|
||||
|
||||
function pack(arr) {
|
||||
if(!(arr instanceof Array))
|
||||
throw new TypeError('fdb.tuple.pack must be called with a single array argument');
|
||||
|
||||
var totalLength = 0;
|
||||
|
||||
var outArr = [];
|
||||
for(var i = 0; i < arr.length; ++i) {
|
||||
outArr.push(encode(arr[i]));
|
||||
totalLength += outArr[i].length;
|
||||
}
|
||||
|
||||
return Buffer.concat(outArr, totalLength);
|
||||
}
|
||||
|
||||
function decodeNumber(buf, offset, bytes) {
|
||||
var negative = bytes < 0;
|
||||
bytes = Math.abs(bytes);
|
||||
|
||||
var num = 0;
|
||||
var mult = 1;
|
||||
var odd;
|
||||
for(var i = bytes-1; i >= 0; --i) {
|
||||
var b = buf[offset+i];
|
||||
if(negative)
|
||||
b = -(~b & 0xff);
|
||||
|
||||
if(i == bytes-1)
|
||||
odd = b & 0x01;
|
||||
|
||||
num += b * mult;
|
||||
mult *= 0x100;
|
||||
}
|
||||
|
||||
if(num > maxInt || num < minInt || (num === minInt && odd))
|
||||
throw new RangeError('Cannot unpack signed integers larger than 54 bits');
|
||||
|
||||
return num;
|
||||
}
|
||||
|
||||
function decode(buf, pos, nested) {
|
||||
if(typeof nested === 'undefined') nested = false;
|
||||
|
||||
var code = buf[pos];
|
||||
var value;
|
||||
|
||||
if(code === 0) {
|
||||
value = null;
|
||||
if(nested) {
|
||||
pos += 2;
|
||||
} else {
|
||||
pos++;
|
||||
}
|
||||
}
|
||||
else if(code === BYTES_CODE || code === STRING_CODE) {
|
||||
var nullBytes = findNullBytes(buf, pos+1, true);
|
||||
|
||||
var start = pos+1;
|
||||
var end = nullBytes[nullBytes.length-1];
|
||||
|
||||
if(code === STRING_CODE && nullBytes.length === 1) {
|
||||
value = buf.toString('utf8', start, end);
|
||||
}
|
||||
else {
|
||||
value = new Buffer(end-start-(nullBytes.length-1));
|
||||
var valuePos = 0;
|
||||
|
||||
for(var i=0; i < nullBytes.length && start < end; ++i) {
|
||||
buf.copy(value, valuePos, start, nullBytes[i]);
|
||||
valuePos += nullBytes[i] - start;
|
||||
start = nullBytes[i] + 2;
|
||||
if(start <= end) {
|
||||
value[valuePos++] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if(code === STRING_CODE)
|
||||
value = value.toString('utf8');
|
||||
}
|
||||
|
||||
pos = end + 1;
|
||||
}
|
||||
else if(Math.abs(code-INT_ZERO_CODE) <= 7) {
|
||||
if(code === INT_ZERO_CODE)
|
||||
value = 0;
|
||||
else
|
||||
value = decodeNumber(buf, pos+1, code-INT_ZERO_CODE);
|
||||
|
||||
pos += Math.abs(INT_ZERO_CODE-code) + 1;
|
||||
}
|
||||
else if(Math.abs(code-INT_ZERO_CODE) <= 8)
|
||||
throw new RangeError('Cannot unpack signed integers larger than 54 bits');
|
||||
else if(code === FLOAT_CODE) {
|
||||
var valBuf = new Buffer(4);
|
||||
buf.copy(valBuf, 0, pos+1, pos+5);
|
||||
adjustFloat(valBuf, 0, false);
|
||||
value = Float.fromBytes(valBuf);
|
||||
pos += 5;
|
||||
}
|
||||
else if(code === DOUBLE_CODE) {
|
||||
var valBuf = new Buffer(8);
|
||||
buf.copy(valBuf, 0, pos+1, pos+9);
|
||||
adjustFloat(valBuf, 0, false);
|
||||
value = Double.fromBytes(valBuf);
|
||||
pos += 9;
|
||||
}
|
||||
else if(code === UUID_CODE) {
|
||||
var valBuf = new Buffer(16);
|
||||
buf.copy(valBuf, 0, pos+1, pos+17);
|
||||
value = new UUID(valBuf);
|
||||
pos += 17;
|
||||
}
|
||||
else if(code === FALSE_CODE) {
|
||||
pos++;
|
||||
value = false;
|
||||
}
|
||||
else if(code === TRUE_CODE) {
|
||||
pos++;
|
||||
value = true;
|
||||
}
|
||||
else if(code === NESTED_CODE) {
|
||||
pos++;
|
||||
value = []
|
||||
while (buf[pos] != 0 || pos+1 < buf.length && buf[pos+1] === 0xff) {
|
||||
var nestedVal = decode(buf, pos, true)
|
||||
pos = nestedVal.pos
|
||||
value.push(nestedVal.value)
|
||||
}
|
||||
pos++;
|
||||
}
|
||||
else
|
||||
throw new TypeError('Unknown data type in DB: ' + buf + ' at ' + pos);
|
||||
|
||||
return { pos: pos, value: value };
|
||||
}
|
||||
|
||||
function unpack(key) {
|
||||
var res = { pos: 0 };
|
||||
var arr = [];
|
||||
|
||||
key = fdbUtil.keyToBuffer(key);
|
||||
|
||||
while(res.pos < key.length) {
|
||||
res = decode(key, res.pos);
|
||||
arr.push(res.value);
|
||||
}
|
||||
|
||||
return arr;
|
||||
}
|
||||
|
||||
function range(arr) {
|
||||
var packed = pack(arr);
|
||||
return { begin: Buffer.concat([packed, nullByte]), end: Buffer.concat([packed, new Buffer('ff', 'hex')]) };
|
||||
}
|
||||
|
||||
function compare(arr1, arr2) {
|
||||
// NOTE: There is built-in comparison function included in 0.11.13 that we might want to switch to.
|
||||
var buf1 = pack(arr1);
|
||||
var buf2 = pack(arr2);
|
||||
var pos = 0;
|
||||
|
||||
while(pos < buf1.length && pos < buf2.length) {
|
||||
if(buf1[pos] != buf2[pos]) {
|
||||
if(buf1[pos] < buf2[pos]) {
|
||||
return -1;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
pos += 1;
|
||||
}
|
||||
|
||||
// The two arrays begin with a common prefix.
|
||||
if(buf1.length < buf2.length) {
|
||||
return -1;
|
||||
} else if(buf1.length == buf2.length) {
|
||||
return 0;
|
||||
} else {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {pack: pack, unpack: unpack, range: range, compare: compare, Float: Float, Double: Double, UUID: UUID};
|
|
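A sketch of the tuple layer above: pack/unpack round-trip typed elements through an order-preserving encoding, and range() yields begin/end keys covering every key that extends a prefix.

var tuple = require('./tuple');

var packed = tuple.pack(['inventory', 42, true, new tuple.Float(1.5)]);
console.log(tuple.unpack(packed)); // [ 'inventory', 42, true, Float wrapper for 1.5 ]

// Packed byte order matches tuple order.
console.log(tuple.compare(['a', 1], ['a', 2])); // -1

// Keys for everything starting with the ('inventory', 42) prefix.
var r = tuple.range(['inventory', 42]); // { begin: Buffer, end: Buffer }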
@@ -1,26 +0,0 @@
{
  "name": "@apple/fdb",
  "publishConfig": {
    "registry": "https://registry.npmjs.org"
  },
  "version": "VERSION",
  "author": "FoundationDB <fdb-dist@apple.com> (https://www.foundationdb.org)",
  "description": "Node.js bindings for the FoundationDB database",
  "keywords": [ "FoundationDB", "database", "NoSQL", "ACID" ],
  "homepage": "http://17.199.145.104",
  "license": "Apache v2",
  "main": "./lib/fdb.js",
  "cpu": [ "x64" ],
  "dependencies": {
    "semver": "~ 4.1"
  },
  "devDependencies": {
    "jshint": ">= 2.5.6",
    "promises-aplus-tests": ">= 2.1.0"
  },
  "engineStrict": true,
  "private": true,
  "engines": {
    "node" : "0.8.x || 0.10.x"
  }
}
@@ -1,96 +0,0 @@
|
|||
/*
|
||||
* Cluster.cpp
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#include <node.h>
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
#include <cstring>
|
||||
#include <node_version.h>
|
||||
|
||||
#include "Cluster.h"
|
||||
#include "Database.h"
|
||||
#include "FdbOptions.h"
|
||||
#include "NodeCallback.h"
|
||||
|
||||
using namespace v8;
|
||||
using namespace std;
|
||||
|
||||
Cluster::Cluster() { }
|
||||
Cluster::~Cluster() {
|
||||
fdb_cluster_destroy(cluster);
|
||||
}
|
||||
|
||||
Persistent<Function> Cluster::constructor;
|
||||
|
||||
Handle<Value> Cluster::OpenDatabase(const Arguments &args) {
|
||||
HandleScope scope;
|
||||
|
||||
Cluster *clusterPtr = ObjectWrap::Unwrap<Cluster>(args.Holder());
|
||||
|
||||
const char *dbName = "DB";
|
||||
FDBFuture *f = fdb_cluster_create_database(clusterPtr->cluster, (uint8_t*)dbName, (int)strlen(dbName));
|
||||
|
||||
fdb_error_t errorCode = fdb_future_block_until_ready(f);
|
||||
|
||||
FDBDatabase *database;
|
||||
if(errorCode == 0)
|
||||
errorCode = fdb_future_get_database(f, &database);
|
||||
|
||||
if(errorCode != 0)
|
||||
return ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
|
||||
Handle<Value> jsValue = Database::NewInstance(database);
|
||||
return scope.Close(jsValue);
|
||||
}
|
||||
|
||||
void Cluster::Init() {
|
||||
HandleScope scope;
|
||||
|
||||
Local<FunctionTemplate> tpl = FunctionTemplate::New(New);
|
||||
tpl->InstanceTemplate()->SetInternalFieldCount(1);
|
||||
tpl->SetClassName(String::NewSymbol("Cluster"));
|
||||
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("openDatabase"), FunctionTemplate::New(OpenDatabase)->GetFunction());
|
||||
|
||||
constructor = Persistent<Function>::New(tpl->GetFunction());
|
||||
}
|
||||
|
||||
Handle<Value> Cluster::New(const Arguments &args) {
|
||||
HandleScope scope;
|
||||
|
||||
Cluster *c = new Cluster();
|
||||
c->Wrap(args.Holder());
|
||||
|
||||
return scope.Close(args.Holder());
|
||||
}
|
||||
|
||||
Handle<Value> Cluster::NewInstance(FDBCluster *ptr) {
|
||||
HandleScope scope;
|
||||
|
||||
Local<Object> instance = constructor->NewInstance(0, NULL);
|
||||
|
||||
Cluster *clusterObj = ObjectWrap::Unwrap<Cluster>(instance);
|
||||
clusterObj->cluster = ptr;
|
||||
|
||||
instance->Set(String::NewSymbol("options"), FdbOptions::CreateOptions(FdbOptions::ClusterOption, instance));
|
||||
|
||||
return scope.Close(instance);
|
||||
}
|
|
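On the JavaScript side, the openDatabase method registered above was reached through the binding's top-level fdb.js module, which is not part of this hunk; the apiVersion/createCluster calls below are therefore assumptions about that entry point, shown only to illustrate how the native Cluster wrapper was consumed.

var fdb = require('fdb').apiVersion(510); // assumed entry point, not shown in this diff

fdb.createCluster()(function(err, cluster) {
	if(err) return console.error(err);
	var db = cluster.openDatabase(); // synchronous; throws an FDBError on failure
	console.log('database opened', db);
});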
@@ -1,47 +0,0 @@
|
|||
/*
|
||||
* Cluster.h
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#ifndef FDB_NODE_CLUSTER_H
|
||||
#define FDB_NODE_CLUSTER_H
|
||||
|
||||
#include "Version.h"
|
||||
|
||||
#include <foundationdb/fdb_c.h>
|
||||
#include <node.h>
|
||||
|
||||
class Cluster : public node::ObjectWrap {
|
||||
public:
|
||||
static void Init();
|
||||
static v8::Handle<v8::Value> NewInstance(FDBCluster *ptr);
|
||||
static v8::Handle<v8::Value> New(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> OpenDatabase(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> Destroy(const v8::Arguments &args);
|
||||
|
||||
FDBCluster* GetCluster() { return cluster; }
|
||||
|
||||
private:
|
||||
Cluster();
|
||||
~Cluster();
|
||||
static v8::Persistent<v8::Function> constructor;
|
||||
FDBCluster *cluster;
|
||||
};
|
||||
|
||||
#endif
|
|
@@ -1,86 +0,0 @@
|
|||
/*
|
||||
* Database.cpp
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#include <node.h>
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
#include <cstring>
|
||||
|
||||
#include "Database.h"
|
||||
#include "FdbOptions.h"
|
||||
#include "NodeCallback.h"
|
||||
|
||||
using namespace v8;
|
||||
using namespace std;
|
||||
|
||||
Database::Database() { };
|
||||
|
||||
Database::~Database() {
|
||||
fdb_database_destroy(db);
|
||||
};
|
||||
|
||||
Persistent<Function> Database::constructor;
|
||||
|
||||
void Database::Init() {
|
||||
Local<FunctionTemplate> tpl = FunctionTemplate::New(New);
|
||||
tpl->SetClassName(String::NewSymbol("Database"));
|
||||
tpl->InstanceTemplate()->SetInternalFieldCount(1);
|
||||
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("createTransaction"), FunctionTemplate::New(CreateTransaction)->GetFunction());
|
||||
|
||||
constructor = Persistent<Function>::New(tpl->GetFunction());
|
||||
}
|
||||
|
||||
Handle<v8::Value> Database::CreateTransaction(const v8::Arguments &args) {
|
||||
HandleScope scope;
|
||||
|
||||
Database *dbPtr = node::ObjectWrap::Unwrap<Database>(args.Holder());
|
||||
FDBDatabase *db = dbPtr->db;
|
||||
FDBTransaction *tr;
|
||||
fdb_error_t err = fdb_database_create_transaction(db, &tr);
|
||||
if (err) {
|
||||
ThrowException(FdbError::NewInstance(err, fdb_get_error(err)));
|
||||
return scope.Close(Undefined());
|
||||
}
|
||||
|
||||
return scope.Close(Transaction::NewInstance(tr));
|
||||
}
|
||||
|
||||
Handle<Value> Database::New(const Arguments &args) {
|
||||
HandleScope scope;
|
||||
|
||||
Database *db = new Database();
|
||||
db->Wrap(args.Holder());
|
||||
|
||||
return scope.Close(args.Holder());
|
||||
}
|
||||
|
||||
Handle<Value> Database::NewInstance(FDBDatabase *ptr) {
|
||||
HandleScope scope;
|
||||
|
||||
Local<Object> instance = constructor->NewInstance(0, NULL);
|
||||
Database *dbObj = ObjectWrap::Unwrap<Database>(instance);
|
||||
dbObj->db = ptr;
|
||||
|
||||
instance->Set(String::NewSymbol("options"), FdbOptions::CreateOptions(FdbOptions::DatabaseOption, instance));
|
||||
|
||||
return scope.Close(instance);
|
||||
}
|
|
@@ -1,49 +0,0 @@
|
|||
/*
|
||||
* Database.h
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#ifndef FDB_NODE_DATABASE_H
|
||||
#define FDB_NODE_DATABASE_H
|
||||
|
||||
#include "Version.h"
|
||||
#include "Transaction.h"
|
||||
|
||||
#include <foundationdb/fdb_c.h>
|
||||
#include <node.h>
|
||||
|
||||
class Database: public node::ObjectWrap {
|
||||
public:
|
||||
static void Init();
|
||||
static v8::Handle<v8::Value> NewInstance(FDBDatabase *ptr);
|
||||
static v8::Handle<v8::Value> New(const v8::Arguments &args);
|
||||
|
||||
FDBDatabase* GetDatabase() { return db; }
|
||||
|
||||
private:
|
||||
Database();
|
||||
~Database();
|
||||
|
||||
static v8::Handle<v8::Value> CreateTransaction(const v8::Arguments &args);
|
||||
static v8::Persistent<v8::Function> constructor;
|
||||
|
||||
FDBDatabase *db;
|
||||
};
|
||||
|
||||
#endif
|
|
@@ -1,48 +0,0 @@
|
|||
/*
|
||||
* FdbError.cpp
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#include <node.h>
|
||||
#include "FdbError.h"
|
||||
|
||||
using namespace v8;
|
||||
using namespace node;
|
||||
|
||||
static Persistent<Object> module;
|
||||
|
||||
void FdbError::Init( Handle<Object> module ) {
|
||||
::module = Persistent<Object>::New( module );
|
||||
}
|
||||
|
||||
Handle<Value> FdbError::NewInstance(fdb_error_t code, const char *description) {
|
||||
HandleScope scope;
|
||||
|
||||
Local<Value> constructor = module->Get( String::NewSymbol("FDBError") );
|
||||
Local<Object> instance;
|
||||
if (!constructor.IsEmpty() && constructor->IsFunction()) {
|
||||
Local<Value> constructorArgs[] = { String::New(description), Integer::New(code) };
|
||||
instance = Local<Function>::Cast(constructor)->NewInstance(2, constructorArgs);
|
||||
} else {
|
||||
// We can't find the (javascript) FDBError class, so construct and throw *something*
|
||||
instance = Exception::Error(String::New("FDBError class not found. Unable to deliver error."))->ToObject();
|
||||
}
|
||||
|
||||
return scope.Close(instance);
|
||||
}
|
|
@@ -1,40 +0,0 @@
/*
 * FdbError.h
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


#ifndef FDB_NODE_FDB_ERROR_H
#define FDB_NODE_FDB_ERROR_H

#include "Version.h"

#include <foundationdb/fdb_c.h>
#include <node.h>

class FdbError {
    public:
        static v8::Handle<v8::Value> NewInstance(fdb_error_t code, const char *description);

        static void Init( v8::Handle<v8::Object> module );

    private:
        FdbError(); // not implemented by design
};

#endif
@@ -1,241 +0,0 @@
|
|||
/*
|
||||
* FdbOptions.cpp
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#include "FdbOptions.h"
|
||||
#include "Cluster.h"
|
||||
#include "Database.h"
|
||||
#include "Transaction.h"
|
||||
#include "FdbError.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include <node_buffer.h>
|
||||
|
||||
#define INVALID_OPTION_VALUE_ERROR_CODE (fdb_error_t)2006
|
||||
|
||||
using namespace v8;
|
||||
using namespace node;
|
||||
|
||||
std::map<FdbOptions::Scope, Persistent<FunctionTemplate>> FdbOptions::optionTemplates;
|
||||
std::map<FdbOptions::Scope, ScopeInfo> FdbOptions::scopeInfo;
|
||||
std::map<FdbOptions::Scope, std::map<int, FdbOptions::ParameterType>> FdbOptions::parameterTypes;
|
||||
|
||||
FdbOptions::FdbOptions() { }
|
||||
|
||||
void FdbOptions::InitOptionsTemplate(Persistent<FunctionTemplate> &tpl, const char *className) {
|
||||
tpl = Persistent<FunctionTemplate>::New(FunctionTemplate::New(New));
|
||||
tpl->SetClassName(String::NewSymbol(className));
|
||||
tpl->InstanceTemplate()->SetInternalFieldCount(1);
|
||||
}
|
||||
|
||||
void FdbOptions::AddOption(Scope scope, std::string name, int value, ParameterType type) {
|
||||
if(scope == NetworkOption || scope == ClusterOption || scope == DatabaseOption || scope == TransactionOption || scope == MutationType) {
|
||||
bool isSetter = scope != MutationType;
|
||||
optionTemplates[scope]->PrototypeTemplate()->Set(v8::String::NewSymbol(ToJavaScriptName(name, isSetter).c_str()),
|
||||
v8::FunctionTemplate::New(scopeInfo[scope].optionFunction, v8::Integer::New(value))->GetFunction());
|
||||
parameterTypes[scope][value] = type;
|
||||
}
|
||||
else if(scope == StreamingMode) {
|
||||
optionTemplates[scope]->PrototypeTemplate()->Set(v8::String::NewSymbol(ToJavaScriptName(name, false).c_str()), v8::Integer::New(value));
|
||||
}
|
||||
else if(scope == ConflictRangeType) {
|
||||
//Conflict range type enum is not exposed to JS code
|
||||
}
|
||||
}
|
||||
|
||||
Handle<Value> FdbOptions::New(const Arguments &args) {
|
||||
FdbOptions *options = new FdbOptions();
|
||||
options->Wrap(args.Holder());
|
||||
|
||||
return args.Holder();
|
||||
}
|
||||
|
||||
void FdbOptions::WeakCallback(Persistent<Value> value, void *data) { }
|
||||
|
||||
Handle<Value> FdbOptions::NewInstance(Persistent<FunctionTemplate> optionsTemplate, Handle<Value> source) {
|
||||
HandleScope scope;
|
||||
|
||||
Local<Object> instance = optionsTemplate->GetFunction()->NewInstance();
|
||||
|
||||
FdbOptions *optionsObj = ObjectWrap::Unwrap<FdbOptions>(instance);
|
||||
optionsObj->source = Persistent<Value>::New(source);
|
||||
optionsObj->source.MakeWeak(optionsObj, WeakCallback);
|
||||
|
||||
return scope.Close(instance);
|
||||
}
|
||||
|
||||
Handle<Value> FdbOptions::CreateOptions(Scope scope, Handle<Value> source) {
|
||||
return NewInstance(optionTemplates[scope], source);
|
||||
}
|
||||
|
||||
Handle<Value> FdbOptions::CreateEnum(Scope scope) {
|
||||
return optionTemplates[scope]->GetFunction()->NewInstance();
|
||||
}
|
||||
|
||||
Parameter GetStringParameter(const Arguments &args, int index) {
|
||||
if(args.Length() <= index || (!Buffer::HasInstance(args[index]) && !args[index]->IsString()))
|
||||
return INVALID_OPTION_VALUE_ERROR_CODE;
|
||||
else if(args[index]->IsString()) {
|
||||
String::Utf8Value val(args[index]);
|
||||
return std::string(*val, val.length());
|
||||
}
|
||||
else
|
||||
return std::string(Buffer::Data(args[index]->ToObject()), Buffer::Length(args[index]->ToObject()));
|
||||
};
|
||||
|
||||
Parameter FdbOptions::GetOptionParameter(const Arguments &args, Scope scope, int optionValue, int index) {
|
||||
if(args.Length() > index) {
|
||||
int64_t val;
|
||||
switch(parameterTypes[scope][optionValue]) {
|
||||
case FdbOptions::String:
|
||||
return GetStringParameter(args, index);
|
||||
|
||||
case FdbOptions::Bytes:
|
||||
if(!Buffer::HasInstance(args[index]))
|
||||
return INVALID_OPTION_VALUE_ERROR_CODE;
|
||||
|
||||
return std::string(Buffer::Data(args[index]->ToObject()), Buffer::Length(args[index]->ToObject()));
|
||||
|
||||
case FdbOptions::Int:
|
||||
if(!args[index]->IsNumber())
|
||||
return INVALID_OPTION_VALUE_ERROR_CODE;
|
||||
val = args[index]->IntegerValue();
|
||||
return std::string((const char*)&val, 8);
|
||||
|
||||
|
||||
case FdbOptions::None:
|
||||
return Parameter();
|
||||
}
|
||||
}
|
||||
|
||||
return Parameter();
|
||||
}
|
||||
|
||||
v8::Handle<v8::Value> SetNetworkOption(const Arguments &args) {
|
||||
FDBNetworkOption op = (FDBNetworkOption)args.Data()->Uint32Value();
|
||||
|
||||
Parameter param = FdbOptions::GetOptionParameter(args, FdbOptions::NetworkOption, op);
|
||||
fdb_error_t errorCode = param.errorCode;
|
||||
if(errorCode == 0)
|
||||
errorCode = fdb_network_set_option(op, param.getValue(), param.getLength());
|
||||
|
||||
if(errorCode)
|
||||
return ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
v8::Handle<v8::Value> SetClusterOption(const Arguments &args) {
|
||||
FdbOptions *options = ObjectWrap::Unwrap<FdbOptions>(args.Holder());
|
||||
Cluster *cluster = ObjectWrap::Unwrap<Cluster>(options->GetSource()->ToObject());
|
||||
FDBClusterOption op = (FDBClusterOption)args.Data()->Uint32Value();
|
||||
|
||||
Parameter param = FdbOptions::GetOptionParameter(args, FdbOptions::ClusterOption, op);
|
||||
fdb_error_t errorCode = param.errorCode;
|
||||
if(errorCode == 0)
|
||||
errorCode = fdb_cluster_set_option(cluster->GetCluster(), op, param.getValue(), param.getLength());
|
||||
|
||||
if(errorCode)
|
||||
return ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
v8::Handle<v8::Value> SetDatabaseOption(const Arguments &args) {
|
||||
FdbOptions *options = ObjectWrap::Unwrap<FdbOptions>(args.Holder());
|
||||
Database *db = ObjectWrap::Unwrap<Database>(options->GetSource()->ToObject());
|
||||
FDBDatabaseOption op = (FDBDatabaseOption)args.Data()->Uint32Value();
|
||||
|
||||
Parameter param = FdbOptions::GetOptionParameter(args, FdbOptions::DatabaseOption, op);
|
||||
fdb_error_t errorCode = param.errorCode;
|
||||
if(errorCode == 0)
|
||||
errorCode = fdb_database_set_option(db->GetDatabase(), op, param.getValue(), param.getLength());
|
||||
|
||||
if(errorCode)
|
||||
return ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
v8::Handle<v8::Value> SetTransactionOption(const Arguments &args) {
|
||||
FdbOptions *options = ObjectWrap::Unwrap<FdbOptions>(args.Holder());
|
||||
Transaction *tr = ObjectWrap::Unwrap<Transaction>(options->GetSource()->ToObject());
|
||||
FDBTransactionOption op = (FDBTransactionOption)args.Data()->Uint32Value();
|
||||
|
||||
Parameter param = FdbOptions::GetOptionParameter(args, FdbOptions::TransactionOption, op);
|
||||
fdb_error_t errorCode = param.errorCode;
|
||||
if(errorCode == 0)
|
||||
errorCode = fdb_transaction_set_option(tr->GetTransaction(), op, param.getValue(), param.getLength());
|
||||
|
||||
if(errorCode)
|
||||
return ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
v8::Handle<v8::Value> CallAtomicOperation(const Arguments &args) {
|
||||
Transaction *tr = ObjectWrap::Unwrap<Transaction>(args.Holder());
|
||||
Parameter key = GetStringParameter(args, 0);
|
||||
Parameter value = GetStringParameter(args, 1);
|
||||
|
||||
fdb_error_t errorCode = key.errorCode > 0 ? key.errorCode : value.errorCode;
|
||||
if(errorCode > 0)
|
||||
return ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
|
||||
fdb_transaction_atomic_op(tr->GetTransaction(), key.getValue(), key.getLength(), value.getValue(), value.getLength(), (FDBMutationType)args.Data()->Uint32Value());
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
//Converts names using underscores as word separators to camel case (but preserves existing capitalization, if present). If isSetter, prepends the word 'set' to each name
|
||||
std::string FdbOptions::ToJavaScriptName(std::string optionName, bool isSetter) {
|
||||
if(isSetter)
|
||||
optionName = "set_" + optionName;
|
||||
|
||||
size_t start = 0;
|
||||
while(start < optionName.size()) {
|
||||
if(start != 0)
|
||||
optionName[start] = ::toupper(optionName[start]);
|
||||
size_t index = optionName.find_first_of('_', start);
|
||||
if(index == std::string::npos)
|
||||
break;
|
||||
|
||||
optionName.erase(optionName.begin() + index);
|
||||
|
||||
start = index;
|
||||
}
|
||||
|
||||
return optionName;
|
||||
}
|
||||
|
||||
void FdbOptions::Init() {
|
||||
scopeInfo[NetworkOption] = ScopeInfo("FdbNetworkOptions", SetNetworkOption);
|
||||
scopeInfo[ClusterOption] = ScopeInfo("FdbClusterOptions", SetClusterOption);
|
||||
scopeInfo[DatabaseOption] = ScopeInfo("FdbDatabaseOptions", SetDatabaseOption);
|
||||
scopeInfo[TransactionOption] = ScopeInfo("FdbTransactionOptions", SetTransactionOption);
|
||||
scopeInfo[StreamingMode] = ScopeInfo("FdbStreamingMode", NULL);
|
||||
scopeInfo[MutationType] = ScopeInfo("AtomicOperations", CallAtomicOperation);
|
||||
//scopeInfo[ConflictRangeType] = ScopeInfo("ConflictRangeType", NULL);
|
||||
|
||||
for(auto itr = scopeInfo.begin(); itr != scopeInfo.end(); ++itr)
|
||||
InitOptionsTemplate(optionTemplates[itr->first], itr->second.templateClassName.c_str());
|
||||
|
||||
InitOptions();
|
||||
}
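
The name mangling above maps underscore-separated option names to camelCase methods, prepends "set" for setter scopes, and exposes streaming modes as plain enum properties. A hedged sketch of the resulting JavaScript surface (the specific option names are assumptions; the real set came from the generated InitOptions()):

    // Illustrative mapping produced by AddOption/ToJavaScriptName:
    //   network option  "trace_enable"        -> fdb.options.setTraceEnable(traceDirectory)
    //   database option "location_cache_size" -> db.options.setLocationCacheSize(100000)
    //   streaming mode  "want_all"            -> fdb.streamingMode.wantAll   (no "set" prefix)
    fdb.options.setTraceEnable('/tmp/fdb-trace');   // String parameter
    db.options.setLocationCacheSize(100000);        // Int parameter, packed into 8 little-endian bytes
    var mode = fdb.streamingMode.wantAll;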
|
|
@@ -1,109 +0,0 @@
|
|||
/*
|
||||
* FdbOptions.h
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#ifndef FDB_NODE_FDB_OPTIONS_H
|
||||
#define FDB_NODE_FDB_OPTIONS_H
|
||||
|
||||
#define ADD_OPTION(scope, name, value, type) AddOption(scope, name, value, type)
|
||||
|
||||
#include "Version.h"
|
||||
|
||||
#include <foundationdb/fdb_c.h>
|
||||
#include <node.h>
|
||||
#include <string>
|
||||
#include <map>
|
||||
|
||||
struct Parameter {
|
||||
Parameter() : isNull(true), errorCode(0) { }
|
||||
Parameter(std::string param) : param(param), isNull(false), errorCode(0) { }
|
||||
Parameter(fdb_error_t errorCode) : isNull(false), errorCode(errorCode) { }
|
||||
|
||||
std::string param;
|
||||
bool isNull;
|
||||
fdb_error_t errorCode;
|
||||
|
||||
uint8_t const* getValue() { return isNull ? NULL : (uint8_t const*)param.c_str(); }
|
||||
int getLength() { return isNull ? 0 : (int)param.size(); }
|
||||
};
|
||||
|
||||
struct ScopeInfo {
|
||||
std::string templateClassName;
|
||||
v8::Handle<v8::Value> (*optionFunction) (const v8::Arguments &args);
|
||||
|
||||
ScopeInfo() { }
|
||||
ScopeInfo(std::string templateClassName, v8::Handle<v8::Value> (*optionFunction) (const v8::Arguments &args)) {
|
||||
this->templateClassName = templateClassName;
|
||||
this->optionFunction = optionFunction;
|
||||
}
|
||||
};
|
||||
|
||||
class FdbOptions : node::ObjectWrap {
|
||||
public:
|
||||
static void Init();
|
||||
|
||||
enum ParameterType {
|
||||
None,
|
||||
Int,
|
||||
String,
|
||||
Bytes
|
||||
};
|
||||
|
||||
enum Scope {
|
||||
NetworkOption,
|
||||
ClusterOption,
|
||||
DatabaseOption,
|
||||
TransactionOption,
|
||||
StreamingMode,
|
||||
MutationType,
|
||||
ConflictRangeType
|
||||
};
|
||||
|
||||
static v8::Handle<v8::Value> CreateOptions(Scope scope, v8::Handle<v8::Value> source = v8::Null());
|
||||
static v8::Handle<v8::Value> CreateEnum(Scope scope);
|
||||
|
||||
static Parameter GetOptionParameter(const v8::Arguments &args, Scope scope, int optionValue, int index = 0);
|
||||
|
||||
v8::Persistent<v8::Value> GetSource() {
|
||||
return source;
|
||||
}
|
||||
|
||||
private:
|
||||
static v8::Handle<v8::Value> New(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> NewInstance(v8::Persistent<v8::FunctionTemplate> optionsTemplate, v8::Handle<v8::Value> source);
|
||||
|
||||
FdbOptions();
|
||||
|
||||
static void InitOptionsTemplate(v8::Persistent<v8::FunctionTemplate> &tpl, const char *className);
|
||||
static void InitOptions();
|
||||
|
||||
static void AddOption(Scope scope, std::string name, int value, ParameterType type);
|
||||
static void WeakCallback(v8::Persistent<v8::Value> value, void *data);
|
||||
|
||||
static std::string ToJavaScriptName(std::string optionName, bool isSetter);
|
||||
|
||||
static std::map<Scope, ScopeInfo> scopeInfo;
|
||||
static std::map<Scope, v8::Persistent<v8::FunctionTemplate>> optionTemplates;
|
||||
static std::map<Scope, std::map<int, ParameterType>> parameterTypes;
|
||||
|
||||
v8::Persistent<v8::Value> source;
|
||||
};
|
||||
|
||||
#endif
|
|
@@ -1,42 +0,0 @@
/*
 * FdbUtil.cpp
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


#include <node.h>
#include "FdbUtil.h"

using namespace v8;

Handle<Value> ToFloat(const Arguments &args) {
    HandleScope scope;

    if (args.Length() != 1) {
        return ThrowException(Exception::TypeError(String::NewSymbol("Wrong number of arguments (must be exactly 1)")));
    }

    if (!args[0]->IsNumber()) {
        return ThrowException(Exception::TypeError(String::NewSymbol("Argument is not a Number")));
    }

    float value = (float)args[0]->NumberValue();
    Handle<Value> jsValue = Number::New(value);

    return scope.Close(jsValue);
}
@@ -1,29 +0,0 @@
/*
 * FdbUtil.h
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


#ifndef FDB_NODE_FDB_UTIL_H
#define FDB_NODE_FDB_UTIL_H

#include <node.h>

v8::Handle<v8::Value> ToFloat(const v8::Arguments &args);

#endif
@@ -1,151 +0,0 @@
|
|||
/*
|
||||
* FdbV8Wrapper.cpp
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#include <string>
|
||||
#include "node.h"
|
||||
#include <iostream>
|
||||
#include <cstdlib>
|
||||
#include <cstring>
|
||||
#include <sstream>
|
||||
#include <node_version.h>
|
||||
|
||||
#include "Database.h"
|
||||
#include "NodeCallback.h"
|
||||
#include "Cluster.h"
|
||||
#include "Version.h"
|
||||
#include "FdbError.h"
|
||||
#include "FdbOptions.h"
|
||||
#include "FdbUtil.h"
|
||||
|
||||
uv_thread_t fdbThread;
|
||||
|
||||
using namespace v8;
|
||||
using namespace std;
|
||||
|
||||
bool networkStarted = false;
|
||||
|
||||
Handle<Value> ApiVersion(const Arguments &args) {
|
||||
int apiVersion = args[0]->Int32Value();
|
||||
fdb_error_t errorCode = fdb_select_api_version(apiVersion);
|
||||
|
||||
if(errorCode != 0) {
|
||||
if(errorCode == 2203) {
|
||||
int maxSupportedVersion = fdb_get_max_api_version();
|
||||
|
||||
ostringstream errorStr;
|
||||
if(FDB_API_VERSION > maxSupportedVersion) {
|
||||
errorStr << "This version of the FoundationDB Node.js binding is not supported by the installed FoundationDB "
|
||||
<< "C library. The binding requires a library that supports API version " << FDB_API_VERSION
|
||||
<< ", but the installed library supports a maximum version of " << maxSupportedVersion << ".";
|
||||
}
|
||||
else {
|
||||
errorStr << "API version " << apiVersion << " is not supported by the installed FoundationDB C library.";
|
||||
}
|
||||
|
||||
return ThrowException(FdbError::NewInstance(errorCode, errorStr.str().c_str()));
|
||||
}
|
||||
|
||||
return ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
}
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
static void networkThread(void *arg) {
|
||||
fdb_error_t errorCode = fdb_run_network();
|
||||
if(errorCode != 0)
|
||||
fprintf(stderr, "Unhandled error in FoundationDB network thread: %s (%d)\n", fdb_get_error(errorCode), errorCode);
|
||||
}
|
||||
|
||||
static Handle<Value> runNetwork() {
|
||||
fdb_error_t errorCode = fdb_setup_network();
|
||||
|
||||
if(errorCode != 0)
|
||||
return ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
|
||||
uv_thread_create(&fdbThread, networkThread, NULL); // FIXME: Return code?
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> CreateCluster(const Arguments &args) {
|
||||
HandleScope scope;
|
||||
|
||||
FDBFuture *f = fdb_create_cluster(*String::AsciiValue(args[0]->ToString()));
|
||||
fdb_error_t errorCode = fdb_future_block_until_ready(f);
|
||||
|
||||
FDBCluster *cluster;
|
||||
if(errorCode == 0)
|
||||
errorCode = fdb_future_get_cluster(f, &cluster);
|
||||
|
||||
if(errorCode != 0)
|
||||
return ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
|
||||
Handle<Value> jsValue = Local<Value>::New(Cluster::NewInstance(cluster));
|
||||
return scope.Close(jsValue);
|
||||
}
|
||||
|
||||
Handle<Value> StartNetwork(const Arguments &args) {
|
||||
if(!networkStarted) {
|
||||
networkStarted = true;
|
||||
return runNetwork();
|
||||
}
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> StopNetwork(const Arguments &args) {
|
||||
fdb_error_t errorCode = fdb_stop_network();
|
||||
|
||||
if(errorCode != 0)
|
||||
return ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
|
||||
uv_thread_join(&fdbThread);
|
||||
|
||||
//This line forces garbage collection. Useful for doing valgrind tests
|
||||
//while(!V8::IdleNotification());
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
void init(Handle<Object> target){
|
||||
FdbError::Init( target );
|
||||
Database::Init();
|
||||
Transaction::Init();
|
||||
Cluster::Init();
|
||||
FdbOptions::Init();
|
||||
Watch::Init();
|
||||
|
||||
target->Set(String::NewSymbol("apiVersion"), FunctionTemplate::New(ApiVersion)->GetFunction());
|
||||
target->Set(String::NewSymbol("createCluster"), FunctionTemplate::New(CreateCluster)->GetFunction());
|
||||
target->Set(String::NewSymbol("startNetwork"), FunctionTemplate::New(StartNetwork)->GetFunction());
|
||||
target->Set(String::NewSymbol("stopNetwork"), FunctionTemplate::New(StopNetwork)->GetFunction());
|
||||
target->Set(String::NewSymbol("options"), FdbOptions::CreateOptions(FdbOptions::NetworkOption));
|
||||
target->Set(String::NewSymbol("streamingMode"), FdbOptions::CreateEnum(FdbOptions::StreamingMode));
|
||||
target->Set(String::NewSymbol("atomic"), FdbOptions::CreateOptions(FdbOptions::MutationType));
|
||||
target->Set(String::NewSymbol("toFloat"), FunctionTemplate::New(ToFloat)->GetFunction());
|
||||
}
|
||||
|
||||
#if NODE_VERSION_AT_LEAST(0, 8, 0)
|
||||
NODE_MODULE(fdblib, init);
|
||||
#else
|
||||
#error "Node.js versions before v0.8.0 are not supported"
|
||||
#endif
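
init() above is the entire surface registered by the addon; everything else was layered on in JavaScript. A minimal sketch of driving the raw module directly, assuming it was built as fdblib.node (the require path and cluster file are illustrative):

    var fdblib = require('./build/Release/fdblib.node');    // hypothetical build output path
    fdblib.apiVersion(510);                                 // must match FDB_API_VERSION in Version.h
    fdblib.startNetwork();                                  // starts the fdb_run_network() thread
    var cluster = fdblib.createCluster('/etc/foundationdb/fdb.cluster');
    // ... open a database from the cluster and run transactions ...
    fdblib.stopNetwork();                                   // joins the network thread at shutdown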
|
|
@@ -1,149 +0,0 @@
|
|||
/*
|
||||
* NodeCallback.h
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#ifndef FDB_NODE_NODE_CALLBACK_H
|
||||
#define FDB_NODE_NODE_CALLBACK_H
|
||||
|
||||
#include "FdbError.h"
|
||||
|
||||
#include <v8.h>
|
||||
#include <cstdlib>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
#include <node.h>
|
||||
#include <node_buffer.h>
|
||||
#include <node_version.h>
|
||||
#include <foundationdb/fdb_c.h>
|
||||
|
||||
#if NODE_VERSION_AT_LEAST(0, 7, 9)
|
||||
#else
|
||||
#error Node version too old
|
||||
#endif
|
||||
|
||||
using namespace std;
|
||||
using namespace v8;
|
||||
using namespace node;
|
||||
|
||||
struct NodeCallback {
|
||||
|
||||
public:
|
||||
NodeCallback(FDBFuture *future, Persistent<Function> cbFunc) : future(future), cbFunc(cbFunc), refCount(1) {
|
||||
uv_async_init(uv_default_loop(), &handle, &NodeCallback::nodeThreadCallback);
|
||||
uv_ref((uv_handle_t*)&handle);
|
||||
handle.data = this;
|
||||
}
|
||||
|
||||
void start() {
|
||||
if (fdb_future_set_callback(future, &NodeCallback::futureReadyCallback, this)) {
|
||||
fprintf(stderr, "fdb_future_set_callback failed.\n");
|
||||
abort();
|
||||
}
|
||||
}
|
||||
|
||||
virtual ~NodeCallback() {
|
||||
cbFunc.Dispose();
|
||||
fdb_future_destroy(future);
|
||||
}
|
||||
|
||||
void addRef() {
|
||||
++refCount;
|
||||
}
|
||||
|
||||
void delRef() {
|
||||
if(--refCount == 0) {
|
||||
delete this;
|
||||
}
|
||||
}
|
||||
|
||||
FDBFuture* getFuture() {
|
||||
return future;
|
||||
}
|
||||
|
||||
private:
|
||||
void close() {
|
||||
uv_close((uv_handle_t*)&handle, &NodeCallback::closeCallback);
|
||||
}
|
||||
|
||||
static void closeCallback(uv_handle_s *handle) {
|
||||
NodeCallback *nc = (NodeCallback*)((uv_async_t*)handle)->data;
|
||||
nc->delRef();
|
||||
}
|
||||
|
||||
static void futureReadyCallback(FDBFuture *f, void *ptr) {
|
||||
NodeCallback *nc = (NodeCallback*)ptr;
|
||||
uv_async_send(&nc->handle);
|
||||
}
|
||||
|
||||
static void nodeThreadCallback(uv_async_t *handle, int status) {
|
||||
HandleScope scope;
|
||||
|
||||
NodeCallback *nc = (NodeCallback*)handle->data;
|
||||
FDBFuture *future = nc->future;
|
||||
|
||||
uv_unref((uv_handle_t*)handle);
|
||||
|
||||
Handle<Value> jsError;
|
||||
Handle<Value> jsValue;
|
||||
|
||||
fdb_error_t errorCode;
|
||||
jsValue = nc->extractValue(future, errorCode);
|
||||
if (errorCode == 0)
|
||||
jsError = Null();
|
||||
else
|
||||
jsError = FdbError::NewInstance(errorCode, fdb_get_error(errorCode));
|
||||
|
||||
Handle<Value> args[2] = { jsError, jsValue };
|
||||
|
||||
v8::TryCatch ex;
|
||||
nc->cbFunc->Call(Context::GetCurrent()->Global(), 2, args);
|
||||
|
||||
if(ex.HasCaught())
|
||||
fprintf(stderr, "\n%s\n", *String::AsciiValue(ex.StackTrace()->ToString()));
|
||||
|
||||
nc->close();
|
||||
}
|
||||
|
||||
FDBFuture* future;
|
||||
uv_async_t handle;
|
||||
Persistent<Function> cbFunc;
|
||||
int refCount;
|
||||
|
||||
protected:
|
||||
virtual Handle<Value> extractValue(FDBFuture* future, fdb_error_t& outErr) = 0;
|
||||
|
||||
static Handle<Value> makeBuffer(const char *arr, int length) {
|
||||
HandleScope scope;
|
||||
|
||||
Buffer *buf = Buffer::New(length);
|
||||
Local<Object> slowBufferHandle = Local<Object>::New( buf->handle_ ); // Else the buffer, which has only a weak handle to itself, could be freed by GC in one of the below calls...
|
||||
memcpy(Buffer::Data(buf), (const char*)arr, length);
|
||||
|
||||
Local<Object> globalObj = Context::GetCurrent()->Global();
|
||||
Local<Function> bufferConstructor = Local<Function>::Cast(globalObj->Get(String::NewSymbol("Buffer")));
|
||||
|
||||
Handle<Value> constructorArgs[3] = { slowBufferHandle, Integer::New(length), Integer::New(0) };
|
||||
Handle<Object> actualBuffer = bufferConstructor->NewInstance(3, constructorArgs);
|
||||
|
||||
return scope.Close(actualBuffer);
|
||||
}
|
||||
};
|
||||
|
||||
#endif
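
NodeCallback is the bridge between fdb_c futures and the Node event loop: futureReadyCallback fires on an FDB thread, uv_async_send hops back to the JavaScript thread, and nodeThreadCallback invokes the stored function with an error-first signature. From JavaScript, every asynchronous binding call therefore looks like an ordinary (err, value) callback; a hedged sketch:

    // err is null or an FDBError; value is whatever extractValue() produced
    // (a Buffer, a number, an array, or undefined for void futures).
    tr.get(keyBuffer, false, function(err, value) {
        if (err) {
            console.error('FDB error', err.code);
            return;
        }
        console.log(value === null ? '<key not present>' : value.toString());
    });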
|
|
@@ -1,485 +0,0 @@
|
|||
/*
|
||||
* Transaction.cpp
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#include <node.h>
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
#include <cstring>
|
||||
#include <vector>
|
||||
#include <node_buffer.h>
|
||||
#include <node_version.h>
|
||||
|
||||
#include "Transaction.h"
|
||||
#include "NodeCallback.h"
|
||||
#include "FdbError.h"
|
||||
#include "FdbOptions.h"
|
||||
|
||||
using namespace v8;
|
||||
using namespace std;
|
||||
using namespace node;
|
||||
|
||||
// Transaction Implementation
|
||||
Transaction::Transaction() { };
|
||||
|
||||
Transaction::~Transaction() {
|
||||
fdb_transaction_destroy(tr);
|
||||
};
|
||||
|
||||
Persistent<Function> Transaction::constructor;
|
||||
|
||||
struct NodeValueCallback : NodeCallback {
|
||||
|
||||
NodeValueCallback(FDBFuture *future, Persistent<Function> cbFunc) : NodeCallback(future, cbFunc) { }
|
||||
|
||||
virtual Handle<Value> extractValue(FDBFuture* future, fdb_error_t& outErr) {
|
||||
HandleScope scope;
|
||||
|
||||
const char *value;
|
||||
int valueLength;
|
||||
int valuePresent;
|
||||
|
||||
outErr = fdb_future_get_value(future, &valuePresent, (const uint8_t**)&value, &valueLength);
|
||||
if (outErr) return scope.Close(Undefined());
|
||||
|
||||
Handle<Value> jsValue;
|
||||
|
||||
if(!valuePresent)
|
||||
jsValue = Null();
|
||||
else
|
||||
jsValue = makeBuffer(value, valueLength);
|
||||
|
||||
return scope.Close(jsValue);
|
||||
}
|
||||
};
|
||||
|
||||
struct NodeKeyCallback : NodeCallback {
|
||||
|
||||
NodeKeyCallback(FDBFuture *future, Persistent<Function> cbFunc) : NodeCallback(future, cbFunc) { }
|
||||
|
||||
virtual Handle<Value> extractValue(FDBFuture* future, fdb_error_t& outErr) {
|
||||
HandleScope scope;
|
||||
|
||||
const char *key;
|
||||
int keyLength;
|
||||
|
||||
outErr = fdb_future_get_key(future, (const uint8_t**)&key, &keyLength);
|
||||
if (outErr) return scope.Close(Undefined());
|
||||
|
||||
Handle<Value> jsValue = makeBuffer(key, keyLength);
|
||||
|
||||
return scope.Close(jsValue);
|
||||
}
|
||||
};
|
||||
|
||||
struct NodeVoidCallback : NodeCallback {
|
||||
|
||||
NodeVoidCallback(FDBFuture *future, Persistent<Function> cbFunc) : NodeCallback(future, cbFunc) { }
|
||||
|
||||
virtual Handle<Value> extractValue(FDBFuture* future, fdb_error_t& outErr) {
|
||||
outErr = fdb_future_get_error(future);
|
||||
return Undefined();
|
||||
}
|
||||
};
|
||||
|
||||
struct NodeKeyValueCallback : NodeCallback {
|
||||
|
||||
NodeKeyValueCallback(FDBFuture *future, Persistent<Function> cbFunc) : NodeCallback(future, cbFunc) { }
|
||||
|
||||
virtual Handle<Value> extractValue(FDBFuture* future, fdb_error_t& outErr) {
|
||||
HandleScope scope;
|
||||
|
||||
const FDBKeyValue *kv;
|
||||
int len;
|
||||
fdb_bool_t more;
|
||||
|
||||
outErr = fdb_future_get_keyvalue_array(future, &kv, &len, &more);
|
||||
if (outErr) return scope.Close(Undefined());
|
||||
|
||||
/*
|
||||
* Constructing a JavaScript array of KeyValue objects:
|
||||
* {
|
||||
* key: "some key",
|
||||
* value: "some value"
|
||||
* }
|
||||
*
|
||||
*/
|
||||
|
||||
Handle<Object> returnObj = Object::New();
|
||||
Handle<Array> jsValueArray = Array::New(len);
|
||||
|
||||
Handle<String> keySymbol = String::NewSymbol("key");
|
||||
Handle<String> valueSymbol = String::NewSymbol("value");
|
||||
|
||||
for(int i = 0; i < len; i++) {
|
||||
Local<Object> jsKeyValue = Object::New();
|
||||
|
||||
Handle<Value> jsKeyBuffer = makeBuffer((const char*)kv[i].key, kv[i].key_length);
|
||||
Handle<Value> jsValueBuffer = makeBuffer((const char*)kv[i].value, kv[i].value_length);
|
||||
|
||||
jsKeyValue->Set(keySymbol, jsKeyBuffer);
|
||||
jsKeyValue->Set(valueSymbol, jsValueBuffer);
|
||||
jsValueArray->Set(Number::New(i), jsKeyValue);
|
||||
}
|
||||
|
||||
returnObj->Set(String::NewSymbol("array"), jsValueArray);
|
||||
if(more)
|
||||
returnObj->Set(String::NewSymbol("more"), Number::New(1));
|
||||
|
||||
return scope.Close(returnObj);
|
||||
}
|
||||
};
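
The comment above describes the shape handed to a raw range-read callback: an object holding an array of {key, value} Buffer pairs plus a numeric more flag when the read was truncated. A hedged consumer sketch (argument values are illustrative and mirror Transaction::GetRange below):

    tr.getRange(beginKey, 0, 1, endKey, 0, 1,           // two key selectors: (key, orEqual, offset)
                0, fdb.streamingMode.wantAll, 1,        // limit, streaming mode, iteration
                false, false,                           // snapshot, reverse
                function(err, result) {
        if (err) throw err;
        result.array.forEach(function(kv) {
            console.log(kv.key.toString(), '=>', kv.value.toString());
        });
        if (result.more)
            console.log('range was truncated; continue from the last key');
    });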
|
||||
|
||||
struct NodeVersionCallback : NodeCallback {
|
||||
|
||||
NodeVersionCallback(FDBFuture *future, Persistent<Function> cbFunc) : NodeCallback(future, cbFunc) { }
|
||||
|
||||
virtual Handle<Value> extractValue(FDBFuture* future, fdb_error_t& outErr) {
|
||||
HandleScope scope;
|
||||
|
||||
int64_t version;
|
||||
|
||||
outErr = fdb_future_get_version(future, &version);
|
||||
if (outErr) return scope.Close(Undefined());
|
||||
|
||||
//SOMEDAY: This limits the version to 53-bits. Do something different here?
|
||||
Handle<Value> jsValue = Number::New((double)version);
|
||||
|
||||
return scope.Close(jsValue);
|
||||
}
|
||||
};
|
||||
|
||||
struct NodeStringArrayCallback : NodeCallback {
|
||||
|
||||
NodeStringArrayCallback(FDBFuture *future, Persistent<Function> cbFunc) : NodeCallback(future, cbFunc) { }
|
||||
|
||||
virtual Handle<Value> extractValue(FDBFuture *future, fdb_error_t& outErr) {
|
||||
HandleScope scope;
|
||||
|
||||
const char **strings;
|
||||
int stringCount;
|
||||
|
||||
outErr = fdb_future_get_string_array(future, &strings, &stringCount);
|
||||
if (outErr) return scope.Close(Undefined());
|
||||
|
||||
Handle<Array> jsArray = Array::New(stringCount);
|
||||
for(int i = 0; i < stringCount; i++)
|
||||
jsArray->Set(Number::New(i), makeBuffer(strings[i], (int)strlen(strings[i])));
|
||||
|
||||
return scope.Close(jsArray);
|
||||
}
|
||||
};
|
||||
|
||||
struct StringParams {
|
||||
uint8_t *str;
|
||||
int len;
|
||||
|
||||
/*
|
||||
* String arguments always have to be buffers to
|
||||
* preserve bytes. Otherwise, stuff gets converted
|
||||
* to UTF-8.
|
||||
*/
|
||||
StringParams(Handle<Value> keyVal) {
|
||||
str = (uint8_t*)(Buffer::Data(keyVal->ToObject()));
|
||||
len = (int)Buffer::Length(keyVal->ToObject());
|
||||
}
|
||||
};
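
The comment in StringParams is the reason the JavaScript layer trafficked in Buffers: at this level a key or value is raw bytes, and only a Buffer is passed through unmodified. A hedged sketch:

    // Keys and values are byte strings; Buffers go through verbatim, whereas plain
    // JS strings would be subject to UTF-8 re-encoding before reaching fdb_c.
    var key = new Buffer([0x01, 0xff, 0x00, 0x7f]);    // arbitrary bytes, not valid UTF-8
    tr.set(key, new Buffer('hello'));
    tr.get(key, false, function(err, val) {
        if (!err) console.log(val);                    // a Buffer, or null if the key is absent
    });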
|
||||
|
||||
FDBTransaction* Transaction::GetTransactionFromArgs(const Arguments &args) {
|
||||
return node::ObjectWrap::Unwrap<Transaction>(args.Holder())->tr;
|
||||
}
|
||||
|
||||
Persistent<Function> Transaction::GetCallback(Handle<Value> funcVal) {
|
||||
return Persistent<Function>::New(Handle<Function>(Function::Cast(*funcVal)));
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::Set(const Arguments &args){
|
||||
StringParams key(args[0]);
|
||||
StringParams val(args[1]);
|
||||
fdb_transaction_set(GetTransactionFromArgs(args), key.str, key.len, val.str, val.len);
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::Commit(const Arguments &args) {
|
||||
FDBFuture *f = fdb_transaction_commit(GetTransactionFromArgs(args));
|
||||
(new NodeVoidCallback(f, GetCallback(args[0])))->start();
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::Clear(const Arguments &args) {
|
||||
StringParams key(args[0]);
|
||||
fdb_transaction_clear(GetTransactionFromArgs(args), key.str, key.len);
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
/*
|
||||
* ClearRange takes two key strings.
|
||||
*/
|
||||
Handle<Value> Transaction::ClearRange(const Arguments &args) {
|
||||
StringParams begin(args[0]);
|
||||
StringParams end(args[1]);
|
||||
fdb_transaction_clear_range(GetTransactionFromArgs(args), begin.str, begin.len, end.str, end.len);
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
/*
|
||||
* This function takes a KeySelector and returns a future.
|
||||
*/
|
||||
Handle<Value> Transaction::GetKey(const Arguments &args) {
|
||||
StringParams key(args[0]);
|
||||
int selectorOrEqual = args[1]->Int32Value();
|
||||
int selectorOffset = args[2]->Int32Value();
|
||||
bool snapshot = args[3]->BooleanValue();
|
||||
|
||||
FDBFuture *f = fdb_transaction_get_key(GetTransactionFromArgs(args), key.str, key.len, (fdb_bool_t)selectorOrEqual, selectorOffset, snapshot);
|
||||
(new NodeKeyCallback(f, GetCallback(args[4])))->start();
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::Get(const Arguments &args) {
|
||||
StringParams key(args[0]);
|
||||
bool snapshot = args[1]->BooleanValue();
|
||||
|
||||
FDBFuture *f = fdb_transaction_get(GetTransactionFromArgs(args), key.str, key.len, snapshot);
|
||||
(new NodeValueCallback(f, GetCallback(args[2])))->start();
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::GetRange(const Arguments &args) {
|
||||
StringParams start(args[0]);
|
||||
int startOrEqual = args[1]->Int32Value();
|
||||
int startOffset = args[2]->Int32Value();
|
||||
|
||||
StringParams end(args[3]);
|
||||
int endOrEqual = args[4]->Int32Value();
|
||||
int endOffset = args[5]->Int32Value();
|
||||
|
||||
int limit = args[6]->Int32Value();
|
||||
FDBStreamingMode mode = (FDBStreamingMode)args[7]->Int32Value();
|
||||
int iteration = args[8]->Int32Value();
|
||||
bool snapshot = args[9]->BooleanValue();
|
||||
bool reverse = args[10]->BooleanValue();
|
||||
|
||||
FDBFuture *f = fdb_transaction_get_range(GetTransactionFromArgs(args), start.str, start.len, (fdb_bool_t)startOrEqual, startOffset,
|
||||
end.str, end.len, (fdb_bool_t)endOrEqual, endOffset, limit, 0, mode, iteration, snapshot, reverse);
|
||||
|
||||
(new NodeKeyValueCallback(f, GetCallback(args[11])))->start();
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::Watch(const Arguments &args) {
|
||||
HandleScope scope;
|
||||
|
||||
Transaction *trPtr = node::ObjectWrap::Unwrap<Transaction>(args.Holder());
|
||||
|
||||
uint8_t *keyStr = (uint8_t*)(Buffer::Data(args[0]->ToObject()));
|
||||
int keyLen = (int)Buffer::Length(args[0]->ToObject());
|
||||
|
||||
Persistent<Function> cb = Persistent<Function>::New(Handle<Function>(Function::Cast(*args[1])));
|
||||
|
||||
FDBFuture *f = fdb_transaction_watch(trPtr->tr, keyStr, keyLen);
|
||||
NodeVoidCallback *callback = new NodeVoidCallback(f, cb);
|
||||
Handle<Value> watch = Watch::NewInstance(callback);
|
||||
|
||||
callback->start();
|
||||
return scope.Close(watch);
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::AddConflictRange(const Arguments &args, FDBConflictRangeType type) {
|
||||
StringParams start(args[0]);
|
||||
StringParams end(args[1]);
|
||||
|
||||
fdb_error_t errorCode = fdb_transaction_add_conflict_range(GetTransactionFromArgs(args), start.str, start.len, end.str, end.len, type);
|
||||
|
||||
if(errorCode != 0) {
|
||||
ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
return Undefined();
|
||||
}
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::AddReadConflictRange(const Arguments &args) {
|
||||
return AddConflictRange(args, FDB_CONFLICT_RANGE_TYPE_READ);
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::AddWriteConflictRange(const Arguments &args) {
|
||||
return AddConflictRange(args, FDB_CONFLICT_RANGE_TYPE_WRITE);
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::OnError(const Arguments &args) {
|
||||
fdb_error_t errorCode = args[0]->Int32Value();
|
||||
FDBFuture *f = fdb_transaction_on_error(GetTransactionFromArgs(args), errorCode);
|
||||
(new NodeVoidCallback(f, GetCallback(args[1])))->start();
|
||||
return Null();
|
||||
}
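
OnError is the hook behind the standard FoundationDB retry loop: after a failed commit the error code is handed back to the transaction, fdb_transaction_on_error decides whether it is retryable, waits out any backoff, and only then invokes the callback so the work can run again on the reset transaction. A hedged sketch of that loop at the addon level (the shipped JavaScript layer wrapped this pattern; the names here are illustrative):

    function runTransaction(db, work, done) {
        var tr = db.createTransaction();
        function attempt() {
            work(tr, function(workErr) {
                if (workErr) return done(workErr);
                tr.commit(function(commitErr) {
                    if (!commitErr) return done(null);
                    tr.onError(commitErr.code, function(retryErr) {
                        if (retryErr) return done(retryErr);   // not retryable
                        attempt();                             // transaction was reset; try again
                    });
                });
            });
        }
        attempt();
    }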
|
||||
|
||||
Handle<Value> Transaction::Reset(const Arguments &args) {
|
||||
fdb_transaction_reset(GetTransactionFromArgs(args));
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::SetReadVersion(const Arguments &args) {
|
||||
int64_t version = args[0]->IntegerValue();
|
||||
fdb_transaction_set_read_version(GetTransactionFromArgs(args), version);
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::GetReadVersion(const Arguments &args) {
|
||||
FDBFuture *f = fdb_transaction_get_read_version(GetTransactionFromArgs(args));
|
||||
(new NodeVersionCallback(f, GetCallback(args[0])))->start();
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::GetCommittedVersion(const Arguments &args) {
|
||||
HandleScope scope;
|
||||
|
||||
int64_t version;
|
||||
fdb_error_t errorCode = fdb_transaction_get_committed_version(GetTransactionFromArgs(args), &version);
|
||||
|
||||
if(errorCode != 0) {
|
||||
ThrowException(FdbError::NewInstance(errorCode, fdb_get_error(errorCode)));
|
||||
return scope.Close(Undefined());
|
||||
}
|
||||
|
||||
return scope.Close(Number::New((double)version));
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::GetVersionstamp(const Arguments &args) {
|
||||
FDBFuture *f = fdb_transaction_get_versionstamp(GetTransactionFromArgs(args));
|
||||
(new NodeKeyCallback(f, GetCallback(args[0])))->start();
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::Cancel(const Arguments &args) {
|
||||
fdb_transaction_cancel(GetTransactionFromArgs(args));
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::GetAddressesForKey(const Arguments &args) {
|
||||
StringParams key(args[0]);
|
||||
|
||||
FDBFuture *f = fdb_transaction_get_addresses_for_key(GetTransactionFromArgs(args), key.str, key.len);
|
||||
(new NodeStringArrayCallback(f, GetCallback(args[1])))->start();
|
||||
return Null();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::New(const Arguments &args) {
|
||||
Transaction *tr = new Transaction();
|
||||
tr->Wrap(args.Holder());
|
||||
|
||||
return args.Holder();
|
||||
}
|
||||
|
||||
Handle<Value> Transaction::NewInstance(FDBTransaction *ptr) {
|
||||
HandleScope scope;
|
||||
|
||||
Local<Object> instance = constructor->NewInstance();
|
||||
|
||||
Transaction *trObj = ObjectWrap::Unwrap<Transaction>(instance);
|
||||
trObj->tr = ptr;
|
||||
|
||||
instance->Set(String::NewSymbol("options"), FdbOptions::CreateOptions(FdbOptions::TransactionOption, instance));
|
||||
|
||||
return scope.Close(instance);
|
||||
}
|
||||
|
||||
void Transaction::Init() {
|
||||
Local<FunctionTemplate> tpl = FunctionTemplate::New(New);
|
||||
|
||||
tpl->SetClassName(String::NewSymbol("Transaction"));
|
||||
tpl->InstanceTemplate()->SetInternalFieldCount(1);
|
||||
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("get"), FunctionTemplate::New(Get)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("getRange"), FunctionTemplate::New(GetRange)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("getKey"), FunctionTemplate::New(GetKey)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("watch"), FunctionTemplate::New(Watch)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("set"), FunctionTemplate::New(Set)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("commit"), FunctionTemplate::New(Commit)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("clear"), FunctionTemplate::New(Clear)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("clearRange"), FunctionTemplate::New(ClearRange)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("addReadConflictRange"), FunctionTemplate::New(AddReadConflictRange)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("addWriteConflictRange"), FunctionTemplate::New(AddWriteConflictRange)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("onError"), FunctionTemplate::New(OnError)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("reset"), FunctionTemplate::New(Reset)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("getReadVersion"), FunctionTemplate::New(GetReadVersion)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("setReadVersion"), FunctionTemplate::New(SetReadVersion)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("getCommittedVersion"), FunctionTemplate::New(GetCommittedVersion)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("getVersionstamp"), FunctionTemplate::New(GetVersionstamp)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("cancel"), FunctionTemplate::New(Cancel)->GetFunction());
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("getAddressesForKey"), FunctionTemplate::New(GetAddressesForKey)->GetFunction());
|
||||
|
||||
constructor = Persistent<Function>::New(tpl->GetFunction());
|
||||
}
|
||||
|
||||
// Watch implementation
|
||||
Watch::Watch() : callback(NULL) { };
|
||||
|
||||
Watch::~Watch() {
|
||||
if(callback) {
|
||||
if(callback->getFuture())
|
||||
fdb_future_cancel(callback->getFuture());
|
||||
|
||||
callback->delRef();
|
||||
}
|
||||
};
|
||||
|
||||
Persistent<Function> Watch::constructor;
|
||||
|
||||
Handle<Value> Watch::NewInstance(NodeCallback *callback) {
|
||||
HandleScope scope;
|
||||
|
||||
Local<Object> instance = constructor->NewInstance();
|
||||
|
||||
Watch *watchObj = ObjectWrap::Unwrap<Watch>(instance);
|
||||
watchObj->callback = callback;
|
||||
callback->addRef();
|
||||
|
||||
return scope.Close(instance);
|
||||
}
|
||||
|
||||
Handle<Value> Watch::New(const Arguments &args) {
|
||||
Watch *c = new Watch();
|
||||
c->Wrap(args.Holder());
|
||||
|
||||
return args.Holder();
|
||||
}
|
||||
|
||||
Handle<Value> Watch::Cancel(const Arguments &args) {
|
||||
NodeCallback *callback = node::ObjectWrap::Unwrap<Watch>(args.Holder())->callback;
|
||||
|
||||
if(callback && callback->getFuture())
|
||||
fdb_future_cancel(callback->getFuture());
|
||||
|
||||
return Null();
|
||||
}
|
||||
|
||||
void Watch::Init() {
|
||||
Local<FunctionTemplate> tpl = FunctionTemplate::New(New);
|
||||
tpl->SetClassName(String::NewSymbol("Watch"));
|
||||
tpl->InstanceTemplate()->SetInternalFieldCount(1);
|
||||
|
||||
tpl->PrototypeTemplate()->Set(String::NewSymbol("cancel"), FunctionTemplate::New(Cancel)->GetFunction());
|
||||
|
||||
constructor = Persistent<Function>::New(tpl->GetFunction());
|
||||
}
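
Watches follow the same future-plus-callback pattern: tr.watch registers the key and returns a Watch handle immediately, the callback fires once the key's value changes (after the watching transaction commits), and cancel() cancels the underlying future. A hedged usage sketch:

    var watch = tr.watch(new Buffer('some key'), function(err) {
        if (err) return console.error('watch error', err.code);
        console.log('key changed');
    });
    tr.commit(function(err) {
        if (err) throw err;
        // ... later, if the notification is no longer needed:
        // watch.cancel();
    });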
|
|
@@ -1,92 +0,0 @@
|
|||
/*
|
||||
* Transaction.h
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
|
||||
#ifndef FDB_NODE_TRANSACTION_H
|
||||
#define FDB_NODE_TRANSACTION_H
|
||||
|
||||
#include "Version.h"
|
||||
|
||||
#include <foundationdb/fdb_c.h>
|
||||
#include <node.h>
|
||||
|
||||
#include "NodeCallback.h"
|
||||
|
||||
class Transaction: public node::ObjectWrap {
|
||||
public:
|
||||
static void Init();
|
||||
static v8::Handle<v8::Value> NewInstance(FDBTransaction *ptr);
|
||||
static v8::Handle<v8::Value> New(const v8::Arguments &args);
|
||||
|
||||
static v8::Handle<v8::Value> Get(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> GetKey(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> Set(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> Commit(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> Clear(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> ClearRange(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> GetRange(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> Watch(const v8::Arguments &args);
|
||||
|
||||
static v8::Handle<v8::Value> AddConflictRange(const v8::Arguments &args, FDBConflictRangeType type);
|
||||
static v8::Handle<v8::Value> AddReadConflictRange(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> AddWriteConflictRange(const v8::Arguments &args);
|
||||
|
||||
static v8::Handle<v8::Value> OnError(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> Reset(const v8::Arguments &args);
|
||||
|
||||
static v8::Handle<v8::Value> SetReadVersion(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> GetReadVersion(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> GetCommittedVersion(const v8::Arguments &args);
|
||||
static v8::Handle<v8::Value> GetVersionstamp(const v8::Arguments &args);
|
||||
|
||||
static v8::Handle<v8::Value> Cancel(const v8::Arguments &args);
|
||||
|
||||
static v8::Handle<v8::Value> GetAddressesForKey(const v8::Arguments &args);
|
||||
|
||||
FDBTransaction* GetTransaction() { return tr; }
|
||||
private:
|
||||
Transaction();
|
||||
~Transaction();
|
||||
|
||||
static v8::Persistent<v8::Function> constructor;
|
||||
FDBTransaction *tr;
|
||||
|
||||
static FDBTransaction* GetTransactionFromArgs(const v8::Arguments &args);
|
||||
static v8::Persistent<v8::Function> GetCallback(const v8::Handle<v8::Value> funcVal);
|
||||
};
|
||||
|
||||
class Watch : public node::ObjectWrap {
|
||||
public:
|
||||
static void Init();
|
||||
|
||||
static v8::Handle<v8::Value> NewInstance(NodeCallback *callback);
|
||||
static v8::Handle<v8::Value> New(const v8::Arguments &args);
|
||||
|
||||
static v8::Handle<v8::Value> Cancel(const v8::Arguments &args);
|
||||
|
||||
private:
|
||||
Watch();
|
||||
~Watch();
|
||||
|
||||
static v8::Persistent<v8::Function> constructor;
|
||||
NodeCallback *callback;
|
||||
};
|
||||
|
||||
#endif
|
|
@@ -1,27 +0,0 @@
/*
 * Version.h
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */


#ifndef FDB_NODE_VERSION_H
#define FDB_NODE_VERSION_H

#define FDB_API_VERSION 510

#endif
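
FDB_API_VERSION pins this binding to C API version 510, and the JavaScript entry point had to select a compatible version before doing anything else, exactly as the bundled test below does (there with the older version 200). A minimal sketch against this release:

    // Select the API version the addon was compiled against (FDB_API_VERSION above).
    var fdb = require('../lib/fdb').apiVersion(510);
    fdb.open(null, null, function(err, db) {
        if (err) throw err;
        // ... use db.createTransaction(), etc. ...
    });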
@@ -1,51 +0,0 @@
/*
 * async_test.js
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

var fdb = require('../lib/fdb').apiVersion(200);

fdb.open(null, null, function(dbErr, dbVal) {
    fdb.open(null, null, function(dbErr, dbVal) {
        if(dbVal == null)
            console.log("database is null");
        console.log("created database", dbErr);
        console.log(JSON.stringify(dbVal));
        var tr = dbVal.createTransaction();
        console.log("created transaction");

        tr.get('foo', function(err, val) {
            console.log("get called", val, err);
            tr.set('foo', 'bar');
            tr.commit(function(err) {
                console.log("commit called", err);
                var x = tr.get('foo');
                x(function(err, val) {
                    console.log("get called", val.toString(), err);
                    tr.clear('foo')
                    tr.commit(function(err) {
                        console.log("commit called", err);
                        tr.get('foo', function(err, val) {
                            console.log("get called", val, err);
                        });
                    });
                });
            });
        });
    });
});
@@ -1,304 +0,0 @@
|
|||
/*
|
||||
* directory_extension.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var util = require('util');
|
||||
var fdb = require('../lib/fdb.js').apiVersion(parseInt(process.argv[3]));
|
||||
var fdbUtil = require('../lib/fdbUtil.js');
|
||||
var dirUtil = require('./directory_util.js');
|
||||
|
||||
var logAll = false;
|
||||
|
||||
var logInstructions = false;
|
||||
var logOps = false;
|
||||
var logDirs = false;
|
||||
var logErrors = false;
|
||||
|
||||
var logOp = function(message, force) {
|
||||
if(logOps || logAll || force)
|
||||
console.log(message);
|
||||
};
|
||||
|
||||
var DirectoryExtension = function() {
|
||||
this.dirList = [fdb.directory];
|
||||
this.dirIndex = 0;
|
||||
this.errorIndex = 0;
|
||||
};
|
||||
|
||||
DirectoryExtension.prototype.processInstruction = function(inst, cb) {
|
||||
var self = this;
|
||||
var directory = this.dirList[this.dirIndex];
|
||||
|
||||
var promiseCb = function(err) {
|
||||
if(err && (logErrors || logAll)) {
|
||||
console.log(err);
|
||||
//console.log(err.stack);
|
||||
}
|
||||
|
||||
dirUtil.pushError(self, inst, err);
|
||||
cb();
|
||||
};
|
||||
|
||||
var appendDir = function(dir) {
|
||||
if(logDirs || logAll)
|
||||
console.log(util.format('pushed at %d (op=%s)', self.dirList.length, inst.op));
|
||||
|
||||
self.dirList.push(dir);
|
||||
};
|
||||
|
||||
if(logAll || logInstructions)
|
||||
console.log(inst.context.instructionIndex, inst.tokens[0].toString());
|
||||
|
||||
if(inst.op === 'DIRECTORY_CREATE_SUBSPACE') {
|
||||
dirUtil.popTuples(inst)
|
||||
.then(function(path) {
|
||||
return inst.pop()
|
||||
.then(function(rawPrefix) {
|
||||
logOp(util.format('created subspace at (%s): %s', path, fdb.buffer.printable(rawPrefix)));
|
||||
appendDir(new fdb.Subspace(path, rawPrefix));
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_CREATE_LAYER') {
|
||||
inst.pop({count: 3})
|
||||
.then(function(params) {
|
||||
var index1 = params[0];
|
||||
var index2 = params[1];
|
||||
var allowManualPrefixes = params[2];
|
||||
|
||||
var dir1 = self.dirList[params[0]];
|
||||
var dir2 = self.dirList[params[1]];
|
||||
if(dir1 === null || dir2 === null) {
|
||||
logOp('create directory layer: None');
|
||||
appendDir(null);
|
||||
}
|
||||
else {
|
||||
logOp(util.format('create directory layer: node_subspace (%d) = %s, content_subspace (%d) = %s, allow_manual_prefixes = %d', index1, fdb.buffer.printable(dir1.rawPrefix), index2, fdb.buffer.printable(dir2.rawPrefix), allowManualPrefixes));
|
||||
appendDir(new fdb.DirectoryLayer({ nodeSubspace: dir1,
|
||||
contentSubspace: dir2,
|
||||
allowManualPrefixes: allowManualPrefixes === 1 }));
|
||||
}
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_CHANGE') {
|
||||
inst.pop()
|
||||
.then(function(index) {
|
||||
if(self.dirList[index] === null)
|
||||
self.dirIndex = self.errorIndex;
|
||||
else
|
||||
self.dirIndex = index;
|
||||
|
||||
if(logDirs || logAll) {
|
||||
var dir = self.dirList[self.dirIndex];
|
||||
console.log(util.format('changed directory to %d ((%s))', self.dirIndex, dir._path));
|
||||
}
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_SET_ERROR_INDEX') {
|
||||
inst.pop()
|
||||
.then(function(errorIndex) {
|
||||
self.errorIndex = errorIndex;
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_CREATE_OR_OPEN') {
|
||||
dirUtil.popTuples(inst)
|
||||
.then(function(path) {
|
||||
return inst.pop()
|
||||
.then(function(layer) {
|
||||
logOp(util.format('create_or_open (%s): layer=%s', directory._path + path, fdb.buffer.printable(layer) || ''));
|
||||
return directory.createOrOpen(inst.tr, path, {'layer': layer || undefined});
|
||||
})
|
||||
.then(function(dir) {
|
||||
appendDir(dir);
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_CREATE') {
|
||||
dirUtil.popTuples(inst)
|
||||
.then(function(path) {
|
||||
return inst.pop({count: 2})
|
||||
.then(function(params) {
|
||||
logOp(util.format('create (%s): layer=%s, prefix=%s', directory._path + path, fdb.buffer.printable(params[0]) || '', fdb.buffer.printable(params[1] || '')));
|
||||
return directory.create(inst.tr, path, {'layer': params[0] || undefined, 'prefix': params[1] || undefined});
|
||||
})
|
||||
.then(function(dir) {
|
||||
appendDir(dir);
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_OPEN') {
|
||||
dirUtil.popTuples(inst)
|
||||
.then(function(path) {
|
||||
return inst.pop()
|
||||
.then(function(layer) {
|
||||
logOp(util.format('open (%s): layer=%s', directory._path + path, fdb.buffer.printable(layer) || ''));
|
||||
return directory.open(inst.tr, path, {'layer': layer});
|
||||
})
|
||||
.then(function(dir) {
|
||||
appendDir(dir);
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_MOVE') {
|
||||
dirUtil.popTuples(inst, 2)
|
||||
.then(function(paths) {
|
||||
logOp(util.format('move (%s) to (%s)', directory._path + paths[0], directory._path + paths[1]));
|
||||
return directory.move(inst.tr, paths[0], paths[1])
|
||||
.then(function(dir) {
|
||||
appendDir(dir);
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_MOVE_TO') {
|
||||
dirUtil.popTuples(inst)
|
||||
.then(function(newAbsolutePath) {
|
||||
logOp(util.format('move (%s) to (%s)', directory._path, newAbsolutePath));
|
||||
return directory.moveTo(inst.tr, newAbsolutePath)
|
||||
.then(function(dir) {
|
||||
appendDir(dir);
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_REMOVE') {
|
||||
inst.pop()
|
||||
.then(function(count) {
|
||||
return dirUtil.popTuples(inst, count)
|
||||
.then(function(path) {
|
||||
logOp(util.format('remove (%s)', directory._path + (path ? path : '')));
|
||||
return directory.remove(inst.tr, path);
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_REMOVE_IF_EXISTS') {
|
||||
inst.pop()
|
||||
.then(function(count) {
|
||||
return dirUtil.popTuples(inst, count)
|
||||
.then(function(path) {
|
||||
logOp(util.format('remove_if_exists (%s)', directory._path + (path ? path : '')));
|
||||
return directory.removeIfExists(inst.tr, path);
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_LIST') {
|
||||
inst.pop()
|
||||
.then(function(count) {
|
||||
return dirUtil.popTuples(inst, count)
|
||||
.then(function(path) {
|
||||
return directory.list(inst.tr, path);
|
||||
})
|
||||
.then(function(children) {
|
||||
inst.push(fdb.tuple.pack(children));
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_EXISTS') {
|
||||
var path;
|
||||
inst.pop()
|
||||
.then(function(count) {
|
||||
return dirUtil.popTuples(inst, count)
|
||||
.then(function(p) {
|
||||
path = p;
|
||||
return directory.exists(inst.tr, path);
|
||||
})
|
||||
.then(function(exists) {
|
||||
logOp(util.format('exists (%s): %d', directory._path + (path ? path : ''), exists ? 1 : 0));
|
||||
inst.push(exists ? 1 : 0);
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_PACK_KEY') {
|
||||
dirUtil.popTuples(inst)
|
||||
.then(function(keyTuple) {
|
||||
inst.push(directory.pack(keyTuple));
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_UNPACK_KEY') {
|
||||
inst.pop()
|
||||
.then(function(key) {
|
||||
logOp(util.format('unpack %s in subspace with prefix %s', fdb.buffer.printable(key), fdb.buffer.printable(directory.rawPrefix)));
|
||||
var tup = directory.unpack(key);
|
||||
for(var i = 0; i < tup.length; ++i)
|
||||
inst.push(tup[i]);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_RANGE') {
|
||||
dirUtil.popTuples(inst)
|
||||
.then(function(tup) {
|
||||
var rng = directory.range(tup);
|
||||
inst.push(rng.begin);
|
||||
inst.push(rng.end);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_CONTAINS') {
|
||||
inst.pop()
|
||||
.then(function(key) {
|
||||
inst.push(directory.contains(key) ? 1 : 0);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_OPEN_SUBSPACE') {
|
||||
dirUtil.popTuples(inst)
|
||||
.then(function(path) {
|
||||
logOp(util.format('open_subspace (%s)', path));
|
||||
appendDir(directory.subspace(path));
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_LOG_SUBSPACE') {
|
||||
inst.pop()
|
||||
.then(function(prefix) {
|
||||
inst.tr.set(Buffer.concat([prefix, fdb.tuple.pack([self.dirIndex])]), directory.key());
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_LOG_DIRECTORY') {
|
||||
inst.pop()
|
||||
.then(function(prefix) {
|
||||
var exists;
|
||||
return directory.exists(inst.tr)
|
||||
.then(function(e) {
|
||||
exists = e;
|
||||
if(exists)
|
||||
return directory.list(inst.tr);
|
||||
else
|
||||
return [];
|
||||
})
|
||||
.then(function(children) {
|
||||
var logSubspace = new fdb.Subspace([self.dirIndex], prefix);
|
||||
inst.tr.set(logSubspace.get('path'), fdb.tuple.pack(directory.getPath()));
|
||||
inst.tr.set(logSubspace.get('layer'), fdb.tuple.pack([directory.getLayer()]));
|
||||
inst.tr.set(logSubspace.get('exists'), fdb.tuple.pack([exists ? 1 : 0]));
|
||||
inst.tr.set(logSubspace.get('children'), fdb.tuple.pack(children));
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_STRIP_PREFIX') {
|
||||
inst.pop()
|
||||
.then(function(str) {
|
||||
if(!fdbUtil.buffersEqual(fdb.buffer(str).slice(0, directory.key().length), directory.key()))
|
||||
throw new Error('String ' + str + ' does not start with raw prefix ' + directory.key());
|
||||
|
||||
inst.push(str.slice(directory.key().length));
|
||||
})(promiseCb);
|
||||
}
|
||||
else {
|
||||
throw new Error('Unknown op: ' + inst.op);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = DirectoryExtension;
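Note: every handler above follows the same shape — a future produced by inst.pop() or popTuples is chained with .then(...) and the resulting future is then invoked directly with promiseCb. That works because the removed binding's futures were both thenable and callable with a node-style callback. A minimal self-contained sketch of that pattern (createFuture and wrap are illustrative names, not the binding's real future.js):

var createFuture = (function() {
  // Wrap a native Promise so the result is thenable *and* callable with cb(err, value).
  function wrap(promise) {
    var future = function(cb) {
      promise.then(function(value) { cb(undefined, value); },
                   function(err) { cb(err); });
    };
    future.then = function(onFulfilled, onRejected) {
      return wrap(promise.then(onFulfilled, onRejected));
    };
    return future;
  }

  return function(executor) {
    return wrap(new Promise(function(resolve, reject) {
      // The executor receives a node-style callback, mirroring fdb.future.create above.
      executor(function(err, value) {
        if (err) reject(err); else resolve(value);
      });
    }));
  };
})();

// Usage, mirroring the handlers above: chain with .then, finish with a callback.
var f = createFuture(function(cb) { setTimeout(function() { cb(null, 'foo'); }, 10); });
f.then(function(v) { return v + '!'; })(function(err, v) { console.log(err, v); });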
@@ -1,77 +0,0 @@
/*
|
||||
* directory_util.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
var fdb = require('../lib/fdb.js').apiVersion(parseInt(process.argv[3]));
|
||||
var util = require('../lib/fdbUtil.js');
|
||||
|
||||
var opsThatCreateDirs = [
|
||||
'DIRECTORY_CREATE_SUBSPACE',
|
||||
'DIRECTORY_CREATE_LAYER',
|
||||
'DIRECTORY_CREATE_OR_OPEN',
|
||||
'DIRECTORY_CREATE',
|
||||
'DIRECTORY_OPEN',
|
||||
'DIRECTORY_MOVE',
|
||||
'DIRECTORY_MOVE_TO',
|
||||
'DIRECTORY_OPEN_SUBSPACE'
|
||||
];
|
||||
|
||||
var popTuples = function(inst, num, cb) {
|
||||
if(typeof num === 'undefined')
|
||||
num = 1;
|
||||
return fdb.future.create(function(futureCb) {
|
||||
var tuples = [];
|
||||
if(num === 0) return futureCb();
|
||||
util.whileLoop(function(loopCb) {
|
||||
inst.pop()
|
||||
.then(function(count) {
|
||||
return inst.pop({count: count})
|
||||
.then(function(tuple) {
|
||||
tuples.push(tuple);
|
||||
if(--num === 0)
|
||||
return null;
|
||||
});
|
||||
})(loopCb);
|
||||
}, function() {
|
||||
if(tuples.length == 1)
|
||||
futureCb(undefined, tuples[0]);
|
||||
else
|
||||
futureCb(undefined, tuples);
|
||||
});
|
||||
})(cb);
|
||||
};
|
||||
|
||||
var pushError = function(self, inst, err) {
|
||||
if(err) {
|
||||
//console.log(err.toString());
|
||||
//console.log(err.stack);
|
||||
inst.push(fdb.buffer('DIRECTORY_ERROR'));
|
||||
|
||||
if(opsThatCreateDirs.indexOf(inst.op) >= 0)
|
||||
self.dirList.push(null);
|
||||
}
|
||||
|
||||
return err;
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
popTuples: popTuples,
|
||||
pushError: pushError
|
||||
};
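Note: popTuples above drives util.whileLoop from the removed lib/fdbUtil.js. Judging from how it is called here and in the testers later in this diff, the body receives a loop callback: calling it with no result runs another iteration, while an error or any defined result (including null) finishes the loop. A rough self-contained stand-in under that assumption:

function whileLoop(body, done) {
  function iterate(err, result) {
    if (err || typeof result !== 'undefined') return done(err, result);
    // Schedule the next iteration asynchronously to keep the call stack flat.
    setImmediate(function() { body(iterate); });
  }
  setImmediate(function() { body(iterate); });
}

// Example: run three iterations, then stop by passing a defined result.
var i = 0;
whileLoop(function(loopCb) {
  console.log('iteration', i);
  if (++i === 3) return loopCb(undefined, null);
  loopCb();
}, function(err) {
  console.log('loop finished', err);
});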
@@ -1,62 +0,0 @@
/*
|
||||
* get_range.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
var fdb = require('../lib/fdb').apiVersion(200);
|
||||
|
||||
var db = fdb.open(null, null)
|
||||
|
||||
var tr = db.createTransaction();
|
||||
|
||||
for(var i = 0; i < 10000; i++)
|
||||
tr.set('foo' + i, 'bar' + i)
|
||||
|
||||
tr.commit(function(err) {
|
||||
if(err)
|
||||
console.log('commit error', err);
|
||||
|
||||
console.log('get range: foo-fooa');
|
||||
var itr = tr.getRange('foo', 'fooa', null);
|
||||
|
||||
itr.forEach(
|
||||
function(val, cb) {
|
||||
console.log(val.key.toString(), val.value.toString());
|
||||
cb();
|
||||
},
|
||||
function(err, res) {
|
||||
if(err)
|
||||
console.log(err);
|
||||
else {
|
||||
itr.forEach(
|
||||
function(val, cb) {
|
||||
console.log('pass2: ' + val.key.toString(), val.value.toString());
|
||||
cb();
|
||||
},
|
||||
function(err, res) {
|
||||
if(err)
|
||||
console.log(err);
|
||||
else { }
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
});
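Note: the iterator returned by getRange/getRangeStartsWith is consumed here with forEach(fn, done); the testers later in this diff also exercise forEachBatch, toArray and next. A tiny in-memory stand-in (not the binding's real iterator) showing just that callback contract:

function ArrayIterator(items) {
  this.items = items;
  this.pos = 0;
}
// next(cb): cb(err, kv), with kv === null once the iterator is exhausted.
ArrayIterator.prototype.next = function(cb) {
  var kv = this.pos < this.items.length ? this.items[this.pos++] : null;
  setImmediate(function() { cb(null, kv); });
};
// forEach(fn, done): fn(kv, cb) is called per item and must invoke cb to continue.
ArrayIterator.prototype.forEach = function(fn, done) {
  var self = this;
  self.next(function(err, kv) {
    if (err || kv === null) return done(err);
    fn(kv, function(fnErr) {
      if (fnErr) return done(fnErr);
      self.forEach(fn, done);
    });
  });
};
ArrayIterator.prototype.toArray = function(cb) {
  cb(null, this.items.slice(this.pos));
};

var itr = new ArrayIterator([{ key: 'foo0', value: 'bar0' }, { key: 'foo1', value: 'bar1' }]);
itr.forEach(function(kv, cb) { console.log(kv.key.toString(), kv.value.toString()); cb(); },
            function(err) { if (err) console.log(err); });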


@@ -1,59 +0,0 @@
/*
|
||||
* get_versionstamp.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
var bufferEqual = function (a, b) {
|
||||
if (!Buffer.isBuffer(a)) return undefined;
|
||||
if (!Buffer.isBuffer(b)) return undefined;
|
||||
if (typeof a.equals === 'function') return a.equals(b);
|
||||
if (a.length !== b.length) return false;
|
||||
|
||||
for (var i = 0; i < a.length; i++) {
|
||||
if (a[i] !== b[i]) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
var fdb = require('../lib/fdb').apiVersion(410);
|
||||
|
||||
var db = fdb.open(null, null)
|
||||
|
||||
var tr = db.createTransaction();
|
||||
|
||||
tr.getVersionstamp(function(error, vs) {
|
||||
db.get('foo', function(error, val) {
|
||||
if(bufferEqual(val, vs))
|
||||
console.log('versionstamps match!')
|
||||
else {
|
||||
console.log("versionstamps don't match!")
|
||||
console.log("database verionstamp: " + val)
|
||||
console.log("transaction versionstamp: " + vs)
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
tr.setVersionstampedValue('foo', 'blahblahbl')
|
||||
|
||||
tr.commit(function(err) {
|
||||
if(err)
|
||||
console.log('commit error', err);
|
||||
|
||||
console.log(tr.getCommittedVersion())
|
||||
});
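Note: the hand-rolled bufferEqual above guards against old Node.js versions; on any release with Buffer.prototype.equals the comparison collapses to the built-ins:

var a = Buffer.from('foo');
var b = Buffer.from('foo');
console.log(a.equals(b));                // true
console.log(Buffer.compare(a, b) === 0); // true; Buffer.compare also works as a sort comparator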
@@ -1,58 +0,0 @@
#!/usr/bin/env node
|
||||
|
||||
/*
|
||||
* promise_aplus_test.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var promisesAplusTests = require('promises-aplus-tests');
|
||||
var future = require('../lib/future.js');
|
||||
|
||||
var adapter = {
|
||||
resolved: function(value) {
|
||||
var f = future.create();
|
||||
f._state.fulfill(value);
|
||||
return f;
|
||||
},
|
||||
|
||||
rejected: function(reason) {
|
||||
var f = future.create();
|
||||
f._state.reject(reason);
|
||||
return f;
|
||||
},
|
||||
|
||||
deferred: function() {
|
||||
var f = future.create();
|
||||
|
||||
return {
|
||||
promise: f,
|
||||
resolve: function(value) {
|
||||
f._state.fulfill(value);
|
||||
},
|
||||
reject: function(reason) {
|
||||
f._state.reject(reason);
|
||||
},
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
promisesAplusTests(adapter, function(err) {
|
||||
console.log('Finished tests:', err);
|
||||
});
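Note: the adapter above exposes the resolved/rejected/deferred hooks that promises-aplus-tests expects, backed by the removed lib/future.js. For comparison, the same adapter written against native Promises (a sketch, not part of the binding):

var nativeAdapter = {
  resolved: function(value) { return Promise.resolve(value); },
  rejected: function(reason) { return Promise.reject(reason); },
  deferred: function() {
    var resolveFn, rejectFn;
    var promise = new Promise(function(resolve, reject) {
      resolveFn = resolve;
      rejectFn = reject;
    });
    return { promise: promise, resolve: resolveFn, reject: rejectFn };
  }
};

// e.g. promisesAplusTests(nativeAdapter, function(err) { console.log('Finished tests:', err); });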
@@ -1,58 +0,0 @@
/*
|
||||
* retry_test.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
var fdb = require('../lib/fdb').apiVersion(200);
|
||||
|
||||
function set(db, key, value, cb) {
|
||||
db.doTransaction(function(tr, cb2) {
|
||||
console.log("setting key");
|
||||
tr.set(new Buffer(key), new Buffer(value));
|
||||
cb2(null);
|
||||
}, cb);
|
||||
};
|
||||
|
||||
function setTxn(tr, key, value, cb) {
|
||||
console.log('calling set');
|
||||
tr.set(new Buffer(key), new Buffer(value));
|
||||
cb();
|
||||
};
|
||||
setTxn = fdb.transactional(setTxn);
|
||||
|
||||
function getAndClear(db, key, cb) {
|
||||
db.doTransaction(function(tr, cb2) {
|
||||
console.log("getting key");
|
||||
tr.get(new Buffer(key), function(err, res) {
|
||||
tr.clear(new Buffer(key));
|
||||
//setTimeout(function() { cb2(err, res); }, 6000);
|
||||
cb2(err, res);
|
||||
});
|
||||
}, cb);
|
||||
};
|
||||
|
||||
fdb.open(null, null, function(dbErr, db) {
|
||||
console.log("created database", dbErr);
|
||||
setTxn(db, 'foo', 'bar', function(err) {
|
||||
console.log("Called transactional function", err);
|
||||
getAndClear(db, 'foo', function(err, res) {
|
||||
console.log("Called get and clear", err, res.toString());
|
||||
});
|
||||
});
|
||||
});
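Note: setTxn above relies on fdb.transactional to supply the transaction, commit it, and retry on retryable errors when it is called with a Database. A rough sketch of that wrapper's shape, using the commit(cb) and callable onError(err) conventions seen elsewhere in these testers (the removed lib/fdb.js is the authoritative implementation, and it also handled being passed an existing transaction and passing results through):

function transactional(fn) {
  return function(db) {
    var args = Array.prototype.slice.call(arguments, 1, -1);
    var cb = arguments[arguments.length - 1];

    function attempt() {
      var tr = db.createTransaction();

      function retryOrFail(err) {
        // onError resolves if the error is retryable, otherwise reports it.
        tr.onError(err)(function(fatalErr) {
          if (fatalErr) cb(fatalErr);
          else attempt();
        });
      }

      fn.apply(null, [tr].concat(args, [function(err) {
        if (err) return retryOrFail(err);
        tr.commit(function(commitErr) {
          if (commitErr) return retryOrFail(commitErr);
          cb();
        });
      }]));
    }

    attempt();
  };
}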

@@ -1,33 +0,0 @@
var fdb = require('../lib/fdb').apiVersion(200);
|
||||
|
||||
function doSomething(_){
|
||||
console.log("start");
|
||||
db = fdb.open(null, null);
|
||||
db = db(_);
|
||||
db = fdb.open(null, null);
|
||||
db = db(_);
|
||||
|
||||
a = db.get('foo');
|
||||
console.log('foo = ', a(_));
|
||||
db.clear('foo', 'bar', _);
|
||||
console.log('foo = ', db.get('foo', _));
|
||||
b = db.set('foo', 'bar');
|
||||
b(_);
|
||||
console.log('foo = ', db.get('foo', _));
|
||||
|
||||
/*var tr = db.createTransaction();
|
||||
tr.set(new Buffer('foo'), new Buffer('bar'));
|
||||
tr.commit(_);
|
||||
|
||||
var a = tr.get(new Buffer('foo'))
|
||||
var b = tr.get(new Buffer('bar'))
|
||||
|
||||
console.log(a(_));
|
||||
console.log(b(_));
|
||||
|
||||
var c = tr.get(new Buffer('a'));
|
||||
console.log(c(_));*/
|
||||
}
|
||||
|
||||
doSomething(_);
|
||||
console.log("after");
@@ -1,148 +0,0 @@
"use strict";
|
||||
|
||||
var fdb = require('../lib/fdb.js').apiVersion(parseInt(process.argv[3]));
|
||||
var fdbUtil = require('../lib/fdbUtil.js');
|
||||
var dirUtil = require('./directory_util.js');
|
||||
|
||||
var StreamlineDirectoryExtension = function() {
|
||||
this.dirList = [fdb.directory];
|
||||
this.dirIndex = 0;
|
||||
this.errorIndex = 0;
|
||||
};
|
||||
|
||||
StreamlineDirectoryExtension.prototype.processInstruction = function(inst, _) {
|
||||
var directory = this.dirList[this.dirIndex];
|
||||
|
||||
try {
|
||||
//console.log(inst.op);
|
||||
|
||||
if(inst.op === 'DIRECTORY_CREATE_SUBSPACE') {
|
||||
var path = dirUtil.popTuples(inst)(_);
|
||||
var rawPrefix = inst.pop()(_);
|
||||
this.dirList.push(new fdb.Subspace(path, rawPrefix));
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_CREATE_LAYER') {
|
||||
var params = inst.pop({count: 3})(_);
|
||||
if(this.dirList[params[0]] === null || this.dirList[params[1]] === null)
|
||||
this.dirList.push(null);
|
||||
else {
|
||||
this.dirList.push(new fdb.DirectoryLayer({ nodeSubspace: this.dirList[params[0]],
|
||||
contentSubspace: this.dirList[params[1]],
|
||||
allowManualPrefixes: params[2] }));
|
||||
}
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_CHANGE') {
|
||||
var index = inst.pop()(_);
|
||||
if(this.dirList[index] === null)
|
||||
this.dirIndex = this.errorIndex;
|
||||
else
|
||||
this.dirIndex = index;
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_SET_ERROR_INDEX') {
|
||||
this.errorIndex = inst.pop()(_);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_CREATE_OR_OPEN') {
|
||||
var path = dirUtil.popTuples(inst)(_);
|
||||
var layer = inst.pop()(_);
|
||||
var dir = directory.createOrOpen(inst.tr, path, {'layer': layer || undefined})(_);
|
||||
this.dirList.push(dir);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_CREATE') {
|
||||
var path = dirUtil.popTuples(inst)(_);
|
||||
var params = inst.pop({count: 2})(_);
|
||||
var dir = directory.create(inst.tr, path, {'layer': params[0] || undefined, 'prefix': params[1] || undefined})(_);
|
||||
this.dirList.push(dir);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_OPEN') {
|
||||
var path = dirUtil.popTuples(inst)(_);
|
||||
var layer = inst.pop()(_);
|
||||
var dir = directory.open(inst.tr, path, {'layer': layer || undefined})(_);
|
||||
this.dirList.push(dir);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_MOVE') {
|
||||
var paths = dirUtil.popTuples(inst, 2)(_);
|
||||
var movedDir = directory.move(inst.tr, paths[0], paths[1])(_);
|
||||
this.dirList.push(movedDir);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_MOVE_TO') {
|
||||
var newAbsolutePath = dirUtil.popTuples(inst)(_);
|
||||
var movedDir = directory.moveTo(inst.tr, newAbsolutePath)(_);
|
||||
this.dirList.push(movedDir);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_REMOVE') {
|
||||
var count = inst.pop()(_);
|
||||
var path = dirUtil.popTuples(inst, count)(_);
|
||||
directory.remove(inst.tr, path)(_);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_REMOVE_IF_EXISTS') {
|
||||
var count = inst.pop()(_);
|
||||
var path = dirUtil.popTuples(inst, count)(_);
|
||||
directory.removeIfExists(inst.tr, path)(_);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_LIST') {
|
||||
var count = inst.pop()(_);
|
||||
var path = dirUtil.popTuples(inst, count)(_);
|
||||
var children = directory.list(inst.tr, path)(_);
|
||||
inst.push(fdb.tuple.pack(children));
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_EXISTS') {
|
||||
var count = inst.pop()(_);
|
||||
var path = dirUtil.popTuples(inst, count)(_);
|
||||
var exists = directory.exists(inst.tr, path)(_);
|
||||
inst.push(exists ? 1 : 0);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_PACK_KEY') {
|
||||
var keyTuple = dirUtil.popTuples(inst)(_);
|
||||
inst.push(directory.pack(keyTuple));
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_UNPACK_KEY') {
|
||||
var key = inst.pop()(_);
|
||||
var tup = directory.unpack(key);
|
||||
for(var i = 0; i < tup.length; ++i)
|
||||
inst.push(tup[i]);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_RANGE') {
|
||||
var tup = dirUtil.popTuples(inst)(_);
|
||||
var rng = directory.range(tup);
|
||||
inst.push(rng.begin);
|
||||
inst.push(rng.end);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_CONTAINS') {
|
||||
var key = inst.pop()(_);
|
||||
inst.push(directory.contains(key) ? 1 : 0);
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_OPEN_SUBSPACE') {
|
||||
var path = dirUtil.popTuples(inst)(_);
|
||||
this.dirList.push(directory.subspace(path));
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_LOG_SUBSPACE') {
|
||||
var prefix = inst.pop()(_);
|
||||
inst.tr.set(Buffer.concat([prefix, fdb.tuple.pack([this.dirIndex])]), directory.key());
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_LOG_DIRECTORY') {
|
||||
var prefix = inst.pop()(_);
|
||||
var exists = directory.exists(inst.tr)(_);
|
||||
var children = exists ? directory.list(inst.tr)(_) : [];
|
||||
var logSubspace = new fdb.Subspace([this.dirIndex], prefix);
|
||||
inst.tr.set(logSubspace.get('path'), fdb.tuple.pack(directory.getPath()));
|
||||
inst.tr.set(logSubspace.get('layer'), fdb.tuple.pack([directory.getLayer()]));
|
||||
inst.tr.set(logSubspace.get('exists'), fdb.tuple.pack([exists ? 1 : 0]));
|
||||
inst.tr.set(logSubspace.get('children'), fdb.tuple.pack(children));
|
||||
}
|
||||
else if(inst.op === 'DIRECTORY_STRIP_PREFIX') {
|
||||
var str = inst.pop()(_);
|
||||
if(!fdbUtil.buffersEqual(fdb.buffer(str).slice(0, directory.key().length), directory.key()))
|
||||
throw new Error('String ' + str + ' does not start with raw prefix ' + directory.key());
|
||||
|
||||
inst.push(str.slice(directory.key().length));
|
||||
}
|
||||
else {
|
||||
throw new Error('Unknown op: ' + inst.op);
|
||||
}
|
||||
}
|
||||
catch(err) {
|
||||
dirUtil.pushError(this, inst, err);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = StreamlineDirectoryExtension;
@@ -1,41 +0,0 @@
var fdb = require('../lib/fdb').apiVersion(200);
|
||||
|
||||
db = fdb.open(null, null, _);
|
||||
|
||||
var tr = db.createTransaction();
|
||||
|
||||
tr.set('foo1', 'bar1');
|
||||
tr.set('foo2', 'bar2');
|
||||
tr.set('foo3', 'bar3');
|
||||
tr.set('foo4', 'bar4');
|
||||
tr.set('foo5', 'bar5');
|
||||
tr.set('bar1', 'foo1');
|
||||
tr.set('bar2', 'foo2');
|
||||
|
||||
tr.commit(_);
|
||||
|
||||
console.log('get range: foo1-foo4');
|
||||
var itr = tr.getRange('foo1', 'foo4', null);
|
||||
|
||||
a = itr.forEach(function(val, cb) {
|
||||
console.log(val.key.toString(), val.value.toString());
|
||||
cb(null, null);
|
||||
});
|
||||
|
||||
console.log('get range starts with: foo');
|
||||
itr = tr.getRangeStartsWith('foo');
|
||||
|
||||
b = itr.forEachBatch(function(arr, cb) {
|
||||
console.log('processing array', arr.length);
|
||||
for(var i in arr)
|
||||
console.log(arr[i].key.toString(), arr[i].value.toString());
|
||||
cb(null, null);
|
||||
});
|
||||
c = itr.forEachBatch(function(arr, cb) {
|
||||
console.log('processing array concurrent', arr.length);
|
||||
for(var i in arr)
|
||||
console.log(arr[i].key.toString(), arr[i].value.toString());
|
||||
cb(null, null);
|
||||
});
|
||||
|
||||
console.log(a(_) + b(_) + c(_));
@@ -1,48 +0,0 @@
var fdb = require('../lib/fdb').apiVersion(200);
|
||||
|
||||
function set(db, key, value, _) {
|
||||
db.doTransaction(function(tr, _) {
|
||||
console.log("setting key");
|
||||
tr.set(key, value);
|
||||
return;
|
||||
}, _);
|
||||
};
|
||||
|
||||
function getAndClear(db, key, _) {
|
||||
a = db.doTransaction(function(tr, _) {
|
||||
console.log("getting key");
|
||||
res = tr.get(key, _);
|
||||
tr.clear(key);
|
||||
return res;
|
||||
});
|
||||
|
||||
b = db.doTransaction(function(tr, _) {
|
||||
console.log("getting key");
|
||||
res = tr.get(key, _);
|
||||
tr.clear(key);
|
||||
return res;
|
||||
});
|
||||
|
||||
return a(_) + b(_);
|
||||
};
|
||||
|
||||
getAndClearTxn = fdb.transactional(function(tr, key, _) {
|
||||
console.log("getting key");
|
||||
tr.getKey(fdb.KeySelector.firstGreaterOrEqual(key));
|
||||
res = tr.get(key, _);
|
||||
tr.clear(key);
|
||||
return res;
|
||||
});
|
||||
|
||||
db = fdb.open(null, null, _);
|
||||
set(db, 'foo', 'bar', _);
|
||||
|
||||
//res = getAndClear(db, 'foo');
|
||||
//console.log("Called get and clear", res(_).toString());
|
||||
|
||||
a = getAndClearTxn(db, 'foo');
|
||||
b = getAndClearTxn(db, 'foo');
|
||||
|
||||
res = a(_) + b(_);
|
||||
console.log("Result:", res.toString());

@@ -1,464 +0,0 @@
#!/usr/bin/env _node
|
||||
|
||||
"use strict";
|
||||
|
||||
//cmd line: _node streamline_tester._js <test_prefix> <optional_cluster_file>
|
||||
var startTestPrefix = process.argv[2];
|
||||
|
||||
var assert = require('assert');
|
||||
var fdb = require('../lib/fdb.js').apiVersion(parseInt(process.argv[3]));
|
||||
var fdbUtil = require('../lib/fdbUtil.js');
|
||||
var testerUtil = require('./util.js');
|
||||
var DirectoryExtension = require('./streamline_directory_extension._js');
|
||||
|
||||
var db = fdb.open(process.argv[4]);
|
||||
|
||||
function pushError(inst, err) {
|
||||
if(err) {
|
||||
if(!err.code)
|
||||
throw err;
|
||||
|
||||
inst.push(fdb.tuple.pack([fdb.buffer('ERROR'), fdb.buffer(err.code.toString())]));
|
||||
}
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
var rangeChoice = 0;
|
||||
function pushRange(itr, inst, prefixFilter, _) {
|
||||
var outArray = [];
|
||||
|
||||
function pushKV(kv) {
|
||||
if(typeof prefixFilter === 'undefined' || prefixFilter === null || fdbUtil.buffersEqual(kv.key.slice(0, prefixFilter.length), prefixFilter)) {
|
||||
outArray.push(kv.key);
|
||||
outArray.push(kv.value);
|
||||
}
|
||||
}
|
||||
|
||||
//Test the different methods for getting data from a range
|
||||
if(inst.isDatabase) {
|
||||
for(var i = 0; i < itr.length; ++i)
|
||||
pushKV(itr[i]);
|
||||
}
|
||||
else if(rangeChoice % 4 === 0) {
|
||||
itr.forEachBatch(function(res, _) {
|
||||
for(var i = 0; i < res.length; ++i)
|
||||
pushKV(res[i]);
|
||||
|
||||
if(rangeChoice % 8 === 0)
|
||||
setTimeout(_, 0);
|
||||
}, _);
|
||||
}
|
||||
else if(rangeChoice % 4 === 1) {
|
||||
itr.forEach(function(res, _) {
|
||||
pushKV(res);
|
||||
if(rangeChoice % 8 === 1)
|
||||
setTimeout(_, 0);
|
||||
}, _);
|
||||
}
|
||||
else if(rangeChoice % 4 === 2) {
|
||||
var arr = itr.toArray(_);
|
||||
for(var i = 0; i < arr.length; ++i)
|
||||
pushKV(arr[i]);
|
||||
}
|
||||
else {
|
||||
fdbUtil.whileLoop(function(_) {
|
||||
var kv = itr.next(_);
|
||||
if(!kv)
|
||||
return null;
|
||||
else
|
||||
pushKV(kv);
|
||||
}, _);
|
||||
}
|
||||
|
||||
rangeChoice++;
|
||||
inst.push(fdb.tuple.pack(outArray));
|
||||
}
|
||||
|
||||
var waitEmpty = fdb.transactional(function(tr, prefix, _) {
|
||||
var itr = tr.getRangeStartsWith(prefix, { limit: 1 });
|
||||
var arr = itr.toArray(_);
|
||||
|
||||
if(arr.length > 0)
|
||||
throw new fdb.FDBError('', 1020);
|
||||
});
|
||||
|
||||
var testWatches = function(db, _) {
|
||||
db.set('w0', '0', _);
|
||||
db.set('w3', '3', _);
|
||||
|
||||
var ready = [ false, false, false, false ];
|
||||
|
||||
var watches = [];
|
||||
watches[0] = db.doTransaction(function(tr, _) {
|
||||
return tr.watch('w0');
|
||||
}, _);
|
||||
|
||||
watches[1] = db.clearAndWatch('w1', _).watch;
|
||||
watches[2] = db.setAndWatch('w2', '2', _).watch;
|
||||
watches[3] = db.getAndWatch('w3', _);
|
||||
|
||||
assert.strictEqual(watches[3].value.toString(), '3', 'get and watch');
|
||||
watches[3] = watches[3].watch;
|
||||
|
||||
for(var i = 0; i < watches.length; ++i) {
|
||||
(function(i) {
|
||||
watches[i](function(err) { if(!err) ready[i] = true; });
|
||||
})(i);
|
||||
}
|
||||
|
||||
function checkWatches(expected, testName) {
|
||||
for(var i = 0; i < watches.length; ++i)
|
||||
assert.strictEqual(ready[i], expected, testName + ' ' + i);
|
||||
}
|
||||
|
||||
setTimeout(_, 1000);
|
||||
checkWatches(false, 'test 1');
|
||||
|
||||
db.set('w0', '0', _);
|
||||
db.clear('w1', _);
|
||||
|
||||
setTimeout(_, 5000);
|
||||
checkWatches(false, 'test 2');
|
||||
|
||||
db.set('w0', 'a', _);
|
||||
db.set('w1', 'b', _);
|
||||
db.clear('w2', _);
|
||||
db.xor('w3', fdb.buffer.fromByteLiteral('\xff\xff'), _);
|
||||
|
||||
setTimeout(_, 2000);
|
||||
checkWatches(true, 'test 3');
|
||||
};
|
||||
|
||||
var testLocality = function(_) {
|
||||
db.doTransaction(function(tr, _) {
|
||||
tr.options.setTimeout(60*1000);
|
||||
tr.options.setReadSystemKeys();
|
||||
|
||||
var boundaryKeys = fdb.locality.getBoundaryKeys(tr, '', fdb.buffer.fromByteLiteral('\xff\xff'), _).toArray(_);
|
||||
var success = true;
|
||||
|
||||
for(var i = 0; i < boundaryKeys.length-1; ++i) {
|
||||
var start = boundaryKeys[i];
|
||||
var end = tr.getKey(fdb.KeySelector.lastLessThan(boundaryKeys[i+1]), _);
|
||||
var startAddresses = fdb.locality.getAddressesForKey(tr, start, _);
|
||||
var endAddresses = fdb.locality.getAddressesForKey(tr, end, _);
|
||||
for(var j = 0; j < startAddresses.length; ++j) {
|
||||
var found = false;
|
||||
for(var k = 0; k < endAddresses.length; ++k) {
|
||||
if(startAddresses[j].toString() === endAddresses[k].toString()) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if(!found) {
|
||||
success = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if(!success)
|
||||
break;
|
||||
}
|
||||
|
||||
if(!success)
|
||||
throw(new Error('Locality not internally consistent'));
|
||||
}, _);
|
||||
}
|
||||
|
||||
var numOperations = 0;
|
||||
function processOperation(context, inst, _) {
|
||||
//if(inst.op !== 'SWAP' && inst.op !== 'PUSH')
|
||||
//console.log(context.prefix + ':', context.instructionIndex + '.', inst.op);
|
||||
|
||||
var params, numParams, res, itr;
|
||||
|
||||
try {
|
||||
if(inst.op === 'PUSH')
|
||||
inst.push(inst.tokens[1]);
|
||||
else if(inst.op === 'POP')
|
||||
inst.pop()(_);
|
||||
else if(inst.op === 'DUP')
|
||||
context.stack.pushEntry(context.stack.get(context.stack.length()-1));
|
||||
else if(inst.op === 'EMPTY_STACK')
|
||||
context.stack = new testerUtil.Stack();
|
||||
else if(inst.op === 'SWAP') {
|
||||
var index = inst.pop()(_);
|
||||
assert.strictEqual(context.stack.length() > index, true, 'Cannot swap; stack too small');
|
||||
index = context.stack.length() - index - 1;
|
||||
if(context.stack.length() > index + 1) {
|
||||
var tmp = context.stack.get(index);
|
||||
context.stack.set(index, context.stack.popEntry());
|
||||
context.stack.pushEntry(tmp);
|
||||
}
|
||||
}
|
||||
else if(inst.op === 'WAIT_FUTURE') {
|
||||
var stackEntry = inst.pop({withMetadata: true})(_);
|
||||
context.stack.pushEntry(stackEntry);
|
||||
}
|
||||
else if(inst.op === 'WAIT_EMPTY') {
|
||||
var waitKey = inst.pop()(_);
|
||||
waitEmpty(db, waitKey, _);
|
||||
inst.push('WAITED_FOR_EMPTY');
|
||||
}
|
||||
else if(inst.op === 'START_THREAD') {
|
||||
var prefix = inst.pop()(_);
|
||||
processTest(prefix, function(err, res) {
|
||||
if(err) {
|
||||
console.error('ERROR in Thread', prefix + ':');
|
||||
console.error(err.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
}
|
||||
else if(inst.op === 'NEW_TRANSACTION') {
|
||||
context.newTransaction();
|
||||
}
|
||||
else if(inst.op === 'USE_TRANSACTION') {
|
||||
var name = inst.pop()(_);
|
||||
context.switchTransaction(name);
|
||||
}
|
||||
else if(inst.op === 'SET') {
|
||||
params = inst.pop({count: 2})(_);
|
||||
|
||||
res = inst.tr.set(params[0], params[1]);
|
||||
if(inst.isDatabase)
|
||||
inst.push(res, true);
|
||||
}
|
||||
else if(inst.op === 'CLEAR') {
|
||||
var key = inst.pop()(_);
|
||||
|
||||
res = inst.tr.clear(key);
|
||||
if(inst.isDatabase)
|
||||
inst.push(res, true);
|
||||
}
|
||||
else if(inst.op === 'CLEAR_RANGE') {
|
||||
params = inst.pop({count: 2})(_);
|
||||
|
||||
res = inst.tr.clearRange(params[0], params[1]);
|
||||
if(inst.isDatabase)
|
||||
inst.push(res, true);
|
||||
}
|
||||
else if(inst.op === 'CLEAR_RANGE_STARTS_WITH') {
|
||||
var prefix = inst.pop()(_);
|
||||
|
||||
res = inst.tr.clearRangeStartsWith(prefix);
|
||||
if(inst.isDatabase)
|
||||
inst.push(res, true);
|
||||
}
|
||||
else if(inst.op === 'ATOMIC_OP') {
|
||||
params = inst.pop({count: 3})(_);
|
||||
|
||||
res = inst.tr[testerUtil.toJavaScriptName(params[0])](params[1], params[2]);
|
||||
if(inst.isDatabase)
|
||||
inst.push(res, true);
|
||||
}
|
||||
else if(inst.op === 'COMMIT') {
|
||||
inst.push(inst.tr.commit(), true);
|
||||
}
|
||||
else if(inst.op === 'RESET')
|
||||
inst.tr.reset();
|
||||
else if(inst.op === 'CANCEL')
|
||||
inst.tr.cancel();
|
||||
else if(inst.op === 'GET') {
|
||||
var key = inst.pop()(_);
|
||||
inst.push(inst.tr.get(key), true);
|
||||
}
|
||||
else if(inst.op === 'GET_RANGE') {
|
||||
params = inst.pop({count: 5})(_);
|
||||
|
||||
if(inst.isDatabase)
|
||||
itr = inst.tr.getRange(params[0], params[1], { limit: params[2], reverse: params[3], streamingMode: params[4] }, _);
|
||||
else
|
||||
itr = inst.tr.getRange(params[0], params[1], { limit: params[2], reverse: params[3], streamingMode: params[4] });
|
||||
|
||||
pushRange(itr, inst, undefined, _);
|
||||
}
|
||||
else if(inst.op === 'GET_RANGE_SELECTOR') {
|
||||
params = inst.pop({count: 10})(_);
|
||||
|
||||
var start = new fdb.KeySelector(params[0], params[1], params[2]);
|
||||
var end = new fdb.KeySelector(params[3], params[4], params[5]);
|
||||
|
||||
if(inst.isDatabase)
|
||||
itr = inst.tr.getRange(start, end, { limit: params[6], reverse: params[7], streamingMode: params[8] }, _);
|
||||
else
|
||||
itr = inst.tr.getRange(start, end, { limit: params[6], reverse: params[7], streamingMode: params[8] });
|
||||
|
||||
pushRange(itr, inst, params[9], _);
|
||||
}
|
||||
else if(inst.op === 'GET_RANGE_STARTS_WITH') {
|
||||
params = inst.pop({count: 4})(_);
|
||||
|
||||
if(inst.isDatabase)
|
||||
itr = inst.tr.getRangeStartsWith(params[0], { limit: params[1], reverse: params[2], streamingMode: params[3] }, _);
|
||||
else
|
||||
itr = inst.tr.getRangeStartsWith(params[0], { limit: params[1], reverse: params[2], streamingMode: params[3] });
|
||||
|
||||
pushRange(itr, inst, undefined, _);
|
||||
}
|
||||
else if(inst.op === 'GET_KEY') {
|
||||
params = inst.pop({count: 4})(_);
|
||||
var key = inst.tr.getKey(new fdb.KeySelector(params[0], params[1], params[2]), _);
|
||||
|
||||
if(fdbUtil.buffersEqual(key.slice(0, params[3].length), params[3])) {
|
||||
inst.push(key);
|
||||
}
|
||||
else if(fdb.buffer.toByteLiteral(key) < fdb.buffer.toByteLiteral(params[3])) {
|
||||
inst.push(params[3]);
|
||||
}
|
||||
else {
|
||||
inst.push(fdbUtil.strinc(params[3]));
|
||||
}
|
||||
}
|
||||
else if(inst.op === 'READ_CONFLICT_RANGE') {
|
||||
params = inst.pop({count: 2})(_);
|
||||
inst.tr.addReadConflictRange(params[0], params[1]);
|
||||
inst.push(fdb.buffer('SET_CONFLICT_RANGE'));
|
||||
}
|
||||
else if(inst.op === 'WRITE_CONFLICT_RANGE') {
|
||||
params = inst.pop({count: 2})(_);
|
||||
inst.tr.addWriteConflictRange(params[0], params[1]);
|
||||
inst.push(fdb.buffer('SET_CONFLICT_RANGE'));
|
||||
}
|
||||
else if(inst.op === 'READ_CONFLICT_KEY') {
|
||||
var key = inst.pop()(_);
|
||||
inst.tr.addReadConflictKey(key);
|
||||
inst.push(fdb.buffer('SET_CONFLICT_KEY'));
|
||||
}
|
||||
else if(inst.op === 'WRITE_CONFLICT_KEY') {
|
||||
var key = inst.pop()(_);
|
||||
inst.tr.addWriteConflictKey(key);
|
||||
inst.push(fdb.buffer('SET_CONFLICT_KEY'));
|
||||
}
|
||||
else if(inst.op === 'DISABLE_WRITE_CONFLICT') {
|
||||
inst.tr.options.setNextWriteNoWriteConflictRange();
|
||||
}
|
||||
else if(inst.op === 'GET_READ_VERSION') {
|
||||
context.lastVersion = inst.tr.getReadVersion(_);
|
||||
inst.push(fdb.buffer('GOT_READ_VERSION'));
|
||||
}
|
||||
else if(inst.op === 'GET_COMMITTED_VERSION') {
|
||||
context.lastVersion = inst.tr.getCommittedVersion();
|
||||
inst.push(fdb.buffer('GOT_COMMITTED_VERSION'));
|
||||
}
|
||||
else if(inst.op === 'GET_VERSIONSTAMP') {
|
||||
inst.push(inst.tr.getVersionstamp(), true);
|
||||
}
|
||||
else if(inst.op === 'SET_READ_VERSION') {
|
||||
assert.notStrictEqual(typeof context.lastVersion, 'undefined', 'Cannot set read version; version has never been read');
|
||||
inst.tr.setReadVersion(context.lastVersion);
|
||||
}
|
||||
else if(inst.op === 'ON_ERROR') {
|
||||
var errorCode = inst.pop()(_);
|
||||
var testErr = new fdb.FDBError('', errorCode);
|
||||
|
||||
inst.push(inst.tr.onError(testErr), true);
|
||||
}
|
||||
else if(inst.op === 'TUPLE_PACK') {
|
||||
numParams = inst.pop()(_);
|
||||
params = inst.pop({count: numParams})(_);
|
||||
inst.push(fdb.tuple.pack(params));
|
||||
}
|
||||
else if(inst.op === 'TUPLE_UNPACK') {
|
||||
var packedTuple = inst.pop()(_);
|
||||
var arr = fdb.tuple.unpack(packedTuple);
|
||||
for(var i = 0; i < arr.length; ++i)
|
||||
inst.push(fdb.tuple.pack([arr[i]]));
|
||||
}
|
||||
else if(inst.op === 'TUPLE_RANGE') {
|
||||
numParams = inst.pop()(_);
|
||||
params = inst.pop({count: numParams})(_);
|
||||
var range = fdb.tuple.range(params);
|
||||
inst.push(range.begin);
|
||||
inst.push(range.end);
|
||||
}
|
||||
else if(inst.op === 'SUB') {
|
||||
params = inst.pop({count: 2})(_);
|
||||
inst.push(params[0] - params[1]);
|
||||
}
|
||||
else if(inst.op === 'CONCAT') {
|
||||
params = inst.pop({count: 2})(_);
|
||||
if(Buffer.isBuffer(params[0])) {
|
||||
inst.push(Buffer.concat([params[0], params[1]]))
|
||||
}
|
||||
else {
|
||||
inst.push(params[0] + params[1]);
|
||||
}
|
||||
}
|
||||
else if(inst.op === 'LOG_STACK') {
|
||||
var prefix = inst.pop()(_);
|
||||
var items = inst.pop({count: context.stack.length(), withMetadata: true})(_);
|
||||
|
||||
for(var i = 0; i < items.length; ++i) {
|
||||
if(i % 100 === 0)
|
||||
inst.tr.commit(_);
|
||||
inst.tr.reset();
|
||||
|
||||
var entry = items[items.length - i - 1];
|
||||
var packedSubKey = fdb.tuple.pack([i, entry.instructionIndex]);
|
||||
|
||||
var packedValue = fdb.tuple.pack([entry.item]);
|
||||
if(packedValue.length > 40000)
|
||||
packedValue = packedValue.slice(0, 40000);
|
||||
|
||||
inst.tr.set(Buffer.concat([prefix, packedSubKey], prefix.length + packedSubKey.length), packedValue);
|
||||
}
|
||||
|
||||
inst.tr.commit(_);
|
||||
inst.tr.reset();
|
||||
}
|
||||
else if(inst.op === 'UNIT_TESTS') {
|
||||
try {
|
||||
db.options.setLocationCacheSize(100001);
|
||||
db.doTransaction(function(tr, _) {
|
||||
tr.options.setPrioritySystemImmediate();
|
||||
tr.options.setPriorityBatch();
|
||||
tr.options.setCausalReadRisky();
|
||||
tr.options.setCausalWriteRisky();
|
||||
tr.options.setReadYourWritesDisable();
|
||||
tr.options.setReadAheadDisable();
|
||||
tr.options.setReadSystemKeys();
|
||||
tr.options.setAccessSystemKeys();
|
||||
tr.options.setDurabilityDevNullIsWebScale();
|
||||
tr.options.setTimeout(1000);
|
||||
tr.options.setRetryLimit(5);
|
||||
tr.options.setMaxRetryDelay(100);
|
||||
tr.options.setUsedDuringCommitProtectionDisable();
|
||||
tr.options.setTransactionLoggingEnable('my_transaction');
|
||||
|
||||
tr.get(fdb.buffer.fromByteLiteral('\xff'), _);
|
||||
}, _);
|
||||
|
||||
testWatches(db, _);
|
||||
testLocality(_);
|
||||
}
|
||||
catch(err) {
|
||||
throw('Unit tests failed: ' + err);
|
||||
}
|
||||
}
|
||||
else if(testerUtil.startsWith(inst.op, 'DIRECTORY_')) {
|
||||
context.directoryExtension.processInstruction(inst, _);
|
||||
}
|
||||
else
|
||||
throw new Error('Unrecognized operation');
|
||||
}
|
||||
catch(err) {
|
||||
pushError(inst, err);
|
||||
}
|
||||
}
|
||||
|
||||
function processTest(prefix, _) {
|
||||
var context = new testerUtil.Context(db, prefix, processOperation, new DirectoryExtension());
|
||||
try {
|
||||
context.run(_);
|
||||
}
|
||||
catch(err) {
|
||||
console.error('ERROR during operation \'' + context.ops[context.current].value.toString() + '\':');
|
||||
console.error(err.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
processTest(startTestPrefix, _);
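Note: both testers are small stack machines — each instruction's first token is an op name, and most ops push to or pop from a shared stack before touching the database. A self-contained miniature of that dispatch loop (ops reduced and operand order simplified; no fdb involved):

function runOps(instructions) {
  var stack = [];
  instructions.forEach(function(inst) {
    var op = inst[0];
    if (op === 'PUSH') stack.push(inst[1]);
    else if (op === 'POP') stack.pop();
    else if (op === 'DUP') stack.push(stack[stack.length - 1]);
    else if (op === 'SWAP') {
      var depth = stack.pop();
      var index = stack.length - depth - 1;
      if (stack.length > index + 1) {   // swapping the top with itself is a no-op
        var tmp = stack[index];
        stack[index] = stack.pop();
        stack.push(tmp);
      }
    }
    else if (op === 'SUB') {
      var b = stack.pop(), a = stack.pop();
      stack.push(a - b);
    }
    else throw new Error('Unknown op: ' + op);
  });
  return stack;
}

console.log(runOps([['PUSH', 7], ['PUSH', 3], ['SUB']])); // [ 4 ]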
@@ -1,754 +0,0 @@
#!/usr/bin/env node
|
||||
|
||||
/*
|
||||
* tester.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
//cmd line: node tester.js <test_prefix> <optional_cluster_file>
|
||||
var startTestPrefix = process.argv[2];
|
||||
if(process.argv.length === 5)
|
||||
var clusterFile = process.argv[4];
|
||||
else
|
||||
var clusterFile = '';
|
||||
|
||||
var assert = require('assert');
|
||||
var fdb = require('../lib/fdb.js').apiVersion(parseInt(process.argv[3]));
|
||||
var fdbUtil = require('../lib/fdbUtil.js');
|
||||
var testerUtil = require('./util.js');
|
||||
var DirectoryExtension = require('./directory_extension.js');
|
||||
//fdb.options.setTraceEnable()
|
||||
|
||||
var db = fdb.open(clusterFile);
|
||||
|
||||
function pushError(inst, err) {
|
||||
if(err) {
|
||||
if(!err.code) {
|
||||
console.error('ERROR during operation \'' + inst.op + '\':');
|
||||
console.error(err.stack);
|
||||
context.cb(err);
|
||||
}
|
||||
|
||||
inst.push(fdb.tuple.pack([fdb.buffer('ERROR'), fdb.buffer(err.code.toString())]));
|
||||
}
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
var rangeChoice = 0;
|
||||
function pushRange(itr, inst, prefixFilter, cb) {
|
||||
return fdb.future.create(function(futureCb) {
|
||||
var outArray = [];
|
||||
|
||||
function pushKV(kv) {
|
||||
if(typeof prefixFilter === 'undefined' || prefixFilter === null || fdbUtil.buffersEqual(kv.key.slice(0, prefixFilter.length), prefixFilter)) {
|
||||
outArray.push(kv.key);
|
||||
outArray.push(kv.value);
|
||||
}
|
||||
}
|
||||
|
||||
function finish(err) {
|
||||
if(!pushError(inst, err))
|
||||
inst.push(fdb.tuple.pack(outArray));
|
||||
|
||||
futureCb();
|
||||
}
|
||||
|
||||
//Test different methods for getting a range
|
||||
if(inst.isDatabase) {
|
||||
for(var i = 0; i < itr.length; ++i)
|
||||
pushKV(itr[i]);
|
||||
|
||||
finish();
|
||||
}
|
||||
else if(rangeChoice % 4 === 0) {
|
||||
itr.forEachBatch(function(res, itrCb) {
|
||||
for(var i = 0 ; i < res.length; ++i)
|
||||
pushKV(res[i]);
|
||||
|
||||
if(rangeChoice % 8 === 0)
|
||||
setTimeout(itrCb, 0);
|
||||
else
|
||||
itrCb();
|
||||
}, function(err, res) {
|
||||
finish(err);
|
||||
});
|
||||
}
|
||||
else if(rangeChoice % 4 === 1) {
|
||||
itr.forEach(function(res, itrCb) {
|
||||
pushKV(res);
|
||||
|
||||
if(rangeChoice % 8 === 1)
|
||||
setTimeout(itrCb, 0);
|
||||
else
|
||||
itrCb();
|
||||
}, function(err, res) {
|
||||
finish(err);
|
||||
});
|
||||
}
|
||||
else if(rangeChoice % 4 === 2) {
|
||||
itr.toArray(function(err, arr) {
|
||||
if(!err) {
|
||||
for(var i = 0; i < arr.length; ++i)
|
||||
pushKV(arr[i]);
|
||||
}
|
||||
|
||||
finish(err);
|
||||
});
|
||||
}
|
||||
else {
|
||||
fdbUtil.whileLoop(function(loopCb) {
|
||||
itr.next(function(err, res) {
|
||||
if(err)
|
||||
loopCb(err);
|
||||
else if(!res)
|
||||
loopCb(undefined, null);
|
||||
else {
|
||||
pushKV(res);
|
||||
loopCb();
|
||||
}
|
||||
});
|
||||
}, finish);
|
||||
}
|
||||
|
||||
rangeChoice++;
|
||||
})(cb);
|
||||
}
|
||||
|
||||
var waitEmpty = fdb.transactional(function(tr, prefix, cb) {
|
||||
var itr = tr.getRangeStartsWith(prefix, { limit: 1 });
|
||||
itr.toArray(function(err, res) {
|
||||
if(err)
|
||||
cb(err, null);
|
||||
else if(res.length > 0)
|
||||
cb(new fdb.FDBError('', 1020), null);
|
||||
else
|
||||
cb(null, null);
|
||||
});
|
||||
});
|
||||
|
||||
var timeoutFuture = function(time) {
|
||||
return fdb.future.create(function(futureCb) {
|
||||
setTimeout(futureCb, time);
|
||||
});
|
||||
};
|
||||
|
||||
var checkWatches = function(db, watches, ready, error, expected, cb) {
|
||||
var i = 0;
|
||||
return fdbUtil.whileLoop(function(loopCb) {
|
||||
if(i == watches.length) return loopCb(undefined, true); // terminate loop
|
||||
if(!ready[i] && expected) {
|
||||
return watches[i]
|
||||
.then(function() {
|
||||
loopCb(); // Recheck this watch when it finishes
|
||||
})
|
||||
.catch(function() {
|
||||
loopCb(); // Check the error
|
||||
});
|
||||
}
|
||||
assert.strictEqual(!ready[i] || expected, true, 'watch shouldn\'t be ready: ' + i);
|
||||
if(typeof error[i] !== 'undefined') {
|
||||
var tr = db.createTransaction();
|
||||
return tr.onError(error[i])
|
||||
.then(function() {
|
||||
return false;
|
||||
})(loopCb);
|
||||
}
|
||||
|
||||
i++;
|
||||
loopCb();
|
||||
})(cb);
|
||||
}
|
||||
|
||||
var testWatches = function(db, cb) {
|
||||
return fdbUtil.whileLoop(function(loopCb) {
|
||||
var ready = [ false, false, false, false ];
|
||||
var error = [ undefined, undefined, undefined, undefined ];
|
||||
var watches = [];
|
||||
|
||||
db.doTransaction(function(tr, innerCb) {
|
||||
tr.set('w0', '0')
|
||||
tr.set('w3', '3');
|
||||
innerCb();
|
||||
})
|
||||
.then(function() {
|
||||
return db.doTransaction(function(tr, innerCb) {
|
||||
watches[0] = tr.watch('w0');
|
||||
innerCb();
|
||||
});
|
||||
})
|
||||
.then(function() {
|
||||
return db.clearAndWatch('w1');
|
||||
})
|
||||
.then(function(w) {
|
||||
watches[1] = w.watch;
|
||||
return db.setAndWatch('w2', '2');
|
||||
})
|
||||
.then(function(w) {
|
||||
watches[2] = w.watch;
|
||||
return db.getAndWatch('w3');
|
||||
})
|
||||
.then(function(w) {
|
||||
assert.strictEqual(w.value.toString(), '3', 'get and watch');
|
||||
watches[3] = w.watch;
|
||||
|
||||
for(var i = 0; i < watches.length; ++i) {
|
||||
(function(i) {
|
||||
watches[i](function(err) {
|
||||
if(!err) ready[i] = true;
|
||||
else error[i] = err;
|
||||
});
|
||||
})(i);
|
||||
}
|
||||
|
||||
return timeoutFuture(1000);
|
||||
})
|
||||
.then(function() {
|
||||
return checkWatches(db, watches, ready, error, false);
|
||||
})
|
||||
.then(function(result) {
|
||||
if(!result) return; // go around the loop again
|
||||
return db.doTransaction(function(tr, innerCb) {
|
||||
tr.set('w0', '0');
|
||||
innerCb();
|
||||
})
|
||||
.then(function() {
|
||||
return db.clear('w1');
|
||||
})
|
||||
.then(function() {
|
||||
return timeoutFuture(5000);
|
||||
})
|
||||
.then(function() {
|
||||
return checkWatches(db, watches, ready, error, false);
|
||||
})
|
||||
.then(function(result) {
|
||||
if(!result) return; // go around the loop again
|
||||
return db.set('w0', 'a')
|
||||
.then(function() {
|
||||
return db.set('w1', 'b');
|
||||
})
|
||||
.then(function() {
|
||||
return db.clear('w2');
|
||||
})
|
||||
.then(function() {
|
||||
return db.xor('w3', fdb.buffer.fromByteLiteral('\xff\xff'));
|
||||
})
|
||||
.then(function() {
|
||||
return timeoutFuture(2000);
|
||||
})
|
||||
.then(function() {
|
||||
return checkWatches(db, watches, ready, error, true);
|
||||
})
|
||||
.then(function(result) {
|
||||
if(result) return null; //terminate loop
|
||||
return;
|
||||
});
|
||||
});
|
||||
})(loopCb);
|
||||
})(cb);
|
||||
};
|
||||
|
||||
var testLocality = function(db, cb) {
|
||||
return db.doTransaction(function(tr, innerCb) {
|
||||
tr.options.setTimeout(60*1000);
|
||||
tr.options.setReadSystemKeys();
|
||||
|
||||
fdb.locality.getBoundaryKeys(tr, '', fdb.buffer.fromByteLiteral('\xff\xff'), function(err, itr) {
|
||||
if(err) return innerCb(err);
|
||||
|
||||
var index = 0;
|
||||
var start;
|
||||
var end;
|
||||
itr.forEach(function(boundaryKey, loopCb) {
|
||||
if(err) return loopCb(err);
|
||||
|
||||
start = end;
|
||||
end = boundaryKey;
|
||||
if(index++ == 0)
|
||||
return loopCb();
|
||||
|
||||
tr.getKey(fdb.KeySelector.lastLessThan(end), function(err, end) {
|
||||
if(err) return loopCb(err);
|
||||
|
||||
fdb.locality.getAddressesForKey(tr, start, function(err, startAddresses) {
|
||||
if(err) return loopCb(err);
|
||||
|
||||
fdb.locality.getAddressesForKey(tr, end, function(err, endAddresses) {
|
||||
if(err) return loopCb(err);
|
||||
|
||||
for(var j = 0; j < startAddresses.length; ++j) {
|
||||
var found = false;
|
||||
for(var k = 0; k < endAddresses.length; ++k) {
|
||||
if(startAddresses[j].toString() === endAddresses[k].toString()) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if(!found) {
|
||||
return loopCb(new Error('Locality not internally consistent'));
|
||||
}
|
||||
}
|
||||
|
||||
loopCb();
|
||||
});
|
||||
});
|
||||
});
|
||||
}, innerCb);
|
||||
});
|
||||
}, cb);
|
||||
};
|
||||
|
||||
var numOperations = 0;
|
||||
function processOperation(context, inst, cb) {
|
||||
//if(inst.op !== 'SWAP' && inst.op !== 'PUSH')
|
||||
//console.log(context.prefix + ':', context.instructionIndex + '.', inst.op);
|
||||
|
||||
var promiseCb = function(err) {
|
||||
pushError(inst, err);
|
||||
cb();
|
||||
};
|
||||
|
||||
if(inst.op === 'PUSH') {
|
||||
inst.push(inst.tokens[1]);
|
||||
cb();
|
||||
}
|
||||
else if(inst.op === 'POP') {
|
||||
inst.pop()(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DUP') {
|
||||
context.stack.pushEntry(context.stack.get(context.stack.length()-1));
|
||||
cb();
|
||||
}
|
||||
else if(inst.op === 'EMPTY_STACK') {
|
||||
context.stack = new testerUtil.Stack();
|
||||
cb();
|
||||
}
|
||||
else if(inst.op === 'SWAP') {
|
||||
inst.pop()
|
||||
.then(function(index) {
|
||||
assert.strictEqual(context.stack.length() > index, true, 'Cannot swap; stack too small');
|
||||
index = context.stack.length() - index - 1;
|
||||
if(context.stack.length() > index + 1) {
|
||||
var tmp = context.stack.get(index);
|
||||
context.stack.set(index, context.stack.popEntry());
|
||||
context.stack.pushEntry(tmp);
|
||||
}
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'WAIT_FUTURE') {
|
||||
inst.pop({withMetadata: true})
|
||||
.then(function(stackEntry) {
|
||||
context.stack.pushEntry(stackEntry);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'WAIT_EMPTY') {
|
||||
inst.pop()
|
||||
.then(function(waitKey) {
|
||||
return waitEmpty(db, waitKey)
|
||||
.then(function() {
|
||||
inst.push('WAITED_FOR_EMPTY');
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'START_THREAD') {
|
||||
inst.pop()
|
||||
.then(function(prefix) {
|
||||
processTest(prefix, function(err, res) {
|
||||
if(err) {
|
||||
console.error('ERROR in Thread', prefix + ':');
|
||||
console.error(err.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'NEW_TRANSACTION') {
|
||||
context.newTransaction();
|
||||
cb();
|
||||
}
|
||||
else if(inst.op === 'USE_TRANSACTION') {
|
||||
inst.pop()
|
||||
.then(function(name) {
|
||||
context.switchTransaction(name);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'SET') {
|
||||
inst.pop({count: 2})
|
||||
.then(function(params) {
|
||||
var res = inst.tr.set(params[0], params[1]);
|
||||
|
||||
if(inst.isDatabase)
|
||||
inst.push(res, true);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'CLEAR') {
|
||||
inst.pop()
|
||||
.then(function(key) {
|
||||
var res = inst.tr.clear(key);
|
||||
|
||||
if(inst.isDatabase)
|
||||
inst.push(res, true);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'CLEAR_RANGE') {
|
||||
inst.pop({count: 2})
|
||||
.then(function(params) {
|
||||
var res = inst.tr.clearRange(params[0], params[1]);
|
||||
|
||||
if(inst.isDatabase)
|
||||
inst.push(res, true);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'CLEAR_RANGE_STARTS_WITH') {
|
||||
inst.pop()
|
||||
.then(function(prefix) {
|
||||
var res = inst.tr.clearRangeStartsWith(prefix);
|
||||
|
||||
if(inst.isDatabase)
|
||||
inst.push(res, true);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'ATOMIC_OP') {
|
||||
inst.pop({count: 3})
|
||||
.then(function(params) {
|
||||
var res = inst.tr[testerUtil.toJavaScriptName(params[0])](params[1], params[2]);
|
||||
|
||||
if(inst.isDatabase)
|
||||
inst.push(res, true);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'COMMIT') {
|
||||
inst.push(inst.tr.commit(), true);
|
||||
cb();
|
||||
}
|
||||
else if(inst.op === 'RESET') {
|
||||
inst.tr.reset();
|
||||
cb();
|
||||
}
|
||||
else if(inst.op === 'CANCEL') {
|
||||
inst.tr.cancel();
|
||||
cb();
|
||||
}
|
||||
else if(inst.op === 'GET') {
|
||||
inst.pop()
|
||||
.then(function(key) {
|
||||
inst.push(inst.tr.get(key), true);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'GET_RANGE') {
|
||||
inst.pop({count: 5})
|
||||
.then(function(params) {
|
||||
var itr = inst.tr.getRange(params[0], params[1], { limit: params[2], reverse: params[3], streamingMode: params[4] });
|
||||
if(inst.isDatabase) {
|
||||
return itr.then(function(arr) {
|
||||
return pushRange(arr, inst);
|
||||
});
|
||||
}
|
||||
else {
|
||||
return pushRange(itr, inst);
|
||||
}
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'GET_RANGE_SELECTOR') {
|
||||
inst.pop({count: 10})
|
||||
.then(function(params) {
|
||||
var start = new fdb.KeySelector(params[0], params[1], params[2]);
|
||||
var end = new fdb.KeySelector(params[3], params[4], params[5]);
|
||||
var itr = inst.tr.getRange(start, end, { limit: params[6], reverse: params[7], streamingMode: params[8] });
|
||||
if(inst.isDatabase) {
|
||||
return itr.then(function(arr) {
|
||||
return pushRange(arr, inst, params[9]);
|
||||
});
|
||||
}
|
||||
else {
|
||||
return pushRange(itr, inst, params[9]);
|
||||
}
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'GET_RANGE_STARTS_WITH') {
|
||||
inst.pop({count: 4})
|
||||
.then(function(params) {
|
||||
var itr = inst.tr.getRangeStartsWith(params[0], { limit: params[1], reverse: params[2], streamingMode: params[3] });
|
||||
if(inst.isDatabase) {
|
||||
return itr.then(function(arr) {
|
||||
return pushRange(arr, inst);
|
||||
});
|
||||
}
|
||||
else {
|
||||
return pushRange(itr, inst);
|
||||
}
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'GET_KEY') {
|
||||
inst.pop({count: 4})
|
||||
.then(function(params) {
|
||||
var result = inst.tr.getKey(new fdb.KeySelector(params[0], params[1], params[2]))
|
||||
.then(function(key) {
|
||||
if(fdbUtil.buffersEqual(key.slice(0, params[3].length), params[3])) {
|
||||
return key;
|
||||
}
|
||||
else if(fdb.buffer.toByteLiteral(key) < fdb.buffer.toByteLiteral(params[3])) {
|
||||
return params[3];
|
||||
}
|
||||
else {
|
||||
return fdbUtil.strinc(params[3]);
|
||||
}
|
||||
});
|
||||
|
||||
inst.push(result, true);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'READ_CONFLICT_RANGE') {
|
||||
inst.pop({count: 2})
|
||||
.then(function(params) {
|
||||
inst.tr.addReadConflictRange(params[0], params[1]);
|
||||
inst.push(fdb.buffer('SET_CONFLICT_RANGE'));
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'WRITE_CONFLICT_RANGE') {
|
||||
inst.pop({count: 2})
|
||||
.then(function(params) {
|
||||
inst.tr.addWriteConflictRange(params[0], params[1]);
|
||||
inst.push(fdb.buffer('SET_CONFLICT_RANGE'));
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'READ_CONFLICT_KEY') {
|
||||
inst.pop()
|
||||
.then(function(key) {
|
||||
inst.tr.addReadConflictKey(key);
|
||||
inst.push(fdb.buffer('SET_CONFLICT_KEY'));
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'WRITE_CONFLICT_KEY') {
|
||||
inst.pop()
|
||||
.then(function(key) {
|
||||
inst.tr.addWriteConflictKey(key);
|
||||
inst.push(fdb.buffer('SET_CONFLICT_KEY'));
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DISABLE_WRITE_CONFLICT') {
|
||||
inst.tr.options.setNextWriteNoWriteConflictRange();
|
||||
cb();
|
||||
}
|
||||
else if(inst.op === 'GET_READ_VERSION') {
|
||||
inst.tr.getReadVersion(function(err, res) {
|
||||
if(!pushError(inst, err)) {
|
||||
context.lastVersion = res;
|
||||
inst.push(fdb.buffer('GOT_READ_VERSION'));
|
||||
}
|
||||
cb();
|
||||
});
|
||||
}
|
||||
else if(inst.op === 'GET_COMMITTED_VERSION') {
|
||||
try {
|
||||
context.lastVersion = inst.tr.getCommittedVersion();
|
||||
|
||||
inst.push(fdb.buffer('GOT_COMMITTED_VERSION'));
|
||||
cb();
|
||||
}
|
||||
catch(err) {
|
||||
pushError(inst, err);
|
||||
cb();
|
||||
}
|
||||
}
|
||||
else if(inst.op === 'GET_VERSIONSTAMP') {
|
||||
try {
|
||||
inst.push(inst.tr.getVersionstamp(), true);
|
||||
cb();
|
||||
}
|
||||
catch(err) {
|
||||
pushError(inst, err);
|
||||
cb();
|
||||
}
|
||||
}
|
||||
else if(inst.op === 'SET_READ_VERSION') {
|
||||
assert.notStrictEqual(typeof context.lastVersion, 'undefined', 'Cannot set read version; version has never been read');
|
||||
inst.tr.setReadVersion(context.lastVersion);
|
||||
cb();
|
||||
}
|
||||
else if(inst.op === 'ON_ERROR') {
|
||||
inst.pop()
|
||||
.then(function(errorCode) {
|
||||
var testErr = new fdb.FDBError('', errorCode);
|
||||
|
||||
inst.push(inst.tr.onError(testErr), true);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'TUPLE_PACK') {
|
||||
inst.pop()
|
||||
.then(function(numParams) {
|
||||
return inst.pop({count: numParams})
|
||||
.then(function(params) {
|
||||
inst.push(fdb.tuple.pack(params));
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'TUPLE_UNPACK') {
|
||||
inst.pop()
|
||||
.then(function(packedTuple) {
|
||||
var arr = fdb.tuple.unpack(packedTuple);
|
||||
for(var i = 0; i < arr.length; ++i)
|
||||
inst.push(fdb.tuple.pack([arr[i]]));
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'TUPLE_RANGE') {
|
||||
inst.pop()
|
||||
.then(function(numParams) {
|
||||
return inst.pop({count: numParams})
|
||||
.then(function(params) {
|
||||
var range = fdb.tuple.range(params);
|
||||
inst.push(range.begin);
|
||||
inst.push(range.end);
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'TUPLE_SORT') {
|
||||
inst.pop()
|
||||
.then(function(numParams) {
|
||||
return inst.pop({count: numParams})
|
||||
.then(function(params) {
|
||||
var tuples = [];
|
||||
for(var i = 0; i < params.length; ++i)
|
||||
tuples.push(fdb.tuple.unpack(params[i]));
|
||||
tuples.sort(fdb.tuple.compare);
|
||||
for(var i = 0; i < tuples.length; ++i)
|
||||
inst.push(fdb.tuple.pack(tuples[i]));
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'SUB') {
|
||||
inst.pop({count: 2})
|
||||
.then(function(params) {
|
||||
inst.push(params[0] - params[1]);
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'ENCODE_FLOAT') {
|
||||
inst.pop()
|
||||
.then(function(fBytes) {
|
||||
inst.push(fdb.tuple.Float.fromBytes(fBytes));
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'ENCODE_DOUBLE') {
|
||||
inst.pop()
|
||||
.then(function(dBytes) {
|
||||
inst.push(fdb.tuple.Double.fromBytes(dBytes));
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DECODE_FLOAT') {
|
||||
inst.pop()
|
||||
.then(function(fVal) {
|
||||
inst.push(fVal.toBytes());
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'DECODE_DOUBLE') {
|
||||
inst.pop()
|
||||
.then(function(dVal) {
|
||||
inst.push(dVal.toBytes());
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'CONCAT') {
|
||||
inst.pop({count: 2})
|
||||
.then(function(params) {
|
||||
if(Buffer.isBuffer(params[0])) {
|
||||
inst.push(Buffer.concat([params[0], params[1]]));
|
||||
}
|
||||
else {
|
||||
inst.push(params[0] + params[1]);
|
||||
}
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'LOG_STACK') {
|
||||
inst.pop()
|
||||
.then(function(prefix) {
|
||||
return fdbUtil.whileLoop(function(loopCb) {
|
||||
inst.pop({count: 100, withMetadata: true})
|
||||
.then(function(items) {
|
||||
if(items.length == 0) {
|
||||
return null;
|
||||
}
|
||||
return db.doTransaction(function(tr, innerCb) {
|
||||
for(var index = 0; index < items.length; ++index) {
|
||||
var entry = items[items.length - index - 1];
|
||||
var packedSubKey = fdb.tuple.pack([context.stack.length() + index, entry.instructionIndex]);
|
||||
|
||||
var packedValue = fdb.tuple.pack([entry.item]);
|
||||
if(packedValue.length > 40000)
|
||||
packedValue = packedValue.slice(0, 40000);
|
||||
|
||||
tr.set(Buffer.concat([prefix, packedSubKey], prefix.length + packedSubKey.length), packedValue);
|
||||
}
|
||||
|
||||
innerCb();
|
||||
});
|
||||
})(loopCb);
|
||||
});
|
||||
})(promiseCb);
|
||||
}
|
||||
else if(inst.op === 'UNIT_TESTS') {
|
||||
db.options.setLocationCacheSize(100001);
|
||||
db.doTransaction(function(tr, innerCb) {
|
||||
tr.options.setPrioritySystemImmediate();
|
||||
tr.options.setPriorityBatch();
|
||||
tr.options.setCausalReadRisky();
|
||||
tr.options.setCausalWriteRisky();
|
||||
tr.options.setReadYourWritesDisable();
|
||||
tr.options.setReadAheadDisable();
|
||||
tr.options.setReadSystemKeys();
|
||||
tr.options.setAccessSystemKeys();
|
||||
tr.options.setDurabilityDevNullIsWebScale();
|
||||
tr.options.setTimeout(60*1000);
|
||||
tr.options.setRetryLimit(50);
|
||||
tr.options.setMaxRetryDelay(100);
|
||||
tr.options.setUsedDuringCommitProtectionDisable();
|
||||
tr.options.setTransactionLoggingEnable('my_transaction');
|
||||
tr.options.setReadLockAware();
|
||||
tr.options.setLockAware();
|
||||
|
||||
tr.get(fdb.buffer.fromByteLiteral('\xff'), innerCb);
|
||||
})
|
||||
.then(function() {
|
||||
return testWatches(db);
|
||||
})
|
||||
.then(function() {
|
||||
return testLocality(db);
|
||||
})
|
||||
.then(cb)
|
||||
.catch(function(err) {
|
||||
cb('Unit tests failed: ' + err + "\n" + err.stack);
|
||||
});
|
||||
}
|
||||
else if(testerUtil.startsWith(inst.op, 'DIRECTORY_')) {
|
||||
context.directoryExtension.processInstruction(inst, cb);
|
||||
}
|
||||
else {
|
||||
cb('Unrecognized operation');
|
||||
}
|
||||
}
|
||||
|
||||
function processTest(prefix, cb) {
|
||||
var context = new testerUtil.Context(db, prefix, processOperation, new DirectoryExtension());
|
||||
context.run(cb);
|
||||
}
|
||||
|
||||
processTest(startTestPrefix, function(err, res) {
|
||||
if(err)
|
||||
process.exit(1);
|
||||
});
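// Annotation added for this review (not part of the original tester): a
// condensed view of the stack protocol the handlers above implement. Each
// instruction is a packed tuple whose first token names the op; operands are
// popped from the shared stack, and results (or unresolved futures) are
// pushed back for later instructions to consume. Because inst.pop({count: n})
// yields the most recently pushed item first, a SET instruction expects the
// key on top of the stack and the value directly beneath it. A hypothetical
// three-instruction program therefore flows roughly like this:
//
//   context.stack.push(0, fdb.buffer('v'));   // PUSH value
//   context.stack.push(1, fdb.buffer('k'));   // PUSH key
//   // SET: pop({count: 2}) yields ['k', 'v'], then inst.tr.set('k', 'v')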
|
|
@ -1,101 +0,0 @@
|
|||
/*
|
||||
* tuple_test.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
var fdb = require('../lib/fdb.js').apiVersion(510);
|
||||
var fdbModule = require('../lib/fdbModule.js');
|
||||
|
||||
console.log(fdb.tuple.pack([-Math.pow(2,53)]));
|
||||
console.log(fdb.tuple.pack([-Math.pow(2,53)+1]));
|
||||
|
||||
console.log(fdb.tuple.unpack(fdb.tuple.pack([-Math.pow(2,53)])));
|
||||
console.log(fdb.tuple.unpack(fdb.tuple.pack([-Math.pow(2,53)+1])));
|
||||
|
||||
try {
|
||||
console.log(fdb.tuple.unpack(fdb.buffer.fromByteLiteral('\x0d\xdf\xff\xff\xff\xff\xff\xfe')));
|
||||
}
|
||||
catch(err) {
|
||||
console.log(err);
|
||||
}
|
||||
|
||||
console.log(fdb.tuple.pack([0xff * 0xff]));
|
||||
console.log(fdb.tuple.pack([0xffffffff + 100 ]));
|
||||
console.log(fdb.buffer.printable(fdb.tuple.pack(['begin', [true, null, false], 'end'])))
|
||||
console.log(fdb.tuple.unpack(fdb.buffer.fromByteLiteral('\x1a\xff\xff\xff\xff\xff\xff')));
|
||||
console.log(fdb.tuple.unpack(fdb.tuple.pack(['TEST', 'herp', 1, -10, 393493, '\u0000abc', 0xffffffff + 100, true, false, [new Boolean(true), null, new Boolean(false), 0, 'asdf'], null])));
|
||||
console.log(fdb.buffer.printable(fdb.tuple.pack([[[[['three']]], 'two'], 'one'])))
|
||||
console.log(fdb.tuple.range(['TEST', 1]));
|
||||
console.log(fdb.buffer.printable(fdb.tuple.pack([fdb.tuple.Float.fromBytes(new Buffer('402df854', 'hex')), fdb.tuple.Double.fromBytes(new Buffer('4005BF0A8B145769', 'hex')), new fdb.tuple.UUID(new Buffer('deadc0deba5eba115ca1ab1edeadc0de', 'hex'))])))
|
||||
console.log(fdb.tuple.unpack(fdb.tuple.pack([fdb.tuple.Float.fromBytes(new Buffer('2734236f', 'hex'))])))
|
||||
|
||||
tuples = [
|
||||
[1,2],
|
||||
[1],
|
||||
[2],
|
||||
[true],
|
||||
[false],
|
||||
[1,true],
|
||||
[1,false],
|
||||
[1, []],
|
||||
[1, [null]],
|
||||
[1, [0]],
|
||||
[1, [1]],
|
||||
[1, [0,1,2]],
|
||||
[null],
|
||||
[]
|
||||
];
|
||||
tuples.sort(fdb.tuple.compare);
|
||||
console.log(tuples);
|
||||
|
||||
tuples = [
|
||||
[fdb.tuple.Float.fromBytes(new Buffer('2734236f', 'hex'))], // A really small value.
|
||||
[fdb.tuple.Float.fromBytes(new Buffer('80000000', 'hex'))], // -0.0
|
||||
[new fdb.tuple.Float(0.0)],
|
||||
[new fdb.tuple.Float(3.14)],
|
||||
[new fdb.tuple.Float(-3.14)],
|
||||
[new fdb.tuple.Float(2.7182818)],
|
||||
[new fdb.tuple.Float(-2.7182818)],
|
||||
[fdb.tuple.Float.fromBytes(new Buffer('7f800000', 'hex'))], // Infinity
|
||||
[fdb.tuple.Float.fromBytes(new Buffer('7fffffff', 'hex'))], // NaN
|
||||
[fdb.tuple.Float.fromBytes(new Buffer('ffffffff', 'hex'))], // -NaN
|
||||
];
|
||||
tuples.sort(fdb.tuple.compare);
|
||||
console.log(tuples);
|
||||
|
||||
// Float overruns.
|
||||
const floats = [ 2.037036e90, -2.037036e90, 4.9090935e-91, -4.9090935e-91, 2.345624805922133125e14, -2.345624805922133125e14 ];
|
||||
for (var i = 0; i < floats.length; i++) {
|
||||
var f = floats[i];
|
||||
console.log(f + " -> " + fdb.tuple.Float.fromBytes((new fdb.tuple.Float(f)).toBytes()).value);
|
||||
}
|
||||
|
||||
// Float type errors.
|
||||
try {
|
||||
console.log((new fdb.tuple.Float("asdf")).toBytes());
|
||||
} catch (e) {
|
||||
console.log("Caught!");
|
||||
console.log(e);
|
||||
}
|
||||
|
||||
try {
|
||||
console.log(fdbModule.toFloat(3.14, 2.718));
|
||||
} catch (e) {
|
||||
console.log("Caught!");
|
||||
console.log(e);
|
||||
}
|
|
@ -1,276 +0,0 @@
|
|||
/*
|
||||
* util.js
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
"use strict";
|
||||
|
||||
var fdb = require('../lib/fdb.js').apiVersion(parseInt(process.argv[3]));
|
||||
var util = require('../lib/fdbUtil.js');
|
||||
|
||||
function Stack() {
|
||||
this.stack = [];
|
||||
}
|
||||
|
||||
Stack.prototype.length = function() {
|
||||
return this.stack.length;
|
||||
};
|
||||
|
||||
Stack.prototype.get = function(index) {
|
||||
return this.stack[index];
|
||||
};
|
||||
|
||||
Stack.prototype.set = function(index, val) {
|
||||
this.stack[index] = val;
|
||||
};
|
||||
|
||||
Stack.prototype.push = function(instructionIndex, item, isFuture) {
|
||||
if(typeof isFuture === 'undefined')
|
||||
isFuture = false;
|
||||
|
||||
this.pushEntry({ instructionIndex: instructionIndex, item: item, isFuture: isFuture });
|
||||
};
|
||||
|
||||
Stack.prototype.pushEntry = function(entry) {
|
||||
this.stack.push(entry);
|
||||
};
|
||||
|
||||
Stack.prototype.pop = function(options, callback) {
|
||||
var self = this;
|
||||
return fdb.future.create(function(futureCb) {
|
||||
if(typeof options === 'undefined')
|
||||
options = {};
|
||||
|
||||
var count = options.count;
|
||||
if(typeof count === 'undefined')
|
||||
count = 1;
|
||||
|
||||
var params = self.stack.slice(self.stack.length-count).reverse();
|
||||
self.stack = self.stack.slice(0, self.stack.length-count);
|
||||
|
||||
var index = 0;
|
||||
|
||||
var itemCallback = function(err, val) {
|
||||
if(err) {
|
||||
//console.log(err);
|
||||
params[index].item = fdb.tuple.pack([fdb.buffer('ERROR'), fdb.buffer(err.code.toString())]);
|
||||
}
|
||||
else if(val)
|
||||
params[index].item = val;
|
||||
else
|
||||
params[index].item = fdb.buffer('RESULT_NOT_PRESENT');
|
||||
|
||||
params[index].isFuture = false;
|
||||
|
||||
if(!options.withMetadata)
|
||||
params[index] = params[index].item;
|
||||
|
||||
index++;
|
||||
processNext();
|
||||
};
|
||||
|
||||
var processNext = function() {
|
||||
while(true) {
|
||||
if(index >= params.length) {
|
||||
if(typeof options.count === 'undefined')
|
||||
futureCb(undefined, params[0]);
|
||||
else
|
||||
futureCb(undefined, params);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if(params[index].isFuture) {
|
||||
params[index].item(itemCallback);
|
||||
return;
|
||||
}
|
||||
|
||||
if(!options.withMetadata)
|
||||
params[index] = params[index].item;
|
||||
|
||||
index++;
|
||||
}
|
||||
};
|
||||
|
||||
processNext();
|
||||
})(callback);
|
||||
};
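// Usage note added for this review (it mirrors how tester.js calls this
// method and introduces no new behaviour): pop() returns a future built with
// fdb.future.create, so callers chain .then(...) and then invoke the result
// with a completion callback. With {count: n} it yields an array ordered
// newest-first; with {withMetadata: true} each element keeps its full entry,
// including instructionIndex. Any future still sitting on the stack is
// resolved first, or replaced by a packed ERROR tuple if it failed:
//
//   stack.pop({count: 2})
//     .then(function(params) { /* params[0] was pushed last */ })
//     (promiseCb);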
|
||||
|
||||
Stack.prototype.popEntry = function() {
|
||||
return this.stack.pop();
|
||||
};
|
||||
|
||||
function Context(db, prefix, processInstruction, directoryExtension) {
|
||||
var range = fdb.tuple.range([fdb.buffer(prefix)]);
|
||||
|
||||
this.prefix = prefix;
|
||||
this.stack = new Stack();
|
||||
this.db = db;
|
||||
this.next = range.begin;
|
||||
this.end = range.end;
|
||||
this.processInstruction = processInstruction;
|
||||
this.instructionIndex = -1;
|
||||
this.directoryExtension = directoryExtension;
|
||||
this.trName = prefix;
|
||||
}
|
||||
|
||||
Context.trMap = {};
|
||||
|
||||
Context.prototype.newTransaction = function() {
|
||||
Context.trMap[this.trName] = this.db.createTransaction();
|
||||
};
|
||||
|
||||
Context.prototype.switchTransaction = function(name) {
|
||||
this.trName = name;
|
||||
if(typeof Context.trMap[this.trName] === 'undefined') {
|
||||
this.newTransaction();
|
||||
}
|
||||
};
|
||||
|
||||
Context.prototype.updateResults = function(results) {
|
||||
this.ops = results;
|
||||
this.current = 0;
|
||||
this.next = fdb.KeySelector.firstGreaterThan(results[results.length-1].key);
|
||||
};
|
||||
|
||||
var issueInstruction = function(context, cb) {
|
||||
try {
|
||||
var tokens = fdb.tuple.unpack(context.ops[context.current].value);
|
||||
var op = tokens[0].toString();
|
||||
|
||||
var snapshotStr = '_SNAPSHOT';
|
||||
var databaseStr = '_DATABASE';
|
||||
|
||||
var isSnapshot = endsWith(op, snapshotStr);
|
||||
var isDatabase = endsWith(op, databaseStr);
|
||||
|
||||
var tr = Context.trMap[context.trName];
|
||||
if(isSnapshot) {
|
||||
op = op.substr(0, op.length - snapshotStr.length);
|
||||
tr = tr.snapshot;
|
||||
}
|
||||
else if(isDatabase) {
|
||||
op = op.substr(0, op.length - databaseStr.length);
|
||||
tr = context.db;
|
||||
}
|
||||
|
||||
var inst = new Instruction(context, tr, op, tokens, isDatabase, isSnapshot);
|
||||
context.processInstruction(context, inst, cb);
|
||||
}
|
||||
catch(e) {
|
||||
cb(e);
|
||||
}
|
||||
};
|
||||
|
||||
Context.prototype.run = function(cb) {
|
||||
var self = this;
|
||||
|
||||
function getInstructions(instCb) {
|
||||
self.db.doTransaction(function(tr, trCb) {
|
||||
tr.getRange(self.next, self.end, { limit: 1000 } ).toArray(function(rangeErr, rangeRes) {
|
||||
if(rangeErr) return trCb(rangeErr);
|
||||
|
||||
trCb(undefined, rangeRes);
|
||||
});
|
||||
}, function(err, rangeRes) {
|
||||
if(err) return instCb(err);
|
||||
if(rangeRes.length > 0)
|
||||
self.updateResults(rangeRes);
|
||||
instCb();
|
||||
});
|
||||
}
|
||||
|
||||
function readAndExecuteInstructions(loopCb) {
|
||||
++self.instructionIndex;
|
||||
if(!self.ops || ++self.current === self.ops.length) {
|
||||
getInstructions(function(err) {
|
||||
if(err) return loopCb(err);
|
||||
if(self.current < self.ops.length)
|
||||
issueInstruction(self, loopCb);
|
||||
else
|
||||
loopCb(undefined, null); // terminate the loop
|
||||
});
|
||||
}
|
||||
else
|
||||
issueInstruction(self, loopCb);
|
||||
}
|
||||
|
||||
util.whileLoop(readAndExecuteInstructions, function(err) {
|
||||
if(err) {
|
||||
if(self.ops && self.current < self.ops.length)
|
||||
console.error('ERROR during operation \'' + self.ops[self.current].value.toString() + '\':');
|
||||
else
|
||||
console.error('ERROR getting operations:');
|
||||
|
||||
if(err.stack)
|
||||
console.error(err.stack);
|
||||
else
|
||||
console.error(err);
|
||||
}
|
||||
|
||||
cb(err);
|
||||
});
|
||||
};
|
||||
|
||||
function Instruction(context, tr, op, tokens, isDatabase, isSnapshot) {
|
||||
this.context = context;
|
||||
this.tr = tr;
|
||||
this.op = op;
|
||||
this.tokens = tokens;
|
||||
this.isDatabase = isDatabase;
|
||||
this.isSnapshot = isSnapshot;
|
||||
}
|
||||
|
||||
Instruction.prototype.pop = function(options, callback) {
|
||||
return this.context.stack.pop(options, callback);
|
||||
};
|
||||
|
||||
Instruction.prototype.push = function(item, isFuture) {
|
||||
this.context.stack.push(this.context.instructionIndex, item, isFuture);
|
||||
};
|
||||
|
||||
function toJavaScriptName(name) {
|
||||
name = name.toString().toLowerCase();
|
||||
var start = 0;
|
||||
while(start < name.length) {
|
||||
start = name.indexOf('_', start);
|
||||
if(start === -1)
|
||||
break;
|
||||
|
||||
name = name.slice(0, start) + name[start+1].toUpperCase() + name.slice(start+2);
|
||||
}
|
||||
|
||||
return name.replace(/_/g, '');
|
||||
}
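// Illustration added for this review (derived from the logic above, nothing
// new): underscore-delimited tester tokens become camelCase method names,
// which is how tester.js resolves ATOMIC_OP mutation names to transaction
// methods via inst.tr[toJavaScriptName(params[0])](key, param).
//
//   toJavaScriptName('CLEAR_RANGE_STARTS_WITH')  // -> 'clearRangeStartsWith'
//   toJavaScriptName('BIT_AND')                  // -> 'bitAnd'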
|
||||
|
||||
function startsWith(str, prefixStr) {
|
||||
return str.length >= prefixStr.length && str.substr(0, prefixStr.length) === prefixStr;
|
||||
}
|
||||
|
||||
function endsWith(str, endStr) {
|
||||
return str.length >= endStr.length && str.substr(str.length - endStr.length) === endStr;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
Stack: Stack,
|
||||
Context: Context,
|
||||
Instruction: Instruction,
|
||||
toJavaScriptName: toJavaScriptName,
|
||||
startsWith: startsWith,
|
||||
endsWith: endsWith
|
||||
};
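// Wiring note added for this review (this mirrors the usage in tester.js and
// adds no new behaviour): a test run builds a Context around a database, a
// tuple prefix, and an instruction handler, then drives it to completion.
//
//   var context = new Context(db, prefix, processOperation, directoryExtension);
//   context.run(function(err) {
//     // err is set if fetching or executing any instruction failed
//   });
//
// run() pages packed instructions out of the database 1000 at a time and
// hands each one to issueInstruction(), which strips any _SNAPSHOT or
// _DATABASE suffix before dispatching to the supplied handler.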
|
|
@ -472,7 +472,7 @@ module FDB
|
|||
prefix &&
|
||||
prefix.length > 0 &&
|
||||
!node_containing_key(tr, prefix) &&
|
||||
tr.get_range(@node_subspace.pack([prefix]), @node_subspace.pack([tr.send(:strinc, prefix)]),
|
||||
tr.get_range(@node_subspace.pack([prefix]), @node_subspace.pack([FDB.strinc(prefix)]),
|
||||
{ :limit => 1 }).to_a.empty?
|
||||
end
|
||||
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -1,47 +0,0 @@
|
|||
#!/usr/bin/python
|
||||
#
|
||||
# tarball.py
|
||||
#
|
||||
# This source file is part of the FoundationDB open source project
|
||||
#
|
||||
# Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
|
||||
|
||||
import tarfile
|
||||
import argparse
|
||||
import glob
|
||||
|
||||
|
||||
def createTarball(outFile, rootDir, inFiles):
|
||||
tar = tarfile.open(outFile, 'w:gz')
|
||||
for fStr in inFiles:
|
||||
for f in glob.glob(fStr):
|
||||
if rootDir is None:
|
||||
tar.add(f)
|
||||
else:
|
||||
tar.add(f, rootDir + "/" + f)
|
||||
|
||||
tar.close()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='Python tar utility')
|
||||
parser.add_argument('-r', dest='rootDir', type=str, help='The root directory for files in the tarball')
|
||||
parser.add_argument('outFile', type=str, help='The output tarball')
|
||||
parser.add_argument('inFile', type=str, nargs='+', help='The files to put into the tarball')
|
||||
|
||||
args = parser.parse_args()
|
||||
createTarball(args.outFile, args.rootDir, args.inFile)
|
|
@ -611,7 +611,7 @@ For **RHEL/CentOS**, perform the upgrade using the rpm command:
|
|||
user@host$ sudo rpm -Uvh |package-rpm-clients| \\
|
||||
|package-rpm-server|
|
||||
|
||||
The ``foundationdb-clients`` package also installs the :doc:`Python <api-python>` and :doc:`C <api-c>` APIs. If your clients use :doc:`Ruby <api-ruby>`, `Java <javadoc/index.html>`_, :doc:`Node.js <api-node>`, or `Go <godoc/fdb.html>`_, follow the instructions in the corresponding language documentation to install the APIs.
|
||||
The ``foundationdb-clients`` package also installs the :doc:`Python <api-python>` and :doc:`C <api-c>` APIs. If your clients use :doc:`Ruby <api-ruby>`, `Java <javadoc/index.html>`_, or `Go <https://godoc.org/github.com/apple/foundationdb/bindings/go/src/fdb>`_, follow the instructions in the corresponding language documentation to install the APIs.
|
||||
|
||||
Test the database
|
||||
-----------------
|
||||
|
|
|
@ -443,7 +443,6 @@
|
|||
.. |directory-create-or-open-blurb| replace::
|
||||
Opens the directory with ``path`` specified as |dir-path-type| of strings. ``path`` can also be a string, in which case it will be automatically wrapped in |dir-path-type|. All string values in a path will be converted to unicode. If the directory does not exist, it is created (creating parent directories if necessary).
|
||||
|
||||
.. **NOTE** this blurb is not used in api-node
|
||||
.. |directory-create-or-open-return-blurb| replace::
|
||||
Returns the directory and its contents as a |directory-subspace|.
|
||||
|
||||
|
@ -456,7 +455,6 @@
|
|||
.. |directory-move-blurb| replace::
|
||||
Moves the directory at ``old_path`` to ``new_path``. There is no effect on the physical prefix of the given directory or on clients that already have the directory open. The method will |error-raise-type| an |error-type| if a directory does not exist at ``old_path``, a directory already exists at ``new_path``, or the parent directory of ``new_path`` does not exist.
|
||||
|
||||
.. **NOTE** this blurb is not used in api-node
|
||||
.. |directory-move-return-blurb| replace::
|
||||
Returns the directory at its new location as a |directory-subspace|.
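As a rough JavaScript sketch of these two operations (illustrative only; the
``createOrOpen`` and ``move`` entry points below are assumed to follow the
same shape as the other bindings rather than being quoted from any of them)::

    // Assumed API shape, for illustration only.
    fdb.directory.createOrOpen(db, ['app', 'users'])
      .then(function(users) {
        // `users` is a directory subspace; moving the directory changes its
        // path without affecting its physical key prefix.
        return fdb.directory.move(db, ['app', 'users'], ['app', 'members']);
      });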
|
||||
|
||||
|
|
File diff suppressed because it is too large
|
@ -12,7 +12,6 @@ The following documents give detailed descriptions of the API for each language:
|
|||
|
||||
api-python
|
||||
api-ruby
|
||||
Node.js API <api-node>
|
||||
Java API <relative://javadoc/index.html>
|
||||
Go API <relative://godoc/fdb.html>
|
||||
api-c
|
||||
|
|
|
@ -110,7 +110,7 @@ Managing the FoundationDB service
|
|||
Next steps
|
||||
==========
|
||||
|
||||
* Install the APIs for :doc:`Ruby <api-ruby>`, `Java <javadoc/index.html>`_, or :doc:`Node.js <api-node>` if you intend to use those languages. :doc:`Python <api-python>` and :doc:`C <api-c>` APIs were installed along with the ``foundationdb-clients`` package above.
|
||||
* Install the APIs for :doc:`Ruby <api-ruby>`, `Java <javadoc/index.html>`_, or `Go <https://godoc.org/github.com/apple/foundationdb/bindings/go/src/fdb>`_ if you intend to use those languages. :doc:`Python <api-python>` and :doc:`C <api-c>` APIs were installed along with the ``foundationdb-clients`` package above.
|
||||
* See :doc:`tutorials` for samples of developing applications with FoundationDB.
|
||||
* See :doc:`developer-guide` for information of interest to developers, including common design patterns and performance considerations.
|
||||
* See :doc:`administration` for detailed administration information.
|
||||
|
|
|
@ -101,7 +101,7 @@ Managing the FoundationDB service
|
|||
Next steps
|
||||
==========
|
||||
|
||||
* Install the APIs for :doc:`Ruby <api-ruby>`, `Java <javadoc/index.html>`_, or :doc:`Node.js <api-node>` if you intend to use those languages. :doc:`Python <api-python>` and :doc:`C <api-c>` APIs were installed using the FoundationDB installer above.
|
||||
* Install the APIs for :doc:`Ruby <api-ruby>`, `Java <javadoc/index.html>`_, or `Go <https://godoc.org/github.com/apple/foundationdb/bindings/go/src/fdb>`_ if you intend to use those languages. :doc:`Python <api-python>` and :doc:`C <api-c>` APIs were installed using the FoundationDB installer above.
|
||||
* See :doc:`tutorials` for samples of developing applications with FoundationDB.
|
||||
* See :doc:`developer-guide` for information of interest to developers, including common design patterns and performance considerations.
|
||||
* See :doc:`administration` for detailed administration information.
|
||||
|
|
|
@ -1,90 +0,0 @@
|
|||
/*
|
||||
* nodejs.cs
|
||||
*
|
||||
* This source file is part of the FoundationDB open source project
|
||||
*
|
||||
* Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
|
||||
namespace vexillographer
|
||||
{
|
||||
class nodejs : BindingWriter
|
||||
{
|
||||
private static string getNodeLine(Option o)
|
||||
{
|
||||
string comment = "";
|
||||
if(o.comment.Length > 0)
|
||||
comment = String.Format("\t// {0}\n", o.comment);
|
||||
|
||||
string parameterComment = "";
|
||||
if (o.getParameterComment().Length > 0)
|
||||
parameterComment = String.Format("\t// {0}\n", "Parameter: " + o.getParameterComment());
|
||||
|
||||
return String.Format("{0}{1}\tADD_OPTION({2}, \"{3}\", {4}, {5});", comment, parameterComment, o.scope.ToString(), o.name, o.code, o.paramType.ToString());
|
||||
}
|
||||
|
||||
public void writeFiles(string filePath, IEnumerable<Option> options)
|
||||
{
|
||||
using (var nodeFile = System.IO.File.Open(filePath,
|
||||
System.IO.FileMode.Create, System.IO.FileAccess.Write))
|
||||
{
|
||||
TextWriter outFile = new StreamWriter(nodeFile);
|
||||
outFile.NewLine = "\n";
|
||||
|
||||
outFile.WriteLine(@"/*
|
||||
* FoundationDB Node.js API
|
||||
* Copyright (c) 2013-2018 Apple Inc.
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the ""Software""), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*/
|
||||
|
||||
#include ""FdbOptions.h""
|
||||
|
||||
void FdbOptions::InitOptions() {");
|
||||
|
||||
foreach (Option option in options) {
|
||||
if (!option.hidden && option.scope != Scope.ErrorPredicate) {
|
||||
outFile.WriteLine(getNodeLine(option) + "\n");
|
||||
}
|
||||
}
|
||||
|
||||
outFile.WriteLine("}");
|
||||
outFile.Flush();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -113,7 +113,7 @@ namespace vexillographer
|
|||
|
||||
private static void usage()
|
||||
{
|
||||
Console.WriteLine("{0} inputFile {{c,cpp,java,ruby,python,nodejs}} <outputDirectory/outputFile>",
|
||||
Console.WriteLine("{0} inputFile {{c,cpp,java,ruby,python}} <outputDirectory/outputFile>",
|
||||
Environment.GetCommandLineArgs()[0]);
|
||||
}
|
||||
|
||||
|
|
|
@ -42,7 +42,6 @@
|
|||
<Compile Include="c.cs" />
|
||||
<Compile Include="cpp.cs" />
|
||||
<Compile Include="java.cs" />
|
||||
<Compile Include="nodejs.cs" />
|
||||
<Compile Include="python.cs" />
|
||||
<Compile Include="ruby.cs" />
|
||||
<Compile Include="vexillographer.cs" />
|
||||
|
@ -74,7 +73,6 @@
|
|||
<Exec Command=""$(SolutionDir)bin\$(Configuration)\vexillographer.exe" "$(ProjectDir)fdb.options" c "$(ProjectDir)..\..\bindings\c\foundationdb\fdb_c_options.g.h"" />
|
||||
<Exec Command=""$(SolutionDir)bin\$(Configuration)\vexillographer.exe" "$(ProjectDir)fdb.options" cpp "$(ProjectDir)..\..\fdbclient\FDBOptions.g"" />
|
||||
<Exec Command=""$(SolutionDir)bin\$(Configuration)\vexillographer.exe" "$(ProjectDir)fdb.options" python "$(ProjectDir)..\..\bindings\python\fdb\fdboptions.py"" />
|
||||
<Exec Command=""$(SolutionDir)bin\$(Configuration)\vexillographer.exe" "$(ProjectDir)fdb.options" nodejs "$(ProjectDir)..\..\bindings\nodejs\src\FdbOptions.g.cpp"" />
|
||||
<Exec Command=""$(SolutionDir)bin\$(Configuration)\vexillographer.exe" "$(ProjectDir)fdb.options" ruby "$(ProjectDir)..\..\bindings\ruby\lib\fdboptions.rb"" />
|
||||
</Target>
|
||||
</Project>
|
||||
|
|
|
@ -64,22 +64,6 @@ Project("{930C7802-8A8C-48F9-8165-68863BCCD9DD}") = "MSIInstaller", "packaging\m
|
|||
{E2939DAA-238E-4970-96C4-4C57980F93BD} = {E2939DAA-238E-4970-96C4-4C57980F93BD}
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fdb_node_0_8", "bindings\nodejs\fdb_node_0_8.vcxproj", "{E936E200-689E-49FD-8463-32FE763F1860}"
|
||||
ProjectSection(ProjectDependencies) = postProject
|
||||
{CACB2C8E-3E55-4309-A411-2A9C56C6C1CB} = {CACB2C8E-3E55-4309-A411-2A9C56C6C1CB}
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fdb_node_0_10", "bindings\nodejs\fdb_node_0_10.vcxproj", "{E22D4EF8-E75D-4281-93F9-A9F73936DE54}"
|
||||
ProjectSection(ProjectDependencies) = postProject
|
||||
{CACB2C8E-3E55-4309-A411-2A9C56C6C1CB} = {CACB2C8E-3E55-4309-A411-2A9C56C6C1CB}
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fdb_node", "bindings\nodejs\fdb_node.vcxproj", "{9463CB25-DCA0-9D45-C46E-0A8E68EE7FAE}"
|
||||
ProjectSection(ProjectDependencies) = postProject
|
||||
{E936E200-689E-49FD-8463-32FE763F1860} = {E936E200-689E-49FD-8463-32FE763F1860}
|
||||
{E22D4EF8-E75D-4281-93F9-A9F73936DE54} = {E22D4EF8-E75D-4281-93F9-A9F73936DE54}
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "fdb_flow", "bindings\flow\fdb_flow.vcxproj", "{2BA0A5E2-EB4C-4A32-948C-CBAABD77AF87}"
|
||||
ProjectSection(ProjectDependencies) = postProject
|
||||
{00AC9087-0378-4872-9992-DF267CF12ACB} = {00AC9087-0378-4872-9992-DF267CF12ACB}
|
||||
|
|