/*
 * Arena.h
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef FLOW_ARENA_H
#define FLOW_ARENA_H
#include <array>
#include <iterator>
#pragma once

#include "flow/FastAlloc.h"
#include "flow/FastRef.h"
#include "flow/Error.h"
#include "flow/Trace.h"
#include "flow/ObjectSerializerTraits.h"
#include "flow/FileIdentifier.h"
#include <algorithm>
#include <stdint.h>
#include <string>
#include <cstring>
#include <limits>
#include <optional>
#include <set>
#include <type_traits>
#include <sstream>

// TrackIt is a zero-size class for tracking constructions, destructions, and assignments of instances
// of a class. Just make T inherit from TrackIt<T> to enable tracking of construction and destruction of
// T, and use the TRACKIT_ASSIGN(rhs) macro in any operator= definitions to enable assignment tracking.
//
// TrackIt writes to standard output because the trace log isn't available early in execution,
// so applying TrackIt to StringRef or VectorRef, for example, would cause a segfault if it used
// the trace log.
//
// The template parameter enables TrackIt to be inherited multiple times in the ancestry
// of a class without producing an "inaccessible due to ambiguity" error.
template <class T>
struct TrackIt {
	typedef TrackIt<T> TrackItType;
// Put TRACKIT_ASSIGN into any operator= functions for which you want assignments tracked
#define TRACKIT_ASSIGN(o) *(TrackItType*)this = *(TrackItType*)&(o)

	// The type name T is in the TrackIt output so that objects that inherit TrackIt multiple times
	// can be tracked properly; otherwise the create and delete addresses appear duplicative.
	// This function returns just the string "T]" parsed from the __PRETTY_FUNCTION__ macro. There
	// doesn't seem to be a better portable way to do this.
	static const char* __trackit__type() {
		const char* s = __PRETTY_FUNCTION__ + sizeof(__PRETTY_FUNCTION__);
		while (*--s != '=')
			;
		return s + 2;
	}

	TrackIt() { printf("TrackItCreate\t%s\t%p\t%s\n", __trackit__type(), this, platform::get_backtrace().c_str()); }
	TrackIt(const TrackIt& o) : TrackIt() {}
	TrackIt(const TrackIt&& o) : TrackIt() {}
	TrackIt& operator=(const TrackIt& o) {
		printf("TrackItAssigned\t%s\t%p<%p\t%s\n", __trackit__type(), this, &o, platform::get_backtrace().c_str());
		return *this;
	}
	TrackIt& operator=(const TrackIt&& o) { return *this = (const TrackIt&)o; }
	~TrackIt() { printf("TrackItDestroy\t%s\t%p\n", __trackit__type(), this); }
};
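
// Example (illustrative sketch, not part of the original header): a hypothetical type `Thing`
// opting into tracking by inheriting TrackIt<Thing> and using TRACKIT_ASSIGN in its operator=.
//
//   struct Thing : TrackIt<Thing> {
//       int x = 0;
//       Thing& operator=(const Thing& rhs) {
//           TRACKIT_ASSIGN(rhs); // logs a TrackItAssigned line to stdout
//           x = rhs.x;
//           return *this;
//       }
//   };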

class NonCopyable {
protected:
	NonCopyable() = default;
	~NonCopyable() = default; /// Protected non-virtual destructor
	NonCopyable(NonCopyable&&) = default;
	NonCopyable& operator=(NonCopyable&&) = default;
	NonCopyable(const NonCopyable&) = delete;
	NonCopyable& operator=(const NonCopyable&) = delete;
};

// An Arena is a custom allocator that consists of a set of ArenaBlocks. Allocation is performed by bumping a pointer
// on the most recent ArenaBlock until the block is unable to service the next allocation request. When the current
// ArenaBlock is full, a new (larger) one is added to the Arena. Deallocation is not directly supported. Instead,
// memory is freed by deleting the entire Arena at once. See flow/README.md for details on using Arenas.
class Arena {
public:
	Arena();
	explicit Arena(size_t reservedSize);
	//~Arena();
	Arena(const Arena&);
	Arena(Arena&& r) noexcept;
	Arena& operator=(const Arena&);
	Arena& operator=(Arena&&) noexcept;

	void dependsOn(const Arena& p);
	void* allocate4kAlignedBuffer(uint32_t size);
	size_t getSize() const;

	bool hasFree(size_t size, const void* address);

	friend void* operator new(size_t size, Arena& p);
	friend void* operator new[](size_t size, Arena& p);

	bool sameArena(const Arena& other) const { return impl.getPtr() == other.impl.getPtr(); }

private:
	Reference<struct ArenaBlock> impl;
};
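
// Example (illustrative sketch, not part of the original header): typical Arena usage through
// the placement operator new overloads declared above (and defined below). Individual allocations
// are never freed; all memory is released together when the last Arena referencing the blocks is
// destroyed.
//
//   Arena arena;
//   uint8_t* buffer = new (arena) uint8_t[128]; // bump-allocated from the arena
//   int* values = new (arena) int[16];
//   // ... use buffer and values; no delete. Everything is freed when `arena`
//   // (and any Arena that dependsOn() it) goes away.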

template <>
struct scalar_traits<Arena> : std::true_type {
	constexpr static size_t size = 0;
	template <class Context>
	static void save(uint8_t*, const Arena&, Context&) {}
	// Context is an arbitrary type that is plumbed by reference throughout
	// the load call tree.
	template <class Context>
	static void load(const uint8_t*, Arena& arena, Context& context) {
		context.addArena(arena);
	}
};

struct ArenaBlockRef {
	union {
		ArenaBlock* next;
		void* aligned4kBuffer;
	};

	// Only one of (next, aligned4kBuffer) is valid at any one time, as they occupy the same space.
	// If aligned4kBufferSize is not 0, aligned4kBuffer is valid, otherwise next is valid.
	uint32_t aligned4kBufferSize;

	uint32_t nextBlockOffset;
};

struct ArenaBlock : NonCopyable, ThreadSafeReferenceCounted<ArenaBlock> {
	enum {
		SMALL = 64,
		LARGE = 8193 // If size == used == LARGE, then use hugeSize, hugeUsed
	};

	enum { NOT_TINY = 255, TINY_HEADER = 6 };

	// int32_t referenceCount; // 4 bytes (in ThreadSafeReferenceCounted)
	uint8_t tinySize, tinyUsed; // If these == NOT_TINY, use bigSize, bigUsed instead
	// if tinySize != NOT_TINY, following variables aren't used
	uint32_t bigSize, bigUsed; // include block header
	uint32_t nextBlockOffset;

	void addref();
	void delref();
	bool isTiny() const;
	int size() const;
	int used() const;
	int unused() const;
	const void* getData() const;
	const void* getNextData() const;
	size_t totalSize();
	// just for debugging:
	void getUniqueBlocks(std::set<ArenaBlock*>& a);
	int addUsed(int bytes);
	void makeReference(ArenaBlock* next);
	void* make4kAlignedBuffer(uint32_t size);
	static void dependOn(Reference<ArenaBlock>& self, ArenaBlock* other);
	static void* dependOn4kAlignedBuffer(Reference<ArenaBlock>& self, uint32_t size);
	static void* allocate(Reference<ArenaBlock>& self, int bytes);
	// Return an appropriately-sized ArenaBlock to store the given data
	static ArenaBlock* create(int dataSize, Reference<ArenaBlock>& next);
	void destroy();
	void destroyLeaf();
	static void* operator new(size_t s) = delete;
};

inline void* operator new(size_t size, Arena& p) {
	UNSTOPPABLE_ASSERT(size < std::numeric_limits<int>::max());
	return ArenaBlock::allocate(p.impl, (int)size);
}
inline void operator delete(void*, Arena& p) {}
inline void* operator new[](size_t size, Arena& p) {
	UNSTOPPABLE_ASSERT(size < std::numeric_limits<int>::max());
	return ArenaBlock::allocate(p.impl, (int)size);
}
inline void operator delete[](void*, Arena& p) {}

template <class Archive>
inline void load(Archive& ar, Arena& p) {
	p = ar.arena();
}
template <class Archive>
inline void save(Archive& ar, const Arena& p) {
	// No action required
}

// Optional is a wrapper for std::optional. There
// are two primary reasons to use this wrapper instead
// of using std::optional directly:
//
// 1) Legacy: A lot of code was written using Optional before
//    std::optional was available.
// 2) When you call get() but no value is present, Optional gives an
//    assertion failure. std::optional, on the other hand, would
//    throw std::bad_optional_access. It is easier to debug assertion
//    failures, and FDB generally does not handle std exceptions, so
//    assertion failures are preferable. This is the main reason we
//    don't intend to use std::optional directly.
template <class T>
class Optional : public ComposedIdentifier<T, 4> {
public:
	Optional() = default;

	template <class U>
	Optional(const U& t) : impl(std::in_place, t) {}

	/* This conversion constructor was nice, but combined with the prior constructor it means that Optional<int> can be
	   converted to Optional<Optional<int>> in the wrong way (a non-present Optional<int> converts to a non-present
	   Optional<Optional<int>>). Use .castTo<>() instead. template <class S> Optional(const Optional<S>& o) :
	   valid(o.present()) { if (valid) new (&value) T(o.get()); } */

	Optional(Arena& a, const Optional<T>& o) {
		if (o.present())
			impl = std::make_optional<T>(a, o.get());
	}
	int expectedSize() const { return present() ? get().expectedSize() : 0; }

	template <class R>
	Optional<R> castTo() const {
		return map<R>([](const T& v) { return (R)v; });
	}

	template <class R>
	Optional<R> map(std::function<R(T)> f) const {
		if (present()) {
			return Optional<R>(f(get()));
		} else {
			return Optional<R>();
		}
	}

	bool present() const { return impl.has_value(); }
	T& get() & {
		UNSTOPPABLE_ASSERT(impl.has_value());
		return impl.value();
	}
	T const& get() const& {
		UNSTOPPABLE_ASSERT(impl.has_value());
		return impl.value();
	}
	T&& get() && {
		UNSTOPPABLE_ASSERT(impl.has_value());
		return std::move(impl.value());
	}
	T orDefault(T const& default_value) const { return impl.value_or(default_value); }

	// Spaceship operator. Treats not-present as less-than present.
	int compare(Optional const& rhs) const {
		if (present() == rhs.present()) {
			return present() ? get().compare(rhs.get()) : 0;
		}
		return present() ? 1 : -1;
	}

	bool operator==(Optional const& o) const { return impl == o.impl; }
	bool operator!=(Optional const& o) const { return !(*this == o); }
	// Ordering: If T is ordered, then Optional() < Optional(t) and (Optional(u)<Optional(v))==(u<v)
	bool operator<(Optional const& o) const { return impl < o.impl; }

	void reset() { impl.reset(); }

private:
	std::optional<T> impl;
};
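
// Example (illustrative sketch, not part of the original header): basic Optional usage.
// Calling get() on a non-present Optional fails an assertion rather than throwing.
//
//   Optional<int> a;        // not present
//   Optional<int> b(5);     // present
//   int x = a.orDefault(0); // 0
//   int y = b.get();        // 5
//   Optional<double> c = b.castTo<double>();                   // Optional<double>(5.0)
//   Optional<int> d = b.map<int>([](int v) { return v + 1; }); // Optional<int>(6)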

template <class Archive, class T>
inline void load(Archive& ar, Optional<T>& value) {
	bool valid;
	ar >> valid;
	if (valid) {
		T t;
		ar >> t;
		value = Optional<T>(t);
	} else {
		value.reset();
	}
}

template <class Archive, class T>
inline void save(Archive& ar, const Optional<T>& value) {
	ar << value.present();
	if (value.present()) {
		ar << value.get();
	}
}

template <class T>
struct Traceable<Optional<T>> : std::conditional<Traceable<T>::value, std::true_type, std::false_type>::type {
	static std::string toString(const Optional<T>& value) {
		return value.present() ? Traceable<T>::toString(value.get()) : "[not set]";
	}
};

template <class T>
struct union_like_traits<Optional<T>> : std::true_type {
	using Member = Optional<T>;
	using alternatives = pack<T>;

	template <class Context>
	static uint8_t index(const Member& variant, Context&) {
		return 0;
	}
	template <class Context>
	static bool empty(const Member& variant, Context&) {
		return !variant.present();
	}

	template <int i, class Context>
	static const T& get(const Member& variant, Context&) {
		static_assert(i == 0);
		return variant.get();
	}

	template <size_t i, class U, class Context>
	static void assign(Member& member, const U& t, Context&) {
		member = t;
	}
};

//#define STANDALONE_ALWAYS_COPY

template <class T>
class Standalone : private Arena, public T {
public:
	// T must have no destructor
	Arena& arena() { return *(Arena*)this; }
	const Arena& arena() const { return *(const Arena*)this; }

	T& contents() { return *(T*)this; }
	T const& contents() const { return *(T const*)this; }

	Standalone() {}
	Standalone(const T& t) : Arena(t.expectedSize()), T(arena(), t) {}
	Standalone<T>& operator=(const T& t) {
		Arena old = std::move(arena()); // We want to defer the destruction of the arena until after we have copied t,
		                                // in case it cross-references our previous value
		*(Arena*)this = Arena(t.expectedSize());
		*(T*)this = T(arena(), t);
		return *this;
	}

// Always-copy mode was meant to make alloc instrumentation more useful by making allocations occur at
// the final resting place of leaked objects. It doesn't actually work because some uses of Standalone
// assume the object's memory will not change on copy or assignment.
#ifdef STANDALONE_ALWAYS_COPY
	// Treat Standalone<T>'s as T's in construction and assignment so the memory is copied
	Standalone(const T& t, const Arena& arena) : Standalone(t) {}
	Standalone(const Standalone<T>& t) : Standalone((T const&)t) {}
	Standalone(const Standalone<T>&& t) : Standalone((T const&)t) {}
	Standalone<T>& operator=(const Standalone<T>&& t) {
		*this = (T const&)t;
		return *this;
	}
	Standalone<T>& operator=(const Standalone<T>& t) {
		*this = (T const&)t;
		return *this;
	}
#else
	Standalone(const T& t, const Arena& arena) : Arena(arena), T(t) {}
	Standalone(const Standalone<T>&) = default;
	Standalone<T>& operator=(const Standalone<T>&) = default;
	Standalone(Standalone<T>&&) = default;
	Standalone<T>& operator=(Standalone<T>&&) = default;
	~Standalone() = default;
#endif

	template <class U>
	Standalone<U> castTo() const {
		return Standalone<U>(*this, arena());
	}

	template <class Archive>
	void serialize(Archive& ar) {
		// FIXME: something like BinaryReader(ar) >> arena >> *(T*)this; to guarantee standalone arena???
		// T tmp;
		// ar >> tmp;
		//*this = tmp;
		serializer(ar, (*(T*)this), arena());
	}

	/*static Standalone<T> fakeStandalone( const T& t ) {
	    Standalone<T> x;
	    *(T*)&x = t;
	    return x;
	}*/
private:
	template <class U>
	Standalone(Standalone<U> const&); // unimplemented
	template <class U>
	Standalone<T> const& operator=(Standalone<U> const&); // unimplemented
};
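
// Example (illustrative sketch, not part of the original header): a Standalone<T> bundles a T
// with an Arena that owns the T's memory, so the value remains valid on its own.
// (`makeBorrowedRef` is a hypothetical stand-in for any StringRef whose memory is owned elsewhere.)
//
//   StringRef borrowed = makeBorrowedRef();  // points into memory owned elsewhere
//   Standalone<StringRef> owned = borrowed;  // deep-copies the bytes into its own arena
//   // `owned` stays valid even after the memory behind `borrowed` is released.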

extern std::string format(const char* form, ...);

#pragma pack(push, 4)
class StringRef {
public:
	constexpr static FileIdentifier file_identifier = 13300811;
	StringRef() : data(0), length(0) {}
	StringRef(Arena& p, const StringRef& toCopy) : data(new (p) uint8_t[toCopy.size()]), length(toCopy.size()) {
		if (length > 0) {
			memcpy((void*)data, toCopy.data, length);
		}
	}
	StringRef(Arena& p, const std::string& toCopy) : length((int)toCopy.size()) {
		UNSTOPPABLE_ASSERT(toCopy.size() <= std::numeric_limits<int>::max());
		data = new (p) uint8_t[toCopy.size()];
		if (length)
			memcpy((void*)data, &toCopy[0], length);
	}
	StringRef(Arena& p, const uint8_t* toCopy, int length) : data(new (p) uint8_t[length]), length(length) {
		if (length > 0) {
			memcpy((void*)data, toCopy, length);
		}
	}
	StringRef(const uint8_t* data, int length) : data(data), length(length) {}
	StringRef(const std::string& s) : data((const uint8_t*)s.c_str()), length((int)s.size()) {
		if (s.size() > std::numeric_limits<int>::max())
			abort();
	}
	// StringRef( const StringRef& p );

	const uint8_t* begin() const { return data; }
	const uint8_t* end() const { return data + length; }
	int size() const { return length; }

	uint8_t operator[](int i) const { return data[i]; }

	StringRef substr(int start) const { return StringRef(data + start, length - start); }
	StringRef substr(int start, int size) const { return StringRef(data + start, size); }
	bool startsWith(const StringRef& s) const {
		// Avoid UB - can't pass nullptr to memcmp
		if (s.size() == 0) {
			return true;
		}
		return size() >= s.size() && !memcmp(begin(), s.begin(), s.size());
	}
	bool endsWith(const StringRef& s) const {
		// Avoid UB - can't pass nullptr to memcmp
		if (s.size() == 0) {
			return true;
		}
		return size() >= s.size() && !memcmp(end() - s.size(), s.begin(), s.size());
	}

	StringRef withPrefix(const StringRef& prefix, Arena& arena) const {
		uint8_t* s = new (arena) uint8_t[prefix.size() + size()];
		if (prefix.size() > 0) {
			memcpy(s, prefix.begin(), prefix.size());
		}
		if (size() > 0) {
			memcpy(s + prefix.size(), begin(), size());
		}
		return StringRef(s, prefix.size() + size());
	}

	StringRef withSuffix(const StringRef& suffix, Arena& arena) const {
		uint8_t* s = new (arena) uint8_t[suffix.size() + size()];
		if (size() > 0) {
			memcpy(s, begin(), size());
		}
		if (suffix.size() > 0) {
			memcpy(s + size(), suffix.begin(), suffix.size());
		}
		return StringRef(s, suffix.size() + size());
	}

	Standalone<StringRef> withPrefix(const StringRef& prefix) const {
		Standalone<StringRef> r;
		r.contents() = withPrefix(prefix, r.arena());
		return r;
	}

	Standalone<StringRef> withSuffix(const StringRef& suffix) const {
		Standalone<StringRef> r;
		r.contents() = withSuffix(suffix, r.arena());
		return r;
	}

	StringRef removePrefix(const StringRef& s) const {
		// pre: startsWith(s)
		UNSTOPPABLE_ASSERT(s.size() <= size()); //< In debug mode, we could check startsWith()
		return substr(s.size());
	}

	StringRef removeSuffix(const StringRef& s) const {
		// pre: endsWith(s)
		UNSTOPPABLE_ASSERT(s.size() <= size()); //< In debug mode, we could check endsWith()
		return substr(0, size() - s.size());
	}

	std::string toString() const { return std::string((const char*)data, length); }

	static bool isPrintable(char c) { return c > 32 && c < 127; }
	inline std::string printable() const;

	std::string toHexString(int limit = -1) const {
		if (limit < 0)
			limit = length;
		if (length > limit) {
			// If the limit is high enough, split it so that 2/3 of the limit is used to show prefix bytes
			// and the rest is used for suffix bytes
			if (limit >= 9) {
				int suffix = limit / 3;
				return substr(0, limit - suffix).toHexString() + "..." + substr(length - suffix, suffix).toHexString() +
				       format(" [%d bytes]", length);
			}
			return substr(0, limit).toHexString() + format("...[%d]", length);
		}

		std::string s;
		s.reserve(length * 7);
		for (int i = 0; i < length; i++) {
			uint8_t b = (*this)[i];
			if (isalnum(b))
				s.append(format("%02x (%c) ", b, b));
			else
				s.append(format("%02x ", b));
		}
		if (s.size() > 0)
			s.resize(s.size() - 1);
		return s;
	}

	int expectedSize() const { return size(); }

	int compare(StringRef const& other) const {
		size_t minSize = std::min(size(), other.size());
		if (minSize != 0) {
			int c = memcmp(begin(), other.begin(), minSize);
			if (c != 0)
				return c;
		}
		return ::compare(size(), other.size());
	}

	int compareSuffix(StringRef const& other, int prefixLen) const {
		// pre: prefixLen <= size() && prefixLen <= other.size()
		size_t minSuffixSize = std::min(size(), other.size()) - prefixLen;
		if (minSuffixSize != 0) {
			int c = memcmp(begin() + prefixLen, other.begin() + prefixLen, minSuffixSize);
			if (c != 0)
				return c;
		}
		return ::compare(size(), other.size());
	}

	// Removes bytes from begin up to and including the sep string, returns StringRef of the part before sep
	StringRef eat(StringRef sep) {
		for (int i = 0, iend = size() - sep.size(); i <= iend; ++i) {
			if (sep.compare(substr(i, sep.size())) == 0) {
				StringRef token = substr(0, i);
				*this = substr(i + sep.size());
				return token;
			}
		}
		return eat();
	}
	StringRef eat() {
		StringRef r = *this;
		*this = StringRef();
		return r;
	}
	StringRef eat(const char* sep) { return eat(StringRef((const uint8_t*)sep, (int)strlen(sep))); }
	// Return StringRef of bytes from begin() up to but not including the first byte matching any byte in sep,
	// and remove that sequence (including the sep byte) from *this
	// Returns and removes all bytes from *this if no bytes within sep were found
	StringRef eatAny(StringRef sep, uint8_t* foundSeparator) {
		auto iSep = std::find_first_of(begin(), end(), sep.begin(), sep.end());
		if (iSep != end()) {
			if (foundSeparator != nullptr) {
				*foundSeparator = *iSep;
			}
			const int i = iSep - begin();
			StringRef token = substr(0, i);
			*this = substr(i + 1);
			return token;
		}
		return eat();
	}
	StringRef eatAny(const char* sep, uint8_t* foundSeparator) {
		return eatAny(StringRef((const uint8_t*)sep, strlen(sep)), foundSeparator);
	}

	// Copies string contents to dst and returns a pointer to the next byte after
	uint8_t* copyTo(uint8_t* dst) const {
		memcpy(dst, data, length);
		return dst + length;
	}

	std::vector<StringRef> splitAny(StringRef sep) const {
		StringRef r = *this;
		std::vector<StringRef> tokens;
		while (r.size()) {
			tokens.push_back(r.eatAny(sep, nullptr));
		}
		return tokens;
	}
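
	// Example (illustrative sketch, not part of the original header): tokenizing with
	// eat(), eatAny(), and splitAny(); eat() consumes through an exact separator string,
	// eatAny() through the first occurrence of any separator byte. (The _sr literal used
	// here is defined later in this header.)
	//
	//   StringRef line = "a,b;c"_sr;
	//   StringRef first = line.eat(",");             // first == "a", line == "b;c"
	//   StringRef next = line.eatAny(";,", nullptr); // next == "b", line == "c"
	//   std::vector<StringRef> parts = "a,b;c"_sr.splitAny(";,"_sr); // {"a", "b", "c"}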

private:
	// Unimplemented; blocks conversion through std::string
	StringRef(char*);

	const uint8_t* data;
	int length;
};
#pragma pack(pop)

namespace std {
template <>
struct hash<StringRef> {
	static constexpr std::hash<std::string_view> hashFunc{};
	std::size_t operator()(StringRef const& tag) const {
		return hashFunc(std::string_view((const char*)tag.begin(), tag.size()));
	}
};
} // namespace std

template <>
struct TraceableString<StringRef> {
	static const char* begin(StringRef value) { return reinterpret_cast<const char*>(value.begin()); }

	static bool atEnd(const StringRef& value, const char* iter) {
		return iter == reinterpret_cast<const char*>(value.end());
	}

	static std::string toString(const StringRef& value) { return value.toString(); }
};

template <>
struct Traceable<StringRef> : TraceableStringImpl<StringRef> {};

inline std::string StringRef::printable() const {
	return Traceable<StringRef>::toString(*this);
}

template <class T>
struct Traceable<Standalone<T>> : std::conditional<Traceable<T>::value, std::true_type, std::false_type>::type {
	static std::string toString(const Standalone<T>& value) { return Traceable<T>::toString(value); }
};

#define LiteralStringRef(str) StringRef((const uint8_t*)(str), sizeof((str)) - 1)
inline StringRef operator"" _sr(const char* str, size_t size) {
	return StringRef(reinterpret_cast<const uint8_t*>(str), size);
}
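
// Example (illustrative sketch, not part of the original header): both forms wrap a string
// literal's bytes in a StringRef without copying; the _sr literal is the more concise spelling.
//
//   StringRef a = LiteralStringRef("hello");
//   StringRef b = "hello"_sr;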

// makeString is used to allocate a Standalone<StringRef> of a known length for later
// mutation (via mutateString). If you need to append to a string of unknown length,
// consider factoring StringBuffer from DiskQueue.actor.cpp.
inline static Standalone<StringRef> makeString(int length) {
	Standalone<StringRef> returnString;
	uint8_t* outData = new (returnString.arena()) uint8_t[length];
	((StringRef&)returnString) = StringRef(outData, length);
	return returnString;
}

inline static Standalone<StringRef> makeAlignedString(int alignment, int length) {
	Standalone<StringRef> returnString;
	uint8_t* outData = new (returnString.arena()) uint8_t[alignment + length];
	outData = (uint8_t*)((((uintptr_t)outData + (alignment - 1)) / alignment) * alignment);
	((StringRef&)returnString) = StringRef(outData, length);
	return returnString;
}

inline static StringRef makeString(int length, Arena& arena) {
	uint8_t* outData = new (arena) uint8_t[length];
	return StringRef(outData, length);
}

// mutateString() simply casts away const and returns a pointer that can be used to mutate the
// contents of the given StringRef (it will also accept Standalone<StringRef>). Obviously this
// is only legitimate if you know where the StringRef's memory came from and that it is not shared!
inline static uint8_t* mutateString(StringRef& s) {
	return const_cast<uint8_t*>(s.begin());
}
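
// Example (illustrative sketch, not part of the original header): allocating a fixed-length
// string and filling it in place via mutateString(). This is safe here because the memory was
// just allocated by makeString() and is not shared.
//
//   Standalone<StringRef> str = makeString(4);
//   uint8_t* bytes = mutateString(str);
//   memset(bytes, 'x', 4); // str now holds "xxxx"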

template <class Archive>
inline void load(Archive& ar, StringRef& value) {
	uint32_t length;
	ar >> length;
	value = StringRef(ar.arenaRead(length), length);
}
template <class Archive>
inline void save(Archive& ar, const StringRef& value) {
	ar << (uint32_t)value.size();
	ar.serializeBytes(value.begin(), value.size());
}

template <>
struct dynamic_size_traits<StringRef> : std::true_type {
	template <class Context>
	static size_t size(const StringRef& t, Context&) {
		return t.size();
	}
	template <class Context>
	static void save(uint8_t* out, const StringRef& t, Context&) {
		std::copy(t.begin(), t.end(), out);
	}

	template <class Context>
	static void load(const uint8_t* ptr, size_t sz, StringRef& str, Context& context) {
		str = StringRef(context.tryReadZeroCopy(ptr, sz), sz);
	}
};

inline bool operator==(const StringRef& lhs, const StringRef& rhs) {
	if (lhs.size() == 0 && rhs.size() == 0) {
		return true;
	}
	ASSERT(lhs.size() >= 0);
	return lhs.size() == rhs.size() && memcmp(lhs.begin(), rhs.begin(), static_cast<unsigned int>(lhs.size())) == 0;
}
inline bool operator<(const StringRef& lhs, const StringRef& rhs) {
	if (std::min(lhs.size(), rhs.size()) > 0) {
		int c = memcmp(lhs.begin(), rhs.begin(), std::min(lhs.size(), rhs.size()));
		if (c != 0)
			return c < 0;
	}
	return lhs.size() < rhs.size();
}
inline bool operator>(const StringRef& lhs, const StringRef& rhs) {
	if (std::min(lhs.size(), rhs.size()) > 0) {
		int c = memcmp(lhs.begin(), rhs.begin(), std::min(lhs.size(), rhs.size()));
		if (c != 0)
			return c > 0;
	}
	return lhs.size() > rhs.size();
}
inline bool operator!=(const StringRef& lhs, const StringRef& rhs) {
	return !(lhs == rhs);
}
inline bool operator<=(const StringRef& lhs, const StringRef& rhs) {
	return !(lhs > rhs);
}
inline bool operator>=(const StringRef& lhs, const StringRef& rhs) {
	return !(lhs < rhs);
}

// This trait is used by VectorRef to determine whether the deep copy constructor should
// recursively deep-copy each element.
//
// TODO: There should be an easier way to identify the difference between flow_ref and non-flow_ref types.
// std::is_trivially_copyable does not work because some flow_ref types are trivially copyable
// and some non-flow_ref types are not trivially copyable.
template <typename T>
struct flow_ref : std::integral_constant<bool, !std::is_fundamental_v<T>> {};

template <>
struct flow_ref<UID> : std::integral_constant<bool, false> {};

template <class A, class B>
struct flow_ref<std::pair<A, B>> : std::integral_constant<bool, false> {};
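
// Example (illustrative sketch, not part of the original header): a hypothetical plain-old-data
// type with no arena-owned members could opt out of deep copies the same way:
//
//   struct Coord { int x, y; };
//   template <>
//   struct flow_ref<Coord> : std::integral_constant<bool, false> {};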

template <class T>
struct string_serialized_traits : std::false_type {
	int32_t getSize(const T& item) const { return 0; }

	uint32_t save(uint8_t* out, const T& t) const { return 0; }

	template <class Context>
	uint32_t load(const uint8_t* data, T& t, Context& context) {
		return 0;
	}
};

enum class VecSerStrategy { FlatBuffers, String };

template <class T, VecSerStrategy>
struct VectorRefPreserializer {
	VectorRefPreserializer() {}
	VectorRefPreserializer(const VectorRefPreserializer<T, VecSerStrategy::FlatBuffers>&) {}
	VectorRefPreserializer& operator=(const VectorRefPreserializer<T, VecSerStrategy::FlatBuffers>&) { return *this; }
	VectorRefPreserializer(const VectorRefPreserializer<T, VecSerStrategy::String>&) {}
	VectorRefPreserializer& operator=(const VectorRefPreserializer<T, VecSerStrategy::String>&) { return *this; }

	void invalidate() {}
	void add(const T& item) {}
	void remove(const T& item) {}
	void reset() {}
};

template <class T>
struct VectorRefPreserializer<T, VecSerStrategy::String> {
	mutable int32_t _cached_size; // -1 means unknown
	string_serialized_traits<T> _string_traits;

	VectorRefPreserializer() : _cached_size(0) {}
	VectorRefPreserializer(const VectorRefPreserializer<T, VecSerStrategy::String>& other)
	  : _cached_size(other._cached_size) {}
	VectorRefPreserializer& operator=(const VectorRefPreserializer<T, VecSerStrategy::String>& other) {
		_cached_size = other._cached_size;
		return *this;
	}
	VectorRefPreserializer(const VectorRefPreserializer<T, VecSerStrategy::FlatBuffers>&) : _cached_size(-1) {}
	VectorRefPreserializer& operator=(const VectorRefPreserializer<T, VecSerStrategy::FlatBuffers>&) {
		_cached_size = -1;
		return *this;
	}

	void invalidate() { _cached_size = -1; }
	void add(const T& item) {
		if (_cached_size > 0) {
			_cached_size += _string_traits.getSize(item);
		}
	}
	void remove(const T& item) {
		if (_cached_size > 0) {
			_cached_size -= _string_traits.getSize(item);
		}
	}
	void reset() { _cached_size = 0; }
};

template <class T, VecSerStrategy SerStrategy = VecSerStrategy::FlatBuffers>
class VectorRef : public ComposedIdentifier<T, 3>, public VectorRefPreserializer<T, SerStrategy> {
	using VPS = VectorRefPreserializer<T, SerStrategy>;
	friend class VectorRef<T,
	                       SerStrategy == VecSerStrategy::FlatBuffers ? VecSerStrategy::String
	                                                                  : VecSerStrategy::FlatBuffers>;

public:
	using value_type = T;
	static_assert(SerStrategy == VecSerStrategy::FlatBuffers || string_serialized_traits<T>::value);

	// T must be trivially copyable!
	// T must be trivially destructible, because ~T is never called
	static_assert(std::is_trivially_destructible_v<T>);
	VectorRef() : data(0), m_size(0), m_capacity(0) {}

	template <VecSerStrategy S>
	VectorRef(const VectorRef<T, S>& other)
	  : VPS(other), data(other.data), m_size(other.m_size), m_capacity(other.m_capacity) {}
	template <VecSerStrategy S>
	VectorRef& operator=(const VectorRef<T, S>& other) {
		*static_cast<VPS*>(this) = other;
		data = other.data;
		m_size = other.m_size;
		m_capacity = other.m_capacity;
		return *this;
	}

	// Arena constructor for non-Ref types, identified by !flow_ref
	template <class T2 = T, VecSerStrategy S>
	VectorRef(Arena& p, const VectorRef<T, S>& toCopy, typename std::enable_if<!flow_ref<T2>::value, int>::type = 0)
	  : VPS(toCopy), data((T*)new (p) uint8_t[sizeof(T) * toCopy.size()]), m_size(toCopy.size()),
	    m_capacity(toCopy.size()) {
		if (m_size > 0) {
			std::copy(toCopy.data, toCopy.data + m_size, data);
		}
	}

	// Arena constructor for Ref types, which must have an Arena constructor
	template <class T2 = T, VecSerStrategy S>
	VectorRef(Arena& p, const VectorRef<T, S>& toCopy, typename std::enable_if<flow_ref<T2>::value, int>::type = 0)
	  : VPS(), data((T*)new (p) uint8_t[sizeof(T) * toCopy.size()]), m_size(toCopy.size()), m_capacity(toCopy.size()) {
		for (int i = 0; i < m_size; i++) {
			auto ptr = new (&data[i]) T(p, toCopy[i]);
			VPS::add(*ptr);
		}
	}

	template <class It>
	VectorRef(Arena& arena, It first, It last) {
		if constexpr (flow_ref<T>::value) {
			append_deep(arena, first, std::distance(first, last));
		} else {
			append(arena, first, std::distance(first, last));
		}
	}

	VectorRef(T* data, int size) : data(data), m_size(size), m_capacity(size) {}
	VectorRef(T* data, int size, int capacity) : data(data), m_size(size), m_capacity(capacity) {}
	// VectorRef( const VectorRef<T>& toCopy ) : data( toCopy.data ), m_size( toCopy.m_size ), m_capacity(
	// toCopy.m_capacity ) {} VectorRef<T>& operator=( const VectorRef<T>& );

	template <VecSerStrategy S = SerStrategy>
	typename std::enable_if<S == VecSerStrategy::String, uint32_t>::type serializedSize() const {
		uint32_t result = sizeof(uint32_t);
		string_serialized_traits<T> t;
		if (VPS::_cached_size >= 0) {
			return result + VPS::_cached_size;
		}
		for (const auto& v : *this) {
			result += t.getSize(v);
		}
		VPS::_cached_size = result - sizeof(uint32_t);
		return result;
	}

	const T* begin() const { return data; }
	const T* end() const { return data + m_size; }
	T const& front() const { return *begin(); }
	T const& back() const { return end()[-1]; }
	int size() const { return m_size; }
	bool empty() const { return m_size == 0; }
	const T& operator[](int i) const { return data[i]; }

	// const versions of some VectorRef operators
	const T* cbegin() const { return data; }
	const T* cend() const { return data + m_size; }
	T const& cfront() const { return *begin(); }
	T const& cback() const { return end()[-1]; }

	std::reverse_iterator<const T*> rbegin() const { return std::reverse_iterator<const T*>(end()); }
	std::reverse_iterator<const T*> rend() const { return std::reverse_iterator<const T*>(begin()); }

	template <VecSerStrategy S = SerStrategy>
	typename std::enable_if<S == VecSerStrategy::FlatBuffers, VectorRef>::type slice(int begin, int end) const {
		return VectorRef(data + begin, end - begin);
	}

	template <VecSerStrategy S>
	bool operator==(VectorRef<T, S> const& rhs) const {
		if (size() != rhs.size())
			return false;
		for (int i = 0; i < m_size; i++)
			if ((*this)[i] != rhs[i])
				return false;
		return true;
	}
	template <VecSerStrategy S>
	bool operator!=(VectorRef<T, S> const& rhs) const {
		return !(*this == rhs);
	}

	// Warning: Do not mutate a VectorRef that has previously been copy constructed or assigned,
	// since copies will share data
	T* begin() {
		VPS::invalidate();
		return data;
	}
	T* end() {
		VPS::invalidate();
		return data + m_size;
	}
	T& front() {
		VPS::invalidate();
		return *begin();
	}
	T& back() {
		VPS::invalidate();
		return end()[-1];
	}
	T& operator[](int i) {
		VPS::invalidate();
		return data[i];
	}
	void push_back(Arena& p, const T& value) {
		if (m_size + 1 > m_capacity)
			reallocate(p, m_size + 1);
		auto ptr = new (&data[m_size]) T(value);
		VPS::add(*ptr);
		m_size++;
	}

	template <class... Us>
	T& emplace_back(Arena& p, Us&&... args) {
		if (m_size + 1 > m_capacity)
			reallocate(p, m_size + 1);
		auto ptr = new (&data[m_size]) T(std::forward<Us>(args)...);
		VPS::add(*ptr);
		m_size++;
		return *ptr;
	}

	// invokes the "Deep copy constructor" T(Arena&, const T&) moving T entirely into arena
	void push_back_deep(Arena& p, const T& value) {
		if (m_size + 1 > m_capacity)
			reallocate(p, m_size + 1);
		auto ptr = new (&data[m_size]) T(p, value);
		VPS::add(*ptr);
		m_size++;
	}

	// invokes the "Deep copy constructor" T(Arena&, U&&) moving T entirely into arena
	template <class... Us>
	T& emplace_back_deep(Arena& p, Us&&... args) {
		if (m_size + 1 > m_capacity)
			reallocate(p, m_size + 1);
		auto ptr = new (&data[m_size]) T(p, std::forward<Us>(args)...);
		VPS::add(*ptr);
		m_size++;
		return *ptr;
	}

	template <class It>
	void append(Arena& p, It begin, int count) {
		if (m_size + count > m_capacity)
			reallocate(p, m_size + count);
		VPS::invalidate();
		if (count > 0) {
			std::copy(begin, begin + count, data + m_size);
		}
		m_size += count;
	}
	template <class It>
	void append_deep(Arena& p, It begin, int count) {
		if (m_size + count > m_capacity)
			reallocate(p, m_size + count);
		for (int i = 0; i < count; i++) {
			auto ptr = new (&data[m_size + i]) T(p, *begin++);
			VPS::add(*ptr);
		}
		m_size += count;
	}
	void pop_back() {
		VPS::remove(back());
		m_size--;
	}

	void pop_front(int count) {
		VPS::invalidate();
		count = std::min(m_size, count);

		data += count;
		m_size -= count;
		m_capacity -= count;
	}

	void resize(Arena& p, int size) {
		if (size > m_capacity)
			reallocate(p, size);
		for (int i = m_size; i < size; i++) {
			auto ptr = new (&data[i]) T();
			VPS::add(*ptr);
		}
		m_size = size;
	}

	void clear() {
		VPS::reset();
		m_size = 0;
	}

	void reserve(Arena& p, int size) {
		if (size > m_capacity)
			reallocate(p, size);
	}

	// expectedSize() for non-Ref types, identified by !flow_ref
	template <class T2 = T>
	typename std::enable_if<!flow_ref<T2>::value, size_t>::type expectedSize() const {
		return sizeof(T) * m_size;
	}

	// expectedSize() for Ref types, which must in turn have expectedSize() implemented.
	template <class T2 = T>
	typename std::enable_if<flow_ref<T2>::value, size_t>::type expectedSize() const {
		size_t t = sizeof(T) * m_size;
		for (int i = 0; i < m_size; i++)
			t += data[i].expectedSize();
		return t;
	}

	int capacity() const { return m_capacity; }

	void extendUnsafeNoReallocNoInit(int amount) { m_size += amount; }

protected:
	T* data;
	int m_size, m_capacity;

	void reallocate(Arena& p, int requiredCapacity) {
		requiredCapacity = std::max(m_capacity * 2, requiredCapacity);
		// SOMEDAY: Maybe we are right at the end of the arena and can expand cheaply
		T* newData = new (p) T[requiredCapacity];
		if (m_size > 0) {
			std::move(data, data + m_size, newData);
		}
		data = newData;
		m_capacity = requiredCapacity;
	}
};
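
// Example (illustrative sketch, not part of the original header): shallow vs. deep insertion.
// push_back() stores the element as-is, so any memory it references must already live at least
// as long as the vector; push_back_deep() invokes T(Arena&, const T&) to copy the element's
// contents into the arena. (`longLivedRef` is a hypothetical StringRef with stable backing memory.)
//
//   Arena arena;
//   VectorRef<StringRef> v;
//   v.push_back_deep(arena, "copied into arena"_sr); // string bytes copied into the arena
//   v.push_back(arena, longLivedRef);                // only the (pointer, length) pair is stored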

// This is a VectorRef that optimizes for tiny to small sizes.
// It keeps the first InlineMembers elements inline (on the stack), which means
// that all of them are always copied. This should be faster when you expect
// the vector to usually be very small, as no allocation is needed in those cases.
|
|
|
|
template <class T, int InlineMembers = 1>
class SmallVectorRef {
    static_assert(InlineMembers >= 0);

public:
    // types
    template <bool isConst>
    class iterator_impl {
        using self_t = iterator_impl<isConst>;
        using VecType = SmallVectorRef<T, InlineMembers>;
        std::conditional_t<isConst, const VecType*, VecType*> vec = nullptr;
        int idx = 0;

    public:
        using iterator_category = std::random_access_iterator_tag;
        using value_type = std::conditional_t<isConst, const T, T>;
        using difference_type = int;
        using pointer = value_type*;
        using reference = value_type&;
        friend class SmallVectorRef<T, InlineMembers>;
        friend bool operator<(const self_t& lhs, const self_t& rhs) {
            ASSERT(lhs.vec == rhs.vec);
            return lhs.idx < rhs.idx;
        }
        friend bool operator>(const self_t& lhs, const self_t& rhs) {
            ASSERT(lhs.vec == rhs.vec);
            return lhs.idx > rhs.idx;
        }
        friend bool operator<=(const self_t& lhs, const self_t& rhs) {
            ASSERT(lhs.vec == rhs.vec);
            return lhs.idx <= rhs.idx;
        }
        friend bool operator>=(const self_t& lhs, const self_t& rhs) {
            ASSERT(lhs.vec == rhs.vec);
            return lhs.idx >= rhs.idx;
        }
        friend self_t operator+(const self_t& lhs, difference_type diff) {
            auto res = lhs;
            res.idx += diff;
            return res;
        }
        friend self_t operator+(difference_type diff, const self_t& rhs) {
            auto res = rhs;
            res.idx += diff;
            return res;
        }
        friend self_t operator-(const self_t& lhs, difference_type diff) {
            auto res = lhs;
            res.idx -= diff;
            return res;
        }
        friend difference_type operator-(const self_t& lhs, const self_t& rhs) {
            ASSERT(lhs.vec == rhs.vec);
            return lhs.idx - rhs.idx;
        }
        self_t& operator++() {
            ++idx;
            return *this;
        }
        self_t operator++(int) {
            auto res = *this;
            ++(*this);
            return res;
        }
        self_t& operator--() {
            --idx;
            return *this;
        }
        self_t operator--(int) {
            auto res = *this;
            --(*this);
            return res;
        }
        self_t& operator+=(difference_type diff) {
            idx += diff;
            return *this;
        }
        self_t& operator-=(difference_type diff) {
            idx -= diff;
            return *this;
        }
        bool operator!=(self_t const& o) const { return vec != o.vec || idx != o.idx; }
        bool operator==(self_t const& o) const { return vec == o.vec && idx == o.idx; }
        reference operator[](difference_type i) const { return get(idx + i); }
        // Map a logical index onto the inline array or the spilled-out VectorRef.
        reference get(int i) const {
            if (i < InlineMembers) {
                return vec->arr[i];
            } else {
                return vec->data[i - InlineMembers];
            }
        }
        reference get() const { return get(idx); }
        reference operator*() const { return get(); }
        pointer operator->() const { return &get(); }
    };
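
    // How iterator_impl::get() splits a logical index (illustrative only):
    // with InlineMembers == 2 and five elements, indices map as
    //
    //     logical index:  0       1       2        3        4
    //     storage:        arr[0]  arr[1]  data[0]  data[1]  data[2]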
    using const_iterator = iterator_impl<true>;
    using iterator = iterator_impl<false>;
    using reverse_iterator = std::reverse_iterator<iterator>;
    using const_reverse_iterator = std::reverse_iterator<const_iterator>;

public: // Construction
    static_assert(std::is_trivially_destructible_v<T>);
    SmallVectorRef() {}
    SmallVectorRef(const SmallVectorRef<T, InlineMembers>& other)
      : m_size(other.m_size), arr(other.arr), data(other.data) {}
    SmallVectorRef& operator=(const SmallVectorRef<T, InlineMembers>& other) {
        m_size = other.m_size;
        arr = other.arr;
        data = other.data;
        return *this;
    }

    template <class T2 = T, int IM = InlineMembers>
    SmallVectorRef(Arena& arena,
                   const SmallVectorRef<T, IM>& toCopy,
                   typename std::enable_if<!flow_ref<T2>::value, int>::type = 0)
      : m_size(0) {
        append(arena, toCopy.begin(), toCopy.size());
    }

    template <class T2 = T, int IM = InlineMembers>
    SmallVectorRef(Arena& arena,
                   const SmallVectorRef<T2, IM>& toCopy,
                   typename std::enable_if<flow_ref<T2>::value, int>::type = 0)
      : m_size(0) {
        append_deep(arena, toCopy.begin(), toCopy.size());
    }

    template <class It>
    SmallVectorRef(Arena& arena, It first, It last) : m_size(0) {
        if constexpr (flow_ref<T>::value) {
            append_deep(arena, first, std::distance(first, last));
        } else {
            append(arena, first, std::distance(first, last));
        }
    }

public: // information
    int size() const { return m_size; }
    int capacity() const { return InlineMembers + data.capacity(); }
    bool empty() const { return m_size == 0; }

public: // element access
    T const& front() const { return *cbegin(); }
    T const& back() const { return *crbegin(); }
    T& front() { return *begin(); }
    T& back() { return *rbegin(); }
    T const& operator[](int i) const {
        if (i < InlineMembers) {
            return arr[i];
        } else {
            return data[i - InlineMembers];
        }
    }

public: // Modification
    void push_back(Arena& arena, T const& value) {
        if (m_size < InlineMembers) {
            new (&arr[m_size++]) T(value);
            return;
        }
        ++m_size;
        data.push_back(arena, value);
    }

    void push_back_deep(Arena& arena, T const& value) {
        if (m_size < InlineMembers) {
            new (&arr[m_size++]) T(arena, value);
            return;
        }
        ++m_size;
        data.push_back_deep(arena, value);
    }
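
    // A minimal sketch of shallow vs. deep insertion (illustrative only;
    // assumes `s` is a StringRef aliasing memory owned elsewhere):
    //
    //     SmallVectorRef<StringRef> v;
    //     v.push_back(arena, s);      // aliases s's bytes; they must outlive v
    //     v.push_back_deep(arena, s); // copies s's bytes into arena first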

    void pop_back() { --m_size; }

    template <class It>
    void append(Arena& arena, It first, int count) {
        ASSERT(count >= 0);
        while (count > 0 && m_size < InlineMembers) {
            new (&(arr[m_size++])) T(*(first++));
            --count;
        }
        data.append(arena, first, count);
        m_size += count;
    }

    template <class It>
    void append_deep(Arena& arena, It first, int count) {
        ASSERT(count >= 0);
        while (count > 0 && m_size < InlineMembers) {
            new (&(arr[m_size++])) T(arena, *(first++));
            --count;
        }
        data.append_deep(arena, first, count);
        m_size += count;
    }

public: // iterator access
    iterator begin() {
        iterator res;
        res.vec = this;
        res.idx = 0;
        return res;
    }

    const_iterator cbegin() const {
        const_iterator res;
        res.vec = this;
        res.idx = 0;
        return res;
    }

    const_iterator begin() const { return cbegin(); }

    iterator end() {
        iterator res;
        res.vec = this;
        res.idx = m_size;
        return res;
    }

    const_iterator cend() const {
        const_iterator res;
        res.vec = this;
        res.idx = m_size;
        return res;
    }

    const_iterator end() const { return cend(); }

    reverse_iterator rbegin() { return reverse_iterator(end()); }

    const_reverse_iterator crbegin() const { return const_reverse_iterator(cend()); }

    const_reverse_iterator rbegin() const { return crbegin(); }

    reverse_iterator rend() { return reverse_iterator(begin()); }

    const_reverse_iterator crend() const { return const_reverse_iterator(cbegin()); }

    const_reverse_iterator rend() const { return crend(); }

private:
    int m_size = 0;
    std::array<T, InlineMembers> arr;
    VectorRef<T> data;
};

template <class T>
struct Traceable<VectorRef<T>> {
    constexpr static bool value = Traceable<T>::value;

    static std::string toString(const VectorRef<T>& value) {
        std::stringstream ss;
        bool first = true;
        for (const auto& v : value) {
            if (first) {
                first = false;
            } else {
                ss << ' ';
            }
            ss << Traceable<T>::toString(v);
        }
        return ss.str();
    }
};
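
// For example (illustrative only): tracing a VectorRef<int> holding {1, 2, 3}
// renders as "1 2 3" - elements separated by single spaces.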

template <class Archive, class T, VecSerStrategy S>
inline void load(Archive& ar, VectorRef<T, S>& value) {
    // FIXME: range checking for length, here and in other serialize code
    uint32_t length;
    ar >> length;
    UNSTOPPABLE_ASSERT(length * sizeof(T) < (100 << 20));
    // SOMEDAY: Can we avoid running constructors for all the values?
    value.resize(ar.arena(), length);
    for (uint32_t i = 0; i < length; i++)
        ar >> value[i];
}

template <class Archive, class T, VecSerStrategy S>
inline void save(Archive& ar, const VectorRef<T, S>& value) {
    uint32_t length = value.size();
    ar << length;
    for (uint32_t i = 0; i < length; i++)
        ar << value[i];
}
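
// A minimal sketch of the resulting wire format (illustrative only; element
// encoding depends on the archive): a VectorRef<uint8_t> holding {7, 8} is
// written as a uint32_t length followed by the elements in order, e.g. with a
// little-endian byte archive:
//
//     02 00 00 00 07 08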

template <class T>
struct vector_like_traits<VectorRef<T, VecSerStrategy::FlatBuffers>> : std::true_type {
    using Vec = VectorRef<T>;
    using value_type = typename Vec::value_type;
    using iterator = const T*;
    using insert_iterator = T*;

    template <class Context>
    static size_t num_entries(const VectorRef<T>& v, Context&) {
        return v.size();
    }
    template <class Context>
    static void reserve(VectorRef<T>& v, size_t s, Context& context) {
        v.resize(context.arena(), s);
    }

    template <class Context>
    static insert_iterator insert(Vec& v, Context&) {
        return v.begin();
    }
    template <class Context>
    static iterator begin(const Vec& v, Context&) {
        return v.begin();
    }
};
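
// A minimal sketch of how a deserializer is expected to drive these hooks
// (illustrative only; readElement() is a hypothetical stand-in for the real
// flatbuffers decoding):
//
//     using Traits = vector_like_traits<VectorRef<int, VecSerStrategy::FlatBuffers>>;
//     Traits::reserve(vec, n, context);        // allocate backing storage in the arena
//     auto out = Traits::insert(vec, context); // T* cursor into that storage
//     for (size_t i = 0; i < n; ++i)
//         *out++ = readElement();              // fill elements in place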

template <class V>
struct dynamic_size_traits<VectorRef<V, VecSerStrategy::String>> : std::true_type {
    using T = VectorRef<V, VecSerStrategy::String>;
    // May be called multiple times during one serialization
    template <class Context>
    static size_t size(const T& t, Context&) {
        return t.serializedSize();
    }

    // Guaranteed to be called only once during serialization
    template <class Context>
    static void save(uint8_t* out, const T& t, Context&) {
        string_serialized_traits<V> traits;
        auto* p = out;
        uint32_t length = t.size();
        *reinterpret_cast<decltype(length)*>(out) = length;
        out += sizeof(length);
        for (const auto& item : t) {
            out += traits.save(out, item);
        }
        ASSERT(out - p == t._cached_size + sizeof(uint32_t));
    }

    // Context is an arbitrary type that is plumbed by reference throughout the
    // load call tree.
    template <class Context>
    static void load(const uint8_t* data, size_t size, T& t, Context& context) {
        string_serialized_traits<V> traits;
        auto* p = data;
        uint32_t num_elements;
        memcpy(&num_elements, data, sizeof(num_elements));
        data += sizeof(num_elements);
        t.resize(context.arena(), num_elements);
        for (unsigned i = 0; i < num_elements; ++i) {
            data += traits.load(data, t[i], context);
        }
        ASSERT(data - p == size);
        t._cached_size = size - sizeof(uint32_t);
    }
};
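
// A minimal sketch of the String-strategy layout (illustrative only):
//
//     [uint32_t num_elements][element 0 bytes...][element 1 bytes...]...
//
// save() asserts it wrote exactly _cached_size payload bytes plus the length
// prefix, and load() checks that exactly `size` bytes were consumed before
// caching size - sizeof(uint32_t) in _cached_size.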

#endif