/*
 * Arena.h
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef FLOW_ARENA_H
#define FLOW_ARENA_H
#pragma once

#include "flow/FastAlloc.h"
#include "flow/FastRef.h"
#include "flow/Error.h"
#include "flow/Trace.h"
#include "flow/ObjectSerializerTraits.h"
#include "flow/FileIdentifier.h"
#include <algorithm>
#include <stdint.h>
#include <string>
#include <cstring>
#include <limits>
#include <set>
#include <type_traits>
#include <sstream>
// TrackIt is a zero-size class for tracking constructions, destructions, and assignments of instances
// of a class. Just inherit TrackIt<T> from T to enable tracking of construction and destruction of
// T, and use the TRACKIT_ASSIGN(rhs) macro in any operator= definitions to enable assignment tracking.
//
// TrackIt writes to standard output because the trace log isn't available early in execution,
// so applying TrackIt to StringRef or VectorRef, for example, would cause a segfault using the trace log.
//
// The template parameter enables TrackIt to be inherited multiple times in the ancestry
// of a class without producing an "inaccessible due to ambiguity" error.
template <class T>
struct TrackIt {
	typedef TrackIt<T> TrackItType;
	// Put TRACKIT_ASSIGN into any operator= functions for which you want assignments tracked
	#define TRACKIT_ASSIGN(o) *(TrackItType*)this = *(TrackItType*)&(o)

	// The type name T is in the TrackIt output so that objects that inherit TrackIt multiple times
	// can be tracked properly; otherwise the create and delete addresses appear duplicative.
	// This function returns just the string "T]" parsed from the __PRETTY_FUNCTION__ macro. There
	// doesn't seem to be a better portable way to do this.
	static const char* __trackit__type() {
		const char* s = __PRETTY_FUNCTION__ + sizeof(__PRETTY_FUNCTION__);
		while (*--s != '=');
		return s + 2;
	}

	TrackIt() {
		printf("TrackItCreate\t%s\t%p\t%s\n", __trackit__type(), this, platform::get_backtrace().c_str());
	}
	TrackIt(const TrackIt& o) : TrackIt() {}
	TrackIt(const TrackIt&& o) : TrackIt() {}
	TrackIt& operator=(const TrackIt& o) {
		printf("TrackItAssigned\t%s\t%p<%p\t%s\n", __trackit__type(), this, &o, platform::get_backtrace().c_str());
		return *this;
	}
	TrackIt& operator=(const TrackIt&& o) { return *this = (const TrackIt&)o; }
	~TrackIt() {
		printf("TrackItDestroy\t%s\t%p\n", __trackit__type(), this);
	}
};
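
// Usage sketch (illustrative only; 'MyThing' is a hypothetical class, not part of this header):
// inherit TrackIt<MyThing> to log constructions/destructions, and put TRACKIT_ASSIGN in
// operator= to log assignments as well.
//
//   struct MyThing : TrackIt<MyThing> {
//       int x;
//       MyThing& operator=(const MyThing& rhs) {
//           TRACKIT_ASSIGN(rhs);
//           x = rhs.x;
//           return *this;
//       }
//   };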
class NonCopyable {
protected:
	NonCopyable() {}
	~NonCopyable() {} /// Protected non-virtual destructor
private:
	NonCopyable(const NonCopyable&);
	NonCopyable& operator=(const NonCopyable&);
};
class Arena {
public:
	inline Arena();
	inline explicit Arena(size_t reservedSize);
	//~Arena();
	Arena(const Arena&);
	Arena(Arena&& r) BOOST_NOEXCEPT;
	Arena& operator=(const Arena&);
	Arena& operator=(Arena&&) BOOST_NOEXCEPT;

	inline void dependsOn(const Arena& p);
	inline size_t getSize() const;

	inline bool hasFree(size_t size, const void* address);

	friend void* operator new (size_t size, Arena& p);
	friend void* operator new[] (size_t size, Arena& p);
//private:
	Reference<struct ArenaBlock> impl;
};
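
// Usage sketch (illustrative, not part of the original header): objects placed in an Arena
// via placement new live until the last Arena handle referencing its blocks is destroyed;
// dependsOn() ties another arena's memory lifetime to this one.
//
//   Arena arena;
//   uint8_t* buf = new (arena) uint8_t[64];  // freed when all handles to 'arena' are gone
//   Arena other;
//   other.dependsOn(arena);                  // 'other' now keeps arena's blocks alive too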
template <>
struct scalar_traits<Arena> : std::true_type {
	constexpr static size_t size = 0;
	template <class Context>
	static void save(uint8_t*, const Arena&, Context&) {}
	// Context is an arbitrary type that is plumbed by reference throughout
	// the load call tree.
	template <class Context>
	static void load(const uint8_t*, Arena& arena, Context& context) {
		context.addArena(arena);
	}
};
struct ArenaBlockRef {
	ArenaBlock* next;
	uint32_t nextBlockOffset;
};

struct ArenaBlock : NonCopyable, ThreadSafeReferenceCounted<ArenaBlock> {
	enum {
		SMALL = 64,
		LARGE = 8193 // If size == used == LARGE, then use hugeSize, hugeUsed
	};

	enum { NOT_TINY = 255, TINY_HEADER = 6 };

	// int32_t referenceCount;  // 4 bytes (in ThreadSafeReferenceCounted)
	uint8_t tinySize, tinyUsed; // If these == NOT_TINY, use bigSize, bigUsed instead
	// if tinySize != NOT_TINY, the following variables aren't used
	uint32_t bigSize, bigUsed; // include block header
	uint32_t nextBlockOffset;

	void delref() {
		if (delref_no_destroy()) destroy();
	}

	bool isTiny() const { return tinySize != NOT_TINY; }
	int size() const { if (isTiny()) return tinySize; else return bigSize; }
	int used() const { if (isTiny()) return tinyUsed; else return bigUsed; }
	inline int unused() const { if (isTiny()) return tinySize - tinyUsed; else return bigSize - bigUsed; }
	const void* getData() const { return this; }
	const void* getNextData() const { return (const uint8_t*)getData() + used(); }

	size_t totalSize() {
		if (isTiny()) return size();

		size_t s = size();
		int o = nextBlockOffset;
		while (o) {
			ArenaBlockRef* r = (ArenaBlockRef*)((char*)getData() + o);
			s += r->next->totalSize();
			o = r->nextBlockOffset;
		}
		return s;
	}

	// just for debugging:
	void getUniqueBlocks(std::set<ArenaBlock*>& a) {
		a.insert(this);
		if (isTiny()) return;

		int o = nextBlockOffset;
		while (o) {
			ArenaBlockRef* r = (ArenaBlockRef*)((char*)getData() + o);
			r->next->getUniqueBlocks(a);
			o = r->nextBlockOffset;
		}
		return;
	}

	inline int addUsed(int bytes) {
		if (isTiny()) {
			int t = tinyUsed;
			tinyUsed += bytes;
			return t;
		} else {
			int t = bigUsed;
			bigUsed += bytes;
			return t;
		}
	}

	void makeReference(ArenaBlock* next) {
		ArenaBlockRef* r = (ArenaBlockRef*)((char*)getData() + bigUsed);
		r->next = next;
		r->nextBlockOffset = nextBlockOffset;
		nextBlockOffset = bigUsed;
		bigUsed += sizeof(ArenaBlockRef);
	}

	static void dependOn(Reference<ArenaBlock>& self, ArenaBlock* other) {
		other->addref();
		if (!self || self->isTiny() || self->unused() < sizeof(ArenaBlockRef))
			create(SMALL, self)->makeReference(other);
		else
			self->makeReference(other);
	}

	static inline void* allocate(Reference<ArenaBlock>& self, int bytes) {
		ArenaBlock* b = self.getPtr();
		if (!self || self->unused() < bytes) b = create(bytes, self);

		return (char*)b->getData() + b->addUsed(bytes);
	}
	// Return an appropriately-sized ArenaBlock to store the given data
	static ArenaBlock* create(int dataSize, Reference<ArenaBlock>& next) {
		ArenaBlock* b;
		if (dataSize <= SMALL - TINY_HEADER && !next) {
			if (dataSize <= 16 - TINY_HEADER) { b = (ArenaBlock*)FastAllocator<16>::allocate(); b->tinySize = 16; INSTRUMENT_ALLOCATE("Arena16"); }
			else if (dataSize <= 32 - TINY_HEADER) { b = (ArenaBlock*)FastAllocator<32>::allocate(); b->tinySize = 32; INSTRUMENT_ALLOCATE("Arena32"); }
			else { b = (ArenaBlock*)FastAllocator<64>::allocate(); b->tinySize = 64; INSTRUMENT_ALLOCATE("Arena64"); }
			b->tinyUsed = TINY_HEADER;
		} else {
			int reqSize = dataSize + sizeof(ArenaBlock);
			if (next) reqSize += sizeof(ArenaBlockRef);

			if (reqSize < LARGE) {
				// Each block should be larger than the previous block, up to a limit, to minimize allocations
				// Worst-case allocation pattern: 1 +10 +17 +42 +67 +170 +323 +681 +1348 +2728 +2210 +2211 (+1K +3K+1 +4K)*
				// Overhead: 4X for small arenas, 3X intermediate, 1.33X for large arenas
				int prevSize = next ? next->size() : 0;
				reqSize = std::max(reqSize, std::min(prevSize * 2, std::max(LARGE - 1, reqSize * 4)));
			}

			if (reqSize < LARGE) {
				if (reqSize <= 128) { b = (ArenaBlock*)FastAllocator<128>::allocate(); b->bigSize = 128; INSTRUMENT_ALLOCATE("Arena128"); }
				else if (reqSize <= 256) { b = (ArenaBlock*)FastAllocator<256>::allocate(); b->bigSize = 256; INSTRUMENT_ALLOCATE("Arena256"); }
				else if (reqSize <= 512) { b = (ArenaBlock*)FastAllocator<512>::allocate(); b->bigSize = 512; INSTRUMENT_ALLOCATE("Arena512"); }
				else if (reqSize <= 1024) { b = (ArenaBlock*)FastAllocator<1024>::allocate(); b->bigSize = 1024; INSTRUMENT_ALLOCATE("Arena1024"); }
				else if (reqSize <= 2048) { b = (ArenaBlock*)FastAllocator<2048>::allocate(); b->bigSize = 2048; INSTRUMENT_ALLOCATE("Arena2048"); }
				else if (reqSize <= 4096) { b = (ArenaBlock*)FastAllocator<4096>::allocate(); b->bigSize = 4096; INSTRUMENT_ALLOCATE("Arena4096"); }
				else { b = (ArenaBlock*)FastAllocator<8192>::allocate(); b->bigSize = 8192; INSTRUMENT_ALLOCATE("Arena8192"); }
				b->tinySize = b->tinyUsed = NOT_TINY;
				b->bigUsed = sizeof(ArenaBlock);
			} else {
#ifdef ALLOC_INSTRUMENTATION
				allocInstr["ArenaHugeKB"].alloc((reqSize + 1023) >> 10);
#endif
				b = (ArenaBlock*)new uint8_t[reqSize];
				b->tinySize = b->tinyUsed = NOT_TINY;
				b->bigSize = reqSize;
				b->bigUsed = sizeof(ArenaBlock);

				if (FLOW_KNOBS && g_trace_depth == 0 &&
				    nondeterministicRandom()->random01() < (reqSize / FLOW_KNOBS->HUGE_ARENA_LOGGING_BYTES)) {
					hugeArenaSample(reqSize);
				}
				g_hugeArenaMemory.fetch_add(reqSize);

				// If the new block has less free space than the old block, make the old block depend on it
				if (next && !next->isTiny() && next->unused() >= reqSize - dataSize) {
					b->nextBlockOffset = 0;
					b->setrefCountUnsafe(1);
					next->makeReference(b);
					return b;
				}
			}
			b->nextBlockOffset = 0;
			if (next) b->makeReference(next.getPtr());
		}
		b->setrefCountUnsafe(1);
		next.setPtrUnsafe(b);
		return b;
	}
	inline void destroy();

	void destroyLeaf() {
		if (isTiny()) {
			if (tinySize <= 16) { FastAllocator<16>::release(this); INSTRUMENT_RELEASE("Arena16"); }
			else if (tinySize <= 32) { FastAllocator<32>::release(this); INSTRUMENT_RELEASE("Arena32"); }
			else { FastAllocator<64>::release(this); INSTRUMENT_RELEASE("Arena64"); }
		} else {
			if (bigSize <= 128) { FastAllocator<128>::release(this); INSTRUMENT_RELEASE("Arena128"); }
			else if (bigSize <= 256) { FastAllocator<256>::release(this); INSTRUMENT_RELEASE("Arena256"); }
			else if (bigSize <= 512) { FastAllocator<512>::release(this); INSTRUMENT_RELEASE("Arena512"); }
			else if (bigSize <= 1024) { FastAllocator<1024>::release(this); INSTRUMENT_RELEASE("Arena1024"); }
			else if (bigSize <= 2048) { FastAllocator<2048>::release(this); INSTRUMENT_RELEASE("Arena2048"); }
			else if (bigSize <= 4096) { FastAllocator<4096>::release(this); INSTRUMENT_RELEASE("Arena4096"); }
			else if (bigSize <= 8192) { FastAllocator<8192>::release(this); INSTRUMENT_RELEASE("Arena8192"); }
			else {
#ifdef ALLOC_INSTRUMENTATION
				allocInstr["ArenaHugeKB"].dealloc((bigSize + 1023) >> 10);
#endif
				g_hugeArenaMemory.fetch_sub(bigSize);
				delete[] (uint8_t*)this;
			}
		}
	}
private:
	static void* operator new (size_t s); // not implemented
};
inline Arena::Arena() : impl(NULL) {}
inline Arena::Arena(size_t reservedSize) : impl(0) {
	UNSTOPPABLE_ASSERT(reservedSize < std::numeric_limits<int>::max());
	if (reservedSize) ArenaBlock::create((int)reservedSize, impl);
}
inline Arena::Arena(const Arena& r) : impl(r.impl) {}
inline Arena::Arena(Arena&& r) BOOST_NOEXCEPT : impl(std::move(r.impl)) {}
inline Arena& Arena::operator=(const Arena& r) {
	impl = r.impl;
	return *this;
}
inline Arena& Arena::operator=(Arena&& r) BOOST_NOEXCEPT {
	impl = std::move(r.impl);
	return *this;
}
inline void Arena::dependsOn(const Arena& p) {
	if (p.impl) ArenaBlock::dependOn(impl, p.impl.getPtr());
}
inline size_t Arena::getSize() const { return impl ? impl->totalSize() : 0; }
inline bool Arena::hasFree(size_t size, const void* address) {
	return impl && impl->unused() >= size && impl->getNextData() == address;
}

inline void* operator new (size_t size, Arena& p) {
	UNSTOPPABLE_ASSERT(size < std::numeric_limits<int>::max());
	return ArenaBlock::allocate(p.impl, (int)size);
}
inline void operator delete (void*, Arena& p) {}
inline void* operator new[] (size_t size, Arena& p) {
	UNSTOPPABLE_ASSERT(size < std::numeric_limits<int>::max());
	return ArenaBlock::allocate(p.impl, (int)size);
}
inline void operator delete[] (void*, Arena& p) {}

template <class Archive>
inline void load(Archive& ar, Arena& p) {
	p = ar.arena();
}
template <class Archive>
inline void save(Archive& ar, const Arena& p) {
	// No action required
}
template <class T>
class Optional : public ComposedIdentifier<T, 0x10> {
public:
	Optional() : valid(false) {}
	Optional(const Optional<T>& o) : valid(o.valid) {
		if (valid) new (&value) T(o.get());
	}

	template <class U>
	Optional(const U& t) : valid(true) { new (&value) T(t); }

	/* This conversion constructor was nice, but combined with the prior constructor it means that Optional<int>
	can be converted to Optional<Optional<int>> in the wrong way
	(a non-present Optional<int> converts to a non-present Optional<Optional<int>>).
	Use .castTo<>() instead.
	template <class S> Optional(const Optional<S>& o) : valid(o.present()) { if (valid) new (&value) T(o.get()); } */

	Optional(Arena& a, const Optional<T>& o) : valid(o.valid) {
		if (valid) new (&value) T(a, o.get());
	}
	int expectedSize() const { return valid ? get().expectedSize() : 0; }

	template <class R> Optional<R> castTo() const {
		return map<R>([](const T& v) { return (R)v; });
	}

	template <class R> Optional<R> map(std::function<R(T)> f) const {
		if (present()) {
			return Optional<R>(f(get()));
		} else {
			return Optional<R>();
		}
	}

	~Optional() {
		if (valid) ((T*)&value)->~T();
	}

	Optional& operator=(Optional const& o) {
		if (valid) {
			valid = false;
			((T*)&value)->~T();
		}
		if (o.valid) {
			new (&value) T(o.get());
			valid = true;
		}
		return *this;
	}

	bool present() const { return valid; }
	T& get() {
		UNSTOPPABLE_ASSERT(valid);
		return *(T*)&value;
	}
	T const& get() const {
		UNSTOPPABLE_ASSERT(valid);
		return *(T const*)&value;
	}
	T orDefault(T const& default_value) const { if (valid) return get(); else return default_value; }

	template <class Ar>
	void serialize(Ar& ar) {
		// SOMEDAY: specialize for space efficiency?
		if (valid && Ar::isDeserializing)
			(*(T*)&value).~T();
		serializer(ar, valid);
		if (valid) {
			if (Ar::isDeserializing) new (&value) T();
			serializer(ar, *(T*)&value);
		}
	}

	bool operator==(Optional const& o) const {
		return present() == o.present() && (!present() || get() == o.get());
	}
	bool operator!=(Optional const& o) const {
		return !(*this == o);
	}
	// Ordering: If T is ordered, then Optional() < Optional(t) and (Optional(u)<Optional(v))==(u<v)
	bool operator<(Optional const& o) const {
		if (present() != o.present()) return o.present();
		if (!present()) return false;
		return get() < o.get();
	}

	void reset() {
		if (valid) {
			valid = false;
			((T*)&value)->~T();
		}
	}

private:
	typename std::aligned_storage<sizeof(T), __alignof(T)>::type value;
	bool valid;
};
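
// Usage sketch (illustrative, not part of the original header):
//
//   Optional<int> a(5);
//   Optional<double> b = a.map<double>([](int v) { return v / 2.0; }); // present: 2.5
//   Optional<int> none;
//   int d = none.orDefault(42);                                        // 42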
template <class T>
struct Traceable<Optional<T>> : std::conditional<Traceable<T>::value, std::true_type, std::false_type>::type {
	static std::string toString(const Optional<T>& value) {
		return value.present() ? Traceable<T>::toString(value.get()) : "[not set]";
	}
};

template <class T>
struct union_like_traits<Optional<T>> : std::true_type {
	using Member = Optional<T>;
	using alternatives = pack<T>;

	template <class Context>
	static uint8_t index(const Member& variant, Context&) { return 0; }
	template <class Context>
	static bool empty(const Member& variant, Context&) { return !variant.present(); }

	template <int i, class Context>
	static const T& get(const Member& variant, Context&) {
		static_assert(i == 0);
		return variant.get();
	}

	template <size_t i, class U, class Context>
	static const void assign(Member& member, const U& t, Context&) {
		member = t;
	}
};
//#define STANDALONE_ALWAYS_COPY

template <class T>
class Standalone : private Arena, public T {
public:
	// T must have no destructor
	Arena& arena() { return *(Arena*)this; }
	const Arena& arena() const { return *(const Arena*)this; }

	T& contents() { return *(T*)this; }
	T const& contents() const { return *(T const*)this; }

	Standalone() {}
	Standalone(const T& t) : Arena(t.expectedSize()), T(arena(), t) {}
	Standalone<T>& operator=(const T& t) {
		// We want to defer the destruction of the arena until after we have copied t,
		// in case it cross-references our previous value
		Arena old = std::move(arena());
		*(Arena*)this = Arena(t.expectedSize());
		*(T*)this = T(arena(), t);
		return *this;
	}

// Always-copy mode was meant to make alloc instrumentation more useful by making allocations occur at the final
// resting place of objects leaked. It doesn't actually work because some uses of Standalone things assume the
// object's memory will not change on copy or assignment.
#ifdef STANDALONE_ALWAYS_COPY
	// Treat Standalone<T>'s as T's in construction and assignment so the memory is copied
	Standalone(const T& t, const Arena& arena) : Standalone(t) {}
	Standalone(const Standalone<T>& t) : Standalone((T const&)t) {}
	Standalone(const Standalone<T>&& t) : Standalone((T const&)t) {}
	Standalone<T>& operator=(const Standalone<T>&& t) {
		*this = (T const&)t;
		return *this;
	}
	Standalone<T>& operator=(const Standalone<T>& t) {
		*this = (T const&)t;
		return *this;
	}
#else
	Standalone(const T& t, const Arena& arena) : Arena(arena), T(t) {}
	Standalone(const Standalone<T>& t) : Arena((Arena const&)t), T((T const&)t) {}
	Standalone<T>& operator=(const Standalone<T>& t) {
		*(Arena*)this = (Arena const&)t;
		*(T*)this = (T const&)t;
		return *this;
	}
#endif

	template <class Archive>
	void serialize(Archive& ar) {
		// FIXME: something like BinaryReader(ar) >> arena >> *(T*)this; to guarantee standalone arena???
		//T tmp;
		//ar >> tmp;
		//*this = tmp;
		serializer(ar, (*(T*)this), arena());
	}

	/*static Standalone<T> fakeStandalone( const T& t ) {
		Standalone<T> x;
		*(T*)&x = t;
		return x;
	}*/
private:
	template <class U> Standalone(Standalone<U> const&); // unimplemented
	template <class U> Standalone<T> const& operator=(Standalone<U> const&); // unimplemented
};
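
// Usage sketch (illustrative, not part of the original header): constructing a Standalone<T>
// from a T deep-copies the value into the Standalone's own arena, so it remains valid after
// the memory the original T pointed into is released.
//
//   StringRef ephemeral((const uint8_t*)"abc", 3); // points at memory owned elsewhere
//   Standalone<StringRef> owned(ephemeral);        // deep copy into owned.arena()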
extern std::string format(const char* form, ...);

#pragma pack(push, 4)
class StringRef {
public:
	constexpr static FileIdentifier file_identifier = 13300811;

	StringRef() : data(0), length(0) {}
	StringRef(Arena& p, const StringRef& toCopy) : data(new (p) uint8_t[toCopy.size()]), length(toCopy.size()) {
		if (length > 0) {
			memcpy((void*)data, toCopy.data, length);
		}
	}
	StringRef(Arena& p, const std::string& toCopy) : length((int)toCopy.size()) {
		UNSTOPPABLE_ASSERT(toCopy.size() <= std::numeric_limits<int>::max());
		data = new (p) uint8_t[toCopy.size()];
		if (length) memcpy((void*)data, &toCopy[0], length);
	}
	StringRef(Arena& p, const uint8_t* toCopy, int length) : data(new (p) uint8_t[length]), length(length) {
		if (length > 0) {
			memcpy((void*)data, toCopy, length);
		}
	}
	StringRef(const uint8_t* data, int length) : data(data), length(length) {}
	StringRef(const std::string& s) : data((const uint8_t*)s.c_str()), length((int)s.size()) {
		if (s.size() > std::numeric_limits<int>::max()) abort();
	}
	//StringRef( const StringRef& p );

	const uint8_t* begin() const { return data; }
	const uint8_t* end() const { return data + length; }
	int size() const { return length; }

	uint8_t operator[](int i) const { return data[i]; }

	StringRef substr(int start) const { return StringRef(data + start, length - start); }
	StringRef substr(int start, int size) const { return StringRef(data + start, size); }
	bool startsWith(const StringRef& s) const { return size() >= s.size() && !memcmp(begin(), s.begin(), s.size()); }
	bool endsWith(const StringRef& s) const { return size() >= s.size() && !memcmp(end() - s.size(), s.begin(), s.size()); }
	StringRef withPrefix(const StringRef& prefix, Arena& arena) const {
		uint8_t* s = new (arena) uint8_t[prefix.size() + size()];
		if (prefix.size() > 0) {
			memcpy(s, prefix.begin(), prefix.size());
		}
		if (size() > 0) {
			memcpy(s + prefix.size(), begin(), size());
		}
		return StringRef(s, prefix.size() + size());
	}

	StringRef withSuffix(const StringRef& suffix, Arena& arena) const {
		uint8_t* s = new (arena) uint8_t[suffix.size() + size()];
		if (size() > 0) {
			memcpy(s, begin(), size());
		}
		if (suffix.size() > 0) {
			memcpy(s + size(), suffix.begin(), suffix.size());
		}
		return StringRef(s, suffix.size() + size());
	}

	Standalone<StringRef> withPrefix(const StringRef& prefix) const {
		Standalone<StringRef> r;
		r.contents() = withPrefix(prefix, r.arena());
		return r;
	}

	Standalone<StringRef> withSuffix(const StringRef& suffix) const {
		Standalone<StringRef> r;
		r.contents() = withSuffix(suffix, r.arena());
		return r;
	}

	StringRef removePrefix(const StringRef& s) const {
		// pre: startsWith(s)
		UNSTOPPABLE_ASSERT(s.size() <= size()); //< In debug mode, we could check startsWith()
		return substr(s.size());
	}

	StringRef removeSuffix(const StringRef& s) const {
		// pre: endsWith(s)
		UNSTOPPABLE_ASSERT(s.size() <= size()); //< In debug mode, we could check endsWith()
		return substr(0, size() - s.size());
	}
	std::string toString() const { return std::string((const char*)data, length); }

	static bool isPrintable(char c) { return c > 32 && c < 127; }
	inline std::string printable() const;

	std::string toHexString(int limit = -1) const {
		if (limit < 0) limit = length;
		if (length > limit) {
			// If limit is high enough, split it so that 2/3 of limit is used to show prefix bytes
			// and the rest is used for suffix bytes
			if (limit >= 9) {
				int suffix = limit / 3;
				return substr(0, limit - suffix).toHexString() + "..." +
				       substr(length - suffix, suffix).toHexString() + format(" [%d bytes]", length);
			}
			return substr(0, limit).toHexString() + format("...[%d]", length);
		}

		std::string s;
		s.reserve(length * 7);
		for (int i = 0; i < length; i++) {
			uint8_t b = (*this)[i];
			if (isalnum(b))
				s.append(format("%02x (%c) ", b, b));
			else
				s.append(format("%02x ", b));
		}
		if (s.size() > 0) s.resize(s.size() - 1);
		return s;
	}
	int expectedSize() const { return size(); }

	int compare(StringRef const& other) const {
		if (std::min(size(), other.size()) > 0) {
			int c = memcmp(begin(), other.begin(), std::min(size(), other.size()));
			if (c != 0) return c;
		}
		return size() - other.size();
	}

	// Removes bytes from begin() up to and including the sep string; returns a StringRef of the part before sep
	StringRef eat(StringRef sep) {
		for (int i = 0, iend = size() - sep.size(); i <= iend; ++i) {
			if (sep.compare(substr(i, sep.size())) == 0) {
				StringRef token = substr(0, i);
				*this = substr(i + sep.size());
				return token;
			}
		}
		return eat();
	}
	StringRef eat() {
		StringRef r = *this;
		*this = StringRef();
		return r;
	}
	StringRef eat(const char* sep) {
		return eat(StringRef((const uint8_t*)sep, (int)strlen(sep)));
	}

	// Returns a StringRef of the bytes from begin() up to but not including the first byte matching any byte
	// in sep, and removes that sequence (including the separator byte) from *this.
	// If no byte in sep is found, returns and removes all bytes from *this.
	StringRef eatAny(StringRef sep, uint8_t* foundSeparator) {
		auto iSep = std::find_first_of(begin(), end(), sep.begin(), sep.end());
		if (iSep != end()) {
			if (foundSeparator != nullptr) {
				*foundSeparator = *iSep;
			}
			const int i = iSep - begin();
			StringRef token = substr(0, i);
			*this = substr(i + 1);
			return token;
		}
		return eat();
	}
	StringRef eatAny(const char* sep, uint8_t* foundSeparator) {
		return eatAny(StringRef((const uint8_t*)sep, strlen(sep)), foundSeparator);
	}

private:
	// Unimplemented; blocks conversion through std::string
	StringRef(char*);

	const uint8_t* data;
	int length;
};
#pragma pack(pop)
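
// Usage sketch (illustrative, not part of the original header): eat() tokenizes in place.
//
//   StringRef line = LiteralStringRef("key=value;rest");
//   StringRef key = line.eat("=");               // key == "key",   line == "value;rest"
//   uint8_t sepByte;
//   StringRef val = line.eatAny(";,", &sepByte); // val == "value", sepByte == ';'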
template <>
struct TraceableString<StringRef> {
	static const char* begin(StringRef value) {
		return reinterpret_cast<const char*>(value.begin());
	}

	static bool atEnd(const StringRef& value, const char* iter) {
		return iter == reinterpret_cast<const char*>(value.end());
	}

	static std::string toString(const StringRef& value) {
		return value.toString();
	}
};

template <>
struct Traceable<StringRef> : TraceableStringImpl<StringRef> {};

inline std::string StringRef::printable() const {
	return Traceable<StringRef>::toString(*this);
}

template <class T>
struct Traceable<Standalone<T>> : std::conditional<Traceable<T>::value, std::true_type, std::false_type>::type {
	static std::string toString(const Standalone<T>& value) {
		return Traceable<T>::toString(value);
	}
};
#define LiteralStringRef(str) StringRef((const uint8_t*)(str), sizeof((str)) - 1)

// makeString is used to allocate a Standalone<StringRef> of a known length for later
// mutation (via mutateString). If you need to append to a string of unknown length,
// consider factoring StringBuffer from DiskQueue.actor.cpp.
inline static Standalone<StringRef> makeString(int length) {
	Standalone<StringRef> returnString;
	uint8_t* outData = new (returnString.arena()) uint8_t[length];
	((StringRef&)returnString) = StringRef(outData, length);
	return returnString;
}

inline static Standalone<StringRef> makeAlignedString(int alignment, int length) {
	Standalone<StringRef> returnString;
	uint8_t* outData = new (returnString.arena()) uint8_t[alignment + length];
	outData = (uint8_t*)((((uintptr_t)outData + (alignment - 1)) / alignment) * alignment);
	((StringRef&)returnString) = StringRef(outData, length);
	return returnString;
}

inline static StringRef makeString(int length, Arena& arena) {
	uint8_t* outData = new (arena) uint8_t[length];
	return StringRef(outData, length);
}

// mutateString() simply casts away const and returns a pointer that can be used to mutate the
// contents of the given StringRef (it will also accept Standalone<StringRef>). Obviously this
// is only legitimate if you know where the StringRef's memory came from and that it is not shared!
inline static uint8_t* mutateString(StringRef& s) { return const_cast<uint8_t*>(s.begin()); }
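
// Usage sketch (illustrative, not part of the original header): makeString() allocates
// uninitialized bytes in the returned Standalone's arena, and mutateString() hands back a
// writable pointer, which is safe here because we exclusively own that fresh memory.
//
//   Standalone<StringRef> s = makeString(5);
//   memcpy(mutateString(s), "hello", 5);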
template <class Archive>
inline void load(Archive& ar, StringRef& value) {
	uint32_t length;
	ar >> length;
	value = StringRef(ar.arenaRead(length), length);
}
template <class Archive>
inline void save(Archive& ar, const StringRef& value) {
	ar << (uint32_t)value.size();
	ar.serializeBytes(value.begin(), value.size());
}

template <>
struct dynamic_size_traits<StringRef> : std::true_type {
	template <class Context>
	static size_t size(const StringRef& t, Context&) { return t.size(); }
	template <class Context>
	static void save(uint8_t* out, const StringRef& t, Context&) { std::copy(t.begin(), t.end(), out); }

	template <class Context>
	static void load(const uint8_t* ptr, size_t sz, StringRef& str, Context& context) {
		str = StringRef(context.tryReadZeroCopy(ptr, sz), sz);
	}
};
inline bool operator==(const StringRef& lhs, const StringRef& rhs) {
	if (lhs.size() == 0 && rhs.size() == 0) {
		return true;
	}
	return lhs.size() == rhs.size() && !memcmp(lhs.begin(), rhs.begin(), lhs.size());
}
inline bool operator<(const StringRef& lhs, const StringRef& rhs) {
	if (std::min(lhs.size(), rhs.size()) > 0) {
		int c = memcmp(lhs.begin(), rhs.begin(), std::min(lhs.size(), rhs.size()));
		if (c != 0) return c < 0;
	}
	return lhs.size() < rhs.size();
}
inline bool operator>(const StringRef& lhs, const StringRef& rhs) {
	if (std::min(lhs.size(), rhs.size()) > 0) {
		int c = memcmp(lhs.begin(), rhs.begin(), std::min(lhs.size(), rhs.size()));
		if (c != 0) return c > 0;
	}
	return lhs.size() > rhs.size();
}
inline bool operator!=(const StringRef& lhs, const StringRef& rhs) { return !(lhs == rhs); }
inline bool operator<=(const StringRef& lhs, const StringRef& rhs) { return !(lhs > rhs); }
inline bool operator>=(const StringRef& lhs, const StringRef& rhs) { return !(lhs < rhs); }
// This trait is used by VectorRef to determine if it should just memcpy the vector contents.
// FIXME: VectorRef really should use std::is_trivially_copyable for this BUT that is not implemented
// in gcc c++0x, so instead we will use this custom trait which defaults to std::is_trivial, which
// handles most situations but others will have to be specialized.
template <typename T>
struct memcpy_able : std::is_trivial<T> {};

template <>
struct memcpy_able<UID> : std::integral_constant<bool, true> {};
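
// Sketch (illustrative only; 'MyType' is hypothetical, not part of this header): a type that
// std::is_trivial does not classify as trivial but that is nonetheless safe to memcpy can opt
// in explicitly, exactly as the UID specialization above does.
//
//   template <>
//   struct memcpy_able<MyType> : std::integral_constant<bool, true> {};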
template <class T>
struct string_serialized_traits : std::false_type {
	int32_t getSize(const T& item) const { return 0; }

	uint32_t save(uint8_t* out, const T& t) const { return 0; }

	template <class Context>
	uint32_t load(const uint8_t* data, T& t, Context& context) {
		return 0;
	}
};
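
// Sketch (illustrative, not part of the original header): a type opts in to string
// serialization by specializing string_serialized_traits as true_type and implementing
// getSize/save/load over its own wire format. A hypothetical fixed-width example:
//
//   template <>
//   struct string_serialized_traits<int32_t> : std::true_type {
//       int32_t getSize(const int32_t&) const { return sizeof(int32_t); }
//       uint32_t save(uint8_t* out, const int32_t& v) const {
//           memcpy(out, &v, sizeof(v));
//           return sizeof(v);
//       }
//       template <class Context>
//       uint32_t load(const uint8_t* data, int32_t& v, Context&) {
//           memcpy(&v, data, sizeof(v));
//           return sizeof(v);
//       }
//   };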
enum class VecSerStrategy { FlatBuffers, String };

template <class T, VecSerStrategy>
struct VectorRefPreserializer {
	VectorRefPreserializer() {}
	VectorRefPreserializer(const VectorRefPreserializer<T, VecSerStrategy::FlatBuffers>&) {}
	VectorRefPreserializer& operator=(const VectorRefPreserializer<T, VecSerStrategy::FlatBuffers>&) { return *this; }
	VectorRefPreserializer(const VectorRefPreserializer<T, VecSerStrategy::String>&) {}
	VectorRefPreserializer& operator=(const VectorRefPreserializer<T, VecSerStrategy::String>&) { return *this; }

	void invalidate() {}
	void add(const T& item) {}
	void remove(const T& item) {}
};
template <class T>
struct VectorRefPreserializer<T, VecSerStrategy::String> {
	mutable int32_t _cached_size; // -1 means unknown
	string_serialized_traits<T> _string_traits;
	VectorRefPreserializer() : _cached_size(0) {}
	VectorRefPreserializer(const VectorRefPreserializer<T, VecSerStrategy::String>& other)
	  : _cached_size(other._cached_size) {}
	VectorRefPreserializer& operator=(const VectorRefPreserializer<T, VecSerStrategy::String>& other) {
		_cached_size = other._cached_size;
		return *this;
	}
	VectorRefPreserializer(const VectorRefPreserializer<T, VecSerStrategy::FlatBuffers>&) : _cached_size(-1) {}
	VectorRefPreserializer& operator=(const VectorRefPreserializer<T, VecSerStrategy::FlatBuffers>&) {
		_cached_size = -1;
		return *this;
	}

	void invalidate() { _cached_size = -1; }
	void add(const T& item) {
		if (_cached_size > 0) {
			_cached_size += _string_traits.getSize(item);
		}
	}
	void remove(const T& item) {
		if (_cached_size > 0) {
			_cached_size -= _string_traits.getSize(item);
		}
	}
};
template <class T, VecSerStrategy SerStrategy = VecSerStrategy::FlatBuffers>
class VectorRef : public ComposedIdentifier<T, 0x8>, public VectorRefPreserializer<T, SerStrategy> {
	using VPS = VectorRefPreserializer<T, SerStrategy>;
	friend class VectorRef<T, SerStrategy == VecSerStrategy::FlatBuffers ? VecSerStrategy::String
	                                                                     : VecSerStrategy::FlatBuffers>;

public:
	using value_type = T;
	static_assert(SerStrategy == VecSerStrategy::FlatBuffers || string_serialized_traits<T>::value);

	// T must be trivially destructible (and copyable)!
	VectorRef() : data(0), m_size(0), m_capacity(0) {}

	template <VecSerStrategy S>
	VectorRef(const VectorRef<T, S>& other)
	  : VPS(other), data(other.data), m_size(other.m_size), m_capacity(other.m_capacity) {}
	template <VecSerStrategy S>
	VectorRef& operator=(const VectorRef<T, S>& other) {
		*static_cast<VPS*>(this) = other;
		data = other.data;
		m_size = other.m_size;
		m_capacity = other.m_capacity;
		return *this;
	}

	// Arena constructor for non-Ref types, identified by memcpy_able
	template <class T2 = T, VecSerStrategy S>
	VectorRef(Arena& p, const VectorRef<T, S>& toCopy, typename std::enable_if<memcpy_able<T2>::value, int>::type = 0)
	  : VPS(toCopy), data((T*)new (p) uint8_t[sizeof(T) * toCopy.size()]), m_size(toCopy.size()),
	    m_capacity(toCopy.size()) {
		if (m_size > 0) {
			memcpy(data, toCopy.data, m_size * sizeof(T));
		}
	}

	// Arena constructor for Ref types, which must have an Arena constructor
	template <class T2 = T, VecSerStrategy S>
	VectorRef(Arena& p, const VectorRef<T, S>& toCopy, typename std::enable_if<!memcpy_able<T2>::value, int>::type = 0)
	  : VPS(), data((T*)new (p) uint8_t[sizeof(T) * toCopy.size()]), m_size(toCopy.size()), m_capacity(toCopy.size()) {
		for (int i = 0; i < m_size; i++) {
			auto ptr = new (&data[i]) T(p, toCopy[i]);
			VPS::add(*ptr);
		}
	}

	VectorRef(T* data, int size) : data(data), m_size(size), m_capacity(size) {}
	VectorRef(T* data, int size, int capacity) : data(data), m_size(size), m_capacity(capacity) {}
	// VectorRef( const VectorRef<T>& toCopy ) : data( toCopy.data ), m_size( toCopy.m_size ), m_capacity(
	// toCopy.m_capacity ) {} VectorRef<T>& operator=( const VectorRef<T>& );

	template <VecSerStrategy S = SerStrategy>
	typename std::enable_if<S == VecSerStrategy::String, uint32_t>::type serializedSize() const {
		uint32_t result = sizeof(uint32_t);
		string_serialized_traits<T> t;
		if (VPS::_cached_size >= 0) {
			return result + VPS::_cached_size;
		}
		for (const auto& v : *this) {
			result += t.getSize(v);
		}
		VPS::_cached_size = result - sizeof(uint32_t);
		return result;
	}
2017-05-26 04:48:44 +08:00
const T * begin ( ) const { return data ; }
const T * end ( ) const { return data + m_size ; }
T const & front ( ) const { return * begin ( ) ; }
T const & back ( ) const { return end ( ) [ - 1 ] ; }
int size ( ) const { return m_size ; }
2018-03-15 03:39:50 +08:00
bool empty ( ) const { return m_size = = 0 ; }
2017-05-26 04:48:44 +08:00
const T & operator [ ] ( int i ) const { return data [ i ] ; }
2019-07-16 05:46:00 +08:00
std : : reverse_iterator < const T * > rbegin ( ) const { return std : : reverse_iterator < const T * > ( end ( ) ) ; }
std : : reverse_iterator < const T * > rend ( ) const { return std : : reverse_iterator < const T * > ( begin ( ) ) ; }
2017-05-26 04:48:44 +08:00
2019-07-16 05:46:00 +08:00
template < VecSerStrategy S = SerStrategy >
typename std : : enable_if < S = = VecSerStrategy : : FlatBuffers , VectorRef > : : type slice ( int begin , int end ) const {
return VectorRef ( data + begin , end - begin ) ;
}
2017-05-26 04:48:44 +08:00
2019-07-16 05:46:00 +08:00
template < VecSerStrategy S >
bool operator = = ( VectorRef < T , S > const & rhs ) const {
2017-05-26 04:48:44 +08:00
if ( size ( ) ! = rhs . size ( ) ) return false ;
2019-07-16 05:46:00 +08:00
for ( int i = 0 ; i < m_size ; i + + )
if ( ( * this ) [ i ] ! = rhs [ i ] ) return false ;
2017-05-26 04:48:44 +08:00
return true ;
}
// Warning: Do not mutate a VectorRef that has previously been copy constructed or assigned,
// since copies will share data
2019-07-16 05:46:00 +08:00
T * begin ( ) {
VPS : : invalidate ( ) ;
return data ;
}
T * end ( ) {
VPS : : invalidate ( ) ;
return data + m_size ;
}
T & front ( ) {
VPS : : invalidate ( ) ;
return * begin ( ) ;
}
T & back ( ) {
VPS : : invalidate ( ) ;
return end ( ) [ - 1 ] ;
}
T & operator [ ] ( int i ) {
VPS : : invalidate ( ) ;
return data [ i ] ;
}
	void push_back(Arena& p, const T& value) {
		if (m_size + 1 > m_capacity) reallocate(p, m_size + 1);
		auto ptr = new (&data[m_size]) T(value);
		VPS::add(*ptr);
		m_size++;
	}

	// invokes the "Deep copy constructor" T(Arena&, const T&) moving T entirely into arena
	void push_back_deep(Arena& p, const T& value) {
		if (m_size + 1 > m_capacity) reallocate(p, m_size + 1);
		auto ptr = new (&data[m_size]) T(p, value);
		VPS::add(*ptr);
		m_size++;
	}

	void append(Arena& p, const T* begin, int count) {
		if (m_size + count > m_capacity) reallocate(p, m_size + count);
		VPS::invalidate();
		if (count > 0) {
			memcpy(data + m_size, begin, sizeof(T) * count);
		}
		m_size += count;
	}

	template <class It>
	void append_deep(Arena& p, It begin, int count) {
		if (m_size + count > m_capacity) reallocate(p, m_size + count);
		for (int i = 0; i < count; i++) {
			auto ptr = new (&data[m_size + i]) T(p, *begin++);
			VPS::add(*ptr);
		}
		m_size += count;
	}

	void pop_back() {
		VPS::remove(back());
		m_size--;
	}

	void pop_front(int count) {
		VPS::invalidate();
		count = std::min(m_size, count);
		data += count;
		m_size -= count;
		m_capacity -= count;
	}

	void resize(Arena& p, int size) {
		if (size > m_capacity) reallocate(p, size);
		for (int i = m_size; i < size; i++) {
			auto ptr = new (&data[i]) T();
			VPS::add(*ptr);
		}
		m_size = size;
	}

	void reserve(Arena& p, int size) {
		if (size > m_capacity) reallocate(p, size);
	}
	// expectedSize() for non-Ref types, identified by memcpy_able
	template <class T2 = T>
	typename std::enable_if<memcpy_able<T2>::value, size_t>::type expectedSize() const {
		return sizeof(T) * m_size;
	}

	// expectedSize() for Ref types, which must in turn have expectedSize() implemented.
	template <class T2 = T>
	typename std::enable_if<!memcpy_able<T2>::value, size_t>::type expectedSize() const {
		size_t t = sizeof(T) * m_size;
		for (int i = 0; i < m_size; i++) t += data[i].expectedSize();
		return t;
	}

	int capacity() const { return m_capacity; }

	void extendUnsafeNoReallocNoInit(int amount) { m_size += amount; }

private:
	T* data;
	int m_size, m_capacity;

	void reallocate(Arena& p, int requiredCapacity) {
		requiredCapacity = std::max(m_capacity * 2, requiredCapacity);
		// SOMEDAY: Maybe we are right at the end of the arena and can expand cheaply
		T* newData = (T*)new (p) uint8_t[requiredCapacity * sizeof(T)];
		if (m_size > 0) {
			memcpy(newData, data, m_size * sizeof(T));
		}
		data = newData;
		m_capacity = requiredCapacity;
	}
};
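
// Usage sketch (illustrative, not part of the original header): all growth is allocated from
// the Arena passed to each mutating call; the VectorRef itself never frees memory.
//
//   Arena arena;
//   VectorRef<int> v;
//   v.push_back(arena, 1);
//   v.push_back(arena, 2); // reallocate() doubles capacity within 'arena' when needed
//   int last = v.back();   // 2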
template <class T>
struct Traceable<VectorRef<T>> {
	constexpr static bool value = Traceable<T>::value;

	static std::string toString(const VectorRef<T>& value) {
		std::stringstream ss;
		bool first = true;
		for (const auto& v : value) {
			if (first) {
				first = false;
			} else {
				ss << ' ';
			}
			ss << Traceable<T>::toString(v);
		}
		return ss.str();
	}
};
template <class Archive, class T, VecSerStrategy S>
inline void load(Archive& ar, VectorRef<T, S>& value) {
	// FIXME: range checking for length, here and in other serialize code
	uint32_t length;
	ar >> length;
	UNSTOPPABLE_ASSERT(length * sizeof(T) < (100 << 20));
	// SOMEDAY: Can we avoid running constructors for all the values?
	value.resize(ar.arena(), length);
	for (uint32_t i = 0; i < length; i++)
		ar >> value[i];
}
template <class Archive, class T, VecSerStrategy S>
inline void save(Archive& ar, const VectorRef<T, S>& value) {
	uint32_t length = value.size();
	ar << length;
	for (uint32_t i = 0; i < length; i++)
		ar << value[i];
}
template <class T>
struct vector_like_traits<VectorRef<T, VecSerStrategy::FlatBuffers>> : std::true_type {
	using Vec = VectorRef<T>;
	using value_type = typename Vec::value_type;
	using iterator = const T*;
	using insert_iterator = T*;

	template <class Context>
	static size_t num_entries(const VectorRef<T>& v, Context&) {
		return v.size();
	}
	template <class Context>
	static void reserve(VectorRef<T>& v, size_t s, Context& context) {
		v.resize(context.arena(), s);
	}

	template <class Context>
	static insert_iterator insert(Vec& v, Context&) { return v.begin(); }
	template <class Context>
	static iterator begin(const Vec& v, Context&) { return v.begin(); }
};
template <class V>
struct dynamic_size_traits<VectorRef<V, VecSerStrategy::String>> : std::true_type {
	using T = VectorRef<V, VecSerStrategy::String>;

	// May be called multiple times during one serialization
	template <class Context>
	static size_t size(const T& t, Context&) {
		return t.serializedSize();
	}

	// Guaranteed to be called only once during serialization
	template <class Context>
	static void save(uint8_t* out, const T& t, Context&) {
		string_serialized_traits<V> traits;
		auto* p = out;
		uint32_t length = t.size();
		*reinterpret_cast<decltype(length)*>(out) = length;
		out += sizeof(length);
		for (const auto& item : t) {
			out += traits.save(out, item);
		}
		ASSERT(out - p == t._cached_size + sizeof(uint32_t));
	}

	// Context is an arbitrary type that is plumbed by reference throughout the
	// load call tree.
	template <class Context>
	static void load(const uint8_t* data, size_t size, T& t, Context& context) {
		string_serialized_traits<V> traits;
		auto* p = data;
		uint32_t num_elements;
		memcpy(&num_elements, data, sizeof(num_elements));
		data += sizeof(num_elements);
		t.resize(context.arena(), num_elements);
		for (unsigned i = 0; i < num_elements; ++i) {
			data += traits.load(data, t[i], context);
		}
		ASSERT(data - p == size);
		t._cached_size = size - sizeof(uint32_t);
	}
};
void ArenaBlock::destroy() {
	// If the stack never contains more than one item, nothing will be allocated from stackArena.
	// If stackArena is used, it will always be a linked list, so destroying *it* will not create another arena
	ArenaBlock* tinyStack = this;
	Arena stackArena;
	VectorRef<ArenaBlock*> stack(&tinyStack, 1);

	while (stack.size()) {
		ArenaBlock* b = stack.end()[-1];
		stack.pop_back();

		if (!b->isTiny()) {
			int o = b->nextBlockOffset;
			while (o) {
				ArenaBlockRef* br = (ArenaBlockRef*)((char*)b->getData() + o);
				if (br->next->delref_no_destroy())
					stack.push_back(stackArena, br->next);
				o = br->nextBlockOffset;
			}
		}
		b->destroyLeaf();
	}
}

#endif