mirror of https://github.com/apache/iotdb
add overflow metadata test
This commit is contained in:
parent 797ca161bb
commit 909df230d9

pom.xml
@@ -27,7 +27,7 @@
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
-      <version>3.8.1</version>
+      <version>4.12</version>
       <scope>test</scope>
     </dependency>
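The JUnit bump from 3.8.1 to 4.12 is what lets the new test classes in this commit use annotation-driven fixtures (@Before/@After/@Test) instead of extending junit.framework.TestCase. A minimal sketch of that style, with a hypothetical class name, for readers migrating similar tests:

    import static org.junit.Assert.assertTrue;

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;

    // Hypothetical example; mirrors the fixture pattern used by the tests in this commit.
    public class ExampleJUnit4Test {
        @Before
        public void setUp() throws Exception {
            // runs before each @Test method (replaces TestCase.setUp)
        }

        @After
        public void tearDown() throws Exception {
            // runs after each @Test method (replaces TestCase.tearDown)
        }

        @Test
        public void roundTrip() {
            assertTrue(1 + 1 == 2);
        }
    }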
@@ -1,13 +0,0 @@
-package cn.edu.thu.tsfiledb;
-
-/**
- * Hello world!
- *
- */
-public class App
-{
-    public static void main( String[] args )
-    {
-        System.out.println( "Hello World!" );
-    }
-}
@@ -19,13 +19,13 @@ import cn.edu.thu.tsfile.common.utils.BytesUtils;
 import cn.edu.thu.tsfile.file.metadata.TSDigest;
 import cn.edu.thu.tsfile.file.metadata.TimeSeriesChunkMetaData;
 import cn.edu.thu.tsfile.file.metadata.VInTimeSeriesChunkMetaData;
-import cn.edu.thu.tsfile.file.metadata.converter.TSFileMetaDataConverter;
 import cn.edu.thu.tsfile.file.metadata.enums.CompressionTypeName;
 import cn.edu.thu.tsfile.file.metadata.enums.TSChunkType;
 import cn.edu.thu.tsfile.file.metadata.enums.TSDataType;
 import cn.edu.thu.tsfile.file.metadata.statistics.Statistics;
 
-import cn.edu.thu.tsfile.file.utils.ReadWriteThriftFormatUtils;
+import cn.edu.thu.tsfiledb.engine.overflow.utils.ReadWriteThriftFormatUtils;
+import cn.edu.thu.tsfiledb.engine.overflow.utils.TSFileMetaDataConverter;
 import cn.edu.thu.tsfiledb.engine.bufferwrite.FileNodeConstants;
 import cn.edu.thu.tsfiledb.engine.overflow.IIntervalTreeOperator;
 import cn.edu.thu.tsfiledb.engine.overflow.metadata.OFFileMetadata;
@@ -14,16 +14,15 @@ import org.slf4j.LoggerFactory;
 import cn.edu.thu.tsfile.common.conf.TSFileConfig;
 import cn.edu.thu.tsfile.common.conf.TSFileDescriptor;
 import cn.edu.thu.tsfile.common.utils.BytesUtils;
-import cn.edu.thu.tsfile.file.metadata.converter.TSFileMetaDataConverter;
 import cn.edu.thu.tsfile.file.metadata.enums.TSDataType;
 
-import cn.edu.thu.tsfile.file.utils.ReadWriteThriftFormatUtils;
 import cn.edu.thu.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression;
-import cn.edu.thu.tsfile.timeseries.write.schema.converter.TSDataTypeConverter;
 import cn.edu.thu.tsfiledb.engine.bufferwrite.Action;
 import cn.edu.thu.tsfiledb.engine.bufferwrite.FileNodeConstants;
 import cn.edu.thu.tsfiledb.engine.lru.LRUProcessor;
 import cn.edu.thu.tsfiledb.engine.overflow.metadata.OFFileMetadata;
+import cn.edu.thu.tsfiledb.engine.overflow.utils.ReadWriteThriftFormatUtils;
+import cn.edu.thu.tsfiledb.engine.overflow.utils.TSFileMetaDataConverter;
 import cn.edu.thu.tsfiledb.engine.overflow.utils.TimePair;
 import cn.edu.thu.tsfiledb.engine.utils.FlushState;
 import cn.edu.thu.tsfiledb.exception.OverflowProcessorException;
@@ -196,7 +195,7 @@ public class OverflowProcessor extends LRUProcessor {
         }
         int off = 0;
         int len = buff.length - off;
-        cn.edu.thu.tsfile.format.OFFileMetadata thriftfileMetadata = null;
+        cn.edu.thu.tsfiledb.engine.overflow.thrift.OFFileMetadata thriftfileMetadata = null;
         try {
             do {
                 int num = fileInputStream.read(buff, off, len);
@@ -269,7 +268,7 @@ public class OverflowProcessor extends LRUProcessor {
     */
    public void insert(String deltaObjectId, String measurementId, long timestamp, TSDataType type, String v)
            throws OverflowProcessorException {
-       insert(deltaObjectId, measurementId, timestamp, type, TSDataTypeConverter.convertStringToBytes(type, v));
+       insert(deltaObjectId, measurementId, timestamp, type, convertStringToBytes(type, v));
    }

    private void insert(String deltaObjectId, String measurementId, long timestamp, TSDataType type, byte[] v)
@@ -314,7 +313,7 @@ public class OverflowProcessor extends LRUProcessor {
    public void update(String deltaObjectId, String measurementId, long startTime, long endTime, TSDataType type,
            String v) throws OverflowProcessorException {
        if (ofSupport.update(deltaObjectId, measurementId, startTime, endTime, type,
-               TSDataTypeConverter.convertStringToBytes(type, v))) {
+               convertStringToBytes(type, v))) {
            ++recordCount;
            checkMemorySize();
        } else {
@@ -529,6 +528,31 @@ public class OverflowProcessor extends LRUProcessor {
            isMerging = false;
        }
    }
 
+   /**
+    * convert String to byte array
+    *
+    * @return result byte array
+    */
+   private byte[] convertStringToBytes(TSDataType type, String o) {
+       switch (type) {
+       case INT32:
+           return BytesUtils.intToBytes(Integer.valueOf(o));
+       case INT64:
+           return BytesUtils.longToBytes(Long.valueOf(o));
+       case BOOLEAN:
+           return BytesUtils.boolToBytes(Boolean.valueOf(o));
+       case FLOAT:
+           return BytesUtils.floatToBytes(Float.valueOf(o));
+       case DOUBLE:
+           return BytesUtils.doubleToBytes(Double.valueOf(o));
+       case BYTE_ARRAY:
+           return BytesUtils.StringToBytes(o);
+       default:
+           LOGGER.error("unsupported data type: {}", type);
+           throw new UnsupportedOperationException();
+       }
+   }
+
    private class OverflowStoreStruct {
        public final long lastOverflowFilePosition;
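The new convertStringToBytes helper dispatches on TSDataType and delegates to the tsfile BytesUtils encoders. As a reference for readers without the tsfile dependency, here is a JDK-only sketch of the same dispatch; the class, the string-keyed switch, and the big-endian encoding are all illustrative assumptions, not the project's code:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    // Hypothetical stand-in for convertStringToBytes, using only the JDK.
    final class StringCodec {
        static byte[] encode(String type, String o) {
            switch (type) {
            case "INT32":   // 4-byte big-endian int
                return ByteBuffer.allocate(4).putInt(Integer.parseInt(o)).array();
            case "INT64":   // 8-byte big-endian long
                return ByteBuffer.allocate(8).putLong(Long.parseLong(o)).array();
            case "BOOLEAN": // single byte, 1 for true
                return new byte[] { (byte) (Boolean.parseBoolean(o) ? 1 : 0) };
            case "FLOAT":
                return ByteBuffer.allocate(4).putFloat(Float.parseFloat(o)).array();
            case "DOUBLE":
                return ByteBuffer.allocate(8).putDouble(Double.parseDouble(o)).array();
            case "BYTE_ARRAY":
                return o.getBytes(StandardCharsets.UTF_8);
            default:
                throw new UnsupportedOperationException(type);
            }
        }
    }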
@@ -0,0 +1,79 @@
+package cn.edu.thu.tsfiledb.engine.overflow.utils;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.thrift.TBase;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TCompactProtocol;
+import org.apache.thrift.protocol.TProtocol;
+import org.apache.thrift.transport.TIOStreamTransport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import cn.edu.thu.tsfiledb.engine.overflow.thrift.OFFileMetadata;
+
+/**
+ *
+ * ConverterUtils is a utility class. It provides conversion between tsfile and thrift metadata
+ * classes. It also provides functions that read/write page headers from/to a stream.
+ *
+ * @author XuYi xuyi556677@163.com
+ *
+ */
+public class ReadWriteThriftFormatUtils {
+  private static final Logger LOGGER = LoggerFactory.getLogger(ReadWriteThriftFormatUtils.class);
+
+  private static void write(TBase<?, ?> tbase, OutputStream to) throws IOException {
+    try {
+      tbase.write(protocol(to));
+    } catch (TException e) {
+      LOGGER.error("tsfile-file Utils: can not write {}", tbase, e);
+      throw new IOException(e);
+    }
+  }
+
+  private static <T extends TBase<?, ?>> T read(InputStream from, T tbase) throws IOException {
+    try {
+      tbase.read(protocol(from));
+      return tbase;
+    } catch (TException e) {
+      LOGGER.error("tsfile-file Utils: can not read {}", tbase, e);
+      throw new IOException(e);
+    }
+  }
+
+  /**
+   * read overflow file metadata (thrift format) from stream
+   *
+   * @param from
+   * @throws IOException
+   */
+  public static OFFileMetadata readOFFileMetaData(InputStream from) throws IOException {
+    return ReadWriteThriftFormatUtils.read(from, new OFFileMetadata());
+  }
+
+  /**
+   * write overflow metadata (thrift format) to stream
+   *
+   * @param ofFileMetadata
+   * @param to
+   * @throws IOException
+   */
+  public static void writeOFFileMetaData(OFFileMetadata ofFileMetadata, OutputStream to)
+      throws IOException {
+    write(ofFileMetadata, to);
+  }
+
+  private static TProtocol protocol(OutputStream to) {
+    return new TCompactProtocol((new TIOStreamTransport(to)));
+  }
+
+  private static TProtocol protocol(InputStream from) {
+    return new TCompactProtocol((new TIOStreamTransport(from)));
+  }
+
+}
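The class above gives the overflow engine its own thrift (de)serialization entry points. A minimal round-trip sketch of how they are meant to be used; the file path and the empty OFFileMetadata are illustrative:

    import java.io.*;

    import cn.edu.thu.tsfiledb.engine.overflow.thrift.OFFileMetadata;
    import cn.edu.thu.tsfiledb.engine.overflow.utils.ReadWriteThriftFormatUtils;

    public class OFMetadataRoundTrip {
        public static void main(String[] args) throws IOException {
            File f = new File("target/of-metadata.bin"); // illustrative path
            OFFileMetadata meta = new OFFileMetadata();
            try (OutputStream out = new FileOutputStream(f)) {
                ReadWriteThriftFormatUtils.writeOFFileMetaData(meta, out);
            }
            try (InputStream in = new FileInputStream(f)) {
                OFFileMetadata back = ReadWriteThriftFormatUtils.readOFFileMetaData(in);
                System.out.println(meta.equals(back)); // thrift-generated structs implement equals
            }
        }
    }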
@@ -0,0 +1,58 @@
+package cn.edu.thu.tsfiledb.engine.overflow.utils;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import cn.edu.thu.tsfiledb.engine.overflow.metadata.OFFileMetadata;
+
+/**
+ * @Description converter for file metadata
+ * @author XuYi xuyi556677@163.com
+ * @date Apr 29, 2016 10:06:10 PM
+ */
+public class TSFileMetaDataConverter {
+  private static final Logger LOGGER = LoggerFactory.getLogger(TSFileMetaDataConverter.class);
+
+
+  /**
+   * @Description convert thrift format overflow file metadata to tsfile format overflow file
+   *              metadata. For more information about thrift format overflow file metadata, see
+   *              {@code com.corp.delta.tsfile.format.OFFileMetadata} in tsfile-format
+   * @param ofFileMetaDataThrift - overflow file metadata in thrift format
+   * @return OFFileMetadata - overflow file metadata in tsfile format
+   */
+  public OFFileMetadata toOFFileMetadata(cn.edu.thu.tsfiledb.engine.overflow.thrift.OFFileMetadata ofFileMetaDataThrift) {
+    OFFileMetadata ofFileMetadata = new OFFileMetadata();
+    try {
+      ofFileMetadata.convertToTSF(ofFileMetaDataThrift);
+    } catch (Exception e) {
+      LOGGER.error(
+          "tsfile-file TSFMetaDataConverter: failed to convert overflow file metadata from thrift to TSFile, content is {}",
+          ofFileMetaDataThrift, e);
+    }
+    return ofFileMetadata;
+  }
+
+  /**
+   * @Description convert tsfile format overflow file metadata to thrift format overflow file
+   *              metadata. For more information about thrift format file metadata, see
+   *              {@code com.corp.delta.tsfile.format.OFFileMetadata} in tsfile-format
+   * @param currentVersion - current version
+   * @param ofFileMetadata - overflow file metadata in tsfile format
+   * @return org.corp.tsfile.format.OFFileMetaData - overflow file metadata in thrift format
+   */
+  public cn.edu.thu.tsfiledb.engine.overflow.thrift.OFFileMetadata toThriftOFFileMetadata(int currentVersion,
+      OFFileMetadata ofFileMetadata) {
+    try {
+      return ofFileMetadata.convertToThrift();
+    } catch (Exception e) {
+      LOGGER.error(
+          "tsfile-file TSFMetaDataConverter: failed to convert overflow file metadata from TSFile to thrift, content is {}",
+          ofFileMetadata, e);
+    }
+    return null;
+  }
+
+}
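Together with the utils above, the converter bridges the tsfile-side and thrift-side metadata types. A hedged usage sketch (the version argument 0 matches what the tests below pass; the class name and empty metadata are illustrative):

    import cn.edu.thu.tsfiledb.engine.overflow.metadata.OFFileMetadata;
    import cn.edu.thu.tsfiledb.engine.overflow.utils.TSFileMetaDataConverter;

    public class ConverterDemo {
        public static void main(String[] args) {
            TSFileMetaDataConverter converter = new TSFileMetaDataConverter();
            OFFileMetadata tsf = new OFFileMetadata();
            // tsfile -> thrift
            cn.edu.thu.tsfiledb.engine.overflow.thrift.OFFileMetadata thrift =
                    converter.toThriftOFFileMetadata(0, tsf);
            // thrift -> tsfile
            OFFileMetadata back = converter.toOFFileMetadata(thrift);
        }
    }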
@@ -1,38 +0,0 @@
-package cn.edu.thu.tsfiledb;
-
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-
-/**
- * Unit test for simple App.
- */
-public class AppTest
-    extends TestCase
-{
-    /**
-     * Create the test case
-     *
-     * @param testName name of the test case
-     */
-    public AppTest( String testName )
-    {
-        super( testName );
-    }
-
-    /**
-     * @return the suite of tests being tested
-     */
-    public static Test suite()
-    {
-        return new TestSuite( AppTest.class );
-    }
-
-    /**
-     * Rigourous Test :-)
-     */
-    public void testApp()
-    {
-        assertTrue( true );
-    }
-}
@@ -0,0 +1,311 @@
+package cn.edu.thu.tsfiledb.engine.overflow.metadata;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import cn.edu.thu.tsfile.common.utils.RandomAccessOutputStream;
+import cn.edu.thu.tsfile.file.metadata.TimeSeriesChunkMetaData;
+import cn.edu.thu.tsfile.file.metadata.enums.CompressionTypeName;
+import cn.edu.thu.tsfile.file.metadata.enums.TSChunkType;
+import cn.edu.thu.tsfile.format.CompressionType;
+import cn.edu.thu.tsfile.format.TimeSeriesChunkType;
+import cn.edu.thu.tsfiledb.engine.overflow.utils.TSFileMetaDataConverter;
+
+/**
+ * Unit test for the metadata of overflow. {@code OFSeriesListMetadata} converts
+ * the thrift {@code cn.edu.thu.tsfile.format.OFSeriesListMetadata},
+ * {@code OFRowGroupListMetadata} converts the thrift
+ * {@code cn.edu.thu.tsfile.format.OFRowGroupListMetadata}, and
+ * {@code OFFileMetadata} converts the thrift
+ * {@code cn.edu.thu.tsfile.format.OFFileMetadata}. The tests convert the overflow file
+ * metadata to overflow thrift metadata and write it to a file stream, then read from the
+ * file stream and convert the overflow thrift metadata back to the overflow file
+ * metadata.
+ *
+ * @author liukun
+ *
+ */
+public class OverflowMetaDataTest {
+
+  // data
+  private final String DELTA_OBJECT_UID = "delta-3312";
+  private final String MEASUREMENT_UID = "sensor231";
+  private final long FILE_OFFSET = 2313424242L;
+  private final long MAX_NUM_ROWS = 423432425L;
+  private final long TOTAL_BYTE_SIZE = 432453453L;
+  private final long DATA_PAGE_OFFSET = 42354334L;
+  private final long DICTIONARY_PAGE_OFFSET = 23434543L;
+  private final long INDEX_PAGE_OFFSET = 34243453L;
+  private final int LAST_FOOTER_OFFSET = 3423432;
+  private final String PATH = "target/OFFileMetaData.overflow";
+
+  // series chunk metadata
+  private TimeSeriesChunkMetaData tsfTimeSeriesChunkMetaData;
+  private cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData thriftTimeSeriesChunkMetaData;
+  // of series list
+  private OFSeriesListMetadata tsfOFSeriesListMetadata;
+  private cn.edu.thu.tsfiledb.engine.overflow.thrift.OFSeriesListMetadata thriftOFSeriesListMetadata;
+  // of row group list
+  private OFRowGroupListMetadata tsfOFRowGroupListMetadata;
+  private cn.edu.thu.tsfiledb.engine.overflow.thrift.OFRowGroupListMetadata thriftOFRowGroupListMetadata;
+  // of file list
+  private OFFileMetadata tsfOFFileMetadata;
+  private cn.edu.thu.tsfiledb.engine.overflow.thrift.OFFileMetadata thriftOFFileMetadata;
+
+  // converter
+  private TSFileMetaDataConverter converter;
+
+  @Before
+  public void setUp() throws Exception {
+
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    File file = new File(PATH);
+    if (file.exists()) {
+      file.delete();
+    }
+  }
+
+  @Test
+  public void OFSeriesListMetadataTest() {
+    // of to thrift
+    tsfOFSeriesListMetadata = new OFSeriesListMetadata();
+    tsfOFSeriesListMetadata.setMeasurementId(MEASUREMENT_UID);
+    for (CompressionTypeName compressionTypeName : CompressionTypeName.values()) {
+      for (TSChunkType chunkType : TSChunkType.values()) {
+        tsfTimeSeriesChunkMetaData = new TimeSeriesChunkMetaData(MEASUREMENT_UID, chunkType, FILE_OFFSET,
+            compressionTypeName);
+        tsfTimeSeriesChunkMetaData.setNumRows(MAX_NUM_ROWS);
+        tsfTimeSeriesChunkMetaData.setTotalByteSize(TOTAL_BYTE_SIZE);
+
+        tsfTimeSeriesChunkMetaData.setJsonMetaData(TestHelper.getJSONArray());
+
+        tsfTimeSeriesChunkMetaData.setDataPageOffset(DATA_PAGE_OFFSET);
+        tsfTimeSeriesChunkMetaData.setDictionaryPageOffset(DICTIONARY_PAGE_OFFSET);
+        tsfTimeSeriesChunkMetaData.setIndexPageOffset(INDEX_PAGE_OFFSET);
+        tsfOFSeriesListMetadata.addSeriesMetaData(tsfTimeSeriesChunkMetaData);
+      }
+    }
+    Utils.isOFSeriesListMetaDataEqual(tsfOFSeriesListMetadata, tsfOFSeriesListMetadata.convertToThrift());
+
+    // thrift to of
+    thriftOFSeriesListMetadata = new cn.edu.thu.tsfiledb.engine.overflow.thrift.OFSeriesListMetadata();
+    thriftOFSeriesListMetadata.setMeasurement_id(MEASUREMENT_UID);
+    for (CompressionType compressionType : CompressionType.values()) {
+      for (TimeSeriesChunkType chunkType : TimeSeriesChunkType.values()) {
+        thriftTimeSeriesChunkMetaData = new cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData(
+            MEASUREMENT_UID, chunkType, FILE_OFFSET, compressionType);
+        thriftTimeSeriesChunkMetaData.setNum_rows(MAX_NUM_ROWS);
+        thriftTimeSeriesChunkMetaData.setTotal_byte_size(TOTAL_BYTE_SIZE);
+        thriftTimeSeriesChunkMetaData.setJson_metadata(TestHelper.getJSONArray());
+        thriftTimeSeriesChunkMetaData.setData_page_offset(DATA_PAGE_OFFSET);
+        thriftTimeSeriesChunkMetaData.setDictionary_page_offset(DICTIONARY_PAGE_OFFSET);
+        thriftTimeSeriesChunkMetaData.setIndex_page_offset(INDEX_PAGE_OFFSET);
+
+        thriftOFSeriesListMetadata.addToTsc_metadata(thriftTimeSeriesChunkMetaData);
+      }
+    }
+    tsfOFSeriesListMetadata = new OFSeriesListMetadata();
+    tsfOFSeriesListMetadata.convertToTSF(thriftOFSeriesListMetadata);
+    Utils.isOFSeriesListMetaDataEqual(tsfOFSeriesListMetadata, thriftOFSeriesListMetadata);
+  }
+
+  @Test
+  public void OFRowGroupListMetadataTest() {
+    // of to thrift
+    tsfOFRowGroupListMetadata = new OFRowGroupListMetadata();
+    tsfOFRowGroupListMetadata.setDeltaObjectId(DELTA_OBJECT_UID);
+    int size = 5;
+    while (size > 0) {
+      size--;
+      tsfOFSeriesListMetadata = new OFSeriesListMetadata();
+      tsfOFSeriesListMetadata.setMeasurementId(MEASUREMENT_UID);
+      for (CompressionTypeName compressionTypeName : CompressionTypeName.values()) {
+        for (TSChunkType chunkType : TSChunkType.values()) {
+          tsfTimeSeriesChunkMetaData = new TimeSeriesChunkMetaData(MEASUREMENT_UID, chunkType, FILE_OFFSET,
+              compressionTypeName);
+          tsfTimeSeriesChunkMetaData.setNumRows(MAX_NUM_ROWS);
+          tsfTimeSeriesChunkMetaData.setTotalByteSize(TOTAL_BYTE_SIZE);
+
+          tsfTimeSeriesChunkMetaData.setJsonMetaData(TestHelper.getJSONArray());
+
+          tsfTimeSeriesChunkMetaData.setDataPageOffset(DATA_PAGE_OFFSET);
+          tsfTimeSeriesChunkMetaData.setDictionaryPageOffset(DICTIONARY_PAGE_OFFSET);
+          tsfTimeSeriesChunkMetaData.setIndexPageOffset(INDEX_PAGE_OFFSET);
+          tsfOFSeriesListMetadata.addSeriesMetaData(tsfTimeSeriesChunkMetaData);
+        }
+      }
+      tsfOFRowGroupListMetadata.addSeriesListMetaData(tsfOFSeriesListMetadata);
+    }
+    Utils.isOFRowGroupListMetaDataEqual(tsfOFRowGroupListMetadata, tsfOFRowGroupListMetadata.convertToThrift());
+
+    // thrift to of
+
+    thriftOFRowGroupListMetadata = new cn.edu.thu.tsfiledb.engine.overflow.thrift.OFRowGroupListMetadata();
+    thriftOFRowGroupListMetadata.setDeltaObject_id(DELTA_OBJECT_UID);
+    size = 5;
+    while (size > 0) {
+      size--;
+      thriftOFSeriesListMetadata = new cn.edu.thu.tsfiledb.engine.overflow.thrift.OFSeriesListMetadata();
+      thriftOFSeriesListMetadata.setMeasurement_id(MEASUREMENT_UID);
+      for (CompressionType compressionType : CompressionType.values()) {
+        for (TimeSeriesChunkType chunkType : TimeSeriesChunkType.values()) {
+          thriftTimeSeriesChunkMetaData = new cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData(
+              MEASUREMENT_UID, chunkType, FILE_OFFSET, compressionType);
+          thriftTimeSeriesChunkMetaData.setNum_rows(MAX_NUM_ROWS);
+          thriftTimeSeriesChunkMetaData.setTotal_byte_size(TOTAL_BYTE_SIZE);
+          thriftTimeSeriesChunkMetaData.setJson_metadata(TestHelper.getJSONArray());
+          thriftTimeSeriesChunkMetaData.setData_page_offset(DATA_PAGE_OFFSET);
+          thriftTimeSeriesChunkMetaData.setDictionary_page_offset(DICTIONARY_PAGE_OFFSET);
+          thriftTimeSeriesChunkMetaData.setIndex_page_offset(INDEX_PAGE_OFFSET);
+
+          thriftOFSeriesListMetadata.addToTsc_metadata(thriftTimeSeriesChunkMetaData);
+        }
+      }
+      thriftOFRowGroupListMetadata.addToMeasurement_metadata(thriftOFSeriesListMetadata);
+    }
+    tsfOFRowGroupListMetadata = new OFRowGroupListMetadata();
+    tsfOFRowGroupListMetadata.convertToTSF(thriftOFRowGroupListMetadata);
+    Utils.isOFRowGroupListMetaDataEqual(tsfOFRowGroupListMetadata, thriftOFRowGroupListMetadata);
+  }
+
+  @Test
+  public void OFFileMetaDataTest() {
+
+    converter = new TSFileMetaDataConverter();
+
+    tsfOFFileMetadata = new OFFileMetadata();
+    tsfOFFileMetadata.setLastFooterOffset(LAST_FOOTER_OFFSET);
+    int count = 5;
+    while (count > 0) {
+      count--;
+      tsfOFRowGroupListMetadata = new OFRowGroupListMetadata();
+      tsfOFRowGroupListMetadata.setDeltaObjectId(DELTA_OBJECT_UID);
+      int size = 5;
+      while (size > 0) {
+        size--;
+        tsfOFSeriesListMetadata = new OFSeriesListMetadata();
+        tsfOFSeriesListMetadata.setMeasurementId(MEASUREMENT_UID);
+        for (CompressionTypeName compressionTypeName : CompressionTypeName.values()) {
+          for (TSChunkType chunkType : TSChunkType.values()) {
+            tsfTimeSeriesChunkMetaData = new TimeSeriesChunkMetaData(MEASUREMENT_UID, chunkType,
+                FILE_OFFSET, compressionTypeName);
+            tsfTimeSeriesChunkMetaData.setNumRows(MAX_NUM_ROWS);
+            tsfTimeSeriesChunkMetaData.setTotalByteSize(TOTAL_BYTE_SIZE);
+
+            tsfTimeSeriesChunkMetaData.setJsonMetaData(TestHelper.getJSONArray());
+
+            tsfTimeSeriesChunkMetaData.setDataPageOffset(DATA_PAGE_OFFSET);
+            tsfTimeSeriesChunkMetaData.setDictionaryPageOffset(DICTIONARY_PAGE_OFFSET);
+            tsfTimeSeriesChunkMetaData.setIndexPageOffset(INDEX_PAGE_OFFSET);
+            tsfOFSeriesListMetadata.addSeriesMetaData(tsfTimeSeriesChunkMetaData);
+          }
+        }
+        tsfOFRowGroupListMetadata.addSeriesListMetaData(tsfOFSeriesListMetadata);
+      }
+      tsfOFFileMetadata.addRowGroupListMetaData(tsfOFRowGroupListMetadata);
+    }
+    Utils.isOFFileMetaDataEqual(tsfOFFileMetadata, tsfOFFileMetadata.convertToThrift());
+    Utils.isOFFileMetaDataEqual(tsfOFFileMetadata, converter.toThriftOFFileMetadata(0, tsfOFFileMetadata));
+
+    // thrift to of
+    thriftOFFileMetadata = new cn.edu.thu.tsfiledb.engine.overflow.thrift.OFFileMetadata();
+    thriftOFFileMetadata.setLast_footer_offset(LAST_FOOTER_OFFSET);
+    count = 5;
+    while (count > 0) {
+      count--;
+      thriftOFRowGroupListMetadata = new cn.edu.thu.tsfiledb.engine.overflow.thrift.OFRowGroupListMetadata();
+      thriftOFRowGroupListMetadata.setDeltaObject_id(DELTA_OBJECT_UID);
+      int size = 5;
+      while (size > 0) {
+        size--;
+        thriftOFSeriesListMetadata = new cn.edu.thu.tsfiledb.engine.overflow.thrift.OFSeriesListMetadata();
+        thriftOFSeriesListMetadata.setMeasurement_id(MEASUREMENT_UID);
+        for (CompressionType compressionType : CompressionType.values()) {
+          for (TimeSeriesChunkType chunkType : TimeSeriesChunkType.values()) {
+            thriftTimeSeriesChunkMetaData = new cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData(
+                MEASUREMENT_UID, chunkType, FILE_OFFSET, compressionType);
+            thriftTimeSeriesChunkMetaData.setNum_rows(MAX_NUM_ROWS);
+            thriftTimeSeriesChunkMetaData.setTotal_byte_size(TOTAL_BYTE_SIZE);
+            thriftTimeSeriesChunkMetaData.setJson_metadata(TestHelper.getJSONArray());
+            thriftTimeSeriesChunkMetaData.setData_page_offset(DATA_PAGE_OFFSET);
+            thriftTimeSeriesChunkMetaData.setDictionary_page_offset(DICTIONARY_PAGE_OFFSET);
+            thriftTimeSeriesChunkMetaData.setIndex_page_offset(INDEX_PAGE_OFFSET);
+
+            thriftOFSeriesListMetadata.addToTsc_metadata(thriftTimeSeriesChunkMetaData);
+          }
+        }
+        thriftOFRowGroupListMetadata.addToMeasurement_metadata(thriftOFSeriesListMetadata);
+      }
+      thriftOFFileMetadata.addToDeltaObject_metadata(thriftOFRowGroupListMetadata);
+    }
+    tsfOFFileMetadata = new OFFileMetadata();
+    tsfOFFileMetadata.convertToTSF(thriftOFFileMetadata);
+    Utils.isOFFileMetaDataEqual(tsfOFFileMetadata, thriftOFFileMetadata);
+    Utils.isOFFileMetaDataEqual(converter.toOFFileMetadata(thriftOFFileMetadata), thriftOFFileMetadata);
+  }
+
+  @Test
+  public void OFFileThriftFileTest() throws IOException {
+    // offilemetadata flush to file
+    tsfOFFileMetadata = new OFFileMetadata();
+    tsfOFFileMetadata.setLastFooterOffset(LAST_FOOTER_OFFSET);
+    int count = 5;
+    while (count > 0) {
+      count--;
+      tsfOFRowGroupListMetadata = new OFRowGroupListMetadata();
+      tsfOFRowGroupListMetadata.setDeltaObjectId(DELTA_OBJECT_UID);
+      int size = 5;
+      while (size > 0) {
+        size--;
+        tsfOFSeriesListMetadata = new OFSeriesListMetadata();
+        tsfOFSeriesListMetadata.setMeasurementId(MEASUREMENT_UID);
+        for (CompressionTypeName compressionTypeName : CompressionTypeName.values()) {
+          for (TSChunkType chunkType : TSChunkType.values()) {
+            tsfTimeSeriesChunkMetaData = new TimeSeriesChunkMetaData(MEASUREMENT_UID, chunkType,
+                FILE_OFFSET, compressionTypeName);
+            tsfTimeSeriesChunkMetaData.setNumRows(MAX_NUM_ROWS);
+            tsfTimeSeriesChunkMetaData.setTotalByteSize(TOTAL_BYTE_SIZE);
+
+            tsfTimeSeriesChunkMetaData.setJsonMetaData(TestHelper.getJSONArray());
+
+            tsfTimeSeriesChunkMetaData.setDataPageOffset(DATA_PAGE_OFFSET);
+            tsfTimeSeriesChunkMetaData.setDictionaryPageOffset(DICTIONARY_PAGE_OFFSET);
+            tsfTimeSeriesChunkMetaData.setIndexPageOffset(INDEX_PAGE_OFFSET);
+            tsfOFSeriesListMetadata.addSeriesMetaData(tsfTimeSeriesChunkMetaData);
+          }
+        }
+        tsfOFRowGroupListMetadata.addSeriesListMetaData(tsfOFSeriesListMetadata);
+      }
+      tsfOFFileMetadata.addRowGroupListMetaData(tsfOFRowGroupListMetadata);
+    }
+
+    File file = new File(PATH);
+    if (file.exists()) {
+      file.delete();
+    }
+
+    FileOutputStream fos = new FileOutputStream(file);
+    RandomAccessOutputStream out = new RandomAccessOutputStream(file, "rw");
+    Utils.write(tsfOFFileMetadata.convertToThrift(), out.getOutputStream());
+
+    out.close();
+    fos.close();
+    // thriftfilemeta read from file
+    FileInputStream fis = new FileInputStream(new File(PATH));
+    Utils.isOFFileMetaDataEqual(tsfOFFileMetadata,
+        Utils.read(fis, new cn.edu.thu.tsfiledb.engine.overflow.thrift.OFFileMetadata()));
+
+  }
+
+}
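These tests round-trip metadata through Utils.write and Utils.read (defined at the end of this commit), which follow the same TCompactProtocol-over-TIOStreamTransport pattern as ReadWriteThriftFormatUtils. A generic sketch of that pattern; the class and method names here are illustrative, not the project's API:

    import java.io.*;

    import org.apache.thrift.TBase;
    import org.apache.thrift.TException;
    import org.apache.thrift.protocol.TCompactProtocol;
    import org.apache.thrift.transport.TIOStreamTransport;

    // Hypothetical generic helpers: serialize any thrift struct to/from a stream.
    public final class ThriftStreamCodec {
        public static void write(TBase<?, ?> struct, OutputStream out) throws IOException {
            try {
                struct.write(new TCompactProtocol(new TIOStreamTransport(out)));
            } catch (TException e) {
                throw new IOException(e);
            }
        }

        public static <T extends TBase<?, ?>> T read(InputStream in, T target) throws IOException {
            try {
                target.read(new TCompactProtocol(new TIOStreamTransport(in)));
                return target;
            } catch (TException e) {
                throw new IOException(e);
            }
        }
    }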
@@ -0,0 +1,130 @@
+package cn.edu.thu.tsfiledb.engine.overflow.metadata;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import cn.edu.thu.tsfile.common.utils.RandomAccessOutputStream;
+import cn.edu.thu.tsfile.file.metadata.TInTimeSeriesChunkMetaData;
+import cn.edu.thu.tsfile.file.metadata.enums.TSDataType;
+import cn.edu.thu.tsfile.file.metadata.enums.TSFreqType;
+import cn.edu.thu.tsfile.format.DataType;
+import cn.edu.thu.tsfile.format.FreqType;
+import cn.edu.thu.tsfile.format.TimeInTimeSeriesChunkMetaData;
+
+public class TInTimeSeriesChunkMetaDataTest {
+  private TInTimeSeriesChunkMetaData metaData;
+  public static List<Integer> frequencies1;
+  public static List<Integer> frequencies2;
+  public static final long startTime = 523372036854775806L;
+  public static final long endTime = 523372036854775806L;
+  final String PATH = "target/outputT.ksn";
+
+  @Before
+  public void setUp() throws Exception {
+    metaData = new TInTimeSeriesChunkMetaData();
+    frequencies1 = new ArrayList<Integer>();
+
+    frequencies2 = new ArrayList<Integer>();
+    frequencies2.add(132);
+    frequencies2.add(432);
+    frequencies2.add(35435);
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    File file = new File(PATH);
+    if (file.exists())
+      file.delete();
+  }
+
+  @Test
+  public void testWriteIntoFile() throws IOException {
+    TInTimeSeriesChunkMetaData metaData = TestHelper.createT2inTSF(TSDataType.BYTE_ARRAY,
+        TSFreqType.IRREGULAR_FREQ, frequencies2, startTime, endTime);
+    File file = new File(PATH);
+    if (file.exists())
+      file.delete();
+    FileOutputStream fos = new FileOutputStream(file);
+    RandomAccessOutputStream out = new RandomAccessOutputStream(file, "rw");
+    Utils.write(metaData.convertToThrift(), out.getOutputStream());
+
+    out.close();
+    fos.close();
+
+    FileInputStream fis = new FileInputStream(new File(PATH));
+    Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());
+    Utils.isTSeriesChunkMetadataEqual(metaData,
+        Utils.read(fis, new TimeInTimeSeriesChunkMetaData()));
+  }
+
+  @Test
+  public void testConvertToThrift() {
+    for (TSDataType dataType : TSDataType.values()) {
+      TInTimeSeriesChunkMetaData metaData =
+          new TInTimeSeriesChunkMetaData(dataType, startTime, endTime);
+      Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());
+      for (TSFreqType freqType : TSFreqType.values()) {
+        metaData.setFreqType(freqType);
+        Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());
+
+        List<Integer> frequencies = new ArrayList<Integer>();
+        metaData.setFrequencies(frequencies);
+        Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());
+
+        frequencies.add(132);
+        frequencies.add(432);
+        frequencies.add(35435);
+        metaData.setFrequencies(frequencies);
+        Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());
+
+        frequencies.clear();
+        metaData.setFrequencies(frequencies);
+        Utils.isTSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());
+      }
+    }
+  }
+
+  @Test
+  public void testConvertToTSF() {
+    for (DataType dataType : DataType.values()) {
+      TimeInTimeSeriesChunkMetaData timeInTimeSeriesChunkMetaData =
+          new TimeInTimeSeriesChunkMetaData(dataType, startTime, endTime);
+      metaData.convertToTSF(timeInTimeSeriesChunkMetaData);
+      Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData);
+      for (FreqType freqType : FreqType.values()) {
+        timeInTimeSeriesChunkMetaData.setFreq_type(freqType);
+
+        metaData.convertToTSF(timeInTimeSeriesChunkMetaData);
+        Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData);
+
+        metaData.convertToTSF(timeInTimeSeriesChunkMetaData);
+        Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData);
+
+        List<Integer> frequencies = new ArrayList<Integer>();
+        timeInTimeSeriesChunkMetaData.setFrequencies(frequencies);
+        metaData.convertToTSF(timeInTimeSeriesChunkMetaData);
+        Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData);
+
+        frequencies.add(132);
+        frequencies.add(432);
+        frequencies.add(35435);
+        timeInTimeSeriesChunkMetaData.setFrequencies(frequencies);
+        metaData.convertToTSF(timeInTimeSeriesChunkMetaData);
+        Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData);
+
+        frequencies.clear();
+        timeInTimeSeriesChunkMetaData.setFrequencies(frequencies);
+        metaData.convertToTSF(timeInTimeSeriesChunkMetaData);
+        Utils.isTSeriesChunkMetadataEqual(metaData, timeInTimeSeriesChunkMetaData);
+      }
+    }
+  }
+}
@@ -0,0 +1,245 @@
+package cn.edu.thu.tsfiledb.engine.overflow.metadata;
+
+import java.io.UnsupportedEncodingException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import cn.edu.thu.tsfile.file.metadata.TInTimeSeriesChunkMetaData;
+import cn.edu.thu.tsfile.file.metadata.TSDigest;
+import cn.edu.thu.tsfile.file.metadata.TimeSeriesChunkMetaData;
+import cn.edu.thu.tsfile.file.metadata.VInTimeSeriesChunkMetaData;
+import cn.edu.thu.tsfile.file.metadata.enums.CompressionTypeName;
+import cn.edu.thu.tsfile.file.metadata.enums.TSChunkType;
+import cn.edu.thu.tsfile.file.metadata.enums.TSDataType;
+import cn.edu.thu.tsfile.file.metadata.enums.TSFreqType;
+import cn.edu.thu.tsfile.format.CompressionType;
+import cn.edu.thu.tsfile.format.DataType;
+import cn.edu.thu.tsfile.format.Digest;
+import cn.edu.thu.tsfile.format.FreqType;
+import cn.edu.thu.tsfile.format.TimeInTimeSeriesChunkMetaData;
+import cn.edu.thu.tsfile.format.TimeSeriesChunkType;
+import cn.edu.thu.tsfile.format.ValueInTimeSeriesChunkMetaData;
+
+public class TestHelper {
+
+
+
+  public static TimeSeriesChunkMetaData createSimpleTimeSeriesChunkMetaDataInTSF()
+      throws UnsupportedEncodingException {
+
+    TimeSeriesChunkMetaData metaData =
+        new TimeSeriesChunkMetaData(TimeSeriesChunkMetaDataTest.MEASUREMENT_UID, TSChunkType.TIME,
+            TimeSeriesChunkMetaDataTest.FILE_OFFSET, CompressionTypeName.GZIP);
+    metaData.setNumRows(TimeSeriesChunkMetaDataTest.MAX_NUM_ROWS);
+    metaData.setTotalByteSize(TimeSeriesChunkMetaDataTest.TOTAL_BYTE_SIZE);
+    metaData.setJsonMetaData(TestHelper.getJSONArray());
+    metaData.setDataPageOffset(TimeSeriesChunkMetaDataTest.DATA_PAGE_OFFSET);
+    metaData.setDictionaryPageOffset(TimeSeriesChunkMetaDataTest.DICTIONARY_PAGE_OFFSET);
+    metaData.setIndexPageOffset(TimeSeriesChunkMetaDataTest.INDEX_PAGE_OFFSET);
+    metaData.setTInTimeSeriesChunkMetaData(TestHelper.createT2inTSF(TSDataType.BOOLEAN,
+        TSFreqType.IRREGULAR_FREQ, null, TInTimeSeriesChunkMetaDataTest.startTime, TInTimeSeriesChunkMetaDataTest.endTime));
+    metaData.setVInTimeSeriesChunkMetaData(
+        TestHelper.createSimpleV2InTSF(TSDataType.BOOLEAN, new TSDigest(), VInTimeSeriesChunkMetaDataTest.maxString, VInTimeSeriesChunkMetaDataTest.minString));
+    return metaData;
+  }
+
+  public static cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData createSimpleTimeSeriesChunkMetaDataInThrift()
+      throws UnsupportedEncodingException {
+    cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData metaData =
+        new cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData(
+            TimeSeriesChunkMetaDataTest.MEASUREMENT_UID, TimeSeriesChunkType.VALUE,
+            TimeSeriesChunkMetaDataTest.FILE_OFFSET, CompressionType.LZO);
+    metaData.setNum_rows(TimeSeriesChunkMetaDataTest.MAX_NUM_ROWS);
+    metaData.setTotal_byte_size(TimeSeriesChunkMetaDataTest.TOTAL_BYTE_SIZE);
+    metaData.setJson_metadata(TestHelper.getJSONArray());
+    metaData.setData_page_offset(TimeSeriesChunkMetaDataTest.DATA_PAGE_OFFSET);
+    metaData.setDictionary_page_offset(TimeSeriesChunkMetaDataTest.DICTIONARY_PAGE_OFFSET);
+    metaData.setIndex_page_offset(TimeSeriesChunkMetaDataTest.INDEX_PAGE_OFFSET);
+    metaData.setTime_tsc(TestHelper.createT2inThrift(DataType.BOOLEAN, FreqType.IRREGULAR_FREQ,
+        null, TInTimeSeriesChunkMetaDataTest.startTime, TInTimeSeriesChunkMetaDataTest.endTime));
+    metaData.setValue_tsc(
+        TestHelper.createSimpleV2InThrift(DataType.BOOLEAN, new Digest(), VInTimeSeriesChunkMetaDataTest.maxString, VInTimeSeriesChunkMetaDataTest.minString));
+    return metaData;
+  }
+
+  public static TInTimeSeriesChunkMetaData createT1inTSF(TSDataType dataType, long startTime,
+      long endTime) {
+    TInTimeSeriesChunkMetaData metaData =
+        new TInTimeSeriesChunkMetaData(dataType, startTime, endTime);
+    return metaData;
+  }
+
+  public static TInTimeSeriesChunkMetaData createT2inTSF(TSDataType dataType, TSFreqType freqType,
+      List<Integer> frequencies, long startTime, long endTime) {
+    TInTimeSeriesChunkMetaData metaData =
+        new TInTimeSeriesChunkMetaData(dataType, startTime, endTime);
+    metaData.setFreqType(freqType);
+    metaData.setFrequencies(frequencies);
+
+    List<String> dataValues = new ArrayList<String>();
+    dataValues.add("A");
+    dataValues.add("B");
+    dataValues.add("C");
+    dataValues.add("D");
+    metaData.setEnumValues(dataValues);
+    return metaData;
+
+  }
+
+  public static List<TInTimeSeriesChunkMetaData> generateTSeriesChunkMetaDataListInTSF() {
+    ArrayList<Integer> frequencies1 = new ArrayList<Integer>();
+
+    ArrayList<Integer> frequencies2 = new ArrayList<Integer>();
+    frequencies2.add(132);
+    frequencies2.add(432);
+    frequencies2.add(35435);
+    List<TInTimeSeriesChunkMetaData> list = new ArrayList<TInTimeSeriesChunkMetaData>();
+    for (TSDataType dataType : TSDataType.values()) {
+      list.add(createT1inTSF(dataType, TInTimeSeriesChunkMetaDataTest.startTime,
+          TInTimeSeriesChunkMetaDataTest.endTime));
+
+      for (TSFreqType freqType : TSFreqType.values()) {
+        list.add(createT2inTSF(dataType, freqType, null, TInTimeSeriesChunkMetaDataTest.startTime,
+            TInTimeSeriesChunkMetaDataTest.endTime));
+        list.add(createT2inTSF(dataType, freqType, frequencies1,
+            TInTimeSeriesChunkMetaDataTest.startTime, TInTimeSeriesChunkMetaDataTest.endTime));
+        list.add(createT2inTSF(dataType, freqType, frequencies2,
+            TInTimeSeriesChunkMetaDataTest.startTime, TInTimeSeriesChunkMetaDataTest.endTime));
+      }
+    }
+    return list;
+  }
+
+  public static List<TimeInTimeSeriesChunkMetaData> generateTimeInTimeSeriesChunkMetaDataInThrift() {
+    ArrayList<Integer> frequencies1 = new ArrayList<Integer>();
+
+    ArrayList<Integer> frequencies2 = new ArrayList<Integer>();
+    frequencies2.add(132);
+    frequencies2.add(432);
+    frequencies2.add(35435);
+    List<TimeInTimeSeriesChunkMetaData> list = new ArrayList<TimeInTimeSeriesChunkMetaData>();
+    for (DataType dataType : DataType.values()) {
+      list.add(TestHelper.createT1inThrift(dataType, TInTimeSeriesChunkMetaDataTest.startTime,
+          TInTimeSeriesChunkMetaDataTest.endTime));
+
+      for (FreqType freqType : FreqType.values()) {
+        list.add(TestHelper.createT2inThrift(dataType, freqType, null,
+            TInTimeSeriesChunkMetaDataTest.startTime, TInTimeSeriesChunkMetaDataTest.endTime));
+        list.add(TestHelper.createT2inThrift(dataType, freqType, frequencies1,
+            TInTimeSeriesChunkMetaDataTest.startTime, TInTimeSeriesChunkMetaDataTest.endTime));
+        list.add(TestHelper.createT2inThrift(dataType, freqType, frequencies2,
+            TInTimeSeriesChunkMetaDataTest.startTime, TInTimeSeriesChunkMetaDataTest.endTime));
+      }
+    }
+    return list;
+  }
+
+  public static TimeInTimeSeriesChunkMetaData createT1inThrift(DataType dataType, long startTime,
+      long endTime) {
+    TimeInTimeSeriesChunkMetaData metaData =
+        new TimeInTimeSeriesChunkMetaData(dataType, startTime, endTime);
+    return metaData;
+  }
+
+  public static TimeInTimeSeriesChunkMetaData createT2inThrift(DataType dataType, FreqType freqType,
+      List<Integer> frequencies, long startTime, long endTime) {
+    TimeInTimeSeriesChunkMetaData metaData =
+        new TimeInTimeSeriesChunkMetaData(dataType, startTime, endTime);
+    metaData.setFreq_type(freqType);
+    metaData.setFrequencies(frequencies);
+    List<String> dataValues = new ArrayList<String>();
+    dataValues.add("Q");
+    dataValues.add("W");
+    dataValues.add("E");
+    dataValues.add("R");
+    metaData.setEnum_values(dataValues);
+    return metaData;
+  }
+
+  public static List<VInTimeSeriesChunkMetaData> generateVSeriesChunkMetaDataListInTSF()
+      throws UnsupportedEncodingException {
+    List<VInTimeSeriesChunkMetaData> list = new ArrayList<VInTimeSeriesChunkMetaData>();
+    for (TSDataType dataType : TSDataType.values()) {
+      list.add(TestHelper.createSimpleV1InTSF(dataType, null));
+      list.add(TestHelper.createSimpleV1InTSF(dataType, new TSDigest()));
+      list.add(TestHelper.createSimpleV2InTSF(dataType, new TSDigest(),
+          VInTimeSeriesChunkMetaDataTest.maxString, VInTimeSeriesChunkMetaDataTest.minString));
+    }
+    return list;
+  }
+
+  public static List<ValueInTimeSeriesChunkMetaData> generateValueInTimeSeriesChunkMetaDataInThrift()
+      throws UnsupportedEncodingException {
+    List<ValueInTimeSeriesChunkMetaData> list = new ArrayList<ValueInTimeSeriesChunkMetaData>();
+    for (DataType dataType : DataType.values()) {
+      list.add(TestHelper.createSimpleV1InThrift(dataType, null));
+      list.add(TestHelper.createSimpleV1InThrift(dataType, new Digest()));
+      list.add(TestHelper.createSimpleV2InThrift(dataType, new Digest(),
+          VInTimeSeriesChunkMetaDataTest.maxString, VInTimeSeriesChunkMetaDataTest.minString));
+    }
+    return list;
+  }
+
+  public static ValueInTimeSeriesChunkMetaData createSimpleV2InThrift(DataType dataType,
+      Digest digest, String maxString, String minString) throws UnsupportedEncodingException {
+    ByteBuffer max = ByteBuffer.wrap(maxString.getBytes("UTF-8"));
+    ByteBuffer min = ByteBuffer.wrap(minString.getBytes("UTF-8"));
+
+    ValueInTimeSeriesChunkMetaData metaData = new ValueInTimeSeriesChunkMetaData(dataType);
+    metaData.setMax_error(VInTimeSeriesChunkMetaDataTest.MAX_ERROR);
+    digest.max = max;
+    digest.min = min;
+    metaData.setDigest(digest);
+
+    List<String> dataValues = new ArrayList<String>();
+    dataValues.add("Q");
+    dataValues.add("W");
+    dataValues.add("E");
+    dataValues.add("R");
+    metaData.setEnum_values(dataValues);
+    return metaData;
+  }
+
+  public static ValueInTimeSeriesChunkMetaData createSimpleV1InThrift(DataType dataType,
+      Digest digest) throws UnsupportedEncodingException {
+    ValueInTimeSeriesChunkMetaData metaData = new ValueInTimeSeriesChunkMetaData(dataType);
+    metaData.setMax_error(VInTimeSeriesChunkMetaDataTest.MAX_ERROR);
+    metaData.setDigest(digest);
+    return metaData;
+  }
+
+  public static VInTimeSeriesChunkMetaData createSimpleV2InTSF(TSDataType dataType, TSDigest digest,
+      String maxString, String minString) throws UnsupportedEncodingException {
+    VInTimeSeriesChunkMetaData metaData = new VInTimeSeriesChunkMetaData(dataType);
+    metaData.setMaxError(VInTimeSeriesChunkMetaDataTest.MAX_ERROR);
+    ByteBuffer max = ByteBuffer.wrap(maxString.getBytes("UTF-8"));
+    ByteBuffer min = ByteBuffer.wrap(minString.getBytes("UTF-8"));
+    digest.max = max;
+    digest.min = min;
+    metaData.setDigest(digest);
+
+    List<String> dataValues = new ArrayList<String>();
+    dataValues.add("A");
+    dataValues.add("B");
+    dataValues.add("C");
+    dataValues.add("D");
+    metaData.setEnumValues(dataValues);
+    return metaData;
+  }
+
+  public static VInTimeSeriesChunkMetaData createSimpleV1InTSF(TSDataType dataType, TSDigest digest)
+      throws UnsupportedEncodingException {
+    VInTimeSeriesChunkMetaData metaData = new VInTimeSeriesChunkMetaData(dataType);
+    metaData.setMaxError(VInTimeSeriesChunkMetaDataTest.MAX_ERROR);
+    metaData.setDigest(digest);
+    return metaData;
+  }
+
+  public static List<String> getJSONArray() {
+    List<String> jsonMetaData = new ArrayList<String>();
+    jsonMetaData.add("fsdfsfsd");
+    jsonMetaData.add("424fd");
+    return jsonMetaData;
+  }
+}
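TestHelper builds digest bounds by wrapping UTF-8 strings in ByteBuffers. A standalone illustration of that pattern, with hypothetical values:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    public class DigestBufferDemo {
        public static void main(String[] args) {
            String maxString = "zzz";  // illustrative bounds
            String minString = "aaa";
            ByteBuffer max = ByteBuffer.wrap(maxString.getBytes(StandardCharsets.UTF_8));
            ByteBuffer min = ByteBuffer.wrap(minString.getBytes(StandardCharsets.UTF_8));
            // decode() reads from the buffer's current position back to a CharSequence
            System.out.println(StandardCharsets.UTF_8.decode(max)); // zzz
            System.out.println(StandardCharsets.UTF_8.decode(min)); // aaa
        }
    }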
@@ -0,0 +1,132 @@
+package cn.edu.thu.tsfiledb.engine.overflow.metadata;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import cn.edu.thu.tsfile.common.utils.RandomAccessOutputStream;
+import cn.edu.thu.tsfile.file.metadata.TimeSeriesChunkMetaData;
+import cn.edu.thu.tsfile.file.metadata.TInTimeSeriesChunkMetaData;
+import cn.edu.thu.tsfile.file.metadata.VInTimeSeriesChunkMetaData;
+import cn.edu.thu.tsfile.file.metadata.enums.CompressionTypeName;
+import cn.edu.thu.tsfile.file.metadata.enums.TSChunkType;
+import cn.edu.thu.tsfile.format.CompressionType;
+import cn.edu.thu.tsfile.format.TimeInTimeSeriesChunkMetaData;
+import cn.edu.thu.tsfile.format.TimeSeriesChunkType;
+import cn.edu.thu.tsfile.format.ValueInTimeSeriesChunkMetaData;
+
+public class TimeSeriesChunkMetaDataTest {
+
+  public static final String MEASUREMENT_UID = "sensor231";
+  public static final long FILE_OFFSET = 2313424242L;
+  public static final long MAX_NUM_ROWS = 423432425L;
+  public static final long TOTAL_BYTE_SIZE = 432453453L;
+  public static final long DATA_PAGE_OFFSET = 42354334L;
+  public static final long DICTIONARY_PAGE_OFFSET = 23434543L;
+  public static final long INDEX_PAGE_OFFSET = 34243453L;
+  final String PATH = "target/outputTimeSeriesChunk.ksn";
+
+  @Before
+  public void setUp() throws Exception {}
+
+  @After
+  public void tearDown() throws Exception {
+    File file = new File(PATH);
+    if (file.exists())
+      file.delete();
+  }
+
+  @Test
+  public void testWriteIntoFile() throws IOException {
+    TimeSeriesChunkMetaData metaData = TestHelper.createSimpleTimeSeriesChunkMetaDataInTSF();
+    File file = new File(PATH);
+    if (file.exists())
+      file.delete();
+    FileOutputStream fos = new FileOutputStream(file);
+    RandomAccessOutputStream out = new RandomAccessOutputStream(file, "rw");
+    Utils.write(metaData.convertToThrift(), out.getOutputStream());
+
+    out.close();
+    fos.close();
+
+    FileInputStream fis = new FileInputStream(new File(PATH));
+    Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift());
+    Utils.isTimeSeriesChunkMetaDataEqual(metaData,
+        Utils.read(fis, new cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData()));
+  }
+
+  @Test
+  public void testConvertToThrift() throws UnsupportedEncodingException {
+    for (CompressionTypeName compressionTypeName : CompressionTypeName.values()) {
+      for (TSChunkType chunkType : TSChunkType.values()) {
+        TimeSeriesChunkMetaData metaData = new TimeSeriesChunkMetaData(MEASUREMENT_UID, chunkType,
+            FILE_OFFSET, compressionTypeName);
+        Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift());
+
+        metaData.setNumRows(MAX_NUM_ROWS);
+        metaData.setTotalByteSize(TOTAL_BYTE_SIZE);
+
+        metaData.setJsonMetaData(TestHelper.getJSONArray());
+
+        metaData.setDataPageOffset(DATA_PAGE_OFFSET);
+        metaData.setDictionaryPageOffset(DICTIONARY_PAGE_OFFSET);
+        metaData.setIndexPageOffset(INDEX_PAGE_OFFSET);
+        Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift());
+        for (TInTimeSeriesChunkMetaData tSeriesMetaData : TestHelper
+            .generateTSeriesChunkMetaDataListInTSF()) {
+          metaData.setTInTimeSeriesChunkMetaData(tSeriesMetaData);
+          Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift());
+          for (VInTimeSeriesChunkMetaData vSeriesMetaData : TestHelper
+              .generateVSeriesChunkMetaDataListInTSF()) {
+            metaData.setVInTimeSeriesChunkMetaData(vSeriesMetaData);
+            Utils.isTimeSeriesChunkMetaDataEqual(metaData, metaData.convertToThrift());
+          }
+        }
+      }
+    }
+  }
+
+  @Test
+  public void testConvertToTSF() throws UnsupportedEncodingException {
+    for (CompressionType compressionType : CompressionType.values()) {
+      for (TimeSeriesChunkType chunkType : TimeSeriesChunkType.values()) {
+        TimeSeriesChunkMetaData metaData = new TimeSeriesChunkMetaData();
+        cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData timeSeriesChunkMetaData =
+            new cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData(MEASUREMENT_UID, chunkType,
+                FILE_OFFSET, compressionType);
+        metaData.convertToTSF(timeSeriesChunkMetaData);
+        Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData);
+
+        timeSeriesChunkMetaData.setNum_rows(MAX_NUM_ROWS);
+        timeSeriesChunkMetaData.setTotal_byte_size(TOTAL_BYTE_SIZE);
+
+        timeSeriesChunkMetaData.setJson_metadata(TestHelper.getJSONArray());
+        timeSeriesChunkMetaData.setData_page_offset(DATA_PAGE_OFFSET);
+        timeSeriesChunkMetaData.setDictionary_page_offset(DICTIONARY_PAGE_OFFSET);
+        timeSeriesChunkMetaData.setIndex_page_offset(INDEX_PAGE_OFFSET);
+
+        metaData.convertToTSF(timeSeriesChunkMetaData);
+        Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData);
+
+        for (TimeInTimeSeriesChunkMetaData tSeriesChunkMetaData : TestHelper
+            .generateTimeInTimeSeriesChunkMetaDataInThrift()) {
+          timeSeriesChunkMetaData.setTime_tsc(tSeriesChunkMetaData);
+          metaData.convertToTSF(timeSeriesChunkMetaData);
+          Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData);
+          for (ValueInTimeSeriesChunkMetaData vSeriesChunkMetaData : TestHelper
+              .generateValueInTimeSeriesChunkMetaDataInThrift()) {
+            timeSeriesChunkMetaData.setValue_tsc(vSeriesChunkMetaData);
+            metaData.convertToTSF(timeSeriesChunkMetaData);
+            Utils.isTimeSeriesChunkMetaDataEqual(metaData, timeSeriesChunkMetaData);
+          }
+        }
+      }
+    }
+  }
+}
@ -0,0 +1,327 @@
|
|||
package cn.edu.thu.tsfiledb.engine.overflow.metadata;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.thrift.TBase;
|
||||
import org.apache.thrift.TException;
|
||||
import org.apache.thrift.protocol.TCompactProtocol;
|
||||
import org.apache.thrift.protocol.TProtocol;
|
||||
import org.apache.thrift.transport.TIOStreamTransport;
|
||||
|
||||
import cn.edu.thu.tsfile.file.metadata.RowGroupMetaData;
|
||||
import cn.edu.thu.tsfile.file.metadata.TInTimeSeriesChunkMetaData;
|
||||
import cn.edu.thu.tsfile.file.metadata.TSFileMetaData;
|
||||
import cn.edu.thu.tsfile.file.metadata.TimeSeriesChunkMetaData;
|
||||
import cn.edu.thu.tsfile.file.metadata.TimeSeriesMetadata;
|
||||
import cn.edu.thu.tsfile.file.metadata.VInTimeSeriesChunkMetaData;
|
||||
import cn.edu.thu.tsfiledb.engine.overflow.metadata.OFFileMetadata;
|
||||
import cn.edu.thu.tsfiledb.engine.overflow.metadata.OFRowGroupListMetadata;
|
||||
import cn.edu.thu.tsfiledb.engine.overflow.metadata.OFSeriesListMetadata;
|
||||
import cn.edu.thu.tsfile.format.FileMetaData;
|
||||
import cn.edu.thu.tsfile.format.TimeInTimeSeriesChunkMetaData;
|
||||
import cn.edu.thu.tsfile.format.TimeSeries;
|
||||
import cn.edu.thu.tsfile.format.ValueInTimeSeriesChunkMetaData;
|
||||
|
||||
public class Utils {
|
||||
public static void isListEqual(List<?> listA, List<?> listB, String name) {
|
||||
if ((listA == null) ^ (listB == null)) {
|
||||
System.out.println("error");
|
||||
fail(String.format("one of %s is null", name));
|
||||
}
|
||||
if ((listA != null) && (listB != null)) {
|
||||
if (listA.size() != listB.size()) {
|
||||
fail(String.format("%s size is different", name));
|
||||
}
|
||||
for (int i = 0; i < listA.size(); i++) {
|
||||
assertTrue(listA.get(i).equals(listB.get(i)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* when one of A and B is Null, A != B, so test case fails.
|
||||
*
|
||||
* @param objectA
|
||||
* @param objectB
|
||||
* @param name
|
||||
* @return false - A and B both are NULL, so we do not need to check whether their members are
|
||||
* equal
|
||||
* @return true - A and B both are not NULL, so we need to check their members
|
||||
*/
|
||||
public static boolean isTwoObjectsNotNULL(Object objectA, Object objectB, String name) {
|
||||
if ((objectA == null) && (objectB == null))
|
||||
return false;
|
||||
if ((objectA == null) ^ (objectB == null))
|
||||
fail(String.format("one of %s is null", name));
|
||||
return true;
|
||||
}
|
||||
|
||||
public static void isStringSame(Object str1, Object str2, String name) {
|
||||
if ((str1 == null) && (str2 == null))
|
||||
return;
|
||||
if ((str1 == null) ^ (str2 == null))
|
||||
fail(String.format("one of %s string is null", name));
|
||||
assertTrue(str1.toString().equals(str2.toString()));
|
||||
}
|
||||
|
||||
public static void isTimeSeriesEqual(TimeSeriesMetadata timeSeriesInTSF,
|
||||
cn.edu.thu.tsfile.format.TimeSeries timeSeriesInThrift) {
|
||||
if (Utils.isTwoObjectsNotNULL(timeSeriesInTSF.getMeasurementUID(),
|
||||
timeSeriesInThrift.getMeasurement_uid(), "sensorUID")) {
|
||||
assertTrue(
|
||||
timeSeriesInTSF.getMeasurementUID().equals(timeSeriesInThrift.getMeasurement_uid()));
|
||||
}
|
||||
if (Utils.isTwoObjectsNotNULL(timeSeriesInTSF.getDeltaObjectType(),
|
||||
timeSeriesInThrift.getDelta_object_type(), "device type")) {
|
||||
assertTrue(
|
||||
timeSeriesInTSF.getDeltaObjectType().equals(timeSeriesInThrift.getDelta_object_type()));
|
||||
}
|
||||
assertTrue(timeSeriesInTSF.getTypeLength() == timeSeriesInThrift.getType_length());
|
||||
if (Utils.isTwoObjectsNotNULL(timeSeriesInTSF.getType(), timeSeriesInThrift.getType(),
|
||||
"data type")) {
|
||||
assertTrue(timeSeriesInTSF.getType().toString() == timeSeriesInThrift.getType().toString());
|
||||
}
|
||||
if (Utils.isTwoObjectsNotNULL(timeSeriesInTSF.getFreqType(), timeSeriesInThrift.getFreq_type(),
|
||||
"freq type")) {
|
||||
assertTrue(
|
||||
timeSeriesInTSF.getFreqType().toString() == timeSeriesInThrift.getFreq_type().toString());
|
||||
}
|
||||
|
||||
Utils.isListEqual(timeSeriesInTSF.getFrequencies(), timeSeriesInThrift.getFrequencies(),
|
||||
"frequencies");
|
||||
Utils.isListEqual(timeSeriesInTSF.getEnumValues(), timeSeriesInThrift.getEnum_values(),
|
||||
"data values");
|
||||
}
|
||||
|
||||
public static void isTimeSeriesListEqual(List<TimeSeriesMetadata> timeSeriesInTSF,
|
||||
List<TimeSeries> timeSeriesInThrift) {
|
||||
if (timeSeriesInTSF == null && timeSeriesInThrift == null)
|
||||
return;
|
||||
|
||||
if (timeSeriesInTSF == null && timeSeriesInThrift == null)
|
||||
return;
|
||||
if ((timeSeriesInTSF == null) ^ (timeSeriesInThrift == null))
|
||||
fail("one list is null");
|
||||
if (timeSeriesInThrift.size() != timeSeriesInTSF.size())
|
||||
fail("list size is different");
|
||||
for (int i = 0; i < timeSeriesInThrift.size(); i++) {
|
||||
isTimeSeriesEqual(timeSeriesInTSF.get(i), timeSeriesInThrift.get(i));
|
||||
}
|
||||
}
|
||||
|
||||
public static void isTSeriesChunkMetadataEqual(TInTimeSeriesChunkMetaData tSeriesMetaData,
|
||||
TimeInTimeSeriesChunkMetaData timeInTimeSeriesChunkMetaData) {
|
||||
if (Utils.isTwoObjectsNotNULL(tSeriesMetaData, timeInTimeSeriesChunkMetaData,
|
||||
"TimeInTimeSeriesChunkMetaData")) {
|
||||
Utils.isStringSame(tSeriesMetaData.getDataType(),
|
||||
timeInTimeSeriesChunkMetaData.getData_type(), "data type");
|
||||
Utils.isStringSame(tSeriesMetaData.getFreqType(),
|
||||
timeInTimeSeriesChunkMetaData.getFreq_type(), "freq type");
|
||||
assertTrue(tSeriesMetaData.getStartTime() == timeInTimeSeriesChunkMetaData.getStartime());
|
||||
assertTrue(tSeriesMetaData.getEndTime() == timeInTimeSeriesChunkMetaData.getEndtime());
|
||||
Utils.isListEqual(tSeriesMetaData.getFrequencies(),
|
||||
timeInTimeSeriesChunkMetaData.getFrequencies(), "frequencies");
|
||||
Utils.isListEqual(tSeriesMetaData.getEnumValues(),
|
||||
timeInTimeSeriesChunkMetaData.getEnum_values(), "data values");
|
||||
}
|
||||
}
|
||||
|
||||
  public static void isVSeriesChunkMetadataEqual(VInTimeSeriesChunkMetaData vSeriesMetaData,
      ValueInTimeSeriesChunkMetaData valueInTimeSeriesChunkMetaData) {
    if (Utils.isTwoObjectsNotNULL(vSeriesMetaData, valueInTimeSeriesChunkMetaData,
        "ValueInTimeSeriesChunkMetaData")) {
      assertTrue(vSeriesMetaData.getMaxError() == valueInTimeSeriesChunkMetaData.getMax_error());
      assertTrue(vSeriesMetaData.getDataType().toString()
          .equals(valueInTimeSeriesChunkMetaData.getData_type().toString()));
      if (Utils.isTwoObjectsNotNULL(vSeriesMetaData.getDigest(),
          valueInTimeSeriesChunkMetaData.getDigest(), "Digest")) {
        if (Utils.isTwoObjectsNotNULL(vSeriesMetaData.getDigest().max,
            valueInTimeSeriesChunkMetaData.getDigest().bufferForMax(), "Digest buffer max")) {
          assertTrue(vSeriesMetaData.getDigest().max
              .equals(valueInTimeSeriesChunkMetaData.getDigest().bufferForMax()));
        }
        if (Utils.isTwoObjectsNotNULL(vSeriesMetaData.getDigest().min,
            valueInTimeSeriesChunkMetaData.getDigest().bufferForMin(), "Digest buffer min")) {
          assertTrue(vSeriesMetaData.getDigest().min
              .equals(valueInTimeSeriesChunkMetaData.getDigest().bufferForMin()));
        }
      }
      Utils.isListEqual(vSeriesMetaData.getEnumValues(),
          valueInTimeSeriesChunkMetaData.getEnum_values(), "data values");
    }
  }

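  /**
   * Compares a full chunk metadata pair: the shared properties (measurement UID, chunk type, file
   * offset, compression), the row/byte counts and page offsets, the JSON metadata, and the nested
   * time- and value-side metadata.
   */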
  public static void isTimeSeriesChunkMetaDataEqual(
      TimeSeriesChunkMetaData timeSeriesChunkMetaDataInTSF,
      cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData timeSeriesChunkMetaDataInThrift) {
    if (Utils.isTwoObjectsNotNULL(timeSeriesChunkMetaDataInTSF, timeSeriesChunkMetaDataInThrift,
        "TimeSeriesChunkMetaData")) {
      assertTrue(timeSeriesChunkMetaDataInTSF.getProperties().getMeasurementUID()
          .equals(timeSeriesChunkMetaDataInThrift.getMeasurement_uid()));
      assertTrue(timeSeriesChunkMetaDataInTSF.getProperties().getTsChunkType().toString()
          .equals(timeSeriesChunkMetaDataInThrift.getTimeseries_chunk_type().toString()));
      assertTrue(timeSeriesChunkMetaDataInTSF.getProperties()
          .getFileOffset() == timeSeriesChunkMetaDataInThrift.getFile_offset());
      assertTrue(timeSeriesChunkMetaDataInTSF.getProperties().getCompression().toString()
          .equals(timeSeriesChunkMetaDataInThrift.getCompression_type().toString()));

      assertTrue(timeSeriesChunkMetaDataInTSF.getNumRows() == timeSeriesChunkMetaDataInThrift
          .getNum_rows());
      assertTrue(timeSeriesChunkMetaDataInTSF.getTotalByteSize() == timeSeriesChunkMetaDataInThrift
          .getTotal_byte_size());
      assertTrue(timeSeriesChunkMetaDataInTSF.getDataPageOffset() == timeSeriesChunkMetaDataInThrift
          .getData_page_offset());
      assertTrue(
          timeSeriesChunkMetaDataInTSF.getDictionaryPageOffset() == timeSeriesChunkMetaDataInThrift
              .getDictionary_page_offset());
      assertTrue(timeSeriesChunkMetaDataInTSF
          .getIndexPageOffset() == timeSeriesChunkMetaDataInThrift.getIndex_page_offset());
      Utils.isListEqual(timeSeriesChunkMetaDataInTSF.getJsonMetaData(),
          timeSeriesChunkMetaDataInThrift.getJson_metadata(), "json metadata");

      Utils.isTSeriesChunkMetadataEqual(
          timeSeriesChunkMetaDataInTSF.getTInTimeSeriesChunkMetaData(),
          timeSeriesChunkMetaDataInThrift.getTime_tsc());
      Utils.isVSeriesChunkMetadataEqual(
          timeSeriesChunkMetaDataInTSF.getVInTimeSeriesChunkMetaData(),
          timeSeriesChunkMetaDataInThrift.getValue_tsc());
    }
  }

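  /**
   * Compares row group metadata, including the delta object UID/type, byte size, row count, file
   * path, and every contained TimeSeriesChunkMetaData.
   */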
  public static void isRowGroupMetaDataEqual(RowGroupMetaData rowGroupMetaDataInTSF,
      cn.edu.thu.tsfile.format.RowGroupMetaData rowGroupMetaDataInThrift) {
    if (Utils.isTwoObjectsNotNULL(rowGroupMetaDataInTSF, rowGroupMetaDataInThrift,
        "RowGroupMetaData")) {
      assertTrue(rowGroupMetaDataInTSF.getDeltaObjectUID()
          .equals(rowGroupMetaDataInThrift.getDelta_object_uid()));
      assertTrue(rowGroupMetaDataInTSF.getDeltaObjectType()
          .equals(rowGroupMetaDataInThrift.getDelta_object_type()));
      assertTrue(rowGroupMetaDataInTSF.getTotalByteSize() == rowGroupMetaDataInThrift
          .getTotal_byte_size());
      assertTrue(
          rowGroupMetaDataInTSF.getNumOfRows() == rowGroupMetaDataInThrift.getMax_num_rows());

      if (Utils.isTwoObjectsNotNULL(rowGroupMetaDataInTSF.getPath(),
          rowGroupMetaDataInThrift.getFile_path(), "Row group metadata file path")) {
        assertTrue(rowGroupMetaDataInTSF.getPath().equals(rowGroupMetaDataInThrift.getFile_path()));
      }

      if (Utils.isTwoObjectsNotNULL(rowGroupMetaDataInTSF.getMetaDatas(),
          rowGroupMetaDataInThrift.getTsc_metadata(), "TimeSeriesChunkMetaData List")) {
        List<TimeSeriesChunkMetaData> listTSF = rowGroupMetaDataInTSF.getMetaDatas();
        List<cn.edu.thu.tsfile.format.TimeSeriesChunkMetaData> listThrift =
            rowGroupMetaDataInThrift.getTsc_metadata();

        if (listTSF.size() != listThrift.size()) {
          fail("TimeSeriesChunkMetaData list size is different");
        }

        for (int i = 0; i < listTSF.size(); i++) {
          Utils.isTimeSeriesChunkMetaDataEqual(listTSF.get(i), listThrift.get(i));
        }
      }
    }
  }

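  /**
   * Compares file-level metadata: the time series list, JSON metadata, and every row group. The
   * Thrift max_num_rows field is expected to equal the sum of the row counts over all row groups.
   */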
  public static void isFileMetaDataEqual(TSFileMetaData fileMetaDataInTSF,
      FileMetaData fileMetaDataInThrift) {
    if (Utils.isTwoObjectsNotNULL(fileMetaDataInTSF, fileMetaDataInThrift, "File MetaData")) {
      Utils.isTimeSeriesListEqual(fileMetaDataInTSF.getTimeSeriesList(),
          fileMetaDataInThrift.getTimeseries_list());
      Utils.isListEqual(fileMetaDataInTSF.getJsonMetaData(),
          fileMetaDataInThrift.getJson_metadata(), "json metadata");

      if (Utils.isTwoObjectsNotNULL(fileMetaDataInTSF.getRowGroups(),
          fileMetaDataInThrift.getRow_groups(), "Row Group List")) {
        List<RowGroupMetaData> listTSF = fileMetaDataInTSF.getRowGroups();
        List<cn.edu.thu.tsfile.format.RowGroupMetaData> listThrift =
            fileMetaDataInThrift.getRow_groups();

        if (listTSF.size() != listThrift.size()) {
          fail("RowGroupMetaData list size is different");
        }
        long maxNumRows = 0;
        for (int i = 0; i < listTSF.size(); i++) {
          Utils.isRowGroupMetaDataEqual(listTSF.get(i), listThrift.get(i));
          maxNumRows += listTSF.get(i).getNumOfRows();
        }
        assertTrue(maxNumRows == fileMetaDataInThrift.getMax_num_rows());
      }
    }
  }

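  /**
   * Compares overflow series-list metadata: the measurement id and every contained
   * TimeSeriesChunkMetaData, iterated from the back of the list.
   */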
  public static void isOFSeriesListMetaDataEqual(OFSeriesListMetadata ofSeriesListMetadata,
      cn.edu.thu.tsfiledb.engine.overflow.thrift.OFSeriesListMetadata ofSeriesListMetadata2) {
    if (Utils.isTwoObjectsNotNULL(ofSeriesListMetadata, ofSeriesListMetadata2,
        "OFSeriesListMetaData")) {
      assertEquals(ofSeriesListMetadata.getMeasurementId(),
          ofSeriesListMetadata2.getMeasurement_id());
      assertEquals(ofSeriesListMetadata.getMetaDatas().size(),
          ofSeriesListMetadata2.getTsc_metadataSize());
      int size = ofSeriesListMetadata.getMetaDatas().size();
      while (size > 0) {
        size--;
        Utils.isTimeSeriesChunkMetaDataEqual(ofSeriesListMetadata.getMetaDatas().get(size),
            ofSeriesListMetadata2.getTsc_metadata().get(size));
      }
    }
  }

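  /**
   * Compares overflow row-group-list metadata: the delta object id and every contained
   * OFSeriesListMetadata.
   */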
  public static void isOFRowGroupListMetaDataEqual(OFRowGroupListMetadata groupListMetadata,
      cn.edu.thu.tsfiledb.engine.overflow.thrift.OFRowGroupListMetadata groupListMetadata2) {
    if (Utils.isTwoObjectsNotNULL(groupListMetadata, groupListMetadata2,
        "OFRowGroupListMetaData")) {
      assertEquals(groupListMetadata.getDeltaObjectId(), groupListMetadata2.getDeltaObject_id());
      assertEquals(groupListMetadata.getMetaDatas().size(),
          groupListMetadata2.getMeasurement_metadataSize());
      int size = groupListMetadata.getMetaDatas().size();
      while (size > 0) {
        size--;
        Utils.isOFSeriesListMetaDataEqual(groupListMetadata.getMetaDatas().get(size),
            groupListMetadata2.getMeasurement_metadata().get(size));
      }
    }
  }

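  /**
   * Compares overflow file metadata: the last footer offset and every contained
   * OFRowGroupListMetadata.
   */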
  public static void isOFFileMetaDataEqual(OFFileMetadata fileMetadata,
      cn.edu.thu.tsfiledb.engine.overflow.thrift.OFFileMetadata fileMetadata2) {
    if (Utils.isTwoObjectsNotNULL(fileMetadata, fileMetadata2, "OFFileMetaData")) {
      assertEquals(fileMetadata.getLastFooterOffset(), fileMetadata2.getLast_footer_offset());
      assertEquals(fileMetadata.getMetaDatas().size(),
          fileMetadata2.getDeltaObject_metadataSize());
      int size = fileMetadata.getMetaDatas().size();
      while (size > 0) {
        size--;
        Utils.isOFRowGroupListMetaDataEqual(fileMetadata.getMetaDatas().get(size),
            fileMetadata2.getDeltaObject_metadata().get(size));
      }
    }
  }

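  /**
   * Serialization helpers: write a Thrift object to a stream, or read one back, using Thrift's
   * TCompactProtocol over a TIOStreamTransport.
   */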
  public static void write(TBase<?, ?> tbase, OutputStream to) throws IOException {
    try {
      tbase.write(protocol(to));
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  public static <T extends TBase<?, ?>> T read(InputStream from, T tbase) throws IOException {
    try {
      tbase.read(protocol(from));
      return tbase;
    } catch (TException e) {
      throw new IOException(e);
    }
  }

  private static TProtocol protocol(OutputStream to) {
    return new TCompactProtocol(new TIOStreamTransport(to));
  }

  private static TProtocol protocol(InputStream from) {
    return new TCompactProtocol(new TIOStreamTransport(from));
  }
}

@ -0,0 +1,114 @@

package cn.edu.thu.tsfiledb.engine.overflow.metadata;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import cn.edu.thu.tsfile.common.utils.RandomAccessOutputStream;
import cn.edu.thu.tsfile.file.metadata.TSDigest;
import cn.edu.thu.tsfile.file.metadata.VInTimeSeriesChunkMetaData;
import cn.edu.thu.tsfile.file.metadata.enums.TSDataType;
import cn.edu.thu.tsfile.format.DataType;
import cn.edu.thu.tsfile.format.Digest;
import cn.edu.thu.tsfile.format.ValueInTimeSeriesChunkMetaData;

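/**
 * Tests round-trip conversion of VInTimeSeriesChunkMetaData between its TsFile and Thrift
 * representations, both in memory and through a file written with the compact protocol.
 */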
public class VInTimeSeriesChunkMetaDataTest {
  private VInTimeSeriesChunkMetaData metaData;
  public static final int MAX_ERROR = 1232;
  public static final String maxString = "3244324";
  public static final String minString = "fddsfsfgd";
  final String PATH = "target/outputV.ksn";

  @Before
  public void setUp() throws Exception {
    metaData = new VInTimeSeriesChunkMetaData();
  }

  @After
  public void tearDown() throws Exception {
    File file = new File(PATH);
    if (file.exists())
      file.delete();
  }

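  /**
   * Writes the Thrift form of the metadata to a file, then reads it back and checks both the
   * in-memory conversion and the deserialized copy against the original.
   */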
  @Test
  public void testWriteIntoFile() throws IOException {
    VInTimeSeriesChunkMetaData metaData =
        TestHelper.createSimpleV2InTSF(TSDataType.BYTE_ARRAY, new TSDigest(), maxString, minString);

    File file = new File(PATH);
    if (file.exists())
      file.delete();
    FileOutputStream fos = new FileOutputStream(file);
    RandomAccessOutputStream out = new RandomAccessOutputStream(file, "rw");
    Utils.write(metaData.convertToThrift(), out.getOutputStream());

    out.close();
    fos.close();

    FileInputStream fis = new FileInputStream(new File(PATH));
    Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());
    Utils.isVSeriesChunkMetadataEqual(metaData,
        Utils.read(fis, new ValueInTimeSeriesChunkMetaData()));
    fis.close();
  }

  @Test
  public void testConvertToThrift() throws UnsupportedEncodingException {
    for (TSDataType dataType : TSDataType.values()) {
      VInTimeSeriesChunkMetaData metaData = new VInTimeSeriesChunkMetaData(dataType);
      Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());

      metaData.setMaxError(3123);
      Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());
      metaData.setMaxError(-11);
      Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());

      ByteBuffer max = ByteBuffer.wrap(maxString.getBytes("UTF-8"));
      ByteBuffer min = ByteBuffer.wrap(minString.getBytes("UTF-8"));
      TSDigest digest = new TSDigest();
      metaData.setDigest(digest);
      Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());
      digest.max = max;
      digest.min = min;
      metaData.setDigest(digest);
      Utils.isVSeriesChunkMetadataEqual(metaData, metaData.convertToThrift());
    }
  }

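  /**
   * The reverse direction: builds Thrift metadata for every DataType and checks that convertToTSF
   * reproduces it, with and without max error and digest values set.
   */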
  @Test
  public void testConvertToTSF() throws UnsupportedEncodingException {
    for (DataType dataType : DataType.values()) {
      ValueInTimeSeriesChunkMetaData valueInTimeSeriesChunkMetaData =
          new ValueInTimeSeriesChunkMetaData(dataType);
      metaData.convertToTSF(valueInTimeSeriesChunkMetaData);
      Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData);

      valueInTimeSeriesChunkMetaData.setMax_error(3123);
      metaData.convertToTSF(valueInTimeSeriesChunkMetaData);
      Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData);

      valueInTimeSeriesChunkMetaData.setMax_error(-231);
      metaData.convertToTSF(valueInTimeSeriesChunkMetaData);
      Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData);

      ByteBuffer max = ByteBuffer.wrap(maxString.getBytes("UTF-8"));
      ByteBuffer min = ByteBuffer.wrap(minString.getBytes("UTF-8"));
      Digest digest = new Digest();
      valueInTimeSeriesChunkMetaData.setDigest(digest);
      metaData.convertToTSF(valueInTimeSeriesChunkMetaData);
      Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData);

      digest.max = max;
      digest.min = min;
      valueInTimeSeriesChunkMetaData.setDigest(digest);
      metaData.convertToTSF(valueInTimeSeriesChunkMetaData);
      Utils.isVSeriesChunkMetadataEqual(metaData, valueInTimeSeriesChunkMetaData);
    }
  }
}

@ -0,0 +1,234 @@

package cn.edu.thu.tsfiledb.metadata;

import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import cn.edu.thu.tsfile.file.metadata.enums.TSDataType;
import cn.edu.thu.tsfile.timeseries.read.qp.Path;
import cn.edu.thu.tsfiledb.exception.MetadataArgsErrorException;
import cn.edu.thu.tsfiledb.exception.PathErrorException;

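/**
 * Tests MManager, the metadata manager: building an MTree of storage groups and time series,
 * querying it, and linking MTree nodes into a PTree (property tree).
 */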
public class MManagerTest {

  private MManager mManager = MManager.getInstance();

  @Before
  public void before() {
    mManager.clear();
    try {
      mManager.addAPathToMTree("root.vehicle.d1.s1", "INT32", "RLE", new String[0]);
      mManager.addAPathToMTree("root.vehicle.d1.s2", "INT32", "RLE", new String[0]);
      mManager.addAPathToMTree("root.vehicle.d1.s3", "INT32", "RLE", new String[0]);
      mManager.addAPathToMTree("root.vehicle.d1.s4", "INT32", "RLE", new String[0]);
      mManager.addAPathToMTree("root.vehicle.d2.s1", "INT32", "RLE", new String[0]);
      mManager.addAPathToMTree("root.vehicle.d2.s2", "FLOAT", "TS_2DIFF", new String[0]);
      mManager.addAPathToMTree("root.vehicle.d2.s3", "DOUBLE", "RLE", new String[0]);
      mManager.addAPathToMTree("root.vehicle.d2.s4", "INT64", "RLE", new String[0]);
      mManager.addAPathToMTree("root.laptop.d1.s1", "INT32", "RLE", new String[0]);
      mManager.addAPathToMTree("root.laptop.d1.s2", "INT32", "RLE", new String[0]);
      mManager.addAPathToMTree("root.laptop.d1.s3", "INT32", "RLE", new String[0]);
      mManager.addAPathToMTree("root.laptop.d1.s4", "INT32", "RLE", new String[0]);
      mManager.addAPathToMTree("root.laptop.d2.s1", "INT32", "RLE", new String[0]);
      mManager.addAPathToMTree("root.laptop.d2.s2", "FLOAT", "TS_2DIFF", new String[0]);
      mManager.addAPathToMTree("root.laptop.d2.s3", "DOUBLE", "RLE", new String[0]);

      mManager.setStorageLevelToMTree("root.vehicle.d1");
      mManager.setStorageLevelToMTree("root.laptop");
    } catch (Exception e) {
      e.printStackTrace();
      fail("unexpected exception while building the test MTree: " + e.getMessage());
    }
  }

  @After
  public void after() {
    mManager.deleteLogAndDataFiles();
  }

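  /** Deleting a leaf path from the MTree removes it from path-existence checks. */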
  @Test
  public void deletePathFromMTree() {
    try {
      Assert.assertEquals(true, mManager.pathExist("root.vehicle.d2.s4"));
      mManager.deletePathFromMTree("root.vehicle.d2.s4");
      Assert.assertEquals(false, mManager.pathExist("root.vehicle.d2.s4"));
    } catch (PathErrorException | IOException e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }

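  /**
   * Queries delta object type, series type, and the per-type schema map, checking that the schema
   * for a type keeps a column as long as any device of that type still carries it.
   */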
  @Test
  public void testQueryInMTree() {
    String deltaObjectType;
    try {
      deltaObjectType = mManager.getDeltaObjectTypeByPath("root.vehicle.d2.s4");
      Assert.assertEquals("root.vehicle", deltaObjectType);

      TSDataType type = mManager.getSeriesType("root.vehicle.d2.s4");
      Assert.assertEquals(TSDataType.INT64, type);

      Map<String, List<ColumnSchema>> ret = mManager.getSchemaForAllType();
      Assert.assertEquals(4, ret.get("vehicle").size());
      mManager.deletePathFromMTree("root.vehicle.d2.s4");
      ret = mManager.getSchemaForAllType();
      Assert.assertEquals(4, ret.get("vehicle").size());
      mManager.deletePathFromMTree("root.vehicle.d1.s4");
      ret = mManager.getSchemaForAllType();
      Assert.assertEquals(3, ret.get("vehicle").size());
      mManager.deletePathFromMTree("root.vehicle.d1");

      Metadata meta = mManager.getMetadata();
      Assert.assertEquals(1, meta.getDeltaObjectsForOneType("vehicle").size());
    } catch (PathErrorException | IOException e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }

  @Test
  public void testQueryInMTree2() {
    try {
      List<ColumnSchema> ret = mManager.getSchemaForOneType("root.vehicle");
      Assert.assertEquals(4, ret.size());

      int cnt = mManager.getFileCountForOneType("root.laptop");
      Assert.assertEquals(1, cnt);

      String fileName = mManager.getFileNameByPath("root.laptop.d1.s1");
      Assert.assertEquals("root.laptop", fileName);

      HashMap<String, ArrayList<String>> ret2 = mManager.getAllPathGroupByFileName("root.vehicle");
      Assert.assertEquals(2, ret2.keySet().size());

      List<String> paths = mManager.getPaths("root.vehicle.*.s1");
      Assert.assertEquals(2, paths.size());
      Assert.assertEquals("root.vehicle.d1.s1", paths.get(0));

      boolean ret3 = mManager.pathExist("root.vehiccc.d1.s2");
      Assert.assertEquals(false, ret3);

      ColumnSchema cs = mManager.getSchemaForOnePath("root.vehicle.d1.s1");
      Assert.assertEquals("s1", cs.name);
      Assert.assertEquals(TSDataType.INT32, cs.dataType);

      List<Path> paths2 = new ArrayList<Path>();
      paths2.add(new Path("root.vehicle.d1.s1"));
      boolean ret4 = mManager.checkFileLevel(paths2);
      Assert.assertEquals(true, ret4);
    } catch (Exception e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }

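  /**
   * Links and unlinks MTree measurement nodes under PTree paths and checks the grouped path view;
   * a deleted PTree path is expected to raise an exception when queried again.
   */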
  @Test
  public void testPTree() {
    try {
      mManager.addAPTree("region");
      mManager.addAPathToPTree("region.beijing");
      mManager.addAPathToPTree("region.shanghai");
      mManager.linkMNodeToPTree("region.beijing", "root.vehicle.d1.s1");
      mManager.linkMNodeToPTree("region.beijing", "root.vehicle.d1.s2");
      mManager.linkMNodeToPTree("region.beijing", "root.vehicle.d1.s3");
      Assert.assertEquals(3,
          mManager.getAllPathGroupByFileName("region.beijing").get("root.vehicle.d1").size());
      mManager.unlinkMNodeFromPTree("region.beijing", "root.vehicle.d1.s3");
      Assert.assertEquals(2,
          mManager.getAllPathGroupByFileName("region.beijing").get("root.vehicle.d1").size());
      mManager.unlinkMNodeFromPTree("region.beijing", "root.vehicle.d1.s2");
      mManager.unlinkMNodeFromPTree("region.beijing", "root.vehicle.d1.s1");
      Assert.assertEquals(false,
          mManager.getAllPathGroupByFileName("region.beijing").containsKey("root.vehicle.d1"));
      mManager.linkMNodeToPTree("region.shanghai", "root.vehicle.d1.s1");
      mManager.linkMNodeToPTree("region.shanghai", "root.vehicle.d1.s2");
      Assert.assertEquals(true,
          mManager.getAllPathGroupByFileName("region.shanghai").containsKey("root.vehicle.d1"));
    } catch (PathErrorException | IOException | MetadataArgsErrorException e) {
      e.printStackTrace();
      fail(e.getMessage());
    }

    try {
      mManager.deletePathFromPTree("region.shanghai");
      mManager.getAllPathGroupByFileName("region.shanghai").containsKey("root.vehicle.d1");
      fail("region.shanghai has been deleted");
    } catch (PathErrorException | IOException e) {
      // expected: region.shanghai no longer exists
    }
  }

  @Test
  public void testFlush() {
    try {
      mManager.flushObjectToFile();
      mManager.deleteLogAndDataFiles();
    } catch (IOException e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }

  @Test
  public void testLink() {
    try {
      mManager.addAPTree("region");
      mManager.addAPathToPTree("region.beijing");
      mManager.linkMNodeToPTree("region.beijing", "root.vehicle.d1");
      mManager.linkMNodeToPTree("region.beijing", "root.vehicle.d1.s1");
      mManager.unlinkMNodeFromPTree("region.beijing", "root.vehicle.d1.s1");
      mManager.linkMNodeToPTree("region.beijing", "root.vehicle.d1.s2");
      mManager.unlinkMNodeFromPTree("region.beijing", "root.vehicle.d1");
      Assert.assertEquals(0, mManager.getAllPathGroupByFileName("region.beijing").size());
    } catch (IOException | MetadataArgsErrorException | PathErrorException e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }

  @Test
  public void testPTree2() {
    try {
      mManager.addAPTree("region");
    } catch (IOException | MetadataArgsErrorException e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
    try {
      mManager.addAPathToPTree("city.beijing");
      fail("city has not been added into PTree");
    } catch (PathErrorException | IOException | MetadataArgsErrorException e) {
      // expected: "city" is not a registered PTree root
    }
  }
}