mirror of https://github.com/apache/iotdb
parent
268a8cdab3
commit
54089f9064
27
pom.xml
27
pom.xml
|
@ -25,6 +25,33 @@
|
|||
<artifactId>iotdb-jdbc</artifactId>
|
||||
<version>0.3.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>cn.edu.fudan.dsm</groupId>
|
||||
<artifactId>kvmatch-iotdb</artifactId>
|
||||
<version>1.0.3</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-api</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-log4j12</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>log4j</groupId>
|
||||
<artifactId>log4j</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>cn.edu.tsinghua</groupId>
|
||||
<artifactId>tsfile</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
|
|
|
@ -152,7 +152,7 @@ ArrayList<ParseError> errors = new ArrayList<ParseError>();
|
|||
xlateMap.put("COLON", ":");
|
||||
xlateMap.put("COMMA", ",");
|
||||
xlateMap.put("SEMICOLON", ");");
|
||||
|
||||
|
||||
xlateMap.put("LPAREN", "(");
|
||||
xlateMap.put("RPAREN", ")");
|
||||
xlateMap.put("LSQUARE", "[");
|
||||
|
@ -229,7 +229,7 @@ ArrayList<ParseError> errors = new ArrayList<ParseError>();
|
|||
+ input.LT(1) != null ? " " + getTokenErrorDisplay(input.LT(1)) : ""
|
||||
+ input.LT(1) != null ? " " + getTokenErrorDisplay(input.LT(1)) : ""
|
||||
+ input.LT(3) != null ? " " + getTokenErrorDisplay(input.LT(3)) : "";
|
||||
|
||||
|
||||
} else if (e instanceof MismatchedTokenException) {
|
||||
MismatchedTokenException mte = (MismatchedTokenException) e;
|
||||
msg = super.getErrorMessage(e, xlateNames) + (input.LT(-1) == null ? "":" near '" + input.LT(-1).getText()) + "'"
|
||||
|
@ -278,7 +278,7 @@ numberOrString // identifier is string or integer
|
|||
;
|
||||
|
||||
numberOrStringWidely
|
||||
: number
|
||||
: number
|
||||
| StringLiteral
|
||||
;
|
||||
|
||||
|
@ -606,8 +606,8 @@ indexWithEqualExpression
|
|||
|
||||
|
||||
dropIndexStatement
|
||||
: KW_DROP KW_INDEX KW_ON prefixPath
|
||||
-> ^(TOK_DROP ^(TOK_INDEX prefixPath))
|
||||
: KW_DROP KW_INDEX func=Identifier KW_ON p=timeseries
|
||||
-> ^(TOK_DROP ^(TOK_INDEX $p ^(TOK_FUNC $func)))
|
||||
;
|
||||
|
||||
/*
|
||||
|
@ -632,8 +632,8 @@ identifier
|
|||
// ;
|
||||
|
||||
selectClause
|
||||
: KW_SELECT KW_INDEX func=Identifier LPAREN p=prefixPath COMMA file=StringLiteral COMMA epsilon=Float (COMMA alpha=Float COMMA beta=Float)? RPAREN (fromClause)?
|
||||
-> ^(TOK_SELECT_INDEX $func $p $file $epsilon ($alpha $beta)?) fromClause?
|
||||
: KW_SELECT KW_INDEX func=Identifier LPAREN p1=timeseries COMMA p2=timeseries COMMA n1=dateFormatWithNumber COMMA n2=dateFormatWithNumber COMMA epsilon=Float (COMMA alpha=Float COMMA beta=Float)? RPAREN (fromClause)?
|
||||
-> ^(TOK_SELECT_INDEX $func $p1 $p2 $n1 $n2 $epsilon ($alpha $beta)?) fromClause?
|
||||
| KW_SELECT clusteredPath (COMMA clusteredPath)* fromClause
|
||||
-> ^(TOK_SELECT clusteredPath+) fromClause
|
||||
;
|
||||
|
|
|
@ -72,6 +72,11 @@ public class TsfileDBConfig {
|
|||
*/
|
||||
public String walFolder = "wals";
|
||||
|
||||
/**
|
||||
* Data directory for index files (KV-match indexes)
|
||||
*/
|
||||
public String indexFileDir = "index";
|
||||
|
||||
/**
|
||||
* The maximum concurrent thread number for merging overflow
|
||||
*/
|
||||
|
@ -129,5 +134,6 @@ public class TsfileDBConfig {
|
|||
metadataDir = dataDir + metadataDir;
|
||||
derbyHome = dataDir + derbyHome;
|
||||
walFolder = dataDir + walFolder;
|
||||
indexFileDir = dataDir + indexFileDir;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,6 +10,7 @@ import java.util.Set;
|
|||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -28,6 +29,7 @@ import cn.edu.tsinghua.iotdb.exception.FileNodeProcessorException;
|
|||
import cn.edu.tsinghua.iotdb.exception.LRUManagerException;
|
||||
import cn.edu.tsinghua.iotdb.exception.OverflowProcessorException;
|
||||
import cn.edu.tsinghua.iotdb.exception.PathErrorException;
|
||||
import cn.edu.tsinghua.iotdb.index.common.DataFileInfo;
|
||||
import cn.edu.tsinghua.iotdb.metadata.MManager;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.DeletePlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.UpdatePlan;
|
||||
|
@ -131,6 +133,8 @@ public class FileNodeManager extends LRUManager<FileNodeProcessor> {
|
|||
} else {
|
||||
fileNodeProcessor.writeUnlock();
|
||||
}
|
||||
//add index check sum
|
||||
fileNodeProcessor.rebuildIndex();
|
||||
}
|
||||
} catch (PathErrorException | LRUManagerException | FileNodeProcessorException e) {
|
||||
LOGGER.error("Restore all FileNode failed, the reason is {}", e.getMessage());
|
||||
|
@ -460,6 +464,35 @@ public class FileNodeManager extends LRUManager<FileNodeProcessor> {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
*
|
||||
* @param path : the column path
|
||||
* @param startTime : the startTime of index
|
||||
* @param endTime : the endTime of index
|
||||
*
|
||||
* @throws FileNodeManagerException
|
||||
*/
|
||||
public List<DataFileInfo> indexBuildQuery(Path path, long startTime, long endTime) throws FileNodeManagerException {
|
||||
FileNodeProcessor fileNodeProcessor = null;
|
||||
String deltaObjectId = path.getDeltaObjectToString();
|
||||
try {
|
||||
do {
|
||||
fileNodeProcessor = getProcessorWithDeltaObjectIdByLRU(deltaObjectId, false);
|
||||
} while (fileNodeProcessor == null);
|
||||
LOGGER.debug("Get the FileNodeProcessor: {}, query.", fileNodeProcessor.getNameSpacePath());
|
||||
|
||||
return fileNodeProcessor.indexQuery(deltaObjectId, startTime, endTime);
|
||||
} catch (LRUManagerException e) {
|
||||
e.printStackTrace();
|
||||
throw new FileNodeManagerException(e);
|
||||
} finally {
|
||||
if (fileNodeProcessor != null) {
|
||||
fileNodeProcessor.readUnlock();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void endQuery(String deltaObjectId, int token) throws FileNodeManagerException {
|
||||
|
||||
FileNodeProcessor fileNodeProcessor = null;
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
package cn.edu.tsinghua.iotdb.engine.filenode;
|
||||
|
||||
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
@ -30,6 +31,10 @@ import cn.edu.tsinghua.iotdb.exception.BufferWriteProcessorException;
|
|||
import cn.edu.tsinghua.iotdb.exception.FileNodeProcessorException;
|
||||
import cn.edu.tsinghua.iotdb.exception.OverflowProcessorException;
|
||||
import cn.edu.tsinghua.iotdb.exception.PathErrorException;
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager;
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager.IndexType;
|
||||
import cn.edu.tsinghua.iotdb.index.common.DataFileInfo;
|
||||
import cn.edu.tsinghua.iotdb.index.common.IndexManagerException;
|
||||
import cn.edu.tsinghua.iotdb.metadata.ColumnSchema;
|
||||
import cn.edu.tsinghua.iotdb.metadata.MManager;
|
||||
import cn.edu.tsinghua.iotdb.query.engine.QueryForMerge;
|
||||
|
@ -584,6 +589,21 @@ public class FileNodeProcessor extends LRUProcessor {
|
|||
return queryStructure;
|
||||
}
|
||||
|
||||
public List<DataFileInfo> indexQuery(String deltaObjectId, long startTime, long endTime) {
|
||||
List<DataFileInfo> dataFileInfos = new ArrayList<>();
|
||||
for (IntervalFileNode intervalFileNode : newFileNodes) {
|
||||
if (intervalFileNode.isClosed()) {
|
||||
long s1 = intervalFileNode.getStartTime(deltaObjectId);
|
||||
long e1 = intervalFileNode.getEndTime(deltaObjectId);
|
||||
if (e1 >= startTime && (s1 <= endTime || endTime == -1)) {
|
||||
DataFileInfo dataFileInfo = new DataFileInfo(s1, e1, intervalFileNode.filePath);
|
||||
dataFileInfos.add(dataFileInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
return dataFileInfos;
|
||||
}
|
||||
|
||||
public void merge() throws FileNodeProcessorException {
|
||||
|
||||
LOGGER.debug("begin to merge: the filenode is {}, the thread id is {}", nameSpacePath,
|
||||
|
@ -701,6 +721,15 @@ public class FileNodeProcessor extends LRUProcessor {
|
|||
// change status from merge to wait
|
||||
//
|
||||
switchMergeToWaitingv2(backupIntervalFiles, needEmtpy);
|
||||
|
||||
//
|
||||
// merge index begin
|
||||
//
|
||||
mergeIndex();
|
||||
//
|
||||
// merge index end
|
||||
//
|
||||
|
||||
//
|
||||
// change status from wait to work
|
||||
//
|
||||
|
@ -755,6 +784,76 @@ public class FileNodeProcessor extends LRUProcessor {
|
|||
return result;
|
||||
}
|
||||
|
||||
private List<DataFileInfo> getDataFileInfoForIndex(Path path, List<IntervalFileNode> sourceFileNodes) {
|
||||
String deltaObjectId = path.getDeltaObjectToString();
|
||||
List<DataFileInfo> dataFileInfos = new ArrayList<>();
|
||||
for (IntervalFileNode intervalFileNode : sourceFileNodes) {
|
||||
if (intervalFileNode.isClosed()) {
|
||||
if (intervalFileNode.getStartTime(deltaObjectId) != -1) {
|
||||
DataFileInfo dataFileInfo = new DataFileInfo(intervalFileNode.getStartTime
|
||||
(deltaObjectId),
|
||||
intervalFileNode.getEndTime(deltaObjectId), intervalFileNode.filePath);
|
||||
dataFileInfos.add(dataFileInfo);
|
||||
}
|
||||
}
|
||||
}
|
||||
return dataFileInfos;
|
||||
}
|
||||
|
||||
private void mergeIndex() throws FileNodeProcessorException {
|
||||
try {
|
||||
Map<String, Set<IndexType>> allIndexSeries = mManager.getAllIndexPaths(nameSpacePath);
|
||||
if (!allIndexSeries.isEmpty()) {
|
||||
LOGGER.info("merge all file and modify index file, the nameSpacePath is {}, the index path is {}",
|
||||
nameSpacePath, allIndexSeries);
|
||||
for (Entry<String, Set<IndexType>> entry : allIndexSeries.entrySet()) {
|
||||
String series = entry.getKey();
|
||||
Path path = new Path(series);
|
||||
List<DataFileInfo> dataFileInfos = getDataFileInfoForIndex(path, newFileNodes);
|
||||
if (!dataFileInfos.isEmpty()) {
|
||||
try {
|
||||
for (IndexType indexType : entry.getValue())
|
||||
IndexManager.getIndexInstance(indexType).build(path, dataFileInfos, null);
|
||||
} catch (IndexManagerException e) {
|
||||
e.printStackTrace();
|
||||
throw new FileNodeProcessorException(e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (PathErrorException e) {
|
||||
LOGGER.error("failed to find all fileList to be merged." + e.getMessage());
|
||||
throw new FileNodeProcessorException(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private void switchMergeIndex() throws FileNodeProcessorException {
|
||||
try {
|
||||
Map<String, Set<IndexType>> allIndexSeries = mManager.getAllIndexPaths(nameSpacePath);
|
||||
if (!allIndexSeries.isEmpty()) {
|
||||
LOGGER.info("mergeswith all file and modify index file, the nameSpacePath is {}, the index path is {}",
|
||||
nameSpacePath, allIndexSeries);
|
||||
for (Entry<String, Set<IndexType>> entry : allIndexSeries.entrySet()) {
|
||||
String series = entry.getKey();
|
||||
Path path = new Path(series);
|
||||
List<DataFileInfo> dataFileInfos = getDataFileInfoForIndex(path, newFileNodes);
|
||||
if (!dataFileInfos.isEmpty()) {
|
||||
try {
|
||||
for (IndexType indexType : entry.getValue())
|
||||
IndexManager.getIndexInstance(indexType).mergeSwitch(path, dataFileInfos);
|
||||
} catch (IndexManagerException e) {
|
||||
e.printStackTrace();
|
||||
throw new FileNodeProcessorException(e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (PathErrorException e) {
|
||||
LOGGER.error("failed to find all fileList to be mergeSwitch" + e.getMessage());
|
||||
throw new FileNodeProcessorException(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private void switchMergeToWaitingv2(List<IntervalFileNode> backupIntervalFiles, boolean needEmpty)
|
||||
throws FileNodeProcessorException {
|
||||
LOGGER.debug("Merge: the filenode is {}, switch merge to wait, the backupIntervalFiles is {}", nameSpacePath,
|
||||
|
@ -887,6 +986,10 @@ public class FileNodeProcessor extends LRUProcessor {
|
|||
file.delete();
|
||||
}
|
||||
}
|
||||
|
||||
// merge switch
|
||||
switchMergeIndex();
|
||||
|
||||
for (IntervalFileNode fileNode : newFileNodes) {
|
||||
if (fileNode.overflowChangeType != OverflowChangeType.NO_CHANGE) {
|
||||
fileNode.overflowChangeType = OverflowChangeType.CHANGED;
|
||||
|
@ -1104,7 +1207,28 @@ public class FileNodeProcessor extends LRUProcessor {
|
|||
}
|
||||
bufferWriteProcessor.close();
|
||||
bufferWriteProcessor = null;
|
||||
} catch (BufferWriteProcessorException e) {
|
||||
/*
|
||||
* add index for close
|
||||
*/
|
||||
Map<String, Set<IndexType>> allIndexSeries = mManager.getAllIndexPaths(nameSpacePath);
|
||||
|
||||
if (!allIndexSeries.isEmpty()) {
|
||||
LOGGER.info("Close buffer write file and append index file, the nameSpacePath is {}, the index " +
|
||||
"type is {}, the index path is {}",
|
||||
nameSpacePath, "kvindex", allIndexSeries);
|
||||
for (Entry<String, Set<IndexType>> entry : allIndexSeries.entrySet()) {
|
||||
Path path = new Path(entry.getKey());
|
||||
String deltaObjectId = path.getDeltaObjectToString();
|
||||
if (currentIntervalFileNode.getStartTime(deltaObjectId) != -1) {
|
||||
DataFileInfo dataFileInfo = new DataFileInfo(currentIntervalFileNode.getStartTime(deltaObjectId),
|
||||
currentIntervalFileNode.getEndTime(deltaObjectId), currentIntervalFileNode.filePath);
|
||||
for (IndexType indexType : entry.getValue())
|
||||
IndexManager.getIndexInstance(indexType).build(path, dataFileInfo, null);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
} catch (BufferWriteProcessorException | PathErrorException | IndexManagerException e) {
|
||||
e.printStackTrace();
|
||||
throw new FileNodeProcessorException(e);
|
||||
}
|
||||
|
@ -1158,4 +1282,9 @@ public class FileNodeProcessor extends LRUProcessor {
|
|||
return fileNodeProcessorStore;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Rebuilds this file node's index files on restore: re-runs the merge-time
 * index build and then switches the indexes to the rebuilt files.
 *
 * @throws FileNodeProcessorException if either index pass fails
 */
public void rebuildIndex() throws FileNodeProcessorException {
    mergeIndex();
    switchMergeIndex();
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,36 @@
|
|||
package cn.edu.tsinghua.iotdb.index;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.index.common.IndexManagerException;
|
||||
import cn.edu.tsinghua.iotdb.index.kvmatch.KvMatchIndex;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import static cn.edu.tsinghua.iotdb.index.IndexManager.IndexType.KvIndex;
|
||||
|
||||
public class IndexManager {
|
||||
private static Map<IndexType, IoTIndex> indexMap = new HashMap<>();
|
||||
|
||||
static{
|
||||
indexMap.put(KvIndex, KvMatchIndex.getInstance());
|
||||
}
|
||||
|
||||
public static IoTIndex getIndexInstance(IndexType indexType){
|
||||
return indexMap.get(indexType);
|
||||
}
|
||||
|
||||
|
||||
public enum IndexType {
|
||||
KvIndex;
|
||||
public static IndexType getIndexType(String indexNameString) throws IndexManagerException {
|
||||
String normalized = indexNameString.toLowerCase();
|
||||
switch (normalized){
|
||||
case "kvindex":
|
||||
case "kv-match":
|
||||
return KvIndex;
|
||||
default:
|
||||
throw new IndexManagerException("unsupport index type:" + indexNameString);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,11 @@
|
|||
package cn.edu.tsinghua.iotdb.index;
|
||||
|
||||
/**
 * Lightweight descriptor pairing a time series identifier with a flag that
 * records whether an index has been built for that series.
 */
public class IndexMetadata {

    /** Full path of the time series this metadata describes. */
    public String timeseries;

    /** True when an index already exists for {@link #timeseries}. */
    public boolean isIndexExisted;

    /**
     * Creates a descriptor for the given series.
     *
     * @param seriesPath   full path of the time series
     * @param indexExisted whether an index has already been built for it
     */
    public IndexMetadata(String seriesPath, boolean indexExisted) {
        this.timeseries = seriesPath;
        this.isIndexExisted = indexExisted;
    }
}
|
|
@ -0,0 +1,126 @@
|
|||
package cn.edu.tsinghua.iotdb.index;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.index.common.DataFileInfo;
|
||||
import cn.edu.tsinghua.iotdb.index.common.IndexManagerException;
|
||||
import cn.edu.tsinghua.tsfile.common.utils.Pair;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryDataSet;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * The common interface of every index implementation in IoTDB.
 * Implementations are singletons.
 */
public interface IoTIndex {

    /**
     * Initializes the index implementation before first use.
     */
    void init();

    /**
     * Given the file list containing path, creates the index files.
     * Call this method when an index create operation happens or after a merge
     * has produced new files.
     *
     * @param path the time series to be indexed
     * @param fileList the data files containing path
     * @param parameters other parameters
     * @return whether the operation is successful
     * @throws IndexManagerException if the index cannot be built
     */
    boolean build(Path path, List<DataFileInfo> fileList, Map<String, Object> parameters)
            throws IndexManagerException;

    /**
     * Given one new file containing path, creates the index file for it.
     * Call this method after a close operation has completed.
     *
     * @param path the time series to be indexed
     * @param newFile the new data file containing path
     * @param parameters other parameters
     * @return whether the operation is successful
     * @throws IndexManagerException if the index cannot be built
     */
    boolean build(Path path, DataFileInfo newFile, Map<String, Object> parameters)
            throws IndexManagerException;

    /**
     * Given the new file list after a merge, deletes all index files which are
     * not in the list, and switches to the new index files along with the new
     * data files. Call this method after the merge operation has completed.
     * Blocks index read and write during this process.
     *
     * @param newFileList the data files left after the merge operation; the column paths in the
     *                    file list need to build index, some may have no data in some data file
     * @return whether the operation is successful
     * @throws IndexManagerException if the given column path is not correct or some base service occurred error
     */
    boolean mergeSwitch(Path path, List<DataFileInfo> newFileList) throws IndexManagerException;

    /**
     * TODO: contract not yet specified — presumably notifies the index of a
     * newly appended point; confirm with implementations before relying on it.
     *
     * @param path the indexed series
     * @param timestamp the timestamp of the appended point
     * @param value the value of the appended point
     */
    void append(Path path, long timestamp, String value);

    /**
     * TODO: contract not yet specified — presumably notifies the index of a
     * single-point update; confirm with implementations before relying on it.
     *
     * @param path the indexed series
     * @param timestamp the timestamp of the updated point
     * @param value the new value
     */
    void update(Path path, long timestamp, String value);

    /**
     * TODO: contract not yet specified — presumably notifies the index of a
     * range update; confirm with implementations before relying on it.
     *
     * @param path the indexed series
     * @param starttime start of the updated range
     * @param endtime end of the updated range
     * @param value the new value
     */
    void update(Path path, long starttime, long endtime, String value);

    /**
     * TODO: contract not yet specified — presumably notifies the index of a
     * delete; confirm with implementations before relying on it.
     *
     * @param path the indexed series
     * @param timestamp the delete boundary timestamp
     */
    void delete(Path path, long timestamp);

    /**
     * TODO: contract not yet specified — presumably releases resources held by
     * the index implementation.
     *
     * @return whether the operation is successful (TODO: confirm)
     * @throws IndexManagerException if closing fails
     */
    boolean close() throws IndexManagerException;

    /**
     * Drops the index created on path.
     *
     * @param path the column path
     * @return whether the operation is successful
     * @throws IndexManagerException if the index cannot be dropped
     */
    boolean drop(Path path) throws IndexManagerException;

    /**
     * Queries on path with parameters, returning results limited by limitSize.
     *
     * @param path the path to be queried
     * @param parameters the query request with all parameters
     * @param nonUpdateIntervals the time intervals not affected by updates
     * @param limitSize the limitation of number of answers
     * @return the query response
     * @throws IndexManagerException if the query fails
     */
    Object query(Path path, List<Object> parameters, List<Pair<Long, Long>> nonUpdateIntervals, int limitSize)
            throws IndexManagerException;
}
|
|
@ -0,0 +1,54 @@
|
|||
package cn.edu.tsinghua.iotdb.index;
|
||||
|
||||
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
/**
|
||||
* The abstract class for a query request with specific parameters.
|
||||
*
|
||||
* @author Jiaye Wu
|
||||
*/
|
||||
public abstract class QueryRequest {
|
||||
|
||||
protected Path columnPath;
|
||||
|
||||
protected long startTime;
|
||||
|
||||
protected long endTime;
|
||||
|
||||
protected QueryRequest(Path columnPath, long startTime, long endTime) {
|
||||
this.columnPath = columnPath;
|
||||
this.startTime = startTime;
|
||||
this.endTime = endTime;
|
||||
}
|
||||
|
||||
protected QueryRequest(Path columnPath) {
|
||||
this.columnPath = columnPath;
|
||||
this.startTime = 0;
|
||||
this.endTime = Long.MAX_VALUE;
|
||||
}
|
||||
|
||||
public Path getColumnPath() {
|
||||
return columnPath;
|
||||
}
|
||||
|
||||
public void setColumnPath(Path columnPath) {
|
||||
this.columnPath = columnPath;
|
||||
}
|
||||
|
||||
public long getStartTime() {
|
||||
return startTime;
|
||||
}
|
||||
|
||||
public void setStartTime(long startTime) {
|
||||
this.startTime = startTime;
|
||||
}
|
||||
|
||||
public long getEndTime() {
|
||||
return endTime;
|
||||
}
|
||||
|
||||
public void setEndTime(long endTime) {
|
||||
this.endTime = endTime;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
package cn.edu.tsinghua.iotdb.index.common;
|
||||
|
||||
import cn.edu.tsinghua.tsfile.common.utils.Pair;
|
||||
|
||||
/**
|
||||
* The class is used for storing information of a TsFile data file.
|
||||
*
|
||||
* @author Jiaye Wu
|
||||
*/
|
||||
public class DataFileInfo {
|
||||
|
||||
private long startTime;
|
||||
|
||||
private long endTime;
|
||||
|
||||
private String filePath;
|
||||
|
||||
public DataFileInfo(long startTime, long endTime, String filePath) {
|
||||
this.startTime = startTime;
|
||||
this.endTime = endTime;
|
||||
this.filePath = filePath;
|
||||
}
|
||||
|
||||
public long getStartTime() {
|
||||
return startTime;
|
||||
}
|
||||
|
||||
public long getEndTime() {
|
||||
return endTime;
|
||||
}
|
||||
|
||||
public String getFilePath() {
|
||||
return filePath;
|
||||
}
|
||||
|
||||
public Pair<Long, Long> getTimeInterval() {
|
||||
return new Pair<>(this.startTime, this.endTime);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,69 @@
|
|||
package cn.edu.tsinghua.iotdb.index.common;
|
||||
|
||||
import cn.edu.tsinghua.tsfile.common.utils.Pair;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* The class is used for storing information of a TsFile data file.
|
||||
*
|
||||
* @author Jiaye Wu
|
||||
*/
|
||||
public class DataFileMultiSeriesInfo {
|
||||
|
||||
private String filePath;
|
||||
|
||||
private List<Path> columnPaths;
|
||||
|
||||
private List<Pair<Long, Long>> timeRanges;
|
||||
|
||||
public DataFileMultiSeriesInfo(String filePath) {
|
||||
this.filePath = filePath;
|
||||
columnPaths = new ArrayList<>();
|
||||
timeRanges = new ArrayList<>();
|
||||
}
|
||||
|
||||
public DataFileMultiSeriesInfo(String filePath, List<Path> columnPaths, List<Pair<Long, Long>> timeRanges) {
|
||||
this.filePath = filePath;
|
||||
this.columnPaths = columnPaths;
|
||||
this.timeRanges = timeRanges;
|
||||
}
|
||||
|
||||
public void addColumnPath(Path path) {
|
||||
columnPaths.add(path);
|
||||
}
|
||||
|
||||
public void addTimeRanges(Pair<Long, Long> pair) {
|
||||
timeRanges.add(pair);
|
||||
}
|
||||
|
||||
public String getFilePath() {
|
||||
return filePath;
|
||||
}
|
||||
|
||||
public void setFilePath(String filePath) {
|
||||
this.filePath = filePath;
|
||||
}
|
||||
|
||||
public List<Path> getColumnPaths() {
|
||||
return columnPaths;
|
||||
}
|
||||
|
||||
public void setColumnPaths(List<Path> columnPaths) {
|
||||
this.columnPaths = columnPaths;
|
||||
}
|
||||
|
||||
public List<Pair<Long, Long>> getTimeRanges() {
|
||||
return timeRanges;
|
||||
}
|
||||
|
||||
public void setTimeRanges(List<Pair<Long, Long>> timeRanges) {
|
||||
this.timeRanges = timeRanges;
|
||||
}
|
||||
|
||||
public boolean isEmpty() {
|
||||
return columnPaths.isEmpty();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,18 @@
|
|||
package cn.edu.tsinghua.iotdb.index.common;
|
||||
|
||||
/**
 * Checked exception thrown by the index subsystem when an index operation
 * (build, query, drop, merge switch) cannot be completed.
 */
public class IndexManagerException extends Exception {

    private static final long serialVersionUID = 6261687971768311032L;

    /** Creates an exception without a detail message. */
    public IndexManagerException() {
    }

    /**
     * Creates an exception with the given detail message.
     *
     * @param message description of the failure
     */
    public IndexManagerException(String message) {
        super(message);
    }

    /**
     * Creates an exception wrapping the given cause.
     *
     * @param cause the underlying error
     */
    public IndexManagerException(Throwable cause) {
        super(cause);
    }
}
|
|
@ -0,0 +1,56 @@
|
|||
package cn.edu.tsinghua.iotdb.index.common;
|
||||
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.utils.IntervalUtils;
|
||||
import cn.edu.tsinghua.tsfile.common.utils.Pair;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.query.DynamicOneColumnData;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* The class is used for index query, storing overflow data and buffer-write information separately.
|
||||
*
|
||||
* @author CGF, Jiaye Wu
|
||||
*/
|
||||
public class OverflowBufferWriteInfo {
|
||||
|
||||
private DynamicOneColumnData insert;
|
||||
|
||||
private DynamicOneColumnData update;
|
||||
|
||||
// deleteUntil means data deleted when timestamp <= deleteUntil
|
||||
private long deleteUntil;
|
||||
|
||||
private long bufferWriteBeginTime;
|
||||
|
||||
public OverflowBufferWriteInfo(DynamicOneColumnData insert, DynamicOneColumnData update, long deleteUntil, long bufferWriteBeginTime) {
|
||||
this.insert = insert;
|
||||
this.update = update;
|
||||
this.deleteUntil = deleteUntil;
|
||||
this.bufferWriteBeginTime = bufferWriteBeginTime;
|
||||
}
|
||||
|
||||
public List<Pair<Long, Long>> getInsertOrUpdateIntervals() {
|
||||
List<Pair<Long, Long>> insertIntervals = new ArrayList<>();
|
||||
if (insert != null) {
|
||||
for (int i = 0; i < insert.timeLength; i++) {
|
||||
insertIntervals.add(new Pair<>(insert.getTime(i), insert.getTime(i)));
|
||||
}
|
||||
}
|
||||
if (bufferWriteBeginTime < Long.MAX_VALUE) {
|
||||
insertIntervals.add(new Pair<>(bufferWriteBeginTime, Long.MAX_VALUE));
|
||||
insertIntervals = IntervalUtils.sortAndMergePair(insertIntervals);
|
||||
}
|
||||
List<Pair<Long, Long>> updateIntervals = new ArrayList<>();
|
||||
if (update != null) {
|
||||
for (int i = 0; i < update.timeLength; i += 2) {
|
||||
updateIntervals.add(new Pair<>(update.getTime(i), update.getTime(i + 1)));
|
||||
}
|
||||
}
|
||||
return IntervalUtils.union(insertIntervals, updateIntervals);
|
||||
}
|
||||
|
||||
public long getDeleteUntil() {
|
||||
return deleteUntil;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,79 @@
|
|||
package cn.edu.tsinghua.iotdb.index.common;
|
||||
|
||||
import cn.edu.tsinghua.tsfile.common.exception.ProcessorException;
|
||||
import cn.edu.tsinghua.tsfile.common.utils.Pair;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterExpression;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.filter.definition.FilterFactory;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.filter.definition.filterseries.FilterSeries;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.And;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.GtEq;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.filter.definition.operators.LtEq;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryDataSet;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.RowRecord;
|
||||
import cn.edu.tsinghua.iotdb.conf.TsfileDBDescriptor;
|
||||
import cn.edu.tsinghua.iotdb.exception.PathErrorException;
|
||||
import cn.edu.tsinghua.iotdb.query.engine.OverflowQueryEngine;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * This is an iterator wrap class used for multi-batch fetching of a query data
 * set in the query process: it builds a time filter from a list of intervals
 * and re-issues the query to fetch the next batch when the current one is
 * exhausted.
 *
 * @author CGF, Jiaye Wu
 */
public class QueryDataSetIterator {

    // Engine used both for the initial query and for each subsequent batch fetch.
    private OverflowQueryEngine overflowQueryEngine;

    // The current batch; replaced on each refill in hasNext().
    private QueryDataSet queryDataSet;

    // Single-element list holding the queried path.
    private List<Path> pathList;

    // OR-combination of per-interval [left, right] time filters built in the constructor.
    private FilterExpression filterExpression;

    // Read token forwarded to every query call.
    private int readToken;

    /**
     * Builds the interval filter and issues the initial query.
     *
     * @param overflowQueryEngine engine executing the query
     * @param path                the single series to read
     * @param timeIntervals       closed intervals [left, right] to scan
     * @param readToken           read token for the query engine
     * @throws ProcessorException if the engine fails
     * @throws PathErrorException if the path is invalid
     * @throws IOException        on I/O failure
     */
    public QueryDataSetIterator(OverflowQueryEngine overflowQueryEngine, Path path, List<Pair<Long, Long>> timeIntervals,
                                int readToken) throws ProcessorException, PathErrorException, IOException {
        pathList = Collections.singletonList(path);

        // Combine the intervals into one filter: (t in I0) OR (t in I1) OR ...
        // Both interval ends are inclusive (gtEq/ltEq with true).
        for (int i = 0; i < timeIntervals.size(); i++) {
            Pair<Long, Long> pair = timeIntervals.get(i);
            FilterSeries<Long> timeSeries = FilterFactory.timeFilterSeries();
            GtEq gtEq = FilterFactory.gtEq(timeSeries, pair.left, true);
            LtEq ltEq = FilterFactory.ltEq(timeSeries, pair.right, true);
            if (i == 0) {
                filterExpression = FilterFactory.and(gtEq, ltEq);
            } else {
                And tmpAnd = (And) FilterFactory.and(gtEq, ltEq);
                filterExpression = FilterFactory.or(filterExpression, tmpAnd);
            }
        }

        this.overflowQueryEngine = overflowQueryEngine;
        this.readToken = readToken;
        // NOTE(review): the cast to SingleSeriesFilterExpression is only valid for
        // whatever the factory actually returns here — confirm against
        // OverflowQueryEngine.query's expected filter type.
        this.queryDataSet = this.overflowQueryEngine.query(0, pathList, (SingleSeriesFilterExpression) filterExpression, null, null,
                null, TsfileDBDescriptor.getInstance().getConfig().fetchSize, readToken);
        // formNumber++;
    }

    /**
     * Advances the iterator; transparently fetches the next batch from the
     * engine when the current batch is exhausted.
     *
     * @return true if another record is available
     */
    public boolean hasNext() throws IOException, PathErrorException, ProcessorException {
        if (queryDataSet.next()) {
            return true;
        } else {
            // Current batch exhausted: pass the old data set back to the engine
            // to fetch the next batch, then retry.
            queryDataSet = overflowQueryEngine.query(0, pathList, (SingleSeriesFilterExpression) filterExpression, null, null,
                    queryDataSet, TsfileDBDescriptor.getInstance().getConfig().fetchSize, readToken);
            // formNumber++;
            return queryDataSet.next();
        }
    }

    /**
     * Returns the record the iterator is currently positioned on; only valid
     * after hasNext() returned true.
     */
    public RowRecord getRowRecord() {
        return queryDataSet.getCurrentRecord();
    }

}
|
|
@ -0,0 +1,125 @@
|
|||
package cn.edu.tsinghua.iotdb.index.kvmatch;
|
||||
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.common.QueryConfig;
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.utils.SeriesUtils;
|
||||
import cn.edu.tsinghua.iotdb.query.engine.ReadCachePrefix;
|
||||
import cn.edu.tsinghua.tsfile.common.utils.Pair;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryDataSet;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.RowRecord;
|
||||
import cn.edu.tsinghua.iotdb.index.common.QueryDataSetIterator;
|
||||
import cn.edu.tsinghua.iotdb.query.engine.OverflowQueryEngine;
|
||||
import cn.edu.tsinghua.iotdb.query.management.RecordReaderFactory;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Callable;
|
||||
|
||||
/**
|
||||
* The class validates the candidates to find out actual matching results satisfying the query request.
|
||||
*
|
||||
* @author Jiaye Wu
|
||||
*/
|
||||
public class KvMatchCandidateValidator implements Callable<List<Pair<Pair<Long, Long>, Double>>> {

    private static final Logger logger = LoggerFactory.getLogger(KvMatchCandidateValidator.class);

    // The column (time series) whose raw data is scanned to verify the candidates.
    private Path columnPath;

    // Candidate time intervals produced by the index scan; only these ranges are fetched.
    private List<Pair<Long, Long>> scanIntervals;

    // Query parameters: query series, its statistics, epsilon, alpha/beta, normalization flag.
    private QueryConfig queryConfig;

    // Read token identifying this query against the FileNodeManager / record-reader cache.
    private int token;

    /**
     * @param columnPath    column to validate against (copied defensively)
     * @param scanIntervals candidate intervals to scan
     * @param queryConfig   query configuration shared with the index scan
     * @param token         read token of the enclosing query
     */
    public KvMatchCandidateValidator(Path columnPath, List<Pair<Long, Long>> scanIntervals, QueryConfig queryConfig, int token) {
        this.columnPath = new Path(columnPath.getFullPath());
        this.scanIntervals = scanIntervals;
        this.queryConfig = queryConfig;
        this.token = token;
    }

    /**
     * Scans the data inside every candidate interval and computes the (optionally z-normalized)
     * Euclidean distance of each length-lenQ window against the query series, sliding the window
     * over a doubled circular buffer so every window is contiguous in memory.
     *
     * @return matching windows as (interval, distance) pairs, unsorted
     */
    @Override
    public List<Pair<Pair<Long, Long>, Double>> call() throws Exception {
        logger.info("Validating candidate intervals: {}", scanIntervals);

        QueryDataSetIterator queryDataSetIterator = new QueryDataSetIterator(new OverflowQueryEngine(), columnPath, scanIntervals, token);

        List<Pair<Pair<Long, Long>, Double>> result = new ArrayList<>();
        Pair<Long, Double> lastKeyPoint = null;  // carried across intervals so adjacent ranges join smoothly
        for (Pair<Long, Long> scanInterval : scanIntervals) {
            // Collect the raw (time, value) points covering this interval.
            List<Pair<Long, Double>> keyPoints = new ArrayList<>();
            while (queryDataSetIterator.hasNext()) {
                RowRecord row = queryDataSetIterator.getRowRecord();
                double value = SeriesUtils.getValue(row.getFields().get(0)); // one column only
                // Pad the front of the interval when the first data point starts after its left edge.
                if (keyPoints.isEmpty() && row.getTime() > scanInterval.left) {
                    if (lastKeyPoint == null) {
                        keyPoints.add(new Pair<>(scanInterval.left, value));
                    } else {
                        keyPoints.add(lastKeyPoint);
                    }
                }
                keyPoints.add(new Pair<>(row.getTime(), value));
                if (row.getTime() >= scanInterval.right) break;
            }
            // NOTE(review): an empty interval aborts ALL remaining intervals, not just this one —
            // presumably the iterator being exhausted implies no later interval has data; confirm.
            if (keyPoints.isEmpty()) break;
            lastKeyPoint = keyPoints.get(keyPoints.size() - 1);
            // Align/interpolate the key points onto the interval's time grid.
            List<Double> series = SeriesUtils.amend(keyPoints, scanInterval);

            double ex = 0, ex2 = 0;  // running sum and sum-of-squares of the current window
            int lenQ = queryConfig.getQuerySeries().size(), idx = 0;
            double[] circularArray = new double[2 * lenQ];  // doubled so any window is contiguous
            for (int i = 0; i < series.size(); i++) {
                double value = series.get(i);
                ex += value;
                ex2 += value * value;
                circularArray[i % lenQ] = value;
                circularArray[(i % lenQ) + lenQ] = value;

                if (i >= lenQ - 1) {  // a full window is available from here on
                    int j = (i + 1) % lenQ; // the current starting location of T

                    long left = scanInterval.left + i - lenQ + 1;  // window start timestamp
                    if (left == keyPoints.get(idx).left) { // remove non-exist timestamp
                        idx++;

                        if (queryConfig.isNormalization()) {
                            double mean = ex / lenQ; // z
                            double std = Math.sqrt(ex2 / lenQ - mean * mean);

                            // Mean/std pre-filter, then early-abandoning distance computation
                            // (the k-loop stops once dist exceeds epsilon^2).
                            if (Math.abs(mean - queryConfig.getMeanQ()) <= queryConfig.getBeta() && std / queryConfig.getStdQ() <= queryConfig.getBeta() && std / queryConfig.getStdQ() >= 1.0 / queryConfig.getAlpha()) {
                                double dist = 0;
                                for (int k = 0; k < lenQ && dist <= queryConfig.getEpsilon() * queryConfig.getEpsilon(); k++) {
                                    // getOrder() presumably visits positions in an order that abandons early — confirm
                                    double x = (circularArray[(queryConfig.getOrder().get(k) + j)] - mean) / std;
                                    dist += (x - queryConfig.getNormalizedQuerySeries().get(k)) * (x - queryConfig.getNormalizedQuerySeries().get(k));
                                }
                                if (dist <= queryConfig.getEpsilon() * queryConfig.getEpsilon()) {
                                    result.add(new Pair<>(new Pair<>(left, scanInterval.left + i), Math.sqrt(dist)));
                                }
                            }
                        } else {
                            // Plain Euclidean distance with early abandoning.
                            double dist = 0;
                            for (int k = 0; k < lenQ && dist <= queryConfig.getEpsilon() * queryConfig.getEpsilon(); k++) {
                                double x = circularArray[k + j];
                                dist += (x - queryConfig.getQuerySeries().get(k)) * (x - queryConfig.getQuerySeries().get(k));
                            }
                            if (dist <= queryConfig.getEpsilon() * queryConfig.getEpsilon()) {
                                result.add(new Pair<>(new Pair<>(left, scanInterval.left + i), Math.sqrt(dist)));
                            }
                        }
                    }

                    // Slide the window: drop the element that just left it.
                    ex -= circularArray[j];
                    ex2 -= circularArray[j] * circularArray[j];
                }
            }
        }
        // Release the record reader this validator registered through the iterator.
        String prefix = ReadCachePrefix.addQueryPrefix(0);
        RecordReaderFactory.getInstance().removeRecordReader(prefix + columnPath.getDeltaObjectToString(), columnPath.getMeasurementToString());

        logger.info("Finished validating candidate intervals: {}", scanIntervals);
        return result;
    }
}
|
|
@ -0,0 +1,641 @@
|
|||
package cn.edu.tsinghua.iotdb.index.kvmatch;
|
||||
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.KvMatchIndexBuilder;
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.KvMatchQueryExecutor;
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.common.IndexConfig;
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.common.QueryConfig;
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.common.QueryResult;
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.utils.IntervalUtils;
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.utils.SeriesUtils;
|
||||
import cn.edu.tsinghua.iotdb.conf.TsfileDBDescriptor;
|
||||
import cn.edu.tsinghua.iotdb.engine.filenode.FileNodeManager;
|
||||
import cn.edu.tsinghua.iotdb.engine.filenode.SerializeUtil;
|
||||
import cn.edu.tsinghua.iotdb.exception.FileNodeManagerException;
|
||||
import cn.edu.tsinghua.iotdb.exception.PathErrorException;
|
||||
import cn.edu.tsinghua.iotdb.index.IoTIndex;
|
||||
import cn.edu.tsinghua.iotdb.index.common.DataFileInfo;
|
||||
import cn.edu.tsinghua.iotdb.index.common.IndexManagerException;
|
||||
import cn.edu.tsinghua.iotdb.index.common.OverflowBufferWriteInfo;
|
||||
import cn.edu.tsinghua.iotdb.index.common.QueryDataSetIterator;
|
||||
import cn.edu.tsinghua.iotdb.index.utils.IndexFileUtils;
|
||||
import cn.edu.tsinghua.iotdb.metadata.MManager;
|
||||
import cn.edu.tsinghua.iotdb.query.engine.OverflowQueryEngine;
|
||||
import cn.edu.tsinghua.iotdb.query.engine.ReadCachePrefix;
|
||||
import cn.edu.tsinghua.iotdb.query.reader.RecordReader;
|
||||
import cn.edu.tsinghua.iotdb.query.management.RecordReaderFactory;
|
||||
import cn.edu.tsinghua.tsfile.common.exception.ProcessorException;
|
||||
import cn.edu.tsinghua.tsfile.common.utils.Pair;
|
||||
import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType;
|
||||
import cn.edu.tsinghua.tsfile.format.PageHeader;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.basis.TsFile;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.filter.definition.SingleSeriesFilterExpression;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.filter.utils.LongInterval;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.filter.verifier.FilterVerifier;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.PageReader;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.query.DynamicOneColumnData;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryDataSet;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.TsRandomAccessLocalFileReader;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.RowRecord;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.File;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.*;
|
||||
|
||||
/**
|
||||
* kv-match index
|
||||
*/
|
||||
public class KvMatchIndex implements IoTIndex {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(KvMatchIndex.class);
|
||||
private static final SerializeUtil<ConcurrentHashMap<String, IndexConfig>> serializeUtil = new SerializeUtil<>();
|
||||
private static final String CONFIG_FILE_PATH = TsfileDBDescriptor.getInstance().getConfig().indexFileDir + File.separator + ".metadata";
|
||||
private static final int PARALLELISM = Runtime.getRuntime().availableProcessors() - 1;
|
||||
private static final String buildingStatus = ".building";
|
||||
|
||||
private static OverflowQueryEngine overflowQueryEngine;
|
||||
private static ExecutorService executor;
|
||||
private static ConcurrentHashMap<String, IndexConfig> indexConfigStore;
|
||||
|
||||
private KvMatchIndex() {
|
||||
executor = Executors.newFixedThreadPool(PARALLELISM);
|
||||
overflowQueryEngine = new OverflowQueryEngine();
|
||||
try {
|
||||
File file = new File(CONFIG_FILE_PATH);
|
||||
FileUtils.forceMkdirParent(file);
|
||||
indexConfigStore = serializeUtil.deserialize(CONFIG_FILE_PATH).orElse(new ConcurrentHashMap<>());
|
||||
} catch (IOException e) {
|
||||
logger.error(e.getMessage(), e.getCause());
|
||||
}
|
||||
}
|
||||
|
||||
    // Initialization-on-demand holder idiom: the JVM guarantees INSTANCE is created exactly
    // once, lazily, on first access to this nested class — no explicit locking needed.
    private static class KvMatchIndexHolder {

        static final KvMatchIndex INSTANCE = new KvMatchIndex();

    }
|
||||
|
||||
/**
|
||||
* only be used for {@linkplain cn.edu.tsinghua.iotdb.index.IndexManager IndexManager}
|
||||
* @return
|
||||
*/
|
||||
public static KvMatchIndex getInstance() { return KvMatchIndexHolder.INSTANCE; }
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
    @Override
    public void init() {
        // No work needed: all state is set up in the private constructor.
    }
|
||||
|
||||
/**
|
||||
*
|
||||
* Given the file list contain path, create index files.
|
||||
* Call this method when the index create operation happens or the merge file has created.
|
||||
*
|
||||
* @param path the time series to be indexed
|
||||
* @param fileList the file list contain path
|
||||
* @param parameters other parameters
|
||||
* @return whether the operation is successful
|
||||
* @throws IndexManagerException
|
||||
*/
|
||||
@Override
|
||||
public boolean build(Path path, List<DataFileInfo> fileList, Map<String, Object> parameters)
|
||||
throws IndexManagerException {
|
||||
int token = -1;
|
||||
List<String> indexFls = new ArrayList<>();
|
||||
Boolean overall = true;
|
||||
try {
|
||||
// 0. construct index configurations
|
||||
IndexConfig indexConfig = new IndexConfig();
|
||||
if (parameters == null) {
|
||||
indexConfig = indexConfigStore.getOrDefault(path.getFullPath(), new IndexConfig());
|
||||
}
|
||||
else {
|
||||
indexConfig.setWindowLength((int) parameters.getOrDefault(IndexConfig.PARAM_WINDOW_LENGTH, IndexConfig.DEFAULT_WINDOW_LENGTH));
|
||||
indexConfig.setSinceTime((long) parameters.getOrDefault(IndexConfig.PARAM_SINCE_TIME, IndexConfig.DEFAULT_SINCE_TIME));
|
||||
}
|
||||
|
||||
long startTime = indexConfig.getSinceTime();
|
||||
|
||||
// 1. build index for every data file
|
||||
if (fileList.isEmpty()) {
|
||||
// beginQuery fileList contains path, get FileNodeManager.MulPassLock.readLock
|
||||
token = FileNodeManager.getInstance().beginQuery(path.getDeltaObjectToString());
|
||||
fileList = FileNodeManager.getInstance().indexBuildQuery(path, indexConfig.getSinceTime(), -1);
|
||||
}
|
||||
|
||||
// no file to be builded.
|
||||
if (fileList.isEmpty()) {
|
||||
if (overall && parameters != null) {
|
||||
indexConfigStore.put(path.getFullPath(), indexConfig);
|
||||
serializeUtil.serialize(indexConfigStore, CONFIG_FILE_PATH);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
Set<String> existIndexFilePaths = new HashSet<>();
|
||||
File indexFileDir = new File(IndexFileUtils.getIndexFilePathPrefix(fileList.get(0).getFilePath())).getParentFile();
|
||||
File[] indexFiles = indexFileDir.listFiles();
|
||||
if (indexFiles != null) {
|
||||
for (File file : indexFiles) {
|
||||
existIndexFilePaths.add(file.getAbsolutePath());
|
||||
}
|
||||
}
|
||||
|
||||
for (DataFileInfo fileInfo : fileList) {
|
||||
String indexFile = IndexFileUtils.getIndexFilePath(path, fileInfo.getFilePath());
|
||||
|
||||
// 0. test whether the file is new, omit old files
|
||||
if (existIndexFilePaths.contains(indexFile)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (startTime > fileInfo.getEndTime()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
File buildFile = new File(indexFile + buildingStatus);
|
||||
if (buildFile.delete()) {
|
||||
logger.warn("{} delete failed", buildFile.getAbsolutePath());
|
||||
}
|
||||
|
||||
Map<String,Object> map = getDataInTsFile(path, fileInfo.getFilePath());
|
||||
QueryDataSet dataSet = (QueryDataSet)(map.get("data"));
|
||||
Future<Boolean> result = executor.submit(new KvMatchIndexBuilder(indexConfig, path, dataSet, indexFile));
|
||||
|
||||
indexFls.add(indexFile);
|
||||
Boolean rs = result.get();
|
||||
if (!rs) {
|
||||
overall = false;
|
||||
break;
|
||||
}
|
||||
|
||||
TsFile tsfile = (TsFile)(map.get("tsfile"));
|
||||
tsfile.close();
|
||||
}
|
||||
|
||||
if (overall && parameters != null) {
|
||||
indexConfigStore.put(path.getFullPath(), indexConfig);
|
||||
serializeUtil.serialize(indexConfigStore, CONFIG_FILE_PATH);
|
||||
}
|
||||
|
||||
return overall;
|
||||
} catch (FileNodeManagerException | IOException e) {
|
||||
logger.error("failed to build index fileList" + e.getMessage(), e.getCause());
|
||||
throw new IndexManagerException(e);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
logger.error("failed to build index fileList" +e.getMessage(), e.getCause());
|
||||
throw new IndexManagerException(e);
|
||||
} finally {
|
||||
if (token != -1) {
|
||||
try {
|
||||
// endQuery. remove FileNodeManager.MultiPassLock.readLock
|
||||
FileNodeManager.getInstance().endQuery(path.getDeltaObjectToString(), token);
|
||||
} catch (FileNodeManagerException e) {
|
||||
logger.error("failed to unlock ReadLock while building index file" + e.getMessage(), e.getCause());
|
||||
}
|
||||
}
|
||||
if (!overall) {
|
||||
for (String fl : indexFls) {
|
||||
File indexFl = new File(fl);
|
||||
if (!indexFl.delete()) {
|
||||
logger.warn("Can not delete obsolete index file '{}' when build failed", indexFl);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* Given one new file contain path, create the index file
|
||||
* Call this method when the close operation has completed.
|
||||
*
|
||||
* @param path the time series to be indexed
|
||||
* @param newFile the new file contain path
|
||||
* @param parameters other parameters
|
||||
* @return
|
||||
* @throws IndexManagerException
|
||||
*/
|
||||
@Override
|
||||
public boolean build(Path path, DataFileInfo newFile, Map<String, Object> parameters)
|
||||
throws IndexManagerException {
|
||||
try {
|
||||
// 0. construct index configurations
|
||||
IndexConfig indexConfig = new IndexConfig();
|
||||
if (parameters == null) {
|
||||
indexConfig = indexConfigStore.getOrDefault(path.getFullPath(), new IndexConfig());
|
||||
}
|
||||
else {
|
||||
indexConfig.setWindowLength((int) parameters.getOrDefault(IndexConfig.PARAM_WINDOW_LENGTH, IndexConfig.DEFAULT_WINDOW_LENGTH));
|
||||
indexConfig.setSinceTime((long) parameters.getOrDefault(IndexConfig.PARAM_SINCE_TIME, IndexConfig.DEFAULT_SINCE_TIME));
|
||||
}
|
||||
|
||||
long startTime = indexConfig.getSinceTime();
|
||||
|
||||
if (startTime > newFile.getEndTime()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
String indexFile = IndexFileUtils.getIndexFilePath(path, newFile.getFilePath());
|
||||
File indexFl = new File(indexFile);
|
||||
if (indexFl.exists()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
File buildFl = new File(indexFile + buildingStatus);
|
||||
if (buildFl.delete()) {
|
||||
logger.warn("{} delete failed".format(buildFl.getAbsolutePath()));
|
||||
}
|
||||
|
||||
// 1. build index asynchronously
|
||||
Map<String,Object> map = getDataInTsFile(path, newFile.getFilePath());
|
||||
QueryDataSet dataSet = (QueryDataSet)(map.get("data"));
|
||||
Future<Boolean> result = executor.submit(new KvMatchIndexBuilder(indexConfig, path, dataSet, indexFile));
|
||||
result.get();
|
||||
TsFile tsfile = (TsFile)(map.get("tsfile"));
|
||||
tsfile.close();
|
||||
// KvMatchIndexBuilder rs = new KvMatchIndexBuilder(indexConfig, path, dataSet, IndexFileUtils.getIndexFilePath(path, newFile.getFilePath()));
|
||||
// Boolean rr = rs.call();
|
||||
return true;
|
||||
} catch (IOException e) {
|
||||
logger.error("failed to build index file while closing" + e.getMessage(), e.getCause());
|
||||
throw new IndexManagerException(e);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
logger.error("failed to build index file while closing" + e.getMessage(), e.getCause());
|
||||
throw new IndexManagerException(e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Given the new file list after merge, delete all index files which are not in the list,
|
||||
* and switch to the new index files along with the new data files.
|
||||
* Call this method after the merge operation has completed. Block index read and write during this process.
|
||||
*
|
||||
* @param newFileList the data files leaves after the merge operation, the column paths in the file list need to build index, some one may has no data in some data file
|
||||
* @return whether the operation is successful
|
||||
* @throws IndexManagerException if the given column path is not correct or some base service occurred error
|
||||
*/
|
||||
@Override
|
||||
public boolean mergeSwitch(Path path, List<DataFileInfo> newFileList) throws IndexManagerException {
|
||||
Set<String> newIndexFilePathPrefixes = new HashSet<>();
|
||||
for (DataFileInfo fileInfo : newFileList) {
|
||||
newIndexFilePathPrefixes.add(IndexFileUtils.getIndexFilePathPrefix(fileInfo.getFilePath()));
|
||||
}
|
||||
File indexFileDir = new File(IndexFileUtils.getIndexFilePathPrefix(newFileList.get(0).getFilePath())).getParentFile();
|
||||
String suffix = IndexFileUtils.getIndexFilePathSuffix(IndexFileUtils.getIndexFilePath(path, newFileList.get(0).getFilePath()));
|
||||
File[] indexFiles = indexFileDir.listFiles();
|
||||
if (indexFiles != null) {
|
||||
for (File file : indexFiles) {
|
||||
if (suffix.equals(IndexFileUtils.getIndexFilePathSuffix(file)) && !newIndexFilePathPrefixes.contains(IndexFileUtils.getIndexFilePathPrefix(file))) {
|
||||
if (!file.delete()) {
|
||||
logger.warn("Can not delete obsolete index file '{}'", file);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* todo
|
||||
* @param path
|
||||
* @param timestamp
|
||||
* @param value
|
||||
*/
|
||||
    @Override
    public void append(Path path, long timestamp, String value) {
        // TODO: incremental index maintenance on append is not implemented yet.
    }
|
||||
|
||||
/**
|
||||
* todo
|
||||
* @param path
|
||||
* @param timestamp
|
||||
* @param value
|
||||
*/
|
||||
    @Override
    public void update(Path path, long timestamp, String value) {
        // TODO: incremental index maintenance on single-point update is not implemented yet.
    }
|
||||
|
||||
/**
|
||||
* todo
|
||||
* @param path
|
||||
* @param starttime
|
||||
* @param endtime
|
||||
* @param value
|
||||
*/
|
||||
    @Override
    public void update(Path path, long starttime, long endtime, String value) {
        // TODO: incremental index maintenance on range update is not implemented yet.
    }
|
||||
|
||||
/**
|
||||
* todo
|
||||
* @param path
|
||||
* @param timestamp
|
||||
*/
|
||||
    @Override
    public void delete(Path path, long timestamp) {
        // TODO: incremental index maintenance on delete is not implemented yet.
    }
|
||||
|
||||
/**
|
||||
* todo
|
||||
*
|
||||
* @return todo
|
||||
* @throws IndexManagerException
|
||||
*/
|
||||
    @Override
    public boolean close() throws IndexManagerException {
        // Nothing is released here; always reports success.
        // NOTE(review): the static executor is never shut down in this method — confirm
        // whether it is intentionally kept alive for the process lifetime.
        return true;
    }
|
||||
|
||||
/**
|
||||
* drop index on path
|
||||
*
|
||||
* @param path the column path
|
||||
* @return whether the operation is successful
|
||||
* @throws IndexManagerException
|
||||
*/
|
||||
@Override
|
||||
public boolean drop(Path path) throws IndexManagerException {
|
||||
int token = -1;
|
||||
try {
|
||||
// beginQuery fileList contains path, get FileNodeManager.MulPassLock.readLock
|
||||
token = FileNodeManager.getInstance().beginQuery(path.getDeltaObjectToString());
|
||||
|
||||
// startTime=0, endTime=-1 means allTimeInterval
|
||||
List<DataFileInfo> fileInfoList = FileNodeManager.getInstance().indexBuildQuery(path, 0, -1);
|
||||
|
||||
for (DataFileInfo fileInfo : fileInfoList) {
|
||||
logger.info("Deleting index for '{}': [{}, {}] ({})", path, fileInfo.getStartTime(), fileInfo.getEndTime(), fileInfo.getFilePath());
|
||||
|
||||
File indexFile = new File(IndexFileUtils.getIndexFilePath(path, fileInfo.getFilePath()));
|
||||
if (!indexFile.delete()) {
|
||||
logger.warn("Can not delete obsolete index file '{}'", indexFile);
|
||||
}
|
||||
String[] subFilenames = indexFile.getParentFile().list();
|
||||
if (subFilenames == null || subFilenames.length == 0) {
|
||||
if (!indexFile.getParentFile().delete()) {
|
||||
logger.warn("Can not delete obsolete index directory '{}'", indexFile.getParent());
|
||||
}
|
||||
}
|
||||
|
||||
indexConfigStore.remove(path.getFullPath());
|
||||
serializeUtil.serialize(indexConfigStore, CONFIG_FILE_PATH);
|
||||
|
||||
}
|
||||
return true;
|
||||
} catch (FileNodeManagerException | IOException e) {
|
||||
logger.error("failed to drop index file" + e.getMessage(), e.getCause());
|
||||
throw new IndexManagerException(e);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
logger.error("failed to drop index file" + e.getMessage(), e.getCause());
|
||||
throw new IndexManagerException(e);
|
||||
} finally {
|
||||
if (token != -1) {
|
||||
try {
|
||||
// endQuery. remove FileNodeManager.MultiPassLock.readLock
|
||||
FileNodeManager.getInstance().endQuery(path.getDeltaObjectToString(), token);
|
||||
} catch (FileNodeManagerException e) {
|
||||
logger.error("failed to unlock ReadLock while droping index" + e.getMessage(), e.getCause());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* query on path with parameters, return result by limitSize
|
||||
*
|
||||
* @param path the path to be queried
|
||||
* @param parameters the query request with all parameters
|
||||
* @param nonUpdateIntervals the query request with all parameters
|
||||
* @param limitSize the limitation of number of answers
|
||||
* @return the query response
|
||||
*/
|
||||
@Override
|
||||
public Object query(Path path, List<Object> parameters, List<Pair<Long, Long>> nonUpdateIntervals, int limitSize)
|
||||
throws IndexManagerException {
|
||||
int token = -1;
|
||||
try {
|
||||
// beginQuery fileList contains path, get FileNodeManager.MulPassLock.readLock
|
||||
token = FileNodeManager.getInstance().beginQuery(path.getDeltaObjectToString());
|
||||
|
||||
// 0. get configuration from store
|
||||
IndexConfig indexConfig = indexConfigStore.getOrDefault(path.getFullPath(), new IndexConfig());
|
||||
|
||||
// 1. get all parameters
|
||||
long startTime = (long)(parameters.get(0));
|
||||
long endTime = (long)(parameters.get(1));
|
||||
if (endTime == -1) {
|
||||
endTime = Long.MAX_VALUE;
|
||||
}
|
||||
Path queryPath = (Path)(parameters.get(2));
|
||||
long queryStartTime = (long)(parameters.get(3));
|
||||
long queryEndTime = (long)(parameters.get(4));
|
||||
if (queryEndTime == -1) {
|
||||
queryEndTime = Long.MAX_VALUE;
|
||||
}
|
||||
double epsilon = (double)(parameters.get(5));
|
||||
double alpha = (double)(parameters.get(6));
|
||||
double beta = (double)(parameters.get(7));
|
||||
|
||||
// 1. get information of all files containing this column path.
|
||||
List<DataFileInfo> fileInfoList = FileNodeManager.getInstance().indexBuildQuery(path, startTime, endTime);
|
||||
|
||||
// 2. fetch non-indexed ranges from overflow manager
|
||||
OverflowBufferWriteInfo overflowBufferWriteInfo = getDataInBufferWriteSeparateWithOverflow(path, token);
|
||||
List<Pair<Long, Long>> insertOrUpdateIntervals = overflowBufferWriteInfo.getInsertOrUpdateIntervals();
|
||||
|
||||
// 3. propagate query series and configurations
|
||||
List<Double> querySeries = getQuerySeries(queryPath, queryStartTime, queryEndTime, token);
|
||||
if (querySeries.size() < 2 * indexConfig.getWindowLength() - 1) {
|
||||
throw new IllegalArgumentException(String.format("The length of query series should be greater than 2*<window_length>-1. (%s < 2*%s-1=%s)", querySeries.size(), indexConfig.getWindowLength(), (2 * indexConfig.getWindowLength() - 1)));
|
||||
}
|
||||
Pair<Long, Long> validTimeInterval = new Pair<>(Math.max(startTime, Math.max(overflowBufferWriteInfo.getDeleteUntil() + 1, indexConfig.getSinceTime())), endTime);
|
||||
QueryConfig queryConfig = new QueryConfig(indexConfig, querySeries, epsilon, alpha, beta, validTimeInterval);
|
||||
|
||||
// 4. search corresponding index files of data files in the query range
|
||||
List<Future<QueryResult>> futureResults = new ArrayList<>(fileInfoList.size());
|
||||
for (int i = 0; i < fileInfoList.size(); i++) {
|
||||
DataFileInfo fileInfo = fileInfoList.get(i);
|
||||
if (fileInfo.getStartTime() > validTimeInterval.right || fileInfo.getEndTime() < validTimeInterval.left)
|
||||
continue; // exclude deleted, not in query range, non-indexed time intervals
|
||||
File indexFile = new File(IndexFileUtils.getIndexFilePath(path, fileInfo.getFilePath()));
|
||||
if (indexFile.exists()) {
|
||||
KvMatchQueryExecutor queryExecutor = new KvMatchQueryExecutor(queryConfig, path, indexFile.getAbsolutePath());
|
||||
Future<QueryResult> result = executor.submit(queryExecutor);
|
||||
futureResults.add(result);
|
||||
} else { // the index of this file has not been built, this will not happen in normal circumstance (likely to happen between close operation and index building of new file finished)
|
||||
insertOrUpdateIntervals.add(fileInfo.getTimeInterval());
|
||||
}
|
||||
if (i > 0) { // add time intervals between file
|
||||
insertOrUpdateIntervals.add(new Pair<>(fileInfo.getStartTime(), fileInfo.getStartTime()));
|
||||
}
|
||||
}
|
||||
|
||||
// 5. collect query results
|
||||
QueryResult overallResult = new QueryResult();
|
||||
for (Future<QueryResult> result : futureResults) {
|
||||
if (result.get() != null) {
|
||||
overallResult.addCandidateRanges(result.get().getCandidateRanges());
|
||||
}
|
||||
}
|
||||
|
||||
// 6. merge the candidate ranges and non-indexed ranges to produce candidate ranges
|
||||
insertOrUpdateIntervals = IntervalUtils.extendBoth(insertOrUpdateIntervals, querySeries.size());
|
||||
insertOrUpdateIntervals = IntervalUtils.sortAndMergePair(insertOrUpdateIntervals);
|
||||
overallResult.setCandidateRanges(IntervalUtils.sortAndMergePair(overallResult.getCandidateRanges()));
|
||||
overallResult.setCandidateRanges(IntervalUtils.union(overallResult.getCandidateRanges(), insertOrUpdateIntervals));
|
||||
overallResult.setCandidateRanges(IntervalUtils.excludeNotIn(overallResult.getCandidateRanges(), validTimeInterval));
|
||||
logger.trace("Candidates: {}", overallResult.getCandidateRanges());
|
||||
|
||||
// 7. scan the data in candidate ranges to find out actual answers and sort them by distances
|
||||
List<Pair<Long, Long>> scanIntervals = IntervalUtils.extendAndMerge(overallResult.getCandidateRanges(), querySeries.size());
|
||||
List<Pair<Pair<Long, Long>, Double>> answers = validateCandidatesInParallel(scanIntervals, path, queryConfig, token);
|
||||
answers.sort(Comparator.comparingDouble(o -> o.right));
|
||||
logger.trace("Answers: {}", answers);
|
||||
|
||||
return constructQueryDataSet(answers, limitSize);
|
||||
} catch (FileNodeManagerException | InterruptedException | ExecutionException | ProcessorException | IOException | PathErrorException | IllegalArgumentException e) {
|
||||
logger.error("failed to query index" + e.getMessage(), e.getCause());
|
||||
throw new IndexManagerException(e);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
logger.error("failed to query index" + e.getMessage(), e.getCause());
|
||||
throw new IndexManagerException(e);
|
||||
} finally {
|
||||
if (token != -1) {
|
||||
try {
|
||||
// endQuery. remove FileNodeManager.MultiPassLock.readLock
|
||||
FileNodeManager.getInstance().endQuery(path.getDeltaObjectToString(), token);
|
||||
} catch (FileNodeManagerException e) {
|
||||
logger.error("failed to unlock ReadLock while querying index" + e.getMessage(), e.getCause());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private List<Double> getQuerySeries(Path path, long startTime, long endTime, int readToken) throws ProcessorException, PathErrorException, IOException {
|
||||
List<Pair<Long, Long>> timeInterval = new ArrayList<>(Collections.singleton(new Pair<>(startTime, endTime)));
|
||||
QueryDataSetIterator queryDataSetIterator = new QueryDataSetIterator(overflowQueryEngine, path, timeInterval, readToken);
|
||||
List<Pair<Long, Double>> keyPoints = new ArrayList<>();
|
||||
while (queryDataSetIterator.hasNext()) {
|
||||
RowRecord row = queryDataSetIterator.getRowRecord();
|
||||
keyPoints.add(new Pair<>(row.getTime(), SeriesUtils.getValue(row.getFields().get(0))));
|
||||
}
|
||||
String prefix = ReadCachePrefix.addQueryPrefix(0);
|
||||
RecordReaderFactory.getInstance().removeRecordReader(prefix + path.getDeltaObjectToString(), path.getMeasurementToString());
|
||||
if (keyPoints.isEmpty()) {
|
||||
throw new IllegalArgumentException(String.format("There is no value in the given time interval [%s, %s] for the query series %s.", startTime, endTime, path));
|
||||
}
|
||||
return SeriesUtils.amend(keyPoints);
|
||||
}
|
||||
|
||||
private List<Pair<Pair<Long, Long>, Double>> validateCandidatesInParallel(List<Pair<Long, Long>> scanIntervals, Path columnPath, QueryConfig queryConfig, int token) throws ExecutionException, InterruptedException, PathErrorException, ProcessorException, IOException {
|
||||
List<Future<List<Pair<Pair<Long, Long>, Double>>>> futureResults = new ArrayList<>(PARALLELISM);
|
||||
int intervalsPerTask = Math.min(Math.max(1, (int) Math.ceil(1.0 * scanIntervals.size() / PARALLELISM)), (new LongInterval()).v.length / 2 - 2), i = 0; // TODO: change LongInterval.arrayMaxn to public static field
|
||||
while (i < scanIntervals.size()) {
|
||||
List<Pair<Long, Long>> partialScanIntervals = scanIntervals.subList(i, Math.min(scanIntervals.size(), i + intervalsPerTask));
|
||||
i += intervalsPerTask;
|
||||
// schedule validating task
|
||||
KvMatchCandidateValidator validator = new KvMatchCandidateValidator(columnPath, partialScanIntervals, queryConfig, token);
|
||||
Future<List<Pair<Pair<Long, Long>, Double>>> result = executor.submit(validator);
|
||||
futureResults.add(result);
|
||||
}
|
||||
// collect results
|
||||
List<Pair<Pair<Long, Long>, Double>> overallResult = new ArrayList<>();
|
||||
for (Future<List<Pair<Pair<Long, Long>, Double>>> result : futureResults) {
|
||||
if (result.get() != null) {
|
||||
overallResult.addAll(result.get());
|
||||
}
|
||||
}
|
||||
return overallResult;
|
||||
}
|
||||
|
||||
private QueryDataSet constructQueryDataSet(List<Pair<Pair<Long, Long>, Double>> answers, int limitSize) throws IOException, ProcessorException {
|
||||
QueryDataSet dataSet = new QueryDataSet();
|
||||
DynamicOneColumnData startTime = new DynamicOneColumnData(TSDataType.INT64, true);
|
||||
DynamicOneColumnData endTime = new DynamicOneColumnData(TSDataType.INT64, true);
|
||||
DynamicOneColumnData distance = new DynamicOneColumnData(TSDataType.DOUBLE, true);
|
||||
for (int i = 0; i < Math.min(limitSize, answers.size()); i++) {
|
||||
Pair<Pair<Long, Long>, Double> answer = answers.get(i);
|
||||
startTime.putTime(i);
|
||||
startTime.putLong(answer.left.left);
|
||||
endTime.putTime(i);
|
||||
endTime.putLong(answer.left.right);
|
||||
distance.putTime(i);
|
||||
distance.putDouble(answer.right);
|
||||
}
|
||||
dataSet.mapRet.put("Start.Time", startTime); // useless names
|
||||
dataSet.mapRet.put("End.Time", endTime);
|
||||
dataSet.mapRet.put("Distance.", distance);
|
||||
return dataSet;
|
||||
}
|
||||
|
||||
/**
|
||||
* kv-index, get the OverflowData and BufferWriteData separately only in memory.
|
||||
* No use to release read lock, because this method will not use alone.
|
||||
*
|
||||
* @param path kv-index path
|
||||
* @return
|
||||
* @throws PathErrorException
|
||||
* @throws IOException
|
||||
* @throws ProcessorException
|
||||
*/
|
||||
public OverflowBufferWriteInfo getDataInBufferWriteSeparateWithOverflow(Path path, int readToken) throws PathErrorException, IOException, ProcessorException {
|
||||
String deltaObjectUID = path.getDeltaObjectToString();
|
||||
String measurementUID = path.getMeasurementToString();
|
||||
String recordReaderPrefix = ReadCachePrefix.addQueryPrefix(0);
|
||||
|
||||
RecordReader recordReader = RecordReaderFactory.getInstance().
|
||||
getRecordReader(deltaObjectUID, measurementUID, null, null, null, readToken, recordReaderPrefix);
|
||||
|
||||
long bufferWriteBeginTime = Long.MAX_VALUE;
|
||||
if (recordReader.bufferWritePageList != null && recordReader.bufferWritePageList.size() > 0) {
|
||||
PageReader pageReader = new PageReader(recordReader.bufferWritePageList.get(0), recordReader.compressionTypeName);
|
||||
PageHeader pageHeader = pageReader.getNextPageHeader();
|
||||
bufferWriteBeginTime = pageHeader.data_page_header.min_timestamp;
|
||||
} else if (recordReader.insertPageInMemory != null && recordReader.insertPageInMemory.timeLength > 0) {
|
||||
bufferWriteBeginTime = recordReader.insertPageInMemory.getTime(0);
|
||||
}
|
||||
|
||||
DynamicOneColumnData insert = (DynamicOneColumnData) recordReader.overflowInfo.get(0);
|
||||
DynamicOneColumnData update = (DynamicOneColumnData) recordReader.overflowInfo.get(1);
|
||||
SingleSeriesFilterExpression deleteFilter = (SingleSeriesFilterExpression) recordReader.overflowInfo.get(3);
|
||||
long maxDeleteTime = 0;
|
||||
if (deleteFilter != null) {
|
||||
LongInterval interval = (LongInterval) FilterVerifier.create(TSDataType.INT64).getInterval(deleteFilter);
|
||||
if (interval.count > 0) {
|
||||
if (interval.flag[0] && interval.v[0] > 0) {
|
||||
maxDeleteTime = interval.v[0] - 1;
|
||||
} else {
|
||||
maxDeleteTime = interval.v[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
RecordReaderFactory.getInstance().removeRecordReader(recordReaderPrefix + deltaObjectUID, measurementUID);
|
||||
return new OverflowBufferWriteInfo(insert, update, maxDeleteTime < 0 ? 0L : maxDeleteTime, bufferWriteBeginTime);
|
||||
}
|
||||
|
||||
/**
|
||||
* get the data only in file
|
||||
*/
|
||||
public Map<String,Object> getDataInTsFile(Path path, String filePath) throws IOException {
|
||||
TsRandomAccessLocalFileReader input = new TsRandomAccessLocalFileReader(filePath);
|
||||
TsFile readTsFile = new TsFile(input);
|
||||
ArrayList<Path> paths = new ArrayList<>();
|
||||
paths.add(path);
|
||||
Map<String,Object> map = new HashMap<String,Object>();
|
||||
map.put("tsfile", readTsFile);
|
||||
map.put("data", readTsFile.query(paths, null, null));
|
||||
return map;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,211 @@
|
|||
package cn.edu.tsinghua.iotdb.index.kvmatch;
|
||||
|
||||
|
||||
import cn.edu.tsinghua.iotdb.index.QueryRequest;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
/**
|
||||
* An instance of this class represents a query request with specific parameters.
|
||||
*
|
||||
* @author Jiaye Wu
|
||||
*/
|
||||
public class KvMatchQueryRequest extends QueryRequest {
|
||||
|
||||
private Path queryPath;
|
||||
|
||||
private long queryStartTime;
|
||||
|
||||
private long queryEndTime;
|
||||
|
||||
private double epsilon;
|
||||
|
||||
private double alpha;
|
||||
|
||||
private double beta;
|
||||
|
||||
/**
|
||||
* Private constructor used by the nested Builder class.
|
||||
*
|
||||
* @param builder builder used to create this query request
|
||||
*/
|
||||
private KvMatchQueryRequest(final Builder builder) {
|
||||
super(builder.columnPath, builder.startTime, builder.endTime);
|
||||
this.epsilon = builder.epsilon;
|
||||
this.alpha = builder.alpha;
|
||||
this.beta = builder.beta;
|
||||
this.queryPath = builder.queryPath;
|
||||
this.queryStartTime = builder.queryStartTime;
|
||||
this.queryEndTime = builder.queryEndTime;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a {@link KvMatchQueryRequest.Builder} to create an {@link KvMatchQueryRequest} using descriptive methods.
|
||||
*
|
||||
* @return a new {@link KvMatchQueryRequest.Builder} instance
|
||||
*/
|
||||
public static KvMatchQueryRequest.Builder builder(Path columnPath, Path queryPath, long queryStartTime, long queryEndTime, double epsilon) {
|
||||
return new Builder(columnPath, queryPath, queryStartTime, queryEndTime, epsilon);
|
||||
}
|
||||
|
||||
public Path getQueryPath() {
|
||||
return queryPath;
|
||||
}
|
||||
|
||||
public void setQueryPath(Path queryPath) {
|
||||
this.queryPath = queryPath;
|
||||
}
|
||||
|
||||
public long getQueryStartTime() {
|
||||
return queryStartTime;
|
||||
}
|
||||
|
||||
public void setQueryStartTime(long queryStartTime) {
|
||||
this.queryStartTime = queryStartTime;
|
||||
}
|
||||
|
||||
public long getQueryEndTime() {
|
||||
return queryEndTime;
|
||||
}
|
||||
|
||||
public void setQueryEndTime(long queryEndTime) {
|
||||
this.queryEndTime = queryEndTime;
|
||||
}
|
||||
|
||||
public double getEpsilon() {
|
||||
return epsilon;
|
||||
}
|
||||
|
||||
public void setEpsilon(double epsilon) {
|
||||
this.epsilon = epsilon;
|
||||
}
|
||||
|
||||
public double getAlpha() {
|
||||
return alpha;
|
||||
}
|
||||
|
||||
public void setAlpha(double alpha) {
|
||||
this.alpha = alpha;
|
||||
}
|
||||
|
||||
public double getBeta() {
|
||||
return beta;
|
||||
}
|
||||
|
||||
public void setBeta(double beta) {
|
||||
this.beta = beta;
|
||||
}
|
||||
|
||||
/**
|
||||
* A nested builder class to create <code>KvMatchQueryRequest</code> instances using descriptive methods.
|
||||
* <p>
|
||||
* Example usage:
|
||||
* <pre>
|
||||
* KvMatchQueryRequest queryRequest = KvMatchQueryRequest.builder(columnPath, querySeries, epsilon)
|
||||
* .alpha(1.0)
|
||||
* .beta(0.0)
|
||||
* .startTime(1500350823)
|
||||
* .endTime(1500350823)
|
||||
* .build();
|
||||
* </pre>
|
||||
*/
|
||||
public static final class Builder {
|
||||
|
||||
private Path columnPath;
|
||||
|
||||
private long startTime;
|
||||
|
||||
private long endTime;
|
||||
|
||||
private Path queryPath;
|
||||
|
||||
private long queryStartTime;
|
||||
|
||||
private long queryEndTime;
|
||||
|
||||
private double epsilon;
|
||||
|
||||
private double alpha;
|
||||
|
||||
private double beta;
|
||||
|
||||
/**
|
||||
* Constructs a new <code>Builder</code> with the minimum
|
||||
* required parameters for an <code>KvMatchQueryRequest</code> instance.
|
||||
*
|
||||
* @param columnPath the column path request to query
|
||||
* @param queryPath the column path used to extract pattern series
|
||||
* @param queryStartTime the start time of pattern series in query path
|
||||
* @param queryEndTime the end time of pattern series in query path
|
||||
* @param epsilon the distance threshold
|
||||
*/
|
||||
private Builder(Path columnPath, Path queryPath, long queryStartTime, long queryEndTime, double epsilon) throws IllegalArgumentException {
|
||||
this.columnPath = columnPath;
|
||||
this.queryPath = queryPath;
|
||||
this.queryStartTime = queryStartTime;
|
||||
this.queryEndTime = queryEndTime;
|
||||
this.epsilon = epsilon;
|
||||
this.alpha = 1.0;
|
||||
this.beta = 0.0;
|
||||
this.startTime = 0;
|
||||
this.endTime = Long.MAX_VALUE;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the parameter alpha for the query request
|
||||
*
|
||||
* @param alpha the parameter alpha for the query request
|
||||
* @return this builder, to allow method chaining
|
||||
*/
|
||||
public Builder alpha(final double alpha) {
|
||||
this.alpha = alpha;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the parameter beta for the query request
|
||||
*
|
||||
* @param beta the parameter alpha for the query request
|
||||
* @return this builder, to allow method chaining
|
||||
*/
|
||||
public Builder beta(final double beta) {
|
||||
this.beta = beta;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the start time for the query request
|
||||
*
|
||||
* @param startTime the start time for the query request
|
||||
* @return this builder, to allow method chaining
|
||||
*/
|
||||
public Builder startTime(final long startTime) {
|
||||
this.startTime = startTime;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the end time for the query request
|
||||
*
|
||||
* @param endTime the end time for the query request
|
||||
* @return this builder, to allow method chaining
|
||||
*/
|
||||
public Builder endTime(final long endTime) {
|
||||
this.endTime = endTime;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs an {@link KvMatchQueryRequest} with the values declared by this {@link KvMatchQueryRequest.Builder}.
|
||||
*
|
||||
* @return the new {@link KvMatchQueryRequest}
|
||||
* @throws IllegalArgumentException if either required arguments is illegal or has been set
|
||||
*/
|
||||
public KvMatchQueryRequest build() {
|
||||
if (columnPath == null || queryPath == null || epsilon < 0 ||
|
||||
alpha < 1.0 || beta < 0 || startTime > endTime || queryStartTime > queryEndTime) {
|
||||
throw new IllegalArgumentException("The given query request is not valid!");
|
||||
}
|
||||
return new KvMatchQueryRequest(this);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,44 @@
|
|||
package cn.edu.tsinghua.iotdb.index.utils;
|
||||
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
import cn.edu.tsinghua.iotdb.conf.TsfileDBConfig;
|
||||
import cn.edu.tsinghua.iotdb.conf.TsfileDBDescriptor;
|
||||
|
||||
import java.io.File;
|
||||
|
||||
public class IndexFileUtils {
|
||||
|
||||
private static final String DATA_FILE_PATH, INDEX_FILE_PATH;
|
||||
|
||||
static {
|
||||
TsfileDBConfig config = TsfileDBDescriptor.getInstance().getConfig();
|
||||
DATA_FILE_PATH = File.separator + config.bufferWriteDir + File.separator;
|
||||
INDEX_FILE_PATH = File.separator + config.indexFileDir + File.separator;
|
||||
}
|
||||
|
||||
public static String getIndexFilePath(Path path, String dataFilePath) {
|
||||
String nameSpacePath = new File(dataFilePath).getParentFile().getName();
|
||||
return dataFilePath.replace(DATA_FILE_PATH, INDEX_FILE_PATH) + "-" + path.getFullPath().replace(nameSpacePath + ".","");
|
||||
}
|
||||
|
||||
public static String getIndexFilePathPrefix(String dataFilePath) {
|
||||
return dataFilePath.replace(DATA_FILE_PATH, INDEX_FILE_PATH);
|
||||
}
|
||||
|
||||
public static String getIndexFilePathPrefix(File indexFile) {
|
||||
String str = indexFile.getAbsolutePath();
|
||||
int idx = str.lastIndexOf("-");
|
||||
return idx != -1 ? str.substring(0, idx) : str;
|
||||
}
|
||||
|
||||
public static String getIndexFilePathSuffix(String str) {
|
||||
int idx = str.lastIndexOf("-");
|
||||
return idx != -1 ? str.substring(idx+1) : "";
|
||||
}
|
||||
|
||||
public static String getIndexFilePathSuffix(File indexFile) {
|
||||
String str = indexFile.getAbsolutePath();
|
||||
int idx = str.lastIndexOf("-");
|
||||
return idx != -1 ? str.substring(idx+1) : "";
|
||||
}
|
||||
}
|
|
@ -0,0 +1,126 @@
|
|||
package cn.edu.tsinghua.iotdb.index.utils;
|
||||
|
||||
import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType;
|
||||
import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.sql.Connection;
|
||||
import java.sql.DriverManager;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.Statement;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.ThreadLocalRandom;
|
||||
|
||||
/**
|
||||
* The class generates synthetic data series to TsFileDB for index building.
|
||||
*
|
||||
* @author Jiaye Wu
|
||||
*/
|
||||
public class SyntheticDataGenerator {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(SyntheticDataGenerator.class);
|
||||
|
||||
private static final String CREATE_TIME_SERIES_TEMPLATE = "create timeseries root.turbine.Beijing.%s.%s with datatype=%s,encoding=%s";
|
||||
private static final String INSERT_DATA_TEMPLATE = "insert into root.turbine.Beijing.%s(timestamp,%s) values (%s,%s)";
|
||||
private static final String INSERT_2DATA_TEMPLATE = "insert into root.turbine.Beijing.%s(timestamp,%s,%s) values (%s,%s,%s)";
|
||||
private static final String SET_STORAGE_GROUP_TEMPLATE = "set storage group to root.turbine.Beijing.%s";
|
||||
private static final String CREATE_INDEX_TEMPLATE = "create index on root.turbine.Beijing.%s.%s using kv-match";
|
||||
private static final String CLOSE_TEMPLATE = "close";
|
||||
|
||||
private static final String JDBC_SERVER_URL = "jdbc:tsfile://127.0.0.1:6667/";
|
||||
// private static final String JDBC_SERVER_URL = "jdbc:tsfile://192.168.130.19:6667/";
|
||||
|
||||
private Connection connection = null;
|
||||
|
||||
private String deviceName;
|
||||
private int length;
|
||||
private long timeInterval;
|
||||
|
||||
public SyntheticDataGenerator(String deviceName, int length, long timeInterval) {
|
||||
this.deviceName = deviceName;
|
||||
this.length = length;
|
||||
this.timeInterval = timeInterval;
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws ClassNotFoundException, SQLException, InterruptedException {
|
||||
long time = System.currentTimeMillis();
|
||||
SyntheticDataGenerator generator1 = new SyntheticDataGenerator("d3", 2000000, 10);
|
||||
generator1.start(time);
|
||||
}
|
||||
|
||||
public void start(long t) throws ClassNotFoundException, SQLException {
|
||||
Class.forName("cn.edu.thu.tsfiledb.jdbc.TsfileDriver");
|
||||
connectServer();
|
||||
|
||||
createTimeSeriesMetadata();
|
||||
|
||||
Statement statement = connection.createStatement();
|
||||
double x1 = ThreadLocalRandom.current().nextDouble(-5, 5);
|
||||
double x2 = ThreadLocalRandom.current().nextDouble(-5, 5);
|
||||
for (int i = 1; i <= length; i++) {
|
||||
statement.execute(String.format(INSERT_2DATA_TEMPLATE, deviceName, "Speed", "Energy", t, (int) x1, x2));
|
||||
|
||||
x2 += ThreadLocalRandom.current().nextDouble(-1, 1);
|
||||
x1 += ThreadLocalRandom.current().nextDouble(-1, 1);
|
||||
t += timeInterval;
|
||||
|
||||
if (i % 10000 == 0) {
|
||||
logger.info("{}", i);
|
||||
}
|
||||
if (i % 1000000 == 0) {
|
||||
statement.execute(CLOSE_TEMPLATE);
|
||||
}
|
||||
}
|
||||
|
||||
disconnectServer();
|
||||
}
|
||||
|
||||
private void createTimeSeriesMetadata() throws SQLException {
|
||||
List<String> sqls = new ArrayList<>();
|
||||
sqls.add(String.format(CREATE_TIME_SERIES_TEMPLATE, deviceName, "Speed", TSDataType.INT32, TSEncoding.RLE));
|
||||
sqls.add(String.format(CREATE_TIME_SERIES_TEMPLATE, deviceName, "Energy", TSDataType.FLOAT, TSEncoding.RLE));
|
||||
sqls.add(String.format(SET_STORAGE_GROUP_TEMPLATE, deviceName));
|
||||
sqls.add(String.format(CREATE_INDEX_TEMPLATE, deviceName, "Speed"));
|
||||
sqls.add(String.format(CREATE_INDEX_TEMPLATE, deviceName, "Energy"));
|
||||
executeSQL(sqls);
|
||||
}
|
||||
|
||||
private void connectServer() {
|
||||
try {
|
||||
connection = DriverManager.getConnection(JDBC_SERVER_URL, "root", "root");
|
||||
} catch (SQLException e) {
|
||||
logger.error("Failed to connect the server {} because ", JDBC_SERVER_URL, e);
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
private void disconnectServer() {
|
||||
if (connection != null) {
|
||||
try {
|
||||
connection.close();
|
||||
} catch (SQLException e) {
|
||||
logger.error("Failed to disconnect the server {} because ", JDBC_SERVER_URL, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void executeSQL(List<String> sqls) throws SQLException {
|
||||
if (connection == null) {
|
||||
connectServer();
|
||||
}
|
||||
try {
|
||||
Statement statement = connection.createStatement();
|
||||
for (String sql : sqls) {
|
||||
try {
|
||||
statement.execute(sql);
|
||||
} catch (Exception e) {
|
||||
logger.error("Execute {} failed!", sql, e);
|
||||
}
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
logger.error("Failed to execute {} because ", sqls, e);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -2,11 +2,13 @@ package cn.edu.tsinghua.iotdb.metadata;
|
|||
|
||||
import java.io.Serializable;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager.IndexType;
|
||||
import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType;
|
||||
import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding;
|
||||
import cn.edu.tsinghua.tsfile.format.Encoding;
|
||||
|
||||
public class ColumnSchema implements Serializable {
|
||||
private static final long serialVersionUID = -8257474930341487207L;
|
||||
|
@ -15,14 +17,35 @@ public class ColumnSchema implements Serializable {
|
|||
public TSDataType dataType;
|
||||
public TSEncoding encoding;
|
||||
private Map<String, String> args;
|
||||
private Set<IndexType> indexNameSet;
|
||||
|
||||
public ColumnSchema(String name, TSDataType dataType, TSEncoding encoding) {
|
||||
this.name = name;
|
||||
this.dataType = dataType;
|
||||
this.encoding = encoding;
|
||||
this.args = new HashMap<>();
|
||||
this.indexNameSet = new HashSet<>();
|
||||
}
|
||||
|
||||
public boolean isHasIndex() {
|
||||
return !indexNameSet.isEmpty();
|
||||
}
|
||||
|
||||
public boolean isHasIndex(IndexType indexType) {
|
||||
return indexNameSet.contains(indexType);
|
||||
}
|
||||
|
||||
public Set<IndexType> getIndexSet() {
|
||||
return indexNameSet;
|
||||
}
|
||||
|
||||
|
||||
public void setHasIndex(IndexType indexType) {
|
||||
this.indexNameSet.add(indexType);
|
||||
}
|
||||
|
||||
public void removeIndex(IndexType indexType) { this.indexNameSet.remove(indexType); }
|
||||
|
||||
public void putKeyValueToArgs(String key, String value) {
|
||||
this.args.put(key, value);
|
||||
}
|
||||
|
@ -50,4 +73,5 @@ public class ColumnSchema implements Serializable {
|
|||
public void setArgsMap(Map<String, String> argsMap) {
|
||||
this.args = argsMap;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -9,4 +9,6 @@ public class MetadataOperationType {
|
|||
public final static String DELETE_PATH_FROM_PTREE = "5";
|
||||
public final static String LINK_MNODE_TO_PTREE = "6";
|
||||
public final static String UNLINK_MNODE_FROM_PTREE = "7";
|
||||
public final static String ADD_INDEX_TO_PATH = "8";
|
||||
public final static String DELETE_INDEX_FROM_PATH = "9";
|
||||
}
|
||||
|
|
|
@ -106,6 +106,7 @@ public class QueryProcessor {
|
|||
case LOADDATA:
|
||||
case INSERT:
|
||||
case INDEX:
|
||||
case INDEXQUERY:
|
||||
return operator;
|
||||
case QUERY:
|
||||
case UPDATE:
|
||||
|
@ -113,7 +114,7 @@ public class QueryProcessor {
|
|||
SFWOperator root = (SFWOperator) operator;
|
||||
return optimizeSFWOperator(root, executor);
|
||||
default:
|
||||
throw new LogicalOperatorException("unknown operator type:{}" + operator.getType());
|
||||
throw new LogicalOperatorException("unknown operator type:" + operator.getType());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -94,9 +94,10 @@ public class SQLConstant {
|
|||
public static final int TOK_DELETE = 25;
|
||||
public static final int TOK_UPDATE = 26;
|
||||
public static final int TOK_QUERY = 27;
|
||||
|
||||
|
||||
public static final int TOK_CREATE_INDEX = 31;
|
||||
public static final int TOK_SELECT_INDEX = 32;
|
||||
public static final int TOK_DROP_INDEX = 32;
|
||||
public static final int TOK_QUERY_INDEX = 33;
|
||||
|
||||
public static final int TOK_AUTHOR_CREATE = 41;
|
||||
public static final int TOK_AUTHOR_DROP = 42;
|
||||
|
|
|
@ -16,6 +16,9 @@ import cn.edu.tsinghua.iotdb.engine.filenode.FileNodeManager;
|
|||
import cn.edu.tsinghua.iotdb.exception.ArgsErrorException;
|
||||
import cn.edu.tsinghua.iotdb.exception.FileNodeManagerException;
|
||||
import cn.edu.tsinghua.iotdb.exception.PathErrorException;
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager;
|
||||
import cn.edu.tsinghua.iotdb.index.IoTIndex;
|
||||
import cn.edu.tsinghua.iotdb.index.common.IndexManagerException;
|
||||
import cn.edu.tsinghua.iotdb.metadata.ColumnSchema;
|
||||
import cn.edu.tsinghua.iotdb.metadata.MManager;
|
||||
import cn.edu.tsinghua.iotdb.qp.constant.SQLConstant;
|
||||
|
@ -24,6 +27,7 @@ import cn.edu.tsinghua.iotdb.qp.logical.sys.MetadataOperator;
|
|||
import cn.edu.tsinghua.iotdb.qp.logical.sys.PropertyOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.PhysicalPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.DeletePlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.IndexPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.InsertPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.UpdatePlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.sys.AuthorPlan;
|
||||
|
@ -49,6 +53,7 @@ public class OverflowQPExecutor extends QueryProcessExecutor {
|
|||
private OverflowQueryEngine queryEngine;
|
||||
private FileNodeManager fileNodeManager;
|
||||
private MManager mManager = MManager.getInstance();
|
||||
// private KvMatchIndex kvMatchIndex = KvMatchIndex.getInstance();
|
||||
|
||||
public OverflowQPExecutor() {
|
||||
queryEngine = new OverflowQueryEngine();
|
||||
|
@ -87,12 +92,72 @@ public class OverflowQPExecutor extends QueryProcessExecutor {
|
|||
case PROPERTY:
|
||||
PropertyPlan property = (PropertyPlan) plan;
|
||||
return operateProperty(property);
|
||||
case INDEX:
|
||||
IndexPlan indexPlan = (IndexPlan) plan;
|
||||
return operateIndex(indexPlan);
|
||||
default:
|
||||
throw new UnsupportedOperationException(
|
||||
String.format("operation %s does not support", plan.getOperatorType()));
|
||||
}
|
||||
}
|
||||
|
||||
private boolean operateIndex(IndexPlan indexPlan) throws ProcessorException {
|
||||
switch (indexPlan.getIndexOperatorType()) {
|
||||
case CREATE_INDEX:
|
||||
try {
|
||||
String path = indexPlan.getPaths().get(0).getFullPath();
|
||||
// check path
|
||||
if(!mManager.pathExist(path)){
|
||||
throw new ProcessorException(String.format("The timeseries %s does not exist.", path));
|
||||
}
|
||||
// check storage group
|
||||
mManager.getFileNameByPath(path);
|
||||
// check index
|
||||
if (mManager.checkPathIndex(path, indexPlan.getIndexType())) {
|
||||
throw new ProcessorException(String.format("The timeseries %s has already been indexed.", path));
|
||||
}
|
||||
// create index
|
||||
IoTIndex index = IndexManager.getIndexInstance(indexPlan.getIndexType());
|
||||
if(index == null)
|
||||
throw new IndexManagerException(indexPlan.getIndexType()+" doesn't support");
|
||||
Path indexPath = indexPlan.getPaths().get(0);
|
||||
if (index.build(indexPath, new ArrayList<>(), indexPlan.getParameters())) {
|
||||
mManager.addIndexForOneTimeseries(path,indexPlan.getIndexType());
|
||||
}
|
||||
} catch (IndexManagerException | PathErrorException | IOException e) {
|
||||
e.printStackTrace();
|
||||
throw new ProcessorException(e.getMessage());
|
||||
}
|
||||
break;
|
||||
case DROP_INDEX:
|
||||
try {
|
||||
String path = indexPlan.getPaths().get(0).getFullPath();
|
||||
// check path
|
||||
if(!mManager.pathExist(path)){
|
||||
throw new ProcessorException(String.format("The timeseries %s does not exist.", path));
|
||||
}
|
||||
// check index
|
||||
if (!mManager.checkPathIndex(path, indexPlan.getIndexType())) {
|
||||
throw new ProcessorException(String.format("The timeseries %s hasn't been indexed.", path));
|
||||
}
|
||||
IoTIndex index = IndexManager.getIndexInstance(indexPlan.getIndexType());
|
||||
if(index == null)
|
||||
throw new IndexManagerException(indexPlan.getIndexType()+" doesn't support");
|
||||
Path indexPath = indexPlan.getPaths().get(0);
|
||||
if (index.drop(indexPath)) {
|
||||
mManager.deleteIndexForOneTimeseries(path, indexPlan.getIndexType());
|
||||
}
|
||||
} catch (IndexManagerException | PathErrorException | IOException e) {
|
||||
e.printStackTrace();
|
||||
throw new ProcessorException(e.getMessage());
|
||||
}
|
||||
break;
|
||||
default:
|
||||
throw new ProcessorException(String.format("Not support the index operation %s", indexPlan.getIndexType()));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public TSDataType getSeriesType(Path path) throws PathErrorException {
|
||||
if (path.equals(SQLConstant.RESERVED_TIME))
|
||||
|
@ -129,7 +194,7 @@ public class OverflowQPExecutor extends QueryProcessExecutor {
|
|||
throws ProcessorException {
|
||||
|
||||
try {
|
||||
return queryEngine.query(formNumber, paths, timeFilter, freqFilter, valueFilter, lastData, fetchSize);
|
||||
return queryEngine.query(formNumber, paths, timeFilter, freqFilter, valueFilter, lastData, fetchSize, null);
|
||||
} catch (Exception e) {
|
||||
throw new ProcessorException(e.getMessage());
|
||||
}
|
||||
|
|
|
@ -1,11 +1,14 @@
|
|||
package cn.edu.tsinghua.iotdb.qp.executor;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.exception.PathErrorException;
|
||||
import cn.edu.tsinghua.iotdb.index.kvmatch.KvMatchQueryRequest;
|
||||
import cn.edu.tsinghua.iotdb.metadata.MManager;
|
||||
import cn.edu.tsinghua.iotdb.qp.exception.QueryProcessorException;
|
||||
import cn.edu.tsinghua.iotdb.qp.executor.iterator.MergeQuerySetIterator;
|
||||
import cn.edu.tsinghua.iotdb.qp.executor.iterator.PatternQueryDataSetIterator;
|
||||
import cn.edu.tsinghua.iotdb.qp.executor.iterator.QueryDataSetIterator;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.PhysicalPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.IndexQueryPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.MultiQueryPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.SingleQueryPlan;
|
||||
import cn.edu.tsinghua.iotdb.query.engine.FilterStructure;
|
||||
|
@ -36,6 +39,9 @@ public abstract class QueryProcessExecutor {
|
|||
|
||||
//process MultiQueryPlan
|
||||
public Iterator<QueryDataSet> processQuery(PhysicalPlan plan) throws QueryProcessorException {
|
||||
if(plan instanceof IndexQueryPlan){
|
||||
return ((IndexQueryPlan) plan).fetchQueryDateSet(getFetchSize());
|
||||
}
|
||||
MultiQueryPlan mergeQuery = (MultiQueryPlan) plan;
|
||||
List<SingleQueryPlan> selectPlans = mergeQuery.getSingleQueryPlans();
|
||||
switch (mergeQuery.getType()) {
|
||||
|
|
|
@ -0,0 +1,78 @@
|
|||
package cn.edu.tsinghua.iotdb.qp.executor.iterator;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager;
|
||||
import cn.edu.tsinghua.iotdb.index.IoTIndex;
|
||||
import cn.edu.tsinghua.iotdb.index.common.IndexManagerException;
|
||||
import cn.edu.tsinghua.iotdb.index.kvmatch.KvMatchQueryRequest;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryDataSet;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
import static cn.edu.tsinghua.iotdb.index.IndexManager.IndexType.KvIndex;
|
||||
|
||||
/**
|
||||
* Result wrap for KV-match index query, only return fetchSize number of results in one batch.
|
||||
*
|
||||
* @author Jiaye Wu
|
||||
*/
|
||||
public class PatternQueryDataSetIterator implements Iterator<QueryDataSet> {
|
||||
|
||||
private static IoTIndex kvMatchIndexManager = IndexManager.getIndexInstance(KvIndex);
|
||||
|
||||
private boolean noNext = false;
|
||||
private KvMatchQueryRequest queryRequest;
|
||||
private final int fetchSize;
|
||||
private QueryDataSet data = null;
|
||||
private QueryDataSet useddata = null;
|
||||
|
||||
public PatternQueryDataSetIterator(KvMatchQueryRequest queryRequest, int fetchSize) {
|
||||
this.queryRequest = queryRequest;
|
||||
this.fetchSize = fetchSize;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasNext() {
|
||||
if (useddata != null) {
|
||||
useddata.clear();
|
||||
}
|
||||
if (noNext) {
|
||||
return false;
|
||||
}
|
||||
if (data == null || !data.hasNextRecord()) {
|
||||
try {
|
||||
List<Object> parameters = new ArrayList<>();
|
||||
parameters.add(queryRequest.getStartTime());
|
||||
parameters.add(queryRequest.getEndTime());
|
||||
parameters.add(queryRequest.getQueryPath());
|
||||
parameters.add(queryRequest.getQueryStartTime());
|
||||
parameters.add(queryRequest.getQueryEndTime());
|
||||
parameters.add(queryRequest.getEpsilon());
|
||||
parameters.add(queryRequest.getAlpha());
|
||||
parameters.add(queryRequest.getBeta());
|
||||
data = (QueryDataSet) kvMatchIndexManager.query(queryRequest.getColumnPath(), parameters, null, fetchSize);
|
||||
} catch (IndexManagerException e) {
|
||||
throw new RuntimeException(e.getMessage());
|
||||
}
|
||||
}
|
||||
if (data == null) {
|
||||
throw new RuntimeException(String.format("data is null when query index {}!", queryRequest.getColumnPath()));
|
||||
}
|
||||
// no support batch results
|
||||
noNext = true;
|
||||
if (data.hasNextRecord()) {
|
||||
return true;
|
||||
} else {
|
||||
noNext = true;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public QueryDataSet next() {
|
||||
useddata = data;
|
||||
data = null;
|
||||
return useddata;
|
||||
}
|
||||
}
|
|
@ -58,6 +58,6 @@ public abstract class Operator {
|
|||
OVERFLOWFLUSHEND,
|
||||
BUFFERFLUSHSTART,
|
||||
BUFFERFLUSHEND,
|
||||
INDEX;
|
||||
INDEX, INDEXQUERY;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,19 +1,26 @@
|
|||
package cn.edu.tsinghua.iotdb.qp.logical.crud;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager;
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager.IndexType;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.Operator;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.Operator;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
public final class IndexOperator extends SFWOperator {
|
||||
|
||||
|
||||
private Path path;
|
||||
private Map<String, Integer> parameters;
|
||||
private long startTime;
|
||||
private final IndexOperatorType indexOperatorType;
|
||||
|
||||
public IndexOperator(int tokenIntType) {
|
||||
private final IndexType indexType;
|
||||
|
||||
public IndexOperator(int tokenIntType,IndexOperatorType indexOperatorType, IndexType indexType) {
|
||||
super(tokenIntType);
|
||||
this.indexOperatorType = indexOperatorType;
|
||||
this.indexType = indexType;
|
||||
operatorType = Operator.OperatorType.INDEX;
|
||||
this.parameters = new HashMap<>();
|
||||
}
|
||||
|
@ -21,6 +28,15 @@ public final class IndexOperator extends SFWOperator {
|
|||
public Path getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
public IndexOperatorType getIndexOperatorType(){
|
||||
return indexOperatorType;
|
||||
}
|
||||
|
||||
|
||||
public IndexType getIndexType() {
|
||||
return indexType;
|
||||
}
|
||||
|
||||
public void setPath(Path path) {
|
||||
this.path = path;
|
||||
|
@ -41,4 +57,8 @@ public final class IndexOperator extends SFWOperator {
|
|||
public void setStartTime(long startTime) {
|
||||
this.startTime = startTime;
|
||||
}
|
||||
|
||||
public enum IndexOperatorType {
|
||||
CREATE_INDEX,DROP_INDEX
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,29 @@
|
|||
package cn.edu.tsinghua.iotdb.qp.logical.crud;
|
||||
|
||||
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
public class IndexQueryOperator extends SFWOperator {
|
||||
|
||||
private final IndexManager.IndexType indexType;
|
||||
protected Path path;
|
||||
|
||||
public IndexQueryOperator(int tokenIntType, IndexManager.IndexType indexType) {
|
||||
super(tokenIntType);
|
||||
this.operatorType = OperatorType.INDEXQUERY;
|
||||
this.indexType = indexType;
|
||||
}
|
||||
|
||||
public Path getPath() {
|
||||
return path;
|
||||
}
|
||||
|
||||
public void setPath(Path path) {
|
||||
this.path = path;
|
||||
}
|
||||
|
||||
public IndexManager.IndexType getIndexType() {
|
||||
return indexType;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,78 @@
|
|||
package cn.edu.tsinghua.iotdb.qp.logical.index;
|
||||
|
||||
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.crud.IndexQueryOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.crud.SFWOperator;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
public class KvMatchIndexQueryOperator extends IndexQueryOperator {
|
||||
private Path patternPath;
|
||||
private long startTime;
|
||||
private long endTime;
|
||||
private double epsilon;
|
||||
private double alpha = 1.0;
|
||||
private double beta = 0.0;
|
||||
private boolean hasParameter;
|
||||
|
||||
public KvMatchIndexQueryOperator(int tokenIntType) {
|
||||
super(tokenIntType, IndexManager.IndexType.KvIndex);
|
||||
}
|
||||
|
||||
|
||||
public Path getPatternPath() {
|
||||
return patternPath;
|
||||
}
|
||||
|
||||
public void setPatternPath(Path patternPath) {
|
||||
this.patternPath = patternPath;
|
||||
}
|
||||
|
||||
public long getStartTime() {
|
||||
return startTime;
|
||||
}
|
||||
|
||||
public void setStartTime(long startTime) {
|
||||
this.startTime = startTime;
|
||||
}
|
||||
|
||||
public long getEndTime() {
|
||||
return endTime;
|
||||
}
|
||||
|
||||
public void setEndTime(long endTime) {
|
||||
this.endTime = endTime;
|
||||
}
|
||||
|
||||
public double getEpsilon() {
|
||||
return epsilon;
|
||||
}
|
||||
|
||||
public void setEpsilon(double epsilon) {
|
||||
this.epsilon = epsilon;
|
||||
}
|
||||
|
||||
public double getAlpha() {
|
||||
return alpha;
|
||||
}
|
||||
|
||||
public void setAlpha(double alpha) {
|
||||
this.alpha = alpha;
|
||||
}
|
||||
|
||||
public double getBeta() {
|
||||
return beta;
|
||||
}
|
||||
|
||||
public void setBeta(double beta) {
|
||||
this.beta = beta;
|
||||
}
|
||||
|
||||
// public boolean isHasParameter() {
|
||||
// return hasParameter;
|
||||
// }
|
||||
|
||||
// public void setHasParameter(boolean hasParameter) {
|
||||
// this.hasParameter = hasParameter;
|
||||
// }
|
||||
}
|
|
@ -1,24 +1,45 @@
|
|||
package cn.edu.tsinghua.iotdb.qp.physical.crud;
|
||||
|
||||
import cn.edu.fudan.dsm.kvmatch.iotdb.common.IndexConfig;
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager.IndexType;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.crud.IndexOperator.IndexOperatorType;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.PhysicalPlan;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.PhysicalPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.Operator;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
import static cn.edu.tsinghua.iotdb.qp.logical.Operator.OperatorType.INDEX;
|
||||
|
||||
public class IndexPlan extends PhysicalPlan {
|
||||
public class IndexPlan extends
|
||||
PhysicalPlan {
|
||||
|
||||
private Path path;
|
||||
private Map<String, Integer> parameters;
|
||||
private long startTime;
|
||||
private Map<String, Object> parameters;
|
||||
private final IndexOperatorType indexOperatorType;
|
||||
|
||||
public IndexPlan(Path path, Map<String, Integer> parameters,long startTime) {
|
||||
super(false, Operator.OperatorType.INDEX);
|
||||
|
||||
private final IndexType indexType;
|
||||
|
||||
public IndexPlan(Path path, Map<String, Integer> parameters,long startTime,IndexOperatorType indexOperatorType, IndexType indexType) {
|
||||
super(false, INDEX);
|
||||
this.path = path;
|
||||
this.parameters = parameters;
|
||||
this.startTime = startTime;
|
||||
this.indexType = indexType;
|
||||
this.indexOperatorType = indexOperatorType;
|
||||
this.parameters = new HashMap<>();
|
||||
this.parameters.putAll(parameters);
|
||||
this.parameters.put(IndexConfig.PARAM_SINCE_TIME, startTime);
|
||||
}
|
||||
|
||||
public IndexOperatorType getIndexOperatorType(){
|
||||
return indexOperatorType;
|
||||
}
|
||||
|
||||
|
||||
public IndexType getIndexType() {
|
||||
return indexType;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -30,11 +51,8 @@ public class IndexPlan extends PhysicalPlan {
|
|||
return list;
|
||||
}
|
||||
|
||||
public Map<String, Integer> getParameters() {
|
||||
public Map<String, Object> getParameters() {
|
||||
return parameters;
|
||||
}
|
||||
|
||||
public long getStartTime() {
|
||||
return startTime;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,61 @@
|
|||
package cn.edu.tsinghua.iotdb.qp.physical.crud;
|
||||
|
||||
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager.IndexType;
|
||||
import cn.edu.tsinghua.iotdb.qp.exception.QueryProcessorException;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.Operator;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.PhysicalPlan;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryDataSet;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public abstract class IndexQueryPlan extends PhysicalPlan {
|
||||
|
||||
public IndexType getIndexType() {
|
||||
return indexType;
|
||||
}
|
||||
|
||||
protected final IndexType indexType;
|
||||
protected List<Path> paths;
|
||||
protected long startTime;
|
||||
protected long endTime;
|
||||
protected Map<String, Object> parameters;
|
||||
|
||||
public IndexQueryPlan(Path path, IndexType indexType) {
|
||||
super(true, Operator.OperatorType.INDEXQUERY);
|
||||
this.indexType = indexType;
|
||||
this.paths = new ArrayList<>();
|
||||
paths.add(path);
|
||||
// this.startTime = startTime;
|
||||
// this.endTime = endTime;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<Path> getPaths() {
|
||||
return paths;
|
||||
}
|
||||
|
||||
public long getStartTime() {
|
||||
return startTime;
|
||||
}
|
||||
|
||||
public void setStartTime(long startTime) {
|
||||
this.startTime = startTime;
|
||||
}
|
||||
|
||||
public long getEndTime() {
|
||||
return endTime;
|
||||
}
|
||||
|
||||
public void setEndTime(long endTime) {
|
||||
this.endTime = endTime;
|
||||
}
|
||||
|
||||
public abstract Iterator<QueryDataSet> fetchQueryDateSet(int fetchSize) throws QueryProcessorException;
|
||||
|
||||
public abstract List<String> getColumnHeader();
|
||||
}
|
|
@ -106,6 +106,6 @@ public class MultiQueryPlan extends PhysicalPlan {
|
|||
}
|
||||
|
||||
public enum QueryType {
|
||||
QUERY, AGGREGATION, GROUPBY
|
||||
QUERY, AGGREGATION, GROUPBY, INDEXQUERY
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,125 @@
|
|||
package cn.edu.tsinghua.iotdb.qp.physical.index;
|
||||
|
||||
|
||||
import cn.edu.tsinghua.iotdb.exception.PathErrorException;
|
||||
import cn.edu.tsinghua.iotdb.index.IndexManager.IndexType;
|
||||
import cn.edu.tsinghua.iotdb.index.kvmatch.KvMatchQueryRequest;
|
||||
import cn.edu.tsinghua.iotdb.metadata.MManager;
|
||||
import cn.edu.tsinghua.iotdb.qp.exception.QueryProcessorException;
|
||||
import cn.edu.tsinghua.iotdb.qp.executor.iterator.PatternQueryDataSetIterator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.Operator;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.PhysicalPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.IndexQueryPlan;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryDataSet;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
public class KvMatchIndexQueryPlan extends IndexQueryPlan {
|
||||
private Path patterPath;
|
||||
private double epsilon;
|
||||
private double alpha = 1.0d;
|
||||
private double beta = 0.0d;
|
||||
private boolean hasParameter;
|
||||
private long patterStartTime;
|
||||
private long patterEndTime;
|
||||
|
||||
public KvMatchIndexQueryPlan(Path path, Path patterPath, double epsilon,
|
||||
long patternStartTime, long patternEndTime) {
|
||||
super(path,IndexType.KvIndex);
|
||||
this.patterPath = patterPath;
|
||||
this.epsilon = epsilon;
|
||||
this.patterStartTime = patternStartTime;
|
||||
this.patterEndTime = patternEndTime;
|
||||
}
|
||||
|
||||
public double getAlpha() {
|
||||
return alpha;
|
||||
}
|
||||
|
||||
public void setAlpha(double alpha) {
|
||||
this.alpha = alpha;
|
||||
}
|
||||
|
||||
public double getBeta() {
|
||||
return beta;
|
||||
}
|
||||
|
||||
public void setBeta(double beta) {
|
||||
this.beta = beta;
|
||||
}
|
||||
|
||||
public boolean isHasParameter() {
|
||||
return hasParameter;
|
||||
}
|
||||
|
||||
public void setHasParameter(boolean hasParameter) {
|
||||
this.hasParameter = hasParameter;
|
||||
}
|
||||
|
||||
|
||||
public double getEpsilon() {
|
||||
return epsilon;
|
||||
}
|
||||
|
||||
public Path getPatterPath() {
|
||||
return patterPath;
|
||||
}
|
||||
|
||||
public long getPatternStartTime() {
|
||||
return patterStartTime;
|
||||
}
|
||||
|
||||
public long getPatternEndTime() {
|
||||
return patterEndTime;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<QueryDataSet> fetchQueryDateSet(int fetchSize) throws QueryProcessorException {
|
||||
MManager mManager = MManager.getInstance();
|
||||
// check path and storage group
|
||||
Path path = paths.get(0);
|
||||
if (!mManager.pathExist(path.getFullPath())) {
|
||||
throw new QueryProcessorException(String.format("The timeseries %s does not exist.", path));
|
||||
}
|
||||
try {
|
||||
mManager.getFileNameByPath(path.getFullPath());
|
||||
} catch (PathErrorException e) {
|
||||
e.printStackTrace();
|
||||
throw new QueryProcessorException(e.getMessage());
|
||||
}
|
||||
if (!mManager.pathExist(patterPath.getFullPath())) {
|
||||
throw new QueryProcessorException(String.format("The timeseries %s does not exist.", patterPath));
|
||||
}
|
||||
try {
|
||||
mManager.getFileNameByPath(patterPath.getFullPath());
|
||||
} catch (PathErrorException e) {
|
||||
e.printStackTrace();
|
||||
throw new QueryProcessorException(e.getMessage());
|
||||
}
|
||||
// check index for metadata
|
||||
if (!mManager.checkPathIndex(path.getFullPath(), IndexType.KvIndex)) {
|
||||
throw new QueryProcessorException(String.format("The timeseries %s hasn't been indexed.", path));
|
||||
}
|
||||
KvMatchQueryRequest queryRequest = KvMatchQueryRequest
|
||||
.builder(path, patterPath,
|
||||
patterStartTime, patterEndTime,
|
||||
epsilon)
|
||||
.startTime(startTime).endTime(endTime).build();
|
||||
queryRequest.setAlpha(alpha);
|
||||
queryRequest.setBeta(beta);
|
||||
return new PatternQueryDataSetIterator(queryRequest, fetchSize);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getColumnHeader() {
|
||||
List<String> columns = new ArrayList<>();
|
||||
columns.add(" Start Time");
|
||||
columns.add(" End Time");
|
||||
columns.add(" Distance");
|
||||
return columns;
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load Diff
|
@ -9,10 +9,12 @@ import cn.edu.tsinghua.iotdb.qp.logical.Operator;
|
|||
import cn.edu.tsinghua.iotdb.qp.logical.crud.DeleteOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.crud.FilterOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.crud.IndexOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.crud.IndexQueryOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.crud.InsertOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.crud.QueryOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.crud.SelectOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.crud.UpdateOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.index.KvMatchIndexQueryOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.sys.AuthorOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.sys.LoadDataOperator;
|
||||
import cn.edu.tsinghua.iotdb.qp.logical.sys.MetadataOperator;
|
||||
|
@ -20,10 +22,12 @@ import cn.edu.tsinghua.iotdb.qp.logical.sys.PropertyOperator;
|
|||
import cn.edu.tsinghua.iotdb.qp.physical.PhysicalPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.DeletePlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.IndexPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.IndexQueryPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.InsertPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.MultiQueryPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.SingleQueryPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.UpdatePlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.index.KvMatchIndexQueryPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.sys.AuthorPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.sys.LoadDataPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.sys.MetadataPlan;
|
||||
|
@ -107,14 +111,76 @@ public class PhysicalGenerator {
|
|||
return transformQuery(query);
|
||||
case INDEX:
|
||||
IndexOperator indexOperator = (IndexOperator) operator;
|
||||
IndexPlan indexPlan = new IndexPlan(indexOperator.getPath(), indexOperator.getParameters(),
|
||||
indexOperator.getStartTime());
|
||||
return indexPlan;
|
||||
return new IndexPlan(indexOperator.getPath(), indexOperator.getParameters(),
|
||||
indexOperator.getStartTime(), indexOperator.getIndexOperatorType(), indexOperator.getIndexType());
|
||||
case INDEXQUERY:
|
||||
switch (((IndexQueryOperator) operator).getIndexType()){
|
||||
case KvIndex:
|
||||
KvMatchIndexQueryOperator indexQueryOperator = (KvMatchIndexQueryOperator) operator;
|
||||
KvMatchIndexQueryPlan indexQueryPlan = new KvMatchIndexQueryPlan(indexQueryOperator.getPath(),
|
||||
indexQueryOperator.getPatternPath(), indexQueryOperator.getEpsilon(),
|
||||
indexQueryOperator.getStartTime(), indexQueryOperator.getEndTime());
|
||||
indexQueryPlan.setAlpha(indexQueryOperator.getAlpha());
|
||||
indexQueryPlan.setBeta(indexQueryOperator.getBeta());
|
||||
parseIndexTimeFilter(indexQueryOperator, indexQueryPlan);
|
||||
return indexQueryPlan;
|
||||
default:
|
||||
throw new LogicalOperatorException("not support index type:" + ((IndexQueryOperator) operator).getIndexType());
|
||||
}
|
||||
default:
|
||||
throw new LogicalOperatorException("not supported operator type: " + operator.getType());
|
||||
}
|
||||
}
|
||||
|
||||
private void parseIndexTimeFilter(IndexQueryOperator indexQueryOperator, IndexQueryPlan indexQueryPlan)
|
||||
throws LogicalOperatorException {
|
||||
FilterOperator filterOperator = indexQueryOperator.getFilterOperator();
|
||||
if (filterOperator == null) {
|
||||
indexQueryPlan.setStartTime(0);
|
||||
indexQueryPlan.setEndTime(Long.MAX_VALUE);
|
||||
return;
|
||||
}
|
||||
if (!filterOperator.isSingle() || !filterOperator.getSinglePath().equals(RESERVED_TIME)) {
|
||||
throw new LogicalOperatorException("For index query statement, non-time condition is not allowed in the where clause.");
|
||||
}
|
||||
FilterExpression timeFilter;
|
||||
try {
|
||||
timeFilter = filterOperator.transformToFilterExpression(executor, FilterSeriesType.TIME_FILTER);
|
||||
} catch (QueryProcessorException e) {
|
||||
e.printStackTrace();
|
||||
throw new LogicalOperatorException(e.getMessage());
|
||||
}
|
||||
LongFilterVerifier filterVerifier = (LongFilterVerifier) FilterVerifier.create(TSDataType.INT64);
|
||||
LongInterval longInterval = filterVerifier.getInterval((SingleSeriesFilterExpression) timeFilter);
|
||||
long startTime;
|
||||
long endTime;
|
||||
if (longInterval.count != 2) {
|
||||
throw new LogicalOperatorException("For index query statement, the time filter must be an interval.");
|
||||
}
|
||||
if (longInterval.flag[0]) {
|
||||
startTime = longInterval.v[0];
|
||||
} else {
|
||||
startTime = longInterval.v[0] + 1;
|
||||
}
|
||||
if (longInterval.flag[1]) {
|
||||
endTime = longInterval.v[1];
|
||||
} else {
|
||||
endTime = longInterval.v[1] - 1;
|
||||
}
|
||||
if ((startTime <= 0 && startTime != Long.MIN_VALUE) || endTime <= 0) {
|
||||
throw new LogicalOperatorException("The time of index query must be greater than 0.");
|
||||
}
|
||||
if (startTime == Long.MIN_VALUE) {
|
||||
startTime = 0;
|
||||
}
|
||||
if (endTime >= startTime) {
|
||||
indexQueryPlan.setStartTime(startTime);
|
||||
indexQueryPlan.setEndTime(endTime);
|
||||
} else {
|
||||
throw new LogicalOperatorException("For index query statement, the start time should be greater than end time.");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* for update command, time should have start and end time range.
|
||||
*
|
||||
|
@ -179,8 +245,7 @@ public class PhysicalGenerator {
|
|||
List<FilterOperator> parts = splitFilter(filterOperator);
|
||||
for (FilterOperator filter : parts) {
|
||||
SingleQueryPlan plan = constructSelectPlan(filter, paths, executor);
|
||||
if (plan != null)
|
||||
subPlans.add(plan);
|
||||
subPlans.add(plan);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -57,20 +57,20 @@ public class OverflowQueryEngine {
|
|||
* @throws IOException TsFile read error
|
||||
*/
|
||||
public QueryDataSet query(int formNumber, List<Path> paths, FilterExpression timeFilter, FilterExpression freqFilter,
|
||||
FilterExpression valueFilter, QueryDataSet queryDataSet, int fetchSize)
|
||||
FilterExpression valueFilter, QueryDataSet queryDataSet, int fetchSize, Integer readLock)
|
||||
throws ProcessorException, IOException, PathErrorException {
|
||||
this.formNumber = formNumber;
|
||||
if (queryDataSet != null) {
|
||||
queryDataSet.clear();
|
||||
}
|
||||
if (timeFilter == null && freqFilter == null && valueFilter == null) {
|
||||
return querySeriesWithoutFilter(paths, queryDataSet, fetchSize, null);
|
||||
return querySeriesWithoutFilter(paths, queryDataSet, fetchSize, readLock);
|
||||
} else if (valueFilter != null && valueFilter instanceof CrossSeriesFilterExpression) {
|
||||
return crossSeriesQuery(paths, (SingleSeriesFilterExpression) timeFilter, (SingleSeriesFilterExpression) freqFilter,
|
||||
(CrossSeriesFilterExpression) valueFilter, queryDataSet, fetchSize);
|
||||
} else {
|
||||
return querySeriesUsingFilter(paths, (SingleSeriesFilterExpression) timeFilter, (SingleSeriesFilterExpression) freqFilter,
|
||||
(SingleSeriesFilterExpression) valueFilter, queryDataSet, fetchSize, null);
|
||||
(SingleSeriesFilterExpression) valueFilter, queryDataSet, fetchSize, readLock);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ public class QueryForMerge {
|
|||
if (queryDataSet == null || !queryDataSet.hasNextRecord()) {
|
||||
try {
|
||||
queryDataSet = queryEngine.query(0, pathList, timeFilter, null, null, queryDataSet,
|
||||
TsFileDBConf.fetchSize);
|
||||
TsFileDBConf.fetchSize, null);
|
||||
} catch (ProcessorException | IOException | PathErrorException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
|
|
@ -8,6 +8,7 @@ import java.util.Iterator;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.IndexQueryPlan;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.crud.MultiQueryPlan;
|
||||
import org.apache.thrift.TException;
|
||||
import org.apache.thrift.server.ServerContext;
|
||||
|
@ -66,6 +67,8 @@ import cn.edu.tsinghua.tsfile.common.exception.ProcessorException;
|
|||
import cn.edu.tsinghua.tsfile.timeseries.read.support.Path;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.read.query.QueryDataSet;
|
||||
|
||||
import static cn.edu.tsinghua.iotdb.qp.logical.Operator.OperatorType.INDEXQUERY;
|
||||
|
||||
/**
|
||||
* Thrift RPC implementation at server side
|
||||
*/
|
||||
|
@ -367,8 +370,10 @@ public class TSServiceImpl implements TSIService.Iface, ServerContext {
|
|||
List<String> columns = new ArrayList<>();
|
||||
// Restore column header of aggregate to func(column_name), only
|
||||
// support single aggregate function for now
|
||||
|
||||
if (((MultiQueryPlan)plan).getType() == MultiQueryPlan.QueryType.QUERY) {
|
||||
if (plan.getOperatorType() == INDEXQUERY) {
|
||||
columns = ((IndexQueryPlan)plan).getColumnHeader();
|
||||
}
|
||||
else if (((MultiQueryPlan)plan).getType() == MultiQueryPlan.QueryType.QUERY) {
|
||||
for (Path p : paths) {
|
||||
columns.add(p.getFullPath());
|
||||
}
|
||||
|
@ -384,7 +389,11 @@ public class TSServiceImpl implements TSIService.Iface, ServerContext {
|
|||
columns.add(aggregations.get(i) + "(" + paths.get(i).getFullPath() + ")");
|
||||
}
|
||||
}
|
||||
resp.setOperationType(((MultiQueryPlan)plan).getType().toString());
|
||||
if (plan.getOperatorType() == INDEXQUERY) {
|
||||
resp.setOperationType(INDEXQUERY.toString());
|
||||
} else {
|
||||
resp.setOperationType(((MultiQueryPlan) plan).getType().toString());
|
||||
}
|
||||
TSHandleIdentifier operationId = new TSHandleIdentifier(ByteBuffer.wrap(username.get().getBytes()),
|
||||
ByteBuffer.wrap(("PASS".getBytes())));
|
||||
TSOperationHandle operationHandle;
|
||||
|
|
|
@ -0,0 +1,219 @@
|
|||
package cn.edu.tsinghua.iotdb.performance.index;
|
||||
|
||||
import cn.edu.tsinghua.tsfile.common.conf.TSFileConfig;
|
||||
import cn.edu.tsinghua.tsfile.common.conf.TSFileDescriptor;
|
||||
import cn.edu.tsinghua.tsfile.common.constant.JsonFormatConstant;
|
||||
import cn.edu.tsinghua.tsfile.file.metadata.enums.TSDataType;
|
||||
import cn.edu.tsinghua.tsfile.file.metadata.enums.TSEncoding;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.utils.FileUtils;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.utils.RecordUtils;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.utils.StringContainer;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.write.TsFileWriter;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.write.record.TSRecord;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.write.schema.FileSchema;
|
||||
import org.json.JSONArray;
|
||||
import org.json.JSONObject;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.HashSet;
|
||||
import java.util.Random;
|
||||
import java.util.Set;
|
||||
|
||||
public class GenBigTsFile {
|
||||
private static final Logger LOG = LoggerFactory.getLogger(GenBigTsFile.class);
|
||||
private static TsFileWriter writer;
|
||||
private static String outputDataFile;
|
||||
private static TSFileConfig conf = TSFileDescriptor.getInstance().getConfig();
|
||||
|
||||
private static int setRowGroupSize = conf.groupSizeInByte;
|
||||
// To be configure
|
||||
private static int deviceCount = 1;
|
||||
private static Random random = new Random();
|
||||
// s0:broken line
|
||||
// s1:line
|
||||
// s2:sin
|
||||
// s3:square wave
|
||||
// s4:log
|
||||
private static int[][] brokenLineConfigs = {{100, 1, 100}, {0, -1, 200000}, {10000, 2, 50000}};
|
||||
private static long[][] lineConfigs = {{1L << 32, 1}, {0, -1}, {10000, 2}};
|
||||
private static float[] squareAmplitude = {12.5f, 1273.143f, 1823767.4f};
|
||||
private static float[] squareBaseLine = {25f, 1273.143f, 1823767.4f};
|
||||
private static int[] squareLength = {150, 5000, 20000};
|
||||
// y = A*sin(wt), sinConfigs:w,A
|
||||
private static double[][] sinConfigs = {{0.05, 10}, {0.3, 100}, {2, 50}};
|
||||
private static double[][] sinAbnormalConfigs = {{0.8, 20}, {0.3, 100}, {2, 50}};
|
||||
private static String deltaObjectType = "root.laptop";
|
||||
//s5: random walk
|
||||
private static int[] startNumbers = {100, 1000, 2000};
|
||||
// private static int[] randomBounds = {2, 10, 100};
|
||||
private static int[] randomRadius = {1, 5, 50};
|
||||
private static long defaultStartTimestamp;
|
||||
|
||||
|
||||
private static void getNextRecord(long timestamp, long index) throws IOException {
|
||||
for (int i = 0; i < 1; i++) {
|
||||
StringContainer sc = new StringContainer(",");
|
||||
sc.addTail("root.vehicle.d" + deviceCount, timestamp);
|
||||
if (sensorSet.contains("s0")) {
|
||||
// s0:broken line, int
|
||||
if ((timestamp % brokenLineConfigs[i][2]) == 0)
|
||||
brokenLineConfigs[i][1] = -brokenLineConfigs[i][1];
|
||||
brokenLineConfigs[i][0] += brokenLineConfigs[i][1];
|
||||
sc.addTail("s0", brokenLineConfigs[i][0]);
|
||||
}
|
||||
if (sensorSet.contains("s1")) {
|
||||
// s1:line, long
|
||||
lineConfigs[i][0] += lineConfigs[i][1];
|
||||
if (lineConfigs[i][0] < 0)
|
||||
lineConfigs[i][0] = 0;
|
||||
sc.addTail("s1", lineConfigs[i][0]);
|
||||
}
|
||||
if (sensorSet.contains("s2")) {
|
||||
// s2:square wave, float
|
||||
if ((timestamp % squareLength[i]) == 0)
|
||||
squareAmplitude[i] = -squareAmplitude[i];
|
||||
sc.addTail("s2", squareBaseLine[i] + squareAmplitude[i]);
|
||||
}
|
||||
if (sensorSet.contains("s3")) {
|
||||
// s3:sin, double
|
||||
if (index > 5000 && index < 8000)
|
||||
sc.addTail(
|
||||
"s3",
|
||||
sinAbnormalConfigs[i][1] + sinAbnormalConfigs[i][1]
|
||||
* Math.sin(sinAbnormalConfigs[i][0] * timestamp));
|
||||
else
|
||||
sc.addTail(
|
||||
"s3",
|
||||
sinConfigs[i][1] + sinConfigs[i][1]
|
||||
* Math.sin(sinConfigs[i][0] * timestamp));
|
||||
}
|
||||
if (sensorSet.contains("s5")) {
|
||||
// s3:sin, double
|
||||
startNumbers[i] += random.nextBoolean() ? randomRadius[i] : -randomRadius[i];
|
||||
sc.addTail("s5", startNumbers[i]);
|
||||
}
|
||||
strLines[i] = sc.toString();
|
||||
}
|
||||
}
|
||||
|
||||
private static String[] strLines;
|
||||
|
||||
private static Set<String> sensorSet = new HashSet<>();
|
||||
|
||||
private static void writeToFileByLine(long lineLimit) throws InterruptedException, IOException {
|
||||
writeToFile(Long.MAX_VALUE, lineLimit);
|
||||
}
|
||||
|
||||
private static void writeToFile(long spaceLimit, long lineLimit) throws InterruptedException, IOException {
|
||||
long lineCount = 0;
|
||||
long startTimestamp = defaultStartTimestamp;
|
||||
long endTime;
|
||||
long currentSpace = 0;
|
||||
long startTime = System.currentTimeMillis();
|
||||
while (currentSpace < spaceLimit && lineCount < lineLimit) {
|
||||
if (lineCount % 1000000 == 0) {
|
||||
endTime = System.currentTimeMillis();
|
||||
currentSpace =
|
||||
(long) FileUtils.getLocalFileByte(outputDataFile, FileUtils.Unit.B)
|
||||
+ writer.calculateMemSizeForAllGroup();
|
||||
LOG.info("write line:{},use time:{}s, space:{}", lineCount,
|
||||
(endTime - startTime) / 1000,
|
||||
FileUtils.transformUnit(currentSpace, FileUtils.Unit.MB));
|
||||
|
||||
}
|
||||
getNextRecord(startTimestamp + lineCount, lineCount);
|
||||
try {
|
||||
for (String str : strLines) {
|
||||
// System.out.println(str);
|
||||
TSRecord ts = RecordUtils.parseSimpleTupleRecord(str, fileSchema);
|
||||
writer.write(ts);
|
||||
}
|
||||
} catch (WriteProcessException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
lineCount++;
|
||||
}
|
||||
writer.close();
|
||||
endTime = System.currentTimeMillis();
|
||||
LOG.info("write total:{},use time:{}s", lineCount, (endTime - startTime) / 1000);
|
||||
LOG.info("src file size:{}MB", FileUtils.getLocalFileByte(outputDataFile, FileUtils.Unit.MB));
|
||||
}
|
||||
|
||||
private static JSONObject generateTestSchema() {
|
||||
conf = TSFileDescriptor.getInstance().getConfig();
|
||||
JSONObject s0 = new JSONObject();
|
||||
s0.put(JsonFormatConstant.MEASUREMENT_UID, "s0");
|
||||
s0.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString());
|
||||
s0.put(JsonFormatConstant.MEASUREMENT_ENCODING,
|
||||
conf.valueEncoder);
|
||||
JSONObject s1 = new JSONObject();
|
||||
s1.put(JsonFormatConstant.MEASUREMENT_UID, "s1");
|
||||
s1.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT64.toString());
|
||||
s1.put(JsonFormatConstant.MEASUREMENT_ENCODING,
|
||||
conf.valueEncoder);
|
||||
JSONObject s2 = new JSONObject();
|
||||
s2.put(JsonFormatConstant.MEASUREMENT_UID, "s2");
|
||||
s2.put(JsonFormatConstant.DATA_TYPE, TSDataType.FLOAT.toString());
|
||||
s2.put(JsonFormatConstant.MEASUREMENT_ENCODING,
|
||||
conf.valueEncoder);
|
||||
JSONObject s3 = new JSONObject();
|
||||
s3.put(JsonFormatConstant.MEASUREMENT_UID, "s3");
|
||||
s3.put(JsonFormatConstant.DATA_TYPE, TSDataType.DOUBLE.toString());
|
||||
s3.put(JsonFormatConstant.MEASUREMENT_ENCODING,
|
||||
conf.valueEncoder);
|
||||
JSONObject s4 = new JSONObject();
|
||||
s4.put(JsonFormatConstant.MEASUREMENT_UID, "s4");
|
||||
s4.put(JsonFormatConstant.DATA_TYPE, TSDataType.TEXT.toString());
|
||||
s4.put(JsonFormatConstant.MEASUREMENT_ENCODING,
|
||||
TSEncoding.PLAIN.toString());
|
||||
JSONObject s5 = new JSONObject();
|
||||
s5.put(JsonFormatConstant.MEASUREMENT_UID, "s5");
|
||||
s5.put(JsonFormatConstant.DATA_TYPE, TSDataType.INT32.toString());
|
||||
s5.put(JsonFormatConstant.MEASUREMENT_ENCODING,
|
||||
TSEncoding.RLE.toString());
|
||||
JSONArray measureGroup1 = new JSONArray();
|
||||
if (sensorSet.contains("s0"))
|
||||
measureGroup1.put(s0);
|
||||
if (sensorSet.contains("s1"))
|
||||
measureGroup1.put(s1);
|
||||
if (sensorSet.contains("s2"))
|
||||
measureGroup1.put(s2);
|
||||
if (sensorSet.contains("s3"))
|
||||
measureGroup1.put(s3);
|
||||
if (sensorSet.contains("s4"))
|
||||
measureGroup1.put(s4);
|
||||
if (sensorSet.contains("s5"))
|
||||
measureGroup1.put(s5);
|
||||
|
||||
JSONObject jsonSchema = new JSONObject();
|
||||
jsonSchema.put(JsonFormatConstant.DELTA_TYPE, "test_type");
|
||||
jsonSchema.put(JsonFormatConstant.JSON_SCHEMA, measureGroup1);
|
||||
return jsonSchema;
|
||||
}
|
||||
|
||||
private static FileSchema fileSchema;
|
||||
|
||||
public static void generate(long count, String filename, String[] sensors, int dCount, long startTime) throws IOException, InterruptedException, WriteProcessException {
|
||||
long lineLimit = count;
|
||||
outputDataFile = filename;
|
||||
if (new File(outputDataFile).exists())
|
||||
new File(outputDataFile).delete();
|
||||
sensorSet.clear();
|
||||
for (String sr : sensors) {
|
||||
sensorSet.add(sr);
|
||||
}
|
||||
fileSchema = new FileSchema(generateTestSchema());
|
||||
conf.groupSizeInByte = setRowGroupSize;
|
||||
deviceCount = dCount;
|
||||
strLines = new String[1];
|
||||
// write file
|
||||
defaultStartTimestamp = startTime;
|
||||
writer = new TsFileWriter(new File(outputDataFile), fileSchema, conf);
|
||||
System.out.println("setRowGroupSize: " + setRowGroupSize + ",total target line:" + lineLimit);
|
||||
writeToFileByLine(lineLimit);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,420 @@
|
|||
package cn.edu.tsinghua.iotdb.performance.index;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.conf.TsfileDBConfig;
|
||||
import cn.edu.tsinghua.iotdb.conf.TsfileDBDescriptor;
|
||||
import cn.edu.tsinghua.iotdb.jdbc.TsfileJDBCConfig;
|
||||
import cn.edu.tsinghua.iotdb.jdbc.TsfileSQLException;
|
||||
import cn.edu.tsinghua.iotdb.service.IoTDB;
|
||||
import cn.edu.tsinghua.tsfile.timeseries.write.exception.WriteProcessException;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.sql.Connection;
|
||||
import java.sql.DriverManager;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.Statement;
|
||||
import java.util.Random;
|
||||
|
||||
//unused
|
||||
public class KvIndexPerfTestInstance {
|
||||
private static IoTDB deamon;
|
||||
//We insert all data into one path and create index.
|
||||
private static String path;
|
||||
//Another path to cut off testing path
|
||||
private static int defaultWindowLength;
|
||||
private static long defaultPatternStartPos;
|
||||
private static long defaultPatternLength;
|
||||
private static float defaultThreshold = 0.2f;
|
||||
|
||||
private static final String FOLDER_HEADER = "src/test/tmp";
|
||||
private static int maxOpenFolderPre;
|
||||
private static String overflowDataDirPre;
|
||||
private static String fileNodeDirPre;
|
||||
private static String bufferWriteDirPre;
|
||||
private static String metadataDirPre;
|
||||
private static String derbyHomePre;
|
||||
private static String walFolderPre;
|
||||
private static String indexFileDirPre;
|
||||
|
||||
//TODO to be specified, the last timestamp of this series
|
||||
private static long[] lastTimestamp;
|
||||
private static int timeLen;
|
||||
|
||||
private static long defaultLimitTime = 3000L;
|
||||
|
||||
private static String resultFile;
|
||||
|
||||
private static FileWriter resultWriter;
|
||||
|
||||
public static void setUp() throws Exception {
|
||||
TsfileDBConfig config = TsfileDBDescriptor.getInstance().getConfig();
|
||||
// clearDir(config);
|
||||
overflowDataDirPre = config.overflowDataDir;
|
||||
fileNodeDirPre = config.fileNodeDir;
|
||||
bufferWriteDirPre = config.bufferWriteDir;
|
||||
metadataDirPre = config.metadataDir;
|
||||
derbyHomePre = config.derbyHome;
|
||||
maxOpenFolderPre = config.maxOpenFolder;
|
||||
walFolderPre = config.walFolder;
|
||||
indexFileDirPre = config.indexFileDir;
|
||||
|
||||
config.overflowDataDir = FOLDER_HEADER + "/data/overflow";
|
||||
config.fileNodeDir = FOLDER_HEADER + "/data/digest";
|
||||
config.bufferWriteDir = FOLDER_HEADER + "/data/delta";
|
||||
config.metadataDir = FOLDER_HEADER + "/data/metadata";
|
||||
config.derbyHome = FOLDER_HEADER + "/data/derby";
|
||||
config.walFolder = FOLDER_HEADER + "/data/wals";
|
||||
config.indexFileDir = FOLDER_HEADER + "/data/index";
|
||||
config.maxOpenFolder = 1;
|
||||
|
||||
resultFile = "result.out";
|
||||
|
||||
deamon = IoTDB.getInstance();
|
||||
deamon.active();
|
||||
|
||||
File ff = new File(config.bufferWriteDir);
|
||||
prepareIoTData(ff.exists());
|
||||
}
|
||||
|
||||
private static void clearDir(TsfileDBConfig config) throws IOException {
|
||||
FileUtils.deleteDirectory(new File(config.overflowDataDir));
|
||||
FileUtils.deleteDirectory(new File(config.fileNodeDir));
|
||||
FileUtils.deleteDirectory(new File(config.bufferWriteDir));
|
||||
FileUtils.deleteDirectory(new File(config.metadataDir));
|
||||
FileUtils.deleteDirectory(new File(config.derbyHome));
|
||||
FileUtils.deleteDirectory(new File(config.walFolder));
|
||||
FileUtils.deleteDirectory(new File(config.indexFileDir));
|
||||
FileUtils.deleteDirectory(new File(FOLDER_HEADER + "/data"));
|
||||
}
|
||||
|
||||
private static void prepareIoTData(Boolean exists) throws IOException, InterruptedException, WriteProcessException, ClassNotFoundException, SQLException{
|
||||
String[] sqls = new String[]{
|
||||
"SET STORAGE GROUP TO root.vehicle.d40",
|
||||
"CREATE TIMESERIES root.vehicle.d40.s5 WITH DATATYPE=INT32, ENCODING=RLE",
|
||||
};
|
||||
defaultWindowLength = 500;
|
||||
defaultPatternStartPos = 1;
|
||||
lastTimestamp = new long[]{100000, 200000, 300000, 400000, 500000, 600000, 700000, 800000, 900000, 1000000, 10000000, 100000000, 1000000000, 2000000000, 3000000000l, 4000000000l};
|
||||
defaultPatternLength = 1000;
|
||||
|
||||
timeLen = lastTimestamp.length;
|
||||
int pos = 1;
|
||||
double[][] mul = new double[][]{
|
||||
{0, 0.33, 1.0}, {0, 0.167, 0.5, 1.0}, {0, 0.1, 0.3, 0.6, 1.0}
|
||||
};
|
||||
|
||||
if (exists) return;
|
||||
Class.forName(TsfileJDBCConfig.JDBC_DRIVER_NAME);
|
||||
Connection connection = null;
|
||||
try {
|
||||
connection = DriverManager.getConnection("jdbc:tsfile://127.0.0.1:6667/", "root", "root");
|
||||
Statement statement = connection.createStatement();
|
||||
for (String sql : sqls) {
|
||||
statement.execute(sql);
|
||||
}
|
||||
for (int i = 0;i < 2;i++) {
|
||||
String sql;
|
||||
sql = String.format("SET STORAGE GROUP TO root.vehicle.d%d", i);
|
||||
statement.execute(sql);
|
||||
sql = String.format("CREATE TIMESERIES root.vehicle.d%d.s1 WITH DATATYPE=INT64, ENCODING=RLE", i);
|
||||
statement.execute(sql);
|
||||
sql = String.format("insert into root.vehicle.d%d(timestamp,s1) values(1,1)", i);
|
||||
statement.execute(sql);
|
||||
sql = String.format("insert into root.vehicle.d%d(timestamp,s1) values(%d, 2)", i, lastTimestamp[i]);
|
||||
statement.execute(sql);
|
||||
sql = String.format("insert into root.vehicle.d40(timestamp,s5) values(%d,%d)", pos, pos);
|
||||
statement.execute(sql);
|
||||
pos++;
|
||||
// } else if (j < 4) {
|
||||
// for (int n = 0;n < (j+1);n++) {
|
||||
// sql = String.format("insert into root.vehicle.d%d(timestamp,s5) values(%d,%d)", i, n * lastTimestamp[k] / (j+1) + 1, n * lastTimestamp[k] / (j+1) + 1);
|
||||
// statement.execute(sql);
|
||||
// sql = String.format("insert into root.vehicle.d%d(timestamp,s5) values(%d,%d)", i, (n + 1) * lastTimestamp[k] / (j+1), (n + 1) * lastTimestamp[k] / (j+1));
|
||||
// statement.execute(sql);
|
||||
// sql = String.format("insert into root.vehicle.d40(timestamp,s5) values(%d,%d)", pos, pos);
|
||||
// statement.execute(sql);
|
||||
// pos++;
|
||||
// }
|
||||
// } else {
|
||||
// for (int n = 0;n < (j-2);n++) {
|
||||
// sql = String.format("insert into root.vehicle.d%d(timestamp,s5) values(%d,%d)", i, (long)(lastTimestamp[k] * mul[j-4][n]) + 1, (long)(lastTimestamp[k] * mul[j-4][n]) + 1);
|
||||
// statement.execute(sql);
|
||||
// sql = String.format("insert into root.vehicle.d%d(timestamp,s5) values(%d,%d)", i, (long)(lastTimestamp[k] * mul[j-4][n+1]), (long)(lastTimestamp[k] * mul[j-4][n+1]));
|
||||
// statement.execute(sql);
|
||||
// sql = String.format("insert into root.vehicle.d40(timestamp,s5) values(%d,%d)", pos, pos);
|
||||
// statement.execute(sql);
|
||||
// pos++;
|
||||
// }
|
||||
// }
|
||||
}
|
||||
statement.close();
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
} finally {
|
||||
if (connection != null) {
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
TsfileDBConfig config = TsfileDBDescriptor.getInstance().getConfig();
|
||||
String[][] sensors = new String[][]{
|
||||
{"s5"}, {"s1"}, {"s5","s0"},{"s5","s0","s1"}, {"s5","s0","s1","s2"}
|
||||
};
|
||||
for (int i = 0;i < 2;i++) {
|
||||
File dir = new File(config.bufferWriteDir + "/root.vehicle.d" + i);
|
||||
File[] files = dir.listFiles();
|
||||
if (files.length == 0)
|
||||
continue;
|
||||
String filename = files[0].getAbsolutePath();
|
||||
files[0].delete();
|
||||
long startTime = System.currentTimeMillis(), endTime;
|
||||
GenBigTsFile.generate(lastTimestamp[i], filename, sensors[1], i, 1);
|
||||
endTime = System.currentTimeMillis();
|
||||
double averageTime = (endTime - startTime) / 1000.0;
|
||||
resultWriter = new FileWriter(resultFile, true);
|
||||
resultWriter.write("create_file:\ttype: line\tlength: " + lastTimestamp[i] + "\ttime: " + averageTime + "s\n");
|
||||
resultWriter.close();
|
||||
}
|
||||
}
|
||||
|
||||
public static void tearDown() throws Exception {
|
||||
deamon.stop();
|
||||
Thread.sleep(5000);
|
||||
|
||||
TsfileDBConfig config = TsfileDBDescriptor.getInstance().getConfig();
|
||||
// clearDir(config);
|
||||
config.overflowDataDir = overflowDataDirPre;
|
||||
config.fileNodeDir = fileNodeDirPre;
|
||||
config.bufferWriteDir = bufferWriteDirPre;
|
||||
config.metadataDir = metadataDirPre;
|
||||
config.derbyHome = derbyHomePre;
|
||||
config.maxOpenFolder = maxOpenFolderPre;
|
||||
config.walFolder = walFolderPre;
|
||||
config.indexFileDir = indexFileDirPre;
|
||||
}
|
||||
|
||||
public static void Test() throws IOException, SQLException, ClassNotFoundException {
|
||||
createPerfTest();
|
||||
// queryPerfByVaryTimeRangeTest();
|
||||
// queryPerfByVaryThresholdTest();
|
||||
// queryPerfByVaryPatternLengthTest();
|
||||
// queryPerfByVaryWindowSizeTest();
|
||||
// executeSQL("drop index kvindex on " + path, 0);
|
||||
}
|
||||
|
||||
// @Test
|
||||
public static void createPerfTest() throws IOException, SQLException, ClassNotFoundException {
|
||||
System.out.println("create time cost");
|
||||
//suppose the time range of the path is 0~x, we test the time costs of creating index over 10%, 20%, ...
|
||||
//90% of the whole time range.
|
||||
for (int i = 0; i < 2;i++) {
|
||||
|
||||
// if (i >= timeLen-6 && i < timeLen-4) continue;
|
||||
|
||||
path = "root.vehicle.d" + i + " .s1";
|
||||
|
||||
try {
|
||||
executeSQL("drop index kvindex on " + path, 0);
|
||||
} catch (Exception e) {
|
||||
}
|
||||
|
||||
double averageTime = executeSQL("create index on " + path + " using kvindex with window_length=" +
|
||||
defaultWindowLength + ", " + "since_time=" + 0, 0);
|
||||
|
||||
System.out.println("percent: " + lastTimestamp[i] + "\ttime:" + averageTime);
|
||||
|
||||
double aT = query(String.format("select index kvindex(%s, %s, %s, %s, %s, 1.0, 0.0) from %s",
|
||||
path, path, defaultPatternStartPos, (defaultPatternStartPos + defaultPatternLength - 1), defaultThreshold,
|
||||
path), defaultLimitTime);
|
||||
|
||||
resultWriter = new FileWriter(resultFile, true);
|
||||
resultWriter.write("length_test:\tsingle file:\tlength: " + lastTimestamp[i] + "\ttime: " + averageTime + "s\tquery time: " + aT + "s\n");
|
||||
resultWriter.close();
|
||||
}
|
||||
//finally, create index over the whole time series for the following query test.
|
||||
// executeSQL("create index on " + path + " using kvindex with window_length=" +
|
||||
// defaultWindowLength + ", " + "since_time=0", 0);
|
||||
}
|
||||
|
||||
// @Test
|
||||
public static void queryPerfByVaryPatternLengthTest() throws IOException, SQLException, ClassNotFoundException {
|
||||
System.out.println("query by varying pattern length");
|
||||
//suppose the time range of the path is 0~x, we test the time costs of creating index over 10%, 20%, ...
|
||||
//90% of the whole time range.
|
||||
// for (float i = 0.2f; i < 2f; i += 0.2) {
|
||||
path = "root.vehicle.d10.s1";
|
||||
for (int i = 0; i < 11;i++){
|
||||
int patternLength;
|
||||
|
||||
if (i < 5) {
|
||||
patternLength = (int) (defaultPatternLength * Math.pow(10, (i+1) / 2));
|
||||
if (i % 2 == 1)
|
||||
patternLength /= 2;
|
||||
} else {
|
||||
patternLength = (int) (defaultPatternLength * Math.pow(10, 3));
|
||||
if (i > 5)
|
||||
patternLength *= 2 * (i-5);
|
||||
}
|
||||
|
||||
double averageTime = query(String.format("select index kvindex(%s, %s, %s, %s, %s, 1.0, 0.0) from %s",
|
||||
path, path, defaultPatternStartPos, (defaultPatternStartPos + patternLength - 1), defaultThreshold,
|
||||
path), defaultLimitTime);
|
||||
|
||||
System.out.println("the ratio of pattern length: " + patternLength + "\ttime:" + averageTime);
|
||||
|
||||
resultWriter = new FileWriter(resultFile, true);
|
||||
resultWriter.write("pattern_test:\tsingle file\tlength: " + lastTimestamp[10] + "\tpattern length: " + patternLength + "\ttime: " + averageTime + "s\n");
|
||||
resultWriter.close();
|
||||
}
|
||||
}
|
||||
|
||||
// @Test
|
||||
/* public void queryPerfByVaryThresholdTest() throws SQLException, ClassNotFoundException {
|
||||
System.out.println("query by varying threshold, baseline: " + defaultThreshold);
|
||||
//suppose the time range of the path is 0~x, we test the time costs of creating index over 10%, 20%, ...
|
||||
//90% of the whole time range.
|
||||
for (float i = 0.f; i < 2f; i += 0.2) {
|
||||
float threshold = defaultThreshold * i;
|
||||
|
||||
double averageTime = query(String.format("select index kvindex(%s, %s, %s, %s, %s, 1.0, 0.0) from %s",
|
||||
path, path, defaultPatternStartPos, (defaultPatternStartPos + defaultPatternLength), threshold,
|
||||
path), defaultLimitTime);
|
||||
|
||||
System.out.println("the ratio of pattern threshold: " + i + "\ttime:" + averageTime);
|
||||
}
|
||||
}*/
|
||||
|
||||
// @Test
|
||||
/* public void queryPerfByVaryTimeRangeTest() throws SQLException, ClassNotFoundException {
|
||||
System.out.println("query by varying time range");
|
||||
//suppose the time range of the path is 0~x, we test the time costs of creating index over 10%, 20%, ...
|
||||
//90% of the whole time range.
|
||||
for (float i = 0.2f; i <= 1; i += 0.2f) {
|
||||
double averageTime = query(String.format("select index kvindex(%s, %s, %s, %s, %s, 1.0, 0.0) from %s where time < %s",
|
||||
path, path, defaultPatternStartPos, (defaultPatternStartPos + defaultPatternLength),
|
||||
defaultThreshold,
|
||||
path, (long) (lastTimestamp[0] * i)), defaultLimitTime);
|
||||
|
||||
System.out.println("the ratio of time range: " + i + "\ttime:" + averageTime);
|
||||
}
|
||||
}*/
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
// @Test
|
||||
public static void queryPerfByVaryWindowSizeTest() throws IOException, SQLException, ClassNotFoundException {
|
||||
System.out.println("query by varying window length, baseline: " + defaultWindowLength);
|
||||
path = "root.vehicle.d10.s1";
|
||||
for (int i = 0;i < 19;i++) {
|
||||
try {
|
||||
executeSQL("drop index kvindex on " + path, 0);
|
||||
} catch (Exception e) {
|
||||
}
|
||||
|
||||
int ld;
|
||||
|
||||
if (i < 10) ld = (int) (defaultWindowLength / 100 * (i+1));
|
||||
else ld = (int) (defaultWindowLength / 10 * (i-8));
|
||||
|
||||
double aT = executeSQL("create index on " + path + " using kvindex with window_length=" +
|
||||
ld + ", " + "since_time=0", 0);
|
||||
|
||||
System.out.println("the ratio of window length: " + i + "\tindex time:" + aT);
|
||||
|
||||
double averageTime = query(String.format("select index kvindex(%s, %s, %s, %s, %s, 1.0, 0.0) from %s where time < %s",
|
||||
path, path, defaultPatternStartPos, (defaultPatternStartPos + defaultPatternLength - 1),
|
||||
defaultThreshold, path, lastTimestamp[2]),
|
||||
defaultLimitTime);
|
||||
System.out.println("the ratio of window length: " + i + "\ttime:" + averageTime);
|
||||
|
||||
resultWriter = new FileWriter(resultFile, true);
|
||||
resultWriter.write("window_length_test:\tsingle file\tlength: " + lastTimestamp[10] + "\twindow_length: " + ld + "\tcreate time: " + aT + "s\tquery time:" + averageTime + "s\n");
|
||||
resultWriter.close();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @param sql
|
||||
* @param limitTime
|
||||
* @return total cycle when this function runs over limitTime
|
||||
* @throws ClassNotFoundException
|
||||
* @throws SQLException
|
||||
*/
|
||||
private static double executeSQL(String sql, long limitTime) throws ClassNotFoundException, SQLException {
|
||||
long startTime = System.currentTimeMillis();
|
||||
long endTime = startTime;
|
||||
int cycle = 0;
|
||||
while (endTime - startTime <= limitTime) {
|
||||
Class.forName(TsfileJDBCConfig.JDBC_DRIVER_NAME);
|
||||
Connection connection = null;
|
||||
try {
|
||||
System.out.println("testtest-sql\t" + sql);
|
||||
//长度1,non-query语句
|
||||
connection = DriverManager.getConnection("jdbc:tsfile://127.0.0.1:6667/", "root", "root");
|
||||
Statement statement = connection.createStatement();
|
||||
statement.execute(sql);
|
||||
statement.close();
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
// System.exit(0);
|
||||
} finally {
|
||||
if (connection != null) {
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
endTime = System.currentTimeMillis();
|
||||
cycle++;
|
||||
}
|
||||
return ((double) (endTime - startTime)) / 1000.0 / cycle;
|
||||
}
|
||||
|
||||
private static double query(String querySQL, long limitTime) throws ClassNotFoundException,
|
||||
SQLException {
|
||||
long startTime = System.currentTimeMillis();
|
||||
long endTime = startTime;
|
||||
int cycle = 0;
|
||||
while (endTime - startTime <= limitTime) {
|
||||
Class.forName(TsfileJDBCConfig.JDBC_DRIVER_NAME);
|
||||
Connection connection = null;
|
||||
try {
|
||||
connection = DriverManager.getConnection("jdbc:tsfile://127.0.0.1:6667/", "root", "root");
|
||||
Statement statement = connection.createStatement();
|
||||
try {
|
||||
boolean hasResultSet = statement.execute(querySQL);
|
||||
// System.out.println(hasResultSet + "...");
|
||||
// KvMatchIndexQueryPlan planForHeader = new KvMatchIndexQueryPlan(null, null, 0,0,0);
|
||||
if (hasResultSet) {
|
||||
ResultSet resultSet = statement.getResultSet();
|
||||
while (resultSet.next()) {
|
||||
//don't check anything, just for performance evaluation
|
||||
}
|
||||
}
|
||||
} catch (TsfileSQLException e) {
|
||||
e.printStackTrace();
|
||||
System.exit(0);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
System.exit(0);
|
||||
} finally {
|
||||
if (connection != null) {
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
endTime = System.currentTimeMillis();
|
||||
cycle++;
|
||||
}
|
||||
return ((double) (endTime - startTime)) / 1000 / cycle;
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception{
|
||||
setUp();
|
||||
Test();
|
||||
tearDown();
|
||||
return;
|
||||
}
|
||||
}
|
|
@ -28,29 +28,29 @@ public class IndexTest {
|
|||
@Test
|
||||
public void testCreateIndex() throws QueryProcessorException, ArgsErrorException {
|
||||
|
||||
String createIndex = "create index on root.laptop.d1.s1 using kv-match";
|
||||
String createIndex = "create index on root.laptop.d1.s1 using kvindex";
|
||||
QueryProcessor processor = new QueryProcessor(new MemIntQpExecutor());
|
||||
IndexPlan indexPlan = (IndexPlan) processor.parseSQLToPhysicalPlan(createIndex);
|
||||
assertEquals("root.laptop.d1.s1", indexPlan.getPaths().get(0).getFullPath());
|
||||
assertEquals(0, indexPlan.getParameters().keySet().size());
|
||||
assertEquals(0, indexPlan.getStartTime());
|
||||
assertEquals(1, indexPlan.getParameters().keySet().size());
|
||||
// assertEquals(0, indexPlan.getStartTime());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateIndex2() throws QueryProcessorException, ArgsErrorException{
|
||||
String createIndex = "create index on root.laptop.d1.s1 using kv-match with b=20,a=50 where time>=100";
|
||||
String createIndex = "create index on root.laptop.d1.s1 using kvindex with b=20,a=50 where time>=100";
|
||||
QueryProcessor processor = new QueryProcessor(new MemIntQpExecutor());
|
||||
IndexPlan indexPlan = (IndexPlan) processor.parseSQLToPhysicalPlan(createIndex);
|
||||
assertEquals("root.laptop.d1.s1", indexPlan.getPaths().get(0).getFullPath());
|
||||
assertEquals(2, indexPlan.getParameters().keySet().size());
|
||||
Map<String, Integer> map = indexPlan.getParameters();
|
||||
assertEquals((long)20, (long)map.get("b"));
|
||||
assertEquals((long)50, (long)map.get("a"));
|
||||
assertEquals(100, indexPlan.getStartTime());
|
||||
createIndex = "create index on root.laptop.d1.s1 using kv-match with b=20,a=50 where time>100";
|
||||
assertEquals(3, indexPlan.getParameters().keySet().size());
|
||||
Map<String, Object> map = indexPlan.getParameters();
|
||||
assertEquals(20, map.get("b"));
|
||||
assertEquals(50, map.get("a"));
|
||||
// assertEquals(100, indexPlan.getStartTime());
|
||||
createIndex = "create index on root.laptop.d1.s1 using kvindex with b=20,a=50 where time>100";
|
||||
processor = new QueryProcessor(new MemIntQpExecutor());
|
||||
indexPlan = (IndexPlan) processor.parseSQLToPhysicalPlan(createIndex);
|
||||
assertEquals(101, indexPlan.getStartTime());
|
||||
// assertEquals(101, indexPlan.getStartTime());
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,450 @@
|
|||
package cn.edu.tsinghua.iotdb.service;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.conf.TsfileDBConfig;
|
||||
import cn.edu.tsinghua.iotdb.conf.TsfileDBDescriptor;
|
||||
import cn.edu.tsinghua.iotdb.jdbc.TsfileJDBCConfig;
|
||||
import cn.edu.tsinghua.iotdb.utils.EnvironmentUtils;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.sql.*;
|
||||
|
||||
public class CompleteTest {
|
||||
|
||||
private final String FOLDER_HEADER = "src/test/resources";
|
||||
private static final String TIMESTAMP_STR = "Time";
|
||||
|
||||
private IoTDB deamon;
|
||||
|
||||
private boolean testFlag = TestUtils.testFlag;
|
||||
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
if (testFlag) {
|
||||
deamon = IoTDB.getInstance();
|
||||
deamon.active();
|
||||
EnvironmentUtils.envSetUp();
|
||||
}
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() throws Exception {
|
||||
if (testFlag) {
|
||||
deamon.stop();
|
||||
Thread.sleep(5000);
|
||||
EnvironmentUtils.cleanEnv();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void Test() throws ClassNotFoundException, SQLException {
|
||||
String[] sqls = {"SET STORAGE GROUP TO root.vehicle"};
|
||||
executeSQL(sqls);
|
||||
SimpleTest();
|
||||
InsertTest();
|
||||
UpdateTest();
|
||||
DeleteTest();
|
||||
SelectTest();
|
||||
FuncTest();
|
||||
GroupByTest();
|
||||
}
|
||||
|
||||
// Exercises timeseries DDL: create/delete of single series, wildcard deletes,
// and every DATATYPE/ENCODING/COMPRESSOR/MAX_POINT_NUMBER combination.
// The sqlS array alternates statements with the exact SHOW TIMESERIES output
// expected afterwards (whitespace in the tree strings is part of the assertion).
public void SimpleTest() throws ClassNotFoundException, SQLException {
    String[] sqlS = {
            // create one series, then verify the metadata tree
            "CREATE TIMESERIES root.vehicle.d0.s0 WITH DATATYPE=INT32,ENCODING=RLE",
            "SHOW TIMESERIES",
            "=== Timeseries Tree ===\n" +
                    "\n" +
                    "root:{\n" +
                    "	vehicle:{\n" +
                    "		d0:{\n" +
                    "			s0:{\n" +
                    "				 DataType: INT32,\n" +
                    "				 Encoding: RLE,\n" +
                    "				 args: {},\n" +
                    "				 StorageGroup: root.vehicle \n" +
                    "			}\n" +
                    "		}\n" +
                    "	}\n" +
                    "}",
            // delete it again; the tree collapses back to the bare storage group
            "DELETE TIMESERIES root.vehicle.d0.s0",
            "SHOW TIMESERIES",
            "=== Timeseries Tree ===\n" +
                    "\n" +
                    "root:{\n" +
                    "	vehicle\n" +
                    "}",
            // one series per supported datatype/encoding/compressor combination
            "CREATE TIMESERIES root.vehicle.d0.s0 WITH DATATYPE=BOOLEAN,ENCODING=PLAIN",
            "CREATE TIMESERIES root.vehicle.d0.s1 WITH DATATYPE=INT64,ENCODING=TS_2DIFF",
            "CREATE TIMESERIES root.vehicle.d0.s2 WITH DATATYPE=FLOAT,ENCODING=GORILLA",
            "CREATE TIMESERIES root.vehicle.d0.s4 WITH DATATYPE=DOUBLE,ENCODING=RLE",
            "CREATE TIMESERIES root.vehicle.d1.s5 WITH DATATYPE=TEXT,ENCODING=PLAIN",
            "CREATE TIMESERIES root.vehicle.d2.s6 WITH DATATYPE=INT32,ENCODING=TS_2DIFF,COMPRESSOR=UNCOMPRESSOR",
            "CREATE TIMESERIES root.vehicle.d3.s7 WITH DATATYPE=INT32,ENCODING=RLE,COMPRESSOR=SNAPPY",
            "CREATE TIMESERIES root.vehicle.d4.s8 WITH DATATYPE=INT32,ENCODING=RLE,MAX_POINT_NUMBER=100",
            "CREATE TIMESERIES root.vehicle.d5.s9 WITH DATATYPE=FLOAT,ENCODING=PLAIN,COMPRESSOR=SNAPPY,MAX_POINT_NUMBER=10",
            "CREATE TIMESERIES root.vehicle.d6.s10 WITH DATATYPE=DOUBLE,ENCODING=RLE,COMPRESSOR=UNCOMPRESSOR,MAX_POINT_NUMBER=10",
            // wildcard-delete d0.*; d1..d6 must survive with their args intact
            "DELETE TIMESERIES root.vehicle.d0.*",
            "SHOW TIMESERIES",
            "=== Timeseries Tree ===\n" +
                    "\n" +
                    "root:{\n" +
                    "	vehicle:{\n" +
                    "		d1:{\n" +
                    "			s5:{\n" +
                    "				 DataType: TEXT,\n" +
                    "				 Encoding: PLAIN,\n" +
                    "				 args: {},\n" +
                    "				 StorageGroup: root.vehicle \n" +
                    "			}\n" +
                    "		},\n" +
                    "		d2:{\n" +
                    "			s6:{\n" +
                    "				 DataType: INT32,\n" +
                    "				 Encoding: TS_2DIFF,\n" +
                    "				 args: {COMPRESSOR=UNCOMPRESSOR},\n" +
                    "				 StorageGroup: root.vehicle \n" +
                    "			}\n" +
                    "		},\n" +
                    "		d3:{\n" +
                    "			s7:{\n" +
                    "				 DataType: INT32,\n" +
                    "				 Encoding: RLE,\n" +
                    "				 args: {COMPRESSOR=SNAPPY},\n" +
                    "				 StorageGroup: root.vehicle \n" +
                    "			}\n" +
                    "		},\n" +
                    "		d4:{\n" +
                    "			s8:{\n" +
                    "				 DataType: INT32,\n" +
                    "				 Encoding: RLE,\n" +
                    "				 args: {MAX_POINT_NUMBER=100},\n" +
                    "				 StorageGroup: root.vehicle \n" +
                    "			}\n" +
                    "		},\n" +
                    "		d5:{\n" +
                    "			s9:{\n" +
                    "				 DataType: FLOAT,\n" +
                    "				 Encoding: PLAIN,\n" +
                    "				 args: {COMPRESSOR=SNAPPY, MAX_POINT_NUMBER=10},\n" +
                    "				 StorageGroup: root.vehicle \n" +
                    "			}\n" +
                    "		},\n" +
                    "		d6:{\n" +
                    "			s10:{\n" +
                    "				 DataType: DOUBLE,\n" +
                    "				 Encoding: RLE,\n" +
                    "				 args: {COMPRESSOR=UNCOMPRESSOR, MAX_POINT_NUMBER=10},\n" +
                    "				 StorageGroup: root.vehicle \n" +
                    "			}\n" +
                    "		}\n" +
                    "	}\n" +
                    "}",
            // wildcard-delete everything under the storage group
            "DELETE TIMESERIES root.vehicle.*",
            "SHOW TIMESERIES",
            "=== Timeseries Tree ===\n" +
                    "\n" +
                    "root:{\n" +
                    "	vehicle\n" +
                    "}"};
    executeSQL(sqlS);
}
|
||||
|
||||
// Exercises INSERT with numeric timestamps, NOW(), and ISO-8601 timestamps.
// The string after each SELECT is the exact expected result dump
// (one "time,s0,s1," line per row; missing columns render as null).
public void InsertTest() throws ClassNotFoundException, SQLException {
    String[] sqlS = {
            "CREATE TIMESERIES root.vehicle.d0.s0 WITH DATATYPE=INT32,ENCODING=RLE",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(1,101)",
            "CREATE TIMESERIES root.vehicle.d0.s1 WITH DATATYPE=INT32,ENCODING=RLE",
            "INSERT INTO root.vehicle.d0(timestamp,s0,s1) values(2,102,202)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(NOW(),104)",
            // 2000-01-01T08:00:00+08:00 == epoch millis 946684800000
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(2000-01-01T08:00:00+08:00,105)",
            "SELECT * FROM root.vehicle.d0",
            "1,101,null,\n" +
                    "2,102,202,\n" +
                    "946684800000,105,null,\n" +
                    "NOW(),104,null,\n",
            "DELETE TIMESERIES root.vehicle.*"};
    executeSQL(sqlS);
}
|
||||
|
||||
// Exercises UPDATE over time ranges: prefix (<=2), suffix (>=9), and an
// interior interval (5..7), verifying the untouched rows keep their value.
public void UpdateTest() throws ClassNotFoundException, SQLException {
    String[] sqlS = {
            "CREATE TIMESERIES root.vehicle.d0.s0 WITH DATATYPE=INT32,ENCODING=RLE",
            // ten rows, all value 1
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(1,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(2,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(3,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(4,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(5,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(6,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(7,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(8,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(9,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(10,1)",
            // three non-overlapping range updates (note the two SET spellings)
            "UPDATE root.vehicle.d0 SET s0 = 2 WHERE time <= 2",
            "UPDATE root.vehicle SET d0.s0 = 3 WHERE time >= 9",
            "UPDATE root.vehicle.d0 SET s0 = 4 WHERE time <= 7 and time >= 5",
            "SELECT * FROM root.vehicle.d0",
            "1,2,\n" +
                    "2,2,\n" +
                    "3,1,\n" +
                    "4,1,\n" +
                    "5,4,\n" +
                    "6,4,\n" +
                    "7,4,\n" +
                    "8,1,\n" +
                    "9,3,\n" +
                    "10,3,\n",
            "DELETE TIMESERIES root.vehicle.*"};
    executeSQL(sqlS);
}
|
||||
|
||||
// Exercises DELETE FROM with numeric, ISO-8601 and NOW() bounds, multi-path
// deletes, and wildcard deletes across devices. Empty-string entries assert
// that the preceding SELECT returns no rows.
public void DeleteTest() throws ClassNotFoundException, SQLException {
    String[] sqlS = {
            "CREATE TIMESERIES root.vehicle.d0.s0 WITH DATATYPE=INT32,ENCODING=RLE",
            // ten rows, all value 1
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(1,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(2,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(3,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(4,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(5,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(6,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(7,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(8,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(9,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(10,1)",
            "SELECT * FROM root.vehicle.d0",
            "1,1,\n" +
                    "2,1,\n" +
                    "3,1,\n" +
                    "4,1,\n" +
                    "5,1,\n" +
                    "6,1,\n" +
                    "7,1,\n" +
                    "8,1,\n" +
                    "9,1,\n" +
                    "10,1,\n",
            // numeric upper bound: rows 1..7 removed
            "DELETE FROM root.vehicle.d0.s0 WHERE time < 8",
            "SELECT * FROM root.vehicle.d0",
            "8,1,\n" +
                    "9,1,\n" +
                    "10,1,\n",
            // ISO timestamp insert (946684800000 ms) then delete by ISO bound
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(2000-01-01T08:00:00+08:00,1)",
            "SELECT * FROM root.vehicle.d0",
            "8,1,\n" +
                    "9,1,\n" +
                    "10,1,\n" +
                    "946684800000,1,\n",
            "DELETE FROM root.vehicle.d0.s0 WHERE time < 2000-01-02T08:00:00+08:00",
            "SELECT * FROM root.vehicle.d0",
            "",
            // NOW() round-trip: insert at NOW(), then delete everything <= NOW()
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(NOW(),1)",
            "SELECT * FROM root.vehicle.d0",
            "NOW(),1,\n",
            "DELETE FROM root.vehicle.d0.s0 WHERE time <= NOW()",
            "SELECT * FROM root.vehicle.d0",
            "",
            // multi-path and wildcard deletes across two devices
            "CREATE TIMESERIES root.vehicle.d1.s1 WITH DATATYPE=INT32,ENCODING=RLE",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(1,1)",
            "INSERT INTO root.vehicle.d1(timestamp,s1) values(1,1)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(5,5)",
            "INSERT INTO root.vehicle.d1(timestamp,s1) values(5,5)",
            "SELECT * FROM root.vehicle",
            "1,1,1,\n" +
                    "5,5,5,\n",
            "DELETE FROM root.vehicle.d0.s0,root.vehicle.d1.s1 WHERE time < 3",
            "SELECT * FROM root.vehicle",
            "5,5,5,\n",
            "DELETE FROM root.vehicle.* WHERE time < 7",
            "SELECT * FROM root.vehicle",
            "",
            "DELETE TIMESERIES root.vehicle.*"};
    executeSQL(sqlS);
}
|
||||
|
||||
// Exercises SELECT with a value filter, a combined value+time filter, and an
// unfiltered full scan over ten rows (timestamps 1..10, values 101..110).
public void SelectTest() throws ClassNotFoundException, SQLException {
    String[] sqlS = {
            "CREATE TIMESERIES root.vehicle.d0.s0 WITH DATATYPE=INT32,ENCODING=RLE",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(1,101)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(2,102)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(3,103)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(4,104)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(5,105)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(6,106)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(7,107)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(8,108)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(9,109)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(10,110)",
            // value filter only
            "SELECT * FROM root.vehicle.d0 WHERE s0 < 104",
            "1,101,\n" +
                    "2,102,\n" +
                    "3,103,\n",
            // value filter combined with time filter
            "SELECT * FROM root.vehicle.d0 WHERE s0 > 105 and time < 8",
            "6,106,\n" +
                    "7,107,\n",
            // unfiltered full scan
            "SELECT * FROM root.vehicle.d0",
            "1,101,\n" +
                    "2,102,\n" +
                    "3,103,\n" +
                    "4,104,\n" +
                    "5,105,\n" +
                    "6,106,\n" +
                    "7,107,\n" +
                    "8,108,\n" +
                    "9,109,\n" +
                    "10,110,\n",
            "DELETE TIMESERIES root.vehicle.*"};
    executeSQL(sqlS);
}
|
||||
|
||||
// Exercises aggregation functions (COUNT, MAX_TIME, MIN_TIME, MAX_VALUE,
// MIN_VALUE) with and without filters, over ten rows whose values DEscend
// (110..101) while timestamps ascend (1..10), then NOW()-based DML.
// Aggregate result rows are rendered as "0,<value>,\n".
public void FuncTest() throws ClassNotFoundException, SQLException {
    String[] sqlS = {
            "CREATE TIMESERIES root.vehicle.d0.s0 WITH DATATYPE=INT32,ENCODING=RLE",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(1,110)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(2,109)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(3,108)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(4,107)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(5,106)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(6,105)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(7,104)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(8,103)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(9,102)",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(10,101)",
            "SELECT COUNT(s0) FROM root.vehicle.d0",
            "0,10,\n",
            "SELECT COUNT(s0) FROM root.vehicle.d0 WHERE root.vehicle.d0.s0 < 105",
            "0,4,\n",
            "SELECT MAX_TIME(s0) FROM root.vehicle.d0",
            "0,10,\n",
            "SELECT MAX_TIME(s0) FROM root.vehicle.d0 WHERE root.vehicle.d0.s0 > 105",
            "0,5,\n",
            "SELECT MIN_TIME(s0) FROM root.vehicle.d0",
            "0,1,\n",
            "SELECT MIN_TIME(s0) FROM root.vehicle.d0 WHERE root.vehicle.d0.s0 < 106",
            "0,6,\n",
            "SELECT MAX_VALUE(s0) FROM root.vehicle.d0",
            "0,110,\n",
            "SELECT MAX_VALUE(s0) FROM root.vehicle.d0 WHERE time > 4",
            "0,106,\n",
            "SELECT MIN_VALUE(s0) FROM root.vehicle.d0",
            "0,101,\n",
            "SELECT MIN_VALUE(s0) FROM root.vehicle.d0 WHERE time < 5",
            "0,107,\n",
            // NOW()-anchored insert/update/delete round trip
            "DELETE FROM root.vehicle.d0.s0 WHERE time <= 10",
            "INSERT INTO root.vehicle.d0(timestamp,s0) values(NOW(),5)",
            "SELECT * FROM root.vehicle.d0",
            "NOW(),5,\n",
            "UPDATE root.vehicle.d0 SET s0 = 10 WHERE time < NOW()",
            "SELECT * FROM root.vehicle.d0",
            "NOW(),10,\n",
            "DELETE FROM root.vehicle.d0.s0 WHERE time <= NOW()",
            "SELECT * FROM root.vehicle.d0",
            "",
            "DELETE TIMESERIES root.vehicle.*"};
    executeSQL(sqlS);
}
|
||||
|
||||
public void GroupByTest() throws ClassNotFoundException, SQLException {
|
||||
String[] sqlS = {
|
||||
"CREATE TIMESERIES root.vehicle.d0.s0 WITH DATATYPE=INT32,ENCODING=RLE",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s0) values(1,110)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s0) values(2,109)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s0) values(3,108)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s0) values(4,107)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s0) values(5,106)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s0) values(6,105)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s0) values(7,104)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s0) values(8,103)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s0) values(9,102)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s0) values(10,101)",
|
||||
"CREATE TIMESERIES root.vehicle.d0.s1 WITH DATATYPE=INT32,ENCODING=RLE",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s1) values(1,101)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s1) values(2,102)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s1) values(3,103)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s1) values(4,104)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s1) values(5,105)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s1) values(6,106)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s1) values(7,107)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s1) values(8,108)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s1) values(9,109)",
|
||||
"INSERT INTO root.vehicle.d0(timestamp,s1) values(10,110)",
|
||||
"SELECT COUNT(s0), COUNT(s1) FROM root.vehicle.d0 WHERE s1 < 109 GROUP BY(4ms,[1,10])",
|
||||
"1,3,3,\n" +
|
||||
"4,4,4,\n" +
|
||||
"8,1,1,\n",
|
||||
"SELECT COUNT(s0), MAX_VALUE(s1) FROM root.vehicle.d0 WHERE time < 7 GROUP BY(3ms,2,[1,5])",
|
||||
"2,3,104,\n" +
|
||||
"5,1,105,\n",
|
||||
"SELECT MIN_VALUE(s0), MAX_TIME(s1) FROM root.vehicle.d0 WHERE s1 > 102 and time < 9 GROUP BY(3ms,1,[1,4],[6,9])",
|
||||
"1,108,3,\n" +
|
||||
"4,105,6,\n" +
|
||||
"7,103,8,\n",
|
||||
"DELETE TIMESERIES root.vehicle.*"};
|
||||
executeSQL(sqlS);
|
||||
}
|
||||
|
||||
private void executeSQL(String[] sqls) throws ClassNotFoundException, SQLException {
|
||||
Class.forName(TsfileJDBCConfig.JDBC_DRIVER_NAME);
|
||||
Connection connection = null;
|
||||
try {
|
||||
String result = "";
|
||||
Long now_start = 0L;
|
||||
boolean cmp = false;
|
||||
connection = DriverManager.getConnection("jdbc:tsfile://127.0.0.1:6667/", "root", "root");
|
||||
for (String sql : sqls) {
|
||||
if (cmp) {
|
||||
Assert.assertEquals(result, sql);
|
||||
cmp = false;
|
||||
} else if (sql.equals("SHOW TIMESERIES")) {
|
||||
DatabaseMetaData data = connection.getMetaData();
|
||||
result = data.toString();
|
||||
cmp = true;
|
||||
} else {
|
||||
if (sql.contains("NOW()") && now_start == 0L) {
|
||||
now_start = System.currentTimeMillis();
|
||||
}
|
||||
Statement statement = connection.createStatement();
|
||||
statement.execute(sql);
|
||||
if (sql.split(" ")[0].equals("SELECT")) {
|
||||
ResultSet resultSet = statement.getResultSet();
|
||||
ResultSetMetaData metaData = resultSet.getMetaData();
|
||||
int count = metaData.getColumnCount();
|
||||
String[] column = new String[count];
|
||||
for (int i = 0;i < count;i++) {
|
||||
column[i] = metaData.getColumnName(i+1);
|
||||
}
|
||||
result = "";
|
||||
while (resultSet.next()) {
|
||||
for (int i = 1;i <= count;i++) {
|
||||
if (now_start > 0L && column[i-1] == TIMESTAMP_STR) {
|
||||
String timestr = resultSet.getString(i);
|
||||
Long tn = Long.valueOf(timestr);
|
||||
Long now = System.currentTimeMillis();
|
||||
if (tn >= now_start && tn <= now) {
|
||||
timestr = "NOW()";
|
||||
}
|
||||
result += timestr + ',';
|
||||
} else {
|
||||
result += resultSet.getString(i) + ',';
|
||||
}
|
||||
}
|
||||
result += '\n';
|
||||
}
|
||||
cmp = true;
|
||||
// Assert.assertEquals();
|
||||
}
|
||||
statement.close();
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
} finally {
|
||||
if (connection != null) {
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,224 @@
|
|||
package cn.edu.tsinghua.iotdb.service;
|
||||
|
||||
import cn.edu.tsinghua.iotdb.conf.TsfileDBConfig;
|
||||
import cn.edu.tsinghua.iotdb.conf.TsfileDBDescriptor;
|
||||
import cn.edu.tsinghua.iotdb.jdbc.TsfileJDBCConfig;
|
||||
import cn.edu.tsinghua.iotdb.jdbc.TsfileSQLException;
|
||||
import cn.edu.tsinghua.iotdb.qp.physical.index.KvMatchIndexQueryPlan;
|
||||
import cn.edu.tsinghua.iotdb.utils.EnvironmentUtils;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.sql.Connection;
|
||||
import java.sql.DriverManager;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.Statement;
|
||||
|
||||
/**
|
||||
* Just used for integration test.
|
||||
*/
|
||||
public class KVIndexTest {
|
||||
private final String FOLDER_HEADER = "src/test/resources";
|
||||
private static final String TIMESTAMP_STR = "Time";
|
||||
private int maxOpenFolderPre;
|
||||
|
||||
private String count(String path) {
|
||||
return String.format("count(%s)", path);
|
||||
}
|
||||
|
||||
private String[][] sqls = new String[][]{
|
||||
{"SET STORAGE GROUP TO root.vehicle.d0"},
|
||||
{"SET STORAGE GROUP TO root.vehicle.d1"},
|
||||
{"CREATE TIMESERIES root.vehicle.d0.s0 WITH DATATYPE=INT32, ENCODING=RLE"},
|
||||
{"CREATE TIMESERIES root.vehicle.d1.s0 WITH DATATYPE=INT32, ENCODING=RLE"},
|
||||
// s0第一个文件
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(1,101)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(2,102)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(3,103)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(4,104)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(5,105)"},
|
||||
// 创建索引
|
||||
{"create index on root.vehicle.d0.s0 using kvindex with window_length=2, since_time=0"},
|
||||
// 强行切断d0.s0,生成d1.s0文件
|
||||
{"insert into root.vehicle.d1(timestamp,s0) values(5,102)"},
|
||||
// s0第二个文件
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(6,106)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(7,107)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(8,108)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(9,109)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(10,110)"},
|
||||
|
||||
// 强行切断d0.s0,生成第二个d1.s0文件
|
||||
{"insert into root.vehicle.d1(timestamp,s0) values(6,102)"},
|
||||
// s0第三个文件,处于未关闭状态
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(11,111)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(12,112)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(13,113)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(14,114)"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(15,115)"},
|
||||
// 修改d2.s0,强行切断d0.s0,生成第三个d0.s0文件
|
||||
{"update root.vehicle SET d0.s0 = 33333 WHERE time >= 6 and time <= 7"},
|
||||
{"insert into root.vehicle.d0(timestamp,s0) values(7,102)"},
|
||||
// 单文件索引查询
|
||||
// {
|
||||
// "select index kvindex(root.vehicle.d0.s0, root.vehicle.d0.s0, 4, 7, 0.0, 1.0, 0.0) from root" +
|
||||
// ".vehicle.d0.s0",
|
||||
// "0,4,7,0.0",
|
||||
// },
|
||||
// {
|
||||
// "select index kvindex(root.vehicle.d0.s0, root.vehicle.d0.s0, 2, 5, 0.0, 1.0, 0.0) from root.vehicle.d0.s0",
|
||||
// "0,2,5,0.0",
|
||||
// },
|
||||
{
|
||||
"select index kvindex(root.vehicle.d0.s0, root.vehicle.d0.s0, 1, 4, 0.0, 1.0, 0.0) from root" +
|
||||
".indextest.d0.s0",
|
||||
"0,1,4,0.0",
|
||||
},
|
||||
// 跨文件索引,涉及到Overflow的查询
|
||||
|
||||
// merge操作
|
||||
{"merge"},
|
||||
// 单文件索引查询
|
||||
{
|
||||
"select index kvindex(root.vehicle.d0.s0, root.vehicle.d0.s0, 2, 5, 0.0, 1.0, 0.0) from root.vehicle.d0.s0",
|
||||
"0,2,5,0.0",
|
||||
},
|
||||
{
|
||||
"select index kvindex(root.vehicle.d0.s0, root.vehicle.d0.s0, 3, 5, 0.0, 1.0, 0.0) from root.vehicle.d0.s0",
|
||||
"0,3,5,0.0",
|
||||
},
|
||||
|
||||
// 跨文件索引,涉及到Overflow的查询
|
||||
{
|
||||
"select index kvindex(root.vehicle.d0.s0, root.vehicle.d0.s0, 5, 8, 0.0, 1.0, 0.0) from root.vehicle.d0.s0",
|
||||
"0,5,8,0.0",
|
||||
},
|
||||
// 删除索引
|
||||
{"drop index kvindex on root.vehicle.d0.s0"},
|
||||
//// 再次查询
|
||||
{
|
||||
"select index kvindex(root.vehicle.d0.s0, root.vehicle.d0.s0, 6, 9, 0.0, 1.0, 0.0) from root.vehicle.d0.s0",
|
||||
"0,1,4,0.0",
|
||||
},
|
||||
|
||||
};
|
||||
|
||||
private IoTDB deamon;
|
||||
|
||||
private boolean testFlag = TestUtils.testFlag;
|
||||
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
if (testFlag) {
|
||||
TsfileDBConfig config = TsfileDBDescriptor.getInstance().getConfig();
|
||||
maxOpenFolderPre = config.maxOpenFolder;
|
||||
config.maxOpenFolder = 1;
|
||||
deamon = IoTDB.getInstance();
|
||||
deamon.active();
|
||||
EnvironmentUtils.envSetUp();
|
||||
}
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() throws Exception {
|
||||
if (testFlag) {
|
||||
deamon.stop();
|
||||
Thread.sleep(5000);
|
||||
TsfileDBConfig config = TsfileDBDescriptor.getInstance().getConfig();
|
||||
config.maxOpenFolder = maxOpenFolderPre;
|
||||
EnvironmentUtils.cleanEnv();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test() throws ClassNotFoundException, SQLException, InterruptedException {
|
||||
if (testFlag) {
|
||||
Thread.sleep(5000);
|
||||
executeSQL();
|
||||
}
|
||||
}
|
||||
|
||||
private void executeSQL() throws ClassNotFoundException, SQLException {
|
||||
Class.forName(TsfileJDBCConfig.JDBC_DRIVER_NAME);
|
||||
Connection connection = null;
|
||||
try {
|
||||
for (String[] sqlRet : sqls) {
|
||||
String sql = sqlRet[0];
|
||||
System.out.println("testtest-sql\t" + sql);
|
||||
if ("".equals(sql))
|
||||
return;
|
||||
// if("select index kvindex(root.vehicle.d0.s0, root.vehicle.d0.s0, 1, 3, 0)".equals(sql))
|
||||
// System.out.println();
|
||||
if (sqlRet.length == 1) {
|
||||
//长度1,non-query语句
|
||||
connection = DriverManager.getConnection("jdbc:tsfile://127.0.0.1:6667/", "root", "root");
|
||||
Statement statement = connection.createStatement();
|
||||
statement.execute(sql);
|
||||
if ("merge".equals(sql)) {
|
||||
// Thread.sleep(3000);
|
||||
System.out.println("process merge operation");
|
||||
}
|
||||
statement.close();
|
||||
} else {
|
||||
//长度2,query语句,第二项是结果
|
||||
// String[] retArray = (String[]) sqlRet[1];
|
||||
query(sql, sqlRet);
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
} finally {
|
||||
if (connection != null) {
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void query(String querySQL, String[] retArray) throws ClassNotFoundException,
|
||||
SQLException {
|
||||
Connection connection = null;
|
||||
try {
|
||||
connection = DriverManager.getConnection("jdbc:tsfile://127.0.0.1:6667/", "root", "root");
|
||||
Statement statement = connection.createStatement();
|
||||
try {
|
||||
boolean hasResultSet = statement.execute(querySQL);
|
||||
// System.out.println(hasResultSet + "...");
|
||||
// KvMatchIndexQueryPlan planForHeader = new KvMatchIndexQueryPlan(null, null, 0,0,0);
|
||||
Assert.assertTrue(hasResultSet);
|
||||
if (hasResultSet) {
|
||||
ResultSet resultSet = statement.getResultSet();
|
||||
int cnt = 1;
|
||||
while (resultSet.next()) {
|
||||
String ans = resultSet.getString(TIMESTAMP_STR) + "," + resultSet.getString(2)
|
||||
+ "," + resultSet.getString(3)
|
||||
+ "," + resultSet.getString(4);
|
||||
System.out.println("testtest-actual\t" + ans);
|
||||
if (!retArray[cnt].equals(ans))
|
||||
Assert.assertEquals(retArray[cnt], ans);
|
||||
cnt++;
|
||||
if (cnt > retArray.length)
|
||||
Assert.fail();
|
||||
}
|
||||
if (retArray.length != cnt)
|
||||
Assert.assertEquals(retArray.length, cnt);
|
||||
}
|
||||
} catch (TsfileSQLException e) {
|
||||
Assert.assertEquals(e.getMessage(),"The timeseries root.vehicle.d0.s0 hasn't been indexed.");
|
||||
Assert.assertEquals(querySQL, "select index kvindex(root.vehicle.d0.s0, root.vehicle.d0.s0, 6, 9, 0.0, 1.0, 0.0) from root.vehicle.d0.s0");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
} finally {
|
||||
if (connection != null) {
|
||||
connection.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -915,16 +915,16 @@ public class SQLParserTest {
|
|||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void createIndex1() throws ParseException, RecognitionException {
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_CREATE", "TOK_INDEX",
|
||||
"TOK_PATH" ,"TOK_ROOT", "a", "b", "c",
|
||||
"TOK_FUNC", "kv-match",
|
||||
"TOK_WITH", "TOK_INDEX_KV", "window_length", "50",
|
||||
"TOK_WHERE", ">", "TOK_PATH", "time", "123"));
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_CREATE", "TOK_INDEX",
|
||||
"TOK_PATH" ,"TOK_ROOT", "a", "b", "c",
|
||||
"TOK_FUNC", "kvindex",
|
||||
"TOK_WITH", "TOK_INDEX_KV", "window_length", "50",
|
||||
"TOK_WHERE", ">", "TOK_PATH", "time", "123"));
|
||||
ArrayList<String> rec = new ArrayList<>();
|
||||
ASTNode astTree = ParseGenerator.generateAST("create index on root.a.b.c using kv-match with window_length=50 where time > 123");
|
||||
ASTNode astTree = ParseGenerator.generateAST("create index on root.a.b.c using kvindex with window_length=50 where time > 123");
|
||||
astTree = ParseUtils.findRootNonNullToken(astTree);
|
||||
recursivePrintSon(astTree, rec);
|
||||
|
||||
|
@ -937,11 +937,11 @@ public class SQLParserTest {
|
|||
|
||||
@Test
|
||||
public void createIndex2() throws ParseException, RecognitionException {
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_CREATE", "TOK_INDEX",
|
||||
"TOK_PATH" ,"TOK_ROOT", "a", "b", "c",
|
||||
"TOK_FUNC", "kv-match2",
|
||||
"TOK_WITH", "TOK_INDEX_KV", "xxx", "50", "TOK_INDEX_KV", "xxx", "123",
|
||||
"TOK_WHERE", ">", "TOK_PATH", "time" ,"TOK_DATETIME", "now"));
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_CREATE", "TOK_INDEX",
|
||||
"TOK_PATH" ,"TOK_ROOT", "a", "b", "c",
|
||||
"TOK_FUNC", "kv-match2",
|
||||
"TOK_WITH", "TOK_INDEX_KV", "xxx", "50", "TOK_INDEX_KV", "xxx", "123",
|
||||
"TOK_WHERE", ">", "TOK_PATH", "time" ,"TOK_DATETIME", "now"));
|
||||
ArrayList<String> rec = new ArrayList<>();
|
||||
ASTNode astTree = ParseGenerator.generateAST("create index on root.a.b.c using kv-match2 with xxx=50,xxx=123 where time > now()");
|
||||
astTree = ParseUtils.findRootNonNullToken(astTree);
|
||||
|
@ -953,15 +953,17 @@ public class SQLParserTest {
|
|||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void selectIndex1() throws ParseException, RecognitionException {
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_QUERY",
|
||||
"TOK_SELECT_INDEX", "subsequence_matching",
|
||||
"TOK_PATH", "TOK_ROOT", "a", "b", "c", "'query.csv'", "123.1",
|
||||
"TOK_FROM", "TOK_PATH", "TOK_ROOT", "a", "b", "c"));
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_QUERY",
|
||||
"TOK_SELECT_INDEX", "subsequence_matching",
|
||||
"TOK_PATH", "TOK_ROOT", "a", "b", "c",
|
||||
"TOK_PATH", "TOK_ROOT", "a", "b", "c",
|
||||
"123", "132", "123.1",
|
||||
"TOK_WHERE", "<", "TOK_PATH", "time", "10"));
|
||||
ArrayList<String> rec = new ArrayList<>();
|
||||
ASTNode astTree = ParseGenerator.generateAST("select index subsequence_matching(root.a.b.c, 'query.csv' , 123.1) from root.a.b.c");
|
||||
ASTNode astTree = ParseGenerator.generateAST("select index subsequence_matching(root.a.b.c, root.a.b.c, 123, 132 , 123.1) where time < 10");
|
||||
astTree = ParseUtils.findRootNonNullToken(astTree);
|
||||
recursivePrintSon(astTree, rec);
|
||||
|
||||
|
@ -970,15 +972,19 @@ public class SQLParserTest {
|
|||
assertEquals(rec.get(i), ans.get(i));
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void selectIndex2() throws ParseException, RecognitionException {
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_QUERY",
|
||||
"TOK_SELECT_INDEX", "subsequence_matching",
|
||||
"TOK_PATH","TOK_ROOT", "a", "b", "c", "'query.csv'", "123.1", "0.123", "0.5"));
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_QUERY",
|
||||
"TOK_SELECT_INDEX", "subsequence_matching",
|
||||
"TOK_PATH", "TOK_ROOT", "a", "b", "c",
|
||||
"TOK_PATH", "TOK_ROOT", "a", "b", "c",
|
||||
"123", "132", "123.1", "0.123", "0.5",
|
||||
"TOK_FROM", "TOK_PATH", "TOK_ROOT", "a", "b"));
|
||||
ArrayList<String> rec = new ArrayList<>();
|
||||
ASTNode astTree = ParseGenerator.generateAST("select index subsequence_matching(root.a.b.c, 'query.csv' , 123.1, 0.123, 0.5)");
|
||||
// ASTNode astTree = ParseGenerator.generateAST("select index kvindex(root.vehicle.d0.s0, root.vehicle.d0.s0, 1, 3, 0.0, 1.0, 0.0) from root.vehicle.d0.s0");
|
||||
ASTNode astTree = ParseGenerator.generateAST("select index subsequence_matching(root.a.b.c, root.a.b.c, 123, 132 , 123.1, 0.123, 0.5) from root.a.b");
|
||||
astTree = ParseUtils.findRootNonNullToken(astTree);
|
||||
recursivePrintSon(astTree, rec);
|
||||
|
||||
|
@ -987,14 +993,36 @@ public class SQLParserTest {
|
|||
assertEquals(rec.get(i), ans.get(i));
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void selectIndex3() throws ParseException, RecognitionException {
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_QUERY",
|
||||
"TOK_SELECT_INDEX", "kvindex",
|
||||
"TOK_PATH", "TOK_ROOT", "a", "b", "c",
|
||||
"TOK_PATH", "TOK_ROOT", "a", "b", "c",
|
||||
"TOK_DATETIME", "2016-11-16T16:22:33+08:00",
|
||||
"TOK_DATETIME", "now",
|
||||
"123.1", "0.123", "0.5"));
|
||||
ArrayList<String> rec = new ArrayList<>();
|
||||
ASTNode astTree = ParseGenerator.generateAST("select index kvindex(root.a.b.c, root.a.b.c, 2016-11-16T16:22:33+08:00, now() , 123.1, 0.123, 0.5)");
|
||||
astTree = ParseUtils.findRootNonNullToken(astTree);
|
||||
recursivePrintSon(astTree, rec);
|
||||
|
||||
int i = 0;
|
||||
while (i <= rec.size() - 1) {
|
||||
assertEquals(rec.get(i), ans.get(i));
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void dropIndex() throws ParseException, RecognitionException {
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_DROP", "TOK_INDEX",
|
||||
"TOK_PATH", "TOK_ROOT", "a", "b", "c"));
|
||||
ArrayList<String> ans = new ArrayList<>(Arrays.asList("TOK_DROP", "TOK_INDEX",
|
||||
"TOK_PATH", "TOK_ROOT", "a", "b", "c",
|
||||
"TOK_FUNC", "kvindex"));
|
||||
ArrayList<String> rec = new ArrayList<>();
|
||||
ASTNode astTree = ParseGenerator.generateAST("drop index on root.a.b.c");
|
||||
ASTNode astTree = ParseGenerator.generateAST("drop index kvindex on root.a.b.c");
|
||||
astTree = ParseUtils.findRootNonNullToken(astTree);
|
||||
recursivePrintSon(astTree, rec);
|
||||
|
||||
|
@ -1003,8 +1031,8 @@ public class SQLParserTest {
|
|||
assertEquals(rec.get(i), ans.get(i));
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void recursivePrintSon(Node ns, ArrayList<String> rec) {
|
||||
rec.add(ns.toString());
|
||||
if (ns.getChildren() != null) {
|
||||
|
|
|
@ -72,6 +72,8 @@ public class EnvironmentUtils {
|
|||
cleanDir(config.walFolder);
|
||||
// delete derby
|
||||
cleanDir(config.derbyHome);
|
||||
// delete index
|
||||
cleanDir(config.indexFileDir);
|
||||
// delte data
|
||||
cleanDir("data");
|
||||
// delte derby log
|
||||
|
|
Loading…
Reference in New Issue