[IOTDB-190] upgrade from antlr3 to antlr4 (#564)

* upgrade from antlr3 to antlr4
Boris 2019-12-03 16:04:42 +08:00 committed by Jialin Qiao
parent 8c7b51a800
commit 927239b885
29 changed files with 1864 additions and 5709 deletions

View File

@ -47,6 +47,12 @@
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<exclusions>
<exclusion>
<groupId>org.antlr</groupId>
<artifactId>antlr-runtime</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<dependencyManagement>

View File

@ -77,7 +77,7 @@
<thrift.version>0.12.0</thrift.version>
<airline.version>0.8</airline.version>
<jackson.version>2.10.0</jackson.version>
<antlr3.version>3.5.2</antlr3.version>
<antlr4.version>4.7.1</antlr4.version>
<common.cli.version>1.3.1</common.cli.version>
<common.codec.version>1.13</common.codec.version>
<common.collections.version>3.2.2</common.collections.version>
@ -262,8 +262,8 @@
</dependency>
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr-runtime</artifactId>
<version>${antlr3.version}</version>
<artifactId>antlr4-runtime</artifactId>
<version>${antlr4.version}</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>

View File

@ -61,10 +61,10 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.antlr/antlr-runtime -->
<!-- https://mvnrepository.com/artifact/org.antlr/antlr4-runtime -->
<dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr-runtime</artifactId>
<artifactId>antlr4-runtime</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.commons/commons-lang3 -->
<dependency>
@ -126,12 +126,12 @@
<plugins>
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr3-maven-plugin</artifactId>
<version>${antlr3.version}</version>
<artifactId>antlr4-maven-plugin</artifactId>
<version>${antlr4.version}</version>
<executions>
<execution>
<goals>
<goal>antlr</goal>
<goal>antlr4</goal>
</goals>
</execution>
</executions>

View File

@ -1,577 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
lexer grammar TqlLexer;
@header {
package org.apache.iotdb.db.sql.parse;
}
//*************** key words *************
K_SELECT
: S E L E C T
;
K_INTO
: I N T O
;
K_ROOT
: R O O T
;
K_FROM
: F R O M
;
K_WHERE
: W H E R E
;
K_LIMIT
: L I M I T
;
K_OFFSET
: O F F S E T
;
K_SLIMIT
: S L I M I T
;
K_SOFFSET
: S O F F S E T
;
K_NOW
: N O W
;
K_GROUP
: G R O U P
;
K_BY
: B Y
;
K_FILL
: F I L L
;
K_PREVIOUS
: P R E V I O U S
;
K_LINEAR
: L I N E A R
;
K_INT32
: I N T '3' '2'
;
K_INT64
: I N T '6' '4'
;
K_FLOAT
: F L O A T
;
K_DOUBLE
: D O U B L E
;
K_BOOLEAN
: B O O L E A N
;
K_TEXT
: T E X T
;
K_INSERT
: I N S E R T
;
K_VALUES
: V A L U E S
;
K_TIMESTAMP
: T I M E S T A M P
;
K_UPDATE
: U P D A T E
;
K_SET
: S E T
;
K_DELETE
: D E L E T E
;
K_CREATE
: C R E A T E
;
K_TIMESERIES
: T I M E S E R I E S
;
K_WITH
: W I T H
;
K_DATATYPE
: D A T A T Y P E
;
K_ENCODING
: E N C O D I N G
;
K_COMPRESSOR
: C O M P R E S S O R
;
K_STORAGE
: S T O R A G E
;
K_TO
: T O
;
K_PROPERTY
: P R O P E R T Y
;
K_LABEL
: L A B E L
;
K_LINK
: L I N K
;
K_UNLINK
: U N L I N K
;
K_SHOW
: S H O W
;
K_METADATA
: M E T A D A T A
;
K_DESCRIBE
: D E S C R I B E
;
K_INDEX
: I N D E X
;
K_ON
: O N
;
K_USING
: U S I N G
;
K_DROP
: D R O P
;
K_MERGE
: M E R G E
;
K_LIST
: L I S T
;
K_USER
: U S E R
;
K_PRIVILEGES
: P R I V I L E G E S
;
K_ROLE
: R O L E
;
K_ALL
: A L L
;
K_OF
: O F
;
K_ALTER
: A L T E R
;
K_PASSWORD
: P A S S W O R D
;
K_GRANT
: G R A N T
;
K_REVOKE
: R E V O K E
;
K_PATH
: P A T H
;
K_LOAD
: L O A D
;
K_WATERMARK_EMBEDDING
: W A T E R M A R K '_' E M B E D D I N G
;
K_TTL
: T T L
;
K_UNSET
: U N S E T
;
K_CONFIGURATION
: C O N F I G U R A T I O N
;
K_FLUSH
: F L U S H
;
K_TASK
: T A S K
;
K_DYNAMIC
: D Y N A M I C
;
K_PARAMETER
: P A R A M E T E R
;
K_INFO
: I N F O
;
//************** logical operator***********
OPERATOR_AND
: A N D
| '&'
| '&&'
;
OPERATOR_OR
: O R
| '|'
| '||'
;
OPERATOR_NOT
: N O T | '!'
;
//**************** data type ***************
K_PLAIN
: P L A I N
;
K_PLAIN_DICTIONARY
: P L A I N '_' D I C T I O N A R Y
;
K_RLE
: R L E
;
K_DIFF
: D I F F
;
K_TS_2DIFF
: T S '_' '2' D I F F
;
K_BITMAP
: B I T M A P
;
K_GORILLA
: G O R I L L A
;
K_REGULAR
: R E G U L A R
;
K_ADD
: A D D
;
K_DEVICE
: D E V I C E
;
// *************** comparison *******
OPERATOR_GT
: '>'
;
OPERATOR_GTE
: '>='
;
OPERATOR_LT
: '<'
;
OPERATOR_LTE
: '<='
;
OPERATOR_EQ
: '=' | '=='
;
OPERATOR_NEQ
: '!=' | '<>'
;
//************ operator *******
STAR
: '*'
;
MINUS
: '-'
;
PLUS
: '+'
;
DIVIDE
: '/'
;
//**************** symbol ***************
SEMI
: ';'
;
DOT
: '.'
;
COMMA
: ','
;
LR_BRACKET
: '('
;
RR_BRACKET
: ')'
;
LS_BRACKET
: '['
;
RS_BRACKET
: ']'
;
STRING_LITERAL
: DQUOTA_STRING
| SQUOTA_STRING
;
fragment DQUOTA_STRING
: '"' ( '\\'. | '""' | ~('"'| '\\') )* '"'
;
fragment SQUOTA_STRING
: '\'' ('\\'. | '\'\'' | ~('\'' | '\\'))* '\''
;
DATETIME
: INT ('-'|'/') INT ('-'|'/') INT
(T | WS)
INT ':' INT ':' INT (DOT INT)?
(('+' | '-') INT ':' INT)?
;
EXPONENT : INT ('e'|'E') ('+'|'-')? INT ;
ID
: ('a'..'z'|'A'..'Z'|'_') ('a'..'z'|'A'..'Z'|'0'..'9'|'_'|'-')*
;
INT
: NUM+
;
fragment
NUM
: '0'..'9'
;
// ***************************
fragment A
: 'a' | 'A'
;
fragment B
: 'b' | 'B'
;
fragment C
: 'c' | 'C'
;
fragment D
: 'd' | 'D'
;
fragment E
: 'e' | 'E'
;
fragment F
: 'f' | 'F'
;
fragment G
: 'g' | 'G'
;
fragment H
: 'h' | 'H'
;
fragment I
: 'i' | 'I'
;
fragment J
: 'j' | 'J'
;
fragment K
: 'k' | 'K'
;
fragment L
: 'l' | 'L'
;
fragment M
: 'm' | 'M'
;
fragment N
: 'n' | 'N'
;
fragment O
: 'o' | 'O'
;
fragment P
: 'p' | 'P'
;
fragment Q
: 'q' | 'Q'
;
fragment R
: 'r' | 'R'
;
fragment S
: 's' | 'S'
;
fragment T
: 't' | 'T'
;
fragment U
: 'u' | 'U'
;
fragment V
: 'v' | 'V'
;
fragment W
: 'w' | 'W'
;
fragment X
: 'x' | 'X'
;
fragment Y
: 'y' | 'Y'
;
fragment Z
: 'z' | 'Z'
;
WS : ( ' '
| '\t'
| '\r'
| '\n'
) {$channel=HIDDEN;}
;
DURATION
:
(NUM+ (Y|M O|W|D|H|M|S|M S|U S|N S))+
;

View File

@ -1,824 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
parser grammar TqlParser;
options {
tokenVocab=TqlLexer;
output=AST;
ASTLabelType=CommonTree;
backtrack=false;
k=3;
}
tokens{
TOK_CREATE;
TOK_TIMESERIES;
TOK_PATH;
TOK_WITH;
TOK_DATATYPE;
TOK_ENCODING;
TOK_COMPRESSOR;
TOK_PROPERTY;
TOK_QUERY;
TOK_AGGREGATE;
TOK_SELECT;
TOK_FROM;
TOK_ROOT;
TOK_WHERE;
TOK_AND;
TOK_OR;
TOK_NOT;
TOK_GT;
TOK_GTE;
TOK_LT;
TOK_LTE;
TOK_EQ;
TOK_NEQ;
TOK_DATETIME;
TOK_INSERT;
TOK_INSERT_COLUMNS;
TOK_TIME;
TOK_INSERT_VALUES;
TOK_UPDATE;
TOK_SET;
TOK_DELETE;
TOK_LABEL;
TOK_ADD;
TOK_LINK;
TOK_UNLINK;
TOK_SHOW_METADATA;
TOK_INDEX;
TOK_FUNCTION;
TOK_INDEX_KV;
TOK_DESCRIBE;
TOK_DROP;
TOK_MERGE;
TOK_LIST;
TOK_PRIVILEGES;
TOK_ALL;
TOK_USER;
TOK_ROLE;
TOK_PASSWORD;
TOK_ALTER;
TOK_ALTER_PSWD;
TOK_GRANT;
TOK_REVOKE;
TOK_SLIMIT;
TOK_LIMIT;
TOK_SOFFSET;
TOK_OFFSET;
TOK_GROUPBY;
TOK_TIMEUNIT;
TOK_TIMEORIGIN;
TOK_TIMEINTERVAL;
TOK_FILL;
TOK_TYPE;
TOK_PREVIOUS;
TOK_LINEAR;
TOK_LOAD;
TOK_GRANT_WATERMARK_EMBEDDING;
TOK_REVOKE_WATERMARK_EMBEDDING;
TOK_STORAGEGROUP;
TOK_VALUE;
TOK_CONSTANT;
TOK_TIMEINTERVALPAIR;
TOK_PROPERTY_VALUE;
TOK_GROUPBY_DEVICE;
TOK_SELECT_INDEX;
TOK_TTL;
TOK_UNSET;
TOK_SHOW;
TOK_DATE_EXPR;
TOK_DURATION;
TOK_LOAD_CONFIGURATION;
TOK_DYNAMIC_PARAMETER;
TOK_FLUSH_TASK_INFO;
}
@header{
package org.apache.iotdb.db.sql.parse;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
}
@members {
ArrayList<ParseError> errors = new ArrayList<ParseError>();
Stack messages = new Stack<String>();
private static HashMap<String, String> tokenNameMap;
static {
tokenNameMap = new HashMap<String, String>();
tokenNameMap.put("K_AND", "AND");
tokenNameMap.put("K_OR", "OR");
tokenNameMap.put("K_NOT", "NOT");
tokenNameMap.put("K_LIKE", "LIKE");
tokenNameMap.put("K_BY", "BY");
tokenNameMap.put("K_GROUP", "GROUP");
tokenNameMap.put("K_FILL", "FILL");
tokenNameMap.put("K_LINEAR", "LINEAR");
tokenNameMap.put("K_PREVIOUS", "PREVIOUS");
tokenNameMap.put("K_WHERE", "WHERE");
tokenNameMap.put("K_FROM", "FROM");
tokenNameMap.put("K_SELECT", "SELECT");
tokenNameMap.put("K_INSERT", "INSERT");
tokenNameMap.put("K_LIMIT","LIMIT");
tokenNameMap.put("K_OFFSET","OFFSET");
tokenNameMap.put("K_SLIMIT","SLIMIT");
tokenNameMap.put("K_SOFFSET","SOFFSET");
tokenNameMap.put("K_ON", "ON");
tokenNameMap.put("K_ROOT", "ROOT");
tokenNameMap.put("K_SHOW", "SHOW");
tokenNameMap.put("K_CLUSTER", "CLUSTER");
tokenNameMap.put("K_LOAD", "LOAD");
tokenNameMap.put("K_NULL", "NULL");
tokenNameMap.put("K_CREATE", "CREATE");
tokenNameMap.put("K_DESCRIBE", "DESCRIBE");
tokenNameMap.put("K_TO", "TO");
tokenNameMap.put("K_ON", "ON");
tokenNameMap.put("K_USING", "USING");
tokenNameMap.put("K_DATETIME", "DATETIME");
tokenNameMap.put("K_TIMESTAMP", "TIMESTAMP");
tokenNameMap.put("K_TIME", "TIME");
tokenNameMap.put("K_AGGREGATION", "CLUSTERED");
tokenNameMap.put("K_INTO", "INTO");
tokenNameMap.put("K_ROW", "ROW");
tokenNameMap.put("K_STORED", "STORED");
tokenNameMap.put("K_OF", "OF");
tokenNameMap.put("K_ADD", "ADD");
tokenNameMap.put("K_FUNCTION", "FUNCTION");
tokenNameMap.put("K_WITH", "WITH");
tokenNameMap.put("K_SET", "SET");
tokenNameMap.put("K_UPDATE", "UPDATE");
tokenNameMap.put("K_VALUES", "VALUES");
tokenNameMap.put("K_KEY", "KEY");
tokenNameMap.put("K_ENABLE", "ENABLE");
tokenNameMap.put("K_DISABLE", "DISABLE");
tokenNameMap.put("K_ALL", "ALL");
tokenNameMap.put("K_LIST", "LIST");
tokenNameMap.put("K_TTL", "TTL");
tokenNameMap.put("K_UNSET", "UNSET");
tokenNameMap.put("K_CONFIGURATION", "CONFIGURATION");
tokenNameMap.put("K_FLUSH", "FLUSH");
tokenNameMap.put("K_TASK", "TASK");
tokenNameMap.put("K_DYNAMIC", "DYNAMIC");
tokenNameMap.put("K_PARAMETER", "PARAMETER");
tokenNameMap.put("K_INFO", "INFO");
// Operators
tokenNameMap.put("DOT", ".");
tokenNameMap.put("COLON", ":");
tokenNameMap.put("COMMA", ",");
tokenNameMap.put("SEMI", ");");
tokenNameMap.put("LR_BRACKET", "(");
tokenNameMap.put("RR_BRACKET", ")");
tokenNameMap.put("LS_BRACKET", "[");
tokenNameMap.put("RS_BRACKET", "]");
tokenNameMap.put("OPERATOR_EQ", "=");
tokenNameMap.put("OPERATOR_NEQ", "<>");
// tokenNameMap.put("EQUAL_NS", "<=>");
tokenNameMap.put("OPERATOR_LTE", "<=");
tokenNameMap.put("OPERATOR_LT", "<");
tokenNameMap.put("OPERATOR_GTE", ">=");
tokenNameMap.put("OPERATOR_HT", ">");
tokenNameMap.put("STRING_LITERAL", "\\'");
}
public static Collection<String> getKeywords() {
return tokenNameMap.values();
}
private static String getTokenName(String token) {
String name = tokenNameMap.get(token);
return name == null ? token : name;
}
@Override
public Object recoverFromMismatchedSet(IntStream input, RecognitionException re, BitSet follow)
throws RecognitionException {
throw re;
}
@Override
public void displayRecognitionError(String[] tokenNames, RecognitionException e) {
errors.add(new ParseError(this, e, tokenNames));
}
@Override
public String getErrorHeader(RecognitionException e) {
String header = null;
if (e.charPositionInLine < 0 && input.LT(-1) != null) {
Token t = input.LT(-1);
header = "line " + t.getLine() + ":" + t.getCharPositionInLine();
} else {
header = super.getErrorHeader(e);
}
return header;
}
@Override
public String getErrorMessage(RecognitionException e, String[] tokenNames) {
String msg = null;
// Translate the token names to something that the user can understand
String[] tokens = new String[tokenNames.length];
for (int i = 0; i < tokenNames.length; ++i) {
tokens[i] = TqlParser.getTokenName(tokenNames[i]);
}
if (e instanceof NoViableAltException) {
@SuppressWarnings("unused")
NoViableAltException nvae = (NoViableAltException) e;
// for development, can add
// "decision=<<"+nvae.grammarDecisionDescription+">>"
// and "(decision="+nvae.decisionNumber+") and
// "state "+nvae.stateNumber
msg = "cannot recognize input near "
+ input.LT(1) != null ? " " + getTokenErrorDisplay(input.LT(1)) : ""
+ input.LT(1) != null ? " " + getTokenErrorDisplay(input.LT(1)) : ""
+ input.LT(3) != null ? " " + getTokenErrorDisplay(input.LT(3)) : "";
} else if (e instanceof MismatchedTokenException) {
MismatchedTokenException mte = (MismatchedTokenException) e;
msg = super.getErrorMessage(e, tokens) + (input.LT(-1) == null ? "":" near '" + input.LT(-1).getText()) + "'"
+ ". Please refer to SQL document and check if there is any keyword conflict.";
} else if (e instanceof FailedPredicateException) {
FailedPredicateException fpe = (FailedPredicateException) e;
msg = "Failed to recognize predicate '" + fpe.token.getText() + "'. Failed rule: '" + fpe.ruleName + "'";
} else {
if(tokenNameMap.containsKey("K_"+e.token.getText().toUpperCase())){
msg = e.token.getText() + " is a key word. Please refer to SQL document and check whether it can be used here or not.";
} else {
msg = super.getErrorMessage(e, tokens);
}
}
return messages.size() > 0 ? msg + " in " + messages.peek() : msg;
}
}
@rulecatch {
catch (RecognitionException e) {
reportError(e);
throw e;
}
}
statement
: sqlStatement (SEMI)? EOF
;
sqlStatement
: ddlStatement
| dmlStatement
| administrationStatement
| configurationStatement
| showStatement
;
dmlStatement
: selectStatement
| insertStatement
| updateStatement
| deleteStatement
| loadStatement
;
ddlStatement
: createTimeseries
| deleteTimeseries
| setStorageGroup
| deleteStorageGroup
| createProperty
| addLabel
| deleteLabel
| linkPath
| unlinkPath
| showMetadata
| describePath
| createIndex
| dropIndex
| mergeStatement
| listStatement
| ttlStatement
;
administrationStatement
: createUser
| alterUser
| dropUser
| createRole
| dropRole
| grantUser
| grantRole
| revokeUser
| revokeRole
| grantRoleToUser
| revokeRoleFromUser
| grantWatermarkEmbedding
| revokeWatermarkEmbedding
;
createTimeseries
: K_CREATE K_TIMESERIES timeseriesPath K_WITH attributeClauses
-> ^(TOK_CREATE timeseriesPath ^(TOK_WITH attributeClauses))
;
timeseriesPath
: K_ROOT (DOT nodeNameWithoutStar)+
-> ^(TOK_PATH ^(TOK_ROOT nodeNameWithoutStar+))
;
nodeNameWithoutStar
: INT
| ID
| STRING_LITERAL
;
attributeClauses
: K_DATATYPE OPERATOR_EQ dataType COMMA K_ENCODING OPERATOR_EQ encoding (COMMA K_COMPRESSOR OPERATOR_EQ compressor=propertyValue)? (COMMA property)*
-> ^(TOK_DATATYPE dataType) ^(TOK_ENCODING encoding) ^(TOK_COMPRESSOR $compressor)? property*
;
encoding
: K_PLAIN | K_PLAIN_DICTIONARY | K_RLE | K_DIFF | K_TS_2DIFF | K_BITMAP | K_GORILLA | K_REGULAR
;
propertyValue
: ID ->^(TOK_PROPERTY_VALUE ID)
| MINUS? INT ->^(TOK_PROPERTY_VALUE MINUS? INT)
| MINUS? realLiteral -> ^(TOK_PROPERTY_VALUE MINUS? realLiteral)
;
property
: name=ID OPERATOR_EQ value=propertyValue
-> ^(TOK_PROPERTY $name $value)
;
selectStatement
: K_SELECT K_INDEX func=ID
LR_BRACKET
p1=timeseriesPath COMMA p2=timeseriesPath COMMA n1=timeValue COMMA n2=timeValue COMMA epsilon=constant (COMMA alpha=constant COMMA beta=constant)?
RR_BRACKET
fromClause?
whereClause?
specialClause?
-> ^(TOK_QUERY ^(TOK_SELECT_INDEX $func $p1 $p2 $n1 $n2 $epsilon ($alpha $beta)?) fromClause? whereClause? specialClause?)
| K_SELECT selectElements
fromClause
whereClause?
specialClause?
-> ^(TOK_QUERY selectElements fromClause whereClause? specialClause?)
;
insertStatement
: K_INSERT K_INTO timeseriesPath insertColumnSpec K_VALUES insertValuesSpec
-> ^(TOK_INSERT timeseriesPath insertColumnSpec insertValuesSpec)
;
updateStatement
: K_UPDATE prefixPath setClause whereClause?
-> ^(TOK_UPDATE prefixPath setClause whereClause?)
;
deleteStatement
: K_DELETE K_FROM prefixPath (COMMA prefixPath)* (whereClause)?
-> ^(TOK_DELETE prefixPath+ whereClause?)
;
insertColumnSpec
: LR_BRACKET K_TIMESTAMP (COMMA nodeNameWithoutStar)* RR_BRACKET
-> ^(TOK_INSERT_COLUMNS TOK_TIME nodeNameWithoutStar*)
;
insertValuesSpec
: LR_BRACKET dateFormat (COMMA constant)* RR_BRACKET -> ^(TOK_INSERT_VALUES dateFormat constant*)
| LR_BRACKET INT (COMMA constant)* RR_BRACKET -> ^(TOK_INSERT_VALUES INT constant*)
;
selectElements
: functionCall (COMMA functionCall)* -> ^(TOK_SELECT functionCall+)
| suffixPath (COMMA suffixPath)* -> ^(TOK_SELECT suffixPath+)
;
functionCall
: ID LR_BRACKET suffixPath RR_BRACKET
-> ^(TOK_PATH ^(TOK_AGGREGATE suffixPath ID))
;
suffixPath
: nodeName (DOT nodeName)*
-> ^(TOK_PATH nodeName+)
;
nodeName
: ID
| INT
| STAR
| STRING_LITERAL
;
fromClause
: K_FROM prefixPath (COMMA prefixPath)*
-> ^(TOK_FROM prefixPath+)
;
prefixPath
: K_ROOT (DOT nodeName)*
-> ^(TOK_PATH ^(TOK_ROOT nodeName*))
;
whereClause
: K_WHERE expression
-> ^(TOK_WHERE expression)
;
expression
: orExpression
;
orExpression
: andExpression (options{greedy=true;}:(OPERATOR_OR^ andExpression))*
;
andExpression
: predicate (options{greedy=true;}:(OPERATOR_AND^ predicate))*
;
//predicate
// : (suffixPath | prefixPath) comparisonOperator^ constant
// | OPERATOR_NOT^ expression
// | LR_BRACKET expression RR_BRACKET -> expression
// ;
predicate
: (suffixPath | prefixPath) comparisonOperator^ constant
| OPERATOR_NOT^? LR_BRACKET! expression RR_BRACKET!
;
comparisonOperator
: OPERATOR_GT
| OPERATOR_GTE
| OPERATOR_LT
| OPERATOR_LTE
| OPERATOR_EQ
| OPERATOR_NEQ
;
constant
: dateExpr=dateExpression -> ^(TOK_DATE_EXPR $dateExpr)
| ID -> ^(TOK_CONSTANT ID)
| MINUS? realLiteral -> ^(TOK_CONSTANT MINUS? realLiteral)
| MINUS? INT -> ^(TOK_CONSTANT MINUS? INT)
| STRING_LITERAL -> ^(TOK_CONSTANT STRING_LITERAL)
;
realLiteral
: INT DOT (INT | EXPONENT)?
| DOT (INT|EXPONENT)
| EXPONENT
;
specialClause
: specialLimit
| groupByClause specialLimit?
| fillClause slimitClause? groupByDeviceClause?
;
specialLimit
: limitClause slimitClause? groupByDeviceClause?
| slimitClause limitClause? groupByDeviceClause?
| groupByDeviceClause
;
limitClause
: K_LIMIT INT offsetClause?
-> ^(TOK_LIMIT INT)
;
offsetClause
: K_OFFSET INT
;
slimitClause
: K_SLIMIT INT soffsetClause?
-> ^(TOK_SLIMIT INT) soffsetClause?
;
soffsetClause
: K_SOFFSET INT
-> ^(TOK_SOFFSET INT)
;
groupByDeviceClause
:
K_GROUP K_BY K_DEVICE
-> ^(TOK_GROUPBY_DEVICE)
;
dateFormat
: datetime=DATETIME -> ^(TOK_DATETIME $datetime)
| K_NOW LR_BRACKET RR_BRACKET -> ^(TOK_DATETIME K_NOW)
;
durationExpr
: duration=DURATION -> ^(TOK_DURATION $duration)
;
dateExpression
: dateFormat ((PLUS^ | MINUS^) durationExpr)*
;
groupByClause
: K_GROUP K_BY LR_BRACKET
durationExpr (COMMA timeValue)?
COMMA timeInterval (COMMA timeInterval)* RR_BRACKET
-> ^(TOK_GROUPBY durationExpr ^(TOK_TIMEORIGIN timeValue)? ^(TOK_TIMEINTERVAL timeInterval+))
;
timeValue
: dateFormat
| INT
;
timeInterval
: LS_BRACKET startTime=timeValue COMMA endTime=timeValue RS_BRACKET
-> ^(TOK_TIMEINTERVALPAIR $startTime $endTime)
;
fillClause
: K_FILL LR_BRACKET typeClause (COMMA typeClause)* RR_BRACKET
-> ^(TOK_FILL typeClause+)
;
typeClause
: dataType LS_BRACKET linearClause RS_BRACKET
-> ^(TOK_TYPE dataType linearClause)
| dataType LS_BRACKET previousClause RS_BRACKET
-> ^(TOK_TYPE dataType previousClause)
;
previousClause
: K_PREVIOUS (COMMA durationExpr)?
-> ^(TOK_PREVIOUS durationExpr?)
;
linearClause
: K_LINEAR (COMMA aheadDuration=durationExpr COMMA behindDuration=durationExpr)?
-> ^(TOK_LINEAR ($aheadDuration $behindDuration)?)
;
dataType
: K_INT32 | K_INT64 | K_FLOAT | K_DOUBLE | K_BOOLEAN | K_TEXT
;
setClause
: K_SET setCol (COMMA setCol)*
-> setCol+
;
setCol
: suffixPath OPERATOR_EQ constant
-> ^(TOK_VALUE suffixPath constant)
;
deleteTimeseries
: K_DELETE K_TIMESERIES prefixPath (COMMA prefixPath)*
-> ^(TOK_DELETE ^(TOK_TIMESERIES prefixPath+))
;
setStorageGroup
: K_SET K_STORAGE K_GROUP K_TO prefixPath
-> ^(TOK_SET ^(TOK_STORAGEGROUP prefixPath))
;
deleteStorageGroup
: K_DELETE K_STORAGE K_GROUP prefixPath (COMMA prefixPath)*
-> ^(TOK_DELETE ^(TOK_STORAGEGROUP prefixPath+))
;
createProperty
: K_CREATE K_PROPERTY ID
-> ^(TOK_CREATE ^(TOK_PROPERTY ID))
;
addLabel
: K_ADD K_LABEL label=ID K_TO K_PROPERTY propertyName=ID
-> ^(TOK_ADD ^(TOK_LABEL $label) ^(TOK_PROPERTY $propertyName))
;
deleteLabel
: K_DELETE K_LABEL label=ID K_FROM K_PROPERTY propertyName=ID
-> ^(TOK_DELETE ^(TOK_LABEL $label) ^(TOK_PROPERTY $propertyName))
;
linkPath
: K_LINK prefixPath K_TO propertyLabelPair
-> ^(TOK_LINK prefixPath propertyLabelPair)
;
propertyLabelPair
: propertyName=ID DOT labelName=ID
-> ^(TOK_LABEL $labelName) ^(TOK_PROPERTY $propertyName)
;
unlinkPath
:K_UNLINK prefixPath K_FROM propertyLabelPair
-> ^(TOK_UNLINK prefixPath propertyLabelPair)
;
showMetadata
: K_SHOW K_METADATA
-> ^(TOK_SHOW_METADATA)
;
describePath
: K_DESCRIBE prefixPath
-> ^(TOK_DESCRIBE prefixPath)
;
createIndex
: K_CREATE K_INDEX K_ON timeseriesPath K_USING function=ID indexWithClause? whereClause?
-> ^(TOK_CREATE ^(TOK_INDEX timeseriesPath ^(TOK_FUNCTION $function indexWithClause? whereClause?)))
;
indexWithClause
: K_WITH indexValue (COMMA indexValue)?
-> ^(TOK_WITH indexValue+)
;
indexValue
: ID OPERATOR_EQ INT
-> ^(TOK_INDEX_KV ID INT)
;
dropIndex
: K_DROP K_INDEX function=ID K_ON timeseriesPath
-> ^(TOK_DROP ^(TOK_INDEX timeseriesPath ^(TOK_FUNCTION $function)))
;
mergeStatement
: K_MERGE
-> ^(TOK_MERGE)
;
listStatement
: K_LIST K_USER -> ^(TOK_LIST TOK_USER)
| K_LIST K_ROLE -> ^(TOK_LIST TOK_ROLE)
| K_LIST K_PRIVILEGES K_USER username = ID K_ON prefixPath -> ^(TOK_LIST TOK_PRIVILEGES ^(TOK_USER $username) prefixPath)
| K_LIST K_PRIVILEGES K_ROLE roleName = ID K_ON prefixPath -> ^(TOK_LIST TOK_PRIVILEGES ^(TOK_ROLE $roleName) prefixPath)
| K_LIST K_USER K_PRIVILEGES username = ID -> ^(TOK_LIST TOK_PRIVILEGES TOK_ALL ^(TOK_USER $username))
| K_LIST K_ROLE K_PRIVILEGES roleName = ID -> ^(TOK_LIST TOK_PRIVILEGES TOK_ALL ^(TOK_ROLE $roleName))
| K_LIST K_ALL K_ROLE K_OF K_USER username = ID -> ^(TOK_LIST TOK_ROLE TOK_ALL ^(TOK_USER $username))
| K_LIST K_ALL K_USER K_OF K_ROLE roleName = ID -> ^(TOK_LIST TOK_USER TOK_ALL ^(TOK_ROLE $roleName))
;
createUser
: K_CREATE K_USER userName=ID password=STRING_LITERAL
-> ^(TOK_CREATE ^(TOK_USER $userName) ^(TOK_PASSWORD $password))
;
alterUser
: K_ALTER K_USER userName=ID K_SET K_PASSWORD password=STRING_LITERAL
-> ^(TOK_ALTER ^(TOK_ALTER_PSWD $userName $password))
;
dropUser
: K_DROP K_USER userName=ID
-> ^(TOK_DROP ^(TOK_USER $userName))
;
createRole
: K_CREATE K_ROLE roleName=ID
-> ^(TOK_CREATE ^(TOK_ROLE $roleName))
;
dropRole
: K_DROP K_ROLE roleName=ID
-> ^(TOK_DROP ^(TOK_ROLE $roleName))
;
grantUser
: K_GRANT K_USER userName = ID K_PRIVILEGES privileges K_ON prefixPath
-> ^(TOK_GRANT ^(TOK_USER $userName) privileges prefixPath)
;
privileges
: STRING_LITERAL (COMMA STRING_LITERAL)*
-> ^(TOK_PRIVILEGES STRING_LITERAL+)
;
grantRole
: K_GRANT K_ROLE roleName=ID K_PRIVILEGES privileges K_ON prefixPath
-> ^(TOK_GRANT ^(TOK_ROLE $roleName) privileges prefixPath)
;
revokeUser
: K_REVOKE K_USER userName = ID K_PRIVILEGES privileges K_ON prefixPath
-> ^(TOK_REVOKE ^(TOK_USER $userName) privileges prefixPath)
;
revokeRole
: K_REVOKE K_ROLE roleName = ID K_PRIVILEGES privileges K_ON prefixPath
-> ^(TOK_REVOKE ^(TOK_ROLE $roleName) privileges prefixPath)
;
grantRoleToUser
: K_GRANT roleName = ID K_TO userName = ID
-> ^(TOK_GRANT ^(TOK_ROLE $roleName) ^(TOK_USER $userName))
;
revokeRoleFromUser
: K_REVOKE roleName = ID K_FROM userName = ID
-> ^(TOK_REVOKE ^(TOK_ROLE $roleName) ^(TOK_USER $userName))
;
loadStatement
: K_LOAD K_TIMESERIES (fileName=STRING_LITERAL) ID (DOT ID)*
-> ^(TOK_LOAD $fileName ID+)
;
grantWatermarkEmbedding
: K_GRANT K_WATERMARK_EMBEDDING K_TO rootOrId (COMMA rootOrId)*
-> ^(TOK_GRANT_WATERMARK_EMBEDDING rootOrId+)
;
revokeWatermarkEmbedding
: K_REVOKE K_WATERMARK_EMBEDDING K_FROM rootOrId (COMMA rootOrId)*
-> ^(TOK_REVOKE_WATERMARK_EMBEDDING rootOrId+)
;
rootOrId
: K_ROOT
| ID
;
configurationStatement
: loadConfigurationStatement
;
loadConfigurationStatement
: K_LOAD K_CONFIGURATION
-> ^(TOK_LOAD_CONFIGURATION)
;
showStatement
: showFlushTaskInfo
| showDynamicParameter
;
showFlushTaskInfo
: K_SHOW K_FLUSH K_TASK K_INFO
-> ^(TOK_SHOW TOK_FLUSH_TASK_INFO)
;
showDynamicParameter
: K_SHOW K_DYNAMIC K_PARAMETER
-> ^(TOK_SHOW TOK_DYNAMIC_PARAMETER)
;
/*
****
*************
TTL
*************
****
*/
ttlStatement
: setTTLStatement
| unsetTTLStatement
| showTTLStatement
;
setTTLStatement
: K_SET K_TTL K_TO path=prefixPath time=INT
-> ^(TOK_TTL TOK_SET $path $time)
;
unsetTTLStatement
: K_UNSET K_TTL K_TO path=prefixPath
-> ^(TOK_TTL TOK_UNSET $path)
;
showTTLStatement
:
K_SHOW K_TTL K_ON prefixPath (COMMA prefixPath)*
-> ^(TOK_TTL TOK_SHOW prefixPath+)
|
K_SHOW K_ALL K_TTL
-> ^(TOK_TTL TOK_SHOW)
;

View File

@ -0,0 +1,830 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
grammar SqlBase;
singleStatement
: statement EOF
;
statement
: CREATE TIMESERIES timeseriesPath WITH attributeClauses #createTimeseries
| DELETE TIMESERIES prefixPath (COMMA prefixPath)* #deleteTimeseries
| INSERT INTO timeseriesPath insertColumnSpec VALUES insertValuesSpec #insertStatement
| UPDATE prefixPath setClause whereClause? #updateStatement
| DELETE FROM prefixPath (COMMA prefixPath)* (whereClause)? #deleteStatement
| SET STORAGE GROUP TO prefixPath #setStorageGroup
| DELETE STORAGE GROUP prefixPath (COMMA prefixPath)* #deleteStorageGroup
| CREATE PROPERTY ID #createProperty
| ADD LABEL label=ID TO PROPERTY propertyName=ID #addLabel
| DELETE LABEL label=ID FROM PROPERTY propertyName=ID #deleteLabel
| LINK prefixPath TO propertyLabelPair #linkPath
| UNLINK prefixPath FROM propertyLabelPair #unlinkPath
| SHOW METADATA #showMetadata // not supported yet
| DESCRIBE prefixPath #describePath // not supported yet
| CREATE INDEX ON timeseriesPath USING function=ID indexWithClause? whereClause? #createIndex // not supported yet
| DROP INDEX function=ID ON timeseriesPath #dropIndex // not supported yet
| MERGE #merge // not supported yet
| CREATE USER userName=ID password=STRING_LITERAL #createUser
| ALTER USER userName=ID SET PASSWORD password=STRING_LITERAL #alterUser
| DROP USER userName=ID #dropUser
| CREATE ROLE roleName=ID #createRole
| DROP ROLE roleName=ID #dropRole
| GRANT USER userName=ID PRIVILEGES privileges ON prefixPath #grantUser
| GRANT ROLE roleName=ID PRIVILEGES privileges ON prefixPath #grantRole
| REVOKE USER userName=ID PRIVILEGES privileges ON prefixPath #revokeUser
| REVOKE ROLE roleName=ID PRIVILEGES privileges ON prefixPath #revokeRole
| GRANT roleName=ID TO userName=ID #grantRoleToUser
| REVOKE roleName = ID FROM userName = ID #revokeRoleFromUser
| LOAD TIMESERIES (fileName=STRING_LITERAL) prefixPath #loadStatement
| GRANT WATERMARK_EMBEDDING TO rootOrId (COMMA rootOrId)* #grantWatermarkEmbedding
| REVOKE WATERMARK_EMBEDDING FROM rootOrId (COMMA rootOrId)* #revokeWatermarkEmbedding
| LIST USER #listUser
| LIST ROLE #listRole
| LIST PRIVILEGES USER username=ID ON prefixPath #listPrivilegesUser
| LIST PRIVILEGES ROLE roleName=ID ON prefixPath #listPrivilegesRole
| LIST USER PRIVILEGES username = ID #listUserPrivileges
| LIST ROLE PRIVILEGES roleName = ID #listRolePrivileges
| LIST ALL ROLE OF USER username = ID #listAllRoleOfUser
| LIST ALL USER OF ROLE roleName = ID #listAllUserOfRole
| SET TTL TO path=prefixPath time=INT #setTTLStatement
| UNSET TTL TO path=prefixPath #unsetTTLStatement
| SHOW TTL ON prefixPath (COMMA prefixPath)* #showTTLStatement
| SHOW ALL TTL #showAllTTLStatement
| SHOW FLUSH TASK INFO #showFlushTaskInfo
| SHOW DYNAMIC PARAMETER #showDynamicParameter
| LOAD CONFIGURATION #loadConfigurationStatement
| SELECT INDEX func=ID // not supported yet
LR_BRACKET
p1=timeseriesPath COMMA p2=timeseriesPath COMMA n1=timeValue COMMA n2=timeValue COMMA
epsilon=constant (COMMA alpha=constant COMMA beta=constant)?
RR_BRACKET
fromClause
whereClause?
specialClause? #selectIndexStatement
| SELECT selectElements
fromClause
whereClause?
specialClause? #selectStatement
;
selectElements
: functionCall (COMMA functionCall)* #functionElement
| suffixPath (COMMA suffixPath)* #selectElement
;
functionCall
: ID LR_BRACKET suffixPath RR_BRACKET
;
attributeClauses
: DATATYPE OPERATOR_EQ dataType COMMA ENCODING OPERATOR_EQ encoding (COMMA COMPRESSOR OPERATOR_EQ compressor=propertyValue)? (COMMA property)*
;
setClause
: SET setCol (COMMA setCol)*
;
whereClause
: WHERE orExpression
;
orExpression
: andExpression (OPERATOR_OR andExpression)*
;
andExpression
: predicate (OPERATOR_AND predicate)*
;
predicate
: (suffixPath | prefixPath) comparisonOperator constant
| OPERATOR_NOT? LR_BRACKET orExpression RR_BRACKET
;
fromClause
: FROM prefixPath (COMMA prefixPath)*
;
specialClause
: specialLimit
| groupByClause specialLimit?
| fillClause slimitClause? groupByDeviceClause?
;
specialLimit
: limitClause slimitClause? groupByDeviceClause?
| slimitClause limitClause? groupByDeviceClause?
| groupByDeviceClause
;
limitClause
: LIMIT INT offsetClause?
;
offsetClause
: OFFSET INT
;
slimitClause
: SLIMIT INT soffsetClause?
;
soffsetClause
: SOFFSET INT
;
groupByDeviceClause
:
GROUP BY DEVICE
;
fillClause
: FILL LR_BRACKET typeClause (COMMA typeClause)* RR_BRACKET
;
groupByClause
: GROUP BY LR_BRACKET
DURATION (COMMA timeValue)?
COMMA timeInterval (COMMA timeInterval)* RR_BRACKET
;
typeClause
: dataType LS_BRACKET linearClause RS_BRACKET
| dataType LS_BRACKET previousClause RS_BRACKET
;
linearClause
: LINEAR (COMMA aheadDuration=DURATION COMMA behindDuration=DURATION)?
;
previousClause
: PREVIOUS (COMMA DURATION)?
;
indexWithClause
: WITH indexValue (COMMA indexValue)?
;
indexValue
: ID OPERATOR_EQ INT
;
comparisonOperator
: type = OPERATOR_GT
| type = OPERATOR_GTE
| type = OPERATOR_LT
| type = OPERATOR_LTE
| type = OPERATOR_EQ
| type = OPERATOR_NEQ
;
insertColumnSpec
: LR_BRACKET TIMESTAMP (COMMA nodeNameWithoutStar)* RR_BRACKET
;
insertValuesSpec
: LR_BRACKET dateFormat (COMMA constant)* RR_BRACKET
| LR_BRACKET INT (COMMA constant)* RR_BRACKET
;
setCol
: suffixPath OPERATOR_EQ constant
;
privileges
: STRING_LITERAL (COMMA STRING_LITERAL)*
;
rootOrId
: ROOT
| ID
;
timeInterval
: LS_BRACKET startTime=timeValue COMMA endTime=timeValue RS_BRACKET
;
timeValue
: dateFormat
| INT
;
propertyValue
: ID
| MINUS? INT
| MINUS? realLiteral
;
propertyLabelPair
: propertyName=ID DOT labelName=ID
;
timeseriesPath
: ROOT (DOT nodeNameWithoutStar)*
;
prefixPath
: ROOT (DOT nodeName)*
;
suffixPath
: nodeName (DOT nodeName)*
;
nodeName
: ID
| INT
| STAR
| STRING_LITERAL
;
nodeNameWithoutStar
: INT
| ID
| STRING_LITERAL
;
dataType
: INT32 | INT64 | FLOAT | DOUBLE | BOOLEAN | TEXT
;
dateFormat
: DATETIME
| NOW LR_BRACKET RR_BRACKET
;
constant
: dateExpression
| ID
| MINUS? realLiteral
| MINUS? INT
| STRING_LITERAL
;
dateExpression
: dateFormat ((PLUS | MINUS) DURATION)*
;
encoding
: PLAIN | PLAIN_DICTIONARY | RLE | DIFF | TS_2DIFF | GORILLA | REGULAR
;
realLiteral
: INT DOT (INT | EXPONENT)?
| DOT (INT|EXPONENT)
| EXPONENT
;
property
: name=ID OPERATOR_EQ value=propertyValue
;
//============================
// Start of the keywords list
//============================
CREATE
: C R E A T E
;
INSERT
: I N S E R T
;
UPDATE
: U P D A T E
;
DELETE
: D E L E T E
;
SELECT
: S E L E C T
;
SHOW
: S H O W
;
GRANT
: G R A N T
;
INTO
: I N T O
;
SET
: S E T
;
WHERE
: W H E R E
;
FROM
: F R O M
;
TO
: T O
;
BY
: B Y
;
DEVICE
: D E V I C E
;
CONFIGURATION
: C O N F I G U R A T I O N
;
DESCRIBE
: D E S C R I B E
;
SLIMIT
: S L I M I T
;
LIMIT
: L I M I T
;
UNLINK
: U N L I N K
;
OFFSET
: O F F S E T
;
SOFFSET
: S O F F S E T
;
FILL
: F I L L
;
LINEAR
: L I N E A R
;
PREVIOUS
: P R E V I O U S
;
METADATA
: M E T A D A T A
;
TIMESERIES
: T I M E S E R I E S
;
TIMESTAMP
: T I M E S T A M P
;
PROPERTY
: P R O P E R T Y
;
WITH
: W I T H
;
ROOT
: R O O T
;
DATATYPE
: D A T A T Y P E
;
COMPRESSOR
: C O M P R E S S O R
;
STORAGE
: S T O R A G E
;
GROUP
: G R O U P
;
LABEL
: L A B E L
;
INT32
: I N T '3' '2'
;
INT64
: I N T '6' '4'
;
FLOAT
: F L O A T
;
DOUBLE
: D O U B L E
;
BOOLEAN
: B O O L E A N
;
TEXT
: T E X T
;
ENCODING
: E N C O D I N G
;
PLAIN
: P L A I N
;
PLAIN_DICTIONARY
: P L A I N '_' D I C T I O N A R Y
;
RLE
: R L E
;
DIFF
: D I F F
;
TS_2DIFF
: T S '_' '2' D I F F
;
GORILLA
: G O R I L L A
;
REGULAR
: R E G U L A R
;
BITMAP
: B I T M A P
;
ADD
: A D D
;
VALUES
: V A L U E S
;
NOW
: N O W
;
LINK
: L I N K
;
INDEX
: I N D E X
;
USING
: U S I N G
;
ON
: O N
;
DROP
: D R O P
;
MERGE
: M E R G E
;
LIST
: L I S T
;
USER
: U S E R
;
PRIVILEGES
: P R I V I L E G E S
;
ROLE
: R O L E
;
ALL
: A L L
;
OF
: O F
;
ALTER
: A L T E R
;
PASSWORD
: P A S S W O R D
;
REVOKE
: R E V O K E
;
LOAD
: L O A D
;
WATERMARK_EMBEDDING
: W A T E R M A R K '_' E M B E D D I N G
;
UNSET
: U N S E T
;
TTL
: T T L
;
FLUSH
: F L U S H
;
TASK
: T A S K
;
INFO
: I N F O
;
DYNAMIC
: D Y N A M I C
;
PARAMETER
: P A R A M E T E R
;
//============================
// End of the keywords list
//============================
COMMA : ',';
STAR : '*';
OPERATOR_EQ : '=' | '==';
OPERATOR_GT : '>';
OPERATOR_GTE : '>=';
OPERATOR_LT : '<';
OPERATOR_LTE : '<=';
OPERATOR_NEQ : '!=' | '<>';
OPERATOR_AND
: A N D
| '&'
| '&&'
;
OPERATOR_OR
: O R
| '|'
| '||'
;
OPERATOR_NOT
: N O T | '!'
;
MINUS : '-';
PLUS : '+';
DOT : '.';
LR_BRACKET : '(';
RR_BRACKET : ')';
LS_BRACKET : '[';
RS_BRACKET : ']';
L_BRACKET : '{';
R_BRACKET : '}';
STRING_LITERAL
: DOUBLE_QUOTE_STRING_LITERAL
| SINGLE_QUOTE_STRING_LITERAL
;
INT : [0-9]+;
EXPONENT : INT ('e'|'E') ('+'|'-')? INT ;
DURATION
:
(INT+ (Y|M O|W|D|H|M|S|M S|U S|N S))+
;
DATETIME
: INT ('-'|'/') INT ('-'|'/') INT
(T | WS)
INT ':' INT ':' INT (DOT INT)?
(('+' | '-') INT ':' INT)?
;
/** Allow unicode rule/token names */
ID : NameStartChar NameChar*;
fragment
NameChar
: NameStartChar
| '0'..'9'
| '_'
| '\u00B7'
| '\u0300'..'\u036F'
| '\u203F'..'\u2040'
;
fragment
NameStartChar
: 'A'..'Z'
| 'a'..'z'
| '\u00C0'..'\u00D6'
| '\u00D8'..'\u00F6'
| '\u00F8'..'\u02FF'
| '\u0370'..'\u037D'
| '\u037F'..'\u1FFF'
| '\u200C'..'\u200D'
| '\u2070'..'\u218F'
| '\u2C00'..'\u2FEF'
| '\u3001'..'\uD7FF'
| '\uF900'..'\uFDCF'
| '\uFDF0'..'\uFFFD'
; // ignores | ['\u10000-'\uEFFFF] ;
fragment DOUBLE_QUOTE_STRING_LITERAL
: '"' ('\\' . | ~'"' )*? '"'
;
fragment SINGLE_QUOTE_STRING_LITERAL
: '\'' ('\\' . | ~'\'' )*? '\''
;
// Letter fragments, written this way so keywords stay case-insensitive
fragment A
: 'a' | 'A'
;
fragment B
: 'b' | 'B'
;
fragment C
: 'c' | 'C'
;
fragment D
: 'd' | 'D'
;
fragment E
: 'e' | 'E'
;
fragment F
: 'f' | 'F'
;
fragment G
: 'g' | 'G'
;
fragment H
: 'h' | 'H'
;
fragment I
: 'i' | 'I'
;
fragment J
: 'j' | 'J'
;
fragment K
: 'k' | 'K'
;
fragment L
: 'l' | 'L'
;
fragment M
: 'm' | 'M'
;
fragment N
: 'n' | 'N'
;
fragment O
: 'o' | 'O'
;
fragment P
: 'p' | 'P'
;
fragment Q
: 'q' | 'Q'
;
fragment R
: 'r' | 'R'
;
fragment S
: 's' | 'S'
;
fragment T
: 't' | 'T'
;
fragment U
: 'u' | 'U'
;
fragment V
: 'v' | 'V'
;
fragment W
: 'w' | 'W'
;
fragment X
: 'x' | 'X'
;
fragment Y
: 'y' | 'Y'
;
fragment Z
: 'z' | 'Z'
;
WS
: [ \r\n\t]+ -> channel(HIDDEN)
;
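Since the new file is declared as "grammar SqlBase;", the antlr4 goal generates SqlBaseLexer, SqlBaseParser, SqlBaseListener and SqlBaseBaseListener in the same org.apache.iotdb.db.qp.strategy package that SQLConstant imports from, and every labeled alternative (for example #createTimeseries) gets its own context class plus enter/exit callbacks. The sketch below is a hypothetical, minimal listener that only prints the created path; it follows ANTLR's generation conventions and is not the LogicalGenerator that this commit actually wires in:

package org.apache.iotdb.db.qp.strategy;

import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTreeWalker;

// Hypothetical example class, not part of this commit.
public class TimeseriesPathPrinter extends SqlBaseBaseListener {

  // The "#createTimeseries" label on the first statement alternative yields
  // CreateTimeseriesContext and this enter callback.
  @Override
  public void enterCreateTimeseries(SqlBaseParser.CreateTimeseriesContext ctx) {
    System.out.println("creating timeseries " + ctx.timeseriesPath().getText());
  }

  public static void main(String[] args) {
    SqlBaseLexer lexer = new SqlBaseLexer(CharStreams.fromString(
        "CREATE TIMESERIES root.sg1.d1.s1 WITH DATATYPE=INT32, ENCODING=RLE"));
    SqlBaseParser parser = new SqlBaseParser(new CommonTokenStream(lexer));
    new ParseTreeWalker().walk(new TimeseriesPathPrinter(), parser.singleStatement());
  }
}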

View File

@ -1,68 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.exception.query;
import org.apache.iotdb.rpc.TSStatusCode;
/**
* This exception is thrown while meeting error in parsing ast tree to generate logical operator.
*/
public class IllegalASTFormatException extends QueryProcessException {
private static final long serialVersionUID = -8987915911329315588L;
/**
* Thrown for detailed SQL statement
*
* @param sqlStatement SQL statement
* @param message detailed error message
*/
public IllegalASTFormatException(String sqlStatement, String message) {
super(String.format(
"Parsing error, statement [%s] failed when parsing AST tree to generate logical operator. Detailed information: [%s]",
sqlStatement, message));
errorCode = TSStatusCode.AST_FORMAT_ERROR.getStatusCode();
}
/**
* Thrown for auth command, for example "grant author" or "update password"
*
* @param authCommand auth command
*/
public IllegalASTFormatException(String authCommand) {
super(String.format(
"Parsing error, [%s] command failed when parsing AST tree to generate logical operator. Please check you SQL statement",
authCommand));
errorCode = TSStatusCode.AST_FORMAT_ERROR.getStatusCode();
}
/**
* Thrown for other commands, for example "data load"
*
* @param command command type
* @param message exception message
* @param detailedMessage exception detailed message
*/
public IllegalASTFormatException(String command, String message, String detailedMessage) {
super(String.format(
"Parsing error, [%s] command failed when parsing AST tree to generate logical operator. Detailed information: [%s]",
command, message + detailedMessage));
errorCode = TSStatusCode.AST_FORMAT_ERROR.getStatusCode();
}
}

View File

@ -16,27 +16,18 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.sql.parse;
package org.apache.iotdb.db.exception.runtime;
import java.util.List;
public class SQLParserException extends RuntimeException{
private static final long serialVersionUID = 3249707655860110299L;
public SQLParserException() {
super("Error format in SQL statement, please check whether SQL statement is correct.");
}
public SQLParserException(String message) {
super(message);
}
/**
* This interface defines the functions needed by the walkers and dispatchers. These are implemented by the node of the
* graph that needs to be walked.
*/
public interface Node {
/**
* Gets the vector of children nodes. This is used in the graph walker algorithms.
*
* @return List<? extends Node>
*/
List<Node> getChildren();
/**
* Gets the name of the node. This is used in the rule dispatchers.
*
* @return String
*/
String getName();
public SQLParserException(String type, String message) {
super(String.format("Unsupported type: [%s]. " + message, type));
}
}

View File

@ -22,36 +22,31 @@ import java.time.ZoneId;
import org.apache.iotdb.db.conf.IoTDBConfig;
import org.apache.iotdb.db.conf.IoTDBDescriptor;
import org.apache.iotdb.db.exception.metadata.MetadataException;
import org.apache.iotdb.db.exception.query.IllegalASTFormatException;
import org.apache.iotdb.db.exception.query.LogicalOperatorException;
import org.apache.iotdb.db.exception.query.LogicalOptimizeException;
import org.apache.iotdb.db.exception.query.QueryProcessException;
import org.apache.iotdb.db.qp.executor.IQueryProcessExecutor;
import org.apache.iotdb.db.qp.logical.Operator;
import org.apache.iotdb.db.qp.logical.RootOperator;
import org.apache.iotdb.db.qp.logical.crud.FilterOperator;
import org.apache.iotdb.db.qp.logical.crud.SFWOperator;
import org.apache.iotdb.db.qp.physical.PhysicalPlan;
import org.apache.iotdb.db.qp.strategy.LogicalGenerator;
import org.apache.iotdb.db.qp.strategy.ParseDriver;
import org.apache.iotdb.db.qp.strategy.PhysicalGenerator;
import org.apache.iotdb.db.qp.strategy.optimizer.ConcatPathOptimizer;
import org.apache.iotdb.db.qp.strategy.optimizer.DnfFilterOptimizer;
import org.apache.iotdb.db.qp.strategy.optimizer.MergeSingleFilterOptimizer;
import org.apache.iotdb.db.qp.strategy.optimizer.RemoveNotOptimizer;
import org.apache.iotdb.db.sql.ParseGenerator;
import org.apache.iotdb.db.sql.parse.AstNode;
import org.apache.iotdb.db.sql.parse.ParseException;
import org.apache.iotdb.db.sql.parse.ParseUtils;
/**
* Provides an integration method for other users.
*/
public class QueryProcessor {
private IQueryProcessExecutor executor;
private ParseDriver parseDriver;
public QueryProcessor(IQueryProcessExecutor executor) {
this.executor = executor;
this.parseDriver = new ParseDriver();
}
public IQueryProcessExecutor getExecutor() {
@ -65,44 +60,13 @@ public class QueryProcessor {
}
public PhysicalPlan parseSQLToPhysicalPlan(String sqlStr, ZoneId zoneId)
throws MetadataException, QueryProcessException {
AstNode astNode = parseSQLToAST(sqlStr);
Operator operator = parseASTToOperator(astNode, zoneId);
throws QueryProcessException {
Operator operator = parseDriver.parse(sqlStr, zoneId);
operator = logicalOptimize(operator, executor);
PhysicalGenerator physicalGenerator = new PhysicalGenerator(executor);
return physicalGenerator.transformToPhysicalPlan(operator);
}
/**
* Convert ast tree to Operator which type maybe {@code SFWOperator} or {@code AuthorOperator}
*
* @param astNode - input ast tree
* @return - RootOperator has four subclass:Query/Insert/Delete/Update/Author
* @throws IllegalASTFormatException exception in converting sql to operator
*/
private RootOperator parseASTToOperator(AstNode astNode, ZoneId zoneId)
throws QueryProcessException, MetadataException {
LogicalGenerator generator = new LogicalGenerator(zoneId);
return generator.getLogicalPlan(astNode);
}
/**
* Given a SQL statement and generate an ast tree
*
* @param sqlStr input sql command
* @return ast tree
* @throws IllegalASTFormatException exception in sql parsing
*/
private AstNode parseSQLToAST(String sqlStr) throws IllegalASTFormatException {
AstNode astTree;
// parse string to ASTTree
try {
astTree = ParseGenerator.generateAST(sqlStr);
} catch (ParseException e) {
throw new IllegalASTFormatException(sqlStr, e.getMessage());
}
return ParseUtils.findRootNonNullToken(astTree);
}
/**
* given an unoptimized logical operator tree and return a optimized result.

View File

@ -20,6 +20,7 @@ package org.apache.iotdb.db.qp.constant;
import java.util.HashMap;
import java.util.Map;
import org.apache.iotdb.db.qp.strategy.SqlBaseLexer;
import org.apache.iotdb.tsfile.read.common.Path;
/**
@ -36,7 +37,7 @@ public class SQLConstant {
public static final String RESERVED_TIME = "time";
public static final String RESERVED_FREQ = "freq";
public static final String IS_AGGREGATION = "IS_AGGREGATION";
public static final String NOW_FUNC = "now";
public static final String NOW_FUNC = "now()";
public static final String START_TIME_STR = "1970-1-01T00:00:00";
public static final String LINE_FEED_SIGNAL = "\n";
@ -67,13 +68,13 @@ public class SQLConstant {
public static final int KW_OR = 2;
public static final int KW_NOT = 3;
public static final int EQUAL = 11;
public static final int NOTEQUAL = 12;
public static final int LESSTHANOREQUALTO = 13;
public static final int LESSTHAN = 14;
public static final int GREATERTHANOREQUALTO = 15;
public static final int GREATERTHAN = 16;
public static final int EQUAL_NS = 17;
public static final int EQUAL = SqlBaseLexer.OPERATOR_EQ;
public static final int NOTEQUAL = SqlBaseLexer.OPERATOR_NEQ;
public static final int LESSTHANOREQUALTO = SqlBaseLexer.OPERATOR_LTE;
public static final int LESSTHAN = SqlBaseLexer.OPERATOR_LT;
public static final int GREATERTHANOREQUALTO = SqlBaseLexer.OPERATOR_GTE;
public static final int GREATERTHAN = SqlBaseLexer.OPERATOR_GT;
public static final int EQUAL_NS = SqlBaseLexer.OPERATOR_NEQ;
public static final int TOK_SELECT = 21;
public static final int TOK_FROM = 22;
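Pinning EQUAL, NOTEQUAL and the other comparison constants to the generated lexer's token types is what makes the old TqlParserConstant translation table (removed below) unnecessary: a tree listener can hand the raw token type of the labeled "type" token straight to the operator layer. A hypothetical fragment, assuming the SqlBaseParser generated from the grammar above:

import org.apache.iotdb.db.qp.constant.SQLConstant;
import org.apache.iotdb.db.qp.strategy.SqlBaseParser;

class ComparisonTokenSketch {
  // "type" is the token label declared on each alternative of the comparisonOperator
  // rule, so ANTLR exposes it as a public Token field on the context class.
  static int toSqlConstant(SqlBaseParser.ComparisonOperatorContext ctx) {
    // SQLConstant.EQUAL, GREATERTHAN, ... are now aliases of these token types,
    // so no extra mapping step is required.
    return ctx.type.getType();
  }
}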

View File

@ -1,71 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.qp.constant;
import java.util.HashMap;
import java.util.Map;
import org.apache.iotdb.db.sql.parse.TqlParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TqlParserConstant {
private static final Logger logger = LoggerFactory.getLogger(TqlParserConstant.class);
private TqlParserConstant() {
// forbidding instantiation
}
private static Map<Integer, Integer> antlrQpMap = new HashMap<>();
// used to get operator type when construct operator from AST Tree
static {
antlrQpMap.put(TqlParser.OPERATOR_AND, SQLConstant.KW_AND);
antlrQpMap.put(TqlParser.OPERATOR_OR, SQLConstant.KW_OR);
antlrQpMap.put(TqlParser.OPERATOR_NOT, SQLConstant.KW_NOT);
antlrQpMap.put(TqlParser.OPERATOR_EQ, SQLConstant.EQUAL);
antlrQpMap.put(TqlParser.OPERATOR_NEQ, SQLConstant.NOTEQUAL);
antlrQpMap.put(TqlParser.OPERATOR_LTE, SQLConstant.LESSTHANOREQUALTO);
antlrQpMap.put(TqlParser.OPERATOR_LT, SQLConstant.LESSTHAN);
antlrQpMap.put(TqlParser.OPERATOR_GTE, SQLConstant.GREATERTHANOREQUALTO);
antlrQpMap.put(TqlParser.OPERATOR_GT, SQLConstant.GREATERTHAN);
// antlrQpMap.put(TqlParser.EQUAL_NS, SQLConstant.EQUAL_NS);
antlrQpMap.put(TqlParser.TOK_SELECT, SQLConstant.TOK_SELECT);
antlrQpMap.put(TqlParser.TOK_FROM, SQLConstant.TOK_FROM);
antlrQpMap.put(TqlParser.TOK_WHERE, SQLConstant.TOK_WHERE);
antlrQpMap.put(TqlParser.TOK_QUERY, SQLConstant.TOK_QUERY);
}
/**
* return map value corresponding to key,when not contain the param,print it.
*
* @param antlrIntType -param to judge whether antlrQpMap has key
* @return -map value corresponding to the param
*/
public static int getTSTokenIntType(int antlrIntType) {
if (!antlrQpMap.containsKey(antlrIntType)) {
logger.error("No such TSToken: {}", antlrIntType);
}
return antlrQpMap.get(antlrIntType);
}
}

View File

@ -21,6 +21,7 @@ package org.apache.iotdb.db.qp.logical.crud;
import java.util.Objects;
import org.apache.iotdb.db.exception.path.PathException;
import org.apache.iotdb.db.exception.query.LogicalOperatorException;
import org.apache.iotdb.db.exception.runtime.SQLParserException;
import org.apache.iotdb.db.qp.constant.SQLConstant;
import org.apache.iotdb.db.qp.executor.IQueryProcessExecutor;
import org.apache.iotdb.db.qp.logical.Operator;
@ -52,7 +53,7 @@ public class BasicFunctionOperator extends FunctionOperator {
* @throws LogicalOperatorException Logical Operator Exception
*/
public BasicFunctionOperator(int tokenIntType, Path path, String value)
throws LogicalOperatorException {
throws SQLParserException {
super(tokenIntType);
operatorType = Operator.OperatorType.BASIC_FUNC;
funcToken = BasicOperatorType.getBasicOpBySymbol(tokenIntType);
@ -150,7 +151,7 @@ public class BasicFunctionOperator extends FunctionOperator {
BasicFunctionOperator ret;
try {
ret = new BasicFunctionOperator(this.tokenIntType, path.clone(), value);
} catch (LogicalOperatorException e) {
} catch (SQLParserException e) {
logger.error("error clone:", e);
return null;
}

View File

@ -19,6 +19,7 @@
package org.apache.iotdb.db.qp.logical.crud;
import org.apache.iotdb.db.exception.query.LogicalOperatorException;
import org.apache.iotdb.db.exception.runtime.SQLParserException;
import org.apache.iotdb.db.qp.constant.SQLConstant;
import org.apache.iotdb.tsfile.read.common.Path;
import org.apache.iotdb.tsfile.read.expression.IUnaryExpression;
@ -161,7 +162,7 @@ public enum BasicOperatorType {
* @throws LogicalOperatorException Logical Operator Exception
*/
public static BasicOperatorType getBasicOpBySymbol(int tokenIntType)
throws LogicalOperatorException {
throws SQLParserException {
switch (tokenIntType) {
case SQLConstant.EQUAL:
return EQ;
@ -176,7 +177,7 @@ public enum BasicOperatorType {
case SQLConstant.NOTEQUAL:
return NOTEQUAL;
default:
throw new LogicalOperatorException(
throw new SQLParserException(
"unsupported type:{}" + SQLConstant.tokenNames.get(tokenIntType));
}
}

View File

@ -0,0 +1,72 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.qp.strategy;
import java.time.ZoneId;
import org.antlr.v4.runtime.BailErrorStrategy;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.atn.PredictionMode;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.apache.iotdb.db.conf.IoTDBDescriptor;
import org.apache.iotdb.db.qp.logical.Operator;
/**
* ParseDriver runs the ANTLR4-generated SqlBaseLexer and SqlBaseParser over a SQL string and
* walks the resulting parse tree with LogicalGenerator to build a logical Operator. Parsing is
* attempted first in the fast SLL prediction mode and retried with full LL prediction on failure.
*/
public class ParseDriver {
private LogicalGenerator logicalGenerator;
private ParseTreeWalker walker;
public ParseDriver() {
this(IoTDBDescriptor.getInstance().getConfig().getZoneID());
}
private ParseDriver(ZoneId zoneId) {
walker = new ParseTreeWalker();
logicalGenerator = new LogicalGenerator(zoneId);
}
public Operator parse(String sql, ZoneId zoneId) {
logicalGenerator.setZoneId(zoneId);
CharStream charStream = CharStreams.fromString(sql);
SqlBaseLexer lexer1 = new SqlBaseLexer(charStream);
CommonTokenStream tokens1 = new CommonTokenStream(lexer1);
SqlBaseParser parser = new SqlBaseParser(tokens1);
parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
parser.setErrorHandler(new BailErrorStrategy());
ParseTree tree;
try {
tree = parser.singleStatement(); // STAGE 1
}
catch (Exception ex) {
// The SLL pass has already consumed charStream, so rebuild the input before retrying.
SqlBaseLexer lexer2 = new SqlBaseLexer(CharStreams.fromString(sql));
CommonTokenStream tokens2 = new CommonTokenStream(lexer2);
SqlBaseParser parserLL = new SqlBaseParser(tokens2);
parserLL.getInterpreter().setPredictionMode(PredictionMode.LL);
tree = parserLL.singleStatement(); // STAGE 2: full LL prediction
// if we parse ok, it's LL not SLL
}
walker.walk(logicalGenerator, tree);
return logicalGenerator.getLogicalPlan();
}
}
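ParseDriver follows the usual ANTLR 4 two-stage idiom: attempt the statement with the fast SLL prediction mode and a bail-out error strategy, and only re-parse with full LL prediction when that attempt throws. A hypothetical direct use of the driver (inside the server this call is made by QueryProcessor; the SQL text is illustrative):

import java.time.ZoneId;
import org.apache.iotdb.db.qp.logical.Operator;
import org.apache.iotdb.db.qp.strategy.ParseDriver;

class ParseDriverSketch {
  public static void main(String[] args) {
    ParseDriver driver = new ParseDriver();
    // Returns the logical Operator built by LogicalGenerator while walking the parse tree.
    Operator operator = driver.parse("SET STORAGE GROUP TO root.sg1", ZoneId.of("+08:00"));
    System.out.println(operator.getClass().getSimpleName());
  }
}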

View File

@ -24,6 +24,7 @@ import java.util.List;
import org.apache.iotdb.db.exception.metadata.MetadataException;
import org.apache.iotdb.db.exception.query.LogicalOperatorException;
import org.apache.iotdb.db.exception.query.LogicalOptimizeException;
import org.apache.iotdb.db.exception.runtime.SQLParserException;
import org.apache.iotdb.db.qp.constant.SQLConstant;
import org.apache.iotdb.db.qp.executor.IQueryProcessExecutor;
import org.apache.iotdb.db.qp.logical.Operator;
@ -256,7 +257,7 @@ public class ConcatPathOptimizer implements ILogicalOptimizer {
currentNode.addChildOperator(
new BasicFunctionOperator(operator.getTokenIntType(), noStarPaths.get(i),
((BasicFunctionOperator) operator).getValue()));
} catch (LogicalOperatorException e) {
} catch (SQLParserException e) {
throw new LogicalOptimizeException(e.getMessage());
}
}

View File

@ -55,6 +55,7 @@ import org.apache.iotdb.db.exception.StorageEngineException;
import org.apache.iotdb.db.exception.metadata.MetadataException;
import org.apache.iotdb.db.exception.path.PathException;
import org.apache.iotdb.db.exception.query.QueryProcessException;
import org.apache.iotdb.db.exception.runtime.SQLParserException;
import org.apache.iotdb.db.exception.storageGroup.StorageGroupException;
import org.apache.iotdb.db.metadata.MManager;
import org.apache.iotdb.db.metrics.server.SqlArgument;
@ -567,7 +568,7 @@ public class TSServiceImpl implements TSIService.Iface, ServerContext {
batchErrorMessage.append(resp.getStatus().getStatusType().getCode()).append("\n");
return false;
}
} catch (MetadataException e) {
} catch (SQLParserException e) {
logger.error("Error occurred when executing {}, check metadata error: ", statement, e);
result.add(Statement.EXECUTE_FAILED);
batchErrorMessage.append(TSStatusCode.METADATA_ERROR.getStatusCode()).append("\n");
@ -622,7 +623,7 @@ public class TSServiceImpl implements TSIService.Iface, ServerContext {
} else {
return executeUpdateStatement(physicalPlan);
}
} catch (MetadataException e) {
} catch (SQLParserException e) {
logger.error("check metadata error: ", e);
return getTSExecuteStatementResp(getStatus(TSStatusCode.METADATA_ERROR,
"Check metadata error: " + e.getMessage()));
@ -687,7 +688,7 @@ public class TSServiceImpl implements TSIService.Iface, ServerContext {
PhysicalPlan physicalPlan;
try {
physicalPlan = processor.parseSQLToPhysicalPlan(statement, zoneIds.get());
} catch (QueryProcessException | MetadataException e) {
} catch (QueryProcessException | SQLParserException e) {
logger.info("meet error while parsing SQL to physical plan: {}", e.getMessage());
return getTSExecuteStatementResp(getStatus(TSStatusCode.SQL_PARSE_ERROR, e.getMessage()));
}
@ -1031,7 +1032,7 @@ public class TSServiceImpl implements TSIService.Iface, ServerContext {
PhysicalPlan physicalPlan;
try {
physicalPlan = processor.parseSQLToPhysicalPlan(statement, zoneIds.get());
} catch (QueryProcessException | MetadataException e) {
} catch (QueryProcessException | SQLParserException e) {
logger.info("meet error while parsing SQL to physical plan: {}", e.getMessage());
return getTSExecuteStatementResp(getStatus(TSStatusCode.SQL_PARSE_ERROR, e.getMessage()));
}

View File

@ -1,43 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.sql;
import org.apache.iotdb.db.sql.parse.AstNode;
import org.apache.iotdb.db.sql.parse.ParseDriver;
import org.apache.iotdb.db.sql.parse.ParseException;
/**
* ParseContextGenerator is a class that offers methods to generate AstNode Tree
*
*/
public final class ParseGenerator {
private ParseGenerator() {
}
/**
* Parse the input {@link String} command and generate an AstNode Tree.
*/
public static AstNode generateAST(String command) throws ParseException {
ParseDriver pd = new ParseDriver();
return pd.parse(command);
}
}
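
Note: the deleted ANTLR 3 entry point above collapses into a few ANTLR 4 runtime calls. A rough sketch, assuming the generated TqlLexer/TqlParser expose a top-level statement rule as the old grammar did:

import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.tree.ParseTree;

// Sketch only: TqlLexer/TqlParser stand for the ANTLR 4 generated classes of the new grammar.
public final class Antlr4ParseSketch {
  private Antlr4ParseSketch() {
  }

  public static ParseTree parse(String sql) {
    TqlLexer lexer = new TqlLexer(CharStreams.fromString(sql));
    TqlParser parser = new TqlParser(new CommonTokenStream(lexer));
    return parser.statement(); // concrete rule context, no AstNode adaptor needed
  }
}

The new org.apache.iotdb.db.qp.strategy.ParseDriver used by the updated tests later in this commit wraps a call like this and returns a RootOperator directly, e.g. parseDriver.parse(sql, zoneId).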

View File

@ -1,58 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.sql.parse;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.Token;
import org.antlr.runtime.TokenStream;
import org.antlr.runtime.tree.CommonErrorNode;
public class AstErrorNode extends AstNode {
private static final long serialVersionUID = 1L;
/**
* delegate needn't be serialized.
*/
private transient CommonErrorNode delegate;
public AstErrorNode(TokenStream input, Token start, Token stop, RecognitionException e) {
delegate = new CommonErrorNode(input, start, stop, e);
}
@Override
public boolean isNil() {
return delegate.isNil();
}
@Override
public int getType() {
return delegate.getType();
}
@Override
public String getText() {
return delegate.getText();
}
@Override
public String toString() {
return delegate.toString();
}
}

View File

@ -1,362 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.sql.parse;
import java.io.Serializable;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
import org.antlr.runtime.Token;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.lang3.StringUtils;
public class AstNode extends CommonTree implements Node, Serializable {
private static final long serialVersionUID = 1L;
private transient StringBuilder astStr;
private transient AstNodeOrigin origin;
private transient int startIndx = -1;
private transient int endIndx = -1;
private transient AstNode rootNode;
private transient boolean isValidAstStr;
private transient boolean visited = false;
public AstNode() {
// there is nothing need to do
}
/**
* Constructor.
*
* @param t Token for the CommonTree Node
*/
public AstNode(Token t) {
super(t);
}
public AstNode(AstNode node) {
super(node);
this.origin = node.origin;
}
@Override
public Tree dupNode() {
return new AstNode(this);
}
/*
* (non-Javadoc)
*
* @see org.apache.hadoop.hive.ql.lib.Node#getChildren()
*/
@Override
public List<Node> getChildren() {
if (super.getChildCount() == 0) {
return new ArrayList<>();
}
ArrayList<Node> retVec = new ArrayList<>();
for (int i = 0; i < super.getChildCount(); ++i) {
retVec.add((Node) super.getChild(i));
}
return retVec;
}
/*
* (non-Javadoc)
*
* @see org.apache.hadoop.hive.ql.lib.Node#getName()
*/
@Override
public String getName() {
return String.valueOf(super.getToken().getType());
}
/**
* For every node in this subtree, make sure its start/stop tokens are set. Walk depth first,
* visit bottom up. Only updates nodes with at least one token index < 0.
* <p> </p>
* In contrast to the method in the parent class, this method is iterative.
*/
@Override
public void setUnknownTokenBoundaries() {
Deque<AstNode> stack1 = new ArrayDeque<>();
Deque<AstNode> stack2 = new ArrayDeque<>();
stack1.push(this);
while (!stack1.isEmpty()) {
AstNode next = stack1.pop();
stack2.push(next);
if (next.children != null) {
for (int i = next.children.size() - 1; i >= 0; i--) {
stack1.push((AstNode) next.children.get(i));
}
}
}
while (!stack2.isEmpty()) {
AstNode next = stack2.pop();
if (next.children == null && (next.startIndex < 0 || next.stopIndex < 0)) {
next.startIndex = next.stopIndex = next.token.getTokenIndex();
} else if (!(next.startIndex >= 0 && next.stopIndex >= 0) && !next.children.isEmpty()) {
AstNode firstChild = (AstNode) next.children.get(0);
AstNode lastChild = (AstNode) next.children.get(next.children.size() - 1);
next.startIndex = firstChild.getTokenStartIndex();
next.stopIndex = lastChild.getTokenStopIndex();
}
}
}
/**
* Get origin AstNodeOrigin.
*
* @return information about the object from which this AstNode originated, or null if this
* AstNode was not expanded from an object reference
*/
public AstNodeOrigin getOrigin() {
return origin;
}
/**
* Tag this AstNode with information about the object from which this node originated.
*/
public void setOrigin(AstNodeOrigin origin) {
this.origin = origin;
}
/**
* dump string.
*
* @return dump string
*/
public String dump() {
StringBuilder sb = new StringBuilder("\n");
dump(sb);
return sb.toString();
}
private StringBuilder dump(StringBuilder sb) {
Deque<AstNode> stack = new ArrayDeque<>();
stack.push(this);
int tabLength = 0;
while (!stack.isEmpty()) {
AstNode next = stack.peek();
if (!next.visited) {
sb.append(StringUtils.repeat(" ", tabLength * 3));
sb.append(next.toString());
sb.append("\n");
if (next.children != null) {
for (int i = next.children.size() - 1; i >= 0; i--) {
stack.push((AstNode) next.children.get(i));
}
}
tabLength++;
next.visited = true;
} else {
tabLength--;
next.visited = false;
stack.pop();
}
}
return sb;
}
private void getRootNodeWithValidAstStr() {
if (rootNode != null && rootNode.parent == null && rootNode.hasValidMemoizedString()) {
return;
}
AstNode retNode = this;
while (retNode.parent != null) {
retNode = (AstNode) retNode.parent;
}
rootNode = retNode;
if (!rootNode.isValidAstStr) {
rootNode.astStr = new StringBuilder();
rootNode.toStringTree(rootNode);
rootNode.isValidAstStr = true;
}
}
private boolean hasValidMemoizedString() {
return isValidAstStr && astStr != null;
}
private void resetRootInformation() {
// Reset the previously stored rootNode string
if (rootNode != null) {
rootNode.astStr = null;
rootNode.isValidAstStr = false;
}
}
private int getMemoizedStringLen() {
return astStr == null ? 0 : astStr.length();
}
private boolean checkStringBuilder(StringBuilder builder, int start, int end){
return builder == null || start < 0 || end > builder.length() || start >= end;
}
private String getMemoizedSubString(int start, int end) {
return checkStringBuilder(astStr, start, end) ? null : astStr.subSequence(start, end).toString();
}
private void addtoMemoizedString(String string) {
if (astStr == null) {
astStr = new StringBuilder();
}
astStr.append(string);
}
@Override
public void setParent(Tree t) {
super.setParent(t);
resetRootInformation();
}
@Override
public void addChild(Tree t) {
super.addChild(t);
resetRootInformation();
}
@Override
public void addChildren(List kids) {
super.addChildren(kids);
resetRootInformation();
}
@Override
public void setChild(int i, Tree t) {
super.setChild(i, t);
resetRootInformation();
}
@Override
public void insertChild(int i, Object t) {
super.insertChild(i, t);
resetRootInformation();
}
@Override
public Object deleteChild(int i) {
Object ret = super.deleteChild(i);
resetRootInformation();
return ret;
}
@Override
public void replaceChildren(int startChildIndex, int stopChildIndex, Object t) {
super.replaceChildren(startChildIndex, stopChildIndex, t);
resetRootInformation();
}
@Override
public String toStringTree() {
// The root might have changed because of tree modifications.
// Compute the new root for this tree and set the astStr.
getRootNodeWithValidAstStr();
// If rootNotModified is false, then startIndx and endIndx will be
// stale.
if (startIndx >= 0 && endIndx <= rootNode.getMemoizedStringLen()) {
return rootNode.getMemoizedSubString(startIndx, endIndx);
}
return toStringTree(rootNode);
}
private String toStringTree(AstNode rootNode) {
Deque<AstNode> stack = new ArrayDeque<>();
stack.push(this);
while (!stack.isEmpty()) {
AstNode next = stack.peek();
if (!next.visited) {
toStringVisit(next, stack);
} else {
if (!next.isNil()) {
rootNode.addtoMemoizedString(")");
}
next.endIndx = rootNode.getMemoizedStringLen();
next.visited = false;
stack.pop();
}
}
return rootNode.getMemoizedSubString(startIndx, endIndx);
}
private void toStringVisit(AstNode next, Deque<AstNode> stack) {
if (next.parent != null && next.parent.getChildCount() > 1 && next != next.parent
.getChild(0)) {
rootNode.addtoMemoizedString(" ");
}
next.rootNode = rootNode;
next.startIndx = rootNode.getMemoizedStringLen();
// Leaf
if (next.children == null || next.children.isEmpty()) {
String str = next.toString();
rootNode.addtoMemoizedString(
next.getType() != TqlParser.STRING_LITERAL ? str.toLowerCase() : str);
next.endIndx = rootNode.getMemoizedStringLen();
stack.pop();
return;
}
if (!next.isNil()) {
rootNode.addtoMemoizedString("(");
String str = next.toString();
rootNode.addtoMemoizedString(
(next.getType() == TqlParser.STRING_LITERAL || null == str) ? str : str.toLowerCase());
rootNode.addtoMemoizedString(" ");
}
if (next.children != null) {
for (int i = next.children.size() - 1; i >= 0; i--) {
stack.push((AstNode) next.children.get(i));
}
}
next.visited = true;
}
@Override
public AstNode getChild(int i) {
if (children == null || i >= children.size()) {
return null;
}
return (AstNode) children.get(i);
}
}
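
Note: much of the AstNode class above exists to memoize a printable form of the tree; the ANTLR 4 runtime that replaces it ships this directly. A short sketch, reusing the tree/parser produced by the parse sketch shown after the ParseGenerator diff above:

import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.runtime.tree.Trees;

// Sketch: the runtime's Trees utility produces the LISP-style dump that AstNode built by hand.
public final class TreeDumpSketch {
  private TreeDumpSketch() {
  }

  public static String dump(ParseTree tree, Parser parser) {
    return Trees.toStringTree(tree, parser); // e.g. "(statement (...))"
  }
}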

View File

@ -1,110 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.sql.parse;
/**
* AstNodeOrigin contains contextual information about the object from whose definition a particular
* AstNode originated. For example, suppose a view v is defined as <code>select x+1 as y from
* t</code>, and we're processing a query
* <code>select v1.y from v as v1</code>, and there's a type-checking problem with the expression
* <code>x+1</code> due
* to an ALTER TABLE on t subsequent to the creation of v. Then, when reporting the error, we want
* to provide the parser location with respect to the definition of v (rather than with respect to
* the top-level query, since that represents a completely different "parser coordinate system").
* <p> </p>
* So, when expanding the definition of v while analyzing the top-level query, we tag each AstNode
* with a reference to an AstNodeOrigin describing v and its usage within the query.
*/
public class AstNodeOrigin {
private final String objectType;
private final String objectName;
private final String objectDefinition;
private final String usageAlias;
private final AstNode usageNode;
/**
* Constructor of AstNodeOrigin.
*
* @param objectType object type
* @param objectName object name
* @param objectDefinition object definition
* @param usageAlias usage alias
* @param usageNode usage node
*/
public AstNodeOrigin(String objectType, String objectName, String objectDefinition,
String usageAlias,
AstNode usageNode) {
this.objectType = objectType;
this.objectName = objectName;
this.objectDefinition = objectDefinition;
this.usageAlias = usageAlias;
this.usageNode = usageNode;
}
/**
* get object type.
*
* @return the type of the object from which an AstNode originated, e.g. "view".
*/
public String getObjectType() {
return objectType;
}
/**
* get object name.
*
* @return the name of the object from which an AstNode originated, e.g. "v".
*/
public String getObjectName() {
return objectName;
}
/**
* get object definition.
*
* @return the definition of the object from which an AstNode originated, e.g. <code>select x+1 as
* y from t</code>.
*/
public String getObjectDefinition() {
return objectDefinition;
}
/**
* get usage alias.
*
* @return the alias of the object from which an AstNode originated, e.g. "v1" (this can help with
* debugging context-dependent expansions)
*/
public String getUsageAlias() {
return usageAlias;
}
/**
* get usage node.
*
* @return the expression node triggering usage of an object from which an AstNode originated,
* e.g. <code>v as v1</code> (this can help with debugging context-dependent expansions)
*/
public AstNode getUsageNode() {
return usageNode;
}
}
// End AstNodeOrigin.java

View File

@ -1,192 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.sql.parse;
import java.util.ArrayList;
import java.util.List;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.NoViableAltException;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.Token;
import org.antlr.runtime.TokenRewriteStream;
import org.antlr.runtime.TokenStream;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.CommonTreeAdaptor;
import org.antlr.runtime.tree.TreeAdaptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ParseDriver.
*
*/
public class ParseDriver {
/**
* Tree adaptor for making antlr return AstNodes instead of CommonTree nodes so that the graph walking algorithms
* and the rules framework defined in ql.lib can be used with the AST Nodes.
*/
public static final TreeAdaptor adaptor = new CommonTreeAdaptor() {
/**
* Creates an AstNode for the given token. The AstNode is a wrapper around antlr's CommonTree class that
* implements the Node interface.
*
* @param payload
* The token.
* @return Object (which is actually an AstNode) for the token.
*/
@Override
public Object create(Token payload) {
return new AstNode(payload);
}
@Override
public Object dupNode(Object t) {
return create(((CommonTree) t).token);
}
@Override
public Object errorNode(TokenStream input, Token start, Token stop, RecognitionException e) {
return new AstErrorNode(input, start, stop, e);
}
};
private static final Logger LOG = LoggerFactory.getLogger("ql.parse.ParseDriver");
/**
* Parses a command, optionally assigning the parser's token stream to the given context.
*
* @param command
* command to parse
*
* @return parsed AST
*/
public AstNode parse(String command) throws ParseException {
TqlLexerX lexer = new TqlLexerX(new ANTLRNoCaseStringStream(command));
TokenRewriteStream tokens = new TokenRewriteStream(lexer);
TqlParser parser = new TqlParser(tokens);
parser.setTreeAdaptor(adaptor);
TqlParser.statement_return r = null;
try {
r = parser.statement();
} catch (RecognitionException e) {
LOG.trace("meet error while parsing statement: {}", command, e);
}
if (!lexer.getErrors().isEmpty()) {
throw new ParseException(lexer.getErrors());
}
if (!parser.errors.isEmpty()) {
throw new ParseException(parser.errors);
}
if (r != null) {
AstNode tree = (AstNode) r.getTree();
tree.setUnknownTokenBoundaries();
return tree;
} else {
return null;
}
}
/**
* ANTLRNoCaseStringStream.
* This class provides an implementation for a case-insensitive token checker
* for the lexical analysis part of antlr. By converting the token stream into
* upper case at the time when lexical rules are checked, this class ensures that the
* lexical rules need to just match the token with upper case letters as opposed to
* combination of upper case and lower case characters. This is purely used for matching lexical
* rules. The actual token text is stored in the same way as the user input without
* actually converting it into an upper case. The token values are generated by the consume()
* function of the super class ANTLRStringStream. The LA() function is the lookahead function
* and is purely used for matching lexical rules. This also means that the grammar will only
* accept capitalized tokens in case it is run from other tools like antlrworks which
* do not have the ANTLRNoCaseStringStream implementation.
*/
public class ANTLRNoCaseStringStream extends ANTLRStringStream {
public ANTLRNoCaseStringStream(String input) {
super(input);
}
@Override
public int LA(int i) {
int returnChar = super.LA(i);
if (returnChar == CharStream.EOF || returnChar == 0) {
return returnChar;
}
return Character.toUpperCase((char) returnChar);
}
}
/**
* TqlLexerX.
*
*/
public class TqlLexerX extends TqlLexer {
private final ArrayList<ParseError> errors;
public TqlLexerX(CharStream input) {
super(input);
errors = new ArrayList<>();
}
@Override
public void displayRecognitionError(String[] tokenNames, RecognitionException e) {
errors.add(new ParseError(this, e, tokenNames));
}
@Override
public String getErrorMessage(RecognitionException e, String[] tokenNames) {
String msg;
if (e instanceof NoViableAltException) {
// for development, can add
// "decision=<<"+nvae.grammarDecisionDescription+">>"
// and "(decision="+nvae.decisionNumber+") and
// "state "+nvae.stateNumber
msg = "character " + getCharErrorDisplay(e.c) + " not supported here";
} else {
msg = super.getErrorMessage(e, tokenNames);
}
String input = e.input.toString();
if (input.contains("timestamp") || input.contains("time")) {
msg += ". (Note that time format should be something like 1) number: eg.123456 2) function: eg.now() 3) datatime: eg.yyyy-MM-dd HH:mm:ss, please check whether inputting correct time format or referring to sql document)";
}
return msg;
}
public List<ParseError> getErrors() {
return errors;
}
}
}
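
Note: the ANTLRNoCaseStringStream trick above (upper-case only what LA() returns, so keywords match case-insensitively while token text keeps the user's casing) has no built-in counterpart in the ANTLR 4 runtime pulled in by this commit; the usual replacement is a delegating CharStream wrapper. A minimal sketch using only the ANTLR 4 runtime API, with a class name of my own choosing:

import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.misc.Interval;

// Sketch: upper-case only the lookahead the lexer sees; getText() still returns the original input.
public class UpperCaseCharStream implements CharStream {
  private final CharStream inner;

  public UpperCaseCharStream(CharStream inner) {
    this.inner = inner;
  }

  @Override
  public int LA(int i) {
    int c = inner.LA(i);
    return c <= 0 ? c : Character.toUpperCase(c); // keep EOF (-1) and 0 untouched
  }

  // pure delegation below
  @Override
  public String getText(Interval interval) {
    return inner.getText(interval);
  }

  @Override
  public void consume() {
    inner.consume();
  }

  @Override
  public int mark() {
    return inner.mark();
  }

  @Override
  public void release(int marker) {
    inner.release(marker);
  }

  @Override
  public int index() {
    return inner.index();
  }

  @Override
  public void seek(int index) {
    inner.seek(index);
  }

  @Override
  public int size() {
    return inner.size();
  }

  @Override
  public String getSourceName() {
    return inner.getSourceName();
  }
}
// illustrative use: new TqlLexer(new UpperCaseCharStream(CharStreams.fromString(sql)))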

View File

@ -1,52 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.sql.parse;
import org.antlr.runtime.BaseRecognizer;
import org.antlr.runtime.RecognitionException;
public class ParseError {
private final BaseRecognizer br;
private final RecognitionException re;
private final String[] tokenNames;
ParseError(BaseRecognizer br, RecognitionException re, String[] tokenNames) {
this.br = br;
this.re = re;
this.tokenNames = tokenNames;
}
BaseRecognizer getBaseRecognizer() {
return br;
}
RecognitionException getRecognitionException() {
return re;
}
String[] getTokenNames() {
return tokenNames;
}
String getMessage() {
return br.getErrorHeader(re) + " " + br.getErrorMessage(re, tokenNames);
}
}

View File

@ -1,48 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.sql.parse;
import java.util.List;
public class ParseException extends Exception {
private static final long serialVersionUID = 1L;
private final transient List<ParseError> errors;
public ParseException(List<ParseError> errors) {
super();
this.errors = errors;
}
@Override
public String getMessage() {
StringBuilder sb = new StringBuilder();
for (ParseError err : errors) {
if (sb.length() > 0) {
sb.append('\n');
}
sb.append(err.getMessage());
}
return sb.toString();
}
}

View File

@ -1,43 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.sql.parse;
/**
* Library of utility functions used in the parse code.
*/
public final class ParseUtils {
private ParseUtils() {
// prevent instantiation
}
/**
* Performs a descent of the leftmost branch of a tree, stopping when either a node with a
* non-null token is found or the leaf level is encountered.
*
* @param tree candidate node from which to start searching
* @return node at which descent stopped
*/
public static AstNode findRootNonNullToken(AstNode tree) {
while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
tree = tree.getChild(0);
}
return tree;
}
}
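
Note: findRootNonNullToken existed because an ANTLR 3 parse could hand back a nil root node; an ANTLR 4 parse returns the start-rule context itself, with its boundary tokens attached, so the descent disappears. For illustration (statement() again assumed to be the start rule):

import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.Token;

// Sketch: given a TqlParser (see the parse sketch earlier), the root needs no searching.
public final class RootTokenSketch {
  private RootTokenSketch() {
  }

  public static Token rootStartToken(TqlParser parser) {
    ParserRuleContext root = parser.statement();
    return root.getStart(); // first token matched by the start rule
  }
}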

View File

@ -18,25 +18,18 @@
*/
package org.apache.iotdb.db.qp.plan;
import org.apache.iotdb.db.conf.IoTDBConfig;
import org.apache.iotdb.db.conf.IoTDBDescriptor;
import org.apache.iotdb.db.exception.metadata.MetadataException;
import org.apache.iotdb.db.exception.query.IllegalASTFormatException;
import org.apache.iotdb.db.exception.query.LogicalOperatorException;
import org.apache.iotdb.db.exception.query.LogicalOptimizeException;
import org.apache.iotdb.db.exception.query.QueryProcessException;
import org.apache.iotdb.db.exception.runtime.SQLParserException;
import org.apache.iotdb.db.qp.logical.RootOperator;
import org.apache.iotdb.db.qp.logical.crud.QueryOperator;
import org.apache.iotdb.db.qp.logical.crud.SFWOperator;
import org.apache.iotdb.db.qp.logical.sys.DeleteStorageGroupOperator;
import org.apache.iotdb.db.qp.physical.crud.InsertPlan;
import org.apache.iotdb.db.qp.strategy.LogicalGenerator;
import org.apache.iotdb.db.qp.strategy.ParseDriver;
import org.apache.iotdb.db.qp.strategy.optimizer.ConcatPathOptimizer;
import org.apache.iotdb.db.qp.utils.MemIntQpExecutor;
import org.apache.iotdb.db.sql.ParseGenerator;
import org.apache.iotdb.db.sql.parse.AstNode;
import org.apache.iotdb.db.sql.parse.ParseException;
import org.apache.iotdb.db.sql.parse.ParseUtils;
import org.apache.iotdb.tsfile.common.constant.TsFileConstant;
import org.apache.iotdb.tsfile.read.common.Path;
import org.apache.iotdb.tsfile.utils.StringContainer;
@ -46,93 +39,49 @@ import org.junit.Test;
public class LogicalPlanSmallTest {
private LogicalGenerator generator;
private ParseDriver parseDriver;
@Before
public void before() {
IoTDBConfig config = IoTDBDescriptor.getInstance().getConfig();
generator = new LogicalGenerator(config.getZoneID());
parseDriver = new ParseDriver();
}
@Test
public void testSlimit1()
throws QueryProcessException, MetadataException {
public void testSlimit1() {
String sqlStr = "select * from root.vehicle.d1 where s1 < 20 and time <= now() slimit 10";
AstNode astTree;
try {
astTree = ParseGenerator.generateAST(sqlStr); // parse string to ASTTree
} catch (ParseException e) {
// e.printStackTrace();
throw new IllegalASTFormatException(sqlStr, e.getMessage());
}
AstNode astNode = ParseUtils.findRootNonNullToken(astTree);
RootOperator operator = generator.getLogicalPlan(astNode);
Assert.assertEquals(operator.getClass(), QueryOperator.class);
RootOperator operator = (RootOperator) parseDriver.parse(sqlStr, IoTDBDescriptor.getInstance().getConfig().getZoneID());
Assert.assertEquals(QueryOperator.class, operator.getClass());
Assert.assertEquals(10, ((QueryOperator) operator).getSeriesLimit());
}
@Test(expected = LogicalOperatorException.class)
public void testSlimit2()
throws QueryProcessException, MetadataException {
@Test(expected = NumberFormatException.class)
public void testSlimit2() {
String sqlStr = "select * from root.vehicle.d1 where s1 < 20 and time <= now() slimit 1111111111111111111111";
AstNode astTree;
try {
astTree = ParseGenerator.generateAST(sqlStr); // parse string to ASTTree
} catch (ParseException e) {
// e.printStackTrace();
throw new IllegalASTFormatException(sqlStr, e.getMessage());
}
AstNode astNode = ParseUtils.findRootNonNullToken(astTree);
RootOperator operator = generator.getLogicalPlan(astNode);
RootOperator operator = (RootOperator) parseDriver.parse(sqlStr, IoTDBDescriptor.getInstance().getConfig().getZoneID());
// expected to throw LogicalOperatorException: SLIMIT <SN>: SN should be Int32.
}
@Test(expected = LogicalOperatorException.class)
public void testSlimit3()
throws QueryProcessException, MetadataException {
@Test(expected = SQLParserException.class)
public void testSlimit3() {
String sqlStr = "select * from root.vehicle.d1 where s1 < 20 and time <= now() slimit 0";
AstNode astTree;
try {
astTree = ParseGenerator.generateAST(sqlStr); // parse string to ASTTree
} catch (ParseException e) {
// e.printStackTrace();
throw new IllegalASTFormatException(sqlStr, e.getMessage());
}
AstNode astNode = ParseUtils.findRootNonNullToken(astTree);
RootOperator operator = generator.getLogicalPlan(astNode);
RootOperator operator = (RootOperator) parseDriver.parse(sqlStr, IoTDBDescriptor.getInstance().getConfig().getZoneID());
// expected to throw LogicalOperatorException: SLIMIT <SN>: SN must be a positive integer and can not be zero.
}
@Test
public void testSoffset()
throws QueryProcessException, MetadataException {
public void testSoffset() {
String sqlStr = "select * from root.vehicle.d1 where s1 < 20 and time <= now() slimit 10 soffset 1";
AstNode astTree;
try {
astTree = ParseGenerator.generateAST(sqlStr); // parse string to ASTTree
} catch (ParseException e) {
// e.printStackTrace();
throw new IllegalASTFormatException(sqlStr, e.getMessage());
}
AstNode astNode = ParseUtils.findRootNonNullToken(astTree);
RootOperator operator = generator.getLogicalPlan(astNode);
Assert.assertEquals(operator.getClass(), QueryOperator.class);
RootOperator operator = (RootOperator) parseDriver.parse(sqlStr, IoTDBDescriptor.getInstance().getConfig().getZoneID());
Assert.assertEquals(QueryOperator.class, operator.getClass());
Assert.assertEquals(10, ((QueryOperator) operator).getSeriesLimit());
Assert.assertEquals(1, ((QueryOperator) operator).getSeriesOffset());
}
@Test(expected = LogicalOptimizeException.class)
public void testSlimitLogicalOptimize()
throws QueryProcessException, MetadataException {
throws QueryProcessException {
String sqlStr = "select s1 from root.vehicle.d1 where s1 < 20 and time <= now() slimit 10 soffset 1";
AstNode astTree;
try {
astTree = ParseGenerator.generateAST(sqlStr); // parse string to ASTTree
} catch (ParseException e) {
throw new IllegalASTFormatException(sqlStr, e.getMessage());
}
AstNode astNode = ParseUtils.findRootNonNullToken(astTree);
RootOperator operator = generator.getLogicalPlan(astNode);
RootOperator operator = (RootOperator) parseDriver.parse(sqlStr, IoTDBDescriptor.getInstance().getConfig().getZoneID());
MemIntQpExecutor executor = new MemIntQpExecutor();
Path path1 = new Path(
@ -157,51 +106,24 @@ public class LogicalPlanSmallTest {
// complete paths.
}
@Test(expected = LogicalOperatorException.class)
public void testLimit1()
throws QueryProcessException, MetadataException {
@Test(expected = NumberFormatException.class)
public void testLimit1() {
String sqlStr = "select s1 from root.vehicle.d1 where s1 < 20 and time <= now() limit 111111111111111111111111";
AstNode astTree;
try {
astTree = ParseGenerator.generateAST(sqlStr); // parse string to ASTTree
} catch (ParseException e) {
// e.printStackTrace();
throw new IllegalASTFormatException(sqlStr, e.getMessage());
}
AstNode astNode = ParseUtils.findRootNonNullToken(astTree);
RootOperator operator = generator.getLogicalPlan(astNode);
RootOperator operator = (RootOperator) parseDriver.parse(sqlStr, IoTDBDescriptor.getInstance().getConfig().getZoneID());
// expected to throw LogicalOperatorException: LIMIT <N>: N should be Int32.
}
@Test(expected = LogicalOperatorException.class)
public void testLimit2()
throws QueryProcessException, MetadataException {
@Test(expected = SQLParserException.class)
public void testLimit2() {
String sqlStr = "select s1 from root.vehicle.d1 where s1 < 20 and time <= now() limit 0";
AstNode astTree;
try {
astTree = ParseGenerator.generateAST(sqlStr); // parse string to ASTTree
} catch (ParseException e) {
// e.printStackTrace();
throw new IllegalASTFormatException(sqlStr, e.getMessage());
}
AstNode astNode = ParseUtils.findRootNonNullToken(astTree);
RootOperator operator = generator.getLogicalPlan(astNode);
RootOperator operator = (RootOperator) parseDriver.parse(sqlStr, IoTDBDescriptor.getInstance().getConfig().getZoneID());
// expected to throw LogicalOperatorException: LIMIT <N>: N must be a positive integer and can not be zero.
}
@Test
public void testDeleteStorageGroup()
throws QueryProcessException, MetadataException {
public void testDeleteStorageGroup() {
String sqlStr = "delete storage group root.vehicle.d1";
AstNode astTree;
try {
astTree = ParseGenerator.generateAST(sqlStr);
} catch (ParseException e) {
throw new IllegalASTFormatException(sqlStr, e.getMessage());
}
AstNode astNode = ParseUtils.findRootNonNullToken(astTree);
RootOperator operator = generator.getLogicalPlan(astNode);
RootOperator operator = (RootOperator) parseDriver.parse(sqlStr, IoTDBDescriptor.getInstance().getConfig().getZoneID());
Assert.assertEquals(DeleteStorageGroupOperator.class, operator.getClass());
Path path = new Path("root.vehicle.d1");
Assert.assertEquals(path, ((DeleteStorageGroupOperator) operator).getDeletePathList().get(0));

View File

@ -24,7 +24,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.antlr.runtime.RecognitionException;
import org.antlr.v4.runtime.RecognitionException;
import org.apache.iotdb.db.exception.metadata.MetadataException;
import org.apache.iotdb.db.exception.query.QueryProcessException;
import org.apache.iotdb.db.qp.QueryProcessor;
@ -37,15 +37,12 @@ import org.apache.iotdb.tsfile.read.expression.impl.SingleSeriesExpression;
import org.apache.iotdb.tsfile.read.filter.ValueFilter;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* test the correctness of {@linkplain ConcatPathOptimizer ConcatPathOptimizer}
*/
public class TestConcatOptimizer {
private static final Logger LOG = LoggerFactory.getLogger(TestConcatOptimizer.class);
private QueryProcessor processor;
@Before

View File

@ -25,6 +25,7 @@ import java.time.ZoneId;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import org.apache.iotdb.db.exception.query.LogicalOperatorException;
import org.apache.iotdb.db.exception.runtime.SQLParserException;
import org.apache.iotdb.db.qp.constant.SQLConstant;
import org.junit.After;
import org.junit.Before;
@ -65,12 +66,12 @@ public class LogicalGeneratorTest {
}
@Test(expected = LogicalOperatorException.class)
@Test(expected = SQLParserException.class)
public void testParseTimeFormatFail1() throws LogicalOperatorException {
generator.parseTimeFormat(null);
}
@Test(expected = LogicalOperatorException.class)
@Test(expected = SQLParserException.class)
public void testParseTimeFormatFail2() throws LogicalOperatorException {
generator.parseTimeFormat("");
}

File diff suppressed because it is too large.