Updating docker and docker-compose versions.

Updating and enabling datetime64 extended range tests.
This commit is contained in:
Vitaliy Zakaznikov 2021-05-10 16:59:47 -04:00
parent 872ae4821e
commit b194965a56
25 changed files with 207 additions and 228 deletions

View File

@ -35,10 +35,10 @@ RUN apt-get update \
ENV TZ=Europe/Moscow
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
RUN pip3 install urllib3 testflows==1.6.90 docker-compose docker dicttoxml kazoo tzlocal python-dateutil numpy
RUN pip3 install urllib3 testflows==1.6.90 docker-compose==1.29.1 docker==5.0.0 dicttoxml kazoo tzlocal python-dateutil numpy
ENV DOCKER_CHANNEL stable
ENV DOCKER_VERSION 17.09.1-ce
ENV DOCKER_VERSION 20.10.6
RUN set -eux; \
\

View File

@ -69,6 +69,11 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"clickhouse": ("clickhouse1", "clickhouse2", "clickhouse3"),
}
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
with Cluster(local, clickhouse_binary_path, nodes=nodes,
docker_compose_project_dir=os.path.join(current_dir(), "aes_encryption_env")) as cluster:
self.context.cluster = cluster

View File

@ -1,46 +1,2 @@
from multiprocessing.dummy import Pool
from testflows.core import *
def join(tasks, test=None):
    """Wait for all parallel tasks to complete.

    Every task handle in *tasks* is joined via ``.get()``. If one or
    more tasks raised, the most recently seen exception is re-raised
    after all tasks have been collected, so no task is left un-joined.

    :param tasks: list of async task handles (objects with ``.get()``)
    :param test: owning test; defaults to the current test
    """
    if test is None:
        test = current()
    last_error = None
    for pending in tasks:
        try:
            pending.get()
        except Exception as err:
            # remember the failure but keep joining the remaining tasks
            last_error = err
    if last_error:
        raise last_error
def start(pool, tasks, scenario, kwargs=None, test=None):
    """Schedule a single test to run asynchronously on the pool.

    The async task handle returned by the pool is appended to *tasks*
    (so it can later be joined) and also returned to the caller.

    :param pool: worker pool providing ``apply_async``
    :param tasks: list that collects started task handles
    :param scenario: callable to execute
    :param kwargs: keyword arguments for the scenario; defaults to {}
    :param test: owning test; defaults to the current test
    """
    test = test if test is not None else current()
    kwargs = kwargs if kwargs is not None else {}
    handle = pool.apply_async(scenario, [], kwargs)
    tasks.append(handle)
    return handle
def run_scenario(pool, tasks, scenario, kwargs=None):
    """Execute a scenario, honoring the context's parallel flag.

    When ``current().context.parallel`` is truthy the scenario is
    started asynchronously on *pool* (its handle lands in *tasks*);
    otherwise it runs inline in the current test.

    :param pool: worker pool used for parallel execution
    :param tasks: list that collects started task handles
    :param scenario: callable to execute
    :param kwargs: keyword arguments for the scenario; defaults to {}
    """
    kwargs = {} if kwargs is None else kwargs
    if not current().context.parallel:
        # parallel flag off: run synchronously right here
        scenario(**kwargs)
    else:
        start(pool, tasks, scenario, kwargs)
from helpers.common import *

View File

@ -0,0 +1,28 @@
version: '2.3'
services:
clickhouse:
image: yandex/clickhouse-integration-test
expose:
- "9000"
- "9009"
- "8123"
volumes:
- "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d"
- "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/:/etc/clickhouse-server/users.d"
- "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl"
- "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml"
- "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml"
- "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse"
- "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge"
entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log"
healthcheck:
test: clickhouse client --query='select 1'
interval: 10s
timeout: 10s
retries: 10
start_period: 300s
cap_add:
- SYS_PTRACE
security_opt:
- label:disable

View File

@ -0,0 +1,18 @@
version: '2.3'
services:
zookeeper:
image: zookeeper:3.6.2
expose:
- "2181"
environment:
ZOO_TICK_TIME: 500
ZOO_MY_ID: 1
healthcheck:
test: echo stat | nc localhost 2181
interval: 3s
timeout: 2s
retries: 5
start_period: 2s
security_opt:
- label:disable

View File

@ -1,28 +0,0 @@
version: '2.3'
services:
clickhouse:
image: yandex/clickhouse-integration-test
expose:
- "9000"
- "9009"
- "8123"
volumes:
- "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.d:/etc/clickhouse-server/config.d"
- "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.d/:/etc/clickhouse-server/users.d"
- "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/ssl:/etc/clickhouse-server/ssl"
- "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/config.xml:/etc/clickhouse-server/config.xml"
- "${CLICKHOUSE_TESTS_DIR}/configs/clickhouse/users.xml:/etc/clickhouse-server/users.xml"
- "${CLICKHOUSE_TESTS_SERVER_BIN_PATH:-/usr/bin/clickhouse}:/usr/bin/clickhouse"
- "${CLICKHOUSE_TESTS_ODBC_BRIDGE_BIN_PATH:-/usr/bin/clickhouse-odbc-bridge}:/usr/bin/clickhouse-odbc-bridge"
entrypoint: bash -c "clickhouse server --config-file=/etc/clickhouse-server/config.xml --log-file=/var/log/clickhouse-server/clickhouse-server.log --errorlog-file=/var/log/clickhouse-server/clickhouse-server.err.log"
healthcheck:
test: clickhouse client --query='select 1'
interval: 3s
timeout: 2s
retries: 40
start_period: 2s
cap_add:
- SYS_PTRACE
security_opt:
- label:disable

View File

@ -1,18 +0,0 @@
version: '2.3'
services:
zookeeper:
image: zookeeper:3.4.12
expose:
- "2181"
environment:
ZOO_TICK_TIME: 500
ZOO_MY_ID: 1
healthcheck:
test: echo stat | nc localhost 2181
interval: 3s
timeout: 2s
retries: 5
start_period: 2s
security_opt:
- label:disable

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python3
import os
import sys
from testflows.core import *
@ -44,6 +45,8 @@ xfails = {
"type conversion/from unix timestamp64 */:": [(Fail, "https://github.com/ClickHouse/ClickHouse/issues/22959")],
"type conversion/to int 8 16 32 64 128 256/:": [(Fail, "https://github.com/ClickHouse/ClickHouse/issues/16581#issuecomment-804360350")],
"reference times/:": [(Fail, "check procedure unclear")],
# need to investigate
"type conversion/to datetime/cast=True": [(Fail, "need to investigate")],
}
@ -51,7 +54,7 @@ xfails = {
@Name("datetime64 extended range")
@ArgumentParser(argparser)
@Specifications(
QA_SRS010_ClickHouse_DateTime64_Extended_Range
SRS_010_ClickHouse_DateTime64_Extended_Range
)
@Requirements(
RQ_SRS_010_DateTime64_ExtendedRange("1.0"),
@ -60,20 +63,30 @@ xfails = {
def regression(self, local, clickhouse_binary_path, parallel=False, stress=False):
"""ClickHouse DateTime64 Extended Range regression module.
"""
top().terminating = False
nodes = {
"clickhouse": ("clickhouse1", "clickhouse2", "clickhouse3"),
}
with Cluster(local, clickhouse_binary_path, nodes=nodes) as cluster:
self.context.cluster = cluster
self.context.parallel = parallel
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
Scenario(run=load("datetime64_extended_range.tests.generic", "generic"), flags=TE)
Scenario(run=load("datetime64_extended_range.tests.non_existent_time", "feature"), flags=TE)
Scenario(run=load("datetime64_extended_range.tests.reference_times", "reference_times"), flags=TE)
Scenario(run=load("datetime64_extended_range.tests.date_time_functions", "date_time_funcs"), flags=TE)
Scenario(run=load("datetime64_extended_range.tests.type_conversion", "type_conversion"), flags=TE)
with Cluster(local, clickhouse_binary_path, nodes=nodes,
docker_compose_project_dir=os.path.join(current_dir(), "datetime64_extended_range_env")) as cluster:
self.context.cluster = cluster
tasks = []
with Pool(2) as pool:
try:
run_scenario(pool, tasks, Scenario(test=load("datetime64_extended_range.tests.generic", "generic")))
run_scenario(pool, tasks, Scenario(test=load("datetime64_extended_range.tests.non_existent_time", "feature")))
run_scenario(pool, tasks, Scenario(test=load("datetime64_extended_range.tests.reference_times", "reference_times")))
run_scenario(pool, tasks, Scenario(test=load("datetime64_extended_range.tests.date_time_functions", "date_time_funcs")))
run_scenario(pool, tasks, Scenario(test=load("datetime64_extended_range.tests.type_conversion", "type_conversion")))
finally:
join(tasks)
if main():
regression()

View File

@ -1,4 +1,4 @@
# QA-SRS010 ClickHouse DateTime64 Extended Range
# SRS-010 ClickHouse DateTime64 Extended Range
# Software Requirements Specification
## Table of Contents

View File

@ -1,6 +1,6 @@
# These requirements were auto generated
# from software requirements specification (SRS)
# document by TestFlows v1.6.210312.1172513.
# document by TestFlows v1.6.210505.1133630.
# Do not edit by hand but re-generate instead
# using 'tfs requirements generate' command.
from testflows.core import Specification
@ -1628,8 +1628,8 @@ RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_fromUnixTimestamp64N
level=5,
num='4.2.1.4.16')
QA_SRS010_ClickHouse_DateTime64_Extended_Range = Specification(
name='QA-SRS010 ClickHouse DateTime64 Extended Range',
SRS_010_ClickHouse_DateTime64_Extended_Range = Specification(
name='SRS-010 ClickHouse DateTime64 Extended Range',
description=None,
author=None,
date=None,
@ -1860,7 +1860,7 @@ QA_SRS010_ClickHouse_DateTime64_Extended_Range = Specification(
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions_fromUnixTimestamp64Nano,
),
content='''
# QA-SRS010 ClickHouse DateTime64 Extended Range
# SRS-010 ClickHouse DateTime64 Extended Range
# Software Requirements Specification
## Table of Contents

View File

@ -126,24 +126,23 @@ def walk_datetime_in_incrementing_steps(self, date, hrs_range=(0, 24), step=1, t
"""
stress = self.context.stress
tasks = []
pool = Pool(4)
secs = f"00{'.' * (precision > 0)}{'0' * precision}"
try:
with When(f"I loop through datetime range {hrs_range} starting from {date} in {step}min increments"):
for hrs in range(*hrs_range) if stress else (hrs_range[0], hrs_range[1]-1):
for mins in range(0, 60, step) if stress else (0, 59):
datetime = f"{date} {str(hrs).zfill(2)}:{str(mins).zfill(2)}:{secs}"
expected = datetime
tasks = []
with Pool(2) as pool:
try:
with When(f"I loop through datetime range {hrs_range} starting from {date} in {step}min increments"):
for hrs in range(*hrs_range) if stress else (hrs_range[0], hrs_range[1]-1):
for mins in range(0, 60, step) if stress else (0, 59):
datetime = f"{date} {str(hrs).zfill(2)}:{str(mins).zfill(2)}:{secs}"
expected = datetime
with When(f"time is {datetime}"):
run_scenario(pool, tasks, Test(name=f"{hrs}:{mins}:{secs}", test=select_check_datetime),
kwargs=dict(datetime=datetime, precision=precision, timezone=timezone,
expected=expected))
finally:
join(tasks)
with When(f"time is {datetime}"):
run_scenario(pool, tasks, Test(name=f"{hrs}:{mins}:{secs}", test=select_check_datetime),
kwargs=dict(datetime=datetime, precision=precision, timezone=timezone,
expected=expected))
finally:
join(tasks)
@TestStep
@ -157,23 +156,21 @@ def walk_datetime_in_decrementing_steps(self, date, hrs_range=(23, 0), step=1, t
:param step: step in minutes
:param timezone: String
"""
stress = self.context.stress
tasks = []
pool = Pool(4)
secs = f"00{'.' * (precision > 0)}{'0' * precision}"
try:
with When(f"I loop through datetime range {hrs_range} starting from {date} in {step}min decrements"):
for hrs in range(*hrs_range, -1) if stress else (hrs_range[1], hrs_range[0]):
for mins in range(59, 0, -step) if stress else (59, 0):
datetime = f"{date} {str(hrs).zfill(2)}:{str(mins).zfill(2)}:{secs}"
expected = datetime
tasks = []
with Pool(2) as pool:
try:
with When(f"I loop through datetime range {hrs_range} starting from {date} in {step}min decrements"):
for hrs in range(*hrs_range, -1) if stress else (hrs_range[1], hrs_range[0]):
for mins in range(59, 0, -step) if stress else (59, 0):
datetime = f"{date} {str(hrs).zfill(2)}:{str(mins).zfill(2)}:{secs}"
expected = datetime
with When(f"time is {datetime}"):
run_scenario(pool, tasks, Test(name=f"{hrs}:{mins}:{secs}", test=select_check_datetime),
kwargs=dict(datetime=datetime, precision=precision, timezone=timezone,
expected=expected))
finally:
join(tasks)
with When(f"time is {datetime}"):
run_scenario(pool, tasks, Test(name=f"{hrs}:{mins}:{secs}", test=select_check_datetime),
kwargs=dict(datetime=datetime, precision=precision, timezone=timezone,
expected=expected))
finally:
join(tasks)

View File

@ -21,7 +21,7 @@ def to_time_zone(self):
timezones = timezones_range(stress)
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
for dt in datetimes:
@ -49,7 +49,7 @@ def to_date_part(self, py_func, ch_func):
datetimes = select_dates_in_year(year=year, stress=stress)
timezones = timezones_range(stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz1, tz2 in itertools.product(timezones, timezones):
with Step(f"{dt} {tz1}, {tz2}"):
@ -115,7 +115,7 @@ def to_day_of(self, py_func, ch_func):
datetimes = select_dates_in_year(year=year, stress=stress)
timezones = timezones_range(stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz1, tz2 in itertools.product(timezones, timezones):
with When(f"{dt} {tz1} -> {tz2}"):
@ -174,7 +174,7 @@ def to_time_part(self, py_func, ch_func):
datetimes = select_dates_in_year(year=year, stress=stress)
timezones = timezones_range(stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz1, tz2 in itertools.product(timezones, timezones):
with Step(f"{dt} {tz1} -> {tz2}"):
@ -234,7 +234,7 @@ def to_unix_timestamp(self):
datetimes = select_dates_in_year(year=year, stress=stress)
timezones = timezones_range(stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -260,7 +260,7 @@ def to_start_of_year(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz1, tz2 in itertools.product(timezones, timezones):
with Step(f"{dt} {tz1} -> {tz2}"):
@ -296,7 +296,7 @@ def to_start_of_iso_year(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -323,7 +323,8 @@ def to_start_of_quarter(self):
for year in years_range(stress):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz1, tz2 in itertools.product(timezones, timezones):
with Step(f"{dt} {tz1} -> {tz2}"):
@ -352,7 +353,8 @@ def to_start_of_month(self):
for year in years_range(stress=stress):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz1, tz2 in itertools.product(timezones, timezones):
with Step(f"{dt} {tz1} -> {tz2}"):
@ -381,7 +383,8 @@ def to_monday(self):
for year in years_range(stress):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz1, tz2 in itertools.product(timezones, timezones):
with Step(f"{dt} {tz1} -> {tz2}"):
@ -409,7 +412,8 @@ def to_start_of_week(self):
for year in years_range(stress):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz1, tz2 in itertools.product(timezones, timezones):
for mode in (0, 1): # mode - week beginning, either 0 (Sunday) or 1 (Monday)
@ -439,7 +443,8 @@ def to_start_of_day(self):
for year in years_range(stress):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -465,7 +470,7 @@ def to_start_of_hour(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -491,7 +496,7 @@ def to_start_of_minute(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -517,7 +522,7 @@ def to_start_of_second(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -540,7 +545,7 @@ def to_start_of_minutes_interval(self, interval, func):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -640,7 +645,7 @@ def to_start_of_interval(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
for interval in intervals_testing_ranges.keys():
@ -665,7 +670,7 @@ def to_iso(self, func, isocalendar_pos):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -709,7 +714,7 @@ def to_time(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with When(f"{dt} {tz}"):
@ -735,7 +740,7 @@ def to_relative_quarter_num(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with When(f"{dt} {tz}"):
@ -760,7 +765,8 @@ def to_relative_week_num(self):
for year in years_range(stress=stress):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with When(f"{dt} {tz}"):
@ -787,7 +793,7 @@ def to_relative_month_num(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with When(f"{dt} {tz}"):
@ -814,7 +820,7 @@ def to_relative_day_num(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with When(f"{dt} {tz}"):
@ -840,7 +846,7 @@ def to_relative_time(self, divisor, func):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with When(f"{dt} {tz}"):
@ -984,7 +990,7 @@ def to_week_year_week(self, clh_func, ret_year):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
for mode in range(0, 10):
@ -1031,7 +1037,7 @@ def to_yyyymm(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -1059,7 +1065,7 @@ def to_yyyymmdd(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -1087,7 +1093,7 @@ def to_yyyymmddhhmmss(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
with Step(f"{dt} {tz}"):
@ -1113,11 +1119,11 @@ def now(self):
timezones = timezones_range(stress)
for tz in timezones:
with Given("I record current time and localize it"):
with Given(f"I record current time and localize it for {tz}"):
dt = datetime.datetime.now()
dt = dt.astimezone(pytz.timezone(tz))
with Step(f"{dt} {tz}"):
with Step(f"{dt} in {tz}"):
with When("I execute query and format its result to string"):
r = self.context.node.query(f"SELECT toDateTime64(now(), 0, '{tz}')")
query_result = r.output
@ -1138,6 +1144,7 @@ def now(self):
@Requirements(
RQ_SRS_010_DateTime64_ExtendedRange_DatesAndTimesFunctions_today("1.0")
)
@Flags(SKIP)
def today(self):
"""Check the today() conversion to DateTime64 extended range.
"""
@ -1145,7 +1152,7 @@ def today(self):
timezones = timezones_range(stress)
for tz in timezones:
with Given("I record current time and localize it"):
with Given(f"I record current time and localize it for {tz}"):
dt = datetime.datetime.now()
dt = dt.astimezone(pytz.timezone(tz))
@ -1178,7 +1185,7 @@ def yesterday(self):
timezones = timezones_range(stress)
for tz in timezones:
with Given("I record current time and localize it"):
with Given(f"I record current time and localize it for {tz}"):
dt = datetime.datetime.now()
dt = dt.astimezone(pytz.timezone(tz))
@ -1219,7 +1226,7 @@ def add_subtract_functions(self, clh_func, py_key, test_range, years_padding=(1,
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for tz in timezones:
for incr in test_range:
@ -1454,7 +1461,7 @@ def format_date_time(self):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with And("I format the datetimes in every possible way"):
with When(f"I format the datetimes in {year} in every possible way"):
for dt in datetimes:
for tz in timezones:
for mode in modes:
@ -1503,7 +1510,8 @@ def time_slots(self):
for year in years_range(stress=stress):
with Given(f"I choose datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
with When("I check each of the datetimes"):
with When(f"I check each of the datetimes in {year}"):
for dt in datetimes:
for duration in range(1, 100, 9):
for size in range(1, 50, 3):
@ -1529,5 +1537,10 @@ def date_time_funcs(self, node="clickhouse1"):
"""
self.context.node = self.context.cluster.node(node)
for scenario in loads(current_module(), Scenario):
Scenario(run=scenario, flags=TE)
tasks = []
with Pool(4) as pool:
try:
for scenario in loads(current_module(), Scenario):
run_scenario(pool, tasks, Scenario(test=scenario))
finally:
join(tasks)

View File

@ -137,10 +137,6 @@ def timezones_support(self):
exec_query(request=query, expected=f"{dt_str}")
@TestFeature
def generic(self, node="clickhouse1"):
"""Check the basic operations with DateTime64
@ -148,4 +144,4 @@ def generic(self, node="clickhouse1"):
self.context.node = self.context.cluster.node(node)
for scenario in loads(current_module(), Scenario, Suite):
Scenario(run=scenario, flags=TE)
Scenario(run=scenario)

View File

@ -148,8 +148,6 @@ def dst_time_zone_switch(self):
exec_query(request=query, expected=f"{dt1_str}")
@TestFeature
@Name("non existent time")
@Requirements(
@ -161,4 +159,4 @@ def feature(self, node="clickhouse1"):
self.context.node = self.context.cluster.node(node)
for scenario in loads(current_module(), Scenario, Suite):
Scenario(run=scenario, flags=TE)
Scenario(run=scenario)

View File

@ -25,7 +25,7 @@ def to_int_8_16_32_64_128_256(self, cast):
timezones = timezones_range(stress)
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
for d in datetimes:
@ -66,7 +66,7 @@ def to_uint_8_16_32_64_256(self, cast):
timezones = timezones_range(stress)
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
for d in datetimes:
@ -107,7 +107,7 @@ def to_float_32_64(self, cast):
timezones = timezones_range(stress)
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
for d in datetimes:
@ -142,7 +142,7 @@ def to_datetime64_from_string_missing_time(self):
timezones = timezones_range(stress)
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
for dt in datetimes:
@ -169,7 +169,7 @@ def to_datetime64(self):
timezones = timezones_range(stress)
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
for dt in datetimes:
@ -196,7 +196,7 @@ def to_date(self, cast):
timezones = timezones_range(stress)
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
for dt in datetimes:
@ -232,7 +232,7 @@ def to_datetime(self, cast):
timezones = timezones_range(stress)
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
for dt in datetimes:
@ -271,7 +271,7 @@ def to_string(self, cast):
timezones = timezones_range(stress)
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
for dt in datetimes:
@ -311,7 +311,7 @@ def to_decimal_32_64_128_256(self, cast):
scales = {32: 9, 64: 18, 128: 38, 256: 76}
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress)
for d in datetimes:
@ -345,7 +345,7 @@ def to_unix_timestamp64_milli_micro_nano(self, scale):
func = {3: 'Milli', 6: 'Micro', 9: 'Nano'}
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
for d in datetimes:
@ -408,7 +408,7 @@ def from_unix_timestamp64_milli_micro_nano(self, scale):
func = {3: 'Milli', 6: 'Micro', 9: 'Nano'}
for year in years_range(stress):
with Given("I select datetimes in a year"):
with Given(f"I select datetimes in {year}"):
datetimes = select_dates_in_year(year=year, stress=stress, microseconds=True)
for d in datetimes:
@ -462,18 +462,16 @@ def from_unix_timestamp64_nano(self):
from_unix_timestamp64_milli_micro_nano(scale=9)
@TestFeature
@Requirements(
RQ_SRS_010_DateTime64_ExtendedRange_TypeConversionFunctions("1.0")
)
def type_conversion(self, node="clickhouse1"):
"""Check the type conversion operations with DateTime64. Cast can be set as Requirement thereby as the module
"""Check the type conversion operations with DateTime64.
Cast can be set as Requirement thereby as the module
tests exactly what CAST does.
"""
self.context.node = self.context.cluster.node(node)
for scenario in loads(current_module(), Scenario):
Scenario(run=scenario, flags=TE)
Scenario(run=scenario)

View File

@ -19,6 +19,11 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"clickhouse": ("clickhouse1",),
}
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
with Cluster(local, clickhouse_binary_path, nodes=nodes,
docker_compose_project_dir=os.path.join(current_dir(), "example_env")) as cluster:
self.context.cluster = cluster

View File

@ -29,6 +29,11 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"kerberos": ("kerberos", ),
}
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
with Cluster(local, clickhouse_binary_path, nodes=nodes,
docker_compose_project_dir=os.path.join(current_dir(), "kerberos_env")) as cluster:
self.context.cluster = cluster

View File

@ -45,15 +45,15 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"clickhouse": ("clickhouse1", "clickhouse2", "clickhouse3"),
}
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
with Cluster(local, clickhouse_binary_path, nodes=nodes,
docker_compose_project_dir=os.path.join(current_dir(), "ldap_authentication_env")) as cluster:
self.context.cluster = cluster
if stress is not None or not hasattr(self.context, "stress"):
self.context.stress = stress
if parallel is not None or not hasattr(self.context, "parallel"):
self.context.parallel = parallel
Scenario(run=load("ldap.authentication.tests.sanity", "scenario"))
Scenario(run=load("ldap.authentication.tests.multiple_servers", "scenario"))
Feature(run=load("ldap.authentication.tests.connections", "feature"))

View File

@ -45,14 +45,14 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"clickhouse": ("clickhouse1", "clickhouse2", "clickhouse3"),
}
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
with Cluster(local, clickhouse_binary_path, nodes=nodes,
docker_compose_project_dir=os.path.join(current_dir(), "ldap_external_user_directory_env")) as cluster:
self.context.cluster = cluster
if stress is not None or not hasattr(self.context, "stress"):
self.context.stress = stress
if parallel is not None or not hasattr(self.context, "parallel"):
self.context.parallel = parallel
Scenario(run=load("ldap.authentication.tests.sanity", "scenario"))
Scenario(run=load("ldap.external_user_directory.tests.simple", "scenario"))

View File

@ -35,14 +35,14 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"clickhouse": ("clickhouse1", "clickhouse2", "clickhouse3"),
}
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
with Cluster(local, clickhouse_binary_path, nodes=nodes,
docker_compose_project_dir=os.path.join(current_dir(), "ldap_role_mapping_env")) as cluster:
self.context.cluster = cluster
if stress is not None or not hasattr(self.context, "stress"):
self.context.stress = stress
if parallel is not None or not hasattr(self.context, "parallel"):
self.context.parallel = parallel
Scenario(run=load("ldap.authentication.tests.sanity", "scenario"), name="ldap sanity")
Feature(run=load("ldap.role_mapping.tests.server_config", "feature"))

View File

@ -119,10 +119,6 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
with Cluster(local, clickhouse_binary_path, nodes=nodes,
docker_compose_project_dir=os.path.join(current_dir(), "map_type_env")) as cluster:
self.context.cluster = cluster
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
Feature(run=load("map_type.tests.feature", "feature"))

View File

@ -163,18 +163,19 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
"""RBAC regression.
"""
top().terminating = False
nodes = {
"clickhouse":
("clickhouse1", "clickhouse2", "clickhouse3")
}
if stress is not None:
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
with Cluster(local, clickhouse_binary_path, nodes=nodes,
docker_compose_project_dir=os.path.join(current_dir(), "rbac_env")) as cluster:
self.context.cluster = cluster
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
Feature(run=load("rbac.tests.syntax.feature", "feature"))
Feature(run=load("rbac.tests.privileges.feature", "feature"))

View File

@ -23,7 +23,7 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
with Pool(7) as pool:
try:
run_scenario(pool, tasks, Feature(test=load("example.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("ldap.regression", "regression")), args)
#run_scenario(pool, tasks, Feature(test=load("ldap.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("rbac.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("aes_encryption.regression", "regression")), args)
run_scenario(pool, tasks, Feature(test=load("map_type.regression", "regression")), args)

View File

@ -94,10 +94,6 @@ def regression(self, local, clickhouse_binary_path, stress=None, parallel=None):
with Cluster(local, clickhouse_binary_path, nodes=nodes,
docker_compose_project_dir=os.path.join(current_dir(), "window_functions_env")) as cluster:
self.context.cluster = cluster
self.context.stress = stress
if parallel is not None:
self.context.parallel = parallel
Feature(run=load("window_functions.tests.feature", "feature"), flags=TE)