command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_UPGRADE_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_UPGRADE_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_UPGRADE_DTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n 
break\n fi\n done\ndone\n(exit ${exit_code})\n"
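# A minimal sketch of the iteration-count split used by the repeated-run commands in this section,
# assuming a hypothetical total of 25 repeats spread over 4 parallel runners (both numbers are
# illustrative, not values from this config):
#
#   total=25; nodes=4
#   for index in 0 1 2 3; do                        # stand-in for CIRCLE_NODE_INDEX
#     count=$((total / nodes))                      # integer share for every runner (6)
#     if ((index < total % nodes)); then            # the first (total % nodes) runners...
#       count=$((count + 1))                        # ...each absorb one leftover iteration
#     fi
#     echo "runner $index runs $count iterations"   # 7, 6, 6, 6 -- 25 in total
#   done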
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_without_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --skip-resource-intensive-tests --pytest-options '-k not cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_without_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_without_vnodes_raw /tmp/all_dtest_tests_j17_without_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j17_without_vnodes_raw > /tmp/all_dtest_tests_j17_without_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_without_vnodes > /tmp/split_dtest_tests_j17_without_vnodes.txt\ncat /tmp/split_dtest_tests_j17_without_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_without_vnodes_final.txt\ncat /tmp/split_dtest_tests_j17_without_vnodes_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j17_without_vnodes_final.txt\"\ncat /tmp/split_dtest_tests_j17_without_vnodes_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j17_without_vnodes_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --skip-resource-intensive-tests --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j17_without_vnodes.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit 
${exit_code})\n"
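# The repeated-run commands accept entries of the form "SomeClass#someMethod" and take them apart
# with bash parameter expansion, as in the loop above. A minimal sketch with a purely illustrative
# test name:
#
#   test="org.apache.cassandra.fql.FullQueryLoggerTest#testLogging"   # hypothetical entry
#   class=${test%"#"*}    # drop the shortest '#...' suffix -> the fully qualified class
#   method=${test#*"#"}   # drop the shortest '...#' prefix -> testLogging
#   echo "-Dtest.name=${class##*.} -Dtest.methods=$method"
#   # -> -Dtest.name=FullQueryLoggerTest -Dtest.methods=testLogging (short name, as fqltool-test expects)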
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_with_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_with_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_with_vnodes_raw /tmp/all_dtest_tests_j17_with_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j17_with_vnodes_raw > /tmp/all_dtest_tests_j17_with_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_with_vnodes > /tmp/split_dtest_tests_j17_with_vnodes.txt\ncat /tmp/split_dtest_tests_j17_with_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_with_vnodes_final.txt\ncat /tmp/split_dtest_tests_j17_with_vnodes_final.txt\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.11/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_dtests_latest)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --configuration-yaml=cassandra_latest.yaml --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_dtests_latest_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_dtests_latest_raw /tmp/all_dtest_tests_j17_dtests_latest\nelse\n grep -e '' /tmp/all_dtest_tests_j17_dtests_latest_raw > /tmp/all_dtest_tests_j17_dtests_latest || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_dtests_latest > /tmp/split_dtest_tests_j17_dtests_latest.txt\ncat /tmp/split_dtest_tests_j17_dtests_latest.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_dtests_latest_final.txt\ncat /tmp/split_dtest_tests_j17_dtests_latest_final.txt\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n 
done\ndone\n(exit ${exit_code})\n"
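# The testtag computed in each repeated-run command only routes result files: tagged targets such as
# test-compression write JUnit XML under build/test/output/<tag> and logs under build/test/logs/<tag>*,
# while untagged targets leave testtag empty and use the base directories. A minimal sketch of the
# collection step, with hypothetical status and iteration values:
#
#   testtag="compression"                                  # e.g. for the test-compression target
#   src="build/test/output/${testtag}"                     # -> build/test/output/compression
#   dest="/tmp/results/repeated_utests/output/passes/01"   # "passes" and "01" are illustrative
#   mkdir -p "$dest" && [ -d "$src" ] && mv "$src"/* "$dest"/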
name: Configure virtualenv and python Dependencies
command: |
# note, this should be super quick as all dependencies should be pre-installed in the docker image
# if additional dependencies were added to requirements.txt and the docker image hasn't been updated
# we'd have to install them here at runtime -- which will make things slow, so do yourself a favor and
# rebuild the docker image! (it automatically pulls the latest requirements.txt on build)
source ~/env3.8/bin/activate
export PATH=$JAVA_HOME/bin:$PATH
pip3 install --exists-action w --upgrade -r ~/cassandra-dtest/requirements.txt
pip3 uninstall -y cqlsh
pip3 freeze
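# A couple of read-only checks that could be appended here to verify the environment this step sets
# up (not part of the original config, just a sketch):
#   python -c 'import sys; print(sys.prefix)'   # should point at the ~/env3.8 virtualenv
#   command -v java && java -version            # should resolve to $JAVA_HOME/bin/java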
- run:
name: Determine Tests to Run (j11_dtests_latest)
no_output_timeout: 5m
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_dtests_latest)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --configuration-yaml=cassandra_latest.yaml --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_dtests_latest_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_dtests_latest_raw /tmp/all_dtest_tests_j11_dtests_latest\nelse\n grep -e '' /tmp/all_dtest_tests_j11_dtests_latest_raw > /tmp/all_dtest_tests_j11_dtests_latest || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_dtests_latest > /tmp/split_dtest_tests_j11_dtests_latest.txt\ncat /tmp/split_dtest_tests_j11_dtests_latest.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_dtests_latest_final.txt\ncat /tmp/split_dtest_tests_j11_dtests_latest_final.txt\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n 
done\ndone\n(exit ${exit_code})\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.11/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_without_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_without_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_without_vnodes_raw /tmp/all_dtest_tests_j11_without_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j11_without_vnodes_raw > /tmp/all_dtest_tests_j11_without_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_without_vnodes > /tmp/split_dtest_tests_j11_without_vnodes.txt\ncat /tmp/split_dtest_tests_j11_without_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_without_vnodes_final.txt\ncat /tmp/split_dtest_tests_j11_without_vnodes_final.txt\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-latest\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-latest $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.11/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_without_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_without_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_without_vnodes_raw /tmp/all_dtest_tests_j17_without_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j17_without_vnodes_raw > /tmp/all_dtest_tests_j17_without_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_without_vnodes > /tmp/split_dtest_tests_j17_without_vnodes.txt\ncat /tmp/split_dtest_tests_j17_without_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_without_vnodes_final.txt\ncat /tmp/split_dtest_tests_j17_without_vnodes_final.txt\n"
name: Configure virtualenv and python Dependencies
command: |
# note, this should be super quick as all dependencies should be pre-installed in the docker image
# if additional dependencies were added to requirements.txt and the docker image hasn't been updated
# we'd have to install them here at runtime -- which will make things slow, so do yourself a favor and
# rebuild the docker image! (it automatically pulls the latest requirements.txt on build)
source ~/env3.8/bin/activate
export PATH=$JAVA_HOME/bin:$PATH
pip3 install --exists-action w --upgrade -r ~/cassandra-dtest/requirements.txt
pip3 uninstall -y cqlsh
pip3 freeze
- run:
name: Determine Tests to Run (j11_without_vnodes)
no_output_timeout: 5m
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_without_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_without_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_without_vnodes_raw /tmp/all_dtest_tests_j11_without_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j11_without_vnodes_raw > /tmp/all_dtest_tests_j11_without_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_without_vnodes > /tmp/split_dtest_tests_j11_without_vnodes.txt\ncat /tmp/split_dtest_tests_j11_without_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_without_vnodes_final.txt\ncat /tmp/split_dtest_tests_j11_without_vnodes_final.txt\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_STRESS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_STRESS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_STRESS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=stress-test-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant stress-test-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n 
done\ndone\n(exit ${exit_code})\n"
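# The break at the end of every repeat loop is gated by REPEATED_TESTS_STOP_ON_FAILURE: when it is
# "true", the first failing iteration stops further repeats of that test (the outer per-test loop
# still continues, and the final `(exit ${exit_code})` keeps the job marked as failed). A sketch of
# the gate in isolation, with illustrative values:
#
#   REPEATED_TESTS_STOP_ON_FAILURE=true
#   exit_code=1
#   if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( exit_code > 0 )); then echo "stop repeats"; fi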
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_large_with_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --only-resource-intensive-tests --force-resource-intensive-tests --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_large_with_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_large_with_vnodes_raw /tmp/all_dtest_tests_j11_large_with_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j11_large_with_vnodes_raw > /tmp/all_dtest_tests_j11_large_with_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_large_with_vnodes > /tmp/split_dtest_tests_j11_large_with_vnodes.txt\ncat /tmp/split_dtest_tests_j11_large_with_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_large_with_vnodes_final.txt\ncat /tmp/split_dtest_tests_j11_large_with_vnodes_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j11_large_with_vnodes_final.txt\"\ncat /tmp/split_dtest_tests_j11_large_with_vnodes_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j11_large_with_vnodes_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --use-vnodes --num-tokens=16 --only-resource-intensive-tests --force-resource-intensive-tests --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j11_large_with_vnodes.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_large_without_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --only-resource-intensive-tests --force-resource-intensive-tests --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_large_without_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_large_without_vnodes_raw /tmp/all_dtest_tests_j11_large_without_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j11_large_without_vnodes_raw > /tmp/all_dtest_tests_j11_large_without_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_large_without_vnodes > /tmp/split_dtest_tests_j11_large_without_vnodes.txt\ncat /tmp/split_dtest_tests_j11_large_without_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_large_without_vnodes_final.txt\ncat /tmp/split_dtest_tests_j11_large_without_vnodes_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j11_large_without_vnodes_final.txt\"\ncat /tmp/split_dtest_tests_j11_large_without_vnodes_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j11_large_without_vnodes_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --only-resource-intensive-tests --force-resource-intensive-tests --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j11_large_without_vnodes.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-system-keyspace-directory\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-system-keyspace-directory $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n 
done\ndone\n(exit ${exit_code})\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit 
${exit_code})\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=true\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit 
${exit_code})\n"
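# In the two test-jvm-dtest-some commands above, vnodes=true, so the ant call gains an extra JVM
# property; with vnodes=false (as in the other repeated-run commands) vnodes_args stays empty and
# the property is omitted. A minimal sketch of the resulting invocation, with an illustrative class
# name:
#
#   ant test-jvm-dtest-some "-Dtest.name=ReadRepairTest" \
#       "-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'" -Dno-build-test=true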
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-cdc\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-cdc $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_with_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --skip-resource-intensive-tests --pytest-options '-k not cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_with_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_with_vnodes_raw /tmp/all_dtest_tests_j17_with_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j17_with_vnodes_raw > /tmp/all_dtest_tests_j17_with_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_with_vnodes > /tmp/split_dtest_tests_j17_with_vnodes.txt\ncat /tmp/split_dtest_tests_j17_with_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_with_vnodes_final.txt\ncat /tmp/split_dtest_tests_j17_with_vnodes_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j17_with_vnodes_final.txt\"\ncat /tmp/split_dtest_tests_j17_with_vnodes_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j17_with_vnodes_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --use-vnodes --num-tokens=16 --skip-resource-intensive-tests --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j17_with_vnodes.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-latest\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-latest $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_upgradetests_without_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --execute-upgrade-tests-only --upgrade-target-version-only --upgrade-version-selection all --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_upgradetests_without_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_upgradetests_without_vnodes_raw /tmp/all_dtest_tests_j11_upgradetests_without_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j11_upgradetests_without_vnodes_raw > /tmp/all_dtest_tests_j11_upgradetests_without_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_upgradetests_without_vnodes > /tmp/split_dtest_tests_j11_upgradetests_without_vnodes.txt\ncat /tmp/split_dtest_tests_j11_upgradetests_without_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_upgradetests_without_vnodes_final.txt\ncat /tmp/split_dtest_tests_j11_upgradetests_without_vnodes_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j11_upgradetests_without_vnodes_final.txt\"\ncat /tmp/split_dtest_tests_j11_upgradetests_without_vnodes_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j11_upgradetests_without_vnodes_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --execute-upgrade-tests-only --upgrade-target-version-only --upgrade-version-selection all --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j11_upgradetests_without_vnodes.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-compression\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-compression $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_large_with_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --only-resource-intensive-tests --force-resource-intensive-tests --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_large_with_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_large_with_vnodes_raw /tmp/all_dtest_tests_j17_large_with_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j17_large_with_vnodes_raw > /tmp/all_dtest_tests_j17_large_with_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_large_with_vnodes > /tmp/split_dtest_tests_j17_large_with_vnodes.txt\ncat /tmp/split_dtest_tests_j17_large_with_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_large_with_vnodes_final.txt\ncat /tmp/split_dtest_tests_j17_large_with_vnodes_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j17_large_with_vnodes_final.txt\"\ncat /tmp/split_dtest_tests_j17_large_with_vnodes_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j17_large_with_vnodes_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --use-vnodes --num-tokens=16 --only-resource-intensive-tests --force-resource-intensive-tests --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j17_large_with_vnodes.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_with_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_with_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_with_vnodes_raw /tmp/all_dtest_tests_j11_with_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j11_with_vnodes_raw > /tmp/all_dtest_tests_j11_with_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_with_vnodes > /tmp/split_dtest_tests_j11_with_vnodes.txt\ncat /tmp/split_dtest_tests_j11_with_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_with_vnodes_final.txt\ncat /tmp/split_dtest_tests_j11_with_vnodes_final.txt\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_SIMULATOR_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_SIMULATOR_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_SIMULATOR_DTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-simulator-dtest\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-simulator-dtest $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n 
fi\n done\ndone\n(exit ${exit_code})\n"
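# Repeated-run loop for the JVM dtests ('test-jvm-dtest-some' ant target).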
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit 
${exit_code})\n"
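# Collect and split the j11_dtests_latest and j11_with_vnodes dtest lists; these steps use the python 3.11 virtualenv.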
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.11/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_dtests_latest)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --configuration-yaml=cassandra_latest.yaml --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_dtests_latest_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_dtests_latest_raw /tmp/all_dtest_tests_j11_dtests_latest\nelse\n grep -e '' /tmp/all_dtest_tests_j11_dtests_latest_raw > /tmp/all_dtest_tests_j11_dtests_latest || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_dtests_latest > /tmp/split_dtest_tests_j11_dtests_latest.txt\ncat /tmp/split_dtest_tests_j11_dtests_latest.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_dtests_latest_final.txt\ncat /tmp/split_dtest_tests_j11_dtests_latest_final.txt\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.11/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_with_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_with_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_with_vnodes_raw /tmp/all_dtest_tests_j11_with_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j11_with_vnodes_raw > /tmp/all_dtest_tests_j11_with_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_with_vnodes > /tmp/split_dtest_tests_j11_with_vnodes.txt\ncat /tmp/split_dtest_tests_j11_with_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_with_vnodes_final.txt\ncat /tmp/split_dtest_tests_j11_with_vnodes_final.txt\n"
name: Configure virtualenv and python Dependencies
command: |
# note, this should be super quick as all dependencies should be pre-installed in the docker image
# if additional dependencies were added to requirements.txt and the docker image hasn't been updated
# we'd have to install them here at runtime -- which will make things slow, so do yourself a favor and
# rebuild the docker image! (it automatically pulls the latest requirements.txt on build)
source ~/env3.8/bin/activate
export PATH=$JAVA_HOME/bin:$PATH
pip3 install --exists-action w --upgrade -r ~/cassandra-dtest/requirements.txt
pip3 uninstall -y cqlsh
pip3 freeze
- run:
name: Determine Tests to Run (j17_dtests_latest)
no_output_timeout: 5m
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_dtests_latest)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --configuration-yaml=cassandra_latest.yaml --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_dtests_latest_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_dtests_latest_raw /tmp/all_dtest_tests_j17_dtests_latest\nelse\n grep -e '' /tmp/all_dtest_tests_j17_dtests_latest_raw > /tmp/all_dtest_tests_j17_dtests_latest || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_dtests_latest > /tmp/split_dtest_tests_j17_dtests_latest.txt\ncat /tmp/split_dtest_tests_j17_dtests_latest.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_dtests_latest_final.txt\ncat /tmp/split_dtest_tests_j17_dtests_latest_final.txt\n"
name: Configure virtualenv and python Dependencies
command: |
# note, this should be super quick as all dependencies should be pre-installed in the docker image
# if additional dependencies were added to requirements.txt and the docker image hasn't been updated
# we'd have to install them here at runtime -- which will make things slow, so do yourself a favor and
# rebuild the docker image! (it automatically pulls the latest requirements.txt on build)
source ~/env3.11/bin/activate
export PATH=$JAVA_HOME/bin:$PATH
pip3 install --exists-action w --upgrade -r ~/cassandra-dtest/requirements.txt
pip3 uninstall -y cqlsh
pip3 freeze
- run:
name: Determine Tests to Run (j17_with_vnodes)
no_output_timeout: 5m
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.11/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_with_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_with_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_with_vnodes_raw /tmp/all_dtest_tests_j17_with_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j17_with_vnodes_raw > /tmp/all_dtest_tests_j17_with_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_with_vnodes > /tmp/split_dtest_tests_j17_with_vnodes.txt\ncat /tmp/split_dtest_tests_j17_with_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_with_vnodes_final.txt\ncat /tmp/split_dtest_tests_j17_with_vnodes_final.txt\n"
pip3 install --exists-action w --upgrade -r ~/cassandra-dtest/requirements.txt
pip3 uninstall -y cqlsh
pip3 freeze
- run:
name: Determine Tests to Run (j17_dtests_latest)
no_output_timeout: 5m
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_dtests_latest)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --configuration-yaml=cassandra_latest.yaml --skip-resource-intensive-tests --pytest-options '-k not cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_dtests_latest_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_dtests_latest_raw /tmp/all_dtest_tests_j17_dtests_latest\nelse\n grep -e '' /tmp/all_dtest_tests_j17_dtests_latest_raw > /tmp/all_dtest_tests_j17_dtests_latest || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_dtests_latest > /tmp/split_dtest_tests_j17_dtests_latest.txt\ncat /tmp/split_dtest_tests_j17_dtests_latest.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_dtests_latest_final.txt\ncat /tmp/split_dtest_tests_j17_dtests_latest_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j17_dtests_latest_final.txt\"\ncat /tmp/split_dtest_tests_j17_dtests_latest_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j17_dtests_latest_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --use-vnodes --num-tokens=16 --configuration-yaml=cassandra_latest.yaml --skip-resource-intensive-tests --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j17_dtests_latest.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-oa\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-oa $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n"
pip3 install --exists-action w --upgrade -r ~/cassandra-dtest/requirements.txt
pip3 uninstall -y cqlsh
pip3 freeze
- run:
name: Determine Tests to Run (j11_dtests_latest)
no_output_timeout: 5m
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_dtests_latest)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --configuration-yaml=cassandra_latest.yaml --skip-resource-intensive-tests --pytest-options '-k not cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_dtests_latest_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_dtests_latest_raw /tmp/all_dtest_tests_j11_dtests_latest\nelse\n grep -e '' /tmp/all_dtest_tests_j11_dtests_latest_raw > /tmp/all_dtest_tests_j11_dtests_latest || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_dtests_latest > /tmp/split_dtest_tests_j11_dtests_latest.txt\ncat /tmp/split_dtest_tests_j11_dtests_latest.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_dtests_latest_final.txt\ncat /tmp/split_dtest_tests_j11_dtests_latest_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j11_dtests_latest_final.txt\"\ncat /tmp/split_dtest_tests_j11_dtests_latest_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j11_dtests_latest_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --use-vnodes --num-tokens=16 --configuration-yaml=cassandra_latest.yaml --skip-resource-intensive-tests --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j11_dtests_latest.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
pip3 install --exists-action w --upgrade -r ~/cassandra-dtest/requirements.txt
pip3 uninstall -y cqlsh
pip3 freeze
- run:
name: Determine Tests to Run (j11_without_vnodes)
no_output_timeout: 5m
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_without_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --skip-resource-intensive-tests --pytest-options '-k not cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_without_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_without_vnodes_raw /tmp/all_dtest_tests_j11_without_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j11_without_vnodes_raw > /tmp/all_dtest_tests_j11_without_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_without_vnodes > /tmp/split_dtest_tests_j11_without_vnodes.txt\ncat /tmp/split_dtest_tests_j11_without_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_without_vnodes_final.txt\ncat /tmp/split_dtest_tests_j11_without_vnodes_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j11_without_vnodes_final.txt\"\ncat /tmp/split_dtest_tests_j11_without_vnodes_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j11_without_vnodes_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --skip-resource-intensive-tests --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j11_without_vnodes.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_without_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --skip-resource-intensive-tests --pytest-options '-k cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_without_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_without_vnodes_raw /tmp/all_dtest_tests_j17_without_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j17_without_vnodes_raw > /tmp/all_dtest_tests_j17_without_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_without_vnodes > /tmp/split_dtest_tests_j17_without_vnodes.txt\ncat /tmp/split_dtest_tests_j17_without_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_without_vnodes_final.txt\ncat /tmp/split_dtest_tests_j17_without_vnodes_final.txt\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_FQLTOOL_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_FQLTOOL_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_FQLTOOL} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=fqltool-test\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant fqltool-test $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit 
${exit_code})\n"
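# Collect, split and run the resource-intensive j17_large_without_vnodes dtests.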
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j17_large_without_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --only-resource-intensive-tests --force-resource-intensive-tests --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j17_large_without_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j17_large_without_vnodes_raw /tmp/all_dtest_tests_j17_large_without_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j17_large_without_vnodes_raw > /tmp/all_dtest_tests_j17_large_without_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j17_large_without_vnodes > /tmp/split_dtest_tests_j17_large_without_vnodes.txt\ncat /tmp/split_dtest_tests_j17_large_without_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j17_large_without_vnodes_final.txt\ncat /tmp/split_dtest_tests_j17_large_without_vnodes_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j17_large_without_vnodes_final.txt\"\ncat /tmp/split_dtest_tests_j17_large_without_vnodes_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j17_large_without_vnodes_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --only-resource-intensive-tests --force-resource-intensive-tests --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j17_large_without_vnodes.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-oa\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-oa $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit ${exit_code})\n"
command: "# reminder: this code (along with all the steps) is independently executed on every circle container\n# so the goal here is to get the circleci script to return the tests *this* container will run\n# which we do via the `circleci` cli tool.\n\ncd cassandra-dtest\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\n\nif [ -n '' ]; then\n export \nfi\n\necho \"***Collected DTests (j11_with_vnodes)***\"\nset -eo pipefail && ./run_dtests.py --use-vnodes --skip-resource-intensive-tests --pytest-options '-k not cql' --dtest-print-tests-only --dtest-print-tests-output=/tmp/all_dtest_tests_j11_with_vnodes_raw --cassandra-dir=../cassandra\nif [ -z '' ]; then\n mv /tmp/all_dtest_tests_j11_with_vnodes_raw /tmp/all_dtest_tests_j11_with_vnodes\nelse\n grep -e '' /tmp/all_dtest_tests_j11_with_vnodes_raw > /tmp/all_dtest_tests_j11_with_vnodes || { echo \"Filter did not match any tests! Exiting build.\"; exit 0; }\nfi\nset -eo pipefail && circleci tests split --split-by=timings --timings-type=classname /tmp/all_dtest_tests_j11_with_vnodes > /tmp/split_dtest_tests_j11_with_vnodes.txt\ncat /tmp/split_dtest_tests_j11_with_vnodes.txt | tr '\\n' ' ' > /tmp/split_dtest_tests_j11_with_vnodes_final.txt\ncat /tmp/split_dtest_tests_j11_with_vnodes_final.txt\n"
command: "echo \"cat /tmp/split_dtest_tests_j11_with_vnodes_final.txt\"\ncat /tmp/split_dtest_tests_j11_with_vnodes_final.txt\n\nsource ~/env3.8/bin/activate\nexport PATH=$JAVA_HOME/bin:$PATH\nif [ -n '' ]; then\n export \nfi\n\njava -version\ncd ~/cassandra-dtest\nmkdir -p /tmp/dtest\n\necho \"env: $(env)\"\necho \"** done env\"\nmkdir -p /tmp/results/dtests\n# we need the \"set -o pipefail\" here so that the exit code that circleci will actually use is from pytest and not the exit code from tee\nexport SPLIT_TESTS=`cat /tmp/split_dtest_tests_j11_with_vnodes_final.txt`\nif [ ! -z \"$SPLIT_TESTS\" ]; then\n set -o pipefail && cd ~/cassandra-dtest && pytest --use-vnodes --num-tokens=16 --skip-resource-intensive-tests --log-cli-level=\"DEBUG\" --junit-xml=/tmp/results/dtests/pytest_result_j11_with_vnodes.xml -s --cassandra-dir=/home/cassandra/cassandra --keep-test-dir $SPLIT_TESTS 2>&1 | tee /tmp/dtest/stdout.txt\nelse\n echo \"Tune your parallelism, there are more containers than test classes. Nothing to do in this container\"\n (exit 1)\nfi\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_JVM_DTESTS_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_JVM_DTESTS_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_JVM_DTESTS} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=test-jvm-dtest-some\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant test-jvm-dtest-some $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit 
${exit_code})\n"
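# Note: in the JVM dtest repeat command above, vnodes is hardcoded to false, so vnodes_args stays
# empty and the `-Dcassandra.dtest.num_tokens=16` JVM argument is never passed in this variant;
# the branch is presumably retained so the same script template can also serve the vnodes-enabled
# variants of this job.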
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit 
${exit_code})\n"
command: "set -x\nexport PATH=$JAVA_HOME/bin:$PATH\ntime mv ~/cassandra /tmp\ncd /tmp/cassandra\nif [ -d ~/dtest_jars ]; then\n cp ~/dtest_jars/dtest* /tmp/cassandra/build/\nfi\n\n# Calculate the number of test iterations to be run by the current parallel runner.\ncount=$((${REPEATED_UTESTS_LONG_COUNT} / CIRCLE_NODE_TOTAL))\nif (($CIRCLE_NODE_INDEX < (${REPEATED_UTESTS_LONG_COUNT} % CIRCLE_NODE_TOTAL))); then\n count=$((count+1))\nfi\n\n# Put manually specified tests and automatically detected tests together, removing duplicates\ntests=$(echo ${REPEATED_UTESTS_LONG} | sed -e \"s/<nil>//\" | sed -e \"s/ //\" | tr \",\" \"\\n\" | tr \" \" \"\\n\" | sort -n | uniq -u)\necho \"Tests to be repeated: ${tests}\"\n\n# Prepare the JVM dtests vnodes argument, which is optional.\nvnodes=false\nvnodes_args=\"\"\nif [ \"$vnodes\" = true ] ; then\n vnodes_args=\"-Dtest.jvm.args='-Dcassandra.dtest.num_tokens=16'\"\nfi\n\n# Prepare the testtag for the target, used by the test macro in build.xml to group the output files\ntarget=long-testsome\ntesttag=\"\"\nif [[ $target == \"test-cdc\" ]]; then\n testtag=\"cdc\"\nelif [[ $target == \"test-compression\" ]]; then\n testtag=\"compression\"\nelif [[ $target == \"test-system-keyspace-directory\" ]]; then\n testtag=\"system_keyspace_directory\"\nelif [[ $target == \"test-latest\" ]]; then\n testtag=\"latest\"\nelif [[ $target == \"test-oa\" ]]; then\n testtag=\"oa\"\nfi\n\n# Run each test class as many times as requested.\nexit_code=\"$?\"\nfor test in $tests; do\n\n # Split class and method names from the test name\n if [[ $test =~ \"#\" ]]; then\n class=${test%\"#\"*}\n method=${test#*\"#\"}\n else\n class=$test\n method=\"\"\n fi\n\n # Prepare the -Dtest.name argument.\n # It can be the fully qualified class name or the short class name, depending on the target.\n if [[ $target == \"test\" || \\\n $target == \"test-cdc\" || \\\n $target == \"test-compression\" || \\\n $target == \"test-latest\" || \\\n $target == \"test-oa\" || \\\n $target == \"test-system-keyspace-directory\" || \\\n $target == \"fqltool-test\" || \\\n $target == \"long-test\" || \\\n $target == \"stress-test\" || \\\n $target == \"test-simulator-dtest\" ]]; then\n name_arg=\"-Dtest.name=${class##*.}\"\n else\n name_arg=\"-Dtest.name=$class\"\n fi\n\n # Prepare the -Dtest.methods argument, which is optional\n if [[ $method == \"\" ]]; then\n methods_arg=\"\"\n else\n methods_arg=\"-Dtest.methods=$method\"\n fi\n\n for i in $(seq -w 1 $count); do\n echo \"Running test $test, iteration $i of $count\"\n\n # run the test\n status=\"passes\"\n if !( set -o pipefail && \\\n ant long-testsome $name_arg $methods_arg $vnodes_args -Dno-build-test=true | \\\n tee stdout.txt \\\n ); then\n status=\"fails\"\n exit_code=1\n fi\n\n # move the stdout output file\n dest=/tmp/results/repeated_utests/stdout/${status}/${i}\n mkdir -p $dest\n mv stdout.txt $dest/${test}.txt\n\n # move the XML output files\n source=build/test/output/${testtag}\n dest=/tmp/results/repeated_utests/output/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" ]]; then\n mv $source/* $dest/\n fi\n\n # move the log files\n source=\"build/test/logs\"\n dest=/tmp/results/repeated_utests/logs/${status}/${i}\n mkdir -p $dest\n if [[ -d $source && -n \"$(ls $source)\" && -n \"$(ls ${source}/${testtag}*)\" ]]; then\n mv $source/${testtag}*/* $dest/\n fi\n \n # maybe stop iterations on test failure\n if [[ ${REPEATED_TESTS_STOP_ON_FAILURE} = true ]] && (( $exit_code > 0 )); then\n break\n fi\n done\ndone\n(exit 
${exit_code})\n"
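# Note: each repeat-runner command above collects per-iteration artifacts under
# /tmp/results/repeated_utests/{stdout,output,logs}/<passes|fails>/<iteration> and only reports
# failure at the very end via `(exit ${exit_code})`, so results from every iteration are preserved
# even when some runs fail, unless REPEATED_TESTS_STOP_ON_FAILURE=true stops the loop early.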