From 2d70e54e5472ee2768658312db465d1d20d30ad5 Mon Sep 17 00:00:00 2001 From: HarshLunagariya <40052763+HarshLunagariya@users.noreply.github.com> Date: Wed, 16 Mar 2022 19:59:48 +0530 Subject: [PATCH] Run Isolation tests as part of GitHub actions MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This framework will be able to: 1. Parse and understand PG isolation level test(.spec) file. 2. Maintain multiple connections and get their status: active, blocked(waiting for lock) or error. 3. Execute SQL commands on any connection without blocking the main thread and other connections. 4. Cancel or Terminate the test/SQL command in case some long running step or deadlock condition is encountered. 5. Generate an output file with SQL results and compare it with expected file provided. Above functionality will ensure that it’ll be able to test isolation behaviour. Added tests for foreign key related isolation tests : fk-contention and fk-deadlock Task:BABEL-2836,2021 Author: Harsh Lunagariya lunharsh@amazon.com Signed-off-by: Harsh Lunagariya lunharsh@amazon.com --- .github/workflows/isolation-tests.yml | 104 +++++ test/python/.gitignore | 5 + test/python/batch_run.py | 6 + test/python/compare_results.py | 2 + test/python/config.txt | 12 +- test/python/expected/pyodbc/fk-contention.out | 27 ++ test/python/expected/pyodbc/fk-deadlock.out | 230 +++++++++++ .../python/input/isolation/fk-contention.spec | 19 + test/python/input/isolation/fk-deadlock.spec | 46 +++ test/python/isolationtest/README.md | 23 ++ test/python/isolationtest/__init__.py | 0 .../isolationtest/isolationTestHandler.py | 46 +++ test/python/isolationtest/isolationTester.py | 385 ++++++++++++++++++ test/python/isolationtest/parser/__init__.py | 0 test/python/isolationtest/parser/specLexer.g4 | 17 + .../python/isolationtest/parser/specParser.g4 | 20 + .../isolationtest/specParserVisitorImpl.py | 122 ++++++ test/python/logs/.gitignore | 4 - test/python/logs/.gitkeep | 0 test/python/test_main.py | 10 +- test/python/utils/base.py | 15 +- 21 files changed, 1086 insertions(+), 7 deletions(-) create mode 100644 .github/workflows/isolation-tests.yml create mode 100644 test/python/.gitignore create mode 100644 test/python/expected/pyodbc/fk-contention.out create mode 100644 test/python/expected/pyodbc/fk-deadlock.out create mode 100644 test/python/input/isolation/fk-contention.spec create mode 100644 test/python/input/isolation/fk-deadlock.spec create mode 100644 test/python/isolationtest/README.md create mode 100644 test/python/isolationtest/__init__.py create mode 100644 test/python/isolationtest/isolationTestHandler.py create mode 100644 test/python/isolationtest/isolationTester.py create mode 100644 test/python/isolationtest/parser/__init__.py create mode 100644 test/python/isolationtest/parser/specLexer.g4 create mode 100644 test/python/isolationtest/parser/specParser.g4 create mode 100644 test/python/isolationtest/specParserVisitorImpl.py delete mode 100644 test/python/logs/.gitignore create mode 100644 test/python/logs/.gitkeep diff --git a/.github/workflows/isolation-tests.yml b/.github/workflows/isolation-tests.yml new file mode 100644 index 0000000000..210d98ba2b --- /dev/null +++ b/.github/workflows/isolation-tests.yml @@ -0,0 +1,104 @@ +name: Babelfish Smoke Tests +on: + push: + branches: + pull_request: + branches: + +jobs: + isolation-tests: + name: Isolation-Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Clone, build, and run tests for Postgres engine + run: 
| + cd .. + git clone https://github.com/babelfish-for-postgresql/postgresql_modified_for_babelfish.git + sudo apt-get update + sudo apt-get install uuid-dev openjdk-8-jre libicu-dev libxml2-dev openssl libssl-dev python-dev libossp-uuid-dev libpq-dev cmake pkg-config g++ build-essential bison + cd postgresql_modified_for_babelfish + ./configure --prefix=$HOME/postgres/ --with-python PYTHON=/usr/bin/python2.7 --enable-debug CFLAGS="-ggdb" --with-libxml --with-uuid=ossp --with-icu + make -j 4 2>error.txt + make install + cd contrib && make && sudo make install + - name: Copy ANTLR jar file + run: | + cd contrib/babelfishpg_tsql/antlr/thirdparty/antlr/ + sudo cp antlr-4.9.3-complete.jar /usr/local/lib + - name: Compile ANTLR + run: | + cd .. + wget http://www.antlr.org/download/antlr4-cpp-runtime-4.9.3-source.zip + unzip -d antlr4 antlr4-cpp-runtime-4.9.3-source.zip + cd antlr4 + mkdir build && cd build + cmake .. -D ANTLR_JAR_LOCATION=/usr/local/lib/antlr-4.9.3-complete.jar -DCMAKE_INSTALL_PREFIX=/usr/local -DWITH_DEMO=True + make + sudo make install + cp /usr/local/lib/libantlr4-runtime.so.4.9.3 ~/postgres/lib/ + - name: Set env variables and build extensions + run: | + export PG_CONFIG=~/postgres/bin/pg_config + export PG_SRC=~/work/babelfish_extensions/postgresql_modified_for_babelfish + export cmake=$(which cmake) + cd contrib/babelfishpg_money + make && make install + cd ../babelfishpg_common + make && make install + cd ../babelfishpg_tds + make && make install + cd ../babelfishpg_tsql + make && make install + - name: Install extensions + run: | + cd ~ + curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - + curl https://packages.microsoft.com/config/ubuntu/20.04/prod.list | sudo tee /etc/apt/sources.list.d/msprod.list + sudo apt-get update + sudo apt-get install -y mssql-tools unixodbc-dev + export PATH=/opt/mssql-tools/bin:$PATH + ~/postgres/bin/initdb -D ~/postgres/data/ + ~/postgres/bin/pg_ctl -D ~/postgres/data/ -l logfile start + cd postgres/data + sudo sed -i "s/#listen_addresses = 'localhost'/listen_addresses = '*'/g" postgresql.conf + sudo sed -i "s/#shared_preload_libraries = ''/shared_preload_libraries = 'babelfishpg_tds'/g" postgresql.conf + ipaddress=$(ifconfig eth0 | grep 'inet ' | cut -d: -f2 | awk '{ print $2}') + sudo echo "host all all $ipaddress/32 trust" >> pg_hba.conf + ~/postgres/bin/pg_ctl -D ~/postgres/data/ -l logfile restart + cd ~/work/babelfish_extensions/babelfish_extensions/ + sudo ~/postgres/bin/psql -d postgres -U runner -v user="python_user" -v db="python_testdb" -f .github/scripts/create_extension.sql + sqlcmd -S localhost -U python_user -P 12345678 -Q "SELECT @@version GO" + - name: Install Python + uses: actions/setup-python@v2 + with: + python-version: 3.7 + - name: Configure Python environment + run: | + cd ~ + curl https://packages.microsoft.com/config/ubuntu/20.04/prod.list | sudo tee /etc/apt/sources.list.d/mssql-release.list + cd ~/work/babelfish_extensions/babelfish_extensions/test/python + sudo ACCEPT_EULA=Y apt-get install -y msodbcsql17 python3-dev + pip3 install pyodbc pymssql pytest pytest-xdist antlr4-python3-runtime + - name: Generate .spec file parser + run: | + cd ~/work/babelfish_extensions/babelfish_extensions/test/python/isolationtest/ + java -Xmx500M -cp /usr/local/lib/antlr-4.9.3-complete.jar org.antlr.v4.Tool -Dlanguage=Python3 ./parser/*.g4 -visitor -no-listener + + - name: Run Isolation tests + run: | + cd test/python + compareWithFile=true \ + driver=pyodbc \ + runInParallel=false \ + testName=all \ + 
provider="ODBC Driver 17 for SQL Server" \ + fileGenerator_URL=localhost \ + fileGenerator_port=1433 \ + fileGenerator_databaseName=master \ + fileGenerator_user=python_user \ + fileGenerator_password=12345678 \ + inputFilesPath=./input/isolation \ + runIsolationTests=true \ + stepTimeLimit=30 \ + pytest -s --tb=long -q . diff --git a/test/python/.gitignore b/test/python/.gitignore new file mode 100644 index 0000000000..07a6f42f93 --- /dev/null +++ b/test/python/.gitignore @@ -0,0 +1,5 @@ +# Ignore everything in the following directories except .gitkeep +/output/*/* +/logs/* +!/**/.gitkeep + diff --git a/test/python/batch_run.py b/test/python/batch_run.py index fe3b47a36c..98f36173db 100644 --- a/test/python/batch_run.py +++ b/test/python/batch_run.py @@ -1,5 +1,8 @@ +from utils.config import config_dict as cfg from execute_query import parse_prepared_statement, parse_stored_procedures, process_transaction_statement,process_statement_in_file_mode from python_authentication import py_authentication +if cfg['runIsolationTests'] == 'true': + from isolationtest.isolationTestHandler import isolationTestHandler import os @@ -86,6 +89,9 @@ def batch_run(bbl_cnxn, file_handler, file, logger): else: failed += 1 + elif f_type == "spec": + flag = isolationTestHandler(file ,file_handler, logger) + return (passed, failed) diff --git a/test/python/compare_results.py b/test/python/compare_results.py index 687b67db2d..01800e2f8c 100644 --- a/test/python/compare_results.py +++ b/test/python/compare_results.py @@ -93,6 +93,8 @@ def handle_exception_in_file(e, file_writer): def process_multiple_resultsets(bbl_rs, file_writer, result_processed, result_set_exist): #not possible initial value count = -10 + if(result_set_exist is None): + result_set_exist = (bbl_rs.description is not None) while True: diff --git a/test/python/config.txt b/test/python/config.txt index d511f1de59..de10d552c4 100644 --- a/test/python/config.txt +++ b/test/python/config.txt @@ -8,7 +8,7 @@ fileGenerator_password = # PATH TO INPUT TEST FILES FOLDER inputFilesPath = ./input -# SPECIFY IF YOU WISH TO EXECUTE TEST FILES SEQUENTIALLY OR IN PARALLEL +# SPECIFY IF YOU WISH TO EXECUTE TEST FILES SEQUENTIALLY OR IN PARALLEL(FALSE FOR ISOLATION TESTS) runInParallel = false # SPECIFY IF YOU WISH TO PRINT ALL THE LOGS/DIFF TO CONSOLE @@ -35,6 +35,12 @@ outputColumnName = false # SPECIFY IF ERROR CODE SHOULD BE DISPLAYED IN OUTPUT FILE outputErrorCode = true +############################ ALLOW TO RUN ISOLATION TESTS (IF TRUE THEN RUNS ALL THE .SPEC FILES FROM INPUT DIRECTORY OTHERWISE SKIP THEM) ################################################## +runIsolationTests = false + +############################ MAX TIME LIMIT FOR SINGLE STEP QUERY EXECUTION IN SECONDS(FOR ISOLATION TESTS ONLY) ############################ +stepTimeLimit = 30 + ############################################ WHICH TEST TO RUN ############################################ # SET AS "all" TO RUN ALL THE TESTS FOR THE INPUT FILES INSIDE DIRECTORY WHOSE PATH IS SPECIFIED BY @@ -42,3 +48,7 @@ outputErrorCode = true # MULTIPLE INPUT TETS FILE NAMES CAN BE GIVEN SEPARATED WITH A SEMICOLON AND NO SPACES testName = all +########################################### WHICH TEST TO IGNORE ########################################## + +# MULTIPLE INPUT TETS FILE NAMES CAN BE GIVEN SEPARATED WITH A SEMICOLON AND NO SPACES +ignoredTestName = diff --git a/test/python/expected/pyodbc/fk-contention.out b/test/python/expected/pyodbc/fk-contention.out new file mode 100644 index 
0000000000..fab2419dc3 --- /dev/null +++ b/test/python/expected/pyodbc/fk-contention.out @@ -0,0 +1,27 @@ + +starting permutation : { ins com upd } +step ins: INSERT INTO bar VALUES (42); +~~ROW COUNT: 1~~ + +step com: COMMIT; +step upd: UPDATE foo SET b = 'Hello World'; +~~ROW COUNT: 1~~ + + +starting permutation : { ins upd com } +step ins: INSERT INTO bar VALUES (42); +~~ROW COUNT: 1~~ + +step upd: UPDATE foo SET b = 'Hello World'; +~~ROW COUNT: 1~~ + +step com: COMMIT; + +starting permutation : { upd ins com } +step upd: UPDATE foo SET b = 'Hello World'; +~~ROW COUNT: 1~~ + +step ins: INSERT INTO bar VALUES (42); +~~ROW COUNT: 1~~ + +step com: COMMIT; diff --git a/test/python/expected/pyodbc/fk-deadlock.out b/test/python/expected/pyodbc/fk-deadlock.out new file mode 100644 index 0000000000..5151d39720 --- /dev/null +++ b/test/python/expected/pyodbc/fk-deadlock.out @@ -0,0 +1,230 @@ + +starting permutation : { s1i s1u s1c s2i s2u s2c } +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s1c: COMMIT; +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s2c: COMMIT; + +starting permutation : { s1i s1u s2i s1c s2u s2c } +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s1c: COMMIT; +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s2c: COMMIT; + +starting permutation : { s1i s1u s2i s2u s1c s2c } +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +step s1c: COMMIT; +step s2u: <... completed> +~~ROW COUNT: 1~~ + +step s2c: COMMIT; + +starting permutation : { s1i s2i s1u s1c s2u s2c } +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s1c: COMMIT; +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s2c: COMMIT; + +starting permutation : { s1i s2i s1u s2u s1c s2c } +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +step s1c: COMMIT; +step s2u: <... completed> +~~ROW COUNT: 1~~ + +step s2c: COMMIT; + +starting permutation : { s1i s2i s2u s1u s2c s1c } +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +step s2c: COMMIT; +step s1u: <... 
completed> +~~ROW COUNT: 1~~ + +step s1c: COMMIT; + +starting permutation : { s1i s2i s2u s2c s1u s1c } +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s2c: COMMIT; +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s1c: COMMIT; + +starting permutation : { s2i s1i s1u s1c s2u s2c } +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s1c: COMMIT; +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s2c: COMMIT; + +starting permutation : { s2i s1i s1u s2u s1c s2c } +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +step s1c: COMMIT; +step s2u: <... completed> +~~ROW COUNT: 1~~ + +step s2c: COMMIT; + +starting permutation : { s2i s1i s2u s1u s2c s1c } +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +step s2c: COMMIT; +step s1u: <... completed> +~~ROW COUNT: 1~~ + +step s1c: COMMIT; + +starting permutation : { s2i s1i s2u s2c s1u s1c } +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s2c: COMMIT; +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s1c: COMMIT; + +starting permutation : { s2i s2u s1i s1u s2c s1c } +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +step s2c: COMMIT; +step s1u: <... 
completed> +~~ROW COUNT: 1~~ + +step s1c: COMMIT; + +starting permutation : { s2i s2u s1i s2c s1u s1c } +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s2c: COMMIT; +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s1c: COMMIT; + +starting permutation : { s2i s2u s2c s1i s1u s1c } +step s2i: INSERT INTO child VALUES (2, 1); +~~ROW COUNT: 1~~ + +step s2u: UPDATE parent SET aux = 'baz'; +~~ROW COUNT: 1~~ + +step s2c: COMMIT; +step s1i: INSERT INTO child VALUES (1, 1); +~~ROW COUNT: 1~~ + +step s1u: UPDATE parent SET aux = 'bar'; +~~ROW COUNT: 1~~ + +step s1c: COMMIT; diff --git a/test/python/input/isolation/fk-contention.spec b/test/python/input/isolation/fk-contention.spec new file mode 100644 index 0000000000..c2b8512125 --- /dev/null +++ b/test/python/input/isolation/fk-contention.spec @@ -0,0 +1,19 @@ +setup +{ + CREATE TABLE foo (a int PRIMARY KEY, b text); + CREATE TABLE bar (a int NOT NULL REFERENCES foo); + INSERT INTO foo(a) VALUES (42); +} + +teardown +{ + DROP TABLE foo, bar; +} + +session s1 +setup { BEGIN TRAN; } +step ins { INSERT INTO bar VALUES (42); } +step com { COMMIT; } + +session s2 +step upd { UPDATE foo SET b = 'Hello World'; } diff --git a/test/python/input/isolation/fk-deadlock.spec b/test/python/input/isolation/fk-deadlock.spec new file mode 100644 index 0000000000..a40c1c2c0e --- /dev/null +++ b/test/python/input/isolation/fk-deadlock.spec @@ -0,0 +1,46 @@ +setup +{ + CREATE TABLE parent ( + parent_key int PRIMARY KEY, + aux text NOT NULL + ); + + CREATE TABLE child ( + child_key int PRIMARY KEY, + parent_key int NOT NULL REFERENCES parent + ); + + INSERT INTO parent VALUES (1, 'foo'); +} + +teardown +{ + DROP TABLE parent, child; +} + +session s1 +setup { BEGIN TRAN; SET lock_timeout '100'; } +step s1i { INSERT INTO child VALUES (1, 1); } +step s1u { UPDATE parent SET aux = 'bar'; } +step s1c { COMMIT; } + +session s2 +setup { BEGIN TRAN; SET lock_timeout '10000'; } +step s2i { INSERT INTO child VALUES (2, 1); } +step s2u { UPDATE parent SET aux = 'baz'; } +step s2c { COMMIT; } + +permutation s1i s1u s1c s2i s2u s2c +permutation s1i s1u s2i s1c s2u s2c +permutation s1i s1u s2i s2u s1c s2c +permutation s1i s2i s1u s1c s2u s2c +permutation s1i s2i s1u s2u s1c s2c +permutation s1i s2i s2u s1u s2c s1c +permutation s1i s2i s2u s2c s1u s1c +permutation s2i s1i s1u s1c s2u s2c +permutation s2i s1i s1u s2u s1c s2c +permutation s2i s1i s2u s1u s2c s1c +permutation s2i s1i s2u s2c s1u s1c +permutation s2i s2u s1i s1u s2c s1c +permutation s2i s2u s1i s2c s1u s1c +permutation s2i s2u s2c s1i s1u s1c diff --git a/test/python/isolationtest/README.md b/test/python/isolationtest/README.md new file mode 100644 index 0000000000..20f5f6fef8 --- /dev/null +++ b/test/python/isolationtest/README.md @@ -0,0 +1,23 @@ +# Isolation Tests + +### Steps to run isolation tests locally: +1. Download antlr jar and generate parser files from grammer files. + ``` + java -Xmx500M -cp path/to/antlr/jar org.antlr.v4.Tool -Dlanguage=Python3 ./parser/*.g4 -visitor -no-listener + ``` +2. Install antlr4-python3-runtime module. + ``` + pip install antlr4-python3-runtime + ``` +3. In `config.txt` file, set + ``` + runIsolationTests = true + compareWithFile = true + Fill in the fileGenerator details + inputFilesPath = ./input/isolation (if we want to run only isolation tests) + ``` + +4. 
Trigger the test run + ``` + python3 start.py + ``` diff --git a/test/python/isolationtest/__init__.py b/test/python/isolationtest/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/python/isolationtest/isolationTestHandler.py b/test/python/isolationtest/isolationTestHandler.py new file mode 100644 index 0000000000..aed5dcd738 --- /dev/null +++ b/test/python/isolationtest/isolationTestHandler.py @@ -0,0 +1,46 @@ +import traceback + +from antlr4 import * +from .parser.specLexer import specLexer +from .parser.specParser import specParser +from .specParserVisitorImpl import * + + +def isolationTestHandler(testFile, fileWriter, logger): + testName = testFile.name.split('.')[0] + + try: + logger.info("Starting : {}".format(testName)) + try: + testSpec = parseSpecInput(str(testFile)) + if(testSpec is None): + raise Exception("TestSpec object is not generated") + else: + print(testSpec) + logger.info("Successfully parsed") + except Exception as e: + logger.error("Error while parsing : {}".format(str(e))) + return False + + testSpec.logger = logger + testSpec.fileWriter = fileWriter + + testSpec.initTestRun() + + logger.info("Completed : {}".format(testName)) + return True + except Exception as e: + logger.error(str(e)) + traceback.print_exc() + return False + + +def parseSpecInput(filename): + input_stream = FileStream(filename) + lexer = specLexer(input_stream) + token_stream = CommonTokenStream(lexer) + parser = specParser(token_stream) + tree = parser.parse() + visitor = specParserVisitorImpl() + visitor.visit(tree) + return visitor.testSpec diff --git a/test/python/isolationtest/isolationTester.py b/test/python/isolationtest/isolationTester.py new file mode 100644 index 0000000000..57ac4d10b1 --- /dev/null +++ b/test/python/isolationtest/isolationTester.py @@ -0,0 +1,385 @@ +import pyodbc +import threading +import time +from enum import Flag, auto + +from utils.config import config_dict as cfg +from compare_results import process_multiple_resultsets, handle_exception_in_file + +self = None + + +class STEP_FLAG(Flag): + RETRY = auto() + NONBLOCK = auto() + + +class Session(): + def __init__(self, name, teardownsql=None, autocommit=True): + self.name = name + self.setupsqls = [] + self.teardownsql = teardownsql + self.steps = [] + self.conn = None + self.autocommit = autocommit + self.activeStep = None + self.parentTestSpec = None + + def getConnAndSetup(self): + self.conn = Conn(self, autocommit=self.autocommit) + if not self.setupsqls: + return + for setup in self.setupsqls: + self.sessionBatchExecute(setup) + + def teardownAndCloseConn(self): + if self.teardownsql is not None: + self.sessionBatchExecute(self.teardownsql) + if self.conn is not None: + self.conn.closeCnxnAndCur() + + def sessionBatchExecute(self, batch): + if self.conn is None: + self.parentTestSpec.logger("Error : Connection doesn't exist") + self.conn.executeBatch(batch) + + def __repr__(self) -> str: + return '{' + str(self.name) + ':' + str(','.join(self.setupsqls)) + ':' + str(self.teardownsql) + ':' + str(self.steps) + '}' + + +class MasterSession(Session): + def __init__(self, name='master', teardownsql=None, autocommit=True): + super().__init__(name, teardownsql, autocommit) + self.allPidList = [] + + def checkForLock(self, blocked_pid): + self.conn.cur.execute( + "SELECT pg_catalog.pg_isolation_test_session_is_blocked(?,'{" + ",".join(self.allPidList) + "}');", str(blocked_pid)) + (res,) = self.conn.cur.fetchone() + if res == 1: + return True + return False + + def cancelQuery(self, pid): 
+ self.conn.cur.execute("SELECT pg_cancel_backend(?);", pid) + + def terminateSession(self, pid): + self.conn.cur.execute("SELECT pg_terminate_backend(?);", pid) + + +class Step(): + def __init__(self, name, sql, session): + self.name = name + self.sql = sql + self.session = session + + def __repr__(self) -> str: + return '{' + str(self.name) + ':' + str(self.sql) + ':' + str(self.session.name) + '}' + + +class Pstep(): + def __init__(self, step=None, blocker=None, parentPermutation=None): + self.step = step + self.blocker = blocker + self.parentPermutation = parentPermutation + + def __repr__(self) -> str: + return self.step.name + str(self.blocker) + + def hasBlocker(self): + for otherStep in self.blocker.otherStepBlocker: + if (otherStep.session.activeStep is not None) and (otherStep.session.activeStep.step is otherStep): + return True + return False + + def tryCompleteStep(self, flag): + step = self.step + conn = step.session.conn + testSpec = step.session.parentTestSpec + permutation = self.parentPermutation + logger = testSpec.logger + fileWriter = testSpec.fileWriter + masterSession = permutation.masterSession + waiting = permutation.waiting + + canceled = False + + if not(flag & STEP_FLAG.RETRY): + if self.blocker.isFirstTryBlocker is True: + fileWriter.write("step {} \n".format(step.name)) + return True + + start_time = time.time() + while(conn.isBusy()): + # Two options : it will be executing or blocked + if(flag & STEP_FLAG.NONBLOCK): + # Perform lock query + waiting_flag = masterSession.checkForLock(conn.backendPid) + if(waiting_flag): + if(conn.isBusy() is False): + break + if not(flag & STEP_FLAG.RETRY): + fileWriter.write("step {}: {} \n".format(step.name, step.sql)) + return True + # else not waiting + # if not waiting for lock then it must be some weird query + # so just cancel that query if it times out + taken_time = time.time() - start_time + if(taken_time > int(cfg['stepTimeLimit']) and canceled is False): + masterSession.cancelQuery(step.session.conn.backendPid) + logger.info("canceling step {} after {} seconds\n".format( + step.name, taken_time)) + canceled = True + if(taken_time > 2 * int(cfg['stepTimeLimit'])): + # Raise Exp(if needed) for denoting test failure + # for now exit() works + logger.error("step exceeded stepTimeLimit") + exit(1) + + # step is done but if there is some blocker we shouldn't show it as completed + # we've to wait for blockers + if (self.hasBlocker()): + if not(flag & STEP_FLAG.RETRY): + fileWriter.write("step {}: {} \n".format(step.name, step.sql)) + return True + + # otherwise go ahead and complete it + if(flag & STEP_FLAG.RETRY): + fileWriter.write("step {}: <... 
completed>\n".format(step.name)) + else: + fileWriter.write("step {}: {}\n".format(step.name, step.sql)) + + # print result ,err messages + conn.printResult() + step.session.activeStep = None + for x in waiting: + if x is self: + waiting.remove(x) + break + # waiting.remove(waiting.index(oldstep)) + return False + + +class Blocker(): + def __init__(self): + self.isFirstTryBlocker = False + self.otherStepBlocker = [] + + def __repr__(self) -> str: + res = '' + if self.isFirstTryBlocker is True: + res += '*' + for otherStep in self.otherStepBlocker: + if res is not '': + res += ',' + res += otherStep.name + if res is not '': + res = '(' + res + ')' + return res + + +class Permutation(): + def __init__(self): + self.psteps = [] + self.parentTestSpec = None + self.waiting = [] + self.masterSession = None + + def __repr__(self) -> str: + res = " ".join([str(x.step.name) for x in self.psteps]) + return '{ ' + res + ' }' + + def runAllPermutation(self): + totalSteps = 0 + for sess in self.parentTestSpec.sessions: + totalSteps += len(sess.steps) + usedSteps = [0 for i in range(totalSteps)] + self.psteps = [Pstep(blocker=Blocker(), parentPermutation=self) for i in range(totalSteps)] + self.generatePermutation(usedSteps, 0) + + def generatePermutation(self, usedSteps, currentIndex): + anyStepAdded = False + sessions = self.parentTestSpec.sessions + for i in range(len(sessions)): + if usedSteps[i] < len(sessions[i].steps): + self.psteps[currentIndex].step = sessions[i].steps[usedSteps[i]] + usedSteps[i] += 1 + self.generatePermutation(usedSteps, currentIndex + 1) + usedSteps[i] -= 1 + anyStepAdded = True + if not(anyStepAdded): + self.runPermutation() + + ''' + Run this permutaion + - Create connections + - Main setup + - Per session setup + - Execute steps + - Per session teardown + - Main teardown + - Close connections + ''' + + def runPermutation(self): + try: + self.waiting = [] + testSpec = self.parentTestSpec + fileWriter = testSpec.fileWriter + logger = testSpec.logger + + logmsg = "\nstarting permutation : {}\n".format(str(self)) + fileWriter.write(logmsg) + logger.info(logmsg) + + # Create and Setup Sessions + masterSession = MasterSession(autocommit=True) + masterSession.setupsqls = testSpec.setupsqls + masterSession.teardownsql = testSpec.teardownsql + masterSession.parentTestSpec = testSpec + masterSession.getConnAndSetup() + self.masterSession = masterSession + for sess in testSpec.sessions: + sess.parentTestSpec = testSpec + sess.getConnAndSetup() + sess.conn.start() + masterSession.allPidList.append(str(sess.conn.backendPid)) + + logger.info("Session setup completed for {}".format(str(self))) + + for pstep in self.psteps: + ''' + Check whether the session that needs to perform the next step is + still blocked on an earlier step. If so, wait for it to finish. + ''' + step = pstep.step + sess = step.session + if sess.activeStep is not None: + # note start time + start_time = time.time() + while(sess.activeStep is not None): + oldstep = sess.activeStep + ''' + Wait for oldstep. But even though we don't use + STEP_NONBLOCK, it might not complete because of blocker + conditions. 
+ ''' + oldstep.tryCompleteStep(STEP_FLAG.RETRY) + self.tryCompleteSteps(STEP_FLAG.NONBLOCK | STEP_FLAG.RETRY) + if sess.activeStep is not None: + taken_time = time.time() - start_time + if taken_time > 2 * int(cfg['stepTimeLimit']): + logger.error("step {} timed out after {} seconds\n".format( + oldstep.step.name, taken_time)) + # print active steps of other sessions also (if required) + exit(1) + sess.activeStep = pstep + sess.conn.executeactiveStep() + + mustwait = pstep.tryCompleteStep(STEP_FLAG.NONBLOCK) + self.tryCompleteSteps(STEP_FLAG.NONBLOCK | STEP_FLAG.RETRY) + if mustwait is True: + self.waiting.append(pstep) + + self.tryCompleteSteps(STEP_FLAG.RETRY) + if len(self.waiting) is not 0: + raise Exception("Failed to complete permutation due to mutually-blocking steps\n") + except Exception as e: + raise e + finally: + # Teardown at session level + for sess in testSpec.sessions: + if sess.conn is not None: + sess.conn.stop() + sess.teardownAndCloseConn() + + # Teardown + masterSession.teardownAndCloseConn() + logger.info("Teardown completed for {}".format(str(self))) + + def tryCompleteSteps(self, flags): + for pstep in self.waiting: + pstep.tryCompleteStep(flags) + + +class Conn(threading.Thread): + def __init__(self, sess=None, autocommit=False): + threading.Thread.__init__(self) + self.lock = threading.Event() + self.stopEvent = threading.Event() + self.sess = sess + self.logger = self.sess.parentTestSpec.logger + self.cnxn = pyodbc.connect('DRIVER={};SERVER={},{};DATABASE={};UID={};PWD={}'.format( + cfg['provider'], + cfg['fileGenerator_URL'], + cfg['fileGenerator_port'], + cfg['fileGenerator_databaseName'], + cfg['fileGenerator_user'], + cfg['fileGenerator_password']), + autocommit=autocommit) + self.cur = self.getCursor() + self.backendPid = self.getBackendPid() + + def getCursor(self): + return self.cnxn.cursor() + + def getBackendPid(self): + self.cur.execute("SELECT pg_backend_pid();") + (res,) = self.cur.fetchone() + return res + + def executeactiveStep(self): + self.lock.set() + + def executeBatch(self, batch): + self.cur.execute(batch) + + def printResult(self): + process_multiple_resultsets(self.cur, self.sess.parentTestSpec.fileWriter, 0, None) + + def closeCnxnAndCur(self): + if(self.cur is not None): + self.cur.close() + if(self.cnxn is not None): + self.cnxn.close() + + def isBusy(self): + return self.lock.is_set() + + def stop(self): + self.stopEvent.set() + + def run(self): + while(True): + if self.stopEvent.is_set(): + break + if self.lock.is_set(): + try: + self.cur.execute(self.sess.activeStep.step.sql) + except pyodbc.Error as e: + handle_exception_in_file(e, self.sess.parentTestSpec.fileWriter) + self.cur.nextset() + finally: + self.lock.clear() + + +class TestSpec(): + def __init__(self): + self.setupsqls = [] + self.teardownsql = None + self.sessions = [] + self.permutations = [] + self.fileWriter = None + self.logger = None + + def __repr__(self) -> str: + return '{' + str(self.setupsqls) + ':' + str(self.teardownsql) + ':' + str(self.sessions) + ':' + str(self.permutations) + '}' + + def initTestRun(self): + if self.permutations: + for permutation in self.permutations: + permutation.runPermutation() + else: + permutation = Permutation() + permutation.parentTestSpec = self + permutation.runAllPermutation() diff --git a/test/python/isolationtest/parser/__init__.py b/test/python/isolationtest/parser/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/python/isolationtest/parser/specLexer.g4 
b/test/python/isolationtest/parser/specLexer.g4 new file mode 100644 index 0000000000..85dba2b46c --- /dev/null +++ b/test/python/isolationtest/parser/specLexer.g4 @@ -0,0 +1,17 @@ +lexer grammar specLexer; + +SETUP: 'setup'; +TEARDOWN: 'teardown'; +SESSION: 'session'; +STEP: 'step'; +PERMUTATION: 'permutation'; +SQLBLOCK: OPEN_PAR .*? CLOSE_PAR; +OPEN_PAR: '{'; +CLOSE_PAR: '}'; +OPEN_BRKT:'('; +CLOSE_BRKT:')'; +AST:'*'; +COMMA:','; +ID:[_a-zA-Z][a-zA-Z0-9_]*; +COMMENT : '#' ~[\r\n]* '\r'? '\n' -> skip ; +WS:[ \t\r\f\n]+ -> skip; \ No newline at end of file diff --git a/test/python/isolationtest/parser/specParser.g4 b/test/python/isolationtest/parser/specParser.g4 new file mode 100644 index 0000000000..474c0bf4bf --- /dev/null +++ b/test/python/isolationtest/parser/specParser.g4 @@ -0,0 +1,20 @@ +parser grammar specParser; +options {tokenVocab=specLexer;} + +parse: testspec EOF; + +testspec : setup* teardown? session+ permutation*; + +setup: SETUP SQLBLOCK; + +teardown: TEARDOWN SQLBLOCK; + +session: SESSION ID setup? step+ teardown?; + +step: STEP ID SQLBLOCK; + +pstep: ID (OPEN_BRKT blockers CLOSE_BRKT)?; + +blockers: (AST | ID) (COMMA (AST | ID))*; + +permutation: PERMUTATION pstep+; diff --git a/test/python/isolationtest/specParserVisitorImpl.py b/test/python/isolationtest/specParserVisitorImpl.py new file mode 100644 index 0000000000..2df644199b --- /dev/null +++ b/test/python/isolationtest/specParserVisitorImpl.py @@ -0,0 +1,122 @@ +from .isolationTester import Session, Step, Permutation, TestSpec, Pstep, Blocker +from .parser.specParserVisitor import specParserVisitor + + +# Generated from specParser.g4 by ANTLR 4.9.2 +from antlr4 import * +from .parser.specParser import specParser + +# This class defines a complete generic visitor for a parse tree produced by specParser. + +class specParserVisitorImpl(specParserVisitor): + + def __init__(self): + self.parentnode_stk = [] + self.steps_defined = {} + self.testSpec = TestSpec() + + # Visit a parse tree produced by specParser#parse. + def visitParse(self, ctx:specParser.ParseContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by specParser#self.testSpec. + def visitTestspec(self, ctx:specParser.TestspecContext): + self.parentnode_stk.append(self.testSpec) + for setupsql in ctx.setup(): + self.testSpec.setupsqls.append(trimSQLBLOCK(setupsql.SQLBLOCK().getText())) + if(ctx.teardown() is not None): + self.testSpec.teardownsql = trimSQLBLOCK(ctx.teardown().SQLBLOCK().getText()) + for session_child in ctx.session(): + self.visitSession(session_child) + for permutation_child in ctx.permutation(): + self.visitPermutation(permutation_child) + self.parentnode_stk.pop() + return + + + # Visit a parse tree produced by specParser#setup. + def visitSetup(self, ctx:specParser.SetupContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by specParser#teardown. + def visitTeardown(self, ctx:specParser.TeardownContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by specParser#session. 
+    def visitSession(self, ctx:specParser.SessionContext):
+        session = Session(name = ctx.ID().getText())
+        session.parentTestSpec = self.parentnode_stk[-1]
+        self.parentnode_stk.append(session)
+        if(ctx.setup() is not None):
+            session.setupsqls.append(trimSQLBLOCK(ctx.setup().SQLBLOCK().getText()))
+        if(ctx.teardown() is not None):
+            session.teardownsql = trimSQLBLOCK(ctx.teardown().SQLBLOCK().getText())
+        for step_ctx in ctx.step():
+            self.visitStep(step_ctx)
+        self.parentnode_stk.pop()
+        self.parentnode_stk[-1].sessions.append(session)
+        return
+
+    # Visit a parse tree produced by specParser#pstep.
+    def visitPstep(self, ctx:specParser.PstepContext):
+        pstep = Pstep(parentPermutation=self.parentnode_stk[-1])
+        self.parentnode_stk.append(pstep)
+        step_id = ctx.ID().getText()
+        step_lookup_res = self.steps_defined.get(step_id)
+        if(step_lookup_res is None):
+            raise Exception("ParsingError: Undefined step found " + step_id)
+        else:
+            pstep.step = step_lookup_res
+        if(ctx.blockers() is not None):
+            self.visitBlockers(ctx.blockers())
+        else:
+            pstep.blocker = Blocker()
+        self.parentnode_stk.pop()
+        self.parentnode_stk[-1].psteps.append(pstep)
+        return
+
+    # Visit a parse tree produced by specParser#blockers.
+    def visitBlockers(self, ctx:specParser.BlockersContext):
+        blocker = Blocker()
+        if ctx.AST():
+            blocker.isFirstTryBlocker = True
+        for otherBlockerStepId in ctx.ID():
+            stepLookupRes = self.steps_defined.get(otherBlockerStepId.getText())
+            if(stepLookupRes is None):
+                raise Exception("ParsingError: Undefined step found " + otherBlockerStepId.getText())
+            else:
+                blocker.otherStepBlocker.append(stepLookupRes)
+        self.parentnode_stk[-1].blocker = blocker
+        return
+
+
+    # Visit a parse tree produced by specParser#permutation.
+    def visitPermutation(self, ctx:specParser.PermutationContext):
+        permutation = Permutation()
+        permutation.parentTestSpec = self.parentnode_stk[-1]
+        self.parentnode_stk.append(permutation)
+        for pstep in ctx.pstep():
+            self.visitPstep(pstep)
+        self.parentnode_stk.pop()
+        self.parentnode_stk[-1].permutations.append(permutation)
+        return
+
+
+    # Visit a parse tree produced by specParser#step.
+ def visitStep(self, ctx:specParser.StepContext): + old_step_lookup = self.steps_defined.get(ctx.ID().getText()) + if old_step_lookup is not None: + raise Exception("ParsingError : Steps already defined "+ctx.ID().getText()) + step = Step(ctx.ID().getText(), trimSQLBLOCK(ctx.SQLBLOCK().getText()), self.parentnode_stk[-1]) + self.steps_defined[step.name] = step + self.parentnode_stk[-1].steps.append(step) + return + + +del specParser + +def trimSQLBLOCK(text): + return text[1:-1].strip() diff --git a/test/python/logs/.gitignore b/test/python/logs/.gitignore deleted file mode 100644 index 5e7d2734cf..0000000000 --- a/test/python/logs/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -# Ignore everything in this directory -* -# Except this file -!.gitignore diff --git a/test/python/logs/.gitkeep b/test/python/logs/.gitkeep new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/python/test_main.py b/test/python/test_main.py index c91b2b6e79..b9fcd66b84 100644 --- a/test/python/test_main.py +++ b/test/python/test_main.py @@ -1,7 +1,8 @@ import logging from file_handler import file_handler import pytest -from utils.base import add_files +from utils.base import add_files, ignored_files +from utils.config import config_dict as cfg import os from datetime import datetime from pathlib import Path @@ -30,6 +31,13 @@ def fx1(request): #main test fuctions def test_main(fx1, my_setup): + + # skip tests specified by config + if os.path.splitext(fx1)[1] == '.spec' and cfg['runIsolationTests']=='false': + pytest.skip("Isolation Tests are not allowed - runIsolationTests config param is false") + if fx1.name in ignored_files: + pytest.skip("Ignored test file - Modify ignoredTestName to run this step") + logfname = my_setup #console logger diff --git a/test/python/utils/base.py b/test/python/utils/base.py index 8fd8dffa6b..a66f129940 100644 --- a/test/python/utils/base.py +++ b/test/python/utils/base.py @@ -26,7 +26,10 @@ def add_files(): for f in pth.rglob("*.sql"): files.append(f) - + + for f in pth.rglob("*.spec"): + files.append(f) + return files #search for testcase name recursively else: @@ -37,6 +40,14 @@ def add_files(): return lst +def get_ignored_files(): + cfg = read_config() + ignoredTestNames = cfg["ignoredTestName"] + if ";" in ignoredTestNames: + return ignoredTestNames.split(";") + else: + return [ignoredTestNames] + #to handle exceptions for babel server execution and return corresponding error code def handle_babel_exception(e, logger): if issubclass(type(e), pyodbc.Error): @@ -55,3 +66,5 @@ def handle_babel_exception(e, logger): #default else: logger.warning("Babel Exception: " + str(e)) + +ignored_files = get_ignored_files()
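Note on blocker annotations: the pstep and blockers rules in specParser.g4 also accept an optional parenthesized marker list after a step name in a permutation line, mirroring the blocker annotations of PostgreSQL's isolation tester; they are consumed by the Pstep and Blocker classes in isolationTester.py. Neither fk-contention.spec nor fk-deadlock.spec exercises them, so the following permutation line is only a hypothetical sketch using step names borrowed from fk-deadlock.spec:

    permutation s1i s2i s2u s1u(*) s2c s1c(s2c)

Here (*) marks s1u as expected to block the first time it is executed (isFirstTryBlocker), so the output prints only "step s1u" until it is later reported as <... completed>, and (s2c) keeps s1c from being reported as completed while s2c is still the active step of its session (otherStepBlocker).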