diff --git a/.github/workflows/connectors.yml b/.github/workflows/connectors.yml index c7f1d2061..677320e4a 100644 --- a/.github/workflows/connectors.yml +++ b/.github/workflows/connectors.yml @@ -13,7 +13,7 @@ concurrency: jobs: test_install_connectors: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Checking out repo @@ -27,10 +27,10 @@ jobs: continue-on-error: true run: ./scripts/ci_check_no_file_changes.sh python - - name: Set up Python 3.10 + - name: Set up Python 3.12 uses: actions/setup-python@v5.1.0 with: - python-version: '3.10' + python-version: '3.12' - name: Check PipelineWise and all connectors are installable run: | diff --git a/.github/workflows/e2e_tests.yml b/.github/workflows/e2e_tests.yml index 4356a20ea..ed5da9647 100644 --- a/.github/workflows/e2e_tests.yml +++ b/.github/workflows/e2e_tests.yml @@ -29,7 +29,7 @@ env: jobs: e2e_tests_target_pg: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 environment: ci_tests steps: @@ -70,7 +70,7 @@ jobs: pipelinewise_dev pytest tests/end_to_end/test_target_postgres.py -vx --timer-top-n 10 e2e_tests_mariadb_to_sf: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 environment: ci_tests steps: @@ -122,7 +122,7 @@ jobs: pipelinewise_dev pytest tests/end_to_end/target_snowflake/tap_mariadb -vx --timer-top-n 10 e2e_tests_pg_to_sf: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 environment: ci_tests steps: @@ -174,7 +174,7 @@ jobs: pipelinewise_dev pytest tests/end_to_end/target_snowflake/tap_postgres -vx --timer-top-n 10 e2e_tests_mg_to_sf: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 environment: ci_tests steps: @@ -226,7 +226,7 @@ jobs: pipelinewise_dev pytest tests/end_to_end/target_snowflake/tap_mongodb -vx --timer-top-n 10 e2e_tests_s3_to_sf: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 environment: ci_tests steps: diff --git a/.github/workflows/lint_unit_tests.yml b/.github/workflows/lint_unit_tests.yml index 3a7024ce5..7456743f7 100644 --- a/.github/workflows/lint_unit_tests.yml +++ b/.github/workflows/lint_unit_tests.yml @@ -12,7 +12,7 @@ concurrency: jobs: lint_and_test: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Checking out repo @@ -26,11 +26,11 @@ jobs: continue-on-error: true run: ./scripts/ci_check_no_file_changes.sh python - - name: Set up Python 3.10 + - name: Set up Python 3.12 if: steps.check.outcome == 'failure' uses: actions/setup-python@v5.1.0 with: - python-version: '3.10' + python-version: '3.12' - name: Install dependencies if: steps.check.outcome == 'failure' @@ -40,7 +40,7 @@ jobs: if: steps.check.outcome == 'failure' run: | . 
.virtualenvs/pipelinewise/bin/activate - find pipelinewise tests -type f -name '*.py' | xargs unify --check-only + ruff check pipelinewise tests - name: Pylinting if: steps.check.outcome == 'failure' diff --git a/.github/workflows/publish_doc.yml b/.github/workflows/publish_doc.yml index 738fe4e3e..89599ce80 100644 --- a/.github/workflows/publish_doc.yml +++ b/.github/workflows/publish_doc.yml @@ -23,7 +23,7 @@ jobs: - uses: actions/setup-python@v5.1.0 with: - python-version: '3.10' + python-version: '3.12' - name: check structure run: ls -l gh_doc_automation diff --git a/CHANGELOG.md b/CHANGELOG.md index 52e9d9bc5..7b68393be 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,7 @@ +0.76.0 (2026-04-16) +------------------- +- Updates for Python 3.12 + 0.75.0 (2026-04-07) ------------------- - `pipelinewise-tap-postgres` from `2.1.0` to `2.2.0` diff --git a/Dockerfile b/Dockerfile index a26abab2d..ca21d7c5a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.10-slim-bullseye +FROM python:3.12-slim-bullseye ARG connectors=all @@ -15,12 +15,13 @@ RUN apt-get -qq update \ && pip install -U --no-cache-dir pip # Add Mongodb ppa -RUN wget -qO - https://www.mongodb.org/static/pgp/server-4.4.asc | apt-key add - \ - && echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb.list \ - && apt-get -qq update \ - && apt-get -qqy --no-install-recommends install \ - mongodb-database-tools \ - && rm -rf /var/lib/apt/lists/* +RUN ARCH=$(dpkg --print-architecture) && \ + wget -qO- https://www.mongodb.org/static/pgp/server-4.4.asc | gpg --dearmor > /usr/share/keyrings/mongodb-archive-keyring.gpg && \ + echo "deb [ arch=${ARCH} signed-by=/usr/share/keyrings/mongodb-archive-keyring.gpg ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" > /etc/apt/sources.list.d/mongodb.list && \ + apt-get -qq update && \ + apt-get -qqy --no-install-recommends install \ + mongodb-database-tools && \ + rm -rf /var/lib/apt/lists/* COPY singer-connectors/ /app/singer-connectors/ COPY Makefile /app diff --git a/Dockerfile.barebone b/Dockerfile.barebone index 5e62bc193..02e01c890 100644 --- a/Dockerfile.barebone +++ b/Dockerfile.barebone @@ -1,4 +1,4 @@ -FROM python:3.10-slim-bullseye +FROM python:3.12-slim-bullseye RUN apt-get -qq update \ && apt-get -qqy --no-install-recommends install \ diff --git a/dev-project/docker-compose.yml b/dev-project/docker-compose.yml index 455d56a3d..21df5a9fa 100644 --- a/dev-project/docker-compose.yml +++ b/dev-project/docker-compose.yml @@ -2,7 +2,7 @@ services: ### Primary container with PipelineWise CLI pipelinewise: platform: linux/amd64 - image: ubuntu:22.04 + image: ubuntu:24.04 container_name: pipelinewise_dev working_dir: /opt/pipelinewise entrypoint: /opt/pipelinewise/dev-project/entrypoint.sh diff --git a/dev-project/entrypoint.sh b/dev-project/entrypoint.sh index 36263b107..5fbcd1ed9 100755 --- a/dev-project/entrypoint.sh +++ b/dev-project/entrypoint.sh @@ -20,7 +20,7 @@ apt_retry() { } apt-get update -apt_retry apt-get install -y software-properties-common python3-apt apt-utils +apt_retry apt-get install -y software-properties-common apt-utils add-apt-repository ppa:deadsnakes/ppa apt-get update @@ -33,14 +33,11 @@ apt_retry apt-get install -y --no-install-recommends \ git \ alien \ gettext-base \ - libaio1 \ + libaio1t64 \ mariadb-client \ mbuffer \ postgresql-client \ - python3.10 python3-pip python3.10-venv python3.10-dev - -rm /usr/bin/python3 -ln -s 
/usr/bin/python3.10 /usr/bin/python3 + python3.12-dev python3.12-venv apt-get upgrade -y diff --git a/pipelinewise/cli/__init__.py b/pipelinewise/cli/__init__.py index e61a13f12..25b4b3cac 100644 --- a/pipelinewise/cli/__init__.py +++ b/pipelinewise/cli/__init__.py @@ -265,7 +265,7 @@ def main(): # import_config : this is for backward compatibility; use 'import' instead from CLI # Every command argument is mapped to a python function with the same name, but 'import' is a # python keyword and can't be used as function name - if args.command == 'import' or args.command == 'import_config': + if args.command in ['import', 'import_config']: args.command = 'import_project' try: _validate_command_specific_arguments(args) diff --git a/pipelinewise/cli/alert_handlers/victorops_alert_handler.py b/pipelinewise/cli/alert_handlers/victorops_alert_handler.py index e10885cad..79b062cf6 100644 --- a/pipelinewise/cli/alert_handlers/victorops_alert_handler.py +++ b/pipelinewise/cli/alert_handlers/victorops_alert_handler.py @@ -54,6 +54,7 @@ def send( Initialised alert handler object """ # Send alert to VictorOps REST Endpoint as a HTTP post request + # pylint: disable=missing-timeout response = requests.post( f'{self.base_url}/{self.routing_key}', data=json.dumps( diff --git a/pipelinewise/cli/commands.py b/pipelinewise/cli/commands.py index b06fcb577..269824551 100644 --- a/pipelinewise/cli/commands.py +++ b/pipelinewise/cli/commands.py @@ -149,7 +149,7 @@ def exists_and_executable(bin_path: str) -> bool: try: paths = f"{os.environ['PATH']}".split(':') - (p for p in paths if os.access(f'{p}/{bin_path}', os.X_OK)).__next__() + next(p for p in paths if os.access(f'{p}/{bin_path}', os.X_OK)) except StopIteration: return False return True @@ -296,6 +296,7 @@ def build_stream_buffer_command( return buffer_command +# pylint: disable=too-many-positional-arguments def build_singer_command( tap: TapParams, target: TargetParams, @@ -354,6 +355,7 @@ def build_singer_command( return command +# pylint: disable=too-many-positional-arguments # pylint: disable=too-many-arguments def build_partialsync_command( tap: TapParams, @@ -400,6 +402,7 @@ def build_partialsync_command( return command +# pylint: disable=too-many-positional-arguments # pylint: disable=too-many-arguments def build_fastsync_command( tap: TapParams, diff --git a/pipelinewise/cli/pipelinewise.py b/pipelinewise/cli/pipelinewise.py index 951f61fa2..a2a753ace 100644 --- a/pipelinewise/cli/pipelinewise.py +++ b/pipelinewise/cli/pipelinewise.py @@ -166,6 +166,7 @@ def create_consumable_target_config(self, target_config, tap_inheritable_config) f'Cannot merge JSON files {dict_a} {dict_b} - {exc}' ) from exc + # pylint: disable=too-many-positional-arguments # pylint: disable=too-many-statements,too-many-branches,too-many-nested-blocks,too-many-locals,too-many-arguments def create_filtered_tap_properties( self, @@ -2273,6 +2274,7 @@ def cleanup_after_deleted_config(self, old_config: Dict) -> int: deleted_taps_count = 0 for target_id, taps in old_config_dict.items(): + # pylint: disable=unreachable if target_id not in new_config_dict: # target is no longer configured, thus we need to remove all its config and taps tied to it self._remove_target_config(target_id, taps) diff --git a/pipelinewise/fastsync/commons/tap_mongodb.py b/pipelinewise/fastsync/commons/tap_mongodb.py index 931f2c4b0..09139ed9a 100644 --- a/pipelinewise/fastsync/commons/tap_mongodb.py +++ b/pipelinewise/fastsync/commons/tap_mongodb.py @@ -235,7 +235,7 @@ def close_connection(self): """ 
self.database.client.close() - # pylint: disable=R0914,R0913 + # pylint: disable=R0914,R0913,R0917 def copy_table( self, table_name: str, diff --git a/pipelinewise/fastsync/commons/tap_mysql.py b/pipelinewise/fastsync/commons/tap_mysql.py index cada792ac..f94e67818 100644 --- a/pipelinewise/fastsync/commons/tap_mysql.py +++ b/pipelinewise/fastsync/commons/tap_mysql.py @@ -347,6 +347,7 @@ def get_table_columns(self, table_name, max_num=None, date_type='date'): schema_name = table_dict.get('schema_name') table_name = table_dict.get('table_name') + # pylint: disable=line-too-long sql = f""" SELECT column_name AS column_name, data_type AS data_type, @@ -385,6 +386,7 @@ def get_table_columns(self, table_name, max_num=None, date_type='date'): ORDER BY ordinal_position """ # noqa: E501 + # pylint: enable=line-too-long return self.query(sql) def map_column_types_to_target(self, table_name): @@ -407,7 +409,7 @@ def map_column_types_to_target(self, table_name): 'primary_key': self.get_primary_keys(table_name), } - # pylint: disable=too-many-locals + # pylint: disable=too-many-locals, too-many-positional-arguments def copy_table( self, table_name, diff --git a/pipelinewise/fastsync/commons/tap_postgres.py b/pipelinewise/fastsync/commons/tap_postgres.py index 1807f7b2f..440670223 100644 --- a/pipelinewise/fastsync/commons/tap_postgres.py +++ b/pipelinewise/fastsync/commons/tap_postgres.py @@ -271,9 +271,11 @@ def fetch_current_log_pos(self): self.primary_host_curr = self.primary_host_conn.cursor() # Make sure PostgreSQL version is 9.4 or higher + # pylint: disable=assignment-from-no-return result = self.primary_host_query( "SELECT setting::int AS version FROM pg_settings WHERE name='server_version_num'" ) + # pylint: disable=unsubscriptable-object version = result[0].get('version') # Do not allow minor versions with PostgreSQL BUG #15114 @@ -395,9 +397,10 @@ def get_table_columns(self, table_name, max_num=None, date_type='date'): if max_num: decimals = len(max_num.split('.')[1]) if '.' 
in max_num else 0 + decimal_format = f""" 'CASE WHEN "' || column_name || '" IS NULL THEN NULL ELSE GREATEST(LEAST({max_num}, ROUND("' || column_name || '"::numeric , {decimals})), -{max_num}) END' - """ # noqa E501 + """ # noqa E501 pylint: disable=line-too-long integer_format = """ '"' || column_name || '"' """ @@ -410,6 +413,7 @@ def get_table_columns(self, table_name, max_num=None, date_type='date'): schema_name = table_dict.get('schema_name') table_name = table_dict.get('table_name') + # pylint: disable = line-too-long sql = f""" SELECT column_name @@ -442,6 +446,8 @@ def get_table_columns(self, table_name, max_num=None, date_type='date'): ORDER BY ordinal_position ) AS x """ # noqa: E501 + # pylint: enable = line-too-long + return self.query(sql) def map_column_types_to_target(self, table_name): @@ -466,7 +472,7 @@ def map_column_types_to_target(self, table_name): 'primary_key': self.get_primary_keys(table_name), } - # pylint: disable=too-many-arguments, too-many-locals + # pylint: disable=too-many-arguments, too-many-locals, too-many-positional-arguments def copy_table( self, table_name, diff --git a/pipelinewise/fastsync/commons/target_postgres.py b/pipelinewise/fastsync/commons/target_postgres.py index c8b025b87..02dc3cd42 100644 --- a/pipelinewise/fastsync/commons/target_postgres.py +++ b/pipelinewise/fastsync/commons/target_postgres.py @@ -12,7 +12,7 @@ LOGGER = logging.getLogger(__name__) -# pylint: disable=missing-function-docstring,no-self-use,too-many-arguments +# pylint: disable=missing-function-docstring,too-many-arguments class FastSyncTargetPostgres: """ Common functions for fastsync to Postgres diff --git a/pipelinewise/fastsync/commons/target_snowflake.py b/pipelinewise/fastsync/commons/target_snowflake.py index a499bf765..dffe7d5b6 100644 --- a/pipelinewise/fastsync/commons/target_snowflake.py +++ b/pipelinewise/fastsync/commons/target_snowflake.py @@ -18,7 +18,7 @@ logging.getLogger('snowflake.connector').setLevel(logging.WARNING) -# pylint: disable=missing-function-docstring,no-self-use,too-many-arguments +# pylint: disable=missing-function-docstring,too-many-arguments class FastSyncTargetSnowflake: """ Common functions for fastsync to Snowflake @@ -214,6 +214,7 @@ def drop_table(self, target_schema, table_name, is_temporary=False): sql = 'DROP TABLE IF EXISTS {}."{}"'.format(target_schema, target_table.upper()) self.query(sql, query_tag_props={'schema': target_schema, 'table': table_name}) + # pylint: disable=too-many-positional-arguments def create_table( self, target_schema: str, diff --git a/pipelinewise/fastsync/partialsync/utils.py b/pipelinewise/fastsync/partialsync/utils.py index ff2e1fb60..828c3008b 100644 --- a/pipelinewise/fastsync/partialsync/utils.py +++ b/pipelinewise/fastsync/partialsync/utils.py @@ -158,7 +158,7 @@ def _validate_dynamic_boundary_value(query_object, string_to_check: str) -> str: else: boundary_value = return_value[0][0] except Exception: - raise(InvalidConfigException(f'Invalid query for boundary value: {string_to_check}')) from Exception + raise (InvalidConfigException(f'Invalid query for boundary value: {string_to_check}')) from Exception return boundary_value diff --git a/pylintrc b/pylintrc index a290fcb40..3a61e9827 100644 --- a/pylintrc +++ b/pylintrc @@ -38,7 +38,6 @@ extension-pkg-whitelist=ujson # operator. Joining a lot of strings can lead to a maximum recursion error in # Pylint and this flag can prevent that. It has one side effect, the resulting # AST will be different than the one from reality. 
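
The tap_mysql and tap_postgres hunks above bracket their long generated SQL with paired disable/enable comments so that line-too-long is silenced only for those statements rather than for the whole module. A minimal sketch of that scoped pattern, with a placeholder query (not taken from the codebase):

    # pylint: disable=line-too-long
    SQL_TEMPLATE = "SELECT column_name, data_type FROM information_schema.columns WHERE table_schema = 'some_schema' AND table_name = 'some_table' ORDER BY ordinal_position"  # noqa: E501
    # pylint: enable=line-too-long
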
-optimize-ast=no [MESSAGES CONTROL] @@ -54,164 +53,14 @@ disable=wrong-import-order, broad-except, missing-module-docstring, duplicate-code, # not useful until a major code refactoring + consider-using-f-string, + use-implicit-booleaness-not-comparison, + use-dict-literal, + consider-using-generator, + broad-exception-raised +# remove broad-exception-raised and refactor teh codes to handle exception in a better way -enable=import-error, - import-self, - reimported, - wildcard-import, - misplaced-future, - deprecated-module, - unpacking-non-sequence, - invalid-all-object, - undefined-all-variable, - used-before-assignment, - cell-var-from-loop, - global-variable-undefined, - redefine-in-handler, - unused-import, - unused-wildcard-import, - global-variable-not-assigned, - undefined-loop-variable, - global-statement, - global-at-module-level, - bad-open-mode, - redundant-unittest-assert, - boolean-datetime - deprecated-method, - anomalous-unicode-escape-in-string, - anomalous-backslash-in-string, - not-in-loop, - continue-in-finally, - abstract-class-instantiated, - star-needs-assignment-target, - duplicate-argument-name, - return-in-init, - too-many-star-expressions, - nonlocal-and-global, - return-outside-function, - return-arg-in-generator, - invalid-star-assignment-target, - bad-reversed-sequence, - nonexistent-operator, - yield-outside-function, - init-is-generator, - nonlocal-without-binding, - lost-exception, - assert-on-tuple, - dangerous-default-value, - duplicate-key, - useless-else-on-loop - expression-not-assigned, - confusing-with-statement, - unnecessary-lambda, - pointless-statement, - pointless-string-statement, - unnecessary-pass, - unreachable, - eval-used, - exec-used, - using-constant-test, - bad-super-call, - missing-super-argument, - slots-on-old-class, - super-on-old-class, - property-on-old-class, - not-an-iterable, - not-a-mapping, - format-needs-mapping, - truncated-format-string, - missing-format-string-key, - mixed-format-string, - too-few-format-args, - bad-str-strip-call, - too-many-format-args, - bad-format-character, - format-combined-specification, - bad-format-string-key, - bad-format-string, - missing-format-attribute, - missing-format-argument-key, - unused-format-string-argument - unused-format-string-key, - invalid-format-index, - bad-indentation, - mixed-indentation, - unnecessary-semicolon, - lowercase-l-suffix, - invalid-encoded-data, - unpacking-in-except, - import-star-module-level, - long-suffix, - old-octal-literal, - old-ne-operator, - backtick, - old-raise-syntax, - metaclass-assignment, - next-method-called, - dict-iter-method, - dict-view-method, - indexing-exception, - raising-string, - using-cmp-argument, - cmp-method, - coerce-method, - delslice-method, - getslice-method, - hex-method, - nonzero-method, - t-method, - setslice-method, - logging-format-truncated, - logging-too-few-args, - logging-too-many-args, - logging-unsupported-format, - logging-format-interpolation, - invalid-unary-operand-type, - unsupported-binary-operation, - not-callable, - redundant-keyword-arg, - assignment-from-no-return, - assignment-from-none, - not-context-manager, - repeated-keyword, - missing-kwoa, - no-value-for-parameter, - invalid-sequence-index, - invalid-slice-index, - unexpected-keyword-arg, - unsupported-membership-test, - unsubscriptable-object, - access-member-before-definition, - method-hidden, - assigning-non-slot, - duplicate-bases, - inconsistent-mro, - inherit-non-class, - invalid-slots, - invalid-slots-object, - no-method-argument, - 
no-self-argument, - unexpected-special-method-signature, - non-iterator-returned, - arguments-differ, - signature-differs, - bad-staticmethod-argument, - non-parent-init-called, - bad-except-order, - catching-non-exception, - bad-exception-context, - notimplemented-raised, - raising-bad-type, - raising-non-exception, - misplaced-bare-raise, - duplicate-except, - nonstandard-exception, - binary-op-exception, - bare-except, - not-async-context-manager, - yield-inside-async-function - # Needs investigation: # abstract-method (might be indicating a bug? probably not though) # protected-access (requires some refactoring) @@ -246,7 +95,6 @@ output-format=parseable # Put messages in a separate file for each module / package specified on the # command line instead of printing them on stdout. Reports (if any) will be # written in a file name "pylint_global.[txt|html]". -files-output=no # Tells whether to display a full report or only the messages reports=no @@ -286,7 +134,6 @@ single-line-if-stmt=no # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. # `trailing-comma` allows a space between comma and closing bracket: (a, ). # `empty-line` allows space-only lines. -no-space-check=trailing-comma,dict-separator # Maximum number of lines in a module max-module-lines=1000 @@ -384,7 +231,6 @@ notes=FIXME,XXX [BASIC] # List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,input # Good variable names which should always be accepted, separated by a comma good-names=i,j,k,ex,Run,_ @@ -403,62 +249,36 @@ include-naming-hint=no function-rgx=[a-z_][a-z0-9_]{2,40}$ # Naming hint for function names -function-name-hint=[a-z_][a-z0-9_]{2,40}$ # Regular expression matching correct variable names variable-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for variable names -variable-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct constant names const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ -# Naming hint for constant names -const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Regular expression matching correct attribute names attr-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for attribute names -attr-name-hint=[a-z_][a-z0-9_]{2,30}$ - # Regular expression matching correct argument names argument-rgx=[a-z_][a-z0-9_]{2,30}$ -# Naming hint for argument names -argument-name-hint=[a-z_][a-z0-9_]{2,30}$ - # Regular expression matching correct class attribute names class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ -# Naming hint for class attribute names -class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - # Regular expression matching correct inline iteration names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ -# Naming hint for inline iteration names -inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ - # Regular expression matching correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ -# Naming hint for class names -class-name-hint=[A-Z_][a-zA-Z0-9]+$ - # Regular expression matching correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ -# Naming hint for module names -module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - # Regular expression matching correct method names method-rgx=[a-z_][a-z0-9_]{2,80}$ -# Naming hint for method names -method-name-hint=[a-z_][a-z0-9_]{2,80}$ - # Regular expression which should only match function or class names that do # not require a docstring. 
no-docstring-rgx=^_ @@ -496,6 +316,7 @@ int-import-graph= # Maximum number of arguments for function / method max-args=7 +max-positional-arguments=7 # Argument names that match this expression will be ignored. Default to name # with leading underscore @@ -549,4 +370,4 @@ exclude-protected=_asdict,_fields,_replace,_source,_make # Exceptions that will emit a warning when being caught. Defaults to # "Exception" -overgeneral-exceptions=Exception \ No newline at end of file +overgeneral-exceptions=builtins.Exception \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..ea42771fe --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,20 @@ +[tool.ruff] +# Set the maximum line length to 120 +line-length = 130 + +[tool.ruff.lint] +# Select the rules you want to check (E=Error, F=Pyflakes, Q=Quotes) +select = ["E", "F", "Q"] + +# Add Q000, Q001, or any other rules you want to skip entirely +# Q000: Single quotes found but double quotes preferred +# Q001: Multiline string quote preference +# Q003: Avoidable escaped quotes +# Q004: Unnecessary escape on inner quote character +ignore = ["Q000", "Q001", "Q003", "Q004", "E721"] + +[tool.ruff.lint.flake8-quotes] +# This sets your preference, but the 'ignore' above will +# stop Ruff from complaining even if you break these rules. +inline-quotes = "single" +multiline-quotes = "single" \ No newline at end of file diff --git a/setup.py b/setup.py index d4a709b94..cba98c65f 100644 --- a/setup.py +++ b/setup.py @@ -6,8 +6,8 @@ LONG_DESCRIPTION = f.read() setup(name='pipelinewise', - python_requires='==3.10.*', - version='0.75.0', + python_requires='==3.12.*', + version='0.76.0', description='PipelineWise', long_description=LONG_DESCRIPTION, long_description_content_type='text/markdown', @@ -16,12 +16,12 @@ classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.12', ], install_requires=[ 'argparse==1.4.0', 'tabulate==0.8.9', - 'PyYAML==6.0', + 'PyYAML==6.0.3', 'ansible-core==2.17.8', 'Jinja2==3.1.6', 'joblib==1.3.2', @@ -29,14 +29,14 @@ 'psycopg2-binary==2.9.10', 'numpy==1.26.4', # numpy 2.X is not compatible with our used pandas 'snowflake-connector-python[pandas]==3.15.0', - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'python-pidfile==3.0.0', 'pymongo>=4.7,<4.12', 'tzlocal>=2.0,<4.1', 'slackclient==2.9.4', 'sqlparse==0.5.3', 'psutil==5.9.5', - 'ujson==5.4.0', + 'ujson==5.12.0', 'dnspython==2.1.*', 'boto3>=1.21,<1.27', 'chardet==4.0.0', @@ -45,13 +45,13 @@ extras_require={ 'test': [ 'pre-commit==2.21.0', - 'flake8==4.0.1', - 'pytest==7.1.1', + 'flake8==7.3.0', + 'pytest==9.0.3', 'pytest-dependency==0.4.0', 'pytest-cov==4.1.0', 'python-dotenv==0.19.1', - 'pylint==2.10.*', - 'unify==0.5', + 'pylint==4.0.5', + 'ruff==0.15.11', 'pytest-timer~=0.0', ] }, diff --git a/singer-connectors/tap-github/setup.py b/singer-connectors/tap-github/setup.py index 4df2e958b..40c4cd22e 100644 --- a/singer-connectors/tap-github/setup.py +++ b/singer-connectors/tap-github/setup.py @@ -18,13 +18,13 @@ ], py_modules=['tap_github'], install_requires=[ - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'requests==2.32.4' ], extras_require={ 'test': [ - 'pylint==2.10.2', - 'pytest==6.2.4' + 'pylint==4.0.5', + 'pytest==9.0.3' ] }, entry_points=''' diff --git a/singer-connectors/tap-kafka/setup.py b/singer-connectors/tap-kafka/setup.py index 
87a041b8c..92834d58a 100644 --- a/singer-connectors/tap-kafka/setup.py +++ b/singer-connectors/tap-kafka/setup.py @@ -17,16 +17,16 @@ 'Programming Language :: Python :: 3 :: Only' ], install_requires=[ - 'pipelinewise-singer-python==2.*', + 'pipelinewise-singer-python==3.0.2', 'dpath==2.1.*', 'confluent-kafka[protobuf]==2.3.*', - 'grpcio-tools==1.57.*' + 'grpcio-tools==1.80.*' ], extras_require={ 'test': [ - 'pytest==7.4.*', - 'pylint==2.17.*', - 'pytest-cov==4.0.*' + 'pytest==9.0.3', + 'pylint==4.0.5', + 'pytest-cov==7.1.0' ] }, entry_points=''' diff --git a/singer-connectors/tap-mixpanel/setup.py b/singer-connectors/tap-mixpanel/setup.py index e2dbbee6e..e23568693 100644 --- a/singer-connectors/tap-mixpanel/setup.py +++ b/singer-connectors/tap-mixpanel/setup.py @@ -18,15 +18,15 @@ ], py_modules=['tap_mixpanel'], install_requires=[ - 'backoff>=1.8.0,<2.0.0', + 'backoff==2.1.2', 'requests==2.32.3', - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'jsonlines==1.2.0' ], extras_require={ 'test': [ - 'pylint==2.9.*', - 'pytest==6.2.*', + 'pylint==4.0.5', + 'pytest==9.0.3', 'requests_mock==1.9.*', ] }, diff --git a/singer-connectors/tap-mongodb/setup.py b/singer-connectors/tap-mongodb/setup.py index 02bf476d5..9ccd27760 100644 --- a/singer-connectors/tap-mongodb/setup.py +++ b/singer-connectors/tap-mongodb/setup.py @@ -20,7 +20,7 @@ ], py_modules=['tap_mongodb'], install_requires=[ - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'pymongo==4.7.*', 'tzlocal==4.0.2', 'terminaltables==3.1.*', @@ -32,8 +32,8 @@ 'ipdb==0.13.*' ], 'test': [ - 'pytest==6.2.5', - 'pytest-cov==3.0.0' + 'pytest==9.0.3', + 'pytest-cov==7.1.0' ] }, entry_points=''' diff --git a/singer-connectors/tap-mysql/setup.py b/singer-connectors/tap-mysql/setup.py index 0cb6a2582..c238faef3 100644 --- a/singer-connectors/tap-mysql/setup.py +++ b/singer-connectors/tap-mysql/setup.py @@ -18,12 +18,12 @@ ], py_modules=['tap_mysql'], install_requires=[ - 'pendulum==2.1.2', - 'pipelinewise-singer-python==1.*', - 'mysql-replication==0.43', - 'PyMySQL==1.1.*', - 'plpygis==0.2.1', - 'tzlocal==4.0.2', + 'pendulum==3.2.0', + 'pipelinewise-singer-python==3.0.2', + 'mysql-replication==0.46', + 'PyMySQL==1.1.2', + 'plpygis==0.6.1', + 'tzlocal==5.3.1', ], extras_require={ 'test': [ diff --git a/singer-connectors/tap-mysql/tap_mysql/connection.py b/singer-connectors/tap-mysql/tap_mysql/connection.py index a86fe5d68..f7ffa3788 100644 --- a/singer-connectors/tap-mysql/tap_mysql/connection.py +++ b/singer-connectors/tap-mysql/tap_mysql/connection.py @@ -12,8 +12,6 @@ CONNECT_TIMEOUT_SECONDS = 30 -# We need to hold onto this for self-signed SSL -MATCH_HOSTNAME = ssl.match_hostname MARIADB_ENGINE = 'mariadb' MYSQL_ENGINE = 'mysql' @@ -118,26 +116,35 @@ def __init__(self, config): with open("key.pem", "wb") as key_file: key_file.write(config["ssl_key"].encode('utf-8')) - ssl_arg = { - "ca": "./ca.pem", - "cert": "./cert.pem", - "key": "./key.pem", - } + ctx = ssl.create_default_context(cafile="./ca.pem") + ctx.load_cert_chain(certfile="./cert.pem", keyfile="./key.pem") - # override match hostname for google cloud if config.get("internal_hostname"): parsed_hostname = parse_internal_hostname(config["internal_hostname"]) - ssl.match_hostname = lambda cert, hostname: MATCH_HOSTNAME(cert, parsed_hostname)# pylint: disable=W1505 + # This tells Python to verify the cert against THIS name, + # even if we are connecting to an IP address. 
+ ctx.check_hostname = True + server_hostname = parsed_hostname + else: + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_REQUIRED # Or ssl.CERT_NONE if preferred + server_hostname = None + + + + ssl_arg = ctx + + args["server_hostname"] = server_hostname super().__init__(defer_connect=True, ssl=ssl_arg, **args) # Attempt SSL if config.get("ssl") == 'true' and not use_self_signed_ssl: LOGGER.info("Attempting SSL connection") - self.ssl = True - self.ctx = ssl.create_default_context() - self.ctx.check_hostname = False - self.ctx.verify_mode = ssl.CERT_NONE + ctx = ssl.create_default_context() + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE + ssl_arg = ctx # Assign the context to ssl_arg self.client_flag |= CLIENT.SSL self.session_sqls = config.get("session_sqls", DEFAULT_SESSION_SQLS) diff --git a/singer-connectors/tap-mysql/tap_mysql/sync_strategies/binlog.py b/singer-connectors/tap-mysql/tap_mysql/sync_strategies/binlog.py index 6a75f334f..979d5f927 100644 --- a/singer-connectors/tap-mysql/tap_mysql/sync_strategies/binlog.py +++ b/singer-connectors/tap-mysql/tap_mysql/sync_strategies/binlog.py @@ -216,7 +216,6 @@ def json_bytes_to_string(data): # pylint: disable=too-many-locals def row_to_singer_record(catalog_entry, version, db_column_map, row, time_extracted): row_to_persist = {} - for column_name, val in row.items(): property_type = catalog_entry.schema.properties[column_name].type property_format = catalog_entry.schema.properties[column_name].format @@ -254,8 +253,7 @@ def row_to_singer_record(catalog_entry, version, db_column_map, row, time_extrac elif property_format == 'spatial': if val: - srid = int.from_bytes(val[:4], byteorder='little') - geom = Geometry(val[4:], srid=srid) + geom = Geometry(val) row_to_persist[column_name] = json.dumps(geom.geojson) else: row_to_persist[column_name] = None @@ -520,8 +518,7 @@ def handle_update_rows_event(event, catalog_entry, state, columns, rows_saved, t db_column_types = get_db_column_types(event) for row in event.rows: - filtered_vals = {k: v for k, v in row['after_values'].items() - if k in columns} + filtered_vals = {k: v for k, v in row['after_values'].items() if k in columns} record_message = row_to_singer_record(catalog_entry, stream_version, @@ -600,10 +597,20 @@ def __get_diff_in_columns_list( # if a column no longer exists, the event will have something like __dropped_col_XY__ # to refer to this column, we don't want these columns to be included in the difference # we also will ignore any column using the given ignore_columns argument. - binlog_columns_filtered = filter( - lambda col_name, ignored_cols=ignore_columns: - not bool(re.match(r'__dropped_col_\d+__', col_name) or col_name in ignored_cols), - [col.name for col in binlog_event.columns]) + + # binlog_columns_filtered = filter( + # lambda col_name, ignored_cols=ignore_columns: + # not bool(re.match(r'__dropped_col_\d+__', col_name) or col_name in ignored_cols), + # [col.name for col in binlog_event.columns]) + + binlog_columns_filtered = [ + col.name for col in binlog_event.columns + if col.name and not ( + re.match(r'__dropped_col_\d+__', str(col.name)) or + col.name in ignore_columns + ) + ] + return set(binlog_columns_filtered).difference(schema_properties) @@ -628,7 +635,6 @@ def _run_binlog_sync( # A set to hold all columns that are detected as we sync but should be ignored cuz they are unsupported types. 
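
For context on the connection.py change above: Python 3.12 removes ssl.match_hostname, so the patch replaces the old monkey-patch with a real ssl.SSLContext and hands the expected name to the driver separately. A minimal sketch of that approach, assuming placeholder file paths and hostname (the actual PyMySQL call is only indicated in a comment):

    import ssl

    # Verify the server against our own CA and present a client certificate,
    # mirroring the self-signed-SSL branch of the patched connection class.
    ctx = ssl.create_default_context(cafile="./ca.pem")
    ctx.load_cert_chain(certfile="./cert.pem", keyfile="./key.pem")

    # When connecting to an IP address but validating a known internal name,
    # keep hostname checking on and supply that name at connect time.
    ctx.check_hostname = True
    internal_hostname = "primary.db.internal"  # placeholder

    # The patched class then passes ssl=ctx and server_hostname=internal_hostname
    # through to the PyMySQL Connection constructor.
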
# Saving them here to avoid doing the check if we should ignore a column over and over again ignored_columns = set() - # Exit from the loop when the reader either runs out of streams to return or we reach # the end position (which is Master's) for binlog_event in reader: @@ -906,7 +912,6 @@ def sync_binlog_stream( end_log_file, end_log_pos = fetch_current_log_file_and_pos(mysql_conn) LOGGER.info('Current Master binlog file and pos: %s %s', end_log_file, end_log_pos) - _run_binlog_sync(mysql_conn, reader, binlog_streams_map, state, config, end_log_file, end_log_pos) finally: diff --git a/singer-connectors/tap-postgres/setup.py b/singer-connectors/tap-postgres/setup.py index 011adb3dd..ebae35f66 100644 --- a/singer-connectors/tap-postgres/setup.py +++ b/singer-connectors/tap-postgres/setup.py @@ -16,17 +16,18 @@ 'License :: OSI Approved :: GNU Affero General Public License v3', 'Programming Language :: Python :: 3 :: Only' ], - python_requires=">=3.10", + python_requires=">=3.12.0, <3.13", install_requires=[ - 'pipelinewise-singer-python==1.*', - 'psycopg2-binary==2.9.5', + 'pipelinewise-singer-python==3.0.2', + 'psycopg2-binary==2.9.12', 'strict-rfc3339==0.7', + 'simplejson==4.0.1' ], extras_require={ "test": [ - 'pytest==7.2.2', - 'pylint==2.12.*', - 'pytest-cov==4.0.0' + 'pytest==9.0.3', + 'pylint==4.0.5', + 'pytest-cov==7.1.0' ] }, entry_points=''' diff --git a/singer-connectors/tap-s3-csv/setup.py b/singer-connectors/tap-s3-csv/setup.py index 4da4c0e11..44545cf8b 100644 --- a/singer-connectors/tap-s3-csv/setup.py +++ b/singer-connectors/tap-s3-csv/setup.py @@ -20,9 +20,9 @@ install_requires=[ 'boto3==1.26.165', 'singer-encodings==0.0.*', - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'voluptuous==0.13.1', - 'ujson==5.4.0', + 'ujson==5.12.0', 'more_itertools==8.12.*', ], extras_require={ @@ -30,9 +30,9 @@ 'ipdb==0.13.9', ], 'test': [ - 'pytest==7.1.*', - 'pylint==2.12.*', - 'pytest-cov==3.0.*' + 'pytest==9.0.3', + 'pylint==4.0.5', + 'pytest-cov==7.1.0' ] }, entry_points=''' diff --git a/singer-connectors/tap-salesforce/setup.py b/singer-connectors/tap-salesforce/setup.py index da3310ac7..0c517e4ae 100644 --- a/singer-connectors/tap-salesforce/setup.py +++ b/singer-connectors/tap-salesforce/setup.py @@ -19,7 +19,7 @@ py_modules=['tap_salesforce'], install_requires=[ 'requests==2.20.0', - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'xmltodict==0.11.0' ], extras_require={ @@ -27,7 +27,7 @@ 'pylint==2.9.*', ] }, - python_requires='>=3.6', + python_requires='>=3.12.0, <3.13', entry_points=''' [console_scripts] tap-salesforce=tap_salesforce:main diff --git a/singer-connectors/tap-slack/setup.py b/singer-connectors/tap-slack/setup.py index 16a31ffab..967a715b6 100644 --- a/singer-connectors/tap-slack/setup.py +++ b/singer-connectors/tap-slack/setup.py @@ -18,17 +18,17 @@ ], py_modules=['tap_slack'], install_requires=[ - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'slack-sdk==3.20.0', ], extras_require={ 'test': [ - 'pylint==2.8.*', - 'pytest==6.2.*', - 'pytest-cov==2.12.*', + 'pylint==4.0.5', + 'pytest==9.0.3', + 'pytest-cov==7.1.0', ] }, - python_requires='>=3.6', + python_requires='>=3.12.0, <3.13', entry_points=''' [console_scripts] tap-slack=tap_slack:main diff --git a/singer-connectors/tap-snowflake/setup.py b/singer-connectors/tap-snowflake/setup.py index 8847f8a15..7e2378f81 100644 --- a/singer-connectors/tap-snowflake/setup.py +++ b/singer-connectors/tap-snowflake/setup.py @@ -18,15 +18,15 @@ ], 
py_modules=['tap_snowflake'], install_requires=[ - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'snowflake-connector-python[pandas]==3.15.*', 'pendulum==1.2.0' ], extras_require={ 'test': [ - 'pylint==2.8.*', - 'pytest==6.2.*', - 'pytest-cov==2.12.*', + 'pylint==4.0.5', + 'pytest==9.0.3', + 'pytest-cov==7.1.0', 'unify==0.5' ] }, diff --git a/singer-connectors/tap-twilio/setup.py b/singer-connectors/tap-twilio/setup.py index a2d6441c8..15ee9f8ca 100644 --- a/singer-connectors/tap-twilio/setup.py +++ b/singer-connectors/tap-twilio/setup.py @@ -19,15 +19,15 @@ py_modules=['tap_twilio'], install_requires=[ 'requests==2.25.*', - 'pipelinewise-singer-python==1.*' + 'pipelinewise-singer-python==3.0.2' ], extras_require={ 'test': [ - 'pylint==2.9.*', - 'pytest==6.2.*' + 'pylint==4.0.5', + 'pytest==9.0.3' ] }, - python_requires='>=3.6', + python_requires='>=3.12.0, <3.13', entry_points=''' [console_scripts] tap-twilio=tap_twilio:main diff --git a/singer-connectors/tap-zendesk/setup.py b/singer-connectors/tap-zendesk/setup.py index ea2a69218..6062b097c 100644 --- a/singer-connectors/tap-zendesk/setup.py +++ b/singer-connectors/tap-zendesk/setup.py @@ -15,7 +15,7 @@ classifiers=['Programming Language :: Python :: 3 :: Only'], py_modules=['tap_zendesk'], install_requires=[ - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'zenpy==2.0.52', ], extras_require={ diff --git a/singer-connectors/target-postgres/setup.py b/singer-connectors/target-postgres/setup.py index 989235929..63420fe33 100644 --- a/singer-connectors/target-postgres/setup.py +++ b/singer-connectors/target-postgres/setup.py @@ -18,16 +18,16 @@ ], py_modules=["target_postgres"], install_requires=[ - 'pipelinewise-singer-python==2.*', + 'pipelinewise-singer-python==3.0.2', 'psycopg2-binary==2.9.10', 'inflection==0.3.1', 'joblib==1.2.0', ], extras_require={ "test": [ - 'pytest==6.2.5', - 'pylint==2.6.0', - 'pytest-cov==2.10.1', + 'pytest==9.0.3', + 'pylint==4.0.5', + 'pytest-cov==7.1.0', ] }, entry_points=""" diff --git a/singer-connectors/target-s3-csv/setup.py b/singer-connectors/target-s3-csv/setup.py index 57c82f425..748169e47 100644 --- a/singer-connectors/target-s3-csv/setup.py +++ b/singer-connectors/target-s3-csv/setup.py @@ -7,7 +7,7 @@ setup(name="pipelinewise-target-s3-csv", version="2.0.0", - python_requires=">=3.7.0, <3.11", + python_requires=">=3.12.0, <3.13", description="Singer.io target for writing CSV files and upload to S3 - PipelineWise compatible", long_description=long_description, long_description_content_type='text/markdown', @@ -16,22 +16,19 @@ classifiers=[ 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10' + 'Programming Language :: Python :: 3.12', ], py_modules=["target_s3_csv"], install_requires=[ - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'inflection==0.5.1', 'boto3==1.17.39', ], extras_require={ "test": [ - 'pylint==2.10.*', - 'pytest==6.2.*', - 'pytest-cov==2.12.*', + 'pylint==4.0.5', + 'pytest==9.0.3', + 'pytest-cov==7.1.0', ] }, entry_points=""" diff --git a/singer-connectors/target-snowflake/setup.py b/singer-connectors/target-snowflake/setup.py index e504c99c3..7d9015eec 100644 --- a/singer-connectors/target-snowflake/setup.py +++ b/singer-connectors/target-snowflake/setup.py @@ -21,9 +21,9 @@ 
'Programming Language :: Python :: 3.10', ], py_modules=["target_snowflake"], - python_requires='>=3.7', + python_requires='>=3.12.0, <3.13', install_requires=[ - 'pipelinewise-singer-python==2.*', + 'pipelinewise-singer-python==3.0.2', 'numpy==1.26.4', # numpy 2.X is not compatible with our used pandas 'snowflake-connector-python[pandas]==3.15.0', 'inflection==0.5.1', @@ -32,10 +32,10 @@ ], extras_require={ "test": [ - "pylint==2.12.*", - 'pytest==7.4.0', - 'pytest-cov==3.0.0', - "python-dotenv>=0.19,<1.1" + "pylint==4.0.5", + 'pytest==9.0.3', + 'pytest-cov==7.1.0', + "python-dotenv==1.2.2" ] }, entry_points=""" diff --git a/singer-connectors/transform-field/setup.py b/singer-connectors/transform-field/setup.py index 49b0bb64f..bf7e41090 100644 --- a/singer-connectors/transform-field/setup.py +++ b/singer-connectors/transform-field/setup.py @@ -22,14 +22,14 @@ ], py_modules=['transform_field'], install_requires=[ - 'pipelinewise-singer-python==1.*', + 'pipelinewise-singer-python==3.0.2', 'dpath==2.0.*', ], extras_require={ 'test': [ - 'pytest==6.2.*', - 'pytest-cov==3.0.*', - 'pylint==2.12.*', + 'pytest==9.0.3', + 'pytest-cov==7.1.0', + 'pylint==4.0.5', ] }, entry_points=''' diff --git a/tests/end_to_end/target_snowflake/__init__.py b/tests/end_to_end/target_snowflake/__init__.py index 91f419926..bbf9f9cb5 100644 --- a/tests/end_to_end/target_snowflake/__init__.py +++ b/tests/end_to_end/target_snowflake/__init__.py @@ -42,7 +42,6 @@ def tearDown(self): self.drop_sf_schema_if_exists(f'ppw_e2e_{self.tap_type}_public2{self.e2e_env.sf_schema_postfix}'.upper()) super().tearDown() - # pylint: disable=no-self-use def get_e2e_env(self) -> E2EEnv: """ get validated end-to-end environment @@ -59,7 +58,6 @@ def check_snowflake_credentials_provided(self): if self.e2e_env.env['TARGET_SNOWFLAKE']['is_configured'] is False: self.skipTest('TARGET SNOWFLAKE credentials are not configured') - # pylint: disable=no-self-use def check_validate_taps(self): """ run `pipelinewise validate` @@ -69,7 +67,6 @@ def check_validate_taps(self): ) assertions.assert_command_success(return_code, stdout, stderr) - # pylint: disable=no-self-use def check_import_config(self): """ run `pipelinewise import_config` @@ -87,7 +84,6 @@ def drop_sf_schema_if_exists(self, schema: str): f'DROP SCHEMA IF EXISTS {schema} CASCADE' ) - # pylint: disable=no-self-use def remove_dir_from_config_dir(self, dir_path: str): """ remove directory from config directory diff --git a/tests/end_to_end/target_snowflake/tap_mariadb/test_resync_mariadb_to_sf_table_size_check.py b/tests/end_to_end/target_snowflake/tap_mariadb/test_resync_mariadb_to_sf_table_size_check.py index 9804aeae0..32608c484 100644 --- a/tests/end_to_end/target_snowflake/tap_mariadb/test_resync_mariadb_to_sf_table_size_check.py +++ b/tests/end_to_end/target_snowflake/tap_mariadb/test_resync_mariadb_to_sf_table_size_check.py @@ -30,7 +30,7 @@ def tearDown(self): pass super().tearDown() - def test_resync_mariadb_to_sf_if_table_size_greater_than_limit(self): # pylint: disable = no-self-use + def test_resync_mariadb_to_sf_if_table_size_greater_than_limit(self): """test resync mariadb to SF returns error 1 if table size is greater than the limit""" a_small_number = 0.001 # Mb @@ -42,7 +42,7 @@ def test_resync_mariadb_to_sf_if_table_size_greater_than_limit(self): # pylint: assert return_code == 1 - def test_resync_mariadb_to_sf_if_table_size_less_than_limit(self): # pylint: disable = no-self-use + def test_resync_mariadb_to_sf_if_table_size_less_than_limit(self): """test resync mariadb 
to SF returns error if table size is less than the limit""" a_big_number = 10000 #Mb _create_ppw_config_file(table_mb=a_big_number) @@ -52,7 +52,7 @@ def test_resync_mariadb_to_sf_if_table_size_less_than_limit(self): # pylint: di assert return_code == 0 - def test_resync_mariadb_to_sf_if_table_size_greater_than_limit_and_force(self): # pylint: disable = no-self-use + def test_resync_mariadb_to_sf_if_table_size_greater_than_limit_and_force(self): """test resync mariadb to SF returns error if table size is greater than the limit and --force is used""" a_small_number = 0.001 # Mb _create_ppw_config_file(table_mb=a_small_number) @@ -63,7 +63,7 @@ def test_resync_mariadb_to_sf_if_table_size_greater_than_limit_and_force(self): assert return_code == 0 - def test_run_tap_mariadb_to_sf_if_size_greater_than_limit(self): # pylint: disable = no-self-use + def test_run_tap_mariadb_to_sf_if_size_greater_than_limit(self): """test run_tap mariadb to sf if table size is greater than the limit""" a_small_number = 0.001 # Mb _create_ppw_config_file(table_mb=a_small_number) diff --git a/tests/end_to_end/target_snowflake/tap_postgres/test_resync_pg_to_sf_table_size_check.py b/tests/end_to_end/target_snowflake/tap_postgres/test_resync_pg_to_sf_table_size_check.py index ec0152195..add3c7a3f 100644 --- a/tests/end_to_end/target_snowflake/tap_postgres/test_resync_pg_to_sf_table_size_check.py +++ b/tests/end_to_end/target_snowflake/tap_postgres/test_resync_pg_to_sf_table_size_check.py @@ -31,7 +31,7 @@ def tearDown(self): pass super().tearDown() - def test_resync_pg_to_sf_if_table_size_greater_than_limit(self): # pylint: disable = no-self-use + def test_resync_pg_to_sf_if_table_size_greater_than_limit(self): """test resync pg to SF returns error 1 if table size is greater than the limit""" a_small_number = 0.001 # Mb @@ -43,7 +43,7 @@ def test_resync_pg_to_sf_if_table_size_greater_than_limit(self): # pylint: dis assert return_code == 1 - def test_resync_pg_to_sf_if_table_size_less_than_limit(self): # pylint: disable = no-self-use + def test_resync_pg_to_sf_if_table_size_less_than_limit(self): """test resync pg to SF returns error if table size is less than the limit""" a_big_number = 1000 # Mb _create_ppw_config_file(table_mb=a_big_number) @@ -53,8 +53,7 @@ def test_resync_pg_to_sf_if_table_size_less_than_limit(self): # pylint: disabl assert return_code == 0 - def test_resync_pg_to_sf_if_table_size_greater_than_limit_and_force(self): # pylint: disable = no-self-use - + def test_resync_pg_to_sf_if_table_size_greater_than_limit_and_force(self): """test resync pg to SF returns error if table size is greater than the limit and --force is used""" a_small_number = 0.001 # Mb _create_ppw_config_file(table_mb=a_small_number) @@ -65,7 +64,7 @@ def test_resync_pg_to_sf_if_table_size_greater_than_limit_and_force(self): # p assert return_code == 0 - def test_run_tap_pg_to_sf_if_size_greater_than_limit(self): # pylint: disable = no-self-use + def test_run_tap_pg_to_sf_if_size_greater_than_limit(self): """test run_tap postgres to sf if table size is greater than the limit""" a_small_number = 0.001 # Mb _create_ppw_config_file(table_mb=a_small_number) diff --git a/tests/units/cli/cli_args.py b/tests/units/cli/cli_args.py index 795f5b3bb..0676ff19f 100644 --- a/tests/units/cli/cli_args.py +++ b/tests/units/cli/cli_args.py @@ -7,6 +7,7 @@ class CliArgs: """Class to simulate argparse command line arguments required by PipelineWise class""" + # pylint: disable=too-many-positional-arguments def __init__( self, target='*', diff --git 
a/tests/units/cli/test_alert_sender.py b/tests/units/cli/test_alert_sender.py index feee8a25f..e1b99bdc7 100644 --- a/tests/units/cli/test_alert_sender.py +++ b/tests/units/cli/test_alert_sender.py @@ -11,7 +11,7 @@ ) -# pylint: disable=no-self-use,too-few-public-methods +# pylint: disable=too-few-public-methods class TestAlertSender: """ Unit tests for PipelineWise CLI alert sender classes diff --git a/tests/units/cli/test_cli.py b/tests/units/cli/test_cli.py index 366df11c7..6cc03378a 100644 --- a/tests/units/cli/test_cli.py +++ b/tests/units/cli/test_cli.py @@ -19,18 +19,18 @@ from pipelinewise.cli.pipelinewise import PipelineWise from pipelinewise.cli.errors import DuplicateConfigException, InvalidConfigException, InvalidTransformationException -RESOURCES_DIR = '{}/resources'.format(os.path.dirname(__file__)) -CONFIG_DIR = '{}/sample_json_config'.format(RESOURCES_DIR) +RESOURCES_DIR = f'{os.path.dirname(__file__)}/resources' +CONFIG_DIR = f'{RESOURCES_DIR}/sample_json_config' VIRTUALENVS_DIR = './virtualenvs-dummy' TEST_PROJECT_NAME = 'test-project' -TEST_PROJECT_DIR = '{}/{}'.format(os.getcwd(), TEST_PROJECT_NAME) +TEST_PROJECT_DIR = f'{os.getcwd()}/{TEST_PROJECT_NAME}' PROFILING_DIR = './profiling' # Can't inherit from unittest.TestCase because it breaks pytest fixture # https://github.com/pytest-dev/pytest/issues/2504#issuecomment-308828149 -# pylint: disable=no-self-use,too-many-public-methods,attribute-defined-outside-init,too-many-lines,fixme +# pylint: disable=too-many-public-methods,attribute-defined-outside-init,too-many-lines,fixme class TestCli: """ Unit Tests for PipelineWise CLI executable @@ -140,25 +140,25 @@ def test_target_dir(self): """Singer target connector config path must be relative to the project config dir""" assert self.pipelinewise.get_target_dir( 'dummy-target' - ) == '{}/dummy-target'.format(CONFIG_DIR) + ) == f'{CONFIG_DIR}/dummy-target' def test_tap_dir(self): """Singer tap connector config path must be relative to the target connector config path""" assert self.pipelinewise.get_tap_dir( 'dummy-target', 'dummy-tap' - ) == '{}/dummy-target/dummy-tap'.format(CONFIG_DIR) + ) == f'{CONFIG_DIR}/dummy-target/dummy-tap' def test_tap_log_dir(self): """Singer tap log path must be relative to the tap connector config path""" assert self.pipelinewise.get_tap_log_dir( 'dummy-target', 'dummy-tap' - ) == '{}/dummy-target/dummy-tap/log'.format(CONFIG_DIR) + ) == f'{CONFIG_DIR}/dummy-target/dummy-tap/log' def test_connector_bin(self): """Singer connector binary must be at a certain location under PIPELINEWISE_HOME .virtualenvs dir""" assert self.pipelinewise.get_connector_bin( 'dummy-type' - ) == '{}/dummy-type/bin/dummy-type'.format(VIRTUALENVS_DIR) + ) == f'{VIRTUALENVS_DIR}/dummy-type/bin/dummy-type' def test_not_existing_config_dir(self): """Test with not existing config dir""" @@ -175,13 +175,13 @@ def test_not_existing_config_dir(self): def test_get_targets(self): """Targets should be loaded from JSON as is""" assert self.pipelinewise.get_targets() == cli.utils.load_json( - '{}/config.json'.format(CONFIG_DIR) + f'{CONFIG_DIR}/config.json' ).get('targets', []) def test_get_target(self): """Selecting target by ID should append connector files""" # Get target definitions from JSON file - targets = cli.utils.load_json('{}/config.json'.format(CONFIG_DIR)).get( + targets = cli.utils.load_json(f'{CONFIG_DIR}/config.json').get( 'targets', [] ) exp_target_one = next( @@ -193,10 +193,10 @@ def test_get_target(self): # Append the connector file paths to the expected targets 
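
The surrounding test_cli.py changes are mechanical conversions of '{}'.format() calls to f-strings; the two forms are equivalent, as in this small illustration with a placeholder path:

    base_dir = '/opt/project'  # placeholder
    old_style = '{}/resources'.format(base_dir)
    new_style = f'{base_dir}/resources'
    assert old_style == new_style
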
         exp_target_one['files'] = Config.get_connector_files(
-            '{}/target_one'.format(CONFIG_DIR)
+            f'{CONFIG_DIR}/target_one'
         )
         exp_target_two['files'] = Config.get_connector_files(
-            '{}/target_two'.format(CONFIG_DIR)
+            f'{CONFIG_DIR}/target_two'
         )

         # Getting target by ID should match to original JSON and should contains the connector files list
@@ -206,7 +206,7 @@ def test_get_target(self):
     def test_get_taps(self):
         """Selecting taps by target ID should append tap statuses"""
         # Get target definitions from JSON file
-        targets = cli.utils.load_json('{}/config.json'.format(CONFIG_DIR)).get(
+        targets = cli.utils.load_json(f'{CONFIG_DIR}/config.json').get(
             'targets', []
         )
         target_one = next(
@@ -239,7 +239,7 @@ def test_get_taps(self):
     def test_get_tap(self):
         """Getting tap by ID should return status, connector and target props as well"""
         # Get target definitions from JSON file
-        targets = cli.utils.load_json('{}/config.json'.format(CONFIG_DIR)).get(
+        targets = cli.utils.load_json(f'{CONFIG_DIR}/config.json').get(
             'targets', []
         )
         target_one = next(
@@ -252,7 +252,7 @@ def test_get_tap(self):
             'target_one', exp_tap_one['id']
         )
         exp_tap_one['files'] = Config.get_connector_files(
-            '{}/target_one/tap_one'.format(CONFIG_DIR)
+            f'{CONFIG_DIR}/target_one/tap_one'
         )
         exp_tap_one['target'] = self.pipelinewise.get_target('target_one')
@@ -293,12 +293,8 @@ def test_create_filtered_tap_props(self):
         ) = self.pipelinewise.create_filtered_tap_properties(
             target_type=ConnectorType('target-snowflake'),
             tap_type=ConnectorType('tap-mysql'),
-            tap_properties='{}/resources/sample_json_config/target_one/tap_one/properties.json'.format(
-                os.path.dirname(__file__)
-            ),
-            tap_state='{}/resources/sample_json_config/target_one/tap_one/state.json'.format(
-                os.path.dirname(__file__)
-            ),
+            tap_properties=f'{os.path.dirname(__file__)}/resources/sample_json_config/target_one/tap_one/properties.json',  # pylint: disable=line-too-long
+            tap_state=f'{os.path.dirname(__file__)}/resources/sample_json_config/target_one/tap_one/state.json',
             filters={
                 'selected': True,
                 'tap_target_pairs': {
@@ -338,12 +334,8 @@ def test_create_filtered_tap_props_no_fastsync(self):
         ) = self.pipelinewise.create_filtered_tap_properties(
             target_type=ConnectorType('target-snowflake'),
             tap_type=ConnectorType('tap-mysql'),
-            tap_properties='{}/resources/sample_json_config/target_one/tap_one/properties.json'.format(
-                os.path.dirname(__file__)
-            ),
-            tap_state='{}/resources/sample_json_config/target_one/tap_one/state.json'.format(
-                os.path.dirname(__file__)
-            ),
+            tap_properties=f'{os.path.dirname(__file__)}/resources/sample_json_config/target_one/tap_one/properties.json',  # pylint: disable=line-too-long
+            tap_state=f'{os.path.dirname(__file__)}/resources/sample_json_config/target_one/tap_one/state.json',
             filters={
                 'selected': True,
                 'tap_target_pairs': {
@@ -386,9 +378,7 @@ def test_merge_same_catalog(self):
         """Test merging not empty schemas"""
         # TODO: Check if pipelinewise.merge_schemas is required at all or not
         tap_one_catalog = cli.utils.load_json(
-            '{}/resources/sample_json_config/target_one/tap_one/properties.json'.format(
-                os.path.dirname(__file__)
-            )
+            f'{os.path.dirname(__file__)}/resources/sample_json_config/target_one/tap_one/properties.json'
         )

         assert (
@@ -399,14 +389,10 @@ def test_merge_updated_catalog(self):
         """Test merging not empty schemas"""
         tap_one_catalog = cli.utils.load_json(
-            '{}/resources/sample_json_config/target_one/tap_one/properties.json'.format(
-                os.path.dirname(__file__)
-            )
+            f'{os.path.dirname(__file__)}/resources/sample_json_config/target_one/tap_one/properties.json'
         )

         tap_one_updated_catalog = cli.utils.load_json(
-            '{}/resources/sample_json_config/target_one/tap_one/properties_updated.json'.format(
-                os.path.dirname(__file__)
-            )
+            f'{os.path.dirname(__file__)}/resources/sample_json_config/target_one/tap_one/properties_updated.json'
         )

         assert (
@@ -417,14 +403,10 @@ def test_make_default_selection(self):
         """Test if streams selected correctly in catalog JSON"""
         tap_one_catalog = cli.utils.load_json(
-            '{}/resources/sample_json_config/target_one/tap_one/properties.json'.format(
-                os.path.dirname(__file__)
-            )
+            f'{os.path.dirname(__file__)}/resources/sample_json_config/target_one/tap_one/properties.json'
         )

         tap_one_selection_file = (
-            '{}/resources/sample_json_config/target_one/tap_one/selection.json'.format(
-                os.path.dirname(__file__)
-            )
+            f'{os.path.dirname(__file__)}/resources/sample_json_config/target_one/tap_one/selection.json'
         )

         # Update catalog selection
@@ -470,12 +452,8 @@ def test_make_default_selection(self):

     def test_target_config(self):
         """Test merging target config.json and inheritable_config.json"""
-        target_config = '{}/resources/target-config.json'.format(
-            os.path.dirname(__file__)
-        )
-        tap_inheritable_config = '{}/resources/tap-inheritable-config.json'.format(
-            os.path.dirname(__file__)
-        )
+        target_config = f'{os.path.dirname(__file__)}/resources/target-config.json'
+        tap_inheritable_config = f'{os.path.dirname(__file__)}/resources/tap-inheritable-config.json'

         # The merged JSON written into a temp file
         temp_file = self.pipelinewise.create_consumable_target_config(
@@ -516,7 +494,7 @@ def test_target_config(self):

     def test_invalid_target_config(self):
         """Test merging invalid target config.json and inheritable_config.json"""
-        target_config = '{}/resources/invalid.json'.format(os.path.dirname(__file__))
+        target_config = f'{os.path.dirname(__file__)}/resources/invalid.json'
         tap_inheritable_config = 'not-existing-json'

         # Merging invalid or not existing JSONs should raise exception
@@ -568,7 +546,7 @@ def test_send_alert_to_tap_specific_slack_channel(self):

     def test_command_encrypt_string(self, capsys):
         """Test vault encryption command output"""
-        secret_path = '{}/resources/vault-secret.txt'.format(os.path.dirname(__file__))
+        secret_path = f'{os.path.dirname(__file__)}/resources/vault-secret.txt'
         args = CliArgs(string='plain text', secret=secret_path)
         pipelinewise = PipelineWise(args, CONFIG_DIR, VIRTUALENVS_DIR)
@@ -589,7 +567,7 @@ def test_command_init(self):

         # The test project should contain every sample YAML file
         for sample_yaml in os.listdir(
-            '{}/../../../pipelinewise/cli/samples'.format(os.path.dirname(__file__))
+            f'{os.path.dirname(__file__)}/../../../pipelinewise/cli/samples'
         ):
             assert os.path.isfile(os.path.join(TEST_PROJECT_DIR, sample_yaml))
@@ -659,7 +637,7 @@ def test_command_stop_tap(self):
         args = CliArgs(target='target_one', tap='tap_one')
         pipelinewise = PipelineWise(args, CONFIG_DIR, VIRTUALENVS_DIR)
         pipelinewise.tap_run_log_file = 'test-tap-run-dummy.log'
-        Path('{}.running'.format(pipelinewise.tap_run_log_file)).touch()
+        Path(f'{pipelinewise.tap_run_log_file}.running').touch()

         # Tap is not running, pid file not exist, should exit with error
         with pytest.raises(SystemExit) as pytest_wrapped_e:
@@ -689,10 +667,10 @@ def test_command_stop_tap(self):
             assert re.match('scheduler|pipelinewise|tap|target', full_command) is None

         # Graceful exit should rename log file from running status to terminated
-        assert os.path.isfile('{}.terminated'.format(pipelinewise.tap_run_log_file))
+        assert os.path.isfile(f'{pipelinewise.tap_run_log_file}.terminated')

         # Delete test log file
-        os.remove('{}.terminated'.format(pipelinewise.tap_run_log_file))
+        os.remove(f'{pipelinewise.tap_run_log_file}.terminated')

     def test_command_run_tap_exit_with_error_1_if_fastsync_exception(self):
         """Test if run_tap command returns error 1 if exception in fastsync"""
@@ -865,33 +843,31 @@ def test_post_import_checks(self):
         """Test post import checks"""
         args = CliArgs()
         pipelinewise = PipelineWise(args, CONFIG_DIR, VIRTUALENVS_DIR)
-        test_files_dir = '{}/resources/test_post_import_checks'.format(
-            os.path.dirname(__file__)
-        )
+        test_files_dir = f'{os.path.dirname(__file__)}/resources/test_post_import_checks'

         tap_pk_required = cli.utils.load_json(
-            '{}/tap_config_pk_required.json'.format(test_files_dir)
+            f'{test_files_dir}/tap_config_pk_required.json'
         )
         tap_pk_not_required = cli.utils.load_json(
-            '{}/tap_config_pk_not_required.json'.format(test_files_dir)
+            f'{test_files_dir}/tap_config_pk_not_required.json'
         )
         tap_pk_not_defined = cli.utils.load_json(
-            '{}/tap_config_pk_not_defined.json'.format(test_files_dir)
+            f'{test_files_dir}/tap_config_pk_not_defined.json'
         )
         tap_with_pk = cli.utils.load_json(
-            '{}//tap_properties_with_pk.json'.format(test_files_dir)
+            f'{test_files_dir}//tap_properties_with_pk.json'
         )
         tap_with_no_pk_full_table = cli.utils.load_json(
-            '{}//tap_properties_with_no_pk_full_table.json'.format(test_files_dir)
+            f'{test_files_dir}//tap_properties_with_no_pk_full_table.json'
         )
         tap_with_no_pk_incremental = cli.utils.load_json(
-            '{}//tap_properties_with_no_pk_incremental.json'.format(test_files_dir)
+            f'{test_files_dir}//tap_properties_with_no_pk_incremental.json'
         )
         tap_with_no_pk_log_based = cli.utils.load_json(
-            '{}//tap_properties_with_no_pk_log_based.json'.format(test_files_dir)
+            f'{test_files_dir}//tap_properties_with_no_pk_log_based.json'
         )
         tap_with_no_pk_not_selected = cli.utils.load_json(
-            '{}//tap_properties_with_no_pk_not_selected.json'.format(test_files_dir)
+            f'{test_files_dir}//tap_properties_with_no_pk_not_selected.json'
         )

         with patch(
@@ -977,7 +953,7 @@ def test_post_import_checks(self):

             # Test scenarios when post import checks should fail due to transformations validation command fails
             tap_with_trans = cli.utils.load_json(
-                '{}/tap_config_with_transformations.json'.format(test_files_dir)
+                f'{test_files_dir}/tap_config_with_transformations.json'
             )

             run_command_mock.return_value = (
diff --git a/tests/units/cli/test_cli_2.py b/tests/units/cli/test_cli_2.py
index 5d66341a4..ba881399a 100644
--- a/tests/units/cli/test_cli_2.py
+++ b/tests/units/cli/test_cli_2.py
@@ -7,15 +7,15 @@
 from pipelinewise.cli import PipelineWise
 from .cli_args import CliArgs

-RESOURCES_DIR = '{}/resources'.format(os.path.dirname(__file__))
-CONFIG_DIR = '{}/sample_json_config'.format(RESOURCES_DIR)
+RESOURCES_DIR = f'{os.path.dirname(__file__)}/resources'
+CONFIG_DIR = f'{RESOURCES_DIR}/sample_json_config'
 VIRTUALENVS_DIR = './virtualenvs-dummy'
 TEST_PROJECT_NAME = 'test-project'
-TEST_PROJECT_DIR = '{}/{}'.format(os.getcwd(), TEST_PROJECT_NAME)
+TEST_PROJECT_DIR = f'{os.getcwd()}/{TEST_PROJECT_NAME}'
 PROFILING_DIR = './profiling'


-# pylint: disable=no-self-use,attribute-defined-outside-init,fixme
+# pylint: disable=attribute-defined-outside-init,fixme
 class TestCli2:
     """
     Continuation of pipelinewise unit tests
diff --git a/tests/units/cli/test_cli_utils.py b/tests/units/cli/test_cli_utils.py
index 21701574a..391cc4f60 100644
--- a/tests/units/cli/test_cli_utils.py
+++ b/tests/units/cli/test_cli_utils.py
@@ -10,7 +10,7 @@
 VIRTUALENVS_DIR = './virtualenvs-dummy'


-# pylint: disable=no-self-use,too-many-public-methods,fixme
+# pylint: disable=too-many-public-methods,fixme
 class TestUtils:
     """
     Unit Tests for PipelineWise CLI utility functions
@@ -31,9 +31,9 @@ def test_json_detectors(self):
         assert cli.utils.is_json('{"prop-str":"dummy-string","prop-int":123,"prop-bool":true}') is True

         assert cli.utils.is_json_file('./dummy-json') is False
-        assert cli.utils.is_json_file('{}/resources/example.json'.format(os.path.dirname(__file__))) is True
-        assert cli.utils.is_json_file('{}/resources/invalid.json'.format(os.path.dirname(__file__))) is False
-        assert cli.utils.is_json_file('{}/resources'.format(os.path.dirname(__file__))) is False
+        assert cli.utils.is_json_file(f'{os.path.dirname(__file__)}/resources/example.json') is True
+        assert cli.utils.is_json_file(f'{os.path.dirname(__file__)}/resources/invalid.json') is False
+        assert cli.utils.is_json_file(f'{os.path.dirname(__file__)}/resources') is False

     def test_json_loader(self):
         """Testing JSON loader functions"""
@@ -42,11 +42,11 @@ def test_json_loader(self):

         # Loading JSON file with invalid JSON syntax should raise exception
         with pytest.raises(Exception):
-            cli.utils.load_json('{}/resources/invalid.json'.format(os.path.dirname(__file__)))
+            cli.utils.load_json(f'{os.path.dirname(__file__)}/resources/invalid.json')

         # Loading JSON should return python dict
         assert \
-            cli.utils.load_json('{}/resources/example.json'.format(os.path.dirname(__file__))) == \
+            cli.utils.load_json(f'{os.path.dirname(__file__)}/resources/example.json') == \
             {
                 'glossary': {
                     'title': 'example glossary',
@@ -99,9 +99,9 @@ def test_yaml_detectors(self):
         """) is True

         assert cli.utils.is_yaml_file('./dummy-yaml') is False
-        assert cli.utils.is_yaml_file('{}/resources/example.yml'.format(os.path.dirname(__file__))) is True
-        assert cli.utils.is_yaml_file('{}/resources/invalid.yml'.format(os.path.dirname(__file__))) is False
-        assert cli.utils.is_yaml_file('{}/resources'.format(os.path.dirname(__file__))) is False
+        assert cli.utils.is_yaml_file(f'{os.path.dirname(__file__)}/resources/example.yml') is True
+        assert cli.utils.is_yaml_file(f'{os.path.dirname(__file__)}/resources/invalid.yml') is False
+        assert cli.utils.is_yaml_file(f'{os.path.dirname(__file__)}/resources') is False

     def test_yaml_loader(self):
         """Testing YAML loader functions"""
diff --git a/tests/units/cli/test_cli_utils_tap_github.py b/tests/units/cli/test_cli_utils_tap_github.py
index c17ec9513..c8d285bc2 100644
--- a/tests/units/cli/test_cli_utils_tap_github.py
+++ b/tests/units/cli/test_cli_utils_tap_github.py
@@ -8,7 +8,7 @@
 TAP_GITHUB_YAML = '{}/resources/tap-github.yml'.format(os.path.dirname(__file__))


-# pylint: disable=no-self-use,too-many-public-methods,fixme
+# pylint: disable=too-many-public-methods,fixme
 # pylint: disable=E1136,E1137  # False positive when loading vault encrypted YAML
 class TestUtils(TestCase):
     """
diff --git a/tests/units/cli/test_commands.py b/tests/units/cli/test_commands.py
index 873c83031..0f8aa6970 100644
--- a/tests/units/cli/test_commands.py
+++ b/tests/units/cli/test_commands.py
@@ -10,7 +10,6 @@
 from pipelinewise.cli.errors import StreamBufferTooLargeException


-# pylint: disable=no-self-use,fixme
 class TestCommands:
     """
     Unit tests for PipelineWise CLI commands functions
diff --git a/tests/units/cli/test_config.py b/tests/units/cli/test_config.py
index ca2f45664..8f8d2fc42 100644
--- a/tests/units/cli/test_config.py
+++ b/tests/units/cli/test_config.py
@@ -10,7 +10,6 @@


 # Todo: Inherit from unittest.TestCase
-# pylint: disable=no-self-use,fixme
 class TestConfig:
     """
     Unit Tests for PipelineWise CLI Config class
diff --git a/tests/units/fastsync/commons/test_fastsync_tap_mongodb.py b/tests/units/fastsync/commons/test_fastsync_tap_mongodb.py
index 405f4b6b0..9e828c156 100644
--- a/tests/units/fastsync/commons/test_fastsync_tap_mongodb.py
+++ b/tests/units/fastsync/commons/test_fastsync_tap_mongodb.py
@@ -78,7 +78,7 @@ def generate_all_datatypes_doc():
     }


-# pylint: disable=invalid-name,no-self-use
+# pylint: disable=invalid-name
 class TestFastSyncTapMongoDB(TestCase):
     """
     Unit tests for fastsync tap mongo
diff --git a/tests/units/fastsync/commons/test_fastsync_tap_mysql.py b/tests/units/fastsync/commons/test_fastsync_tap_mysql.py
index 928586b12..c094ab01b 100644
--- a/tests/units/fastsync/commons/test_fastsync_tap_mysql.py
+++ b/tests/units/fastsync/commons/test_fastsync_tap_mysql.py
@@ -19,6 +19,7 @@ def __init__(self, connection_config, tap_type_to_target_type=None):
         self.executed_queries = []

     # pylint: disable=too-many-arguments
+    # pylint: disable=too-many-positional-arguments
     def query(self, query, conn=None, params=None, return_as_cursor=False, n_retry=1):
         if query.startswith('INVALID-SQL'):
             raise pymysql.err.InternalError
@@ -31,7 +32,7 @@ def query(self, query, conn=None, params=None, return_as_cursor=False, n_retry=1
         return []


-# pylint: disable=invalid-name,no-self-use
+# pylint: disable=invalid-name
 class TestFastSyncTapMySql(TestCase):
     """
     Unit tests for fastsync tap mysql
diff --git a/tests/units/fastsync/commons/test_fastsync_target_snowflake.py b/tests/units/fastsync/commons/test_fastsync_target_snowflake.py
index add614fae..da081b8b4 100644
--- a/tests/units/fastsync/commons/test_fastsync_target_snowflake.py
+++ b/tests/units/fastsync/commons/test_fastsync_target_snowflake.py
@@ -21,7 +21,7 @@ def delete_object(self, Bucket, Key):
     def copy_object(self, **kwargs):
         """Mock if needed"""

-    # pylint: disable=no-self-use, unused-argument
+    # pylint: disable=unused-argument
     def head_object(self, **kwargs):
         """Mock if needed"""
         return {}
diff --git a/tests/units/fastsync/commons/test_fastsync_utils.py b/tests/units/fastsync/commons/test_fastsync_utils.py
index 94c477fee..ed2d29352 100644
--- a/tests/units/fastsync/commons/test_fastsync_utils.py
+++ b/tests/units/fastsync/commons/test_fastsync_utils.py
@@ -11,7 +11,7 @@
 RESOURCES_DIR = '{}/resources'.format(os.path.dirname(__file__))


-# pylint: disable=missing-function-docstring,no-self-use,invalid-name,too-few-public-methods
+# pylint: disable=missing-function-docstring,invalid-name,too-few-public-methods
 class MySqlMock:
     """
     MySQL mock
diff --git a/tests/units/fastsync/test_mongodb_to_postgres.py b/tests/units/fastsync/test_mongodb_to_postgres.py
index f8f12b459..e3bc0d029 100644
--- a/tests/units/fastsync/test_mongodb_to_postgres.py
+++ b/tests/units/fastsync/test_mongodb_to_postgres.py
@@ -12,7 +12,7 @@
 TARGET = 'FastSyncTargetPostgres'


-# pylint: disable=missing-function-docstring,invalid-name,no-self-use
+# pylint: disable=missing-function-docstring,invalid-name
 class MongoDBToPostgres(unittest.TestCase):
     """
     Unit tests for fastsync MongoDB to postgres
diff --git a/tests/units/fastsync/test_mongodb_to_snowflake.py b/tests/units/fastsync/test_mongodb_to_snowflake.py
index a3f46191a..d2bb3169a 100644
--- a/tests/units/fastsync/test_mongodb_to_snowflake.py
+++ b/tests/units/fastsync/test_mongodb_to_snowflake.py
@@ -12,7 +12,7 @@
 TARGET = 'FastSyncTargetSnowflake'


-# pylint: disable=missing-function-docstring,invalid-name,no-self-use
+# pylint: disable=missing-function-docstring,invalid-name
 class MongoDBToSnowflake(unittest.TestCase):
     """
     Unit tests for fastsync MongoDB to postgres
diff --git a/tests/units/fastsync/test_mysql_to_postgres.py b/tests/units/fastsync/test_mysql_to_postgres.py
index bca128ed6..07a41a8a7 100644
--- a/tests/units/fastsync/test_mysql_to_postgres.py
+++ b/tests/units/fastsync/test_mysql_to_postgres.py
@@ -12,7 +12,7 @@
 TARGET = 'FastSyncTargetPostgres'


-# pylint: disable=missing-function-docstring,invalid-name,no-self-use
+# pylint: disable=missing-function-docstring,invalid-name
 class S3CsvToPostgres(unittest.TestCase):
     """
     Unit tests for fastsync mysql to postgres
diff --git a/tests/units/fastsync/test_mysql_to_snowflake.py b/tests/units/fastsync/test_mysql_to_snowflake.py
index 73831fe70..df8892f8c 100644
--- a/tests/units/fastsync/test_mysql_to_snowflake.py
+++ b/tests/units/fastsync/test_mysql_to_snowflake.py
@@ -12,7 +12,7 @@
 TARGET = 'FastSyncTargetSnowflake'


-# pylint: disable=missing-function-docstring,invalid-name,no-self-use
+# pylint: disable=missing-function-docstring,invalid-name
 class S3CsvToPostgres(unittest.TestCase):
     """
     Unit tests for fastsync mysql to snowflake
diff --git a/tests/units/fastsync/test_postgres_to_postgres.py b/tests/units/fastsync/test_postgres_to_postgres.py
index e821ee22a..02a16561d 100644
--- a/tests/units/fastsync/test_postgres_to_postgres.py
+++ b/tests/units/fastsync/test_postgres_to_postgres.py
@@ -12,7 +12,7 @@
 TARGET = 'FastSyncTargetPostgres'


-# pylint: disable=missing-function-docstring,invalid-name,no-self-use
+# pylint: disable=missing-function-docstring,invalid-name
 class PostgresToPostgres(unittest.TestCase):
     """
     Unit tests for postgres postgres to postgres
diff --git a/tests/units/fastsync/test_postgres_to_snowflake.py b/tests/units/fastsync/test_postgres_to_snowflake.py
index 7db82d791..772196297 100644
--- a/tests/units/fastsync/test_postgres_to_snowflake.py
+++ b/tests/units/fastsync/test_postgres_to_snowflake.py
@@ -12,7 +12,7 @@
 TARGET = 'FastSyncTargetSnowflake'


-# pylint: disable=missing-function-docstring,invalid-name,no-self-use
+# pylint: disable=missing-function-docstring,invalid-name
 class PostgresToSnowflake(unittest.TestCase):
     """
     Unit tests for fastsync postgres to snowflake
diff --git a/tests/units/partialsync/test_mysql_to_snowflake.py b/tests/units/partialsync/test_mysql_to_snowflake.py
index c419c5e13..0edd909f9 100644
--- a/tests/units/partialsync/test_mysql_to_snowflake.py
+++ b/tests/units/partialsync/test_mysql_to_snowflake.py
@@ -172,6 +172,7 @@ def __exit__(self, *args, **kwargs):
             for message in log_messages:
                 self.assertIn(message, actual_logs.output[log_index])

+    # pylint: disable=too-many-positional-arguments
     @mock.patch('pipelinewise.fastsync.partialsync.utils.load_into_snowflake')
     @mock.patch('pipelinewise.fastsync.partialsync.utils.upload_to_s3')
     @mock.patch('pipelinewise.fastsync.commons.utils.save_state_file')
diff --git a/tests/units/partialsync/test_partial_sync_utils.py b/tests/units/partialsync/test_partial_sync_utils.py
index 55ab04c38..7325ae773 100644
--- a/tests/units/partialsync/test_partial_sync_utils.py
+++ b/tests/units/partialsync/test_partial_sync_utils.py
@@ -30,7 +30,6 @@ def test_upload_to_s3(self):
         self.assertTupleEqual(([test_s3_key], test_s3_key), actual_return)
         mocked_upload_to_s3.assert_called_with(test_file_part, tmp_dir=temp_test_dir)

-    # pylint: disable=no-self-use
     def test_load_into_snowflake_hard_delete(self):
         """Test load_into_snowflake method with hard delete"""
         snowflake = mock.MagicMock()
@@ -65,7 +64,6 @@ def test_load_into_snowflake_hard_delete(self):
             mock.call.drop_table(target['schema'], target['temp'])
         ])

-    # pylint: disable=no-self-use
     def test_load_into_snowflake_soft_delete(self):
         """Test load_into_snowflake method with soft delete"""
         snowflake = mock.MagicMock()
@@ -129,7 +127,6 @@ def test_load_into_snowflake_drop_target_table_enabled(self):
             mock.call.swap_tables(target['schema'], target['table']),
         ])

-    # pylint: disable=no-self-use
     def test_update_state_file(self):
         """Test state file updating with and without end value"""
         bookmark = {'foo': 2}
diff --git a/tests/units/partialsync/test_postgres_to_snowflake.py b/tests/units/partialsync/test_postgres_to_snowflake.py
index 20524ae02..935d9c531 100644
--- a/tests/units/partialsync/test_postgres_to_snowflake.py
+++ b/tests/units/partialsync/test_postgres_to_snowflake.py
@@ -168,6 +168,7 @@ def __exit__(self, *args, **kwargs):
             for message in log_messages:
                 self.assertIn(message, actual_logs.output[log_index])

+    # pylint: disable=too-many-positional-arguments
     @mock.patch('pipelinewise.fastsync.partialsync.utils.load_into_snowflake')
     @mock.patch('pipelinewise.fastsync.partialsync.utils.upload_to_s3')
     @mock.patch('pipelinewise.fastsync.commons.utils.save_state_file')
diff --git a/tests/units/partialsync/utils.py b/tests/units/partialsync/utils.py
index 176d61f2a..e52c51815 100644
--- a/tests/units/partialsync/utils.py
+++ b/tests/units/partialsync/utils.py
@@ -6,6 +6,7 @@
 class PartialSync2SFArgs:
     """Arguments for using in mysql to snowflake tests"""

     # pylint: disable=too-many-arguments
+    # pylint: disable=too-many-positional-arguments
     def __init__(self, temp_test_dir, table='email', start_value='FOO_START', end_value='FOO_END',
                  state='state.json', hard_delete=None, drop_target_table=False):
diff --git a/tests/units/test_logger.py b/tests/units/test_logger.py
index c54e9eb55..97f0cd0a6 100644
--- a/tests/units/test_logger.py
+++ b/tests/units/test_logger.py
@@ -6,7 +6,6 @@
 from tests.units.cli.cli_args import CliArgs


-# pylint: disable=no-self-use
 class TestLogging:
     """
     Unit Tests for PipelineWise Logging functions