Skip to content

Commit d07b415

Browse files
chore: Enable PL Ruff rules (#445)
1 parent 03ac31d commit d07b415

File tree

3 files changed

+29
-35
lines changed

3 files changed

+29
-35
lines changed

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -97,6 +97,7 @@ select = [
9797
"RET", # flake8-return
9898
"SIM", # flake8-simplify
9999
"TCH", # flake8-type-checking
100+
"PL", # Pylint
100101
"PERF", # Perflint
101102
"RUF", # ruff
102103
]

target_postgres/connector.py

Lines changed: 15 additions & 21 deletions
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,9 @@
44

55
import atexit
66
import io
7+
import itertools
78
import signal
9+
import sys
810
import typing as t
911
from contextlib import contextmanager
1012
from functools import cached_property
@@ -95,7 +97,7 @@ def interpret_content_encoding(self) -> bool:
9597
"""
9698
return self.config.get("interpret_content_encoding", False)
9799

98-
def prepare_table( # type: ignore[override]
100+
def prepare_table( # type: ignore[override] # noqa: PLR0913
99101
self,
100102
full_table_name: str | FullyQualifiedName,
101103
schema: dict,
@@ -121,7 +123,7 @@ def prepare_table( # type: ignore[override]
121123
meta = sa.MetaData(schema=schema_name)
122124
table: sa.Table
123125
if not self.table_exists(full_table_name=full_table_name):
124-
table = self.create_empty_table(
126+
return self.create_empty_table(
125127
table_name=table_name,
126128
meta=meta,
127129
schema=schema,
@@ -130,7 +132,6 @@ def prepare_table( # type: ignore[override]
130132
as_temp_table=as_temp_table,
131133
connection=connection,
132134
)
133-
return table
134135
meta.reflect(connection, only=[table_name])
135136
table = meta.tables[
136137
full_table_name
@@ -269,7 +270,7 @@ def to_sql_type(self, jsonschema_type: dict) -> sa.types.TypeEngine: # type: ig
269270

270271
return PostgresConnector.pick_best_sql_type(sql_type_array=sql_type_array)
271272

272-
def pick_individual_type(self, jsonschema_type: dict):
273+
def pick_individual_type(self, jsonschema_type: dict): # noqa: PLR0911
273274
"""Select the correct sql type assuming jsonschema_type has only a single type.
274275
275276
Args:
@@ -307,11 +308,7 @@ def pick_individual_type(self, jsonschema_type: dict):
307308
return ARRAY(self.to_sql_type({"type": items_type}))
308309

309310
# Case 3: tuples
310-
if isinstance(items, list):
311-
return ARRAY(JSONB())
312-
313-
# All other cases, return JSONB
314-
return JSONB()
311+
return ARRAY(JSONB()) if isinstance(items, list) else JSONB()
315312

316313
# string formats
317314
if jsonschema_type.get("format") == "date-time":
@@ -324,9 +321,7 @@ def pick_individual_type(self, jsonschema_type: dict):
324321
):
325322
return HexByteString()
326323
individual_type = th.to_sql_type(jsonschema_type)
327-
if isinstance(individual_type, VARCHAR):
328-
return TEXT()
329-
return individual_type
324+
return TEXT() if isinstance(individual_type, VARCHAR) else individual_type
330325

331326
@staticmethod
332327
def pick_best_sql_type(sql_type_array: list):
@@ -355,13 +350,12 @@ def pick_best_sql_type(sql_type_array: list):
355350
NOTYPE,
356351
]
357352

358-
for sql_type in precedence_order:
359-
for obj in sql_type_array:
360-
if isinstance(obj, sql_type):
361-
return obj
353+
for sql_type, obj in itertools.product(precedence_order, sql_type_array):
354+
if isinstance(obj, sql_type):
355+
return obj
362356
return TEXT()
363357

364-
def create_empty_table( # type: ignore[override]
358+
def create_empty_table( # type: ignore[override] # noqa: PLR0913
365359
self,
366360
table_name: str,
367361
meta: sa.MetaData,
@@ -397,7 +391,7 @@ def create_empty_table( # type: ignore[override]
397391
raise RuntimeError(
398392
f"Schema for table_name: '{table_name}'"
399393
f"does not define properties: {schema}"
400-
)
394+
) from None
401395

402396
for property_name, property_jsonschema in properties.items():
403397
is_primary_key = property_name in primary_keys
@@ -531,7 +525,7 @@ def get_column_add_ddl( # type: ignore[override]
531525
},
532526
)
533527

534-
def _adapt_column_type( # type: ignore[override]
528+
def _adapt_column_type( # type: ignore[override] # noqa: PLR0913
535529
self,
536530
schema_name: str,
537531
table_name: str,
@@ -669,7 +663,7 @@ def get_sqlalchemy_query(self, config: dict) -> dict:
669663
# ssl_enable is for verifying the server's identity to the client.
670664
if config["ssl_enable"]:
671665
ssl_mode = config["ssl_mode"]
672-
query.update({"sslmode": ssl_mode})
666+
query["sslmode"] = ssl_mode
673667
query["sslrootcert"] = self.filepath_or_certificate(
674668
value=config["ssl_certificate_authority"],
675669
alternative_name=config["ssl_storage_directory"] + "/root.crt",
@@ -764,7 +758,7 @@ def catch_signal(self, signum, frame) -> None:
764758
signum: The signal number
765759
frame: The current stack frame
766760
"""
767-
exit(1) # Calling this to be sure atexit is called, so clean_up gets called
761+
sys.exit(1) # Calling this to be sure atexit is called, so clean_up gets called
768762

769763
def _get_column_type( # type: ignore[override]
770764
self,

target_postgres/sinks.py

Lines changed: 13 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -159,19 +159,19 @@ def bulk_insert_records( # type: ignore[override]
159159
if self.append_only is False:
160160
insert_records: dict[str, dict] = {} # pk : record
161161
for record in records:
162-
insert_record = {}
163-
for column in columns:
164-
insert_record[column.name] = record.get(column.name)
162+
insert_record = {
163+
column.name: record.get(column.name) for column in columns
164+
}
165165
# No need to check for a KeyError here because the SDK already
166166
# guarantees that all key properties exist in the record.
167167
primary_key_value = "".join([str(record[key]) for key in primary_keys])
168168
insert_records[primary_key_value] = insert_record
169169
data_to_insert = list(insert_records.values())
170170
else:
171171
for record in records:
172-
insert_record = {}
173-
for column in columns:
174-
insert_record[column.name] = record.get(column.name)
172+
insert_record = {
173+
column.name: record.get(column.name) for column in columns
174+
}
175175
data_to_insert.append(insert_record)
176176
connection.execute(insert, data_to_insert)
177177
return True
@@ -252,14 +252,13 @@ def column_representation(
252252
schema: dict,
253253
) -> list[sa.Column]:
254254
"""Return a sqlalchemy table representation for the current schema."""
255-
columns: list[sa.Column] = []
256-
for property_name, property_jsonschema in schema["properties"].items():
257-
columns.append(
258-
sa.Column(
259-
property_name,
260-
self.connector.to_sql_type(property_jsonschema),
261-
)
255+
columns: list[sa.Column] = [
256+
sa.Column(
257+
property_name,
258+
self.connector.to_sql_type(property_jsonschema),
262259
)
260+
for property_name, property_jsonschema in schema["properties"].items()
261+
]
263262
return columns
264263

265264
def generate_insert_statement(
@@ -289,7 +288,7 @@ def schema_name(self) -> str | None:
289288
"""Return the schema name or `None` if using names with no schema part.
290289
291290
Note that after the next SDK release (after 0.14.0) we can remove this
292-
as it's already upstreamed.
291+
as it's already implemented upstream.
293292
294293
Returns:
295294
The target schema name.

0 commit comments

Comments (0)