From 5db3275f846da8c826a5243328a85e212d535743 Mon Sep 17 00:00:00 2001
From: Hongbo Miao <3375461+hongbo-miao@users.noreply.github.com>
Date: Fri, 10 Jan 2025 00:52:22 -0800
Subject: [PATCH] feat(ruff): enable flake8-blind-except, flake8-boolean-trap,
 flake8-bugbear, flake8-builtins (#22609)
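
Enable four more Ruff rule families and resolve the violations they
report across the repository:

- A (flake8-builtins): rename bindings that shadow builtins, e.g.
  `hash` -> `xxh128_hash` and `input` -> `image_tensor`.
- B (flake8-bugbear): prefix unused loop variables with `_`.
- BLE (flake8-blind-except): log caught exceptions with
  `logger.exception` instead of discarding them.
- FBT (flake8-boolean-trap): pass booleans as keyword arguments where
  our own APIs allow it; third-party calls that only accept positional
  booleans get targeted `# noqa: FBT003` suppressions, and FBT001 and
  FBT002 stay globally ignored for now.

The boolean-trap rewrite in computer-vision/hm-imagebind below shows
the pattern (paraphrased from the diff):

    # Before: the bare boolean is opaque at the call site
    ImageBindSearch.download_file(client, url, True)

    # After: the keyword argument states the intent
    ImageBindSearch.download_file(client, url, is_audio=True)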
---
 .ruff.toml                                             |  6 ++++++
 .../hm-opal-client/opal_fetcher_postgres/provider.py   |  2 +-
 ...parquet_to_delta_table_adsb_2x_flight_trace_data.py | 10 +++++-----
 computer-vision/hm-imagebind/src/main.py               |  8 +++++---
 .../find-taxi-top-routes-sql/src/utils/zone.py         |  4 ++--
 .../find-taxi-top-routes/src/utils/zone.py             |  4 ++--
 .../iads-data-manager/iads-config-reader/src/main.py   |  4 ++--
 .../iads-data-reader/src/utils/iads_util.py            |  2 +-
 .../veristand/hm-veristand/src/main.py                 |  2 +-
 hm-xxhash/src/main.py                                  |  6 +++---
 .../convolutional-neural-network/src/train.py          |  2 +-
 machine-learning/graph-neural-network/src/main.py      |  4 ++--
 .../graph-neural-network/src/model/conv.py             |  4 ++--
 machine-learning/hm-docling/src/main.py                |  4 ++--
 .../hm-gradio/applications/classify-image/src/main.py  |  6 +++---
 15 files changed, 38 insertions(+), 30 deletions(-)

diff --git a/.ruff.toml b/.ruff.toml
index 72867b21e5..37d99d9d6b 100644
--- a/.ruff.toml
+++ b/.ruff.toml
@@ -173,9 +173,12 @@ exclude = [
 # https://docs.astral.sh/ruff/rules/
 [lint]
 select = [
+  "A",     # flake8-builtins
   "AIR",   # Airflow
   "ANN",   # flake8-annotations
   "ASYNC", # flake8-async
+  "B",     # flake8-bugbear
+  "BLE",   # flake8-blind-except
   "C90",   # mccabe
   "COM",   # flake8-commas
   "D",     # pydocstyle
@@ -183,6 +186,7 @@ select = [
   "EM",    # flake8-errmsg
   "EXE",   # flake8-executable
   "F",     # Pyflakes
+  "FBT",   # flake8-boolean-trap
   "FAST",  # FastAPI
   "FLY",   # flynt
   "FURB",  # Refurb
@@ -213,6 +217,8 @@ ignore = [
   "D203", # Conflicts with D211
   "D212",
   "E501",
+  "FBT001",
+  "FBT002",
   "ISC001",
   "PLR0913",
   "PLR0915",
diff --git a/authorization/hm-opal-client/opal_fetcher_postgres/provider.py b/authorization/hm-opal-client/opal_fetcher_postgres/provider.py
index d29dad32a1..285ece84f5 100644
--- a/authorization/hm-opal-client/opal_fetcher_postgres/provider.py
+++ b/authorization/hm-opal-client/opal_fetcher_postgres/provider.py
@@ -29,7 +29,7 @@ class PostgresFetcherConfig(FetcherConfig):
         description="can be overridden or complement parts of the DSN",
     )
     query: str = Field(..., description="the query")
-    fetch_one: bool = Field(False, description="fetch only one row")
+    fetch_one: bool = Field(False, description="fetch only one row")  # noqa: FBT003
     dict_key: str | None = Field(
         None,
         description="array of dict will map to dict with provided dict_key",
diff --git a/cloud-infrastructure/terraform/environments/production/aws/general/files/aws-glue/spark-scripts/src/hm_write_parquet_to_delta_table_adsb_2x_flight_trace_data.py b/cloud-infrastructure/terraform/environments/production/aws/general/files/aws-glue/spark-scripts/src/hm_write_parquet_to_delta_table_adsb_2x_flight_trace_data.py
index 5b6e1eb10a..3ba0af6623 100644
--- a/cloud-infrastructure/terraform/environments/production/aws/general/files/aws-glue/spark-scripts/src/hm_write_parquet_to_delta_table_adsb_2x_flight_trace_data.py
+++ b/cloud-infrastructure/terraform/environments/production/aws/general/files/aws-glue/spark-scripts/src/hm_write_parquet_to_delta_table_adsb_2x_flight_trace_data.py
@@ -68,7 +68,7 @@ def add_dbflags_columns(
     for column_name, mask in columns_and_masks:
         df = df.withColumn(
             column_name,
-            when((col(flag_column_name).bitwiseAND(mask)) > 0, True).otherwise(False),
+            when((col(flag_column_name).bitwiseAND(mask)) > 0, True).otherwise(False),  # noqa: FBT003
         )
     return df
 
@@ -93,8 +93,8 @@ def add_trace_flags_columns(
     else:
         df = df.withColumn(
             column_name,
-            when((col(flag_column_name).bitwiseAND(mask)) > 0, True).otherwise(
-                False,
+            when((col(flag_column_name).bitwiseAND(mask)) > 0, True).otherwise(  # noqa: FBT003
+                False,  # noqa: FBT003
             ),
         )
     return df
@@ -111,8 +111,8 @@ def add_trace_on_ground_column(
         trace_on_ground_column_name,
         when(
             col(trace_altitude_ft_column_name) == lit(ground_value),
-            True,
-        ).otherwise(False),
+            True,  # noqa: FBT003
+        ).otherwise(False),  # noqa: FBT003
     )
     return df
 
diff --git a/computer-vision/hm-imagebind/src/main.py b/computer-vision/hm-imagebind/src/main.py
index a26d61f69d..8cd6f8e982 100644
--- a/computer-vision/hm-imagebind/src/main.py
+++ b/computer-vision/hm-imagebind/src/main.py
@@ -45,7 +45,7 @@ def __init__(self) -> None:
         self.table: Table | None = None
 
     @staticmethod
-    def download_file(client: httpx.Client, url: str, is_audio: bool = True) -> Path:
+    def download_file(client: httpx.Client, url: str, is_audio: bool) -> Path:
         filename = url.split("/")[-1]
         if not is_audio:
             filename = f"{filename}.jpg"
@@ -65,10 +65,12 @@ def download_file(client: httpx.Client, url: str, is_audio: bool = True) -> Path
     def download_all_files() -> tuple[list[Path], list[Path]]:
         with httpx.Client(follow_redirects=True) as client:
             audio_paths = [
-                ImageBindSearch.download_file(client, url, True) for url in AUDIO_URLS
+                ImageBindSearch.download_file(client, url, is_audio=True)
+                for url in AUDIO_URLS
             ]
             image_paths = [
-                ImageBindSearch.download_file(client, url, False) for url in IMAGE_URLS
+                ImageBindSearch.download_file(client, url, is_audio=False)
+                for url in IMAGE_URLS
             ]
             return audio_paths, image_paths
 
diff --git a/data-processing/hm-spark/applications/find-taxi-top-routes-sql/src/utils/zone.py b/data-processing/hm-spark/applications/find-taxi-top-routes-sql/src/utils/zone.py
index ab1551c782..b987af095f 100644
--- a/data-processing/hm-spark/applications/find-taxi-top-routes-sql/src/utils/zone.py
+++ b/data-processing/hm-spark/applications/find-taxi-top-routes-sql/src/utils/zone.py
@@ -4,8 +4,8 @@
 def load_zones(spark: SparkSession, zone_data_path: str) -> DataFrame:
     return (
         spark.read.format("csv")
-        .option("inferSchema", True)
-        .option("header", True)
+        .option("inferSchema", True)  # noqa: FBT003
+        .option("header", True)  # noqa: FBT003
         .load(zone_data_path)
     )
diff --git a/data-processing/hm-spark/applications/find-taxi-top-routes/src/utils/zone.py b/data-processing/hm-spark/applications/find-taxi-top-routes/src/utils/zone.py
index ab1551c782..b987af095f 100644
--- a/data-processing/hm-spark/applications/find-taxi-top-routes/src/utils/zone.py
+++ b/data-processing/hm-spark/applications/find-taxi-top-routes/src/utils/zone.py
@@ -4,8 +4,8 @@
 def load_zones(spark: SparkSession, zone_data_path: str) -> DataFrame:
     return (
         spark.read.format("csv")
-        .option("inferSchema", True)
-        .option("header", True)
+        .option("inferSchema", True)  # noqa: FBT003
+        .option("header", True)  # noqa: FBT003
         .load(zone_data_path)
     )
diff --git a/data-visualization/iads/iads-data-manager/iads-config-reader/src/main.py b/data-visualization/iads/iads-data-manager/iads-config-reader/src/main.py
index aad264cb6c..c66455e5af 100644
--- a/data-visualization/iads/iads-data-manager/iads-config-reader/src/main.py
+++ b/data-visualization/iads/iads-data-manager/iads-config-reader/src/main.py
@@ -34,13 +34,13 @@ def process_config(iads_config_path: Path) -> None:
         iads_config = win32com.client.Dispatch("IadsConfigInterface.IadsConfig")
         show_version_from_file(iads_config, iads_config_path)
 
-        iads_config.Open(iads_config_path, True)
+        iads_config.Open(iads_config_path, True)  # noqa: FBT003
 
         execute_query(iads_config, "select * from Desktops")
         execute_query(iads_config, "select System.RowNumber from Desktops")
         execute_query(iads_config, "select Parameter from ParameterDefaults")
 
-        iads_config.Close(True)
+        iads_config.Close(True)  # noqa: FBT003
     except Exception:
         logger.exception("Failed to close IADS config")
     finally:
diff --git a/data-visualization/iads/iads-data-manager/iads-data-reader/src/utils/iads_util.py b/data-visualization/iads/iads-data-manager/iads-data-reader/src/utils/iads_util.py
index 088fcca43a..660a7a4a38 100644
--- a/data-visualization/iads/iads-data-manager/iads-data-reader/src/utils/iads_util.py
+++ b/data-visualization/iads/iads-data-manager/iads-data-reader/src/utils/iads_util.py
@@ -171,7 +171,7 @@ def get_iads_dataframe(
     # Get IADS config
     pythoncom.CoInitialize()
     iads_config = win32com.client.Dispatch("IadsConfigInterface.IadsConfig")
-    iads_config.Open(str(temp_iads_config_path), False)
+    iads_config.Open(str(temp_iads_config_path), False)  # noqa: FBT003
 
     # Get signals
     query = "select Parameter from ParameterDefaults"
diff --git a/hardware-in-the-loop/national-instruments/veristand/hm-veristand/src/main.py b/hardware-in-the-loop/national-instruments/veristand/hm-veristand/src/main.py
index d0fe20fa93..3b83da9ad2 100644
--- a/hardware-in-the-loop/national-instruments/veristand/hm-veristand/src/main.py
+++ b/hardware-in-the-loop/national-instruments/veristand/hm-veristand/src/main.py
@@ -18,7 +18,7 @@ def engine_demo_basic(engine_power, desired_rpm) -> None:  # noqa: ANN001
 
 @nivs_rt_sequence
 def run_engine_demo() -> None:
-    engine_demo_basic(BooleanValue(True), DoubleValue(2500))
+    engine_demo_basic(BooleanValue(True), DoubleValue(2500))  # noqa: FBT003
 
 
 def run_non_deterministic() -> None:
diff --git a/hm-xxhash/src/main.py b/hm-xxhash/src/main.py
index 3aaf6d58e9..ee9e1984f2 100644
--- a/hm-xxhash/src/main.py
+++ b/hm-xxhash/src/main.py
@@ -7,14 +7,14 @@
 
 
 def get_file_xxh128(file_path: Path) -> str:
-    hash = xxhash.xxh128()
+    xxh128_hash = xxhash.xxh128()
     with open(file_path, "rb") as file:
         while True:
             data = file.read(8192)  # Read 8192 bytes at a time to use less memory
             if not data:
                 break
-            hash.update(data)
-    return hash.hexdigest()
+            xxh128_hash.update(data)
+    return xxh128_hash.hexdigest()
 
 
 def main() -> None:
diff --git a/machine-learning/convolutional-neural-network/src/train.py b/machine-learning/convolutional-neural-network/src/train.py
index cb25b3de57..404f02b187 100644
--- a/machine-learning/convolutional-neural-network/src/train.py
+++ b/machine-learning/convolutional-neural-network/src/train.py
@@ -13,7 +13,7 @@ def train(
     net.train()
     running_loss = 0.0
 
-    for i, data in enumerate(data_loader, 0):
+    for _i, data in enumerate(data_loader, 0):
         inputs, labels = data
 
         inputs = inputs.to(device)
diff --git a/machine-learning/graph-neural-network/src/main.py b/machine-learning/graph-neural-network/src/main.py
index c58be0eac8..4ba860c98b 100644
--- a/machine-learning/graph-neural-network/src/main.py
+++ b/machine-learning/graph-neural-network/src/main.py
@@ -23,7 +23,7 @@ def train(
     model.train()
     total_loss = 0
 
-    for step, batch in enumerate(tqdm(loader, desc="Iteration")):
+    for _step, batch in enumerate(tqdm(loader, desc="Iteration")):
         device_batch = batch.to(device)
 
         if device_batch.x.shape[0] == 1 or device_batch.batch[-1] == 0:
@@ -60,7 +60,7 @@ def evaluate(
     y_true = []
    y_pred = []
 
-    for step, batch in enumerate(tqdm(loader, desc="Iteration")):
+    for _step, batch in enumerate(tqdm(loader, desc="Iteration")):
         device_batch = batch.to(device)
 
         if device_batch.x.shape[0] == 1:
diff --git a/machine-learning/graph-neural-network/src/model/conv.py b/machine-learning/graph-neural-network/src/model/conv.py
index 58a032666c..f904879ddf 100644
--- a/machine-learning/graph-neural-network/src/model/conv.py
+++ b/machine-learning/graph-neural-network/src/model/conv.py
@@ -185,7 +185,7 @@ def __init__(
         # List of MLPs to transform virtual node at every layer
         self.mlp_virtualnode_list = torch.nn.ModuleList()
 
-        for layer in range(num_layer):
+        for _layer in range(num_layer):
             if gnn_type == "gin":
                 self.convs.append(GINConv(emb_dim))
             elif gnn_type == "gcn":
@@ -196,7 +196,7 @@ def __init__(
 
             self.batch_norms.append(torch.nn.BatchNorm1d(emb_dim))
 
-        for layer in range(num_layer - 1):
+        for _layer in range(num_layer - 1):
             self.mlp_virtualnode_list.append(
                 torch.nn.Sequential(
                     torch.nn.Linear(emb_dim, 2 * emb_dim),
diff --git a/machine-learning/hm-docling/src/main.py b/machine-learning/hm-docling/src/main.py
index fae9c99d97..510f948fbd 100644
--- a/machine-learning/hm-docling/src/main.py
+++ b/machine-learning/hm-docling/src/main.py
@@ -41,8 +41,8 @@ def main() -> None:
             markdown_path = pdf_path.with_suffix(".md")
             markdown_path.write_text(markdown_content, encoding="utf-8")
             logger.info(f"Converted {pdf_path.name}")
-        except Exception as e:
-            logger.info(f"Error processing PDFs: {e}")
+        except Exception:
+            logger.exception("Error processing PDFs")
 
 
 if __name__ == "__main__":
diff --git a/machine-learning/hm-gradio/applications/classify-image/src/main.py b/machine-learning/hm-gradio/applications/classify-image/src/main.py
index 8251c7fdc2..e2b8ac5d22 100644
--- a/machine-learning/hm-gradio/applications/classify-image/src/main.py
+++ b/machine-learning/hm-gradio/applications/classify-image/src/main.py
@@ -13,10 +13,10 @@ def main() -> None:
     )
     labels = res.text.split("\n")
 
-    def predict(input: torch.Tensor) -> dict[str, float]:
-        input = transforms.ToTensor()(input).unsqueeze(0)
+    def predict(image_tensor: torch.Tensor) -> dict[str, float]:
+        image_tensor = transforms.ToTensor()(image_tensor).unsqueeze(0)
         with torch.no_grad():
-            prediction = torch.nn.functional.softmax(model(input)[0], dim=0)
+            prediction = torch.nn.functional.softmax(model(image_tensor)[0], dim=0)
         confidences = {labels[i]: float(prediction[i]) for i in range(1000)}
         return confidences