Skip to content

Commit

Permalink
F841 and Post-PR
Browse files Browse the repository at this point in the history
  • Loading branch information
mmacata committed Nov 27, 2024
1 parent 4a29419 commit 8c9f97a
Show file tree
Hide file tree
Showing 11 changed files with 31 additions and 26 deletions.
7 changes: 6 additions & 1 deletion .github/workflows/linting.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Linting and code quality check
name: Linting and code quality check and post PR code suggestions

on: [push, pull_request]

Expand All @@ -9,3 +9,8 @@ jobs:
# set pylint-version to empty string to skip the pylint workflow
pylint-version: ''
BASH_SEVERITY: 'warning'

post-pr-reviews:
needs: lint
if: ${{ needs.lint.result == 'failure' }}
uses: mundialis/github-workflows/.github/workflows/post-pr-reviews.yml@main
Original file line number Diff line number Diff line change
Expand Up @@ -827,7 +827,7 @@ def get_sentinel_urls(self, product_ids, bands=None):
# The whole XML content is returned as well
(
gml,
xml_metadata,
_,
bbox,
) = self._generate_sentinel2_footprint(base_url=base_url)
result[product_id]["gml_footprint"] = gml
Expand Down
4 changes: 2 additions & 2 deletions src/actinia_core/core/common/process_chain.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,7 @@ def _get_landsat_import_download_commands(self, entry):
scene_id=scene,
)

download_commands, import_file_info = lp.get_download_process_list()
download_commands, _ = lp.get_download_process_list()
import_commands = lp.get_import_process_list()
atcor_commands = lp.get_i_landsat_toar_process_list(atcor)
landsat_commands = download_commands
Expand Down Expand Up @@ -332,7 +332,7 @@ def _get_sentinel_import_command(self, entry):
import_commands = sp.get_sentinel2_import_process_list()
sentinel_commands.extend(import_commands)

input_file, map_name = import_file_info[band]
_, map_name = import_file_info[band]
p = Process(
exec_type="grass",
executable="g.rename",
Expand Down
2 changes: 1 addition & 1 deletion src/actinia_core/core/common/process_queue.py
Original file line number Diff line number Diff line change
Expand Up @@ -247,7 +247,7 @@ def check_exit(self):
)

if response_data is not None:
http_code, response_model = pickle.loads(response_data)
_, response_model = pickle.loads(response_data)
if (
response_model["status"] != "error"
and response_model["status"] != "terminated"
Expand Down
2 changes: 1 addition & 1 deletion src/actinia_core/core/grass_init.py
Original file line number Diff line number Diff line change
Expand Up @@ -599,7 +599,7 @@ def setup_tmp_region(self):
"""
# Save the current region in a temporary region that can be overwritten
errorid, stdout_buff, stderr_buff = self.run_module(
errorid, _, _ = self.run_module(
"g.region", ["save=%s" % self.tmp_region_name, "--o"]
)

Expand Down
6 changes: 3 additions & 3 deletions src/actinia_core/core/resources_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def commit(
user_id, resource_id, iteration
)
redis_return = bool(self.db.set(db_resource_id, document, expiration))
http_code, data = pickle.loads(document)
_, data = pickle.loads(document)
data["logger"] = "resources_logger"
self.send_to_logger("RESOURCE_LOG", data)
return redis_return
Expand Down Expand Up @@ -232,7 +232,7 @@ def get_user_resources(self, user_id):

if resource_list_pickled:
for entry in resource_list_pickled:
http_code, data = pickle.loads(entry)
_, data = pickle.loads(entry)
resource_list.append(data)

return resource_list
Expand All @@ -251,7 +251,7 @@ def get_all_resources(self):

if resource_list_pickled:
for entry in resource_list_pickled:
http_code, data = pickle.loads(entry)
_, data = pickle.loads(entry)
resource_list.append(data)

return resource_list
Expand Down
2 changes: 1 addition & 1 deletion src/actinia_core/core/stac_exporter_interface.py
Original file line number Diff line number Diff line change
Expand Up @@ -277,7 +277,7 @@ def _set_processing_extention(item):
def _set_raster_extention(raster_path, item):
with rasterio.open(raster_path) as raster:
band = raster.read(1)
pixelSizeX, pixelSizeY = raster.res
pixelSizeX, _ = raster.res

nodata = np.count_nonzero(np.isnan(band))
spatial_resolution = pixelSizeX
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -440,7 +440,7 @@ def _export_resources(self, use_raster_region=False):
resource["export"]["format"],
)
self._send_resource_update(message)
output_name, output_path = self._export_raster(
_, output_path = self._export_raster(
raster_name=file_name,
format=resource["export"]["format"],
use_raster_region=use_raster_region,
Expand Down Expand Up @@ -470,14 +470,14 @@ def _export_resources(self, use_raster_region=False):
resource["export"]["format"],
)
self._send_resource_update(message)
output_name, output_path = self._export_vector(
_, output_path = self._export_vector(
vector_name=file_name,
format=resource["export"]["format"],
)
elif output_type == "file":
file_name = resource["file_name"]
tmp_file = resource["tmp_file"]
output_name, output_path = self._export_file(
_, output_path = self._export_file(
tmp_file=tmp_file, file_name=file_name
)
elif output_type == "strds":
Expand All @@ -486,7 +486,7 @@ def _export_resources(self, use_raster_region=False):
resource["export"]["format"],
)
self._send_resource_update(message)
output_name, output_path = self._export_strds(
_, output_path = self._export_strds(
strds_name=file_name,
format=resource["export"]["format"],
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -508,7 +508,7 @@ def _post_to_webhook(self, document, type):
webhook_retries = 1
webhook_sleep = 0

http_code, response_model = pickle.loads(document)
_, response_model = pickle.loads(document)

webhook_not_reached = True
retry = 0
Expand Down Expand Up @@ -1239,7 +1239,7 @@ def _check_pixellimit_rimport(self, process_executable_params):
# if extent=region set, vrt only for region, not complete input
if extent_region:
# first query region extents
errorid, stdout_gregion, stderr_gregion = self.ginit.run_module(
errorid, stdout_gregion, _ = self.ginit.run_module(
"g.region", ["-ug"]
)
if errorid != 0:
Expand All @@ -1259,13 +1259,13 @@ def _check_pixellimit_rimport(self, process_executable_params):
# build vrt with previous defined parameters
(
errorid,
stdout_gdalbuildvrt,
stderr_gdalbuildvrt,
_,
_,
) = self.ginit.run_module("/usr/bin/gdalbuildvrt", gdabuildvrt_params)

# gdalinfo for created vrt
gdalinfo_params = [vrt_out]
errorid, stdout_gdalinfo, stderr_gdalinfo = self.ginit.run_module(
errorid, stdout_gdalinfo, _ = self.ginit.run_module(
"/usr/bin/gdalinfo", gdalinfo_params
)
# parse "Size" output of gdalinfo
Expand All @@ -1285,7 +1285,7 @@ def _check_pixellimit_rimport(self, process_executable_params):
# If raster exceeds cell limit already in original resolution, next part can be skipped
if rimport_res and (rastersize < self.cell_limit):
# determine estimated resolution
errorid, stdout_estres, stderr_estres = self.ginit.run_module(
errorid, _, stderr_estres = self.ginit.run_module(
"r.import", [vrt_out, "-e"]
)
if "Estimated" in stderr_estres:
Expand Down Expand Up @@ -1321,7 +1321,7 @@ def _check_pixellimit_rimport(self, process_executable_params):
(
errorid,
stdout_gregion,
stderr_gregion,
_,
) = self.ginit.run_module("g.region", ["-ug"])
res_val_ns = float(
[x for x in stdout_gregion.split("\n") if "nsres=" in x][
Expand Down Expand Up @@ -1367,7 +1367,7 @@ def _check_reset_region(self):
if self.skip_region_check is True:
return

errorid, stdout_buff, stderr_buff = self.ginit.run_module(
errorid, stdout_buff, _ = self.ginit.run_module(
"g.region", ["-ug"]
)

Expand Down Expand Up @@ -1691,7 +1691,7 @@ def _run_executable(self, process, poll_time=0.005):
for i in range(len(process.executable_params)):
param = process.executable_params[i]
if func_name in param:
par, val = param.split("=", 1)
_, val = param.split("=", 1)
par_val = func().strip()
val_splitted = val.split(func_name)
for j in range(1, len(val_splitted)):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ def __init__(self, *args):
def _execute(self):
self._setup()

args, layer_type = self.data
_, layer_type = self.data
self.required_mapsets.append(self.target_mapset_name)

# List format must be
Expand Down
4 changes: 2 additions & 2 deletions src/actinia_core/rest/process_chain_monitoring.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,7 +288,7 @@ def get(self, user_id, resource_id):
response_data = self.resource_logger.get(user_id, resource_id)

if response_data is not None:
http_code, pc_response_model = pickle.loads(response_data)
_, pc_response_model = pickle.loads(response_data)

pc_status = pc_response_model["status"]
if pc_status in ["accepted", "running"]:
Expand Down Expand Up @@ -369,7 +369,7 @@ def get(self, user_id, resource_id):
response_data = self.resource_logger.get(user_id, resource_id)

if response_data is not None:
http_code, pc_response_model = pickle.loads(response_data)
_, pc_response_model = pickle.loads(response_data)

pc_status = pc_response_model["status"]
if pc_status in ["accepted", "running"]:
Expand Down

0 comments on commit 8c9f97a

Please sign in to comment.