From 2c814ef2fa1df7ac76671b82908a5f23f5a60c6a Mon Sep 17 00:00:00 2001 From: Marek Fiala Date: Tue, 23 Jul 2024 15:59:09 +0200 Subject: [PATCH] feat(tools): Enforce utf-8 encoding with open() function --- components/efuse/efuse_table_gen.py | 10 ++++---- .../main/gen_digital_signature_tests.py | 2 +- .../check_system_init_priorities.py | 4 ++-- components/espcoredump/espcoredump.py | 3 +-- .../key_manager/gen_key_manager_test_cases.py | 2 +- components/partition_table/gen_esp32part.py | 2 +- .../partition_table/gen_extra_subtypes_inc.py | 3 +-- components/partition_table/parttool.py | 2 +- components/ulp/esp32ulp_mapgen.py | 8 +++---- conftest.py | 2 +- .../esp_local_ctrl/pytest_esp_local_ctrl.py | 3 +-- .../simple/pytest_https_server_simple.py | 2 +- .../protocols/mqtt/ssl/pytest_mqtt_ssl.py | 7 +++--- .../hmac_soft_jtag/jtag_example_helper.py | 2 +- .../hmac_soft_jtag/pytest_jtag_example.py | 2 +- .../semihost_vfs/pytest_semihost_vfs.py | 4 ++-- .../spiffsgen/pytest_spiffsgen_example.py | 2 +- .../app_trace_basic/pytest_app_trace_basic.py | 4 ++-- .../system/app_trace_to_plot/read_trace.py | 14 +++++------ examples/system/efuse/conftest.py | 2 +- .../native_ota_example/pytest_native_ota.py | 2 +- .../partitions_ota/pytest_partitions_ota.py | 4 ++-- .../simple_ota_example/pytest_simple_ota.py | 8 +++---- .../sysview_tracing/pytest_sysview_tracing.py | 2 +- .../pytest_sysview_tracing_heap_log.py | 2 +- tools/check_python_dependencies.py | 4 ++-- tools/export_utils/shell_types.py | 12 +++++----- .../gen_soc_caps_kconfig.py | 6 ++--- tools/idf.py | 4 ++-- tools/idf_py_actions/create_ext.py | 2 +- tools/idf_py_actions/debug_ext.py | 12 +++++----- tools/idf_py_actions/qemu_ext.py | 2 +- tools/idf_py_actions/serial_ext.py | 4 ++-- tools/idf_py_actions/tools.py | 12 +++++----- tools/idf_tools.py | 24 +++++++++---------- tools/install_util.py | 7 ++---- tools/ldgen/ldgen.py | 2 +- tools/mass_mfg/mfg_gen.py | 24 +++++++++---------- tools/mkdfu.py | 7 ++---- tools/mkuf2.py | 19 +++++++-------- 40 files changed, 115 insertions(+), 124 deletions(-) diff --git a/components/efuse/efuse_table_gen.py b/components/efuse/efuse_table_gen.py index fc3b865ef31..4cf06452ee9 100755 --- a/components/efuse/efuse_table_gen.py +++ b/components/efuse/efuse_table_gen.py @@ -126,13 +126,13 @@ def verify_duplicate_name(self): field_name = p.field_name + p.group if field_name != '' and len(duplicates.intersection([field_name])) != 0: fl_error = True - print('Field at %s, %s, %s, %s have dublicate field_name' % + print('Field at %s, %s, %s, %s have duplicate field_name' % (p.field_name, p.efuse_block, p.bit_start, p.bit_count)) if fl_error is True: raise InputError('Field names must be unique') def check_struct_field_name(self): - # check that stuctured fields have a root field + # check that structured fields have a root field for p in self: if '.' 
in p.field_name: name = '' @@ -454,7 +454,7 @@ def process_input_file(file, type_table): def ckeck_md5_in_file(md5, filename): if os.path.exists(filename): - with open(filename, 'r') as f: + with open(filename, 'r', encoding='utf-8') as f: for line in f: if md5 in line: return True @@ -478,12 +478,12 @@ def create_output_files(name, output_table, debug): if ckeck_md5_in_file(output_table.md5_digest_table, file_c_path) is False: status('Creating efuse *.h file ' + file_h_path + ' ...') output = output_table.to_header(file_name) - with open(file_h_path, 'w') as f: + with open(file_h_path, 'w', encoding='utf-8') as f: f.write(output) status('Creating efuse *.c file ' + file_c_path + ' ...') output = output_table.to_c_file(file_name, debug) - with open(file_c_path, 'w') as f: + with open(file_c_path, 'w', encoding='utf-8') as f: f.write(output) else: print('Source files do not require updating correspond to csv file.') diff --git a/components/esp_security/test_apps/crypto_drivers/main/gen_digital_signature_tests.py b/components/esp_security/test_apps/crypto_drivers/main/gen_digital_signature_tests.py index 444fc891dea..91b236dd3b5 100644 --- a/components/esp_security/test_apps/crypto_drivers/main/gen_digital_signature_tests.py +++ b/components/esp_security/test_apps/crypto_drivers/main/gen_digital_signature_tests.py @@ -69,7 +69,7 @@ def generate_tests_cases(target): # type: (str) -> None messages = [random.randrange(0, 1 << max_key_size) for x in range(NUM_MESSAGES)] - with open('digital_signature_test_cases.h', 'w') as f: + with open('digital_signature_test_cases.h', 'w', encoding='utf-8') as f: f.write('/*\n') year = datetime.datetime.now().year f.write(' * SPDX-FileCopyrightText: {year} Espressif Systems (Shanghai) CO LTD\n'.format(year=year)) diff --git a/components/esp_system/check_system_init_priorities.py b/components/esp_system/check_system_init_priorities.py index 97c9585d416..29f420cf81c 100644 --- a/components/esp_system/check_system_init_priorities.py +++ b/components/esp_system/check_system_init_priorities.py @@ -50,7 +50,7 @@ def main() -> None: glob_iter = glob.glob(os.path.join(idf_path, 'components', '**', f'*.{extension}'), recursive=True) source_files_iters.append(glob_iter) for filename in itertools.chain(*source_files_iters): - with open(filename, 'r') as f_obj: + with open(filename, 'r', encoding='utf-8') as f_obj: file_contents = f_obj.read() if ESP_SYSTEM_INIT_FN_STR not in file_contents: continue @@ -88,7 +88,7 @@ def sort_key(entry: StartupEntry) -> typing.Tuple[str, int, str]: # 3. Load startup entries list from STARTUP_ENTRIES_FILE, removing comments and empty lines # startup_entries_expected_lines = [] - with open(os.path.join(idf_path, STARTUP_ENTRIES_FILE), 'r') as startup_entries_expected_file: + with open(os.path.join(idf_path, STARTUP_ENTRIES_FILE), 'r', encoding='utf-8') as startup_entries_expected_file: for line in startup_entries_expected_file: if line.startswith('#') or len(line.strip()) == 0: continue diff --git a/components/espcoredump/espcoredump.py b/components/espcoredump/espcoredump.py index b17f234b56b..5bb35dd96e3 100755 --- a/components/espcoredump/espcoredump.py +++ b/components/espcoredump/espcoredump.py @@ -4,7 +4,6 @@ # # SPDX-License-Identifier: Apache-2.0 # - import json import logging import os.path @@ -26,7 +25,7 @@ def get_prefix_map_gdbinit_path(prog_path): # type: (str) -> Any logging.warning('%s does not exist. 
Please build the app with "idf.py build"', desc_path) return '' - with open(desc_path, 'r') as f: + with open(desc_path, 'r', encoding='utf-8') as f: project_desc = json.load(f) return project_desc.get('debug_prefix_map_gdbinit') diff --git a/components/hal/test_apps/crypto/main/key_manager/gen_key_manager_test_cases.py b/components/hal/test_apps/crypto/main/key_manager/gen_key_manager_test_cases.py index fb2419e716b..053c2e41a2e 100644 --- a/components/hal/test_apps/crypto/main/key_manager/gen_key_manager_test_cases.py +++ b/components/hal/test_apps/crypto/main/key_manager/gen_key_manager_test_cases.py @@ -138,7 +138,7 @@ def write_to_c_header(init_key: bytes, k1: bytes, k2_info: bytes, k1_encrypted_3 test_data_xts_aes_128: list, k1_encrypted_64: list, xts_test_data_xts_aes_256: list, pubx: bytes, puby: bytes, k1_G_0: bytes, k1_G_1: bytes) -> None: - with open('key_manager_test_cases.h', 'w') as file: + with open('key_manager_test_cases.h', 'w', encoding='utf-8') as file: header_content = """#include #define TEST_COUNT 5 diff --git a/components/partition_table/gen_esp32part.py b/components/partition_table/gen_esp32part.py index 029ac4104eb..cf6993b9ae4 100755 --- a/components/partition_table/gen_esp32part.py +++ b/components/partition_table/gen_esp32part.py @@ -676,7 +676,7 @@ def main(): if input_is_binary: output = table.to_csv() - with sys.stdout if args.output == '-' else open(args.output, 'w') as f: + with sys.stdout if args.output == '-' else open(args.output, 'w', encoding='utf-8') as f: f.write(output) else: output = table.to_binary() diff --git a/components/partition_table/gen_extra_subtypes_inc.py b/components/partition_table/gen_extra_subtypes_inc.py index 45e4751a473..59814bfa2fa 100755 --- a/components/partition_table/gen_extra_subtypes_inc.py +++ b/components/partition_table/gen_extra_subtypes_inc.py @@ -1,14 +1,13 @@ #!/usr/bin/env python # SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 - import argparse def gen_header_file(path: str, subtypes: str) -> None: HDR_MESSAGE = '/* Automatically generated file. DO NOT EDIT. */\n\n' PARTTOOL_USAGE = 'If you want to use parttool.py manually, please use the following as an extra argument:' - with open(path, 'w') as f: + with open(path, 'w', encoding='utf-8') as f: f.write(HDR_MESSAGE) if subtypes: f.write('/*\n\t' + PARTTOOL_USAGE + '\n\t') diff --git a/components/partition_table/parttool.py b/components/partition_table/parttool.py index 03ef8920580..6751e879e01 100755 --- a/components/partition_table/parttool.py +++ b/components/partition_table/parttool.py @@ -92,7 +92,7 @@ def parse_esptool_args(esptool_args): partition_table = gen.PartitionTable.from_binary(f.read()) if partition_table is None: - with open(partition_table_file, 'r') as f: + with open(partition_table_file, 'r', encoding='utf-8') as f: f.seek(0) partition_table = gen.PartitionTable.from_csv(f.read()) else: diff --git a/components/ulp/esp32ulp_mapgen.py b/components/ulp/esp32ulp_mapgen.py index 90b8535aaee..7d806cec206 100755 --- a/components/ulp/esp32ulp_mapgen.py +++ b/components/ulp/esp32ulp_mapgen.py @@ -1,9 +1,9 @@ #!/usr/bin/env python -# esp32ulp_mapgen utility converts a symbol list provided by nm into an export script -# for the linker and a header file. -# # SPDX-FileCopyrightText: 2016-2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 +# +# esp32ulp_mapgen utility converts a symbol list provided by nm into an export script +# for the linker and a header file. 
import argparse import os import textwrap @@ -64,7 +64,7 @@ def main() -> None: args = parser.parse_args() - with open(args.outputfile + '.h', 'w') as f_h, open(args.outputfile + '.ld', 'w') as f_ld: + with open(args.outputfile + '.h', 'w', encoding='utf-8') as f_h, open(args.outputfile + '.ld', 'w', encoding='utf-8') as f_ld: gen_ld_h_from_sym(args.symfile, f_ld, f_h, int(args.base_addr, 0)) diff --git a/conftest.py b/conftest.py index fe53fe82ad5..e14b0d200ee 100644 --- a/conftest.py +++ b/conftest.py @@ -316,7 +316,7 @@ def real_func(item: str, value: float, target: str) -> None: """ def _find_perf_item(operator: str, path: str) -> float: - with open(path) as f: + with open(path, encoding='utf-8') as f: data = f.read() match = re.search(fr'#define\s+IDF_PERFORMANCE_{operator}_{item.upper()}\s+([\d.]+)', data) return float(match.group(1)) # type: ignore diff --git a/examples/protocols/esp_local_ctrl/pytest_esp_local_ctrl.py b/examples/protocols/esp_local_ctrl/pytest_esp_local_ctrl.py index aaa5ace52aa..bfd59b66cb5 100644 --- a/examples/protocols/esp_local_ctrl/pytest_esp_local_ctrl.py +++ b/examples/protocols/esp_local_ctrl/pytest_esp_local_ctrl.py @@ -1,6 +1,5 @@ # SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Unlicense OR CC0-1.0 - import logging import os import re @@ -22,7 +21,7 @@ def get_sdk_path() -> str: class CustomProcess(object): def __init__(self, cmd: str, logfile: str, verbose:bool =True) -> None: self.verbose = verbose - self.f = open(logfile, 'w') + self.f = open(logfile, 'w', encoding='utf-8') if self.verbose: logging.info('Starting {} > {}'.format(cmd, self.f.name)) self.pexpect_proc = pexpect.spawn(cmd, timeout=60, logfile=self.f, encoding='utf-8', codec_errors='ignore') diff --git a/examples/protocols/https_server/simple/pytest_https_server_simple.py b/examples/protocols/https_server/simple/pytest_https_server_simple.py index b86fafde337..98179cfca77 100644 --- a/examples/protocols/https_server/simple/pytest_https_server_simple.py +++ b/examples/protocols/https_server/simple/pytest_https_server_simple.py @@ -132,7 +132,7 @@ def test_examples_protocol_https_server_simple(dut: Dut) -> None: ssl_context.check_hostname = False ssl_context.load_verify_locations(cadata=server_cert_pem) - with open(CLIENT_CERT_FILE, 'w') as cert, open(CLIENT_KEY_FILE, 'w') as key: + with open(CLIENT_CERT_FILE, 'w', encoding='utf-8') as cert, open(CLIENT_KEY_FILE, 'w', encoding='utf-8') as key: cert.write(client_cert_pem) key.write(client_key_pem) diff --git a/examples/protocols/mqtt/ssl/pytest_mqtt_ssl.py b/examples/protocols/mqtt/ssl/pytest_mqtt_ssl.py index c16b89cbb1a..d27cc65b782 100644 --- a/examples/protocols/mqtt/ssl/pytest_mqtt_ssl.py +++ b/examples/protocols/mqtt/ssl/pytest_mqtt_ssl.py @@ -1,11 +1,12 @@ -# SPDX-FileCopyrightText: 2022 Espressif Systems (Shanghai) CO LTD +# SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Unlicense OR CC0-1.0 import logging import os import re import ssl import sys -from threading import Event, Thread +from threading import Event +from threading import Thread import paho.mqtt.client as mqtt import pexpect @@ -47,7 +48,7 @@ def on_message(client, userdata, msg): # type: (mqtt.Client, tuple, mqtt.client event_client_received_binary.set() return recv_binary = binary + '.received' - with open(recv_binary, 'w') as fw: + with open(recv_binary, 'w', encoding='utf-8') as fw: fw.write(msg.payload) raise ValueError('Received binary (saved as: {}) does not match 
the original file: {}'.format(recv_binary, binary)) diff --git a/examples/security/hmac_soft_jtag/jtag_example_helper.py b/examples/security/hmac_soft_jtag/jtag_example_helper.py index 1674245f898..ad7eef24645 100644 --- a/examples/security/hmac_soft_jtag/jtag_example_helper.py +++ b/examples/security/hmac_soft_jtag/jtag_example_helper.py @@ -21,7 +21,7 @@ def generate_token_data(hmac_key_file: str, output_file: Optional[str] = None) - with open(output_file, 'wb') as out_file: out_file.write(token_data) elif output_file.endswith('.hex'): - with open(output_file, 'w') as out_file: + with open(output_file, 'w', encoding='utf-8') as out_file: out_file.write(token_hex) else: print(f'Unsupported file format for output file: {output_file}') diff --git a/examples/security/hmac_soft_jtag/pytest_jtag_example.py b/examples/security/hmac_soft_jtag/pytest_jtag_example.py index a8e55104108..01b90c6251a 100644 --- a/examples/security/hmac_soft_jtag/pytest_jtag_example.py +++ b/examples/security/hmac_soft_jtag/pytest_jtag_example.py @@ -11,7 +11,7 @@ def run_gdb_test(dut: IdfDut) -> None: - with open(os.path.join(dut.logdir, 'ocd.txt'), 'w') as ocd_log, \ + with open(os.path.join(dut.logdir, 'ocd.txt'), 'w', encoding='utf-8') as ocd_log, \ pexpect.spawn(f'openocd -f board/esp32c6-builtin.cfg', timeout=60, logfile=ocd_log, diff --git a/examples/storage/semihost_vfs/pytest_semihost_vfs.py b/examples/storage/semihost_vfs/pytest_semihost_vfs.py index 90af90e29e2..5317331ceb0 100644 --- a/examples/storage/semihost_vfs/pytest_semihost_vfs.py +++ b/examples/storage/semihost_vfs/pytest_semihost_vfs.py @@ -47,7 +47,7 @@ def test_semihost_vfs(dut: IdfDut) -> None: dut.expect_exact('example: Wrote 2776 bytes') dut.expect_exact('====================== HOST DATA START =========================') - with open(HOST_FILE_PATH) as f: + with open(HOST_FILE_PATH, encoding='utf-8') as f: for line in f: if line.strip(): dut.expect_exact(line.strip()) @@ -55,7 +55,7 @@ def test_semihost_vfs(dut: IdfDut) -> None: dut.expect_exact('====================== HOST DATA END =========================') dut.expect_exact('example: Read 6121 bytes') - with open(os.path.join(TEMP_DIR, 'esp32_stdout.txt')) as f: + with open(os.path.join(TEMP_DIR, 'esp32_stdout.txt'), encoding='utf-8') as f: def expected_content() -> t.Iterator[str]: yield 'example: Switched to semihosted stdout' diff --git a/examples/storage/spiffsgen/pytest_spiffsgen_example.py b/examples/storage/spiffsgen/pytest_spiffsgen_example.py index 6b6e4fa243b..1394cd2f52f 100644 --- a/examples/storage/spiffsgen/pytest_spiffsgen_example.py +++ b/examples/storage/spiffsgen/pytest_spiffsgen_example.py @@ -14,7 +14,7 @@ def test_spiffsgen_example(dut: Dut) -> None: base_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'spiffs_image') # Expect hello.txt is read successfully - with open(os.path.join(base_dir, 'hello.txt'), 'r') as hello_txt: + with open(os.path.join(base_dir, 'hello.txt'), 'r', encoding='utf-8') as hello_txt: dut.expect('Read from hello.txt: ' + hello_txt.read().rstrip()) # Expect alice.txt MD5 hash is computed accurately diff --git a/examples/system/app_trace_basic/pytest_app_trace_basic.py b/examples/system/app_trace_basic/pytest_app_trace_basic.py index 2d7d18241a4..47214bc94a0 100644 --- a/examples/system/app_trace_basic/pytest_app_trace_basic.py +++ b/examples/system/app_trace_basic/pytest_app_trace_basic.py @@ -46,7 +46,7 @@ def test_examples_app_trace_basic(dut: IdfDut, openocd: OpenOcd) -> None: assert 'Targets connected.' 
in dut.openocd.write('esp apptrace start file://apptrace.log 0 2000 3 0 0') apptrace_wait_stop(dut.openocd) - with open(openocd._logfile) as oocd_log: # pylint: disable=protected-access + with open(openocd._logfile, encoding='utf-8') as oocd_log: # pylint: disable=protected-access cores = 1 if dut.app.sdkconfig.get('ESP_SYSTEM_SINGLE_CORE_MODE') is True else 2 params_str = 'App trace params: from {} cores,'.format(cores) found = False @@ -59,7 +59,7 @@ def test_examples_app_trace_basic(dut: IdfDut, openocd: OpenOcd) -> None: '"{}" could not be found in {}'.format(params_str, openocd._logfile) # pylint: disable=protected-access ) - with open('apptrace.log') as apptrace_log: + with open('apptrace.log', encoding='utf-8') as apptrace_log: for sample_num in range(1, 51): log_str = 'Apptrace test data[{}]:{}'.format(sample_num, sample_num * sample_num) found = False diff --git a/examples/system/app_trace_to_plot/read_trace.py b/examples/system/app_trace_to_plot/read_trace.py index 660348cea22..50c90d2232b 100644 --- a/examples/system/app_trace_to_plot/read_trace.py +++ b/examples/system/app_trace_to_plot/read_trace.py @@ -1,6 +1,5 @@ -# SPDX-FileCopyrightText: 2023 Espressif Systems (Shanghai) CO LTD +# SPDX-FileCopyrightText: 2023-2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 - import argparse import datetime import json @@ -9,7 +8,8 @@ import sys from enum import Enum from functools import partial -from typing import Any, List +from typing import Any +from typing import List try: import espytrace.apptrace @@ -47,7 +47,7 @@ class States(Enum): html.Div([ html.H2('Telemetry Data'), html.Div(id='live-update-data'), - dcc.Graph(id='live-update-graph', style={'height': 800}), # Height of the plotting area setted to 800px + dcc.Graph(id='live-update-graph', style={'height': 800}), # Height of the plotting area set to 800px dcc.Interval( id='interval-component', interval=5 * 100, # Graph will be updated every 500 ms @@ -57,7 +57,7 @@ class States(Enum): ) -# Multiple components can update everytime interval gets fired. +# Multiple components can update every time interval gets fired. 
@app.callback(Output('live-update-graph', 'figure'), Input('interval-component', 'n_intervals')) def update_graph_live(_n: Any) -> Any: # pylint: disable=undefined-argument @@ -162,13 +162,13 @@ def handle(self) -> None: def read_json(file_path: str) -> Any: - with open(file_path, 'r') as f: + with open(file_path, 'r', encoding='utf-8') as f: data = json.load(f) return data def save_data(file_path: str) -> None: - with open(file_path, 'w') as f: + with open(file_path, 'w', encoding='utf-8') as f: f.writelines(output_lines) diff --git a/examples/system/efuse/conftest.py b/examples/system/efuse/conftest.py index 179715fd085..85e27c14895 100644 --- a/examples/system/efuse/conftest.py +++ b/examples/system/efuse/conftest.py @@ -99,7 +99,7 @@ def get_efuse_offset(self, efuse_name: str) -> Any: with tempfile.NamedTemporaryFile(suffix='.json') as temp_file: temp_file_path = temp_file.name espefuse.main(f'--virt -c {self.target} summary --format json --file {temp_file_path}'.split()) - with open(temp_file_path, 'r') as file: + with open(temp_file_path, 'r', encoding='utf-8') as file: efuse_summary = json.load(file) if efuse_name in efuse_summary: data = efuse_summary[efuse_name] diff --git a/examples/system/ota/native_ota_example/pytest_native_ota.py b/examples/system/ota/native_ota_example/pytest_native_ota.py index 3c0fcfe6dbb..ebf5c6d1e36 100644 --- a/examples/system/ota/native_ota_example/pytest_native_ota.py +++ b/examples/system/ota/native_ota_example/pytest_native_ota.py @@ -68,7 +68,7 @@ def create_file(server_file: str, file_data: str) -> None: - with open(server_file, 'w+') as file: + with open(server_file, 'w+', encoding='utf-8') as file: file.write(file_data) diff --git a/examples/system/ota/partitions_ota/pytest_partitions_ota.py b/examples/system/ota/partitions_ota/pytest_partitions_ota.py index dae55ff5db1..f34820aadec 100644 --- a/examples/system/ota/partitions_ota/pytest_partitions_ota.py +++ b/examples/system/ota/partitions_ota/pytest_partitions_ota.py @@ -151,13 +151,13 @@ def start_https_server(ota_image_dir: str, server_ip: str, server_port: int, ser if server_file is None: server_file = os.path.join(ota_image_dir, 'server_cert.pem') - cert_file_handle = open(server_file, 'w+') + cert_file_handle = open(server_file, 'w+', encoding='utf-8') cert_file_handle.write(server_cert) cert_file_handle.close() if key_file is None: key_file = os.path.join(ota_image_dir, 'server_key.pem') - key_file_handle = open('server_key.pem', 'w+') + key_file_handle = open('server_key.pem', 'w+', encoding='utf-8') key_file_handle.write(server_key) key_file_handle.close() diff --git a/examples/system/ota/simple_ota_example/pytest_simple_ota.py b/examples/system/ota/simple_ota_example/pytest_simple_ota.py index bbc7d27be62..861f2525b68 100644 --- a/examples/system/ota/simple_ota_example/pytest_simple_ota.py +++ b/examples/system/ota/simple_ota_example/pytest_simple_ota.py @@ -80,13 +80,13 @@ def start_https_server(ota_image_dir: str, server_ip: str, server_port: int, ser if server_file is None: server_file = os.path.join(ota_image_dir, 'server_cert.pem') - cert_file_handle = open(server_file, 'w+') + cert_file_handle = open(server_file, 'w+', encoding='utf-8') cert_file_handle.write(server_cert) cert_file_handle.close() if key_file is None: key_file = os.path.join(ota_image_dir, 'server_key.pem') - key_file_handle = open('server_key.pem', 'w+') + key_file_handle = open('server_key.pem', 'w+', encoding='utf-8') key_file_handle.write(server_key) key_file_handle.close() @@ -102,12 +102,12 @@ def 
start_https_server(ota_image_dir: str, server_ip: str, server_port: int, ser def start_tls1_3_server(ota_image_dir: str, server_port: int) -> subprocess.Popen: os.chdir(ota_image_dir) server_file = os.path.join(ota_image_dir, 'server_cert.pem') - cert_file_handle = open(server_file, 'w+') + cert_file_handle = open(server_file, 'w+', encoding='utf-8') cert_file_handle.write(server_cert) cert_file_handle.close() key_file = os.path.join(ota_image_dir, 'server_key.pem') - key_file_handle = open('server_key.pem', 'w+') + key_file_handle = open('server_key.pem', 'w+', encoding='utf-8') key_file_handle.write(server_key) key_file_handle.close() diff --git a/examples/system/sysview_tracing/pytest_sysview_tracing.py b/examples/system/sysview_tracing/pytest_sysview_tracing.py index a970480d3f3..6af3e02c71a 100644 --- a/examples/system/sysview_tracing/pytest_sysview_tracing.py +++ b/examples/system/sysview_tracing/pytest_sysview_tracing.py @@ -34,7 +34,7 @@ def dut_expect_task_event() -> None: dut.gdb.write('c', non_blocking=True) time.sleep(1) # to avoid EOF file error - with open(dut.gdb._logfile) as fr: # pylint: disable=protected-access + with open(dut.gdb._logfile, encoding='utf-8') as fr: # pylint: disable=protected-access gdb_pexpect_proc = pexpect.fdpexpect.fdspawn(fr.fileno()) gdb_pexpect_proc.expect('Thread 2 "main" hit Breakpoint 1, app_main ()') diff --git a/examples/system/sysview_tracing_heap_log/pytest_sysview_tracing_heap_log.py b/examples/system/sysview_tracing_heap_log/pytest_sysview_tracing_heap_log.py index f96242dc225..c6147f52d5d 100644 --- a/examples/system/sysview_tracing_heap_log/pytest_sysview_tracing_heap_log.py +++ b/examples/system/sysview_tracing_heap_log/pytest_sysview_tracing_heap_log.py @@ -49,7 +49,7 @@ def test_examples_sysview_tracing_heap_log(idf_path: str, dut: IdfDut) -> None: sysviewtrace.expect(r'Found \d+ leaked bytes in \d+ blocks.', timeout=120) # Validate GDB logs - with open(dut.gdb._logfile) as fr: # pylint: disable=protected-access + with open(dut.gdb._logfile, encoding='utf-8') as fr: # pylint: disable=protected-access gdb_pexpect_proc = pexpect.fdpexpect.fdspawn(fr.fileno()) gdb_pexpect_proc.expect_exact( 'Thread 2 "main" hit Temporary breakpoint 1, heap_trace_start (mode_param', timeout=10) # should be (mode_param=HEAP_TRACE_ALL) # TODO GCC-329 diff --git a/tools/check_python_dependencies.py b/tools/check_python_dependencies.py index 945d8e7a0d3..6621b033144 100755 --- a/tools/check_python_dependencies.py +++ b/tools/check_python_dependencies.py @@ -45,12 +45,12 @@ required_set = set() for req_path in args.requirements: - with open(req_path) as f: + with open(req_path, encoding='utf-8') as f: required_set |= set(i for i in map(str.strip, f.readlines()) if len(i) > 0 and not i.startswith('#')) constr_dict = {} # for example package_name -> package_name==1.0 for const_path in args.constraints: - with open(const_path) as f: + with open(const_path, encoding='utf-8') as f: for con in [i for i in map(str.strip, f.readlines()) if len(i) > 0 and not i.startswith('#')]: if con.startswith('file://'): con = os.path.basename(con) diff --git a/tools/export_utils/shell_types.py b/tools/export_utils/shell_types.py index 8918eb5d8e3..c40c24f0cf1 100644 --- a/tools/export_utils/shell_types.py +++ b/tools/export_utils/shell_types.py @@ -104,7 +104,7 @@ def export_file(self, fd: TextIO) -> None: 'Go to the project directory and run:\n\n idf.py build"\n')) def export(self) -> None: - with open(self.script_file_path, 'w') as fd: + with open(self.script_file_path, 'w', 
encoding='utf-8') as fd: self.export_file(fd) print(f'. {self.script_file_path}') @@ -133,7 +133,7 @@ def autocompletion(self) -> str: return autocom def init_file(self) -> None: - with open(self.script_file_path, 'w') as fd: + with open(self.script_file_path, 'w', encoding='utf-8') as fd: # We will use the --init-file option to pass a custom rc file, which will ignore .bashrc, # so we need to source .bashrc first. bashrc_path = os.path.expanduser('~/.bashrc') @@ -167,7 +167,7 @@ def init_file(self) -> None: # If ZDOTDIR is unset, HOME is used instead. # https://zsh.sourceforge.io/Doc/Release/Files.html#Startup_002fShutdown-Files zdotdir = os.environ.get('ZDOTDIR', str(Path.home())) - with open(self.script_file_path, 'w') as fd: + with open(self.script_file_path, 'w', encoding='utf-8') as fd: # We will use the ZDOTDIR env variable to load our custom script in the newly spawned shell # so we need to source .zshrc first. zshrc_path = Path(zdotdir) / '.zshrc' @@ -211,7 +211,7 @@ def autocompletion(self) -> str: return stdout def init_file(self) -> None: - with open(self.script_file_path, 'w') as fd: + with open(self.script_file_path, 'w', encoding='utf-8') as fd: self.export_file(fd) def spawn(self) -> None: @@ -249,7 +249,7 @@ def export(self) -> None: print(f'{self.script_file_path}') def init_file(self) -> None: - with open(self.script_file_path, 'w') as fd: + with open(self.script_file_path, 'w', encoding='utf-8') as fd: # fd.write(f'{self.deactivate_cmd}\n') TODO in upcoming task IDF-10292 for var, value in self.new_esp_idf_env.items(): if var == 'PATH': @@ -297,7 +297,7 @@ def export(self) -> None: print(f'call {self.script_file_path}') def init_file(self) -> None: - with open(self.script_file_path, 'w') as fd: + with open(self.script_file_path, 'w', encoding='utf-8') as fd: fd.write('@echo off\n') # fd.write(f'{self.deactivate_cmd}\n') TODO in upcoming task IDF-10292 for var, value in self.new_esp_idf_env.items(): diff --git a/tools/gen_soc_caps_kconfig/gen_soc_caps_kconfig.py b/tools/gen_soc_caps_kconfig/gen_soc_caps_kconfig.py index 293a25c133b..af6cfefc02b 100755 --- a/tools/gen_soc_caps_kconfig/gen_soc_caps_kconfig.py +++ b/tools/gen_soc_caps_kconfig/gen_soc_caps_kconfig.py @@ -96,7 +96,7 @@ def add_source(self, source_path, condition): # type: (str, str) -> None def update_file(self, kconfig_path, always_write): # type: (Path, bool) -> bool try: - with open(kconfig_path, 'r') as f: + with open(kconfig_path, 'r', encoding='utf-8') as f: old_content = f.readlines() except FileNotFoundError: old_content = [''] @@ -115,7 +115,7 @@ def update_file(self, kconfig_path, always_write): # type: (Path, bool) -> bool if file_needs_update: print('\n' + 'Updating file: {}'.format(kconfig_path)) - with open(kconfig_path, 'w') as f: + with open(kconfig_path, 'w', encoding='utf-8') as f: f.writelines(new_content) return file_needs_update @@ -218,7 +218,7 @@ def generate_defines(soc_caps_dir, filename, always_write): # type: (Path, str, def get_defines(header_path): # type: (Path) -> list[str] defines = [] logging.info('Reading macros from {}...'.format(header_path)) - with open(header_path, 'r') as f: + with open(header_path, 'r', encoding='utf-8') as f: output = f.read() for line in output.split('\n'): diff --git a/tools/idf.py b/tools/idf.py index 200a557d947..740713f7f42 100755 --- a/tools/idf.py +++ b/tools/idf.py @@ -481,7 +481,7 @@ def _print_closing_message(self, args: PropertyDict, actions: KeysView) -> None: # Otherwise, if we built any binaries print a message about # how to flash them 
def print_flashing_message(title: str, key: str) -> None: - with open(os.path.join(args.build_dir, 'flasher_args.json')) as file: + with open(os.path.join(args.build_dir, 'flasher_args.json'), encoding='utf-8') as file: flasher_args: Dict[str, Any] = json.load(file) def flasher_path(f: Union[str, 'os.PathLike[str]']) -> str: @@ -789,7 +789,7 @@ def expand_args(args: List[Any], parent_path: str, file_stack: List[str]) -> Lis visited.add(rel_path) try: - with open(rel_path, 'r') as f: + with open(rel_path, 'r', encoding='utf-8') as f: for line in f: expanded_args.extend(expand_args(shlex.split(line), os.path.dirname(rel_path), file_stack + [file_name])) except IOError: diff --git a/tools/idf_py_actions/create_ext.py b/tools/idf_py_actions/create_ext.py index 9634bd8ccc4..00d9fa28816 100644 --- a/tools/idf_py_actions/create_ext.py +++ b/tools/idf_py_actions/create_ext.py @@ -16,7 +16,7 @@ def get_type(action: str) -> str: def replace_in_file(filename: str, pattern: str, replacement: str) -> None: - with open(filename, 'r+') as f: + with open(filename, 'r+', encoding='utf-8') as f: content = f.read() overwritten_content = re.sub(pattern, replacement, content, flags=re.M) f.seek(0) diff --git a/tools/idf_py_actions/debug_ext.py b/tools/idf_py_actions/debug_ext.py index 5805e838b06..299913afebc 100644 --- a/tools/idf_py_actions/debug_ext.py +++ b/tools/idf_py_actions/debug_ext.py @@ -70,7 +70,7 @@ def _check_openocd_errors(fail_if_openocd_failed: Dict, target: str, ctx: Contex if p.poll() is not None: print('OpenOCD exited with {}'.format(p.poll())) break - with open(name, 'r') as f: + with open(name, 'r', encoding='utf-8') as f: content = f.read() if re.search(r'Listening on port \d+ for gdb connections', content): # expect OpenOCD has started successfully - stop watching @@ -78,7 +78,7 @@ def _check_openocd_errors(fail_if_openocd_failed: Dict, target: str, ctx: Contex time.sleep(0.5) # OpenOCD exited or is not listening -> print full log and terminate - with open(name, 'r') as f: + with open(name, 'r', encoding='utf-8') as f: print(f.read()) raise FatalError('Action "{}" failed due to errors in OpenOCD'.format(target), ctx) @@ -194,7 +194,7 @@ def post_debug(action: str, ctx: Context, args: PropertyDict, **kwargs: str) -> name = processes[target + '_outfile_name'] pos = 0 while True: - with open(name, 'r') as f: + with open(name, 'r', encoding='utf-8') as f: f.seek(pos) for line in f: print(line.rstrip()) @@ -212,7 +212,7 @@ def get_project_desc(args: PropertyDict, ctx: Context) -> Any: desc_path = os.path.join(args.build_dir, 'project_description.json') if not os.path.exists(desc_path): ensure_build_directory(args, ctx.info_name) - with open(desc_path, 'r') as f: + with open(desc_path, 'r', encoding='utf-8') as f: project_desc = json.load(f) return project_desc @@ -237,7 +237,7 @@ def openocd(action: str, ctx: Context, args: PropertyDict, openocd_scripts: Opti local_dir = project_desc['build_dir'] args = ['openocd'] + shlex.split(openocd_arguments) openocd_out_name = os.path.join(local_dir, OPENOCD_OUT_FILE) - openocd_out = open(openocd_out_name, 'w') + openocd_out = open(openocd_out_name, 'w', encoding='utf-8') try: process = subprocess.Popen(args, stdout=openocd_out, stderr=subprocess.STDOUT, bufsize=1) except Exception as e: @@ -350,7 +350,7 @@ def gdbui(action: str, ctx: Context, args: PropertyDict, gdbgui_port: Optional[s if gdbgui_port is not None: gdbgui_args += ['--port', gdbgui_port] gdbgui_out_name = os.path.join(local_dir, GDBGUI_OUT_FILE) - gdbgui_out = open(gdbgui_out_name, 'w') 
+ gdbgui_out = open(gdbgui_out_name, 'w', encoding='utf-8') env = os.environ.copy() # The only known solution for https://github.com/cs01/gdbgui/issues/359 is to set the following environment # variable. The greenlet package cannot be downgraded for compatibility with other requirements (gdbgui, diff --git a/tools/idf_py_actions/qemu_ext.py b/tools/idf_py_actions/qemu_ext.py index ad46a07ddee..716af4ad950 100644 --- a/tools/idf_py_actions/qemu_ext.py +++ b/tools/idf_py_actions/qemu_ext.py @@ -209,7 +209,7 @@ def _get_project_desc(args: PropertyDict, ctx: Context) -> Any: desc_path = os.path.join(args.build_dir, 'project_description.json') if not os.path.exists(desc_path): ensure_build_directory(args, ctx.info_name) - with open(desc_path, 'r') as f: + with open(desc_path, 'r', encoding='utf-8') as f: project_desc = json.load(f) return project_desc diff --git a/tools/idf_py_actions/serial_ext.py b/tools/idf_py_actions/serial_ext.py index 8e41153fa0c..a8a1630821b 100644 --- a/tools/idf_py_actions/serial_ext.py +++ b/tools/idf_py_actions/serial_ext.py @@ -53,7 +53,7 @@ def _get_project_desc(ctx: click.core.Context, args: PropertyDict) -> Any: desc_path = os.path.join(args.build_dir, 'project_description.json') if not os.path.exists(desc_path): ensure_build_directory(args, ctx.info_name) - with open(desc_path, 'r') as f: + with open(desc_path, 'r', encoding='utf-8') as f: project_desc = json.load(f) return project_desc @@ -71,7 +71,7 @@ def _get_esptool_args(args: PropertyDict) -> List: result += ['-p', args.port] result += ['-b', str(args.baud)] - with open(os.path.join(args.build_dir, 'flasher_args.json')) as f: + with open(os.path.join(args.build_dir, 'flasher_args.json'), encoding='utf-8') as f: flasher_args = json.load(f) extra_esptool_args = flasher_args['extra_esptool_args'] diff --git a/tools/idf_py_actions/tools.py b/tools/idf_py_actions/tools.py index 0f4cb73a1d0..1b065ddc8f6 100644 --- a/tools/idf_py_actions/tools.py +++ b/tools/idf_py_actions/tools.py @@ -64,7 +64,7 @@ def _set_build_context(args: 'PropertyDict') -> None: proj_desc_fn = f'{args.build_dir}/project_description.json' try: - with open(proj_desc_fn, 'r') as f: + with open(proj_desc_fn, 'r', encoding='utf-8') as f: ctx['proj_desc'] = json.load(f) except (OSError, ValueError) as e: raise FatalError(f'Cannot load {proj_desc_fn}: {e}') @@ -85,7 +85,7 @@ def _idf_version_from_cmake() -> Optional[str]: regex = re.compile(r'^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)') ver = {} try: - with open(version_path) as f: + with open(version_path, encoding='utf-8') as f: for line in f: m = regex.match(line) @@ -189,7 +189,7 @@ def load_hints() -> Dict: } current_module_dir = os.path.dirname(__file__) - with open(os.path.join(current_module_dir, 'hints.yml'), 'r') as file: + with open(os.path.join(current_module_dir, 'hints.yml'), 'r', encoding='utf-8') as file: hints['yml'] = yaml.safe_load(file) hint_modules_dir = os.path.join(current_module_dir, 'hint_modules') @@ -263,7 +263,7 @@ def generate_hints(*filenames: str) -> Generator: """Getting output files and printing hints on how to resolve errors based on the output.""" hints = load_hints() for file_name in filenames: - with open(file_name, 'r') as file: + with open(file_name, 'r', encoding='utf-8') as file: yield from generate_hints_buffer(file.read(), hints) @@ -691,7 +691,7 @@ def get_sdkconfig_filename(args: 'PropertyDict', cache_cmdl: Optional[Dict]=None proj_desc_path = os.path.join(args.build_dir, 'project_description.json') try: - with open(proj_desc_path, 'r') as f: + with 
open(proj_desc_path, 'r', encoding='utf-8') as f: proj_desc = json.load(f) return str(proj_desc['config_file']) except (OSError, KeyError): @@ -712,7 +712,7 @@ def get_sdkconfig_value(sdkconfig_file: str, key: str) -> Optional[str]: value = None # if the value is quoted, this excludes the quotes from the value pattern = re.compile(r"^{}=\"?([^\"]*)\"?$".format(key)) - with open(sdkconfig_file, 'r') as f: + with open(sdkconfig_file, 'r', encoding='utf-8') as f: for line in f: match = re.match(pattern, line) if match: diff --git a/tools/idf_tools.py b/tools/idf_tools.py index b5e4f024ec6..5b435966450 100755 --- a/tools/idf_tools.py +++ b/tools/idf_tools.py @@ -1575,7 +1575,7 @@ def get_env_state(cls) -> 'ENVState': if cls.deactivate_file_path: try: - with open(cls.deactivate_file_path, 'r') as fp: + with open(cls.deactivate_file_path, 'r', encoding='utf-8') as fp: env_state_obj.idf_variables = json.load(fp) except (IOError, OSError, ValueError): pass @@ -1585,7 +1585,7 @@ def save(self) -> str: try: if self.deactivate_file_path and os.path.basename(self.deactivate_file_path).endswith(f'idf_{str(os.getppid())}'): # If exported file path/name exists and belongs to actual opened shell - with open(self.deactivate_file_path, 'w') as w: + with open(self.deactivate_file_path, 'w', encoding='utf-8') as w: json.dump(self.idf_variables, w, ensure_ascii=False, indent=4) # type: ignore else: with tempfile.NamedTemporaryFile(delete=False, suffix=f'idf_{str(os.getppid())}') as fp: @@ -1604,7 +1604,7 @@ def load_tools_info() -> Dict[str, IDFTool]: tool_versions_file_name = g.tools_json - with open(tool_versions_file_name, 'r') as f: # type: ignore + with open(tool_versions_file_name, 'r', encoding='utf-8') as f: # type: ignore tools_info = json.load(f) return parse_tools_info_json(tools_info) # type: ignore @@ -1666,7 +1666,7 @@ def get_idf_version() -> str: version_file_path = os.path.join(g.idf_path, 'version.txt') if os.path.exists(version_file_path): - with open(version_file_path, 'r') as version_file: + with open(version_file_path, 'r', encoding='utf-8') as version_file: idf_version_str = version_file.read() match = re.match(r'^v([0-9]+\.[0-9]+).*', idf_version_str) @@ -1675,7 +1675,7 @@ def get_idf_version() -> str: if idf_version is None: try: - with open(os.path.join(g.idf_path, 'components', 'esp_common', 'include', 'esp_idf_version.h')) as f: + with open(os.path.join(g.idf_path, 'components', 'esp_common', 'include', 'esp_idf_version.h'), encoding='utf-8') as f: m = re.search(r'^#define\s+ESP_IDF_VERSION_MAJOR\s+(\d+).+?^#define\s+ESP_IDF_VERSION_MINOR\s+(\d+)', f.read(), re.DOTALL | re.MULTILINE) if m: @@ -2136,7 +2136,7 @@ def process_tool( def check_python_venv_compatibility(idf_python_env_path: str, idf_version: str) -> None: try: - with open(os.path.join(idf_python_env_path, VENV_VER_FILE), 'r') as f: + with open(os.path.join(idf_python_env_path, VENV_VER_FILE), 'r', encoding='utf-8') as f: read_idf_version = f.read().strip() if read_idf_version != idf_version: fatal(f'Python environment is set to {idf_python_env_path} which was generated for ' @@ -2643,7 +2643,7 @@ def action_install_python_env(args): # type: ignore stdout=sys.stdout, stderr=sys.stderr) try: - with open(os.path.join(idf_python_env_path, VENV_VER_FILE), 'w') as f: + with open(os.path.join(idf_python_env_path, VENV_VER_FILE), 'w', encoding='utf-8') as f: f.write(idf_version) except OSError as e: warn(f'The following issue occurred while generating the ESP-IDF version file in the Python environment: {e}. 
' @@ -2781,7 +2781,7 @@ def __init__(self, filename_prefix: str, url: str) -> None: sha256_file = sha256_file_tmp download(url, sha256_file) - with open(sha256_file, 'r') as f: + with open(sha256_file, 'r', encoding='utf-8') as f: self.checksum = f.read().splitlines() # remove temp file @@ -2867,7 +2867,7 @@ def action_add_version(args: Any) -> None: json_str = dump_tools_json(tools_info) if not args.output: args.output = os.path.join(g.idf_path, TOOLS_FILE_NEW) # type: ignore - with open(args.output, 'w') as f: + with open(args.output, 'w', encoding='utf-8') as f: f.write(json_str) f.write('\n') info(f'Wrote output to {args.output}') @@ -2881,7 +2881,7 @@ def action_rewrite(args): # type: ignore json_str = dump_tools_json(tools_info) if not args.output: args.output = os.path.join(g.idf_path, TOOLS_FILE_NEW) - with open(args.output, 'w') as f: + with open(args.output, 'w', encoding='utf-8') as f: f.write(json_str) f.write('\n') info(f'Wrote output to {args.output}') @@ -2974,10 +2974,10 @@ def action_validate(args): # type: ignore fatal('You need to install jsonschema package to use validate command') raise SystemExit(1) - with open(os.path.join(g.idf_path, TOOLS_FILE), 'r') as tools_file: + with open(os.path.join(g.idf_path, TOOLS_FILE), 'r', encoding='utf-8') as tools_file: tools_json = json.load(tools_file) - with open(os.path.join(g.idf_path, TOOLS_SCHEMA_FILE), 'r') as schema_file: + with open(os.path.join(g.idf_path, TOOLS_SCHEMA_FILE), 'r', encoding='utf-8') as schema_file: schema_json = json.load(schema_file) jsonschema.validate(tools_json, schema_json) # on failure, this will raise an exception with a fairly verbose diagnostic message diff --git a/tools/install_util.py b/tools/install_util.py index 387327d20ff..1200b8c5178 100644 --- a/tools/install_util.py +++ b/tools/install_util.py @@ -1,12 +1,9 @@ #!/usr/bin/env python - -# SPDX-FileCopyrightText: 2022-2023 Espressif Systems (Shanghai) CO LTD +# SPDX-FileCopyrightText: 2022-2024 Espressif Systems (Shanghai) CO LTD # # SPDX-License-Identifier: Apache-2.0 - # This script is used from the $IDF_PATH/install.* scripts. This way the argument parsing can be done at one place and # doesn't have to be implemented for all shells. 
- import argparse import json import os @@ -57,7 +54,7 @@ def action_print_help(script_extension: str) -> None: # extract the list of features from ./requirements.json thisdir = os.path.dirname(os.path.realpath(__file__)) - with open(f'{thisdir}/requirements.json', 'r') as f: + with open(f'{thisdir}/requirements.json', 'r', encoding='utf-8') as f: json_data = json.load(f) features = [feat['name'] for feat in json_data['features']] diff --git a/tools/ldgen/ldgen.py b/tools/ldgen/ldgen.py index 41a854e258a..681020fa36f 100755 --- a/tools/ldgen/ldgen.py +++ b/tools/ldgen/ldgen.py @@ -165,7 +165,7 @@ def main(): if exc.errno != errno.EEXIST: raise - with open(output_path, 'w') as f: # only create output file after generation has suceeded + with open(output_path, 'w', encoding='utf-8') as f: # only create output file after generation has succeeded f.write(output.read()) except LdGenFailure as e: print('linker script generation failed for %s\nERROR: %s' % (input_file.name, e)) diff --git a/tools/mass_mfg/mfg_gen.py b/tools/mass_mfg/mfg_gen.py index 3f303f8a8e9..759a4384d9d 100644 --- a/tools/mass_mfg/mfg_gen.py +++ b/tools/mass_mfg/mfg_gen.py @@ -23,7 +23,7 @@ def create_temp_files(args): def strip_blank_lines(input_filename, output_filename): - with open(input_filename, 'r') as read_from, open(output_filename,'w', newline='') as write_to: + with open(input_filename, 'r', encoding='utf-8') as read_from, open(output_filename,'w', newline='', encoding='utf-8') as write_to: for line in read_from: if not line.isspace(): write_to.write(line) @@ -32,7 +32,7 @@ def strip_blank_lines(input_filename, output_filename): def verify_values_exist(input_values_file, keys_in_values_file): """ Verify all keys have corresponding values in values file """ - with open(input_values_file, 'r') as values_file: + with open(input_values_file, 'r', encoding='utf-8') as values_file: values_file_reader = csv.reader(values_file, delimiter=',') next(values_file_reader) @@ -48,7 +48,7 @@ def verify_keys_exist(values_file_keys, input_config_file): """ keys_missing = [] - with open(input_config_file,'r') as config_file: + with open(input_config_file,'r', encoding='utf-8') as config_file: config_file_reader = csv.reader(config_file, delimiter=',') for line_num, line in enumerate(config_file_reader, start=1): @@ -74,7 +74,7 @@ def verify_datatype_encoding(input_config_file): valid_encodings = {'string', 'binary', 'hex2bin','u8', 'i8', 'u16', 'u32', 'i32', 'u64', 'i64','base64'} valid_datatypes = {'file','data','namespace'} - with open(input_config_file,'r') as config_file: + with open(input_config_file,'r', encoding='utf-8') as config_file: config_file_reader = csv.reader(config_file, delimiter=',') for line_num, line in enumerate(config_file_reader, start=1): @@ -90,7 +90,7 @@ def verify_file_data_count(input_config_file, keys_repeat): """ Verify count of data on each line in config file is equal to 3 (as format must be: ) """ - with open(input_config_file, 'r') as config_file: + with open(input_config_file, 'r', encoding='utf-8') as config_file: config_file_reader = csv.reader(config_file, delimiter=',') for line_num, line in enumerate(config_file_reader, start=1): @@ -136,7 +136,7 @@ def add_config_data_per_namespace(input_config_file): config_data_to_write = [] config_data_per_namespace = [] - with open(input_config_file,'r') as csv_config_file: + with open(input_config_file,'r', encoding='utf-8') as csv_config_file: config_file_reader = csv.reader(csv_config_file, delimiter=',') # `config_data_per_namespace` is added to 
`config_data_to_write` list after reading next namespace @@ -182,7 +182,7 @@ def add_data_to_file(config_data_to_write, key_value_pair, output_csv_file): header = ['key', 'type', 'encoding', 'value'] data_to_write = [] - with open(output_csv_file, 'w', newline='') as target_csv_file: + with open(output_csv_file, 'w', newline='', encoding='utf-8') as target_csv_file: output_file_writer = csv.writer(target_csv_file, delimiter=',') output_file_writer.writerow(header) @@ -214,7 +214,7 @@ def create_dir(filetype, output_dir_path): def set_repeat_value(total_keys_repeat, keys, csv_file, target_filename): - with open(csv_file, 'r') as read_from, open(target_filename,'w', newline='') as write_to: + with open(csv_file, 'r', encoding='utf-8') as read_from, open(target_filename,'w', newline='', encoding='utf-8') as write_to: csv_file_reader = csv.reader(read_from, delimiter=',') headers = next(csv_file_reader) values = next(csv_file_reader) @@ -247,7 +247,7 @@ def create_intermediate_csv(args, keys_in_values_file, keys_repeat, is_encr=Fals config_data_to_write = add_config_data_per_namespace(args.conf) try: - with open(args.values, 'r') as csv_values_file: + with open(args.values, 'r', encoding='utf-8') as csv_values_file: values_file_reader = csv.reader(csv_values_file, delimiter=',') keys = next(values_file_reader) @@ -258,7 +258,7 @@ def create_intermediate_csv(args, keys_in_values_file, keys_repeat, is_encr=Fals else: target_values_file = args.values - with open(target_values_file, 'r') as csv_values_file: + with open(target_values_file, 'r', encoding='utf-8') as csv_values_file: values_file_reader = csv.reader(csv_values_file, delimiter=',') next(values_file_reader) @@ -341,7 +341,7 @@ def verify_file_format(args): raise SystemExit('Error: values file: %s is empty.' % args.values) # Extract keys from config file - with open(args.conf, 'r') as config_file: + with open(args.conf, 'r', encoding='utf-8') as config_file: config_file_reader = csv.reader(config_file, delimiter=',') for config_data in config_file_reader: if 'namespace' not in config_data: @@ -350,7 +350,7 @@ def verify_file_format(args): keys_repeat.append(config_data[0]) # Extract keys from values file - with open(args.values, 'r') as values_file: + with open(args.values, 'r', encoding='utf-8') as values_file: values_file_reader = csv.reader(values_file, delimiter=',') keys_in_values_file = next(values_file_reader) diff --git a/tools/mkdfu.py b/tools/mkdfu.py index 5c994e71ec8..0fe4000e5e2 100755 --- a/tools/mkdfu.py +++ b/tools/mkdfu.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -# SPDX-FileCopyrightText: 2020-2022 Espressif Systems (Shanghai) CO LTD +# SPDX-FileCopyrightText: 2020-2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 # # This program creates archives compatible with ESP32-S* ROM DFU implementation. @@ -9,9 +9,6 @@ # as a separate file. In addition to that, a special index file, 'dfuinfo0.dat', is created. # This file must be the first one in the archive. It contains binary structures describing each # subsequent file (for example, where the file needs to be flashed/loaded). 
- -from __future__ import print_function, unicode_literals - import argparse import hashlib import json @@ -308,7 +305,7 @@ def process_json_file(path): # type: (str) -> str ''' return check_file(os.path.relpath(os.path.join(json_dir, path), start=os.curdir)) - with open(args.json) as f: + with open(args.json, encoding='utf-8') as f: files += [(int(addr, 0), process_json_file(f_name)) for addr, f_name in json.load(f)['flash_files'].items()] diff --git a/tools/mkuf2.py b/tools/mkuf2.py index 05d45dfecc0..90e8572cbaf 100755 --- a/tools/mkuf2.py +++ b/tools/mkuf2.py @@ -1,16 +1,15 @@ #!/usr/bin/env python # -# SPDX-FileCopyrightText: 2020-2023 Espressif Systems (Shanghai) CO LTD +# SPDX-FileCopyrightText: 2020-2024 Espressif Systems (Shanghai) CO LTD # SPDX-License-Identifier: Apache-2.0 # Module was moved to the esptool in ESP-IDF v5.2 and relicensed under GPL v2.0 license. - -from __future__ import division - import argparse import json import os import subprocess import sys +from typing import List +from typing import Tuple def main() -> None: @@ -75,7 +74,7 @@ def check_file(file_name: str) -> str: raise RuntimeError('{} is not a regular file!'.format(file_name)) return file_name - files = [] + files: List[Tuple[int, str]] = [] if args.files: files += [(addr, check_file(f_name)) for addr, f_name in zip(args.files[::2], args.files[1::2])] @@ -89,7 +88,7 @@ def process_json_file(path: str) -> str: ''' return check_file(os.path.abspath(os.path.join(json_dir, path))) - with open(args.json) as f: + with open(args.json, encoding='utf-8') as f: json_content = json.load(f) if args.bin: @@ -107,10 +106,10 @@ def process_json_file(path: str) -> str: files += [(addr, process_json_file(f_name)) for addr, f_name in flash_dic.items()] # remove possible duplicates and sort based on the address - files = sorted([(addr, f_name) for addr, f_name in dict(files).items()], key=lambda x: x[0]) # type: ignore + files = sorted([(addr, f_name) for addr, f_name in dict(files).items()], key=lambda x: x[0]) # list of tuples to simple list - files = [item for t in files for item in t] + files_flatten = [item for t in files for item in t] cmd = [ sys.executable, '-m', 'esptool', @@ -125,10 +124,10 @@ def process_json_file(path: str) -> str: if args.md5_disable: cmd.append('--md5-disable') - cmd_str = ' '.join(cmd + files) + cmd_str = ' '.join(cmd + files_flatten) print(f'Executing: {cmd_str}') - sys.exit(subprocess.run(cmd + files).returncode) + sys.exit(subprocess.run(cmd + files_flatten).returncode) if __name__ == '__main__':
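
Why pinning the encoding matters: when open() is called without encoding=, Python 3 falls back to locale.getpreferredencoding(False). On most Linux and macOS hosts that resolves to UTF-8, but on many Windows hosts it is a legacy code page such as cp1252, so a tool that passes in CI can still fail on a developer machine with UnicodeDecodeError, or quietly produce mojibake. Below is a minimal sketch of the failure mode this commit guards against (the file name demo.txt and its contents are illustrative, not taken from the tree):

    import locale

    # Without encoding=, open() falls back to the locale's preferred
    # encoding: usually UTF-8 on Linux/macOS, often a legacy code page
    # such as cp1252 on Windows.
    print(locale.getpreferredencoding(False))

    PATH = 'demo.txt'  # illustrative file name

    # Write non-ASCII text with the encoding pinned, as this patch does.
    with open(PATH, 'w', encoding='utf-8') as f:
        f.write('Ondřej\n')

    # Locale-dependent read-back: on a cp1252 locale this either raises
    # UnicodeDecodeError or silently decodes to mojibake ('OndÅ™ej').
    try:
        with open(PATH) as f:
            print(f.read())
    except UnicodeDecodeError as err:
        print('locale decode failed:', err)

    # Pinned read-back: deterministic on every host.
    with open(PATH, encoding='utf-8') as f:
        assert f.read() == 'Ondřej\n'

Two caveats for a sweep like this: encoding= is only valid in text mode, so data that is already bytes (for example the paho-mqtt msg.payload written in pytest_mqtt_ssl.py) belongs in a file opened with 'wb' and no encoding argument; and from Python 3.10 onward, running a script with python -X warn_default_encoding emits an EncodingWarning (PEP 597) at every open() call that still omits encoding, which is a convenient way to verify that no call site was missed.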