diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml
index 9cecd3d0..6f6ae99b 100644
--- a/.github/workflows/test.yaml
+++ b/.github/workflows/test.yaml
@@ -25,3 +25,6 @@ jobs:
 
       - name: Run tests
         run: python -m pytest -vv
+
+      - name: Run linter
+        run: python -m black neucbot/ tests/ --check
diff --git a/neucbot.py b/neucbot.py
index 821d4321..26ae01a6 100755
--- a/neucbot.py
+++ b/neucbot.py
@@ -10,6 +10,7 @@
 
 from neucbot import ensdf
 from neucbot import elements
+from neucbot import alpha
 
 class constants:
     N_A = 6.0221409e+23
@@ -46,46 +47,6 @@ def parseIsotope(iso):
         A = A*10 + int(i)
     return [ele,A]
 
-def generateAlphaFileName(ele,A):
-    outdir = './AlphaLists/'
-    fName = outdir + ele.capitalize() + str(A) + 'Alphas.dat'
-    return fName
-
-def generateAlphaList(ele, A):
-    print('generateAlphaList(',ele,A,')',file=constants.ofile)
-    ensdf.main(['parseENSDF',ele,A])
-
-def loadAlphaList(fname):
-    f = open(fname)
-    tokens = [line.split() for line in f.readlines()]
-    alpha_list = []
-    for words in tokens:
-        if words[0][0] == '#' or len(words) < 2:
-            continue
-
-        alpha = []
-        for word in words:
-            alpha.append(float(word))
-        alpha_list.append(alpha)
-    f.close()
-    return alpha_list
-
-def getAlphaList(ele,A):
-    fname = generateAlphaFileName(ele,A)
-    return loadAlphaList(fname)
-
-def getAlphaListIfExists(ele,A):
-    fName = generateAlphaFileName(ele,A)
-    tries = 3
-    while not os.path.isfile(fName):
-        if tries < 0:
-            print('Cannot generate alpha list for ele = ', ele, ' and A = ', A,file = constants.ofile)
-            return 0
-        print('generating alpha file ', fName, file = constants.ofile)
-        generateAlphaList(ele,A)
-        tries -= 1
-    return getAlphaList(ele,A)
-
 def loadChainAlphaList(fname):
     f = open(fname)
     tokens = [line.split() for line in f.readlines()]
@@ -99,7 +60,7 @@
         [ele,A] = parseIsotope(iso)
 
         # Now get the isotope's alpha list and add it to the chain's list
-        aList_forIso = getAlphaListIfExists(ele,A)
+        aList_forIso = alpha.AlphaList(ele, A).load_or_fetch()
         if constants.print_alphas:
             print(iso, file = constants.ofile)
             print('\t', aList_forIso, file = constants.ofile)
@@ -472,7 +433,7 @@ def main():
         if arg == '-l':
             alphalist_file = sys.argv[sys.argv.index(arg)+1]
             print('load alpha list', alphalist_file, file = sys.stdout)
-            alpha_list = loadAlphaList(alphalist_file)
+            alpha_list = alpha.AlphaList.from_filepath(alphalist_file)
         if arg == '-c':
             chain_file = sys.argv[sys.argv.index(arg)+1]
             print('load alpha chain', chain_file, file = sys.stdout)
@@ -545,4 +506,3 @@ def main():
 
 if __name__ == '__main__':
     main()
-
diff --git a/neucbot/alpha.py b/neucbot/alpha.py
new file mode 100644
index 00000000..d96544eb
--- /dev/null
+++ b/neucbot/alpha.py
@@ -0,0 +1,63 @@
+import os
+from neucbot import ensdf
+
+ALPHA_LIST_DIR = "./AlphaLists"
+
+
+def _alphas_from_file_path(file_path):
+    file = open(file_path)
+
+    # Parse alphalist files:
+    # 1. Only parse lines that have 2+ tab-separated tokens
+    # 2. Ignore any lines starting with "#"
+    # 3. Return list of lists (where each sublist is a list of floats)
+    alphas = [
+        [float(token) for token in line.split()]  # Parse each token as float
+        for line in file.readlines()  # for each line in file
+        if line[0] != "#"
+        and len(line.split()) >= 2  # except for lines matching these conditions
+    ]
+
+    file.close()
+
+    return alphas
+
+
+class AlphaList:
+    def __init__(self, element, isotope):
+        self.element = element
+        self.isotope = isotope
+        self.file_path = f"{ALPHA_LIST_DIR}/{self.element}{self.isotope}Alphas.dat"
+        self.fetch_attempts = 3
+
+    @classmethod
+    def from_filepath(cls, file_path):
+        return _alphas_from_file_path(file_path)
+
+    def load_or_fetch(self):
+        while not os.path.isfile(self.file_path):
+            if self.fetch_attempts < 0:
+                raise RuntimeError(f"Unable to write alpha file to {self.file_path}")
+            self.write()
+            self.fetch_attempts -= 1
+
+        return self.load()
+
+    def load(self):
+        return _alphas_from_file_path(self.file_path)
+
+    def write(self):
+        if os.path.exists(self.file_path):
+            print(f"Alpha list file already exists at {self.file_path}")
+        else:
+            client = ensdf.Client(self.element, self.isotope)
+            decay_file_text = client.read_or_fetch_decay_file()
+            energyMaps = ensdf.Parser.parse(decay_file_text)
+            file = open(self.file_path, "w")
+
+            for energy, probability in energyMaps["alphas"].items():
+                file.write(f"{str(energy/1000)}\t{probability}\n")
+
+            file.close()
+
+        return True
diff --git a/neucbot/chemistry.py b/neucbot/chemistry.py
index 64a6d24a..995fdbfd 100755
--- a/neucbot/chemistry.py
+++ b/neucbot/chemistry.py
@@ -1,32 +1,130 @@
-elementsToZ = {'H' : 1, 'He' : 2, 'Li' : 3, 'Be' : 4, 'B' : 5,
-               'C' : 6, 'N' : 7, 'O' : 8, 'F' : 9, 'Ne' : 10,
-               'Na' : 11,'Mg' : 12,'Al' : 13,'Si' : 14,'P' : 15,
-               'S' : 16,'Cl' : 17,'Ar' : 18,'K' : 19,'Ca' : 20,
-               'Sc' : 21,'Ti' : 22,'V' : 23,'Cr' : 24,'Mn' : 25,
-               'Fe' : 26,'Co' : 27,'Ni' : 28,'Cu' : 29,'Zn' : 30,
-               'Ga' : 31,'Ge' : 32,'As' : 33,'Se' : 34,'Br' : 35,
-               'Kr' : 36,'Rb' : 37,'Sr' : 38,'Y' : 39,'Zr' : 40,
-               'Nb' : 41,'Mo' : 42,'Tc' : 43,'Ru' : 44,'Rh' : 45,
-               'Pd' : 46,'Ag' : 47,'Cd' : 48,'In' : 49,'Sn' : 50,
-               'Sb' : 51,'Te' : 52,'I' : 53,'Xe' : 54,'Cs' : 55,
-               'Ba' : 56,'La' : 57,'Ce' : 58,'Pr' : 59,'Nd' : 60,
-               'Pm' : 61,'Sm' : 62,'Eu' : 63,'Gd' : 64,'Tb' : 65,
-               'Dy' : 66,'Ho' : 67,'Er' : 68,'Tm' : 69,'Yb' : 70,
-               'Lu' : 71,'Hf' : 72,'Ta' : 73,'W' : 74,'Re' : 75,
-               'Os' : 76,'Ir' : 77,'Pt' : 78,'Au' : 79,'Hg' : 80,
-               'Tl' : 81,'Pb' : 82,'Bi' : 83,'Po' : 84,'At' : 85,
-               'Rn' : 86,'Fr' : 87,'Ra' : 88,'Ac' : 89,'Th' : 90,
-               'Pa' : 91,'U' : 92,'Np' : 93,'Pu' : 94,'Am' : 95,
-               'Cm' : 96,'Bk' : 97,'Cf' : 98,'Es' : 99,'Fm' :100,
-               'Md' :101,'No' :102,'Lr' :103,'Rf' :104,'Db' :105,
-               'Sg' :106,'Bh' :107,'Hs' :108,'Mt' :109,'Ds' :110,
-               'Rg' :111,'Cn' :112,'Uut':113,'Fl' :114,'Uup':115,
-               'Lv' :116,'Uus':117,'Uuo':118}
+elementsToZ = {
+    "H": 1,
+    "He": 2,
+    "Li": 3,
+    "Be": 4,
+    "B": 5,
+    "C": 6,
+    "N": 7,
+    "O": 8,
+    "F": 9,
+    "Ne": 10,
+    "Na": 11,
+    "Mg": 12,
+    "Al": 13,
+    "Si": 14,
+    "P": 15,
+    "S": 16,
+    "Cl": 17,
+    "Ar": 18,
+    "K": 19,
+    "Ca": 20,
+    "Sc": 21,
+    "Ti": 22,
+    "V": 23,
+    "Cr": 24,
+    "Mn": 25,
+    "Fe": 26,
+    "Co": 27,
+    "Ni": 28,
+    "Cu": 29,
+    "Zn": 30,
+    "Ga": 31,
+    "Ge": 32,
+    "As": 33,
+    "Se": 34,
+    "Br": 35,
+    "Kr": 36,
+    "Rb": 37,
+    "Sr": 38,
+    "Y": 39,
+    "Zr": 40,
+    "Nb": 41,
+    "Mo": 42,
+    "Tc": 43,
+    "Ru": 44,
+    "Rh": 45,
+    "Pd": 46,
+    "Ag": 47,
+    "Cd": 48,
+    "In": 49,
+    "Sn": 50,
+    "Sb": 51,
+    "Te": 52,
+    "I": 53,
+    "Xe": 54,
+    "Cs": 55,
+    "Ba": 56,
+    "La": 57,
+    "Ce": 58,
+    "Pr": 59,
+    "Nd": 60,
+    "Pm": 61,
+    "Sm": 62,
+    "Eu": 63,
+    "Gd": 64,
+    "Tb": 65,
+    "Dy": 66,
+    "Ho": 67,
+    "Er": 68,
+    "Tm": 69,
+    "Yb": 70,
+    "Lu": 71,
+    "Hf": 72,
+    "Ta": 73,
+    "W": 74,
+    "Re": 75,
+    "Os": 76,
+    "Ir": 77,
+    "Pt": 78,
+    "Au": 79,
+    "Hg": 80,
+    "Tl": 81,
+    "Pb": 82,
+    "Bi": 83,
+    "Po": 84,
+    "At": 85,
+    "Rn": 86,
+    "Fr": 87,
+    "Ra": 88,
+    "Ac": 89,
+    "Th": 90,
+    "Pa": 91,
+    "U": 92,
+    "Np": 93,
+    "Pu": 94,
+    "Am": 95,
+    "Cm": 96,
+    "Bk": 97,
+    "Cf": 98,
+    "Es": 99,
+    "Fm": 100,
+    "Md": 101,
+    "No": 102,
+    "Lr": 103,
+    "Rf": 104,
+    "Db": 105,
+    "Sg": 106,
+    "Bh": 107,
+    "Hs": 108,
+    "Mt": 109,
+    "Ds": 110,
+    "Rg": 111,
+    "Cn": 112,
+    "Uut": 113,
+    "Fl": 114,
+    "Uup": 115,
+    "Lv": 116,
+    "Uus": 117,
+    "Uuo": 118,
+}
 
 zToElements = {z: element for element, z in elementsToZ.items()}
 
+
 def getZ(ele):
     return elementsToZ.get(ele.capitalize())
 
+
 def getElement(z):
     return zToElements.get(z, "None")
diff --git a/neucbot/elements.py b/neucbot/elements.py
index 851013be..6aed9303 100644
--- a/neucbot/elements.py
+++ b/neucbot/elements.py
@@ -30,6 +30,7 @@
 with open("./neucbot/elements.json", "r") as file:
     isotopesMap = json.load(file)
 
+
 class Element:
     def __init__(self, element):
         self.symbol = element.capitalize()
diff --git a/neucbot/ensdf.py b/neucbot/ensdf.py
index 69a28318..2f894a5f 100644
--- a/neucbot/ensdf.py
+++ b/neucbot/ensdf.py
@@ -5,6 +5,7 @@
 
 https://www.nndc.bnl.gov/ensdf/ensdf-manual.pdf
 """
+
 import os
 import re
 from requests import Session
@@ -24,32 +25,21 @@
 
 ALPHA_DECAY_HREF_PATTERN = re.compile("getdecaydataset.jsp" + ".*a\\sdecay")
 DECAY_DATA_DIR = "./Data/Decays/ensdf"
-ALPHA_LIST_DIR = "./AlphaLists"
 
+
 class Client:
     def __init__(self, element, isotope):
         self.element = elements.Element(element)
         self.isotope = str(isotope)
-        self.nndc_url = URL_BASE + DECAY_SEARCH_URL + "?unc=NDS&nuc=" + self.isotope + element.upper()
+        self.nndc_url = (
+            URL_BASE
+            + DECAY_SEARCH_URL
+            + "?unc=NDS&nuc="
+            + self.isotope
+            + element.upper()
+        )
         self.http = self.setup_http()
 
-    def write_alpha_files(self):
-        alpha_list_file = self.alpha_list_file_path()
-
-        if os.path.exists(alpha_list_file):
-            print(f"Alpha list file already exists at {self.alpha_list_file_path()}")
-        else:
-            decay_file_text = self.read_or_fetch_decay_file()
-            energyMaps = Parser.parse(decay_file_text)
-            file = open(alpha_list_file, "w")
-
-            for energy, probability in energyMaps["alphas"].items():
-                file.write(f"{str(energy/1000)}\t{probability}\n")
-
-            file.close()
-
-        return True
-
     def read_or_fetch_decay_file(self):
         if os.path.exists(self.decay_file_path()):
             file = open(self.decay_file_path(), "r")
@@ -62,20 +52,26 @@ def read_or_fetch_decay_file(self):
 
     def fetch_and_write_decay_file(self):
         search_results = self.http.get(self.nndc_url, headers=REQUEST_HEADERS).content
-        links = BeautifulSoup(search_results, "html.parser").find_all(href=ALPHA_DECAY_HREF_PATTERN)
+        links = BeautifulSoup(search_results, "html.parser").find_all(
+            href=ALPHA_DECAY_HREF_PATTERN
+        )
 
         if len(links) == 0:
             raise RuntimeError(f"No Alpha Decay links found on {self.nndc_url}")
         else:
             for link in links:
                 path = link.attrs.get("href")
-                decay_page = self.http.get(URL_BASE + path, headers=REQUEST_HEADERS).text
+                decay_page = self.http.get(
+                    URL_BASE + path, headers=REQUEST_HEADERS
+                ).text
                 decay_file = BeautifulSoup(decay_page, "html.parser").find("pre")
 
                 if decay_file and len(decay_file.contents) > 0:
                     decay_file = decay_file.contents[0].strip()
                 else:
-                    raise RuntimeError(f"No page content found on {self.nndc_url}{path}")
+                    raise RuntimeError(
+                        f"No page content found on {self.nndc_url}{path}"
+                    )
 
                 if not Parser.is_alpha_decay(decay_file):
                     print(f"No alpha decay found at {self.nndc_url}/{path}")
@@ -84,7 +80,9 @@
                 else:
                     return self.write_decay_file(decay_file)
 
-        raise RuntimeError(f"No valid ground state alpha decays found at {self.nndc_url}")
+        raise RuntimeError(
+            f"No valid ground state alpha decays found at {self.nndc_url}"
+        )
 
     def write_decay_file(self, decay_file_text):
         file = open(self.decay_file_path(), "w")
@@ -96,9 +94,6 @@ def fetch_and_write_decay_file(self):
     def decay_file_path(self):
         return f"{DECAY_DATA_DIR}/{self.element.symbol}{self.isotope}.dat"
 
-    def alpha_list_file_path(self):
-        return f"{ALPHA_LIST_DIR}/{self.element.symbol}{self.isotope}Alphas.dat"
-
     def setup_http(self):
         session = Session()
         session.mount("https://", HTTPAdapter(max_retries=Retry(total=3)))
@@ -109,6 +104,8 @@
 """
 The Parser class parses records from the NNDC ENSDF Database
 """
+
+
 class Parser:
 
     # Pattern of Parent Record for Ground State Decay is:
@@ -124,16 +121,20 @@ class Parser:
     # Pattern of Alpha energies is:
     # "(5-character nuclear ID) A (Energy [up to 10 digits]) (Uncertainty Energy) (Intensity) (Uncertainty intensity)"
     # ENSDF Manual, page 25
-    ALPHA_RECORD = re.compile(r"^[A-Z0-9]{5}\s{2}A\s(?P<energy>[0-9\s\.]{10})[0-9\.\s]{2}(?P<intensity>[0-9\.\-E\s]{8})")
+    ALPHA_RECORD = re.compile(
+        r"^[A-Z0-9]{5}\s{2}A\s(?P<energy>[0-9\s\.]{10})[0-9\.\s]{2}(?P<intensity>[0-9\.\-E\s]{8})"
+    )
 
     # Pattern of Gamma energies is:
     # "(5-character nuclear ID) G (Energy [up to 10 digits]) (Uncertainty Energy) (Intensity) (Uncertainty intensity)"
     # ENSDF Manual, page 28
-    GAMMA_RECORD = re.compile(r"^[A-Z0-9]{5}\s{2}G\s(?P<energy>[0-9\s\.]{10})[0-9\.\s]{2}(?P<intensity>[0-9\.\-E\s]{8})")
+    GAMMA_RECORD = re.compile(
+        r"^[A-Z0-9]{5}\s{2}G\s(?P<energy>[0-9\s\.]{10})[0-9\.\s]{2}(?P<intensity>[0-9\.\-E\s]{8})"
+    )
 
     @classmethod
     def parse(cls, file_text):
-        contents = { "alphas": {}, "gammas": {}, "intensity": 0 }
+        contents = {"alphas": {}, "gammas": {}, "intensity": 0}
         for line in file_text.splitlines():
             if cls.questionable_record(line):
                 continue
@@ -173,5 +174,6 @@ def is_alpha_decay(cls, page_text):
 
     @classmethod
     def is_ground_state_decay(cls, page_text):
-        return any(cls.GROUND_STATE_DECAY_RECORD.match(line) for line in page_text.splitlines())
-
+        return any(
+            cls.GROUND_STATE_DECAY_RECORD.match(line) for line in page_text.splitlines()
+        )
diff --git a/parseENSDF.py b/parseENSDF.py
index aa1deba4..f69e6461 100755
--- a/parseENSDF.py
+++ b/parseENSDF.py
@@ -1,6 +1,6 @@
 #!/usr/bin/python3
 import sys
-from neucbot import ensdf
+from neucbot import alpha
 
 def main(argv):
     if(len(argv) != 3):
@@ -10,7 +10,7 @@ def main(argv):
     ele = argv[1]
     A = int(argv[2])
 
-    ensdf.Client(ele, A).write_alpha_files()
+    alpha.AlphaList(ele, A).write()
 
 if __name__ == "__main__":
     main(sys.argv)
diff --git a/requirements.txt b/requirements.txt
index 9acf2726..7b762364 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,13 +1,19 @@
 beautifulsoup4==4.14.2
+black==25.12.0
 bs4==0.0.2
 certifi==2025.10.5
 charset-normalizer==3.4.4
+click==8.3.1
 idna==3.11
 iniconfig==2.3.0
+mypy_extensions==1.1.0
 packaging==25.0
+pathspec==0.12.1
+platformdirs==4.5.1
 pluggy==1.6.0
 Pygments==2.19.2
 pytest==8.4.2
+pytokens==0.3.0
 requests==2.32.5
 soupsieve==2.8
 typing_extensions==4.15.0
diff --git a/tests/test_alpha.py b/tests/test_alpha.py
new file mode 100644
index 00000000..2ba2e463
--- /dev/null
+++ b/tests/test_alpha.py
@@ -0,0 +1,137 @@
+import pytest
+import re
+
+from unittest import TestCase
+from unittest.mock import call, patch
+from neucbot.alpha import AlphaList
+from neucbot.ensdf import Client, Parser
+
+
+class TestAlphaList(TestCase):
+    @patch.object(Parser, "parse")
+    @patch.object(Client, "read_or_fetch_decay_file")
+    @patch("os.path.exists")
+    def test_write_success(
+        self, mocked_os_path_exists, mocked_read_or_fetch, mocked_parse
+    ):
+        # Assume no decay or alpha files exist
+        mocked_os_path_exists.return_value = False
+
+        mocked_read_or_fetch.return_value = ""
+
+        expected_alphas = {
+            6089.88: 27.12,
+            6050.78: 69.91,
+            5768: 1.70,
+            5626: 0.157,
+            5607: 1.13,
+            5481: 0.013,
+            5345: 0.0010,
+            5302: 0.00011,
+        }
+
+        expected_gammas = {
+            39.857: 2.96,
+            288.2: 0.938,
+            328.03: 0.349,
+            433.7: 0.047,
+            452.98: 1.01,
+            473.0: 0.14,
+        }
+
+        mocked_parse.return_value = {
+            "alphas": expected_alphas,
+            "gammas": expected_gammas,
+            "intensity": 1.0,
+        }
+
+        alphas = AlphaList("Bi", 212)
+        alphas.write()
+
+        mocked_os_path_exists.assert_has_calls([call("./AlphaLists/Bi212Alphas.dat")])
+        mocked_read_or_fetch.assert_called()
+        mocked_parse.assert_called()
+
+    @patch.object(Parser, "parse")
+    @patch.object(Client, "read_or_fetch_decay_file")
+    @patch("os.path.exists")
+    def test_write_does_not_fetch_if_alpha_file_exists(
+        self, mocked_os_path_exists, mocked_read_or_fetch, mocked_parse
+    ):
+        # Assume no decay or alpha files exist
+        mocked_os_path_exists.return_value = True
+
+        alphas = AlphaList("Bi", 212)
+        alphas.write()
+
+        mocked_os_path_exists.assert_has_calls([call("./AlphaLists/Bi212Alphas.dat")])
+        mocked_read_or_fetch.assert_not_called()
+        mocked_parse.assert_not_called()
+
+    def test_load(self):
+        alphas = AlphaList("Bi", 212).load()
+
+        self.assertEqual(
+            alphas,
+            [
+                [6.08988, 27.12],
+                [6.05078, 69.91],
+                [5.768, 1.7],
+                [5.626, 0.157],
+                [5.607, 1.13],
+                [5.481, 0.013],
+                [5.345, 0.001],
+                [5.302, 0.00011],
+            ],
+        )
+
+    def test_from_filepath(self):
+        alphas = AlphaList.from_filepath("AlphaLists/Bi212Alphas.dat")
+
+        self.assertEqual(
+            alphas,
+            [
+                [6.08988, 27.12],
+                [6.05078, 69.91],
+                [5.768, 1.7],
+                [5.626, 0.157],
+                [5.607, 1.13],
+                [5.481, 0.013],
+                [5.345, 0.001],
+                [5.302, 0.00011],
+            ],
+        )
+
+    @patch.object(AlphaList, "write")
+    @patch("os.path.isfile")
+    def test_load_or_fetch_success(self, mocked_isfile, mocked_write):
+        mocked_isfile.side_effect = [False, True]
+
+        mocked_write.return_value = ""
+
+        alphas = AlphaList("Bi", 212).load_or_fetch()
+
+        self.assertEqual(mocked_isfile.call_count, 2)
+        self.assertEqual(mocked_write.call_count, 1)
+        self.assertEqual(
+            alphas,
+            [
+                [6.08988, 27.12],
+                [6.05078, 69.91],
+                [5.768, 1.7],
+                [5.626, 0.157],
+                [5.607, 1.13],
+                [5.481, 0.013],
+                [5.345, 0.001],
+                [5.302, 0.00011],
+            ],
+        )
+
+    @patch.object(AlphaList, "write")
+    @patch("os.path.isfile")
+    def test_load_or_fetch_raises_on_failed_write(self, mocked_isfile, mocked_write):
+        mocked_isfile.return_value = False
+        mocked_write.return_value = ""
+
+        with self.assertRaisesRegex(RuntimeError, r"Unable to write alpha file"):
+            AlphaList("Bi", 212).load_or_fetch()
diff --git a/tests/test_chemistry.py b/tests/test_chemistry.py
index 87079d43..80503c1e 100644
--- a/tests/test_chemistry.py
+++ b/tests/test_chemistry.py
@@ -2,6 +2,7 @@
 
 from neucbot import chemistry
 
+
 class TestChemistry:
     def test_getZ(self):
         assert chemistry.getZ("H") == 1
diff --git a/tests/test_elements.py b/tests/test_elements.py
index 8b59a2f1..7ee10942 100644
--- a/tests/test_elements.py
+++ b/tests/test_elements.py
@@ -2,6 +2,7 @@
 
 from neucbot import elements
 
+
 class TestElements:
     def test_isotopes(self):
         hydrogen = elements.Element("H")
diff --git a/tests/test_ensdf.py b/tests/test_ensdf.py
index cdf70b44..0f9454bb 100644
--- a/tests/test_ensdf.py
+++ b/tests/test_ensdf.py
@@ -7,38 +7,44 @@
 from requests.exceptions import HTTPError
 from neucbot.ensdf import Client, Parser, REQUEST_HEADERS, URL_BASE
 
+
 class MockResponse:
     def __init__(self, content):
         self.content = content
         self.text = content
 
-class MockSearchResponse():
+
+class MockSearchResponse:
     def __init__(self):
         with open("./tests/test_ensdf/bi212_search_results.html", "r") as file:
             self.content = file.read()
 
-class MockDecayResponse():
+
+class MockDecayResponse:
     def __init__(self):
         with open("./tests/test_ensdf/bi212.html", "r") as file:
             self.text = file.read()
 
+
 def ensdf_fetch_success(url, headers):
     if re.search(r"decaysearchdirect", url):
         return MockSearchResponse()
     elif re.search(r"getdecaydataset", url):
         return MockDecayResponse()
 
+
 def ensdf_fetch_empty_decay(url, headers):
     if re.search(r"decaysearchdirect", url):
         return MockSearchResponse()
     elif re.search(r"getdecaydataset", url):
         return MockResponse("<pre></pre>")
 
+
 @patch("os.path.exists")
 class TestClient(TestCase):
 
     @patch.object(Session, "get")
-    def test_write_alpha_files_success(self, mocked_get, mocked_os_path_exists):
+    def test_read_or_fetch_decay_file_success(self, mocked_get, mocked_os_path_exists):
         # Assume no decay or alpha files exist
         mocked_os_path_exists.return_value = False
 
@@ -46,44 +52,54 @@ def test_write_alpha_files_success(self, mocked_get, mocked_os_path_exists):
         mocked_get.side_effect = ensdf_fetch_success
 
         client = Client("Bi", 212)
-        client.write_alpha_files()
-
-        mocked_get.assert_has_calls([
-            call(client.nndc_url, headers=REQUEST_HEADERS),
-            call(URL_BASE + "getdecaydataset.jsp?nucleus=208TL&dsid=212bi a decay (25.0 m)", headers=REQUEST_HEADERS),
-        ])
-
+        client.read_or_fetch_decay_file()
+
+        mocked_get.assert_has_calls(
+            [
+                call(client.nndc_url, headers=REQUEST_HEADERS),
+                call(
+                    URL_BASE
+                    + "getdecaydataset.jsp?nucleus=208TL&dsid=212bi a decay (25.0 m)",
+                    headers=REQUEST_HEADERS,
+                ),
+            ]
+        )
 
     @patch.object(Client, "fetch_and_write_decay_file")
-    def test_reads_existing_decay_file(self, mocked_client_fetch, mocked_os_path_exists):
+    def test_reads_existing_decay_file(
+        self, mocked_client_fetch, mocked_os_path_exists
+    ):
         def decay_file_exists(path):
             return re.search(r"Data\/Decays", path)
 
         mocked_os_path_exists.side_effect = decay_file_exists
 
         client = Client("Bi", 212)
-        client.write_alpha_files()
+        client.read_or_fetch_decay_file()
 
         mocked_client_fetch.assert_not_called()
-        mocked_os_path_exists.assert_has_calls([
-            call("./AlphaLists/Bi212Alphas.dat"),
-            call("./Data/Decays/ensdf/Bi212.dat"),
-        ])
-
+        mocked_os_path_exists.assert_has_calls(
+            [
+                call("./Data/Decays/ensdf/Bi212.dat"),
+            ]
+        )
 
     @patch.object(Session, "get")
-    def test_raises_runtime_error_for_no_alpha_decay_links(self, mocked_get, mocked_os_path_exists):
+    def test_raises_runtime_error_for_no_alpha_decay_links(
+        self, mocked_get, mocked_os_path_exists
+    ):
         # Assume no decay or alpha files exist
         mocked_os_path_exists.return_value = False
 
         # Mimic search for invalid element
-        mocked_get.return_value = MockResponse("No datasets were found within the specified search parameters")
+        mocked_get.return_value = MockResponse(
+            "No datasets were found within the specified search parameters"
+        )
 
         client = Client("Bi", 212)
 
         with self.assertRaisesRegex(RuntimeError, r"No Alpha Decay links found"):
-            client.write_alpha_files()
-
+            client.read_or_fetch_decay_file()
 
     @patch.object(Session, "get")
     def test_empty_decay_page_content(self, mocked_get, mocked_os_path_exists):
@@ -96,26 +112,30 @@ def test_empty_decay_page_content(self, mocked_get, mocked_os_path_exists):
         client = Client("Bi", 212)
 
         with self.assertRaisesRegex(RuntimeError, r"No page content found"):
-            client.write_alpha_files()
-
+            client.read_or_fetch_decay_file()
 
     @patch.object(Parser, "is_alpha_decay")
     @patch.object(Session, "get")
-    def test_not_alpha_decay_file(self, mocked_get, mocked_alpha_decay, mocked_os_path_exists):
+    def test_not_alpha_decay_file(
+        self, mocked_get, mocked_alpha_decay, mocked_os_path_exists
+    ):
         mocked_os_path_exists.return_value = False
         mocked_get.side_effect = ensdf_fetch_success
         mocked_alpha_decay.return_value = False
 
         client = Client("Bi", 212)
 
-        with self.assertRaisesRegex(RuntimeError, r"No valid ground state alpha decays"):
-            client.write_alpha_files()
-
+        with self.assertRaisesRegex(
+            RuntimeError, r"No valid ground state alpha decays"
+        ):
+            client.read_or_fetch_decay_file()
 
     @patch.object(Parser, "is_ground_state_decay")
     @patch.object(Parser, "is_alpha_decay")
     @patch.object(Session, "get")
-    def test_not_ground_state_decay_file(self, mocked_get, mocked_alpha_decay, mocked_ground_state, mocked_os_path_exists):
+    def test_not_ground_state_decay_file(
+        self, mocked_get, mocked_alpha_decay, mocked_ground_state, mocked_os_path_exists
+    ):
         mocked_os_path_exists.return_value = False
         mocked_get.side_effect = ensdf_fetch_success
         mocked_alpha_decay.return_value = True
@@ -123,9 +143,10 @@ def test_not_ground_state_decay_file(self, mocked_get, mocked_alpha_decay, mocke
 
         client = Client("Bi", 212)
 
-        with self.assertRaisesRegex(RuntimeError, r"No valid ground state alpha decays"):
-            client.write_alpha_files()
-
+        with self.assertRaisesRegex(
+            RuntimeError, r"No valid ground state alpha decays"
+        ):
+            client.read_or_fetch_decay_file()
 
     @patch.object(Session, "get")
     def test_retry_on_http_errors(self, mocked_get, mocked_os_path_exists):
@@ -139,11 +160,11 @@ def test_retry_on_http_errors(self, mocked_get, mocked_os_path_exists):
         client = Client("Bi", 212)
 
         try:
-            client.write_alpha_files()
+            client.read_or_fetch_decay_file()
         except HTTPError:
             pass
 
-        client.write_alpha_files()
+        client.read_or_fetch_decay_file()
 
         self.assertEqual(mocked_get.call_count, 3)
 
@@ -154,25 +175,29 @@ def test_parse(self):
             text = file.read()
 
         expected_alphas = {
-                6089.88: 27.12,
-                6050.78: 69.91,
-                5768: 1.70,
-                5626: 0.157,
-                5607: 1.13,
-                5481: 0.013,
-                5345: 0.0010,
-                5302: 0.00011,
-                }
+            6089.88: 27.12,
+            6050.78: 69.91,
+            5768: 1.70,
+            5626: 0.157,
+            5607: 1.13,
+            5481: 0.013,
+            5345: 0.0010,
+            5302: 0.00011,
+        }
         expected_gammas = {
-  	            39.857: 2.96,
-  	            288.2: 0.938,
-  	            328.03: 0.349,
-  	            433.7: 0.047,
-  	            452.98: 1.01,
-  	            473.0: 0.14,
-                }
-
-        expected = { "alphas": expected_alphas, "gammas": expected_gammas, "intensity": 1.0}
+            39.857: 2.96,
+            288.2: 0.938,
+            328.03: 0.349,
+            433.7: 0.047,
+            452.98: 1.01,
+            473.0: 0.14,
+        }
+
+        expected = {
+            "alphas": expected_alphas,
+            "gammas": expected_gammas,
+            "intensity": 1.0,
+        }
         assert Parser.parse(text) == expected
 
     def test_questionable_record(self):
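
For reference, a minimal sketch of how the refactored alpha-list API introduced in this diff is exercised (it assumes the AlphaLists/Bi212Alphas.dat fixture used by the tests, or network access to NNDC so load_or_fetch() can write that file; the variable names are illustrative only):

    from neucbot.alpha import AlphaList

    # Load an existing alpha list straight from a file path (no isotope lookup, no network).
    alphas = AlphaList.from_filepath("AlphaLists/Bi212Alphas.dat")

    # Or resolve by element and mass number, fetching the ENSDF decay data and
    # writing ./AlphaLists/Bi212Alphas.dat first if the file does not exist yet.
    alphas = AlphaList("Bi", 212).load_or_fetch()

    for energy, intensity in alphas:
        print(energy, intensity)

Both entry points return the same list-of-lists structure that loadChainAlphaList() in neucbot.py consumed before the refactor, so the calling code only swaps getAlphaListIfExists()/loadAlphaList() for the new class.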