Merge pull request #88 from Drazzilb08/logging-&-Versioning
Versioning & Logging
Drazzilb08 authored Feb 26, 2024
2 parents 7e67639 + c1cae60 commit deecc87
Showing 20 changed files with 356 additions and 395 deletions.
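For orientation before the per-file diffs: this changeset moves logger and config creation out of module import time and into main.py, which now builds both per script and passes them into each module's main(). Below is a minimal sketch of that dispatch pattern, assuming Config and setup_logger are importable as they are used in the diff (their import paths are not shown there and are guessed here); the scaffolding is illustrative, not the exact code from main.py.

    import importlib
    import multiprocessing

    from util.config import Config        # assumed import path; not shown in the diff
    from util.logger import setup_logger  # assumed import path; not shown in the diff

    def run_python_module(script_to_run):
        # Per-script config, as in get_config() below.
        config = Config(script_to_run)
        # Per-script logger built from that config's log level, as in get_logger() below.
        logger = setup_logger(config.log_level, script_to_run)
        # Import the module and hand it its own logger and config, as run_module() now does.
        module = importlib.import_module(f"modules.{script_to_run}")
        process = multiprocessing.Process(target=module.main, args=(logger, config))
        process.start()
        return process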
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
0.0.1
0.1.0
91 changes: 67 additions & 24 deletions main.py
@@ -11,11 +11,8 @@
import time
import datetime

# Set the script name
script_name = "main"
# Set the current time
current_time = datetime.datetime.now().strftime("%H:%M")
logger = setup_logger("info", script_name)

already_run = {}

@@ -31,10 +28,10 @@
"sync_gdrive",
"upgradinatorr",
"unmatched_assets",
"backup_appdata",
]

list_of_bash_scripts = [
"backup_appdata",
"backup_folder",
"backup_plex",
"jduparr",
@@ -45,44 +42,87 @@

branch = get_current_git_branch()

def run_module(script_name, logger):
def get_logger(config, script_name):
"""
Get the logger for the script
Args:
log_level (str): The log level to use
script_name (str): The name of the script
config (dict): The config file
Returns:
logger: The logger for the script
"""
# Get loglevel from script config
log_level = config.log_level
logger = setup_logger(log_level, script_name)

return logger

def get_config(script_to_run):
"""
Get the config for the script
Args:
script_name (str): The name of the script
Returns:
dict: The config for the script
"""

# Load the config file
config = Config(script_to_run)

return config

def run_module(script_to_run, logger):
process = None
if script_name in list_of_python_scripts:
if script_to_run in list_of_python_scripts:
try:
module = importlib.import_module(f"modules.{script_name}")
process = multiprocessing.Process(target=module.main)
config = get_config(script_to_run)
script_logger = get_logger(config, script_to_run)
module = importlib.import_module(f"modules.{script_to_run}")
process = multiprocessing.Process(target=module.main, args=(script_logger, config))
if process:
if script_name == "poster_renamerr":
config = Config(script_name)
if script_to_run == "poster_renamerr":
config = Config(script_to_run)
script_config = config.script_config
sync_posters = script_config.get("sync_posters", False)
border_replacerr = script_config.get("border_replacerr", False)
posters = ", also running gdrive_sync" if sync_posters else ""
border = ", also running border_replacerr" if border_replacerr else ""
additional_scripts = f"{posters}{border}"
logger.info(f"Running script: {script_name}{additional_scripts}.")
if logger: logger.info(f"Running script: {script_to_run}{additional_scripts}.")
else: print(f"Running script: {script_to_run}{additional_scripts}.")
else:
logger.info(f"Running script: {script_name} in the list of python scripts.")

if logger: logger.info(f"Running script: {script_to_run}.")
else: print(f"Running script: {script_to_run}.")
process.start()
return process
except ModuleNotFoundError:
logger.error(f"Script: {script_name} does not exist")
if logger: logger.error(f"Script: {script_to_run} does not exist")
else: print(f"Script: {script_to_run} does not exist")
return
elif script_name and any(script in script_name for script in list_of_bash_scripts):
elif script_to_run and any(script in script_to_run for script in list_of_bash_scripts):
module = "bash_scripts"
try:
config = get_config(module)
script_logger = get_logger(config, script_to_run)
module = importlib.import_module(f"modules.{module}")
process = multiprocessing.Process(target=module.main, args=(script_name,))
process = multiprocessing.Process(target=module.main, args=(script_to_run, config, script_logger))
if process:
logger.info(f"Running script: {script_name}")
if logger: logger.info(f"Running script: {script_to_run}")
else: print(f"Running script: {script_to_run}")
process.start()
return process
except ModuleNotFoundError:
logger.error(f"Script: {script_name} does not exist in the list of bash scripts.")
if logger: logger.error(f"Script: {script_to_run} does not exist in the list of bash scripts.")
else: print(f"Script: {script_to_run} does not exist in the list of bash scripts.")
return
else:
logger.error(f"Script: {script_name} does not exist in either bash or python scripts")
if logger: logger.error(f"Script: {script_to_run} does not exist in either bash or python scripts")
else: print(f"Script: {script_to_run} does not exist in either bash or python scripts")
return

def load_schedule():
@@ -94,7 +134,7 @@ def load_schedule():
"""

# Load the config file
config = Config(script_name)
config = Config("main")

# Get the schedule from the config
schedule = config.scheduler
@@ -106,7 +146,8 @@ def main():
"""
Main function
"""

# Set the script name

initial_run = True
last_check = None
old_schedule = None
@@ -116,14 +157,16 @@
if len(sys.argv) > 1:
for input_name in sys.argv[1:]:
if input_name and any(script in input_name for script in list_of_bash_scripts):
run_module(input_name, logger)
run_module(input_name, None)
elif input_name in list_of_python_scripts:
run_module(input_name, logger)
run_module(input_name, None)
elif input_name not in list_of_python_scripts or (input_name and not any(script in input_name for script in list_of_bash_scripts)):
logger.error(f"Script: {input_name} does not exist")
print(f"Script: {input_name} does not exist")
return
else:
try:
logger = setup_logger("info", "main")
logger.info("Starting the script...")
# If config file is not found
while True:
scripts_schedules= load_schedule()
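One detail of the main.py changes worth calling out: when script names are passed on the command line, run_module() is called with logger=None, so every log statement inside it is guarded with an "if logger: ... else: print(...)" fallback. That repeated guard reads like the helper below; the helper itself is hypothetical and does not appear in the diff.

    def log_or_print(logger, message, level="info"):
        # Hypothetical helper mirroring the inline fallback used throughout run_module().
        if logger:
            getattr(logger, level)(message)
        else:
            print(message)

    # For example, the guarded calls inside run_module() are equivalent to:
    #   log_or_print(logger, f"Running script: {script_to_run}.")
    #   log_or_print(logger, f"Script: {script_to_run} does not exist", level="error")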
110 changes: 61 additions & 49 deletions modules/backup_appdata.py
@@ -17,19 +17,9 @@
from util.discord import *

script_name = "backup_appdata"
config = Config(script_name)
log_level = config.log_level
dry_run = config.dry_run
logger = setup_logger(log_level, script_name)

try:
client = docker.from_env()
except DockerException as e:
logger.error(f"\nChances are your docker daemon is not running. Please start it and try again.")
logger.error(f"Error connecting to Docker: {e}\n")
sys.exit()

def filter_containers(containers, add_to_no_stop, add_to_stop, stop_list, no_stop_list, exclusion_list, appdata_paths):


def filter_containers(containers, add_to_no_stop, add_to_stop, stop_list, no_stop_list, exclusion_list, appdata_paths, logger):
"""
Filter containers based on stop_list, no_stop_list, and exclusion_list
@@ -85,21 +75,24 @@ def filter_containers(containers, add_to_no_stop, add_to_stop, stop_list, no_sto
volume_mounts = container.attrs["HostConfig"]["Binds"]

# Get appdata path
for volume_mount in volume_mounts:
host_path = volume_mount.split(":")[0]
container_path = volume_mount.split(":")[1]
if container_path == "/config":
appdata_path = host_path
break
elif any(appdata_path in host_path for appdata_path in appdata_paths):
appdata_path = host_path
break
else:
appdata_path = None
if volume_mounts:
for volume_mount in volume_mounts:
host_path = volume_mount.split(":")[0]
container_path = volume_mount.split(":")[1]
if container_path == "/config":
appdata_path = host_path
break
elif any(appdata_path in host_path for appdata_path in appdata_paths):
appdata_path = host_path
break
else:
appdata_path = None
else:
logger.debug(f"No volume mounts for {container.name}")

# Check if running in Docker
host_container_name = None
if os.environ.get("DOCKER_ENV"):
if os.environ.get("DOCKER_ENV") and appdata_path:
# Get appdata path from environment variable
docker_appdata_path = os.environ.get("APPDATA_PATH")
appdata_path_basename = os.path.basename(appdata_path)
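The appdata-path detection above now runs only when a container actually has bind mounts. On sample data it behaves like this sketch (the container paths and config values are made up; the parsing mirrors the diff, with the generator variable renamed to avoid shadowing appdata_path):

    appdata_paths = ["/opt/appdata"]                    # made-up config value
    volume_mounts = ["/opt/appdata/sonarr:/config:rw",  # made-up Binds entries
                     "/mnt/media:/data:ro"]

    appdata_path = None
    for volume_mount in volume_mounts:
        host_path = volume_mount.split(":")[0]
        container_path = volume_mount.split(":")[1]
        if container_path == "/config":
            # A /config mount wins outright.
            appdata_path = host_path
            break
        elif any(path in host_path for path in appdata_paths):
            # Otherwise any host path under a configured appdata root is accepted.
            appdata_path = host_path
            break

    print(appdata_path)  # -> /opt/appdata/sonarr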
@@ -127,7 +120,7 @@ def filter_containers(containers, add_to_no_stop, add_to_stop, stop_list, no_sto
stop = True
new = True
elif not appdata_path and container.name not in all_containers:
config.add_to_config(add_type="exclude", container=container, logger=logger)
config.add_to_config(add_type="exclude", container=container, logger=logger, message = "# Container automatically added here due to no appdata dir")
exclude = True
new = True
elif stop_list and container.name in stop_list:
@@ -201,12 +194,16 @@ def get_folder_size(folder_path):
str: Folder size
"""
total = 0
for dirpath, dirnames, filenames in os.walk(folder_path):
for f in filenames:
fp = os.path.join(dirpath, f)
if not os.path.islink(fp):
total += os.path.getsize(fp)
return total
if folder_path:
print(f"Getting size of {folder_path}")
for dirpath, dirnames, filenames in os.walk(folder_path):
for f in filenames:
fp = os.path.join(dirpath, f)
if not os.path.islink(fp):
total += os.path.getsize(fp)
return total
else:
return 0

def ignore_patterns_and_subdirectories(patterns):
def _ignore_patterns_and_subdirectories(dirname, filenames):
@@ -226,7 +223,7 @@ def add_to_tar(tar, path, arcname, ignore=None):
full_path = os.path.join(root, file)
tar.add(full_path, arcname=os.path.join(arcname, file))

def backup_appdata(container_name, appdata_path, destination, compress, dry_run, time):
def backup_appdata(container_name, appdata_path, destination, compress, dry_run, time, logger):
"""
Backup appdata
@@ -306,17 +303,19 @@ def backup_appdata(container_name, appdata_path, destination, compress, dry_run,
elif pre_size - post_size > 0:
diff_str = f"-{diff_str}"

difference_percent = (post_size - pre_size) / pre_size * 100 if pre_size != 0 else 0

table = [
["Source", pre_size_str],
["Backup Size", post_size_str],
["Difference", diff_str]
["Difference", f"{diff_str} ({difference_percent:.2f}%)"]
]
logger.info(create_table(table))

return pre_size_str, post_size_str, diff_str


def handle_container(containers_dict, destination, dry_run, compress, keep_backup):
def handle_container(client, containers_dict, destination, dry_run, compress, keep_backup, logger):
"""
Backup docker containers
Expand All @@ -340,14 +339,18 @@ def handle_container(containers_dict, destination, dry_run, compress, keep_backu
logger.info(create_bar(f"Backing up {container_name}..."))
if stop:
if current_state == "running":
logger.info(f"Stopping {container_name}...")
container = client.containers.get(container_id)
if not dry_run:
logger.info(f"Stopping {container_name}...")
container.stop()
else:
logger.info(f"DRY RUN: Would have stopped {container_name}")
pre_size_str, post_size_str, diff_str = backup_appdata(container_name, appdata_path, destination, compress, dry_run, time)
logger.info(f"Starting {container_name}...")
if not dry_run:
logger.info(f"Starting {container_name}...")
container.start()
else:
logger.info(f"DRY RUN: Would have started {container_name}")
else:
logger.info(f"{container_name} was already stopped, not starting...")
pre_size_str, post_size_str, diff_str = backup_appdata(container_name, appdata_path, destination, compress, dry_run, time)
@@ -405,7 +408,7 @@ def default_fields(runtime, total_size_str, all_backups_size_str):
"value": f"```{all_backups_size_str}```"
}]

def notification(containers_dict, script_name, use_summary, containers_to_remove):
def notification(containers_dict, script_name, use_summary, containers_to_remove, logger):
"""
Send notification
@@ -532,15 +535,22 @@ def notification(containers_dict, script_name, use_summary, containers_to_remove
discord(fields=fields, logger=logger, script_name=script_name, description=description, color=0x00ff00, content=None)


def main():
def main(logger, config):
"""
Main function
Main function.
"""
global dry_run
dry_run = config.dry_run
log_level = config.log_level
logger.setLevel(log_level.upper())
script_config = config.script_config

name = script_name.replace("_", " ").upper()
start = datetime.now()
try:
script_config = config.script_config

client = docker.from_env()

logger.info(create_bar(f"START {name}"))
# Display script settings
table = [["Script Settings"]]
@@ -558,6 +568,8 @@

# Display script settings
logger.debug(create_bar("-")) # Log separator
logger.debug(f'{"Dry_run:":<20}{dry_run}')
logger.debug(f'{"Log level:":<20}{log_level}')
logger.debug(f'{"Destination:":<20} {destination}')
logger.debug(f'{"Keep Backup:":<20} {keep_backup}')
logger.debug(f'{"Compress:":<20} {compress}')
@@ -594,12 +606,12 @@ def main():
containers = client.containers.list(all=True)

# Filter containers
containers_dict, containers_to_remove = filter_containers(containers, add_to_no_stop, add_to_stop, stop_list, no_stop_list, exclusion_list, appdata_paths)
containers_dict, containers_to_remove = filter_containers(containers, add_to_no_stop, add_to_stop, stop_list, no_stop_list, exclusion_list, appdata_paths, logger)

# Backup containers
if containers_dict:
logger.debug(f"Containers Dictionary:\n{json.dumps(containers_dict, indent=4)}")
containers_dict = handle_container(containers_dict, destination, dry_run, compress, keep_backup)
containers_dict = handle_container(client, containers_dict, destination, dry_run, compress, keep_backup, logger)
else:
logger.debug("No containers to backup")
end = datetime.now()
@@ -636,17 +648,17 @@ def main():
logger.info(f"Script ran for {run_time_str}")
logger.info(f"All backups size: {all_backups_size_str}")
if discord_check(script_name):
notification(containers_dict, script_name, use_summary, containers_to_remove)
notification(containers_dict, script_name, use_summary, containers_to_remove, logger)

except KeyboardInterrupt:
print("Keyboard Interrupt detected. Exiting...")
sys.exit()
except DockerException as e:
logger.error(f"\nChances are your docker daemon is not running. Please start it and try again.")
logger.error(f"Error connecting to Docker: {e}\n")
sys.exit()
except Exception:
logger.error(f"\n\nAn error occurred:\n", exc_info=True)
logger.error(f"\n\n")
finally:
logger.info(create_bar(f"END {name}"))


if __name__ == "__main__":
main()
logger.info(create_bar(f"END {name}"))