diff --git a/terratorch/cli_tools.py b/terratorch/cli_tools.py
index 2308e2ec..922e4ceb 100644
--- a/terratorch/cli_tools.py
+++ b/terratorch/cli_tools.py
@@ -342,8 +342,14 @@ def setup(self, trainer: Trainer, pl_module: LightningModule, stage: str) -> None:
         # broadcast so that all ranks are in sync on future calls to .setup()
         self.already_saved = trainer.strategy.broadcast(self.already_saved)
 
+        # Copying config file to log dir
-        shutil.copyfile(self.config_path_original, self.config_path_new)
+        # This copies the exact original yaml file into the log
+        # directory in order to facilitate reproducibility
+        if not os.path.samefile(self.config_path_original, self.config_path_new):
+            # When the file being used is already in the log directory, this
+            # operation is not necessary
+            shutil.copyfile(self.config_path_original, self.config_path_new)
 
 
 class StateDictAwareModelCheckpoint(ModelCheckpoint):
     # necessary as we wish to have one model checkpoint with only state dict and one with standard lightning checkpoints
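
A minimal standalone sketch (not terratorch code) of why the samefile guard
matters: shutil.copyfile raises shutil.SameFileError when source and
destination are the same file, which is exactly the situation the diff
handles when the config being used already lives in the log directory. The
paths below are hypothetical throwaway files.

    import os
    import shutil
    import tempfile

    with tempfile.TemporaryDirectory() as log_dir:
        # Pretend the config passed on the CLI already sits in the log dir.
        config_path = os.path.join(log_dir, "config.yaml")
        with open(config_path, "w") as f:
            f.write("trainer: {}\n")

        try:
            # Unguarded copy of a file onto itself fails.
            shutil.copyfile(config_path, config_path)
        except shutil.SameFileError as err:
            print(f"unguarded copy failed: {err}")

        # The guard from the diff skips the redundant copy instead of failing.
        if not os.path.samefile(config_path, config_path):
            shutil.copyfile(config_path, config_path)

One stdlib caveat worth knowing: os.path.samefile raises FileNotFoundError
if either path does not exist, so the guarded line presumes both paths are
present when it runs.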