From e0593efa4ff95ac50a11833f3cda902c6ecfecf7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jo=C3=A3o=20Lucas=20de=20Sousa=20Almeida?=
 <joao.l.sa.9.3@gmail.com>
Date: Fri, 7 Mar 2025 09:49:48 -0300
Subject: [PATCH] Avoid copying a config file already placed in the logging
 dir
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: João Lucas de Sousa Almeida <joao.l.sa.9.3@gmail.com>
---
 terratorch/cli_tools.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)
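
Note: a minimal standalone sketch of the guarded copy introduced below, under
the assumption that `config_path_original`/`config_path_new` are plain string
paths; `copy_config_once` is an illustrative name, not part of the codebase:

    import os
    import shutil

    def copy_config_once(config_path_original: str, config_path_new: str) -> None:
        # os.path.samefile raises FileNotFoundError if either path is missing,
        # so the exists() check covers the first run, before any copy was made.
        if not os.path.exists(config_path_new) or not os.path.samefile(
            config_path_original, config_path_new
        ):
            shutil.copyfile(config_path_original, config_path_new)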

diff --git a/terratorch/cli_tools.py b/terratorch/cli_tools.py
index 2308e2ec..922e4ceb 100644
--- a/terratorch/cli_tools.py
+++ b/terratorch/cli_tools.py
@@ -342,8 +342,14 @@ def setup(self, trainer: Trainer, pl_module: LightningModule, stage: str) -> Non
 
         # broadcast so that all ranks are in sync on future calls to .setup()
         self.already_saved = trainer.strategy.broadcast(self.already_saved)
+
         # Copying config file to log dir
-        shutil.copyfile(self.config_path_original, self.config_path_new)
+        # Copy the exact original yaml file into the log directory to ease
+        # reproducibility; skip the copy when that file is already there.
+        if not os.path.exists(self.config_path_new) or not os.path.samefile(
+            self.config_path_original, self.config_path_new
+        ):
+            shutil.copyfile(self.config_path_original, self.config_path_new)
 
 class StateDictAwareModelCheckpoint(ModelCheckpoint):
     # necessary as we wish to have one model checkpoint with only state dict and one with standard lightning checkpoints