diff --git a/news-scraper-agent/config/env_config.py b/news-scraper-agent/config/env_config.py index 141f81b..d35124e 100644 --- a/news-scraper-agent/config/env_config.py +++ b/news-scraper-agent/config/env_config.py @@ -1,8 +1,6 @@ -from typing import Optional - from dotenv import load_dotenv from pydantic_settings import BaseSettings, SettingsConfigDict - +from typing import Optional load_dotenv() @@ -14,12 +12,12 @@ class Environment(BaseSettings): # db MONGO_DB_LOCAL_URI: str MONGO_DB_DEV_URI: str - MONGO_DB_REAL_URI: str + MONGO_DB_PROD_URI: str # kakaoworks KAWORK_WEBHOOK_LOCAL_URI: str KAWORK_WEBHOOK_DEV_URI: str - KAWORK_WEBHOOK_REAL_URI: str + KAWORK_WEBHOOK_PROD_URI: str # langsmith LANGCHAIN_ENDPOINT: Optional[str] = None diff --git a/news-scraper-agent/config/log.py b/news-scraper-agent/config/log.py index 2e4ff8c..f01d255 100644 --- a/news-scraper-agent/config/log.py +++ b/news-scraper-agent/config/log.py @@ -29,7 +29,7 @@ def _initialize_logger(self): formatter = logging.Formatter(fmt="%(name)16s - %(message)s") # Logger 레벨 설정 - self.setLevel(logging.DEBUG if env.PROFILE != "real" else logging.INFO) + self.setLevel(logging.DEBUG if env.PROFILE != "prod" else logging.INFO) # RichHandler 추가 rich_handler = RichHandler( diff --git a/news-scraper-agent/external/kakaowork/client.py b/news-scraper-agent/external/kakaowork/client.py index 340c4f8..fb50280 100644 --- a/news-scraper-agent/external/kakaowork/client.py +++ b/news-scraper-agent/external/kakaowork/client.py @@ -9,7 +9,7 @@ HTTP_CONTENT_TYPE = "application/json" WEBHOOK_URL_MAP = { - "real": env.KAWORK_WEBHOOK_REAL_URI, + "prod": env.KAWORK_WEBHOOK_PROD_URI, "dev": env.KAWORK_WEBHOOK_DEV_URI, "local": env.KAWORK_WEBHOOK_LOCAL_URI, } diff --git a/news-scraper-agent/loader/connect.py b/news-scraper-agent/loader/connect.py index 28c7032..1ecd649 100644 --- a/news-scraper-agent/loader/connect.py +++ b/news-scraper-agent/loader/connect.py @@ -1,17 +1,16 @@ -from mongoengine import connect - from 
config.env_config import env from config.log import NewsScraperAgentLogger from models.message import Message from models.site import Site +from mongoengine import connect logger = NewsScraperAgentLogger() def connect_db(): try: - if env.PROFILE == "real": - connect(host=env.MONGO_DB_REAL_URI) + if env.PROFILE == "prod": + connect(host=env.MONGO_DB_PROD_URI) elif env.PROFILE == "develop": connect(host=env.MONGO_DB_DEV_URI) else: