Commit

Fix glitches
aturret committed Dec 1, 2024
1 parent 7b66ca1 commit 5b1554d
Showing 4 changed files with 13 additions and 5 deletions.
9 changes: 6 additions & 3 deletions app/routers/scraper.py
@@ -1,34 +1,37 @@
import asyncio

from fastapi import APIRouter
from fastapi.requests import Request

from app.config import API_KEY_NAME
from app.services.scrapers.common import InfoExtractService
from fastapi import Security
from app.auth import verify_api_key
from app.utils.logger import logger
from app.utils.parse import get_url_metadata

router = APIRouter(prefix="/scraper")


@router.post("/getItem", dependencies=[Security(verify_api_key)])
async def get_item_route(request: Request):
logger.debug("A scraper getItem request received")
query_params = dict(request.query_params)
url = query_params.pop("url")
ban_list = query_params.pop("ban_list", None)
logger.debug(f"get_item_route: url: {url}, query_params: {query_params}")
if API_KEY_NAME in query_params:
query_params.pop(API_KEY_NAME)
url_metadata = await get_url_metadata(url, ban_list)

item = InfoExtractService(url_metadata, **query_params)
result = await item.get_item()
logger.debug(f"getItem result: {result}")
return result


@router.post("/getUrlMetadata", dependencies=[Security(verify_api_key)])
async def get_url_metadata_route(request: Request):
url = request.query_params.get("url")
ban_list = request.query_params.get("ban_list")

url_metadata = await get_url_metadata(url, ban_list)
return url_metadata.to_dict()
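
Both routes now read ban_list from the query string and pass it through to get_url_metadata. A rough client-side sketch of calling the updated endpoint follows (the base URL, the api_key parameter name, and the ban_list format are assumptions; the real key name comes from API_KEY_NAME in app.config):

import asyncio

import httpx


async def fetch_metadata() -> None:
    params = {
        "url": "https://example.com/some-article",
        "ban_list": "paywalled-site.com",  # format assumed; forwarded as-is to get_url_metadata
        "api_key": "my-secret-key",        # parameter name assumed; getItem pops API_KEY_NAME if present
    }
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.post("/scraper/getUrlMetadata", params=params)
        resp.raise_for_status()
        print(resp.json())  # the dict returned by url_metadata.to_dict()


if __name__ == "__main__":
    asyncio.run(fetch_metadata())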

4 changes: 4 additions & 0 deletions app/routers/telegram_bot.py
@@ -1,3 +1,5 @@
import asyncio

from fastapi import APIRouter, HTTPException
from fastapi.requests import Request

@@ -12,8 +14,10 @@

@router.post("/bot/webhook", dependencies=[Security(verify_telegram_api_header)])
async def telegram_bot_webhook(request: Request, background_tasks: BackgroundTasks):
logger.debug("A telegram bot webhook received")
data = await request.json()
background_tasks.add_task(process_telegram_update, data)
logger.debug(f"telegram bot webhook data received, background task added: {data}")
return "ok"


4 changes: 2 additions & 2 deletions app/services/scrapers/common.py
@@ -55,7 +55,7 @@ def __init__(
    def category(self) -> str:
        return self.source

-    async def get_item(self, metadata_item: dict = None) -> dict:
+    async def get_item(self, metadata_item: Optional[dict] = None) -> dict:
        if self.content_type == "video":
            if not self.kwargs.get("category"):
                self.kwargs["category"] = self.category
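
The only change in this hunk is the annotation: a parameter that defaults to None should be typed Optional[dict] rather than bare dict, since implicit Optional is rejected by modern type checkers. A minimal before/after sketch (the function names here are illustrative only):

from typing import Optional


# Before: the annotation claims `dict`, yet the default is None (implicit Optional)
async def get_item_before(metadata_item: dict = None) -> dict:
    return metadata_item or {}


# After: Optional[dict] makes the None default explicit
async def get_item_after(metadata_item: Optional[dict] = None) -> dict:
    return metadata_item or {}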
@@ -110,5 +110,5 @@ async def process_item(self, metadata_item: dict) -> dict:
            logger.error(f"Error while exporting document: {e}")
        if self.store_database:
            logger.info("store in database")
-            await save_instances(Metadata.construct(**metadata_item))
+            await save_instances(Metadata.model_construct(**metadata_item))
        return metadata_item
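
The second hunk is a Pydantic v2 migration of this call: construct() is deprecated in Pydantic v2 and renamed model_construct(), with the same behavior of building a model instance without running validation. A minimal sketch (the Metadata fields shown are stand-ins, not the project's real schema):

from typing import Optional

from pydantic import BaseModel


class Metadata(BaseModel):  # stand-in for the app's Metadata model; real fields are assumptions
    url: str
    title: Optional[str] = None


# v1 spelling: Metadata.construct(**data)        -> deprecated under Pydantic v2
# v2 spelling: Metadata.model_construct(**data)  -> builds the instance without validation
item = Metadata.model_construct(url="https://example.com", title="Example")
print(item)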
1 change: 1 addition & 0 deletions app/services/telegram_bot/__init__.py
@@ -88,6 +88,7 @@


async def set_webhook() -> bool:
    logger.debug(f"set_webhook: {TELEGRAM_WEBHOOK_URL}, secret_token: {TELEGRAM_BOT_SECRET_TOKEN}")
    return await application.bot.set_webhook(
        url=TELEGRAM_WEBHOOK_URL, secret_token=TELEGRAM_BOT_SECRET_TOKEN
    )
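
The added debug line makes the webhook target and secret visible when the bot registers itself. As a rough illustration of how set_webhook might be wired into application startup (an assumption about this project's startup flow; the diff itself does not show it):

from contextlib import asynccontextmanager

from fastapi import FastAPI

from app.services.telegram_bot import set_webhook  # defined in the file above


@asynccontextmanager
async def lifespan(app: FastAPI):
    # application.bot.set_webhook(...) returns a bool, so a failed registration can abort startup
    if not await set_webhook():
        raise RuntimeError("Failed to register the Telegram webhook")
    yield


app = FastAPI(lifespan=lifespan)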
