diff --git a/backend/backend/__init__.py b/backend/backend/__init__.py
index e69de29bb..fb989c4e6 100644
--- a/backend/backend/__init__.py
+++ b/backend/backend/__init__.py
@@ -0,0 +1,3 @@
+from .celery import app as celery_app
+
+__all__ = ('celery_app',)
diff --git a/backend/backend/celery.py b/backend/backend/celery.py
new file mode 100644
index 000000000..2b1822d3d
--- /dev/null
+++ b/backend/backend/celery.py
@@ -0,0 +1,14 @@
+from __future__ import absolute_import, unicode_literals
+import os
+from celery import Celery
+
+# Set default Django settings module
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
+
+app = Celery('backend')
+
+# Load settings from Django settings with CELERY namespace
+app.config_from_object('django.conf:settings', namespace='CELERY')
+
+# Auto-discover tasks in all installed apps
+app.autodiscover_tasks()
diff --git a/backend/backend/settings.py b/backend/backend/settings.py
index 9de4f024a..b3b0a2adc 100644
--- a/backend/backend/settings.py
+++ b/backend/backend/settings.py
@@ -11,10 +11,14 @@
 """
 
 import os
+import logging
+
 from pathlib import Path
 
 from dotenv import load_dotenv
 
+from celery.schedules import crontab
+
 load_dotenv()
 
 # Build paths inside the project like this: BASE_DIR / 'subdir'.
@@ -30,7 +34,7 @@
 
 # SECURITY WARNING: don't run with debug turned on in production!
 DEBUG = True
 
-ALLOWED_HOSTS = []
+ALLOWED_HOSTS = ["127.0.0.1", "localhost"]
 
 
 # Application definition
@@ -47,8 +51,48 @@
     "authentication",
     "chat",
     "gpt",
+    'django_crontab',
 ]
 
+LOG_DIR = BASE_DIR / "logs"
+LOG_DIR.mkdir(exist_ok=True)
+
+LOGGING = {
+    "version": 1,
+    "disable_existing_loggers": False,
+    "formatters": {
+        "json": {
+            "format": '{"time":"%(asctime)s", "level":"%(levelname)s", "message":"%(message)s"}',
+        },
+    },
+    "handlers": {
+        "file_activity": {
+            "class": "logging.handlers.RotatingFileHandler",
+            "filename": str(LOG_DIR / "activity.log"),
+            "maxBytes": 5 * 1024 * 1024,
+            "backupCount": 3,
+            "encoding": "utf-8",
+            "formatter": "json",
+        },
+    },
+    "loggers": {
+        "activity": {
+            "handlers": ["file_activity"],
+            "level": "INFO",
+            "propagate": False,
+        },
+    },
+}
+
+
+CACHES = {
+    "default": {
+        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
+        "LOCATION": "local-cache",
+        "TIMEOUT": 300,  # default TTL (seconds)
+    }
+}
+
 MIDDLEWARE = [
     "django.middleware.security.SecurityMiddleware",
     "django.contrib.sessions.middleware.SessionMiddleware",
@@ -86,11 +130,23 @@
 
 DATABASES = {
     "default": {
-        "ENGINE": "django.db.backends.sqlite3",
-        "NAME": BASE_DIR / "db.sqlite3",
+        "ENGINE": "django.db.backends.postgresql",
+        "NAME": os.environ.get("POSTGRES_DB"),
+        "USER": os.environ.get("POSTGRES_USER"),
+        "PASSWORD": os.environ.get("POSTGRES_PASSWORD"),
+        "HOST": os.environ.get("POSTGRES_HOST", "127.0.0.1"),
+        "PORT": os.environ.get("POSTGRES_PORT", "5432"),
     }
 }
 
+CELERY_BROKER_URL = 'redis://localhost:6379/0'
+CELERY_BEAT_SCHEDULE = {
+    'cleanup-every-day': {
+        'task': 'chat.tasks.cleanup_old_conversations_task',
+        'schedule': crontab(hour=0, minute=0),  # every day at midnight
+    },
+}
+
 # Password validation
 # https://docs.djangoproject.com/en/4.2/ref/settings/#auth-password-validators
@@ -138,14 +194,18 @@
 DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
 
 CORS_ALLOWED_ORIGINS = [
-    FRONTEND_URL,
+    "http://localhost:3000",
 ]
 
 CORS_ALLOW_CREDENTIALS = True
 
 CSRF_TRUSTED_ORIGINS = [
-    FRONTEND_URL,
+    "http://localhost:3000",
+]
+
+CRONJOBS = [
+    ('0 0 * * *', 'django.core.management.call_command', ['cleanup_conversations']),
 ]
-SESSION_COOKIE_SECURE = True
+SESSION_COOKIE_SECURE = False
 CSRF_COOKIE_SECURE = True
-CSRF_COOKIE_SAMESITE = "None"
+CSRF_COOKIE_SAMESITE = 'Lax'
\ No newline at end of file
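For reference, a minimal sketch of how application code can use the `activity` logger and the LocMem cache configured above; the view layer later in this diff does essentially this. The helper name `expensive_summary` and its payload are illustrative only, not part of the codebase.

```python
import logging
from django.core.cache import cache

# Routed to logs/activity.log by the LOGGING config above.
activity_log = logging.getLogger("activity")

def expensive_summary(user_id):
    # cache.get/set go through the LocMemCache backend declared in CACHES;
    # the 300-second TIMEOUT there is the default TTL when none is passed.
    key = f"summaries:{user_id}"
    cached = cache.get(key)
    if cached is not None:
        return cached
    result = {"user": user_id, "summary": "..."}  # placeholder payload
    cache.set(key, result, timeout=300)
    activity_log.info(f'summary-computed user="{user_id}"')
    return result
```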
diff --git a/backend/celerybeat-schedule.bak b/backend/celerybeat-schedule.bak
new file mode 100644
index 000000000..bcf420b52
--- /dev/null
+++ b/backend/celerybeat-schedule.bak
@@ -0,0 +1,4 @@
+'entries', (0, 405)
+'__version__', (512, 20)
+'tz', (1024, 28)
+'utc_enabled', (1536, 4)
diff --git a/backend/celerybeat-schedule.dat b/backend/celerybeat-schedule.dat
new file mode 100644
index 000000000..2758e5a63
Binary files /dev/null and b/backend/celerybeat-schedule.dat differ
diff --git a/backend/celerybeat-schedule.dir b/backend/celerybeat-schedule.dir
new file mode 100644
index 000000000..bcf420b52
--- /dev/null
+++ b/backend/celerybeat-schedule.dir
@@ -0,0 +1,4 @@
+'entries', (0, 405)
+'__version__', (512, 20)
+'tz', (1024, 28)
+'utc_enabled', (1536, 4)
diff --git a/backend/chat/admin.py b/backend/chat/admin.py
index a4e7d15fc..aa2a094e4 100644
--- a/backend/chat/admin.py
+++ b/backend/chat/admin.py
@@ -1,8 +1,15 @@
 from django.contrib import admin
 from django.utils import timezone
 from nested_admin.nested import NestedModelAdmin, NestedStackedInline, NestedTabularInline
+from .models import Conversation, Message, Role, Version, UploadedFile
 
-from chat.models import Conversation, Message, Role, Version
+
+@admin.register(UploadedFile)
+class UploadedFileAdmin(admin.ModelAdmin):
+    list_display = ["id", "file", "uploaded_at", "checksum", "user"]
+    list_filter = ["uploaded_at", "user"]
+    search_fields = ["file", "user__username"]
+    readonly_fields = ["checksum"]
 
 
 class RoleAdmin(NestedModelAdmin):
@@ -51,7 +58,7 @@ def queryset(self, request, queryset):
 class ConversationAdmin(NestedModelAdmin):
     actions = ["undelete_selected", "soft_delete_selected"]
     inlines = [VersionInline]
-    list_display = ("title", "id", "created_at", "modified_at", "deleted_at", "version_count", "is_deleted", "user")
+    list_display = ("title", "id", "summary", "status", "created_at", "modified_at", "deleted_at", "version_count", "is_deleted", "user")
     list_filter = (DeletedListFilter,)
     ordering = ("-modified_at",)
 
diff --git a/backend/chat/management/commands/cleanup_conversations.py b/backend/chat/management/commands/cleanup_conversations.py
new file mode 100644
index 000000000..51daa551f
--- /dev/null
+++ b/backend/chat/management/commands/cleanup_conversations.py
@@ -0,0 +1,14 @@
+from django.core.management.base import BaseCommand
+from chat.models import Conversation
+from django.utils import timezone
+from datetime import timedelta
+
+class Command(BaseCommand):
+    help = 'Deletes conversations older than 30 days'
+
+    def handle(self, *args, **kwargs):
+        cutoff_date = timezone.now() - timedelta(days=30)
+        old_conversations = Conversation.objects.filter(created_at__lt=cutoff_date)
+        count = old_conversations.count()
+        old_conversations.delete()
+        self.stdout.write(self.style.SUCCESS(f'Deleted {count} old conversations'))
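Besides the cron and Celery schedules, the command can be run ad hoc with `python manage.py cleanup_conversations` or invoked programmatically, which is what the Celery task added later in this diff does. A quick sketch for a shell session or a test:

```python
from io import StringIO
from django.core.management import call_command

# Ad-hoc invocation of the command above (the same call the Celery task makes).
out = StringIO()
call_command("cleanup_conversations", stdout=out)
print(out.getvalue())  # e.g. "Deleted 3 old conversations"
```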
field=models.CharField(default="active", max_length=20), + ), + ] diff --git a/backend/chat/migrations/0003_conversation_summary.py b/backend/chat/migrations/0003_conversation_summary.py new file mode 100644 index 000000000..74a0b6584 --- /dev/null +++ b/backend/chat/migrations/0003_conversation_summary.py @@ -0,0 +1,17 @@ +# Generated by Django 5.0.2 on 2025-10-07 10:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("chat", "0002_conversation_status"), + ] + + operations = [ + migrations.AddField( + model_name="conversation", + name="summary", + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/backend/chat/migrations/0004_uploadedfile.py b/backend/chat/migrations/0004_uploadedfile.py new file mode 100644 index 000000000..f6bab108c --- /dev/null +++ b/backend/chat/migrations/0004_uploadedfile.py @@ -0,0 +1,26 @@ +# Generated by Django 5.0.2 on 2025-10-07 18:33 + +import django.db.models.deletion +import uuid +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("chat", "0003_conversation_summary"), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name="UploadedFile", + fields=[ + ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)), + ("file", models.FileField(upload_to="uploads/")), + ("uploaded_at", models.DateTimeField(auto_now_add=True)), + ("checksum", models.CharField(max_length=64, unique=True)), + ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + ), + ] diff --git a/backend/chat/migrations/0005_uploadedfile_extracted_text.py b/backend/chat/migrations/0005_uploadedfile_extracted_text.py new file mode 100644 index 000000000..ea8ecb411 --- /dev/null +++ b/backend/chat/migrations/0005_uploadedfile_extracted_text.py @@ -0,0 +1,17 @@ +# Generated by Django 5.0.2 on 2025-10-08 11:19 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("chat", "0004_uploadedfile"), + ] + + operations = [ + migrations.AddField( + model_name="uploadedfile", + name="extracted_text", + field=models.TextField(blank=True, null=True), + ), + ] diff --git a/backend/chat/models.py b/backend/chat/models.py index 242788f14..3b51d8fca 100644 --- a/backend/chat/models.py +++ b/backend/chat/models.py @@ -1,5 +1,5 @@ import uuid - +import io from django.db import models from authentication.models import CustomUser @@ -22,6 +22,8 @@ class Conversation(models.Model): ) deleted_at = models.DateTimeField(null=True, blank=True) user = models.ForeignKey(CustomUser, on_delete=models.CASCADE) + status = models.CharField(max_length=20, blank=False, null=False, default="active") + summary = models.TextField(blank=True, null=True) def __str__(self): return self.title @@ -58,8 +60,52 @@ class Meta: ordering = ["created_at"] def save(self, *args, **kwargs): - self.version.conversation.save() super().save(*args, **kwargs) + messages = self.version.messages.all() + summary_text = "".join([message.content for message in messages])[:200] + self.version.conversation.summary = summary_text + self.version.conversation.save() + def __str__(self): return f"{self.role}: {self.content[:20]}..." 
diff --git a/backend/chat/models.py b/backend/chat/models.py
index 242788f14..3b51d8fca 100644
--- a/backend/chat/models.py
+++ b/backend/chat/models.py
@@ -1,5 +1,5 @@
 import uuid
-
+import io
 from django.db import models
 
 from authentication.models import CustomUser
@@ -22,6 +22,8 @@ class Conversation(models.Model):
     )
     deleted_at = models.DateTimeField(null=True, blank=True)
     user = models.ForeignKey(CustomUser, on_delete=models.CASCADE)
+    status = models.CharField(max_length=20, blank=False, null=False, default="active")
+    summary = models.TextField(blank=True, null=True)
 
     def __str__(self):
         return self.title
@@ -58,8 +60,52 @@ class Meta:
         ordering = ["created_at"]
 
     def save(self, *args, **kwargs):
-        self.version.conversation.save()
         super().save(*args, **kwargs)
+        messages = self.version.messages.all()
+        summary_text = "".join([message.content for message in messages])[:200]
+        self.version.conversation.summary = summary_text
+        self.version.conversation.save()
+
     def __str__(self):
         return f"{self.role}: {self.content[:20]}..."
+
+
+
+import hashlib
+from django.core.exceptions import ValidationError
+
+
+class UploadedFile(models.Model):
+    """
+    Model to store uploaded files with a checksum for duplication check.
+    """
+    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
+    user = models.ForeignKey(CustomUser, on_delete=models.CASCADE)
+    file = models.FileField(upload_to="uploads/")
+    uploaded_at = models.DateTimeField(auto_now_add=True)
+    checksum = models.CharField(max_length=64, unique=True)
+    extracted_text = models.TextField(null=True, blank=True)
+
+    def __str__(self):
+        return f"{self.file.name}"
+
+    def clean(self):
+        # Calculate checksum for duplication validation
+        if self.file:
+            sha = hashlib.sha256()
+            for chunk in self.file.chunks():
+                sha.update(chunk)
+            checksum = sha.hexdigest()
+            if UploadedFile.objects.filter(checksum=checksum).exists():
+                raise ValidationError("This file already exists.")
+            self.checksum = checksum
+
+    def save(self, *args, **kwargs):
+        if not self.checksum and self.file:
+            sha = hashlib.sha256()
+            for chunk in self.file.chunks():
+                sha.update(chunk)
+            self.checksum = sha.hexdigest()
+        super().save(*args, **kwargs)
+
diff --git a/backend/chat/serializers.py b/backend/chat/serializers.py
index 0c721c061..0d309f5ed 100644
--- a/backend/chat/serializers.py
+++ b/backend/chat/serializers.py
@@ -1,9 +1,8 @@
+from chat.models import Conversation, Message, Role, Version, UploadedFile
 from django.core.exceptions import ValidationError
 from django.utils import timezone
 from rest_framework import serializers
 
-from chat.models import Conversation, Message, Role, Version
-
 
 def should_serialize(validated_data, field_name) -> bool:
     if validated_data.get(field_name) is not None:
@@ -42,6 +41,16 @@ def to_representation(self, instance):
         return representation
 
 
+
+class UploadedFileSerializer(serializers.ModelSerializer):
+    """Serializer for file uploads with metadata."""
+
+    class Meta:
+        model = UploadedFile
+        fields = ['id', 'file', 'uploaded_at', 'checksum']
+        read_only_fields = ['id', 'uploaded_at', 'checksum']
+
+
 class VersionSerializer(serializers.ModelSerializer):
     messages = MessageSerializer(many=True)
     active = serializers.SerializerMethodField()
diff --git a/backend/chat/tasks.py b/backend/chat/tasks.py
new file mode 100644
index 000000000..070bd7f63
--- /dev/null
+++ b/backend/chat/tasks.py
@@ -0,0 +1,7 @@
+from celery import shared_task
+from django.core.management import call_command
+
+@shared_task
+def cleanup_old_conversations_task():
+    # Calls the cleanup_conversations management command
+    call_command('cleanup_conversations')
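A brief usage sketch for the task above; it assumes a Celery worker is consuming from the Redis broker configured in settings (no result backend is configured in this diff, so fetching task results is out of scope).

```python
from chat.tasks import cleanup_old_conversations_task

# Run synchronously in-process (useful in a shell or a test):
cleanup_old_conversations_task()

# Or queue it for a worker via the Redis broker configured in settings:
result = cleanup_old_conversations_task.delay()
print(result.id)  # task id; result.get() would additionally require a result backend
```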
reverse("file-upload"), + {"file": SimpleUploadedFile("doc.txt", b"django and python are great")}, + format="multipart" + ) + self.assertEqual(r1.status_code, 201) + file_id = r1.data["id"] + + # You can add list, process, delete assertions here as needed + +class TestRAG(APITestCase): + def setUp(self): + Group.objects.get_or_create(name="uploader") + self.uploader = make_user(email="rag@u.com", groups=("uploader",)) + self.client.force_login(self.uploader) + + def test_rag_query(self): + up = self.client.post( + reverse("file-upload"), + {"file": SimpleUploadedFile("doc.txt", b"django and python are great")}, + format="multipart" + ) + self.assertEqual(up.status_code, 201) + fid = up.data["id"] + + # Call your RAG query endpoint + resp = self.client.post( + reverse("rag-query"), + {"file_id": fid, "query": "python"}, + format="json" + ) + self.assertEqual(resp.status_code, 200) + self.assertIn("results", resp.data) + +class TestSummariesCache(APITestCase): + def setUp(self): + Group.objects.get_or_create(name="uploader") + self.u = User.objects.create_user(email="test@test.com", password="1234") + self.client.force_login(self.u) + cache.clear() # Clear cache to avoid AnonymousUser issues + + # Create conversations for this user + self.c1 = Conversation.objects.create(user=self.u, title="First conv") + self.c2 = Conversation.objects.create(user=self.u, title="Second conv") + + def test_cached_per_user_and_query(self): + url = reverse("conversation-summary-list") + r1 = self.client.get(url) + self.assertEqual(r1.status_code, 200) + self.assertEqual(len(r1.data), 2) + + # Subsequent call should hit cache + r2 = self.client.get(url) + self.assertEqual(r2.status_code, 200) + self.assertEqual(r2.data, r1.data) diff --git a/backend/chat/tests/utils.py b/backend/chat/tests/utils.py new file mode 100644 index 000000000..b0eba5579 --- /dev/null +++ b/backend/chat/tests/utils.py @@ -0,0 +1,12 @@ +from django.contrib.auth import get_user_model +from django.contrib.auth.models import Group + +User = get_user_model() + +def make_user(email="u@u.com", password="pass", groups=()): + user = User.objects.create_user(email=email, password=password) + for name in groups: + g, _ = Group.objects.get_or_create(name=name.lower()) # normalize + user.groups.add(g) + user.save() # <- make sure to save after adding groups + return user \ No newline at end of file diff --git a/backend/chat/urls.py b/backend/chat/urls.py index bd8ceadc0..62dcc6b46 100644 --- a/backend/chat/urls.py +++ b/backend/chat/urls.py @@ -2,6 +2,9 @@ from chat import views +from chat.views import ConversationSummaryView + + urlpatterns = [ path("", views.chat_root_view, name="chat_root_view"), path("conversations/", views.get_conversations, name="get_conversations"), @@ -19,4 +22,12 @@ ), path("conversations//delete/", views.conversation_soft_delete, name="conversation_delete"), path("versions//add_message/", views.version_add_message, name="version_add_message"), + path("conversations/summaries/", views.ConversationSummaryView.as_view(), name="conversation-summaries"), + path('files/upload/', views.FileUploadView.as_view(), name="file-upload"), + path('files/', views.FileListView.as_view(), name="file-list"), + path('files//', views.FileDeleteView.as_view(), name="file-delete"), + path('rag/query/', views.rag_query, name="rag-query"), + path("files//process/", views.process_file, name="file-process"), + path("conversation-summary/", ConversationSummaryView.as_view(), name="conversation-summary-list"), + ] diff --git a/backend/chat/views.py 
diff --git a/backend/chat/views.py b/backend/chat/views.py
index 0d18f7a69..0e21d958b 100644
--- a/backend/chat/views.py
+++ b/backend/chat/views.py
@@ -1,11 +1,25 @@
+import os
+import io
+import logging
+activity_log = logging.getLogger("activity")  # JSON "activity" logger configured in settings.LOGGING
+
+from django.core.cache import cache
+from django.http import JsonResponse
 from django.contrib.auth.decorators import login_required
 from django.utils import timezone
+from django.core.files.base import ContentFile
+from django.core.files.storage import default_storage
+
+from rest_framework.permissions import BasePermission
+from rest_framework.parsers import MultiPartParser, FormParser
 from rest_framework import status
-from rest_framework.decorators import api_view
+from rest_framework.decorators import api_view, permission_classes
 from rest_framework.response import Response
+from rest_framework import generics, filters, pagination
+from rest_framework.permissions import IsAuthenticated
 
-from chat.models import Conversation, Message, Version
-from chat.serializers import ConversationSerializer, MessageSerializer, TitleSerializer, VersionSerializer
+from chat.models import Conversation, Message, Version, UploadedFile
+from chat.serializers import ConversationSerializer, MessageSerializer, TitleSerializer, VersionSerializer, UploadedFileSerializer
 
 from chat.utils.branching import make_branched_conversation
 
@@ -147,7 +161,6 @@ def conversation_add_message(request, pk):
     serializer = MessageSerializer(data=request.data)
     if serializer.is_valid():
         serializer.save(version=version)
-        # return Response(serializer.data, status=status.HTTP_201_CREATED)
         return Response(
             {
                 "message": serializer.data,
@@ -171,7 +184,6 @@ def conversation_add_version(request, pk):
     except Message.DoesNotExist:
         return Response({"detail": "Root message not found"}, status=status.HTTP_404_NOT_FOUND)
 
-    # Check if root message belongs to the same conversation
     if root_message.version.conversation != conversation:
         return Response({"detail": "Root message not part of the conversation"}, status=status.HTTP_400_BAD_REQUEST)
 
@@ -179,14 +191,12 @@
         conversation=conversation, parent_version=root_message.version, root_message=root_message
     )
 
-    # Copy messages before root_message to new_version
     messages_before_root = Message.objects.filter(version=version, created_at__lt=root_message.created_at)
     new_messages = [
         Message(content=message.content, role=message.role, version=new_version) for message in messages_before_root
     ]
     Message.objects.bulk_create(new_messages)
 
-    # Set the new version as the current version
     conversation.active_version = new_version
     conversation.save()
 
@@ -230,3 +240,137 @@ def version_add_message(request, pk):
         status=status.HTTP_201_CREATED,
     )
     return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+
+@api_view(["POST"])
+@permission_classes([IsAuthenticated])
+def rag_query(request):
+    query = (request.data.get("query") or "").strip()
+    top_k = int(request.data.get("top_k") or 3)
+    if not query:
+        return Response({"detail": "Query is required"}, status=400)
+
+    qs = UploadedFile.objects.filter(user=request.user)\
+        .exclude(extracted_text__isnull=True).exclude(extracted_text="")
+
+    hits = []
+    for uf in qs:
+        text = uf.extracted_text or ""
+        pos = text.lower().find(query.lower())
+        if pos != -1:
+            start = max(0, pos - 200)
+            end = min(len(text), pos + 200)
+            hits.append({
+                "file_id": str(uf.id),
+                "file": uf.file.name,
+                "snippet": text[start:end],
+            })
+    return Response({"query": query, "results": hits[:top_k]}, status=200)
+
+
+class IsUploaderRole(BasePermission):
+    allowed_groups = {"uploader", "admin"}
+
+    def has_permission(self, request, view):
+        user = request.user
+        if not user or not user.is_authenticated:
+            return False
+        if getattr(user, "is_superuser", False):
+            return True
+        user_groups = set(g.lower() for g in user.groups.values_list("name", flat=True))
+        return bool(self.allowed_groups & user_groups)
+
+
+@api_view(["POST"])
+@permission_classes([IsAuthenticated, IsUploaderRole])
+def process_file(request, pk):
+    try:
+        uf = UploadedFile.objects.get(pk=pk, user=request.user)
+    except UploadedFile.DoesNotExist:
+        return Response({"detail": "Not found"}, status=404)
+
+    name = uf.file.name.lower()
+    if name.endswith(".txt"):
+        uf.extracted_text = uf.file.read().decode("utf-8", errors="ignore")
+    else:
+        uf.extracted_text = "(processing not implemented for this file type)"
+    activity_log.info(
+        f'process user="{request.user.id}" email="{request.user.email}" '
+        f'file="{uf.file.name}" id="{uf.id}" type="{"txt" if name.endswith(".txt") else "other"}"'
+    )
+    uf.save()
+    return Response({"id": str(uf.id), "extracted": bool(uf.extracted_text)}, status=200)
+
+
+class ConversationPagination(pagination.PageNumberPagination):
+    page_size = 10
+    page_size_query_param = "page_size"
+    max_page_size = 100
+
+
+class ConversationSummaryView(generics.ListAPIView):
+    serializer_class = ConversationSerializer
+    pagination_class = ConversationPagination
+    filter_backends = [filters.SearchFilter, filters.OrderingFilter]
+    search_fields = ["title", "summary"]
+    ordering_fields = ["created_at", "modified_at", "title"]
+    ordering = ["-modified_at"]
+
+    def get_queryset(self):
+        queryset = Conversation.objects.filter(user=self.request.user, deleted_at__isnull=True)
+        start_date = self.request.query_params.get('start_date')
+        end_date = self.request.query_params.get('end_date')
+        if start_date:
+            queryset = queryset.filter(created_at__gte=start_date)
+        if end_date:
+            queryset = queryset.filter(created_at__lte=end_date)
+        return queryset
+
+    def list(self, request, *args, **kwargs):
+        cache_key = f"summaries:{request.user.id}:{request.get_full_path()}"
+        cached = cache.get(cache_key)
+        if cached is not None:
+            return Response(cached, status=200)
+
+        response = super().list(request, *args, **kwargs)
+        cache.set(cache_key, response.data, timeout=300)  # 5 minutes
+        return response
+
+
+class FileUploadView(generics.CreateAPIView):
+    serializer_class = UploadedFileSerializer
+    parser_classes = [FormParser, MultiPartParser]
+    permission_classes = [IsAuthenticated, IsUploaderRole]
+
+    def perform_create(self, serializer):
+        instance = serializer.save(user=self.request.user)
+        activity_log.info(
+            f'upload user="{self.request.user.id}" email="{self.request.user.email}" '
+            f'file="{instance.file.name}" id="{instance.id}" checksum="{instance.checksum}"'
+        )
+
+
+class FileListView(generics.ListAPIView):
+    serializer_class = UploadedFileSerializer
+    permission_classes = [IsAuthenticated, IsUploaderRole]
+
+    def get_queryset(self):
+        activity_log.info(f'list user="{self.request.user.id}" email="{self.request.user.email}"')
+        return UploadedFile.objects.filter(user=self.request.user).order_by("-uploaded_at")
+
+
+class FileDeleteView(generics.DestroyAPIView):
+    serializer_class = UploadedFileSerializer
+    permission_classes = [IsAuthenticated, IsUploaderRole]
+
+    def get_queryset(self):
+        return UploadedFile.objects.filter(user=self.request.user)
+
+    def perform_destroy(self, instance):
+        activity_log.info(
+            f'delete user="{self.request.user.id}" email="{self.request.user.email}" '
+            f'file="{instance.file.name}" id="{instance.id}"'
+        )
+        if instance.file and default_storage.exists(instance.file.name):
+            default_storage.delete(instance.file.name)
+        instance.delete()
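A client-side sketch tying the new endpoints together, using the URL names registered in chat/urls.py above. The `uploader` user object is assumed to already exist and belong to the "uploader" group, and the payloads and filter values are illustrative.

```python
from django.urls import reverse
from django.core.files.uploadedfile import SimpleUploadedFile
from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=uploader)  # assumed: a user in the "uploader" group

# Upload, then extract text so rag_query has something to search.
up = client.post(
    reverse("file-upload"),
    {"file": SimpleUploadedFile("notes.txt", b"django and celery notes")},
    format="multipart",
)
client.post(reverse("file-process", kwargs={"pk": up.data["id"]}))

# Naive substring retrieval over extracted_text, as implemented in rag_query.
hits = client.post(reverse("rag-query"), {"query": "celery", "top_k": 2}, format="json")

# Paginated, cached summaries with search/ordering/date filters from ConversationSummaryView.
summaries = client.get(
    reverse("conversation-summary-list"),
    {"search": "notes", "ordering": "-created_at", "start_date": "2025-01-01", "page_size": 5},
)
```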
user="{self.request.user.id}" email="{self.request.user.email}" ' + f'file="{instance.file.name}" id="{instance.id}"' + ) + if instance.file and default_storage.exists(instance.file.name): + default_storage.delete(instance.file.name) + instance.delete() diff --git a/backend/data.json b/backend/data.json new file mode 100644 index 000000000..a081b3ae1 --- /dev/null +++ b/backend/data.json @@ -0,0 +1,2 @@ +[ +] diff --git a/backend/db.json b/backend/db.json new file mode 100644 index 000000000..d9bacdd4f Binary files /dev/null and b/backend/db.json differ diff --git a/backend/dependencies.txt b/backend/dependencies.txt index 2363ba87e..d54aeb420 100644 --- a/backend/dependencies.txt +++ b/backend/dependencies.txt @@ -50,3 +50,4 @@ urllib3==2.0.5 uvicorn==0.27.1 virtualenv==20.24.5 yarl==1.9.2 +Pillow==10.0.1 diff --git a/backend/manage.py b/backend/manage.py index 1917e46e5..648660544 100644 --- a/backend/manage.py +++ b/backend/manage.py @@ -12,7 +12,7 @@ def main(): except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " + "available on your PYTHONPATH environment variable? Did ou " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) diff --git a/backend/uploads/Festive_Diwali_Celebration_Design.png b/backend/uploads/Festive_Diwali_Celebration_Design.png new file mode 100644 index 000000000..5eb64a923 Binary files /dev/null and b/backend/uploads/Festive_Diwali_Celebration_Design.png differ diff --git a/backend/uploads/Festive_Diwali_Celebration_Design_JVVKcbY.png b/backend/uploads/Festive_Diwali_Celebration_Design_JVVKcbY.png new file mode 100644 index 000000000..5eb64a923 Binary files /dev/null and b/backend/uploads/Festive_Diwali_Celebration_Design_JVVKcbY.png differ diff --git a/backend/uploads/Festive_Diwali_Celebration_Design_sx5XcRd.png b/backend/uploads/Festive_Diwali_Celebration_Design_sx5XcRd.png new file mode 100644 index 000000000..5eb64a923 Binary files /dev/null and b/backend/uploads/Festive_Diwali_Celebration_Design_sx5XcRd.png differ diff --git a/backend/uploads/demo.py b/backend/uploads/demo.py new file mode 100644 index 000000000..fe751a137 --- /dev/null +++ b/backend/uploads/demo.py @@ -0,0 +1,12 @@ +def bruteforce(arr,n,target): + for i in range(n): + cursum=0 + for j in range(i,n): + cursum += arr[j] + if cursum == target: + print(arr[i : j+1]) + +arr=list(map(int,input().split())) +target=int(input()) +n=len(arr) +bruteforce(arr,n,target) diff --git a/db.json b/db.json new file mode 100644 index 000000000..e69de29bb diff --git a/frontend/redux/auth.js b/frontend/redux/auth.js index 4ddcfcb1a..d23fafbb7 100644 --- a/frontend/redux/auth.js +++ b/frontend/redux/auth.js @@ -69,12 +69,12 @@ const authSlice = createSlice({ console.log('postLoginThunk.fulfilled', action.payload); // delete this line later const email = action.payload.email; state.user = email; - Cookies.set('user', email, {sameSite: 'None', secure: true}); + Cookies.set('user', email, {sameSite: 'Lax', secure: false}); }) .addCase(postLogoutThunk.fulfilled, (state, action) => { console.log('postLogoutThunk.fulfilled', action.payload); // delete this line later state.user = null; - Cookies.remove('user', {sameSite: 'None', secure: true}); + Cookies.remove('user', {sameSite: 'Lax', secure: false}); }); } });