Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/backend.yml
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ jobs:
runs-on: ubuntu-latest
services:
postgres:
image: postgres:15
image: pgvector/pgvector:pg16
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password
Expand Down
55 changes: 55 additions & 0 deletions backend/background/jobs/club_embeddings.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
package jobs

import (
"context"
"fmt"
"log/slog"
"time"

"github.com/GenerateNU/sac/backend/background"
"github.com/GenerateNU/sac/backend/entities/models"
"github.com/GenerateNU/sac/backend/search"

"github.com/GenerateNU/sac/backend/constants"
)

// ClubEmbeddings returns a background job that periodically backfills embeddings
// for clubs that did not receive them when created or updated. This can happen
// with mock data (uploaded to postgres directly, bypassing the app) or when the
// OpenAI API is unavailable (service outage, bad API key, etc.).
//
// Each tick processes at most one club inside its own transaction, using
// FOR UPDATE SKIP LOCKED so concurrent workers never pick the same row.
// The job stops when ctx is canceled.
func (j *Jobs) ClubEmbeddings(ctx context.Context) background.JobFunc {
	return func() {
		t := time.NewTicker(constants.EMBEDDINGS_GENERATION_INTERVAL)
		defer t.Stop()

		for {
			select {
			case <-ctx.Done():
				// Application is shutting down; exit instead of ticking forever.
				return
			case <-t.C:
				j.backfillOneClubEmbedding(ctx)
			}
		}
	}
}

// backfillOneClubEmbedding claims a single club without an embedding, generates
// and stores the embedding, and commits. Any failure rolls the transaction back
// and is logged; the next tick retries.
func (j *Jobs) backfillOneClubEmbedding(ctx context.Context) {
	tx := j.db.WithContext(ctx).Begin()
	defer func() {
		// Keep the job alive if embedding generation panics, but release the lock.
		if r := recover(); r != nil {
			tx.Rollback()
			slog.Error(fmt.Sprintf("panic while generating club embeddings: %v", r))
		}
	}()

	var club models.Club
	if err := tx.Raw("SELECT * FROM clubs WHERE embedding IS NULL FOR UPDATE SKIP LOCKED LIMIT 1").Scan(&club).Error; err != nil {
		tx.Rollback()
		slog.Error(fmt.Sprintf("selecting club without embedding: %v", err))
		return
	}

	// Scan leaves the struct zeroed when no row matched; nothing to do this tick.
	if club.Name == "" && club.Preview == "" && club.Description == "" {
		tx.Rollback()
		return
	}

	slog.Info(fmt.Sprintf("Generating embeddings for club '%s' (%s)", club.Name, club.ID.String()))

	if err := search.UpsertClubEmbedding(tx, j.search, &club); err != nil {
		tx.Rollback()
		slog.Error(fmt.Sprintf("upserting club embedding: %v", err))
		return
	}

	if err := tx.Commit().Error; err != nil {
		slog.Error(fmt.Sprintf("committing club embedding: %v", err))
	}
}
55 changes: 55 additions & 0 deletions backend/background/jobs/event_embeddings.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
package jobs

import (
"context"
"fmt"
"log/slog"
"time"

"github.com/GenerateNU/sac/backend/background"
"github.com/GenerateNU/sac/backend/entities/models"
"github.com/GenerateNU/sac/backend/search"

"github.com/GenerateNU/sac/backend/constants"
)

// EventEmbeddings returns a background job that periodically backfills embeddings
// for events that did not receive them when created or updated. This can happen
// with mock data (uploaded to postgres directly, bypassing the app) or when the
// OpenAI API is unavailable (service outage, bad API key, etc.).
//
// Each tick processes at most one event inside its own transaction, using
// FOR UPDATE SKIP LOCKED so concurrent workers never pick the same row.
// The job stops when ctx is canceled.
func (j *Jobs) EventEmbeddings(ctx context.Context) background.JobFunc {
	return func() {
		t := time.NewTicker(constants.EMBEDDINGS_GENERATION_INTERVAL)
		defer t.Stop()

		for {
			select {
			case <-ctx.Done():
				// Application is shutting down; exit instead of ticking forever.
				return
			case <-t.C:
				j.backfillOneEventEmbedding(ctx)
			}
		}
	}
}

// backfillOneEventEmbedding claims a single event without an embedding, generates
// and stores the embedding, and commits. Any failure rolls the transaction back
// and is logged; the next tick retries.
func (j *Jobs) backfillOneEventEmbedding(ctx context.Context) {
	tx := j.db.WithContext(ctx).Begin()
	defer func() {
		// Keep the job alive if embedding generation panics, but release the lock.
		if r := recover(); r != nil {
			tx.Rollback()
			slog.Error(fmt.Sprintf("panic while generating event embeddings: %v", r))
		}
	}()

	var event models.Event
	if err := tx.Raw("SELECT * FROM events WHERE embedding IS NULL FOR UPDATE SKIP LOCKED LIMIT 1").Scan(&event).Error; err != nil {
		tx.Rollback()
		slog.Error(fmt.Sprintf("selecting event without embedding: %v", err))
		return
	}

	// Scan leaves the struct zeroed when no row matched; nothing to do this tick.
	// (Original comment said "empty club" — copy-paste from club_embeddings.go.)
	if event.Name == "" && event.Preview == "" && event.Description == "" {
		tx.Rollback()
		return
	}

	slog.Info(fmt.Sprintf("Generating embeddings for event '%s' (%s)", event.Name, event.ID.String()))

	if err := search.UpsertEventEmbedding(tx, j.search, &event); err != nil {
		tx.Rollback()
		slog.Error(fmt.Sprintf("upserting event embedding: %v", err))
		return
	}

	if err := tx.Commit().Error; err != nil {
		slog.Error(fmt.Sprintf("committing event embedding: %v", err))
	}
}
6 changes: 4 additions & 2 deletions backend/background/jobs/jobs.go
Original file line number Diff line number Diff line change
@@ -1,15 +1,17 @@
package jobs

import (
"github.com/GenerateNU/sac/backend/config"
"github.com/GenerateNU/sac/backend/integrations/email"
"gorm.io/gorm"
)

// Jobs holds the shared dependencies used by the background jobs.
type Jobs struct {
db *gorm.DB // database handle every job runs its queries/transactions against
emailer email.Emailer // email integration; NOTE(review): not populated by New in this diff — confirm it is set elsewhere
search *config.SearchSettings // search/OpenAI settings used for embedding generation
}

func New(db *gorm.DB) *Jobs {
return &Jobs{db: db}
func New(db *gorm.DB, settings *config.Settings) *Jobs {
return &Jobs{db: db, search: &settings.Search}
}
19 changes: 18 additions & 1 deletion backend/config/search.go
Original file line number Diff line number Diff line change
@@ -1,5 +1,22 @@
package config

import m "github.com/garrettladley/mattress"

// SearchSettings holds configuration for the search/embeddings integration.
// The OpenAI API key is wrapped in a Secret so it cannot be accidentally
// logged or serialized.
type SearchSettings struct {
	URI          string `env:"URI"`
	OpenAIApiKey *m.Secret[string]
}

// intermediateSearchSettings is the env-parsed form of SearchSettings, holding
// the OpenAI key as plaintext before it is wrapped in a Secret.
//
// Bug fix: the URI field was missing here, so the SAC_SEARCH_URI value was
// parsed into nothing and SearchSettings.URI ended up always empty once
// settings.go switched to parsing this intermediate type.
type intermediateSearchSettings struct {
	URI          string `env:"URI"`
	OpenAIApiKey string `env:"OPENAI_API_KEY"`
}

// into converts the env-parsed settings into SearchSettings, wrapping the
// API key in a Secret and carrying the URI through.
func (i *intermediateSearchSettings) into() (*SearchSettings, error) {
	openAiApiKey, err := m.NewSecret(i.OpenAIApiKey)
	if err != nil {
		return nil, err
	}

	return &SearchSettings{
		URI:          i.URI,
		OpenAIApiKey: openAiApiKey,
	}, nil
}
9 changes: 7 additions & 2 deletions backend/config/settings.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ type intermediateSettings struct {
Google intermediateGoogleOAuthSettings `envPrefix:"SAC_GOOGLE_OAUTH_"`
MicrosoftWeb intermediateMicrosoftWebOAuthSettings `envPrefix:"SAC_MICROSOFT_OAUTH_WEB_"`
MicrosoftMobile intermediateMicrosoftMobileOAuthSettings `envPrefix:"SAC_MICROSOFT_OAUTH_MOBILE_"`
Search SearchSettings `envPrefix:"SAC_SEARCH_"`
Search intermediateSearchSettings `envPrefix:"SAC_SEARCH_"`
}

func (i *intermediateSettings) into() (*Settings, error) {
Expand Down Expand Up @@ -98,6 +98,11 @@ func (i *intermediateSettings) into() (*Settings, error) {
return nil, err
}

search, err := i.Search.into()
if err != nil {
return nil, err
}

return &Settings{
Application: i.Application,
DBCache: *dbCache,
Expand All @@ -112,7 +117,7 @@ func (i *intermediateSettings) into() (*Settings, error) {
MicrosoftMobile: *microsoftMobile,
AWS: *aws,
Resend: *resend,
Search: i.Search,
Search: *search,
},
}, nil
}
2 changes: 2 additions & 0 deletions backend/constants/jobs.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,6 @@ const (
DELETE_EXPIRED_VERIFICATION_LIMIT int = 100
EMAIL_SENDER_INTERVAL time.Duration = 5 * time.Second
MAX_EMAIL_ATTEMPTS int = 3

EMBEDDINGS_GENERATION_INTERVAL time.Duration = 1 * time.Second
)
9 changes: 0 additions & 9 deletions backend/constants/search.go

This file was deleted.

24 changes: 12 additions & 12 deletions backend/database/db.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import (

"github.com/GenerateNU/sac/backend/config"
"github.com/GenerateNU/sac/backend/constants"
"github.com/GenerateNU/sac/backend/database/cache"
"github.com/GenerateNU/sac/backend/entities/models"
"gorm.io/driver/postgres"
"gorm.io/gorm"
Expand All @@ -18,17 +17,18 @@ func ConfigureDB(settings config.Settings) (*gorm.DB, error) {
return nil, err
}

cachePlugin := &cache.Caches{
Conf: &cache.Config{
Easer: true,
Cacher: cache.NewRedisCacher(settings.DBCache),
TTL: constants.DB_CACHE_TTL,
},
}

if err := db.Use(cachePlugin); err != nil {
return nil, err
}
// TODO(review): the Redis cache plugin is disabled below because it currently errors on every request — re-enable once that is fixed.
// cachePlugin := &cache.Caches{
// Conf: &cache.Config{
// Easer: true,
// Cacher: cache.NewRedisCacher(settings.DBCache),
// TTL: constants.DB_CACHE_TTL,
// },
// }

// if err := db.Use(cachePlugin); err != nil {
// return nil, err
// }

if err := CreateSuperUserIfNotExists(settings.SuperUser, db); err != nil {
return nil, err
Expand Down
43 changes: 1 addition & 42 deletions backend/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,48 +39,7 @@ services:
volumes:
- redis-limiter-data:/data

opensearch-node1:
image: opensearchproject/opensearch:latest
container_name: opensearch-node1
environment:
- cluster.name=opensearch-cluster
- node.name=opensearch-node1
- discovery.type=single-node
- bootstrap.memory_lock=true # along with the memlock settings below, disables swapping
- "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" # minimum and maximum Java heap size, recommend setting both to 50% of system RAM
- DISABLE_SECURITY_PLUGIN=true #
ulimits:
memlock:
soft: -1
hard: -1
nofile:
soft: 65536 # maximum number of open files for the OpenSearch user, set to at least 65536 on modern systems
hard: 65536
volumes:
- opensearch-data1:/usr/share/opensearch/data
ports:
- 9200:9200
- 9600:9600 # required for Performance Analyzer
networks:
- opensearch-net
opensearch-dashboards:
image: opensearchproject/opensearch-dashboards:latest
container_name: opensearch-dashboards
ports:
- 5601:5601
expose:
- "5601"
environment:
OPENSEARCH_HOSTS: '["http://opensearch-node1:9200"]'
DISABLE_SECURITY_DASHBOARDS_PLUGIN: true
networks:
- opensearch-net

volumes:
redis-db-cache-data:
redis-session-data:
redis-limiter-data:
opensearch-data1:

networks:
opensearch-net:
redis-limiter-data:
31 changes: 30 additions & 1 deletion backend/entities/clubs/base/service.go
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
package base

import (
"log/slog"

"github.com/GenerateNU/sac/backend/entities/clubs"
"github.com/GenerateNU/sac/backend/entities/models"
"github.com/GenerateNU/sac/backend/errs"
"github.com/GenerateNU/sac/backend/search"
"github.com/GenerateNU/sac/backend/types"
"github.com/GenerateNU/sac/backend/utilities"
"github.com/garrettladley/fiberpaginate"
Expand Down Expand Up @@ -40,7 +43,20 @@ func (c *ClubService) CreateClub(userID uuid.UUID, clubBody CreateClubRequestBod
return nil, err
}

return CreateClub(c.DB, userID, *club)
result, err := CreateClub(c.DB, userID, *club)
if err != nil {
return nil, err
}

go func() {
err := search.UpsertClubEmbedding(c.DB, c.Search, result)

if err != nil {
slog.Error("Upsert club embedding failed")
}
}()

return result, err
}

func (c *ClubService) GetClub(id string) (*models.Club, error) {
Expand Down Expand Up @@ -71,6 +87,19 @@ func (c *ClubService) UpdateClub(id string, clubBody UpdateClubRequestBody) (*mo
return nil, err
}

result, err := UpdateClub(c.DB, *idAsUUID, *club)
if err != nil {
return nil, err
}

go func() {
err := search.UpsertClubEmbedding(c.DB, c.Search, result)

if err != nil {
slog.Error("Upsert club embedding failed")
}
}()

return UpdateClub(c.DB, *idAsUUID, *club)
}

Expand Down
Loading