diff --git a/.golangci.yml b/.golangci.yml
index 80daf90..6600d3d 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -1,40 +1,31 @@
-version: "2"
 run:
+  timeout: 8m
   allow-parallel-runners: true
+
 linters:
   enable:
     - dupl
     - gocyclo
-    - lll
     - misspell
     - nakedret
     - prealloc
     - unconvert
     - unparam
-  exclusions:
-    generated: lax
-    rules:
-      - linters:
-          - lll
-        path: api/*
-      - linters:
-          - dupl
-        path: api/v1/store_test.go
-      - linters:
-          - dupl
-          - lll
-        path: internal/*
-    paths:
-      - third_party$
-      - builtin$
-      - examples$
-formatters:
-  enable:
-    - gofmt
-    - goimports
-  exclusions:
-    generated: lax
-    paths:
-      - third_party$
-      - builtin$
-      - examples$
+
+issues:
+  exclude-dirs:
+    - third_party
+    - builtin
+    - examples
+  exclude-rules:
+    - path: api/
+      linters:
+        - lll
+    - path: api/v1/store_test.go
+      linters:
+        - dupl
+    - path: internal/
+      linters:
+        - dupl
+        - lll
+  exclude-generated: lax
diff --git a/api/v1/store.go b/api/v1/store.go
index 8556bf6..66d2057 100644
--- a/api/v1/store.go
+++ b/api/v1/store.go
@@ -178,6 +178,9 @@ type OpensearchSpec struct {
 
 	// +kubebuilder:default={prefix: sw, shards: 3, replicas: 3}
 	Index OpensearchIndexSpec `json:"index,omitempty"`
+
+	// +kubebuilder:default=false
+	CleanupOnDeletion bool `json:"cleanupOnDeletion,omitempty"`
 }
 
 type OpensearchIndexSpec struct {
diff --git a/api/v1/zz_generated.deepcopy.go b/api/v1/zz_generated.deepcopy.go
index 163103b..ab49f7e 100644
--- a/api/v1/zz_generated.deepcopy.go
+++ b/api/v1/zz_generated.deepcopy.go
@@ -440,6 +440,34 @@ func (in *NetworkSpec) DeepCopyInto(out *NetworkSpec) {
 			(*out)[key] = val
 		}
 	}
+	if in.IngressAnnotations != nil {
+		in, out := &in.IngressAnnotations, &out.IngressAnnotations
+		*out = make(map[string]string, len(*in))
+		for key, val := range *in {
+			(*out)[key] = val
+		}
+	}
+	if in.IngressLabels != nil {
+		in, out := &in.IngressLabels, &out.IngressLabels
+		*out = make(map[string]string, len(*in))
+		for key, val := range *in {
+			(*out)[key] = val
+		}
+	}
+	if in.GatewayAnnotations != nil {
+		in, out := &in.GatewayAnnotations, &out.GatewayAnnotations
+		*out = make(map[string]string, len(*in))
+		for key, val := range *in {
+			(*out)[key] = val
+		}
+	}
+	if in.GatewayLabels != nil {
+		in, out := &in.GatewayLabels, &out.GatewayLabels
+		*out = make(map[string]string, len(*in))
+		for key, val := range *in {
+			(*out)[key] = val
+		}
+	}
 }
 
 // DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new NetworkSpec.
diff --git a/internal/controller/store_controller.go b/internal/controller/store_controller.go
index ac73f96..0a9697b 100644
--- a/internal/controller/store_controller.go
+++ b/internal/controller/store_controller.go
@@ -15,6 +15,7 @@ import (
 	"github.com/shopware/shopware-operator/internal/job"
 	"github.com/shopware/shopware-operator/internal/k8s"
 	"github.com/shopware/shopware-operator/internal/logging"
+	"github.com/shopware/shopware-operator/internal/opensearch"
 	"github.com/shopware/shopware-operator/internal/pdb"
 	"github.com/shopware/shopware-operator/internal/secret"
 	"github.com/shopware/shopware-operator/internal/service"
@@ -33,6 +34,7 @@ import (
 	ctrl "sigs.k8s.io/controller-runtime"
 	"sigs.k8s.io/controller-runtime/pkg/builder"
 	"sigs.k8s.io/controller-runtime/pkg/client"
+	"sigs.k8s.io/controller-runtime/pkg/controller/controllerutil"
 	"sigs.k8s.io/controller-runtime/pkg/handler"
 	"sigs.k8s.io/controller-runtime/pkg/predicate"
 	"sigs.k8s.io/controller-runtime/pkg/reconcile"
@@ -40,9 +42,10 @@ import (
 )
 
 var (
-	noRequeue    = ctrl.Result{}
-	shortRequeue = ctrl.Result{RequeueAfter: 5 * time.Second}
-	longRequeue  = ctrl.Result{RequeueAfter: 2 * time.Minute}
+	noRequeue      = ctrl.Result{}
+	shortRequeue   = ctrl.Result{RequeueAfter: 5 * time.Second}
+	longRequeue    = ctrl.Result{RequeueAfter: 2 * time.Minute}
+	storeFinalizer = "shop.shopware.com/opensearch-cleanup"
 )
 
 // StoreReconciler reconciles a Store object
@@ -154,10 +157,50 @@ func (r *StoreReconciler) Reconcile(
 		return rr, nil
 	}
 
-	// We don't need yet finalizers
-	// if store.ObjectMeta.DeletionTimestamp != nil {
-	// 	return rr, r.applyFinalizers(ctx, store)
-	// }
+	// Handle deletion with finalizers
+	if store.ObjectMeta.DeletionTimestamp != nil {
+		if controllerutil.ContainsFinalizer(store, storeFinalizer) {
+			// Perform opensearch cleanup
+			log.Info("Store is being deleted, running opensearch cleanup finalizer")
+			if err := opensearch.CleanupResources(ctx, r.Client, store); err != nil {
+				log.Errorw("Failed to cleanup opensearch resources", zap.Error(err))
+				return ctrl.Result{RequeueAfter: 30 * time.Second}, err
+			}
+
+			// Remove finalizer once cleanup is complete
+			log.Info("Opensearch cleanup completed, removing finalizer")
+			controllerutil.RemoveFinalizer(store, storeFinalizer)
+			if err := r.Update(ctx, store); err != nil {
+				log.Errorw("Failed to remove finalizer", zap.Error(err))
+				return ctrl.Result{}, err
+			}
+		}
+		return ctrl.Result{}, nil
+	}
+
+	// Add finalizer if opensearch cleanup is enabled and finalizer is not present
+	if store.Spec.OpensearchSpec.Enabled && store.Spec.OpensearchSpec.CleanupOnDeletion {
+		if !controllerutil.ContainsFinalizer(store, storeFinalizer) {
+			log.Info("Adding opensearch cleanup finalizer to store")
+			controllerutil.AddFinalizer(store, storeFinalizer)
+			if err := r.Update(ctx, store); err != nil {
+				log.Errorw("Failed to add finalizer", zap.Error(err))
+				return ctrl.Result{}, err
+			}
+			return ctrl.Result{Requeue: true}, nil
+		}
+	} else {
+		// Remove finalizer if opensearch cleanup is disabled but finalizer is present
+		if controllerutil.ContainsFinalizer(store, storeFinalizer) {
+			log.Info("Opensearch cleanup is disabled, removing finalizer from store")
+			controllerutil.RemoveFinalizer(store, storeFinalizer)
+			if err := r.Update(ctx, store); err != nil {
+				log.Errorw("Failed to remove finalizer", zap.Error(err))
+				return ctrl.Result{}, err
+			}
+			return ctrl.Result{Requeue: true}, nil
+		}
+	}
 
 	if err := r.doReconcile(ctx, store); err != nil {
 		log.Errorw("reconcile", zap.Error(err))
@@ -298,43 +341,6 @@ func (r *StoreReconciler) doReconcile(
 	return nil
 }
 
-// func (r *StoreReconciler) applyFinalizers(ctx context.Context, store *v1.Store) error {
-// 	log := logging.FromContext(ctx).WithName(store.Name)
-// 	log.Info("Applying finalizers")
-//
-// 	var err error
-//
-// 	// finalizers := []string{}
-// 	// for _, f := range store.GetFinalizers() {
-// 	// 	switch f {
-// 	// 	case "delete-mysql-pods-in-order":
-// 	// 		err = r.deleteMySQLPods(ctx, store)
-// 	// 	case "delete-ssl":
-// 	// 		err = r.deleteCerts(ctx, store)
-// 	// 	}
-// 	//
-// 	// 	if err != nil {
-// 	// 		switch err {
-// 	// 		case psrestore.ErrWaitingTermination:
-// 	// 			log.Info("waiting for pods to be deleted", "finalizer", f)
-// 	// 		default:
-// 	// 			log.Errorw("failed to run finalizer", "finalizer", f)
-// 	// 		}
-// 	// 		finalizers = append(finalizers, f)
-// 	// 	}
-// 	// }
-//
-// 	//store.SetFinalizers(finalizers)
-//
-// 	return k8sretry.RetryOnConflict(k8sretry.DefaultRetry, func() error {
-// 		err = r.Client.Update(ctx, store)
-// 		if err != nil {
-// 			log.Errorw("Client.Update failed")
-// 		}
-// 		return err
-// 	})
-// }
-
 func (r *StoreReconciler) ensureAppSecrets(ctx context.Context, store *v1.Store) (err error) {
 	storeSecret, err := secret.EnsureStoreSecret(ctx, r.Client, r.Recorder, store)
 	if err != nil {
diff --git a/internal/opensearch/cleanup.go b/internal/opensearch/cleanup.go
new file mode 100644
index 0000000..922a042
--- /dev/null
+++ b/internal/opensearch/cleanup.go
@@ -0,0 +1,221 @@
+package opensearch
+
+import (
+	"context"
+	"crypto/tls"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"io"
+	"net/http"
+	"strings"
+	"time"
+
+	v1 "github.com/shopware/shopware-operator/api/v1"
+	"github.com/shopware/shopware-operator/internal/logging"
+	"go.uber.org/zap"
+	corev1 "k8s.io/api/core/v1"
+	"k8s.io/apimachinery/pkg/types"
+	"sigs.k8s.io/controller-runtime/pkg/client"
+)
+
+// CleanupResources deletes all Opensearch resources (indices and aliases) for the given store
+func CleanupResources(ctx context.Context, k8sClient client.Client, store *v1.Store) error {
+	log := logging.FromContext(ctx)
+
+	if !store.Spec.OpensearchSpec.Enabled {
+		log.Info("Opensearch is not enabled for this store, skipping cleanup")
+		return nil
+	}
+
+	if !store.Spec.OpensearchSpec.CleanupOnDeletion {
+		log.Info("Opensearch cleanup on deletion is not enabled for this store, skipping cleanup")
+		return nil
+	}
+
+	// Get credentials from the store secret
+	credentials, err := getOpensearchCredentials(ctx, k8sClient, store)
+	if err != nil {
+		return fmt.Errorf("failed to get opensearch credentials: %w", err)
+	}
+
+	// Create HTTP client with proper TLS configuration
+	// TLS certificate verification is enabled by default for security.
+	// For production environments with self-signed certificates, configure the system CA pool
+	// or mount custom CA certificates into the operator pod at /etc/ssl/certs/
+	httpClient := &http.Client{
+		Timeout: 30 * time.Second,
+		Transport: &http.Transport{
+			TLSClientConfig: &tls.Config{
+				InsecureSkipVerify: false, // Always verify certificates for security
+			},
+		},
+	}
+
+	baseURL := fmt.Sprintf("%s://%s:%d",
+		store.Spec.OpensearchSpec.Schema,
+		store.Spec.OpensearchSpec.Host,
+		store.Spec.OpensearchSpec.Port,
+	)
+
+	prefix := store.Spec.OpensearchSpec.Index.Prefix
+	log.Infow("Starting Opensearch cleanup",
+		zap.String("baseURL", baseURL),
+		zap.String("prefix", prefix),
+	)
+
+	// List all indices with the prefix
+	indices, err := listIndices(ctx, httpClient, baseURL, credentials, prefix)
+	if err != nil {
+		return fmt.Errorf("failed to list indices: %w", err)
+	}
+
+	log.Infow("Found indices to delete", zap.Int("count", len(indices)))
+
+	// Track deletion errors
+	var deletionErrors []error
+
+	// Delete regular indices
+	deletionErrors = append(deletionErrors, deleteIndicesBatch(ctx, httpClient, baseURL, credentials, indices, "index")...)
+
+	// Also delete aliases with the prefix (admin index)
+	adminPrefix := fmt.Sprintf("%s-admin", prefix)
+	adminIndices, err := listIndices(ctx, httpClient, baseURL, credentials, adminPrefix)
+	if err != nil {
+		log.Warnw("Failed to list admin indices", zap.Error(err))
+	} else {
+		log.Infow("Found admin indices to delete", zap.Int("count", len(adminIndices)))
+		deletionErrors = append(deletionErrors, deleteIndicesBatch(ctx, httpClient, baseURL, credentials, adminIndices, "admin index")...)
+	}
+
+	// Return error if any deletions failed
+	if len(deletionErrors) > 0 {
+		return fmt.Errorf("failed to delete %d indices: %w", len(deletionErrors), errors.Join(deletionErrors...))
+	}
+
+	log.Info("Opensearch cleanup completed")
+	return nil
+}
+
+// deleteIndicesBatch deletes a batch of indices and returns any errors encountered
+func deleteIndicesBatch(ctx context.Context, httpClient *http.Client, baseURL string, credentials *opensearchCredentials, indices []string, indexType string) []error {
+	log := logging.FromContext(ctx)
+	var deletionErrors []error
+
+	for _, index := range indices {
+		if err := deleteIndex(ctx, httpClient, baseURL, credentials, index); err != nil {
+			log.Errorw("Failed to delete "+indexType, zap.String("index", index), zap.Error(err))
+			deletionErrors = append(deletionErrors, fmt.Errorf("failed to delete %s %s: %w", indexType, index, err))
+		} else {
+			log.Infow("Successfully deleted "+indexType, zap.String("index", index))
+		}
+	}
+
+	return deletionErrors
+}
+
+type opensearchCredentials struct {
+	Username string
+	Password string
+}
+
+func getOpensearchCredentials(ctx context.Context, k8sClient client.Client, store *v1.Store) (*opensearchCredentials, error) {
+	// Get the password from the referenced secret
+	secret := &corev1.Secret{}
+	secretName := types.NamespacedName{
+		Namespace: store.Namespace,
+		Name:      store.Spec.OpensearchSpec.PasswordSecretRef.Name,
+	}
+
+	if err := k8sClient.Get(ctx, secretName, secret); err != nil {
+		return nil, fmt.Errorf("failed to get opensearch password secret: %w", err)
+	}
+
+	password, ok := secret.Data[store.Spec.OpensearchSpec.PasswordSecretRef.Key]
+	if !ok {
+		return nil, fmt.Errorf("password key %s not found in secret %s",
+			store.Spec.OpensearchSpec.PasswordSecretRef.Key,
+			store.Spec.OpensearchSpec.PasswordSecretRef.Name)
+	}
+
+	return &opensearchCredentials{
+		Username: store.Spec.OpensearchSpec.Username,
+		Password: string(password),
+	}, nil
+}
+
+func listIndices(ctx context.Context, httpClient *http.Client, baseURL string, credentials *opensearchCredentials, prefix string) ([]string, error) {
+	log := logging.FromContext(ctx)
+
+	// Use _cat/indices API to list indices
+	url := fmt.Sprintf("%s/_cat/indices/%s*?format=json", baseURL, prefix)
+	req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
+	if err != nil {
+		return nil, fmt.Errorf("failed to create request: %w", err)
+	}
+
+	req.SetBasicAuth(credentials.Username, credentials.Password)
+	req.Header.Set("Content-Type", "application/json")
+
+	resp, err := httpClient.Do(req)
+	if err != nil {
+		return nil, fmt.Errorf("failed to execute request: %w", err)
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode == http.StatusNotFound {
+		log.Info("No indices found with the given prefix")
+		return []string{}, nil
+	}
+
+	if resp.StatusCode != http.StatusOK {
+		body, _ := io.ReadAll(resp.Body)
+		return nil, fmt.Errorf("unexpected status code %d: %s", resp.StatusCode, string(body))
+	}
+
+	body, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return nil, fmt.Errorf("failed to read response: %w", err)
+	}
+
+	var indices []map[string]interface{}
+	if err := json.Unmarshal(body, &indices); err != nil {
+		return nil, fmt.Errorf("failed to parse response: %w", err)
+	}
+
+	var indexNames []string
+	for _, index := range indices {
+		if name, ok := index["index"].(string); ok {
+			// Filter out system indices (starting with .)
+			if !strings.HasPrefix(name, ".") {
+				indexNames = append(indexNames, name)
+			}
+		}
+	}
+
+	return indexNames, nil
+}
+
+func deleteIndex(ctx context.Context, httpClient *http.Client, baseURL string, credentials *opensearchCredentials, indexName string) error {
+	url := fmt.Sprintf("%s/%s", baseURL, indexName)
+	req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil)
+	if err != nil {
+		return fmt.Errorf("failed to create request: %w", err)
+	}
+
+	req.SetBasicAuth(credentials.Username, credentials.Password)
+	req.Header.Set("Content-Type", "application/json")
+
+	resp, err := httpClient.Do(req)
+	if err != nil {
+		return fmt.Errorf("failed to execute request: %w", err)
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusNotFound {
+		body, _ := io.ReadAll(resp.Body)
+		return fmt.Errorf("unexpected status code %d: %s", resp.StatusCode, string(body))
+	}
+
+	return nil
+}
diff --git a/internal/opensearch/cleanup_test.go b/internal/opensearch/cleanup_test.go
new file mode 100644
index 0000000..89f247b
--- /dev/null
+++ b/internal/opensearch/cleanup_test.go
@@ -0,0 +1,450 @@
+package opensearch
+
+import (
+	"context"
+	"encoding/json"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
+	"strconv"
+	"testing"
+
+	v1 "github.com/shopware/shopware-operator/api/v1"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	corev1 "k8s.io/api/core/v1"
+	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+	"k8s.io/apimachinery/pkg/runtime"
+	"sigs.k8s.io/controller-runtime/pkg/client/fake"
+)
+
+func TestCleanupResources_OpensearchDisabled(t *testing.T) {
+	ctx := context.Background()
+	scheme := runtime.NewScheme()
+	_ = corev1.AddToScheme(scheme)
+	_ = v1.AddToScheme(scheme)
+
+	store := &v1.Store{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      "test-store",
+			Namespace: "default",
+		},
+		Spec: v1.StoreSpec{
+			OpensearchSpec: v1.OpensearchSpec{
+				Enabled: false,
+			},
+		},
+	}
+
+	client := fake.NewClientBuilder().WithScheme(scheme).Build()
+
+	err := CleanupResources(ctx, client, store)
+	assert.NoError(t, err, "Should not error when opensearch is disabled")
+}
+
+func TestCleanupResources_CleanupDisabled(t *testing.T) {
+	ctx := context.Background()
+	scheme := runtime.NewScheme()
+	_ = corev1.AddToScheme(scheme)
+	_ = v1.AddToScheme(scheme)
+
+	store := &v1.Store{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      "test-store",
+			Namespace: "default",
+		},
+		Spec: v1.StoreSpec{
+			OpensearchSpec: v1.OpensearchSpec{
+				Enabled:           true,
+				CleanupOnDeletion: false,
+			},
+		},
+	}
+
+	client := fake.NewClientBuilder().WithScheme(scheme).Build()
+
+	err := CleanupResources(ctx, client, store)
+	assert.NoError(t, err, "Should not error when cleanup is disabled")
+}
+
+func TestListIndices(t *testing.T) {
+	// Mock opensearch server
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		assert.Equal(t, "/_cat/indices/test-prefix*?format=json", r.URL.Path+"?"+r.URL.RawQuery)
+		assert.Equal(t, "GET", r.Method)
+
+		// Check basic auth
+		username, password, ok := r.BasicAuth()
+		assert.True(t, ok)
+		assert.Equal(t, "testuser", username)
+		assert.Equal(t, "testpass", password)
+
+		indices := []map[string]interface{}{
+			{"index": "test-prefix-product"},
+			{"index": "test-prefix-category"},
+			{"index": ".system-index"}, // Should be filtered out
+		}
+
+		w.Header().Set("Content-Type", "application/json")
+		json.NewEncoder(w).Encode(indices)
+	}))
+	defer server.Close()
+
+	ctx := context.Background()
+	credentials := &opensearchCredentials{
+		Username: "testuser",
+		Password: "testpass",
+	}
+
+	httpClient := &http.Client{}
+	indices, err := listIndices(ctx, httpClient, server.URL, credentials, "test-prefix")
+
+	require.NoError(t, err)
+	assert.Len(t, indices, 2, "Should return 2 indices (excluding system index)")
+	assert.Contains(t, indices, "test-prefix-product")
+	assert.Contains(t, indices, "test-prefix-category")
+	assert.NotContains(t, indices, ".system-index")
+}
+
+func TestListIndices_NoIndicesFound(t *testing.T) {
+	// Mock opensearch server returning 404
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		w.WriteHeader(http.StatusNotFound)
+	}))
+	defer server.Close()
+
+	ctx := context.Background()
+	credentials := &opensearchCredentials{
+		Username: "testuser",
+		Password: "testpass",
+	}
+
+	httpClient := &http.Client{}
+	indices, err := listIndices(ctx, httpClient, server.URL, credentials, "nonexistent")
+
+	require.NoError(t, err)
+	assert.Empty(t, indices, "Should return empty list when no indices found")
+}
+
+func TestDeleteIndex(t *testing.T) {
+	deleted := false
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		assert.Equal(t, "/test-index", r.URL.Path)
+		assert.Equal(t, "DELETE", r.Method)
+
+		// Check basic auth
+		username, password, ok := r.BasicAuth()
+		assert.True(t, ok)
+		assert.Equal(t, "testuser", username)
+		assert.Equal(t, "testpass", password)
+
+		deleted = true
+		w.WriteHeader(http.StatusOK)
+		json.NewEncoder(w).Encode(map[string]interface{}{
+			"acknowledged": true,
+		})
+	}))
+	defer server.Close()
+
+	ctx := context.Background()
+	credentials := &opensearchCredentials{
+		Username: "testuser",
+		Password: "testpass",
+	}
+
+	httpClient := &http.Client{}
+	err := deleteIndex(ctx, httpClient, server.URL, credentials, "test-index")
+
+	require.NoError(t, err)
+	assert.True(t, deleted, "Index should have been deleted")
+}
+
+func TestDeleteIndex_NotFound(t *testing.T) {
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		w.WriteHeader(http.StatusNotFound)
+	}))
+	defer server.Close()
+
+	ctx := context.Background()
+	credentials := &opensearchCredentials{
+		Username: "testuser",
+		Password: "testpass",
+	}
+
+	httpClient := &http.Client{}
+	err := deleteIndex(ctx, httpClient, server.URL, credentials, "nonexistent")
+
+	require.NoError(t, err, "Should not error when deleting non-existent index")
+}
+
+func TestGetOpensearchCredentials(t *testing.T) {
+	ctx := context.Background()
+	scheme := runtime.NewScheme()
+	_ = corev1.AddToScheme(scheme)
+	_ = v1.AddToScheme(scheme)
+
+	secret := &corev1.Secret{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      "opensearch-secret",
+			Namespace: "default",
+		},
+		Data: map[string][]byte{
+			"password": []byte("mysecretpassword"),
+		},
+	}
+
+	store := &v1.Store{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      "test-store",
+			Namespace: "default",
+		},
+		Spec: v1.StoreSpec{
+			OpensearchSpec: v1.OpensearchSpec{
+				Enabled:  true,
+				Username: "admin",
+				PasswordSecretRef: v1.SecretRef{
+					Name: "opensearch-secret",
+					Key:  "password",
+				},
+			},
+		},
+	}
+
+	client := fake.NewClientBuilder().
+		WithScheme(scheme).
+		WithObjects(secret).
+		Build()
+
+	credentials, err := getOpensearchCredentials(ctx, client, store)
+
+	require.NoError(t, err)
+	assert.Equal(t, "admin", credentials.Username)
+	assert.Equal(t, "mysecretpassword", credentials.Password)
+}
+
+func TestGetOpensearchCredentials_SecretNotFound(t *testing.T) {
+	ctx := context.Background()
+	scheme := runtime.NewScheme()
+	_ = corev1.AddToScheme(scheme)
+	_ = v1.AddToScheme(scheme)
+
+	store := &v1.Store{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      "test-store",
+			Namespace: "default",
+		},
+		Spec: v1.StoreSpec{
+			OpensearchSpec: v1.OpensearchSpec{
+				Enabled:  true,
+				Username: "admin",
+				PasswordSecretRef: v1.SecretRef{
+					Name: "nonexistent-secret",
+					Key:  "password",
+				},
+			},
+		},
+	}
+
+	client := fake.NewClientBuilder().WithScheme(scheme).Build()
+
+	_, err := getOpensearchCredentials(ctx, client, store)
+
+	require.Error(t, err)
+	assert.Contains(t, err.Error(), "failed to get opensearch password secret")
+}
+
+func TestCleanupResources_FullFlow(t *testing.T) {
+	// Mock opensearch server with index listing and deletion
+	deletedIndices := []string{}
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		// Check basic auth
+		username, password, ok := r.BasicAuth()
+		if !ok || username != "admin" || password != "testpassword" {
+			w.WriteHeader(http.StatusUnauthorized)
+			return
+		}
+
+		switch r.Method {
+		case "GET":
+			// List indices
+			if r.URL.Path == "/_cat/indices/testprefix*" {
+				indices := []map[string]interface{}{
+					{"index": "testprefix-product"},
+					{"index": "testprefix-category"},
+				}
+				w.Header().Set("Content-Type", "application/json")
+				json.NewEncoder(w).Encode(indices)
+			} else if r.URL.Path == "/_cat/indices/testprefix-admin*" {
+				indices := []map[string]interface{}{
+					{"index": "testprefix-admin-user"},
+				}
+				w.Header().Set("Content-Type", "application/json")
+				json.NewEncoder(w).Encode(indices)
+			} else {
+				w.WriteHeader(http.StatusNotFound)
+			}
+		case "DELETE":
+			// Delete index
+			indexName := r.URL.Path[1:] // Remove leading /
+			deletedIndices = append(deletedIndices, indexName)
+			w.WriteHeader(http.StatusOK)
+			json.NewEncoder(w).Encode(map[string]interface{}{
+				"acknowledged": true,
+			})
+		default:
+			w.WriteHeader(http.StatusMethodNotAllowed)
+		}
+	}))
+	defer server.Close()
+
+	ctx := context.Background()
+	scheme := runtime.NewScheme()
+	_ = corev1.AddToScheme(scheme)
+	_ = v1.AddToScheme(scheme)
+
+	secret := &corev1.Secret{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      "opensearch-secret",
+			Namespace: "default",
+		},
+		Data: map[string][]byte{
+			"password": []byte("testpassword"),
+		},
+	}
+
+	// Parse server URL to get host and port
+	serverURL, err := url.Parse(server.URL)
+	require.NoError(t, err)
+
+	portInt, err := strconv.Atoi(serverURL.Port())
+	require.NoError(t, err)
+
+	store := &v1.Store{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      "test-store",
+			Namespace: "default",
+		},
+		Spec: v1.StoreSpec{
+			OpensearchSpec: v1.OpensearchSpec{
+				Enabled:           true,
+				CleanupOnDeletion: true,
+				Host:              serverURL.Hostname(),
+				Port:              int32(portInt),
+				Schema:            serverURL.Scheme,
+				Username:          "admin",
+				PasswordSecretRef: v1.SecretRef{
+					Name: "opensearch-secret",
+					Key:  "password",
+				},
+				Index: v1.OpensearchIndexSpec{
+					Prefix: "testprefix",
+				},
+			},
+		},
+	}
+
+	client := fake.NewClientBuilder().
+		WithScheme(scheme).
+		WithObjects(secret).
+		Build()
+
+	cleanupErr := CleanupResources(ctx, client, store)
+
+	require.NoError(t, cleanupErr)
+	// Verify all indices were deleted (2 regular + 1 admin)
+	assert.Len(t, deletedIndices, 3)
+	assert.Contains(t, deletedIndices, "testprefix-product")
+	assert.Contains(t, deletedIndices, "testprefix-category")
+	assert.Contains(t, deletedIndices, "testprefix-admin-user")
+}
+
+func TestCleanupResources_PartialFailure(t *testing.T) {
+	// Mock opensearch server that fails to delete one index
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		username, password, ok := r.BasicAuth()
+		if !ok || username != "admin" || password != "testpassword" {
+			w.WriteHeader(http.StatusUnauthorized)
+			return
+		}
+
+		switch r.Method {
+		case "GET":
+			// List indices
+			indices := []map[string]interface{}{
+				{"index": "testprefix-product"},
+				{"index": "testprefix-category"},
+			}
+			w.Header().Set("Content-Type", "application/json")
+			json.NewEncoder(w).Encode(indices)
+		case "DELETE":
+			// Fail to delete testprefix-category
+			if r.URL.Path == "/testprefix-category" {
+				w.WriteHeader(http.StatusInternalServerError)
+				json.NewEncoder(w).Encode(map[string]interface{}{
+					"error": "Internal server error",
+				})
+			} else {
+				w.WriteHeader(http.StatusOK)
+				json.NewEncoder(w).Encode(map[string]interface{}{
+					"acknowledged": true,
+				})
+			}
+		}
+	}))
+	defer server.Close()
+
+	ctx := context.Background()
+	scheme := runtime.NewScheme()
+	_ = corev1.AddToScheme(scheme)
+	_ = v1.AddToScheme(scheme)
+
+	secret := &corev1.Secret{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      "opensearch-secret",
+			Namespace: "default",
+		},
+		Data: map[string][]byte{
+			"password": []byte("testpassword"),
+		},
+	}
+
+	serverURL, err := url.Parse(server.URL)
+	require.NoError(t, err)
+
+	portInt, err := strconv.Atoi(serverURL.Port())
+	require.NoError(t, err)
+
+	store := &v1.Store{
+		ObjectMeta: metav1.ObjectMeta{
+			Name:      "test-store",
+			Namespace: "default",
+		},
+		Spec: v1.StoreSpec{
+			OpensearchSpec: v1.OpensearchSpec{
+				Enabled:           true,
+				CleanupOnDeletion: true,
+				Host:              serverURL.Hostname(),
+				Port:              int32(portInt),
+				Schema:            serverURL.Scheme,
+				Username:          "admin",
+				PasswordSecretRef: v1.SecretRef{
+					Name: "opensearch-secret",
+					Key:  "password",
+				},
+				Index: v1.OpensearchIndexSpec{
+					Prefix: "testprefix",
+				},
+			},
+		},
+	}
+
+	client := fake.NewClientBuilder().
+		WithScheme(scheme).
+		WithObjects(secret).
+		Build()
+
+	cleanupErr := CleanupResources(ctx, client, store)
+
+	require.Error(t, cleanupErr)
+	assert.Contains(t, cleanupErr.Error(), "failed to delete")
+}