diff --git a/internal/database/crud_nested_resource.go b/internal/database/crud_nested_resource.go index ab893a2220..8ef3e0c9e5 100644 --- a/internal/database/crud_nested_resource.go +++ b/internal/database/crud_nested_resource.go @@ -67,7 +67,7 @@ func (d *nestedCosmosResourceCRUD[InternalAPIType, CosmosAPIType]) makeResourceI } parts := []string{d.parentResourceID.String()} - if d.parentResourceID.ResourceType.Namespace != api.ProviderNamespace { + if !strings.EqualFold(d.parentResourceID.ResourceType.Namespace, api.ProviderNamespace) { if len(resourceID) == 0 { // in this case, adding the actual provider type results in an illegal resourceID // for instance /subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters does not parse diff --git a/internal/databasetesting/mock_crud.go b/internal/databasetesting/mock_crud.go new file mode 100644 index 0000000000..d93dd698f0 --- /dev/null +++ b/internal/databasetesting/mock_crud.go @@ -0,0 +1,914 @@ +// Copyright 2025 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package databasetesting + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "path" + "strings" + + "github.com/Azure/azure-sdk-for-go/sdk/azcore" + azcorearm "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" + "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" + + "github.com/Azure/ARO-HCP/internal/api" + "github.com/Azure/ARO-HCP/internal/api/arm" + "github.com/Azure/ARO-HCP/internal/database" +) + +// mockResourceCRUD is a generic mock implementation of database.ResourceCRUD. +type mockResourceCRUD[InternalAPIType, CosmosAPIType any] struct { + client *MockDBClient + parentResourceID *azcorearm.ResourceID + resourceType azcorearm.ResourceType +} + +func newMockResourceCRUD[InternalAPIType, CosmosAPIType any]( + client *MockDBClient, parentResourceID *azcorearm.ResourceID, resourceType azcorearm.ResourceType) *mockResourceCRUD[InternalAPIType, CosmosAPIType] { + + return &mockResourceCRUD[InternalAPIType, CosmosAPIType]{ + client: client, + parentResourceID: parentResourceID, + resourceType: resourceType, + } +} + +func (m *mockResourceCRUD[InternalAPIType, CosmosAPIType]) makeResourceIDPath(resourceID string) (*azcorearm.ResourceID, error) { + if len(m.parentResourceID.SubscriptionID) == 0 { + return nil, fmt.Errorf("subscriptionID is required") + } + parts := []string{m.parentResourceID.String()} + + if !strings.EqualFold(m.parentResourceID.ResourceType.Namespace, api.ProviderNamespace) { + if len(resourceID) == 0 { + resourcePathString := path.Join(parts...) 
+ return azcorearm.ParseResourceID(resourcePathString) + } + + parts = append(parts, + "providers", + m.resourceType.Namespace, + ) + } else { + if len(m.parentResourceID.ResourceGroupName) == 0 { + return nil, fmt.Errorf("resourceGroup is required") + } + } + parts = append(parts, m.resourceType.Types[len(m.resourceType.Types)-1]) + + if len(resourceID) > 0 { + parts = append(parts, resourceID) + } + + resourcePathString := path.Join(parts...) + return azcorearm.ParseResourceID(resourcePathString) +} + +func NewNotFoundError() *azcore.ResponseError { + return &azcore.ResponseError{ + ErrorCode: "Not Found", + StatusCode: http.StatusNotFound, + } +} + +func (m *mockResourceCRUD[InternalAPIType, CosmosAPIType]) GetByID(ctx context.Context, cosmosID string) (*InternalAPIType, error) { + if strings.ToLower(cosmosID) != cosmosID { + return nil, fmt.Errorf("cosmosID must be lowercase, not: %q", cosmosID) + } + + data, ok := m.client.GetDocument(cosmosID) + if !ok { + return nil, NewNotFoundError() + } + + var cosmosObj CosmosAPIType + if err := json.Unmarshal(data, &cosmosObj); err != nil { + return nil, fmt.Errorf("failed to unmarshal document: %w", err) + } + + return database.CosmosToInternal[InternalAPIType, CosmosAPIType](&cosmosObj) +} + +func (m *mockResourceCRUD[InternalAPIType, CosmosAPIType]) Get(ctx context.Context, resourceID string) (*InternalAPIType, error) { + completeResourceID, err := m.makeResourceIDPath(resourceID) + if err != nil { + return nil, fmt.Errorf("failed to make ResourceID path for '%s': %w", resourceID, err) + } + + cosmosID, err := api.ResourceIDToCosmosID(completeResourceID) + if err != nil { + return nil, err + } + + // Try exact match first + result, err := m.GetByID(ctx, cosmosID) + if err == nil { + return result, nil + } + + // If not found, search by resourceID + if !database.IsResponseError(err, http.StatusNotFound) { + return nil, err + } + + // Search all documents for matching resourceID + allDocs := m.client.GetAllDocuments() + + for _, data := range allDocs { + var typedDoc database.TypedDocument + if err := json.Unmarshal(data, &typedDoc); err != nil { + continue + } + + // Check resource type + if !strings.EqualFold(typedDoc.ResourceType, completeResourceID.ResourceType.String()) { + continue + } + + // Check resourceID in properties + var props map[string]any + if err := json.Unmarshal(typedDoc.Properties, &props); err != nil { + continue + } + + resourceIDStr, ok := props["resourceId"].(string) + if !ok { + continue + } + + if strings.EqualFold(resourceIDStr, completeResourceID.String()) { + var cosmosObj CosmosAPIType + if err := json.Unmarshal(data, &cosmosObj); err != nil { + continue + } + return database.CosmosToInternal[InternalAPIType, CosmosAPIType](&cosmosObj) + } + } + + return nil, NewNotFoundError() +} + +func (m *mockResourceCRUD[InternalAPIType, CosmosAPIType]) List(ctx context.Context, opts *database.DBClientListResourceDocsOptions) (database.DBClientIterator[InternalAPIType], error) { + prefix, err := m.makeResourceIDPath("") + if err != nil { + return nil, fmt.Errorf("failed to make ResourceID path: %w", err) + } + + documents := m.client.ListDocuments(&m.resourceType, prefix.String()+"/") + + var ids []string + var items []*InternalAPIType + + for _, data := range documents { + var cosmosObj CosmosAPIType + if err := json.Unmarshal(data, &cosmosObj); err != nil { + continue + } + + internalObj, err := database.CosmosToInternal[InternalAPIType, CosmosAPIType](&cosmosObj) + if err != nil { + continue + } + + // Get the ID from the 
typed document + var typedDoc database.TypedDocument + if err := json.Unmarshal(data, &typedDoc); err != nil { + continue + } + + ids = append(ids, typedDoc.ID) + items = append(items, internalObj) + } + + return newMockIterator(ids, items), nil +} + +func (m *mockResourceCRUD[InternalAPIType, CosmosAPIType]) Create(ctx context.Context, newObj *InternalAPIType, options *azcosmos.ItemOptions) (*InternalAPIType, error) { + cosmosObj, err := database.InternalToCosmos[InternalAPIType, CosmosAPIType](newObj) + if err != nil { + return nil, fmt.Errorf("failed to convert to cosmos type: %w", err) + } + + data, err := json.Marshal(cosmosObj) + if err != nil { + return nil, fmt.Errorf("failed to marshal cosmos object: %w", err) + } + + // Get cosmos ID from the object + cosmosPersistable, ok := any(newObj).(api.CosmosPersistable) + if !ok { + return nil, fmt.Errorf("type %T does not implement CosmosPersistable", newObj) + } + + cosmosData := cosmosPersistable.GetCosmosData() + cosmosID := cosmosData.CosmosUID + + // Check for existing + if _, exists := m.client.GetDocument(cosmosID); exists { + return nil, &azcore.ResponseError{StatusCode: http.StatusConflict} + } + + m.client.StoreDocument(cosmosID, data) + + // Read back the stored object + return m.GetByID(ctx, cosmosID) +} + +func (m *mockResourceCRUD[InternalAPIType, CosmosAPIType]) Replace(ctx context.Context, newObj *InternalAPIType, options *azcosmos.ItemOptions) (*InternalAPIType, error) { + cosmosObj, err := database.InternalToCosmos[InternalAPIType, CosmosAPIType](newObj) + if err != nil { + return nil, fmt.Errorf("failed to convert to cosmos type: %w", err) + } + + data, err := json.Marshal(cosmosObj) + if err != nil { + return nil, fmt.Errorf("failed to marshal cosmos object: %w", err) + } + + // Get cosmos ID from the object + cosmosPersistable, ok := any(newObj).(api.CosmosPersistable) + if !ok { + return nil, fmt.Errorf("type %T does not implement CosmosPersistable", newObj) + } + + cosmosData := cosmosPersistable.GetCosmosData() + cosmosID := cosmosData.CosmosUID + + // Check that document exists + if _, exists := m.client.GetDocument(cosmosID); !exists { + return nil, NewNotFoundError() + } + + m.client.StoreDocument(cosmosID, data) + + // Read back the stored object + return m.GetByID(ctx, cosmosID) +} + +func (m *mockResourceCRUD[InternalAPIType, CosmosAPIType]) Delete(ctx context.Context, resourceID string) error { + curr, err := m.Get(ctx, resourceID) + if err != nil { + return err + } + + cosmosUID := any(curr).(api.CosmosPersistable).GetCosmosData().CosmosUID + m.client.DeleteDocument(cosmosUID) + return nil +} + +func (m *mockResourceCRUD[InternalAPIType, CosmosAPIType]) AddCreateToTransaction(ctx context.Context, transaction database.DBTransaction, newObj *InternalAPIType, opts *azcosmos.TransactionalBatchItemOptions) (string, error) { + cosmosObj, err := database.InternalToCosmos[InternalAPIType, CosmosAPIType](newObj) + if err != nil { + return "", fmt.Errorf("failed to convert to cosmos type: %w", err) + } + + data, err := json.Marshal(cosmosObj) + if err != nil { + return "", fmt.Errorf("failed to marshal cosmos object: %w", err) + } + + cosmosPersistable, ok := any(newObj).(api.CosmosPersistable) + if !ok { + return "", fmt.Errorf("type %T does not implement CosmosPersistable", newObj) + } + + cosmosData := cosmosPersistable.GetCosmosData() + cosmosID := cosmosData.CosmosUID + + mockTx, ok := transaction.(*mockTransaction) + if !ok { + return "", fmt.Errorf("expected mockTransaction, got %T", transaction) + } + + 
transactionDetails := database.CosmosDBTransactionStepDetails{ + ActionType: "Create", + GoType: fmt.Sprintf("%T", newObj), + CosmosID: cosmosID, + } + + mockTx.steps = append(mockTx.steps, mockTransactionStep{ + details: transactionDetails, + execute: func() (string, json.RawMessage, error) { + m.client.StoreDocument(cosmosID, data) + return cosmosID, data, nil + }, + }) + + return cosmosID, nil +} + +func (m *mockResourceCRUD[InternalAPIType, CosmosAPIType]) AddReplaceToTransaction(ctx context.Context, transaction database.DBTransaction, newObj *InternalAPIType, opts *azcosmos.TransactionalBatchItemOptions) (string, error) { + cosmosObj, err := database.InternalToCosmos[InternalAPIType, CosmosAPIType](newObj) + if err != nil { + return "", fmt.Errorf("failed to convert to cosmos type: %w", err) + } + + data, err := json.Marshal(cosmosObj) + if err != nil { + return "", fmt.Errorf("failed to marshal cosmos object: %w", err) + } + + cosmosPersistable, ok := any(newObj).(api.CosmosPersistable) + if !ok { + return "", fmt.Errorf("type %T does not implement CosmosPersistable", newObj) + } + + cosmosData := cosmosPersistable.GetCosmosData() + cosmosID := cosmosData.CosmosUID + + mockTx, ok := transaction.(*mockTransaction) + if !ok { + return "", fmt.Errorf("expected mockTransaction, got %T", transaction) + } + + transactionDetails := database.CosmosDBTransactionStepDetails{ + ActionType: "Replace", + GoType: fmt.Sprintf("%T", newObj), + CosmosID: cosmosID, + } + + mockTx.steps = append(mockTx.steps, mockTransactionStep{ + details: transactionDetails, + execute: func() (string, json.RawMessage, error) { + m.client.StoreDocument(cosmosID, data) + return cosmosID, data, nil + }, + }) + + return cosmosID, nil +} + +// mockHCPClusterCRUD implements database.HCPClusterCRUD. 
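+// It embeds the generic mockResourceCRUD for cluster documents and returns
+// CRUD helpers scoped to a named cluster for the nested resources (external
+// auths, node pools, controllers). A minimal sketch, with illustrative names:
+//
+//	nodePools := mock.HCPClusters(subscriptionID, resourceGroupName).NodePools("my-cluster")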
+type mockHCPClusterCRUD struct { + *mockResourceCRUD[api.HCPOpenShiftCluster, database.HCPCluster] +} + +func newMockHCPClusterCRUD(client *MockDBClient, parentResourceID *azcorearm.ResourceID) *mockHCPClusterCRUD { + return &mockHCPClusterCRUD{ + mockResourceCRUD: newMockResourceCRUD[api.HCPOpenShiftCluster, database.HCPCluster](client, parentResourceID, api.ClusterResourceType), + } +} + +func (m *mockHCPClusterCRUD) ExternalAuth(hcpClusterName string) database.ExternalAuthsCRUD { + parentResourceID := api.Must(azcorearm.ParseResourceID( + path.Join( + m.parentResourceID.String(), + "providers", + m.resourceType.Namespace, + m.resourceType.Type, + hcpClusterName))) + + return &mockExternalAuthCRUD{ + mockResourceCRUD: newMockResourceCRUD[api.HCPOpenShiftClusterExternalAuth, database.ExternalAuth]( + m.client, + parentResourceID, + api.ExternalAuthResourceType, + ), + } +} + +func (m *mockHCPClusterCRUD) NodePools(hcpClusterName string) database.NodePoolsCRUD { + parentResourceID := api.Must(azcorearm.ParseResourceID( + path.Join( + m.parentResourceID.String(), + "providers", + m.resourceType.Namespace, + m.resourceType.Type, + hcpClusterName))) + + return &mockNodePoolsCRUD{ + mockResourceCRUD: newMockResourceCRUD[api.HCPOpenShiftClusterNodePool, database.NodePool]( + m.client, + parentResourceID, + api.NodePoolResourceType), + } +} + +func (m *mockHCPClusterCRUD) Controllers(hcpClusterName string) database.ResourceCRUD[api.Controller] { + parentResourceID := api.Must(azcorearm.ParseResourceID( + path.Join( + m.parentResourceID.String(), + "providers", + m.resourceType.Namespace, + m.resourceType.Type, + hcpClusterName))) + + return newMockResourceCRUD[api.Controller, database.Controller](m.client, parentResourceID, api.ClusterControllerResourceType) +} + +var _ database.HCPClusterCRUD = &mockHCPClusterCRUD{} + +// mockNodePoolsCRUD implements database.NodePoolsCRUD. +type mockNodePoolsCRUD struct { + *mockResourceCRUD[api.HCPOpenShiftClusterNodePool, database.NodePool] +} + +func (m *mockNodePoolsCRUD) Controllers(nodePoolName string) database.ResourceCRUD[api.Controller] { + parentResourceID := api.Must(azcorearm.ParseResourceID( + path.Join( + m.parentResourceID.String(), + m.resourceType.Types[len(m.resourceType.Types)-1], + nodePoolName, + ))) + + return newMockResourceCRUD[api.Controller, database.Controller](m.client, parentResourceID, api.NodePoolControllerResourceType) +} + +var _ database.NodePoolsCRUD = &mockNodePoolsCRUD{} + +// mockExternalAuthCRUD implements database.ExternalAuthsCRUD. +type mockExternalAuthCRUD struct { + *mockResourceCRUD[api.HCPOpenShiftClusterExternalAuth, database.ExternalAuth] +} + +func (m *mockExternalAuthCRUD) Controllers(externalAuthName string) database.ResourceCRUD[api.Controller] { + parentResourceID := api.Must(azcorearm.ParseResourceID( + path.Join( + m.parentResourceID.String(), + m.resourceType.Types[len(m.resourceType.Types)-1], + externalAuthName, + ))) + + return newMockResourceCRUD[api.Controller, database.Controller](m.client, parentResourceID, api.ExternalAuthControllerResourceType) +} + +var _ database.ExternalAuthsCRUD = &mockExternalAuthCRUD{} + +// mockOperationCRUD implements database.OperationCRUD. 
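+// In addition to the generic CRUD operations, ListActiveOperations scans all
+// stored operation documents, skips those in a terminal provisioning state
+// (Succeeded, Failed, Canceled), and applies the optional Request and
+// ExternalID filters from the supplied options.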
+type mockOperationCRUD struct { + *mockResourceCRUD[api.Operation, database.Operation] +} + +func newMockOperationCRUD(client *MockDBClient, parentResourceID *azcorearm.ResourceID) *mockOperationCRUD { + return &mockOperationCRUD{ + mockResourceCRUD: newMockResourceCRUD[api.Operation, database.Operation](client, parentResourceID, api.OperationStatusResourceType), + } +} + +func (m *mockOperationCRUD) ListActiveOperations(options *database.DBClientListActiveOperationDocsOptions) database.DBClientIterator[api.Operation] { + allDocs := m.client.GetAllDocuments() + + var ids []string + var items []*api.Operation + + for _, data := range allDocs { + var typedDoc database.TypedDocument + if err := json.Unmarshal(data, &typedDoc); err != nil { + continue + } + + // Check resource type + if !strings.EqualFold(typedDoc.ResourceType, api.OperationStatusResourceType.String()) { + continue + } + + var cosmosObj database.Operation + if err := json.Unmarshal(data, &cosmosObj); err != nil { + continue + } + + // Filter out terminal states + status := cosmosObj.OperationProperties.Status + if status == arm.ProvisioningStateSucceeded || + status == arm.ProvisioningStateFailed || + status == arm.ProvisioningStateCanceled { + continue + } + + // Apply options filters + if options != nil { + if options.Request != nil && cosmosObj.OperationProperties.Request != *options.Request { + continue + } + + if options.ExternalID != nil { + externalID := cosmosObj.OperationProperties.ExternalID + if externalID == nil { + continue + } + + if options.IncludeNestedResources { + if !strings.HasPrefix(strings.ToLower(externalID.String()), strings.ToLower(options.ExternalID.String())) { + continue + } + } else { + if !strings.EqualFold(externalID.String(), options.ExternalID.String()) { + continue + } + } + } + } + + internalObj, err := database.CosmosToInternalOperation(&cosmosObj) + if err != nil { + continue + } + + ids = append(ids, typedDoc.ID) + items = append(items, internalObj) + } + + return newMockIterator(ids, items) +} + +var _ database.OperationCRUD = &mockOperationCRUD{} + +// mockSubscriptionCRUD implements database.SubscriptionCRUD. 
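+// Subscription documents are keyed by their Cosmos UID; Get first derives the
+// Cosmos ID from the subscription resource ID and, if that lookup misses,
+// falls back to treating the resource name itself as a legacy document ID.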
+type mockSubscriptionCRUD struct { + client *MockDBClient +} + +func newMockSubscriptionCRUD(client *MockDBClient) *mockSubscriptionCRUD { + return &mockSubscriptionCRUD{client: client} +} + +func (m *mockSubscriptionCRUD) GetByID(ctx context.Context, cosmosID string) (*arm.Subscription, error) { + if strings.ToLower(cosmosID) != cosmosID { + return nil, fmt.Errorf("cosmosID must be lowercase, not: %q", cosmosID) + } + + data, ok := m.client.GetDocument(cosmosID) + if !ok { + return nil, NewNotFoundError() + } + + var cosmosObj database.Subscription + if err := json.Unmarshal(data, &cosmosObj); err != nil { + return nil, fmt.Errorf("failed to unmarshal document: %w", err) + } + + return database.CosmosToInternalSubscription(&cosmosObj) +} + +func (m *mockSubscriptionCRUD) Get(ctx context.Context, resourceName string) (*arm.Subscription, error) { + completeResourceID, err := arm.ToSubscriptionResourceID(resourceName) + if err != nil { + return nil, fmt.Errorf("failed to make ResourceID path for '%s': %w", resourceName, err) + } + + cosmosID, err := api.ResourceIDToCosmosID(completeResourceID) + if err != nil { + return nil, err + } + + // Try exact match first + result, err := m.GetByID(ctx, cosmosID) + if err == nil { + return result, nil + } + + // If not found by new ID, try old lookup + if !database.IsResponseError(err, http.StatusNotFound) { + return nil, err + } + + return m.GetByID(ctx, resourceName) +} + +func (m *mockSubscriptionCRUD) List(ctx context.Context, options *database.DBClientListResourceDocsOptions) (database.DBClientIterator[arm.Subscription], error) { + documents := m.client.ListDocuments(&azcorearm.SubscriptionResourceType, "") + + var ids []string + var items []*arm.Subscription + + for _, data := range documents { + var cosmosObj database.Subscription + if err := json.Unmarshal(data, &cosmosObj); err != nil { + continue + } + + internalObj, err := database.CosmosToInternalSubscription(&cosmosObj) + if err != nil { + continue + } + + ids = append(ids, cosmosObj.ID) + items = append(items, internalObj) + } + + return newMockIterator(ids, items), nil +} + +func (m *mockSubscriptionCRUD) Create(ctx context.Context, newObj *arm.Subscription, options *azcosmos.ItemOptions) (*arm.Subscription, error) { + cosmosObj, err := database.InternalToCosmosSubscription(newObj) + if err != nil { + return nil, fmt.Errorf("failed to convert to cosmos type: %w", err) + } + + data, err := json.Marshal(cosmosObj) + if err != nil { + return nil, fmt.Errorf("failed to marshal cosmos object: %w", err) + } + + cosmosData := newObj.GetCosmosData() + cosmosID := cosmosData.CosmosUID + + if _, exists := m.client.GetDocument(cosmosID); exists { + return nil, &azcore.ResponseError{StatusCode: http.StatusConflict} + } + + m.client.StoreDocument(cosmosID, data) + return m.GetByID(ctx, cosmosID) +} + +func (m *mockSubscriptionCRUD) Replace(ctx context.Context, newObj *arm.Subscription, options *azcosmos.ItemOptions) (*arm.Subscription, error) { + cosmosObj, err := database.InternalToCosmosSubscription(newObj) + if err != nil { + return nil, fmt.Errorf("failed to convert to cosmos type: %w", err) + } + + data, err := json.Marshal(cosmosObj) + if err != nil { + return nil, fmt.Errorf("failed to marshal cosmos object: %w", err) + } + + cosmosData := newObj.GetCosmosData() + cosmosID := cosmosData.CosmosUID + + if _, exists := m.client.GetDocument(cosmosID); !exists { + return nil, NewNotFoundError() + } + + m.client.StoreDocument(cosmosID, data) + return m.GetByID(ctx, cosmosID) +} + +func (m 
*mockSubscriptionCRUD) Delete(ctx context.Context, resourceName string) error { + completeResourceID, err := arm.ToSubscriptionResourceID(resourceName) + if err != nil { + return fmt.Errorf("failed to make ResourceID path for '%s': %w", resourceName, err) + } + + cosmosID, err := api.ResourceIDToCosmosID(completeResourceID) + if err != nil { + return err + } + + m.client.DeleteDocument(cosmosID) + return nil +} + +func (m *mockSubscriptionCRUD) AddCreateToTransaction(ctx context.Context, transaction database.DBTransaction, newObj *arm.Subscription, opts *azcosmos.TransactionalBatchItemOptions) (string, error) { + cosmosObj, err := database.InternalToCosmosSubscription(newObj) + if err != nil { + return "", fmt.Errorf("failed to convert to cosmos type: %w", err) + } + + data, err := json.Marshal(cosmosObj) + if err != nil { + return "", fmt.Errorf("failed to marshal cosmos object: %w", err) + } + + cosmosData := newObj.GetCosmosData() + cosmosID := cosmosData.CosmosUID + + mockTx, ok := transaction.(*mockTransaction) + if !ok { + return "", fmt.Errorf("expected mockTransaction, got %T", transaction) + } + + transactionDetails := database.CosmosDBTransactionStepDetails{ + ActionType: "Create", + GoType: fmt.Sprintf("%T", newObj), + CosmosID: cosmosID, + } + + mockTx.steps = append(mockTx.steps, mockTransactionStep{ + details: transactionDetails, + execute: func() (string, json.RawMessage, error) { + m.client.StoreDocument(cosmosID, data) + return cosmosID, data, nil + }, + }) + + return cosmosID, nil +} + +func (m *mockSubscriptionCRUD) AddReplaceToTransaction(ctx context.Context, transaction database.DBTransaction, newObj *arm.Subscription, opts *azcosmos.TransactionalBatchItemOptions) (string, error) { + cosmosObj, err := database.InternalToCosmosSubscription(newObj) + if err != nil { + return "", fmt.Errorf("failed to convert to cosmos type: %w", err) + } + + data, err := json.Marshal(cosmosObj) + if err != nil { + return "", fmt.Errorf("failed to marshal cosmos object: %w", err) + } + + cosmosData := newObj.GetCosmosData() + cosmosID := cosmosData.CosmosUID + + mockTx, ok := transaction.(*mockTransaction) + if !ok { + return "", fmt.Errorf("expected mockTransaction, got %T", transaction) + } + + transactionDetails := database.CosmosDBTransactionStepDetails{ + ActionType: "Replace", + GoType: fmt.Sprintf("%T", newObj), + CosmosID: cosmosID, + } + + mockTx.steps = append(mockTx.steps, mockTransactionStep{ + details: transactionDetails, + execute: func() (string, json.RawMessage, error) { + m.client.StoreDocument(cosmosID, data) + return cosmosID, data, nil + }, + }) + + return cosmosID, nil +} + +var _ database.SubscriptionCRUD = &mockSubscriptionCRUD{} + +// mockServiceProviderClusterCRUD implements database.ServiceProviderClusterCRUD. +type mockServiceProviderClusterCRUD struct { + *mockResourceCRUD[api.ServiceProviderCluster, database.GenericDocument[api.ServiceProviderCluster]] +} + +func newMockServiceProviderClusterCRUD(client *MockDBClient, parentResourceID *azcorearm.ResourceID) *mockServiceProviderClusterCRUD { + return &mockServiceProviderClusterCRUD{ + mockResourceCRUD: newMockResourceCRUD[api.ServiceProviderCluster, database.GenericDocument[api.ServiceProviderCluster]]( + client, parentResourceID, api.ServiceProviderClusterResourceType), + } +} + +var _ database.ServiceProviderClusterCRUD = &mockServiceProviderClusterCRUD{} + +// mockUntypedCRUD implements database.UntypedResourceCRUD. 
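+// All lookups are scoped to parentResourceID: Get rejects resource IDs outside
+// that subtree, List returns only direct children, and ListRecursive returns
+// every descendant whose properties.resourceId falls under the parent path.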
+type mockUntypedCRUD struct { + client *MockDBClient + parentResourceID azcorearm.ResourceID +} + +func newMockUntypedCRUD(client *MockDBClient, parentResourceID azcorearm.ResourceID) *mockUntypedCRUD { + return &mockUntypedCRUD{ + client: client, + parentResourceID: parentResourceID, + } +} + +func (m *mockUntypedCRUD) Get(ctx context.Context, resourceID *azcorearm.ResourceID) (*database.TypedDocument, error) { + if !strings.HasPrefix(strings.ToLower(resourceID.String()), strings.ToLower(m.parentResourceID.String())) { + return nil, fmt.Errorf("resourceID %q must be a descendent of parentResourceID %q", resourceID.String(), m.parentResourceID.String()) + } + + cosmosID, err := api.ResourceIDToCosmosID(resourceID) + if err != nil { + return nil, err + } + + data, ok := m.client.GetDocument(cosmosID) + if !ok { + // Search by resourceID + allDocs := m.client.GetAllDocuments() + + for _, docData := range allDocs { + var typedDoc database.TypedDocument + if err := json.Unmarshal(docData, &typedDoc); err != nil { + continue + } + + var props map[string]any + if err := json.Unmarshal(typedDoc.Properties, &props); err != nil { + continue + } + + resourceIDStr, ok := props["resourceId"].(string) + if !ok { + continue + } + + if strings.EqualFold(resourceIDStr, resourceID.String()) { + if err := json.Unmarshal(docData, &typedDoc); err != nil { + continue + } + return &typedDoc, nil + } + } + + return nil, NewNotFoundError() + } + + var typedDoc database.TypedDocument + if err := json.Unmarshal(data, &typedDoc); err != nil { + return nil, fmt.Errorf("failed to unmarshal document: %w", err) + } + + return &typedDoc, nil +} + +func (m *mockUntypedCRUD) List(ctx context.Context, opts *database.DBClientListResourceDocsOptions) (database.DBClientIterator[database.TypedDocument], error) { + return m.listInternal(ctx, opts, true) +} + +func (m *mockUntypedCRUD) ListRecursive(ctx context.Context, opts *database.DBClientListResourceDocsOptions) (database.DBClientIterator[database.TypedDocument], error) { + return m.listInternal(ctx, opts, false) +} + +func (m *mockUntypedCRUD) listInternal(ctx context.Context, opts *database.DBClientListResourceDocsOptions, nonRecursive bool) (database.DBClientIterator[database.TypedDocument], error) { + allDocs := m.client.GetAllDocuments() + + prefix := m.parentResourceID.String() + "/" + requiredSlashes := strings.Count(m.parentResourceID.String(), "/") + 2 + if strings.EqualFold(m.parentResourceID.ResourceType.Type, "resourceGroups") { + requiredSlashes = strings.Count(m.parentResourceID.String(), "/") + 4 + } + + var ids []string + var items []*database.TypedDocument + + for _, data := range allDocs { + var typedDoc database.TypedDocument + if err := json.Unmarshal(data, &typedDoc); err != nil { + continue + } + + var props map[string]any + if err := json.Unmarshal(typedDoc.Properties, &props); err != nil { + continue + } + + resourceIDStr, ok := props["resourceId"].(string) + if !ok { + continue + } + + if !strings.HasPrefix(strings.ToLower(resourceIDStr), strings.ToLower(prefix)) { + continue + } + + // For non-recursive, check slash count + if nonRecursive { + slashCount := strings.Count(resourceIDStr, "/") + if slashCount != requiredSlashes { + continue + } + } + + docCopy := typedDoc + ids = append(ids, typedDoc.ID) + items = append(items, &docCopy) + } + + return newMockIterator(ids, items), nil +} + +func (m *mockUntypedCRUD) Delete(ctx context.Context, resourceID *azcorearm.ResourceID) error { + curr, err := m.Get(ctx, resourceID) + if err != nil { + return 
err + } + + cosmosUID := curr.ID + m.client.DeleteDocument(cosmosUID) + return nil +} + +func (m *mockUntypedCRUD) Child(resourceType azcorearm.ResourceType, resourceName string) (database.UntypedResourceCRUD, error) { + if len(resourceName) == 0 { + return nil, fmt.Errorf("resourceName is required") + } + + parts := []string{m.parentResourceID.String()} + + switch { + case strings.EqualFold(resourceType.Type, "resourcegroups"): + // no provider needed here. + case resourceType.Namespace == api.ProviderNamespace && m.parentResourceID.ResourceType.Namespace != api.ProviderNamespace: + parts = append(parts, + "providers", + resourceType.Namespace, + ) + case resourceType.Namespace != api.ProviderNamespace && m.parentResourceID.ResourceType.Namespace == api.ProviderNamespace: + return nil, fmt.Errorf("cannot switch to a non-RH provider: %q", resourceType.Namespace) + } + parts = append(parts, resourceType.Types[len(resourceType.Types)-1]) + parts = append(parts, resourceName) + + resourcePathString := path.Join(parts...) + newParentResourceID, err := azcorearm.ParseResourceID(resourcePathString) + if err != nil { + return nil, err + } + + return newMockUntypedCRUD(m.client, *newParentResourceID), nil +} + +var _ database.UntypedResourceCRUD = &mockUntypedCRUD{} diff --git a/internal/databasetesting/mock_dbclient.go b/internal/databasetesting/mock_dbclient.go new file mode 100644 index 0000000000..998c8b55a4 --- /dev/null +++ b/internal/databasetesting/mock_dbclient.go @@ -0,0 +1,528 @@ +// Copyright 2025 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package databasetesting + +import ( + "context" + "encoding/json" + "fmt" + "net/http" + "os" + "path" + "path/filepath" + "strings" + "sync" + "time" + + "github.com/Azure/azure-sdk-for-go/sdk/azcore" + azcorearm "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" + "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" + + "github.com/Azure/ARO-HCP/internal/api" + "github.com/Azure/ARO-HCP/internal/database" +) + +// MockDBClient implements the database.DBClient interface for unit testing. +// It stores documents in memory and supports loading from cosmos-record context directories. +type MockDBClient struct { + mu sync.RWMutex + + // documents stores all documents keyed by their cosmos ID + documents map[string]json.RawMessage + + // billing stores billing documents keyed by their ID + billing map[string]*database.BillingDocument + + // lockClient is an optional mock lock client + lockClient database.LockClientInterface +} + +// NewMockDBClient creates a new mock DBClient with empty storage. +func NewMockDBClient() *MockDBClient { + lockClient := NewMockLockClient(10) + + return &MockDBClient{ + documents: make(map[string]json.RawMessage), + billing: make(map[string]*database.BillingDocument), + lockClient: lockClient, + } +} + +// SetLockClient sets a mock lock client for testing. 
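+// Replacing the default MockLockClient lets a test control lock behaviour
+// explicitly. A minimal sketch (the TTL here is illustrative):
+//
+//	mock := NewMockDBClient()
+//	mock.SetLockClient(NewMockLockClient(30 * time.Second))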
+func (m *MockDBClient) SetLockClient(lockClient database.LockClientInterface) { + m.lockClient = lockClient +} + +// GetLockClient returns the mock lock client, or nil if not set. +func (m *MockDBClient) GetLockClient() database.LockClientInterface { + return m.lockClient +} + +// NewTransaction creates a new mock transaction. +func (m *MockDBClient) NewTransaction(pk string) database.DBTransaction { + return newMockTransaction(pk, m) +} + +// CreateBillingDoc creates a new billing document. +func (m *MockDBClient) CreateBillingDoc(ctx context.Context, doc *database.BillingDocument) error { + if doc.ResourceID == nil { + return fmt.Errorf("BillingDocument is missing a ResourceID") + } + + m.mu.Lock() + defer m.mu.Unlock() + + if _, exists := m.billing[doc.ID]; exists { + return &azcore.ResponseError{StatusCode: http.StatusConflict} + } + + m.billing[doc.ID] = doc + return nil +} + +// PatchBillingDoc patches a billing document. +func (m *MockDBClient) PatchBillingDoc(ctx context.Context, resourceID *azcorearm.ResourceID, ops database.BillingDocumentPatchOperations) error { + m.mu.Lock() + defer m.mu.Unlock() + + // Find the billing document by resourceID + var foundID string + for id, doc := range m.billing { + if strings.EqualFold(doc.ResourceID.String(), resourceID.String()) && doc.DeletionTime == nil { + foundID = id + break + } + } + + if foundID == "" { + return &azcore.ResponseError{StatusCode: http.StatusNotFound} + } + + // Apply patch operations would be implemented here + // For now, just acknowledge the operation + return nil +} + +// UntypedCRUD provides access to untyped resource operations. +func (m *MockDBClient) UntypedCRUD(parentResourceID azcorearm.ResourceID) (database.UntypedResourceCRUD, error) { + return newMockUntypedCRUD(m, parentResourceID), nil +} + +// HCPClusters returns a CRUD interface for HCPCluster resources. +func (m *MockDBClient) HCPClusters(subscriptionID, resourceGroupName string) database.HCPClusterCRUD { + parts := []string{ + "/subscriptions", + strings.ToLower(subscriptionID), + } + if len(resourceGroupName) > 0 { + parts = append(parts, + "resourceGroups", + resourceGroupName) + } + parentResourceID := api.Must(azcorearm.ParseResourceID(strings.ToLower(path.Join(parts...)))) + + return newMockHCPClusterCRUD(m, parentResourceID) +} + +// Operations returns a CRUD interface for operation resources. +func (m *MockDBClient) Operations(subscriptionID string) database.OperationCRUD { + parts := []string{ + "/subscriptions", + strings.ToLower(subscriptionID), + } + parentResourceID := api.Must(azcorearm.ParseResourceID(path.Join(parts...))) + + return newMockOperationCRUD(m, parentResourceID) +} + +// Subscriptions returns a CRUD interface for subscription resources. +func (m *MockDBClient) Subscriptions() database.SubscriptionCRUD { + return newMockSubscriptionCRUD(m) +} + +// ServiceProviderClusters returns a CRUD interface for service provider cluster resources. +func (m *MockDBClient) ServiceProviderClusters(subscriptionID, resourceGroupName, clusterName string) database.ServiceProviderClusterCRUD { + clusterResourceID := database.NewClusterResourceID(subscriptionID, resourceGroupName, clusterName) + return newMockServiceProviderClusterCRUD(m, clusterResourceID) +} + +// LoadFromDirectory loads cosmos-record context data from a directory. +// It reads all JSON files that match the pattern for "load" directories. 
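+// A minimal usage sketch (the directory path is illustrative, not a real
+// fixture path in this repo):
+//
+//	mock := NewMockDBClient()
+//	if err := mock.LoadFromDirectory(filepath.Join("testdata", "01-load-old-data")); err != nil {
+//		t.Fatal(err)
+//	}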
+func (m *MockDBClient) LoadFromDirectory(dirPath string) error { + m.mu.Lock() + defer m.mu.Unlock() + + return filepath.Walk(dirPath, func(filePath string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + // Skip directories + if info.IsDir() { + return nil + } + + // Only process JSON files + if !strings.HasSuffix(strings.ToLower(filePath), ".json") { + return nil + } + + // Read the file + data, err := os.ReadFile(filePath) + if err != nil { + return fmt.Errorf("failed to read file %s: %w", filePath, err) + } + + // Parse as TypedDocument to get the ID + var typedDoc database.TypedDocument + if err := json.Unmarshal(data, &typedDoc); err != nil { + return fmt.Errorf("failed to unmarshal file %s: %w", filePath, err) + } + + // Store the document + if typedDoc.ID != "" { + m.documents[strings.ToLower(typedDoc.ID)] = data + } + + return nil + }) +} + +// LoadContent loads a single JSON document into the mock database. +// This implements the ContentLoader interface from integrationutils. +func (m *MockDBClient) LoadContent(ctx context.Context, content []byte) error { + // Parse as TypedDocument to get the ID + var typedDoc database.TypedDocument + if err := json.Unmarshal(content, &typedDoc); err != nil { + return fmt.Errorf("failed to unmarshal content: %w", err) + } + + if typedDoc.ID == "" { + return fmt.Errorf("document is missing ID field") + } + + m.mu.Lock() + defer m.mu.Unlock() + m.documents[strings.ToLower(typedDoc.ID)] = content + return nil +} + +// ListAllDocuments returns all documents in the mock database. +// This implements the DocumentLister interface from integrationutils. +func (m *MockDBClient) ListAllDocuments(ctx context.Context) ([]*database.TypedDocument, error) { + m.mu.RLock() + defer m.mu.RUnlock() + + var results []*database.TypedDocument + for _, data := range m.documents { + var typedDoc database.TypedDocument + if err := json.Unmarshal(data, &typedDoc); err != nil { + return nil, fmt.Errorf("failed to unmarshal document: %w", err) + } + results = append(results, &typedDoc) + } + return results, nil +} + +// StoreDocument stores a raw JSON document in the mock database. +func (m *MockDBClient) StoreDocument(cosmosID string, data json.RawMessage) { + m.mu.Lock() + defer m.mu.Unlock() + m.documents[strings.ToLower(cosmosID)] = data +} + +// GetDocument retrieves a raw JSON document from the mock database. +func (m *MockDBClient) GetDocument(cosmosID string) (json.RawMessage, bool) { + m.mu.RLock() + defer m.mu.RUnlock() + data, ok := m.documents[strings.ToLower(cosmosID)] + return data, ok +} + +// DeleteDocument removes a document from the mock database. +func (m *MockDBClient) DeleteDocument(cosmosID string) { + m.mu.Lock() + defer m.mu.Unlock() + delete(m.documents, strings.ToLower(cosmosID)) +} + +// ListDocuments returns all documents matching the given resource type and prefix. 
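+// Both filters are optional: a nil resourceType matches any type, an empty
+// prefix matches any resourceId, and all comparisons are case-insensitive.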
+func (m *MockDBClient) ListDocuments(resourceType *azcorearm.ResourceType, prefix string) []json.RawMessage { + m.mu.RLock() + defer m.mu.RUnlock() + + var results []json.RawMessage + for _, data := range m.documents { + var typedDoc database.TypedDocument + if err := json.Unmarshal(data, &typedDoc); err != nil { + continue + } + + // Check resource type match if specified + if resourceType != nil { + if !strings.EqualFold(typedDoc.ResourceType, resourceType.String()) { + continue + } + } + + // Check prefix match if specified + if prefix != "" { + var props map[string]any + if err := json.Unmarshal(typedDoc.Properties, &props); err != nil { + continue + } + resourceIDStr, ok := props["resourceId"].(string) + if !ok || !strings.HasPrefix(strings.ToLower(resourceIDStr), strings.ToLower(prefix)) { + continue + } + } + + results = append(results, data) + } + + return results +} + +// Clear removes all documents from the mock database. +func (m *MockDBClient) Clear() { + m.mu.Lock() + defer m.mu.Unlock() + m.documents = make(map[string]json.RawMessage) + m.billing = make(map[string]*database.BillingDocument) +} + +// GetAllDocuments returns a copy of all documents (for testing purposes). +func (m *MockDBClient) GetAllDocuments() map[string]json.RawMessage { + m.mu.RLock() + defer m.mu.RUnlock() + + result := make(map[string]json.RawMessage, len(m.documents)) + for k, v := range m.documents { + result[k] = v + } + return result +} + +var _ database.DBClient = &MockDBClient{} + +// mockTransaction implements database.DBTransaction for the mock client. +type mockTransaction struct { + pk string + client *MockDBClient + steps []mockTransactionStep + onSuccess []database.DBTransactionCallback +} + +type mockTransactionStep struct { + details database.CosmosDBTransactionStepDetails + execute func() (string, json.RawMessage, error) +} + +func newMockTransaction(pk string, client *MockDBClient) *mockTransaction { + return &mockTransaction{ + pk: strings.ToLower(pk), + client: client, + } +} + +func (t *mockTransaction) GetPartitionKey() string { + return t.pk +} + +func (t *mockTransaction) AddStep(details database.CosmosDBTransactionStepDetails, stepFn database.CosmosDBTransactionStep) { + // We need to capture what the step does for the mock + t.steps = append(t.steps, mockTransactionStep{ + details: details, + execute: func() (string, json.RawMessage, error) { + // The real implementation uses TransactionalBatch, but we just execute directly + // We'll handle this in Execute by storing the details + return details.CosmosID, nil, nil + }, + }) +} + +func (t *mockTransaction) OnSuccess(callback database.DBTransactionCallback) { + if callback != nil { + t.onSuccess = append(t.onSuccess, callback) + } +} + +func (t *mockTransaction) Execute(ctx context.Context, o *azcosmos.TransactionalBatchOptions) (database.DBTransactionResult, error) { + result := &mockTransactionResult{ + items: make(map[string]json.RawMessage), + } + + // Execute all steps + for _, step := range t.steps { + cosmosID, data, err := step.execute() + if err != nil { + return nil, err + } + if data != nil { + result.items[cosmosID] = data + } + } + + // Call success callbacks + for _, callback := range t.onSuccess { + callback(result) + } + + return result, nil +} + +var _ database.DBTransaction = &mockTransaction{} + +// mockTransactionResult implements database.DBTransactionResult. 
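+// GetItem re-decodes the stored document based on its typed resourceType and
+// currently supports cluster, node pool and external auth documents; other
+// resource types return an error.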
+type mockTransactionResult struct { + items map[string]json.RawMessage +} + +func (r *mockTransactionResult) GetItem(cosmosUID string) (any, error) { + data, ok := r.items[cosmosUID] + if !ok { + return nil, database.ErrItemNotFound + } + + var typedDoc database.TypedDocument + if err := json.Unmarshal(data, &typedDoc); err != nil { + return nil, err + } + + switch strings.ToLower(typedDoc.ResourceType) { + case strings.ToLower(api.ClusterResourceType.String()): + var cosmosObj database.HCPCluster + if err := json.Unmarshal(data, &cosmosObj); err != nil { + return nil, err + } + return database.CosmosToInternalCluster(&cosmosObj) + case strings.ToLower(api.NodePoolResourceType.String()): + var cosmosObj database.NodePool + if err := json.Unmarshal(data, &cosmosObj); err != nil { + return nil, err + } + return database.CosmosToInternalNodePool(&cosmosObj) + case strings.ToLower(api.ExternalAuthResourceType.String()): + var cosmosObj database.ExternalAuth + if err := json.Unmarshal(data, &cosmosObj); err != nil { + return nil, err + } + return database.CosmosToInternalExternalAuth(&cosmosObj) + default: + return nil, fmt.Errorf("unknown resource type '%s'", typedDoc.ResourceType) + } +} + +var _ database.DBTransactionResult = &mockTransactionResult{} + +// mockIterator implements database.DBClientIterator for in-memory iteration. +type mockIterator[T any] struct { + items []*T + ids []string + continuationToken string + err error + index int +} + +func newMockIterator[T any](ids []string, items []*T) *mockIterator[T] { + return &mockIterator[T]{ + items: items, + ids: ids, + index: 0, + } +} + +func (iter *mockIterator[T]) Items(ctx context.Context) database.DBClientIteratorItem[T] { + return func(yield func(string, *T) bool) { + for i, item := range iter.items { + if !yield(iter.ids[i], item) { + return + } + } + } +} + +func (iter *mockIterator[T]) GetContinuationToken() string { + return iter.continuationToken +} + +func (iter *mockIterator[T]) GetError() error { + return iter.err +} + +var _ database.DBClientIterator[api.HCPOpenShiftCluster] = &mockIterator[api.HCPOpenShiftCluster]{} + +// MockLockClient implements database.LockClientInterface for testing. +type MockLockClient struct { + defaultTTL time.Duration + locks map[string]bool + mu sync.Mutex +} + +// NewMockLockClient creates a new mock lock client. 
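+// The TTL is only reported via GetDefaultTimeToLive and the Retry-After
+// header; the mock never expires locks on its own.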
+func NewMockLockClient(defaultTTL time.Duration) *MockLockClient { + return &MockLockClient{ + defaultTTL: defaultTTL, + locks: make(map[string]bool), + } +} + +func (c *MockLockClient) GetDefaultTimeToLive() time.Duration { + return c.defaultTTL +} + +func (c *MockLockClient) SetRetryAfterHeader(header http.Header) { + header.Set("Retry-After", fmt.Sprintf("%d", int(c.defaultTTL.Seconds()))) +} + +func (c *MockLockClient) AcquireLock(ctx context.Context, id string, timeout *time.Duration) (*azcosmos.ItemResponse, error) { + c.mu.Lock() + defer c.mu.Unlock() + + if c.locks[id] { + return nil, nil + } + c.locks[id] = true + return &azcosmos.ItemResponse{}, nil +} + +func (c *MockLockClient) TryAcquireLock(ctx context.Context, id string) (*azcosmos.ItemResponse, error) { + c.mu.Lock() + defer c.mu.Unlock() + + if c.locks[id] { + return nil, nil + } + c.locks[id] = true + return &azcosmos.ItemResponse{}, nil +} + +func (c *MockLockClient) HoldLock(ctx context.Context, item *azcosmos.ItemResponse) (context.Context, database.StopHoldLock) { + cancelCtx, cancel := context.WithCancel(ctx) + return cancelCtx, func() *azcosmos.ItemResponse { + cancel() + return item + } +} + +func (c *MockLockClient) RenewLock(ctx context.Context, item *azcosmos.ItemResponse) (*azcosmos.ItemResponse, error) { + return item, nil +} + +func (c *MockLockClient) ReleaseLock(ctx context.Context, item *azcosmos.ItemResponse) error { + return nil +} + +var _ database.LockClientInterface = &MockLockClient{} diff --git a/internal/databasetesting/mock_dbclient_test.go b/internal/databasetesting/mock_dbclient_test.go new file mode 100644 index 0000000000..c7c52dc1ef --- /dev/null +++ b/internal/databasetesting/mock_dbclient_test.go @@ -0,0 +1,582 @@ +// Copyright 2025 Microsoft Corporation +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package databasetesting + +import ( + "context" + "encoding/json" + "path/filepath" + "testing" + "time" + + azcorearm "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" + + "github.com/Azure/ARO-HCP/internal/api" + "github.com/Azure/ARO-HCP/internal/api/arm" + "github.com/Azure/ARO-HCP/internal/database" +) + +func TestMockDBClient_LoadFromDirectory(t *testing.T) { + // Find a test directory with cosmos-record context data + testDataDir := filepath.Join("..", "..", "test-integration", "frontend", "artifacts", "FrontendCRUD", "NodePool", "read-old-data", "01-load-old-data") + + mock := NewMockDBClient() + err := mock.LoadFromDirectory(testDataDir) + if err != nil { + t.Fatalf("Failed to load test data from %s: %v", testDataDir, err) + } + + // Verify that documents were loaded + allDocs := mock.GetAllDocuments() + docCount := len(allDocs) + + if docCount == 0 { + t.Fatal("Expected documents to be loaded, but got 0") + } + + t.Logf("Loaded %d documents from %s", docCount, testDataDir) + + // Verify we can read different resource types + foundCluster := false + foundNodePool := false + foundSubscription := false + foundOperation := false + + for _, data := range allDocs { + var typedDoc database.TypedDocument + if err := json.Unmarshal(data, &typedDoc); err != nil { + continue + } + + switch { + case typedDoc.ResourceType == api.ClusterResourceType.String(): + foundCluster = true + case typedDoc.ResourceType == api.NodePoolResourceType.String(): + foundNodePool = true + case typedDoc.ResourceType == azcorearm.SubscriptionResourceType.String(): + foundSubscription = true + case typedDoc.ResourceType == api.OperationStatusResourceType.String(): + foundOperation = true + } + } + + if !foundCluster { + t.Error("Expected to find a cluster document") + } + if !foundNodePool { + t.Error("Expected to find a node pool document") + } + if !foundSubscription { + t.Error("Expected to find a subscription document") + } + if !foundOperation { + t.Error("Expected to find an operation document") + } +} + +func TestMockDBClient_LoadAndQuery(t *testing.T) { + // Load test data + testDataDir := filepath.Join("..", "..", "test-integration", "frontend", "artifacts", "FrontendCRUD", "NodePool", "read-old-data", "01-load-old-data") + + mock := NewMockDBClient() + err := mock.LoadFromDirectory(testDataDir) + if err != nil { + t.Fatalf("Failed to load test data: %v", err) + } + + ctx := context.Background() + + // Try to query the loaded data + // The test data contains clusters in the subscription 6b690bec-0c16-4ecb-8f67-781caf40bba7 + subscriptionID := "6b690bec-0c16-4ecb-8f67-781caf40bba7" + resourceGroupName := "test-rg" + + clusterCRUD := mock.HCPClusters(subscriptionID, resourceGroupName) + + // List clusters + iter, err := clusterCRUD.List(ctx, nil) + if err != nil { + t.Fatalf("Failed to list clusters: %v", err) + } + + count := 0 + for _, item := range iter.Items(ctx) { + if item != nil { + count++ + t.Logf("Found cluster: %s", item.Name) + } + } + + if iter.GetError() != nil { + t.Fatalf("Iterator error: %v", iter.GetError()) + } + + t.Logf("Found %d clusters", count) +} + +func TestMockDBClient_CRUD_Cluster(t *testing.T) { + mock := NewMockDBClient() + ctx := context.Background() + + subscriptionID := "6b690bec-0c16-4ecb-8f67-781caf40bba7" + resourceGroupName := "test-rg" + clusterName := "test-cluster" + + // Create a cluster + clusterResourceID := api.Must(azcorearm.ParseResourceID( + "/subscriptions/" + subscriptionID + + "/resourceGroups/" + resourceGroupName + + 
"/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/" + clusterName)) + + internalID, err := api.NewInternalID("/api/clusters_mgmt/v1/clusters/abc123") + if err != nil { + t.Fatalf("Failed to create internal ID: %v", err) + } + + cluster := &api.HCPOpenShiftCluster{ + TrackedResource: arm.TrackedResource{ + Resource: arm.Resource{ + ID: clusterResourceID, + Name: clusterName, + Type: api.ClusterResourceType.String(), + }, + Location: "eastus", + }, + ServiceProviderProperties: api.HCPOpenShiftClusterServiceProviderProperties{ + ProvisioningState: arm.ProvisioningStateSucceeded, + ClusterServiceID: internalID, + }, + } + + clusterCRUD := mock.HCPClusters(subscriptionID, resourceGroupName) + + // Create + created, err := clusterCRUD.Create(ctx, cluster, nil) + if err != nil { + t.Fatalf("Failed to create cluster: %v", err) + } + + if created.Name != clusterName { + t.Errorf("Expected cluster name %s, got %s", clusterName, created.Name) + } + + // Get + retrieved, err := clusterCRUD.Get(ctx, clusterName) + if err != nil { + t.Fatalf("Failed to get cluster: %v", err) + } + + if retrieved.Name != clusterName { + t.Errorf("Expected cluster name %s, got %s", clusterName, retrieved.Name) + } + + // List + iter, err := clusterCRUD.List(ctx, nil) + if err != nil { + t.Fatalf("Failed to list clusters: %v", err) + } + + count := 0 + for _, item := range iter.Items(ctx) { + if item != nil { + count++ + } + } + + if iter.GetError() != nil { + t.Fatalf("Iterator error: %v", iter.GetError()) + } + + if count != 1 { + t.Errorf("Expected 1 cluster in list, got %d", count) + } + + // Delete + err = clusterCRUD.Delete(ctx, clusterName) + if err != nil { + t.Fatalf("Failed to delete cluster: %v", err) + } + + // Verify deletion + _, err = clusterCRUD.Get(ctx, clusterName) + if !database.IsResponseError(err, 404) { + t.Errorf("Expected 404 after deletion, got: %v", err) + } +} + +func TestMockDBClient_CRUD_Operation(t *testing.T) { + mock := NewMockDBClient() + ctx := context.Background() + + subscriptionID := "6b690bec-0c16-4ecb-8f67-781caf40bba7" + + // Create an operation + operationID := api.Must(azcorearm.ParseResourceID( + "/subscriptions/" + subscriptionID + + "/providers/Microsoft.RedHatOpenShift/locations/eastus/hcpOperationStatuses/op-123")) + + externalID := api.Must(azcorearm.ParseResourceID( + "/subscriptions/" + subscriptionID + + "/resourceGroups/test-rg/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/test-cluster")) + + resourceID := api.Must(azcorearm.ParseResourceID( + "/subscriptions/" + subscriptionID + + "/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/op-123")) + + now := time.Now().UTC() + operation := &api.Operation{ + ResourceID: resourceID, + OperationID: operationID, + ExternalID: externalID, + Request: api.OperationRequestCreate, + Status: arm.ProvisioningStateAccepted, + StartTime: now, + LastTransitionTime: now, + } + + operationCRUD := mock.Operations(subscriptionID) + + // Create + created, err := operationCRUD.Create(ctx, operation, nil) + if err != nil { + t.Fatalf("Failed to create operation: %v", err) + } + + if created.OperationID.Name != operationID.Name { + t.Errorf("Expected operation ID %s, got %s", operationID.Name, created.OperationID.Name) + } + + // List active operations + iter := operationCRUD.ListActiveOperations(nil) + + count := 0 + for _, item := range iter.Items(ctx) { + if item != nil { + count++ + } + } + + if iter.GetError() != nil { + t.Fatalf("Iterator error: %v", iter.GetError()) + } + + if count != 1 { + t.Errorf("Expected 1 
active operation, got %d", count) + } + + // List with filter + createRequest := api.OperationRequestCreate + iterFiltered := operationCRUD.ListActiveOperations(&database.DBClientListActiveOperationDocsOptions{ + Request: &createRequest, + }) + + countFiltered := 0 + for _, item := range iterFiltered.Items(ctx) { + if item != nil { + countFiltered++ + } + } + + if iterFiltered.GetError() != nil { + t.Fatalf("Iterator error: %v", iterFiltered.GetError()) + } + + if countFiltered != 1 { + t.Errorf("Expected 1 active operation with filter, got %d", countFiltered) + } +} + +func TestMockDBClient_CRUD_Subscription(t *testing.T) { + mock := NewMockDBClient() + ctx := context.Background() + + subscriptionID := "6b690bec-0c16-4ecb-8f67-781caf40bba7" + subscriptionResourceID := api.Must(arm.ToSubscriptionResourceID(subscriptionID)) + + registrationDate := "2025-01-01T00:00:00Z" + subscription := &arm.Subscription{ + ResourceID: subscriptionResourceID, + State: arm.SubscriptionStateRegistered, + RegistrationDate: ®istrationDate, + } + + subscriptionCRUD := mock.Subscriptions() + + // Create + created, err := subscriptionCRUD.Create(ctx, subscription, nil) + if err != nil { + t.Fatalf("Failed to create subscription: %v", err) + } + + if created.State != arm.SubscriptionStateRegistered { + t.Errorf("Expected state %s, got %s", arm.SubscriptionStateRegistered, created.State) + } + + // Get + retrieved, err := subscriptionCRUD.Get(ctx, subscriptionID) + if err != nil { + t.Fatalf("Failed to get subscription: %v", err) + } + + if retrieved.State != arm.SubscriptionStateRegistered { + t.Errorf("Expected state %s, got %s", arm.SubscriptionStateRegistered, retrieved.State) + } + + // Replace + subscription.State = arm.SubscriptionStateSuspended + replaced, err := subscriptionCRUD.Replace(ctx, subscription, nil) + if err != nil { + t.Fatalf("Failed to replace subscription: %v", err) + } + + if replaced.State != arm.SubscriptionStateSuspended { + t.Errorf("Expected state %s, got %s", arm.SubscriptionStateSuspended, replaced.State) + } + + // Delete + err = subscriptionCRUD.Delete(ctx, subscriptionID) + if err != nil { + t.Fatalf("Failed to delete subscription: %v", err) + } + + // Verify deletion + _, err = subscriptionCRUD.Get(ctx, subscriptionID) + if !database.IsResponseError(err, 404) { + t.Errorf("Expected 404 after deletion, got: %v", err) + } +} + +func TestMockDBClient_Transaction(t *testing.T) { + mock := NewMockDBClient() + ctx := context.Background() + + subscriptionID := "6b690bec-0c16-4ecb-8f67-781caf40bba7" + resourceGroupName := "test-rg" + clusterName := "test-cluster" + + // Create a cluster via transaction + clusterResourceID := api.Must(azcorearm.ParseResourceID( + "/subscriptions/" + subscriptionID + + "/resourceGroups/" + resourceGroupName + + "/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/" + clusterName)) + + internalID, err := api.NewInternalID("/api/clusters_mgmt/v1/clusters/abc123") + if err != nil { + t.Fatalf("Failed to create internal ID: %v", err) + } + + cluster := &api.HCPOpenShiftCluster{ + TrackedResource: arm.TrackedResource{ + Resource: arm.Resource{ + ID: clusterResourceID, + Name: clusterName, + Type: api.ClusterResourceType.String(), + }, + Location: "eastus", + }, + ServiceProviderProperties: api.HCPOpenShiftClusterServiceProviderProperties{ + ProvisioningState: arm.ProvisioningStateSucceeded, + ClusterServiceID: internalID, + }, + } + + transaction := mock.NewTransaction(subscriptionID) + clusterCRUD := mock.HCPClusters(subscriptionID, resourceGroupName) + 
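+	// Queue the create on the transaction; the document is only stored once
+	// Execute runs.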
+ _, err = clusterCRUD.AddCreateToTransaction(ctx, transaction, cluster, nil) + if err != nil { + t.Fatalf("Failed to add create to transaction: %v", err) + } + + _, err = transaction.Execute(ctx, nil) + if err != nil { + t.Fatalf("Failed to execute transaction: %v", err) + } + + // Verify cluster was created + retrieved, err := clusterCRUD.Get(ctx, clusterName) + if err != nil { + t.Fatalf("Failed to get cluster after transaction: %v", err) + } + + if retrieved.Name != clusterName { + t.Errorf("Expected cluster name %s, got %s", clusterName, retrieved.Name) + } +} + +func TestMockDBClient_UntypedCRUD(t *testing.T) { + mock := NewMockDBClient() + ctx := context.Background() + + subscriptionID := "6b690bec-0c16-4ecb-8f67-781caf40bba7" + resourceGroupName := "test-rg" + clusterName := "test-cluster" + + // First create a cluster using the typed CRUD + clusterResourceID := api.Must(azcorearm.ParseResourceID( + "/subscriptions/" + subscriptionID + + "/resourceGroups/" + resourceGroupName + + "/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/" + clusterName)) + + internalID, err := api.NewInternalID("/api/clusters_mgmt/v1/clusters/abc123") + if err != nil { + t.Fatalf("Failed to create internal ID: %v", err) + } + + cluster := &api.HCPOpenShiftCluster{ + TrackedResource: arm.TrackedResource{ + Resource: arm.Resource{ + ID: clusterResourceID, + Name: clusterName, + Type: api.ClusterResourceType.String(), + }, + Location: "eastus", + }, + ServiceProviderProperties: api.HCPOpenShiftClusterServiceProviderProperties{ + ProvisioningState: arm.ProvisioningStateSucceeded, + ClusterServiceID: internalID, + }, + } + + clusterCRUD := mock.HCPClusters(subscriptionID, resourceGroupName) + _, err = clusterCRUD.Create(ctx, cluster, nil) + if err != nil { + t.Fatalf("Failed to create cluster: %v", err) + } + + // Now use untyped CRUD to access it + parentResourceID := api.Must(azcorearm.ParseResourceID( + "/subscriptions/" + subscriptionID + + "/resourceGroups/" + resourceGroupName)) + + untypedCRUD, err := mock.UntypedCRUD(*parentResourceID) + if err != nil { + t.Fatalf("Failed to get untyped CRUD: %v", err) + } + + // Get the cluster + retrieved, err := untypedCRUD.Get(ctx, clusterResourceID) + if err != nil { + t.Fatalf("Failed to get cluster via untyped CRUD: %v", err) + } + + if retrieved.ResourceType != api.ClusterResourceType.String() { + t.Errorf("Expected resource type %s, got %s", api.ClusterResourceType.String(), retrieved.ResourceType) + } +} + +func TestMockDBClient_StoreAndRetrieveRawDocument(t *testing.T) { + mock := NewMockDBClient() + + cosmosID := "test-cosmos-id" + testData := map[string]any{ + "id": cosmosID, + "partitionKey": "test-partition", + "resourceType": "TestType", + "properties": map[string]any{ + "foo": "bar", + }, + } + + data, err := json.Marshal(testData) + if err != nil { + t.Fatalf("Failed to marshal test data: %v", err) + } + + // Store + mock.StoreDocument(cosmosID, data) + + // Retrieve + retrieved, ok := mock.GetDocument(cosmosID) + if !ok { + t.Fatal("Document not found") + } + + var result map[string]any + if err := json.Unmarshal(retrieved, &result); err != nil { + t.Fatalf("Failed to unmarshal retrieved data: %v", err) + } + + if result["id"] != cosmosID { + t.Errorf("Expected id %s, got %s", cosmosID, result["id"]) + } + + // Delete + mock.DeleteDocument(cosmosID) + + // Verify deletion + _, ok = mock.GetDocument(cosmosID) + if ok { + t.Error("Document should have been deleted") + } +} + +func TestMockDBClient_Clear(t *testing.T) { + mock := 
NewMockDBClient() + + // Store some documents + mock.StoreDocument("doc1", json.RawMessage(`{"id": "doc1"}`)) + mock.StoreDocument("doc2", json.RawMessage(`{"id": "doc2"}`)) + + countBefore := len(mock.GetAllDocuments()) + + if countBefore != 2 { + t.Errorf("Expected 2 documents before clear, got %d", countBefore) + } + + // Clear + mock.Clear() + + countAfter := len(mock.GetAllDocuments()) + + if countAfter != 0 { + t.Errorf("Expected 0 documents after clear, got %d", countAfter) + } +} + +func TestMockLockClient(t *testing.T) { + ctx := context.Background() + lockClient := NewMockLockClient(30 * time.Second) + + // Test GetDefaultTimeToLive + ttl := lockClient.GetDefaultTimeToLive() + if ttl != 30*time.Second { + t.Errorf("Expected TTL 30s, got %v", ttl) + } + + // Test TryAcquireLock + lock, err := lockClient.TryAcquireLock(ctx, "test-lock") + if err != nil { + t.Fatalf("Failed to acquire lock: %v", err) + } + if lock == nil { + t.Fatal("Expected lock to be acquired") + } + + // Test that same lock can't be acquired again + lock2, err := lockClient.TryAcquireLock(ctx, "test-lock") + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + if lock2 != nil { + t.Error("Expected lock to not be acquired (already held)") + } + + // Test ReleaseLock + err = lockClient.ReleaseLock(ctx, lock) + if err != nil { + t.Fatalf("Failed to release lock: %v", err) + } +} diff --git a/test-integration/backend/controllers/do_nothing/do_nothing_test.go b/test-integration/backend/controllers/do_nothing/do_nothing_test.go index b1ced412ae..b961ff26d6 100644 --- a/test-integration/backend/controllers/do_nothing/do_nothing_test.go +++ b/test-integration/backend/controllers/do_nothing/do_nothing_test.go @@ -32,8 +32,10 @@ import ( var artifacts embed.FS func TestDoNothingController(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) + integrationutils.WithAndWithoutCosmos(t, testDoNothingController) +} +func testDoNothingController(t *testing.T, withMock bool) { testCases := []controllertesthelpers.BasicControllerTest{ { Name: "sync_deleted_cluster", @@ -66,7 +68,7 @@ func TestDoNothingController(t *testing.T) { } for _, tc := range testCases { + tc.WithMock = withMock t.Run(tc.Name, tc.RunTest) } - } diff --git a/test-integration/backend/controllers/mismatches/cluster_mismatch_test.go b/test-integration/backend/controllers/mismatches/cluster_mismatch_test.go index f20c3fecc1..4ccadb4143 100644 --- a/test-integration/backend/controllers/mismatches/cluster_mismatch_test.go +++ b/test-integration/backend/controllers/mismatches/cluster_mismatch_test.go @@ -40,8 +40,10 @@ import ( var artifacts embed.FS func TestClusterMismatchController(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) + integrationutils.WithAndWithoutCosmos(t, testClusterMismatchController) +} +func testClusterMismatchController(t *testing.T, withMock bool) { testCases := []controllertesthelpers.BasicControllerTest{ { Name: "remove_orphaned_cluster_descendents", @@ -96,6 +98,7 @@ func TestClusterMismatchController(t *testing.T) { } for _, tc := range testCases { + tc.WithMock = withMock t.Run(tc.Name, tc.RunTest) } diff --git a/test-integration/backend/controllers/mismatches/externalauth_mismatch_test.go b/test-integration/backend/controllers/mismatches/externalauth_mismatch_test.go index 3c49ddaa77..0479065df8 100644 --- a/test-integration/backend/controllers/mismatches/externalauth_mismatch_test.go +++ b/test-integration/backend/controllers/mismatches/externalauth_mismatch_test.go @@ -34,8 +34,10 @@ import ( ) 
func TestExternalAuthMismatchController(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) + integrationutils.WithAndWithoutCosmos(t, testExternalAuthMismatchController) +} +func testExternalAuthMismatchController(t *testing.T, withMock bool) { testCases := []controllertesthelpers.BasicControllerTest{ { Name: "remove_orphaned_externalauth_descendents", @@ -91,6 +93,7 @@ func TestExternalAuthMismatchController(t *testing.T) { } for _, tc := range testCases { + tc.WithMock = withMock t.Run(tc.Name, tc.RunTest) } diff --git a/test-integration/backend/controllers/mismatches/nodepool_mismatch_test.go b/test-integration/backend/controllers/mismatches/nodepool_mismatch_test.go index daaeff71f4..6a7312e0dd 100644 --- a/test-integration/backend/controllers/mismatches/nodepool_mismatch_test.go +++ b/test-integration/backend/controllers/mismatches/nodepool_mismatch_test.go @@ -34,8 +34,10 @@ import ( ) func TestNodePoolMismatchController(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) + integrationutils.WithAndWithoutCosmos(t, testNodePoolMismatchController) +} +func testNodePoolMismatchController(t *testing.T, withMock bool) { testCases := []controllertesthelpers.BasicControllerTest{ { Name: "remove_orphaned_nodepool_descendents", @@ -91,6 +93,7 @@ func TestNodePoolMismatchController(t *testing.T) { } for _, tc := range testCases { + tc.WithMock = withMock t.Run(tc.Name, tc.RunTest) } diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/01-create-01/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/01-create-01/00-key.json index e9f0f5039d..08ce11f288 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/01-create-01/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/01-create-01/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool" + "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/nodePools/hcpOpenShiftControllers" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/02-list-first-nodepool/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/02-list-first-nodepool/00-key.json index e9f0f5039d..08ce11f288 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/02-list-first-nodepool/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/02-list-first-nodepool/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool" + "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/nodePools/hcpOpenShiftControllers" } \ No newline at end of file diff --git 
a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/02-list-second-nodepool/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/02-list-second-nodepool/00-key.json index 92c5b42ca5..b1a66707c2 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/02-list-second-nodepool/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/02-list-second-nodepool/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/second-node-pool" + "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/second-node-pool", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/nodePools/hcpOpenShiftControllers" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/03-replace-update-01/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/03-replace-update-01/00-key.json index e9f0f5039d..3bee1ff4a4 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/03-replace-update-01/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/03-replace-update-01/00-key.json @@ -1,3 +1,3 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool" + "resourceId": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool/hcpOpenShiftControllers/first-controller" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/04-list-first-nodepool/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/04-list-first-nodepool/00-key.json index e9f0f5039d..08ce11f288 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/04-list-first-nodepool/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/04-list-first-nodepool/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool" + "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/nodePools/hcpOpenShiftControllers" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/05-delete-first-first-controller/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/05-delete-first-first-controller/00-key.json index 97b8a68664..861ed0e875 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/05-delete-first-first-controller/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/05-delete-first-first-controller/00-key.json 
@@ -1,4 +1,3 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool", - "deleteResourceName": "first-controller" + "resourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool/hcpOpenShiftControllers/first-controller" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/06-list-first-nodepool/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/06-list-first-nodepool/00-key.json index e9f0f5039d..08ce11f288 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/06-list-first-nodepool/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic-nodepool/06-list-first-nodepool/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool" + "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/basic/nodePools/first-node-pool", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/nodePools/hcpOpenShiftControllers" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/02-get-initial/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/02-get-initial/00-key.json index 8d7a773a81..41cce70a8f 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/02-get-initial/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/02-get-initial/00-key.json @@ -1,3 +1,3 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "resourceID": "/subscriptions/subscriptionID/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster/hcpOpenShiftControllers/test-controller" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/03-list-another/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/03-list-another/00-key.json index 8d7a773a81..1177a2c2e5 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/03-list-another/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/03-list-another/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/hcpOpenShiftControllers" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/04-create-another/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/04-create-another/00-key.json index 8d7a773a81..1177a2c2e5 100644 --- 
a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/04-create-another/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/04-create-another/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/hcpOpenShiftControllers" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/05-get-new-instance/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/05-get-new-instance/00-key.json index 8d7a773a81..f3edd19606 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/05-get-new-instance/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/05-get-new-instance/00-key.json @@ -1,3 +1,3 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "resourceID": "/subscriptions/subscriptionID/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster/hcpOpenShiftControllers/second-controller" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/06-list-both-instances/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/06-list-both-instances/00-key.json index 8d7a773a81..1177a2c2e5 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/06-list-both-instances/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/06-list-both-instances/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/hcpOpenShiftControllers" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/07-get-other-resource-group/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/07-get-other-resource-group/00-key.json index d8a2d9d406..11ba81f798 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/07-get-other-resource-group/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/07-get-other-resource-group/00-key.json @@ -1,3 +1,3 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/otherResourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "resourceID": "/subscriptions/subscriptionID/resourceGroups/otherResourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster/hcpOpenShiftControllers/second-controller" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/08-list-other-resource-group/00-key.json 
b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/08-list-other-resource-group/00-key.json index d8a2d9d406..a90c32291a 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/08-list-other-resource-group/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ControllerCRUD/basic/08-list-other-resource-group/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/otherResourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "parentResourceID": "/subscriptions/subscriptionID/resourceGroups/otherResourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/hcpOpenShiftControllers" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/01-load-initial/operation-1.json b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/01-load-initial/operation-1.json index 4a180a0fbe..ec84bfcab8 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/01-load-initial/operation-1.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/01-load-initial/operation-1.json @@ -1,21 +1,22 @@ { - "_attachments": "attachments/", - "_etag": "\"00000000-0000-0000-6625-ed6b640501dc\"", - "_rid": "KDJCAI5tVlcEAAAAAAAAAA==", - "_self": "dbs/KDJCAA==/colls/KDJCAI5tVlc=/docs/KDJCAI5tVlcEAAAAAAAAAA==/", - "_ts": 1764966599, - "id": "7c6b7caa-572f-41b6-9f18-1bb11adfba31", - "partitionKey": "0465bc32-c654-41b8-8d87-9815d7abe8f6", - "properties": { - "externalId": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/create-with-tags", - "internalId": "/api/clusters_mgmt/v1/clusters/dv8blqb5g9", - "lastTransitionTime": "2025-12-05T20:29:59.734335781Z", - "operationId": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6/providers/Microsoft.RedHatOpenShift/locations/fake-location/hcpOperationStatuses/7c6b7caa-572f-41b6-9f18-1bb11adfba31", - "request": "Create", - "startTime": "2025-12-05T20:29:59.734335781Z", - "status": "Accepted", - "tenantId": "00000000-0000-0000-0000-000000000000" - }, - "resourceType": "microsoft.redhatopenshift/hcpoperationstatuses", - "ttl": 604800 + "_attachments": "attachments/", + "_etag": "\"00000000-0000-0000-6625-ed6b640501dc\"", + "_rid": "KDJCAI5tVlcEAAAAAAAAAA==", + "_self": "dbs/KDJCAA==/colls/KDJCAI5tVlc=/docs/KDJCAI5tVlcEAAAAAAAAAA==/", + "_ts": 1764966599, + "id": "7c6b7caa-572f-41b6-9f18-1bb11adfba31", + "partitionKey": "0465bc32-c654-41b8-8d87-9815d7abe8f6", + "properties": { + "externalId": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/create-with-tags", + "internalId": "/api/clusters_mgmt/v1/clusters/dv8blqb5g9", + "lastTransitionTime": "2025-12-05T20:29:59.734335781Z", + "operationId": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6/providers/Microsoft.RedHatOpenShift/locations/fake-location/hcpOperationStatuses/7c6b7caa-572f-41b6-9f18-1bb11adfba31", + "request": "Create", + "startTime": "2025-12-05T20:29:59.734335781Z", + "status": "Accepted", + "tenantId": "00000000-0000-0000-0000-000000000000", + 
"resourceIId": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/7c6b7caa-572f-41b6-9f18-1bb11adfba31" + }, + "resourceType": "microsoft.redhatopenshift/hcpoperationstatuses", + "ttl": 604800 } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/01-load-initial/operation-2.json b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/01-load-initial/operation-2.json index 176d3f3f66..7548097524 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/01-load-initial/operation-2.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/01-load-initial/operation-2.json @@ -14,7 +14,8 @@ "request": "Create", "startTime": "2025-12-05T20:30:25.067364685Z", "status": "Accepted", - "tenantId": "00000000-0000-0000-0000-000000000000" + "tenantId": "00000000-0000-0000-0000-000000000000", + "resourceId": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/37332594-70b3-4796-9af6-301a5d47d1e6" }, "resourceType": "microsoft.redhatopenshift/hcpoperationstatuses", "ttl": 604800 diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/00-key.json index 00fa7dbeb1..b570e73394 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6" + "parentResourceID": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6", + "resourceType": "Microsoft.RedHatOpenShift/hcpOperationStatuses" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/operation-1.json b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/operation-1.json index dd448a54c5..2fca20e283 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/operation-1.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/operation-1.json @@ -7,5 +7,6 @@ "request": "Create", "startTime": "2025-12-05T20:29:59.734335781Z", "status": "Accepted", - "tenantId": "00000000-0000-0000-0000-000000000000" + "tenantId": "00000000-0000-0000-0000-000000000000", + "resourceId": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/7c6b7caa-572f-41b6-9f18-1bb11adfba31" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/operation-2.json 
b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/operation-2.json index 7af5c206d2..a0437ff2b5 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/operation-2.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/02-listActiveOperations-initial/operation-2.json @@ -7,5 +7,6 @@ "request": "Create", "startTime": "2025-12-05T20:30:25.067364685Z", "status": "Accepted", - "tenantId": "00000000-0000-0000-0000-000000000000" + "tenantId": "00000000-0000-0000-0000-000000000000", + "resourceId": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/37332594-70b3-4796-9af6-301a5d47d1e6" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/03-getByID-old/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/03-getByID-old/00-key.json index 03aeff63b7..2cc206e7e0 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/03-getByID-old/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/OperationCRUD/new-operation-storage-compatible-with-old/03-getByID-old/00-key.json @@ -1,4 +1,5 @@ { "parentResourceID": "/subscriptions/0465bc32-c654-41b8-8d87-9815d7abe8f6", + "resourceType": "Microsoft.RedHatOpenShift/hcpOperationStatuses", "cosmosID": "37332594-70b3-4796-9af6-301a5d47d1e6" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/02-create-another/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/02-create-another/00-key.json index d0e5a9dc81..55fe2e6ba1 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/02-create-another/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/02-create-another/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "parentResourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/serviceProviderCluster" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/03-get-new-instance/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/03-get-new-instance/00-key.json index d0e5a9dc81..7428bbba46 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/03-get-new-instance/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/03-get-new-instance/00-key.json @@ -1,3 +1,3 @@ { - "parentResourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "resourceID": 
"/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster/serviceProviderCluster/default" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/04-replace-instance/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/04-replace-instance/00-key.json index d0e5a9dc81..7428bbba46 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/04-replace-instance/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/04-replace-instance/00-key.json @@ -1,3 +1,3 @@ { - "parentResourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "resourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster/serviceProviderCluster/default" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/06-list-instance/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/06-list-instance/00-key.json index d0e5a9dc81..55fe2e6ba1 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/06-list-instance/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/06-list-instance/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "parentResourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/serviceProviderCluster" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/07-delete-instance/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/07-delete-instance/00-key.json index c1548b7e18..7428bbba46 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/07-delete-instance/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/07-delete-instance/00-key.json @@ -1,4 +1,3 @@ { - "parentResourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster", - "deleteResourceName": "default" + "resourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster/serviceProviderCluster/default" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/09-list-nothing/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/09-list-nothing/00-key.json index d0e5a9dc81..55fe2e6ba1 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/09-list-nothing/00-key.json +++ 
b/test-integration/frontend/artifacts/DatabaseCRUD/ServiceProviderClusterCRUD/basic/09-list-nothing/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster" + "parentResourceID": "/subscriptions/ca3b7be4-6ac8-4784-b2b5-0e398a60269a/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/parentCluster", + "resourceType": "Microsoft.RedHatOpenShift/hcpOpenShiftClusters/serviceProviderCluster" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/UntypedCRUD/basic/03-untypedGet-immutability-cluster-via-child/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/UntypedCRUD/basic/03-untypedGet-immutability-cluster-via-child/00-key.json index 0d36cddf74..73e5a6dd69 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/UntypedCRUD/basic/03-untypedGet-immutability-cluster-via-child/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/UntypedCRUD/basic/03-untypedGet-immutability-cluster-via-child/00-key.json @@ -1,3 +1,3 @@ { - "parentResourceID": "/subscriptions/f52dfea2-47ee-4396-8006-4a27d47d59c5/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/immutability" + "resourceId": "/subscriptions/f52dfea2-47ee-4396-8006-4a27d47d59c5/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/immutability" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/DatabaseCRUD/UntypedCRUD/basic/11-untypedDelete-immutability-cluster/00-key.json b/test-integration/frontend/artifacts/DatabaseCRUD/UntypedCRUD/basic/11-untypedDelete-immutability-cluster/00-key.json index 59688111a4..73e5a6dd69 100644 --- a/test-integration/frontend/artifacts/DatabaseCRUD/UntypedCRUD/basic/11-untypedDelete-immutability-cluster/00-key.json +++ b/test-integration/frontend/artifacts/DatabaseCRUD/UntypedCRUD/basic/11-untypedDelete-immutability-cluster/00-key.json @@ -1,4 +1,3 @@ { - "parentResourceID": "/subscriptions/f52dfea2-47ee-4396-8006-4a27d47d59c5/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/immutability", - "deleteResourceID": "/subscriptions/f52dfea2-47ee-4396-8006-4a27d47d59c5/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/immutability" + "resourceId": "/subscriptions/f52dfea2-47ee-4396-8006-4a27d47d59c5/resourceGroups/some-resource-group/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/immutability" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/FrontendCRUD/Cluster/create-current/05-listActiveOperations-cluster-create/00-key.json b/test-integration/frontend/artifacts/FrontendCRUD/Cluster/create-current/05-listActiveOperations-cluster-create/00-key.json index 7f2c44f66d..13de4fa4fb 100644 --- a/test-integration/frontend/artifacts/FrontendCRUD/Cluster/create-current/05-listActiveOperations-cluster-create/00-key.json +++ b/test-integration/frontend/artifacts/FrontendCRUD/Cluster/create-current/05-listActiveOperations-cluster-create/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7" + "parentResourceID": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7", + "resourceType": "Microsoft.RedHatOpenShift/hcpOperationStatuses" } \ No newline at end of file diff --git 
a/test-integration/frontend/artifacts/FrontendCRUD/Cluster/create-current/05-listActiveOperations-cluster-create/operation-cluster-create-with-tags-create.json b/test-integration/frontend/artifacts/FrontendCRUD/Cluster/create-current/05-listActiveOperations-cluster-create/operation-cluster-create-with-tags-create.json index f99a370354..75093b10f2 100644 --- a/test-integration/frontend/artifacts/FrontendCRUD/Cluster/create-current/05-listActiveOperations-cluster-create/operation-cluster-create-with-tags-create.json +++ b/test-integration/frontend/artifacts/FrontendCRUD/Cluster/create-current/05-listActiveOperations-cluster-create/operation-cluster-create-with-tags-create.json @@ -1,5 +1,4 @@ { - "resourceId": null, "tenantId": "00000000-0000-0000-0000-000000000000", "request": "Create", "externalId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/create-with-tags", @@ -7,5 +6,6 @@ "operationId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/locations/fake-location/hcpOperationStatuses/07314510-beb6-4379-902a-ab6452603265", "startTime": "2025-12-19T20:41:44.99062756Z", "lastTransitionTime": "2025-12-19T20:41:44.99062756Z", - "status": "Accepted" + "status": "Accepted", + "resourceId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/07314510-beb6-4379-902a-ab6452603265" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/FrontendCRUD/Cluster/read-old-data/04-listActiveOperations-cluster-create/00-key.json b/test-integration/frontend/artifacts/FrontendCRUD/Cluster/read-old-data/04-listActiveOperations-cluster-create/00-key.json index 7f2c44f66d..13de4fa4fb 100644 --- a/test-integration/frontend/artifacts/FrontendCRUD/Cluster/read-old-data/04-listActiveOperations-cluster-create/00-key.json +++ b/test-integration/frontend/artifacts/FrontendCRUD/Cluster/read-old-data/04-listActiveOperations-cluster-create/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7" + "parentResourceID": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7", + "resourceType": "Microsoft.RedHatOpenShift/hcpOperationStatuses" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/FrontendCRUD/Cluster/read-old-data/04-listActiveOperations-cluster-create/operation-cluster-create-with-tags-create.json b/test-integration/frontend/artifacts/FrontendCRUD/Cluster/read-old-data/04-listActiveOperations-cluster-create/operation-cluster-create-with-tags-create.json index f99a370354..75093b10f2 100644 --- a/test-integration/frontend/artifacts/FrontendCRUD/Cluster/read-old-data/04-listActiveOperations-cluster-create/operation-cluster-create-with-tags-create.json +++ b/test-integration/frontend/artifacts/FrontendCRUD/Cluster/read-old-data/04-listActiveOperations-cluster-create/operation-cluster-create-with-tags-create.json @@ -1,5 +1,4 @@ { - "resourceId": null, "tenantId": "00000000-0000-0000-0000-000000000000", "request": "Create", "externalId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/create-with-tags", @@ -7,5 +6,6 @@ "operationId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/locations/fake-location/hcpOperationStatuses/07314510-beb6-4379-902a-ab6452603265", "startTime": 
"2025-12-19T20:41:44.99062756Z", "lastTransitionTime": "2025-12-19T20:41:44.99062756Z", - "status": "Accepted" + "status": "Accepted", + "resourceId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/07314510-beb6-4379-902a-ab6452603265" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/00-key.json b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/00-key.json index 7f2c44f66d..13de4fa4fb 100644 --- a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/00-key.json +++ b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7" + "parentResourceID": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7", + "resourceType": "Microsoft.RedHatOpenShift/hcpOperationStatuses" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/operation-nodepool-create-basic-nodepool.json b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/operation-nodepool-create-basic-nodepool.json index fdf5862986..124480db2b 100644 --- a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/operation-nodepool-create-basic-nodepool.json +++ b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/operation-nodepool-create-basic-nodepool.json @@ -1,5 +1,4 @@ { - "resourceId": null, "tenantId": "00000000-0000-0000-0000-000000000000", "request": "Create", "externalId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/create-with-tags/nodePools/basic-node-pool", @@ -7,5 +6,6 @@ "operationId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/locations/fake-location/hcpOperationStatuses/7c3de93c-ff5d-4865-a177-291549ba8020", "startTime": "2025-12-19T21:03:42.649357713Z", "lastTransitionTime": "2025-12-19T21:03:42.649357713Z", - "status": "Accepted" + "status": "Accepted", + "resourceId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/7c3de93c-ff5d-4865-a177-291549ba8020" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/operation-nodepool-create-nodepool-02.json b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/operation-nodepool-create-nodepool-02.json index baf8c69da9..163ff27933 100644 --- a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/operation-nodepool-create-nodepool-02.json +++ b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/create-current/05-listActiveOperations-cluster-create/operation-nodepool-create-nodepool-02.json @@ -1,5 +1,4 @@ { - "resourceId": null, "tenantId": "00000000-0000-0000-0000-000000000000", "request": "Create", "externalId": 
"/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/create-with-tags/nodePools/node-pool-02", @@ -7,5 +6,6 @@ "operationId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/locations/fake-location/hcpOperationStatuses/c1216e33-a694-4a65-a504-d821c30dc67c", "startTime": "2025-12-19T21:03:42.924755264Z", "lastTransitionTime": "2025-12-19T21:03:42.924755264Z", - "status": "Accepted" + "status": "Accepted", + "resourceId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/c1216e33-a694-4a65-a504-d821c30dc67c" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/00-key.json b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/00-key.json index 7f2c44f66d..13de4fa4fb 100644 --- a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/00-key.json +++ b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/00-key.json @@ -1,3 +1,4 @@ { - "parentResourceID": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7" + "parentResourceID": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7", + "resourceType": "Microsoft.RedHatOpenShift/hcpOperationStatuses" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/operation-nodepool-create-basicnodepool.json b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/operation-nodepool-create-basicnodepool.json index 53cd85f42c..5d87563360 100644 --- a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/operation-nodepool-create-basicnodepool.json +++ b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/operation-nodepool-create-basicnodepool.json @@ -1,5 +1,4 @@ { - "resourceId": null, "tenantId": "00000000-0000-0000-0000-000000000000", "request": "Create", "externalId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/create-with-tags/nodePools/basic-node-pool", @@ -7,5 +6,6 @@ "operationId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/locations/fake-location/hcpOperationStatuses/cdfb496e-6e70-4022-9f8d-b0dacf6d2ff5", "startTime": "2026-01-08T20:04:05.054190978Z", "lastTransitionTime": "2026-01-08T20:04:05.054190978Z", - "status": "Accepted" + "status": "Accepted", + "resourceId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/cdfb496e-6e70-4022-9f8d-b0dacf6d2ff5" } \ No newline at end of file diff --git a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/operation-nodepool-create-nodepool02.json b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/operation-nodepool-create-nodepool02.json index 75bccd10f0..ea9c6f4196 100644 --- 
a/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/operation-nodepool-create-nodepool02.json +++ b/test-integration/frontend/artifacts/FrontendCRUD/NodePool/read-old-data/04-listActiveOperations-node-create/operation-nodepool-create-nodepool02.json @@ -1,5 +1,4 @@ { - "resourceId": null, "tenantId": "00000000-0000-0000-0000-000000000000", "request": "Create", "externalId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/resourceGroups/resourceGroupName/providers/Microsoft.RedHatOpenShift/hcpOpenShiftClusters/create-with-tags/nodePools/node-pool-02", @@ -7,5 +6,6 @@ "operationId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/locations/fake-location/hcpOperationStatuses/4067756a-fcc1-4732-a211-d785d888203c", "startTime": "2026-01-08T20:04:05.330050272Z", "lastTransitionTime": "2026-01-08T20:04:05.330050272Z", - "status": "Accepted" + "status": "Accepted", + "resourceId": "/subscriptions/6b690bec-0c16-4ecb-8f67-781caf40bba7/providers/Microsoft.RedHatOpenShift/hcpOperationStatuses/4067756a-fcc1-4732-a211-d785d888203c" } \ No newline at end of file diff --git a/test-integration/frontend/cluster_mutation_test.go b/test-integration/frontend/cluster_mutation_test.go index b037f6ee66..0c08c7db6a 100644 --- a/test-integration/frontend/cluster_mutation_test.go +++ b/test-integration/frontend/cluster_mutation_test.go @@ -30,13 +30,15 @@ import ( ) func TestFrontendClusterMutation(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) + integrationutils.WithAndWithoutCosmos(t, testFrontendClusterMutation) +} +func testFrontendClusterMutation(t *testing.T, withMock bool) { ctx := context.Background() ctx, cancel := context.WithCancel(ctx) defer cancel() - frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t) + frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t, withMock) require.NoError(t, err) defer testInfo.Cleanup(context.Background()) @@ -44,7 +46,7 @@ func TestFrontendClusterMutation(t *testing.T) { subscriptionID := "0465bc32-c654-41b8-8d87-9815d7abe8f6" // TODO could read from JSON resourceGroupName := "some-resource-group" - err = testInfo.CreateInitialCosmosContent(ctx, api.Must(fs.Sub(artifacts, "artifacts/ClusterMutation/initial-cosmos-state"))) + err = integrationutils.LoadAllContent(ctx, testInfo, api.Must(fs.Sub(artifacts, "artifacts/ClusterMutation/initial-cosmos-state"))) require.NoError(t, err) dirContent := api.Must(artifacts.ReadDir("artifacts/ClusterMutation")) @@ -98,7 +100,7 @@ func (tt *clusterMutationTest) runTest(t *testing.T) { if tt.genericMutationTestInfo.IsUpdateTest() || tt.genericMutationTestInfo.IsPatchTest() { require.NoError(t, mutationErr) - require.NoError(t, integrationutils.MarkOperationsCompleteForName(ctx, tt.testInfo.DBClient, tt.subscriptionID, ptr.Deref(toCreate.Name, ""))) + require.NoError(t, integrationutils.MarkOperationsCompleteForName(ctx, tt.testInfo.CosmosClient(), tt.subscriptionID, ptr.Deref(toCreate.Name, ""))) } switch { diff --git a/test-integration/frontend/cluster_read_test.go b/test-integration/frontend/cluster_read_test.go index 45099ce1b9..8b125f3653 100644 --- a/test-integration/frontend/cluster_read_test.go +++ b/test-integration/frontend/cluster_read_test.go @@ -31,20 +31,22 @@ import ( var artifacts embed.FS func TestFrontendClusterRead(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) + integrationutils.WithAndWithoutCosmos(t, testFrontendClusterRead) +} 
+func testFrontendClusterRead(t *testing.T, withMock bool) { ctx := context.Background() ctx, cancel := context.WithCancel(ctx) defer cancel() - frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t) + frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t, withMock) require.NoError(t, err) defer testInfo.Cleanup(context.Background()) go frontend.Run(ctx, ctx.Done()) subscriptionID := "0465bc32-c654-41b8-8d87-9815d7abe8f6" // TODO could read from JSON - err = testInfo.CreateInitialCosmosContent(ctx, api.Must(fs.Sub(artifacts, "artifacts/ClusterReadOldData/initial-cosmos-state"))) + err = integrationutils.LoadAllContent(ctx, testInfo, api.Must(fs.Sub(artifacts, "artifacts/ClusterReadOldData/initial-cosmos-state"))) require.NoError(t, err) err = testInfo.AddContent(t, api.Must(fs.Sub(artifacts, "artifacts/ClusterReadOldData/initial-cluster-service-state"))) diff --git a/test-integration/frontend/database_crud_test.go b/test-integration/frontend/database_crud_test.go index 36b0078a7a..cae86bed2b 100644 --- a/test-integration/frontend/database_crud_test.go +++ b/test-integration/frontend/database_crud_test.go @@ -22,13 +22,17 @@ import ( "github.com/stretchr/testify/require" "github.com/Azure/ARO-HCP/internal/api" + "github.com/Azure/ARO-HCP/internal/api/arm" + "github.com/Azure/ARO-HCP/internal/database" "github.com/Azure/ARO-HCP/test-integration/utils/databasemutationhelpers" "github.com/Azure/ARO-HCP/test-integration/utils/integrationutils" ) func TestDatabaseCRUD(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) + integrationutils.WithAndWithoutCosmos(t, testDatabaseCRUD) +} +func testDatabaseCRUD(t *testing.T, withMock bool) { ctx := context.Background() ctx, cancel := context.WithCancel(ctx) defer cancel() @@ -42,49 +46,50 @@ func TestDatabaseCRUD(t *testing.T) { switch crudSuiteDirEntry.Name() { case "ControllerCRUD": t.Run(crudSuiteDirEntry.Name(), func(t *testing.T) { - testCRUDSuite( + testCRUDSuite[api.Controller]( ctx, t, - databasemutationhelpers.ControllerCRUDSpecializer{}, - crudSuiteDir) + crudSuiteDir, + withMock) + }) case "OperationCRUD": t.Run(crudSuiteDirEntry.Name(), func(t *testing.T) { - testCRUDSuite( + testCRUDSuite[api.Operation]( ctx, t, - databasemutationhelpers.OperationCRUDSpecializer{}, - crudSuiteDir) + crudSuiteDir, + withMock) + }) case "SubscriptionCRUD": t.Run(crudSuiteDirEntry.Name(), func(t *testing.T) { - testCRUDSuite( + testCRUDSuite[arm.Subscription]( ctx, t, - databasemutationhelpers.SubscriptionCRUDSpecializer{}, - crudSuiteDir) + crudSuiteDir, + withMock) + }) case "ServiceProviderClusterCRUD": t.Run(crudSuiteDirEntry.Name(), func(t *testing.T) { - testCRUDSuite( + testCRUDSuite[api.ServiceProviderCluster]( ctx, t, - databasemutationhelpers.GenericCRUDSpecializer[api.ServiceProviderCluster]{ - ResourceType: api.ServiceProviderClusterResourceType, - }, - crudSuiteDir) + crudSuiteDir, + withMock) }) case "UntypedCRUD": t.Run(crudSuiteDirEntry.Name(), func(t *testing.T) { - testCRUDSuite( + testCRUDSuite[database.TypedDocument]( ctx, t, - databasemutationhelpers.OperationCRUDSpecializer{}, - crudSuiteDir) + crudSuiteDir, + withMock) }) default: @@ -93,16 +98,16 @@ func TestDatabaseCRUD(t *testing.T) { } } -func testCRUDSuite[InternalAPIType any](ctx context.Context, t *testing.T, specializer databasemutationhelpers.ResourceCRUDTestSpecializer[InternalAPIType], crudSuiteDir fs.FS) { +func testCRUDSuite[InternalAPIType any](ctx context.Context, t *testing.T, crudSuiteDir fs.FS, withMock bool) { 
testDirs := api.Must(fs.ReadDir(crudSuiteDir, ".")) for _, testDirEntry := range testDirs { testDir := api.Must(fs.Sub(crudSuiteDir, testDirEntry.Name())) - currTest, err := databasemutationhelpers.NewResourceMutationTest( + currTest, err := databasemutationhelpers.NewResourceMutationTest[InternalAPIType]( ctx, - specializer, testDirEntry.Name(), testDir, + withMock, ) require.NoError(t, err) diff --git a/test-integration/frontend/externalauth_mutation_test.go b/test-integration/frontend/externalauth_mutation_test.go index 3e112934c4..27927348d1 100644 --- a/test-integration/frontend/externalauth_mutation_test.go +++ b/test-integration/frontend/externalauth_mutation_test.go @@ -31,13 +31,15 @@ import ( ) func TestFrontendExternalAuthMutation(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) + integrationutils.WithAndWithoutCosmos(t, testFrontendExternalAuthMutation) +} +func testFrontendExternalAuthMutation(t *testing.T, withMock bool) { ctx := context.Background() ctx, cancel := context.WithCancel(ctx) defer cancel() - frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t) + frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t, withMock) require.NoError(t, err) defer testInfo.Cleanup(context.Background()) @@ -45,7 +47,7 @@ func TestFrontendExternalAuthMutation(t *testing.T) { subscriptionID := "0465bc32-c654-41b8-8d87-9815d7abe8f6" // TODO could read from JSON resourceGroupName := "some-resource-group" - err = testInfo.CreateInitialCosmosContent(ctx, api.Must(fs.Sub(artifacts, "artifacts/ExternalAuthMutation/initial-cosmos-state"))) + err = integrationutils.LoadAllContent(ctx, testInfo, api.Must(fs.Sub(artifacts, "artifacts/ExternalAuthMutation/initial-cosmos-state"))) require.NoError(t, err) // create anything and round trip anything for nodePool-service @@ -98,7 +100,7 @@ func (tt *externalAuthMutationTest) runTest(t *testing.T) { require.NoError(t, tt.genericMutationTestInfo.Initialize(ctx, tt.testInfo)) // better solutions welcome to be coded. This is simple and works for the moment. 
- hcpClusterName := strings.Split(t.Name(), "/")[1] + hcpClusterName := strings.Split(t.Name(), "/")[2] toCreate := &hcpsdk20240610preview.ExternalAuth{} require.NoError(t, json.Unmarshal(tt.genericMutationTestInfo.CreateJSON, toCreate)) externalAuthClient := tt.testInfo.Get20240610ClientFactory(tt.subscriptionID).NewExternalAuthsClient() @@ -106,7 +108,7 @@ func (tt *externalAuthMutationTest) runTest(t *testing.T) { if tt.genericMutationTestInfo.IsUpdateTest() || tt.genericMutationTestInfo.IsPatchTest() { require.NoError(t, mutationErr) - require.NoError(t, integrationutils.MarkOperationsCompleteForName(ctx, tt.testInfo.DBClient, tt.subscriptionID, ptr.Deref(toCreate.Name, ""))) + require.NoError(t, integrationutils.MarkOperationsCompleteForName(ctx, tt.testInfo.CosmosClient(), tt.subscriptionID, ptr.Deref(toCreate.Name, ""))) } switch { diff --git a/test-integration/frontend/frontend_crud_test.go b/test-integration/frontend/frontend_crud_test.go index e90aeb6a97..22dadb5e9c 100644 --- a/test-integration/frontend/frontend_crud_test.go +++ b/test-integration/frontend/frontend_crud_test.go @@ -22,13 +22,14 @@ import ( "github.com/stretchr/testify/require" "github.com/Azure/ARO-HCP/internal/api" - "github.com/Azure/ARO-HCP/test-integration/utils/databasemutationhelpers" "github.com/Azure/ARO-HCP/test-integration/utils/integrationutils" ) func TestFrontendCRUD(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) + integrationutils.WithAndWithoutCosmos(t, testFrontendCRUD) +} +func testFrontendCRUD(t *testing.T, withMock bool) { ctx := context.Background() ctx, cancel := context.WithCancel(ctx) defer cancel() @@ -40,11 +41,11 @@ func TestFrontendCRUD(t *testing.T) { for _, crudSuiteDirEntry := range crudSuiteDirs { crudSuiteDir := api.Must(fs.Sub(allCRUDDirFS, crudSuiteDirEntry.Name())) t.Run(crudSuiteDirEntry.Name(), func(t *testing.T) { - testCRUDSuite( + testCRUDSuite[any]( ctx, t, - databasemutationhelpers.NothingCRUDSpecializer{}, - crudSuiteDir) + crudSuiteDir, + withMock) }) } } diff --git a/test-integration/frontend/launch_test.go b/test-integration/frontend/launch_test.go deleted file mode 100644 index 90393ebce8..0000000000 --- a/test-integration/frontend/launch_test.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2025 Microsoft Corporation -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package frontend - -import ( - "context" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/Azure/ARO-HCP/test-integration/utils/integrationutils" -) - -func TestLaunch(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) - - ctx := context.Background() - ctx, cancel := context.WithCancel(ctx) - defer cancel() - - frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t) - require.NoError(t, err) - defer testInfo.Cleanup(context.Background()) - - go frontend.Run(ctx, ctx.Done()) - - // run for a little bit and don't crash - time.Sleep(5 * time.Second) -} diff --git a/test-integration/frontend/nodepool_mutation_test.go b/test-integration/frontend/nodepool_mutation_test.go index 98f864819c..567191e804 100644 --- a/test-integration/frontend/nodepool_mutation_test.go +++ b/test-integration/frontend/nodepool_mutation_test.go @@ -31,13 +31,15 @@ import ( ) func TestFrontendNodePoolMutation(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) + integrationutils.WithAndWithoutCosmos(t, testFrontendNodePoolMutation) +} +func testFrontendNodePoolMutation(t *testing.T, withMock bool) { ctx := context.Background() ctx, cancel := context.WithCancel(ctx) defer cancel() - frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t) + frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t, withMock) require.NoError(t, err) defer testInfo.Cleanup(context.Background()) @@ -45,7 +47,7 @@ func TestFrontendNodePoolMutation(t *testing.T) { subscriptionID := "0465bc32-c654-41b8-8d87-9815d7abe8f6" // TODO could read from JSON resourceGroupName := "some-resource-group" - err = testInfo.CreateInitialCosmosContent(ctx, api.Must(fs.Sub(artifacts, "artifacts/NodePoolMutation/initial-cosmos-state"))) + err = integrationutils.LoadAllContent(ctx, testInfo, api.Must(fs.Sub(artifacts, "artifacts/NodePoolMutation/initial-cosmos-state"))) require.NoError(t, err) // create anything and round trip anything for nodePool-service @@ -98,7 +100,7 @@ func (tt *nodePoolMutationTest) runTest(t *testing.T) { require.NoError(t, tt.genericMutationTestInfo.Initialize(ctx, tt.testInfo)) // better solutions welcome to be coded. This is simple and works for the moment. 
- hcpClusterName := strings.Split(t.Name(), "/")[1] + hcpClusterName := strings.Split(t.Name(), "/")[2] toCreate := &hcpsdk20240610preview.NodePool{} require.NoError(t, json.Unmarshal(tt.genericMutationTestInfo.CreateJSON, toCreate)) nodePoolClient := tt.testInfo.Get20240610ClientFactory(tt.subscriptionID).NewNodePoolsClient() @@ -106,7 +108,7 @@ func (tt *nodePoolMutationTest) runTest(t *testing.T) { if tt.genericMutationTestInfo.IsUpdateTest() || tt.genericMutationTestInfo.IsPatchTest() { require.NoError(t, mutationErr) - require.NoError(t, integrationutils.MarkOperationsCompleteForName(ctx, tt.testInfo.DBClient, tt.subscriptionID, ptr.Deref(toCreate.Name, ""))) + require.NoError(t, integrationutils.MarkOperationsCompleteForName(ctx, tt.testInfo.CosmosClient(), tt.subscriptionID, ptr.Deref(toCreate.Name, ""))) } switch { diff --git a/test-integration/go.mod b/test-integration/go.mod index 7f1a434c58..95ff1430d3 100644 --- a/test-integration/go.mod +++ b/test-integration/go.mod @@ -9,7 +9,6 @@ require ( github.com/Azure/ARO-HCP/internal v0.0.0-00010101000000-000000000000 github.com/Azure/ARO-HCP/test/sdk/v20240610preview/resourcemanager/redhatopenshifthcp/armredhatopenshifthcp v0.0.0-00010101000000-000000000000 github.com/google/go-cmp v0.7.0 - github.com/google/uuid v1.6.0 github.com/neilotoole/slogt v1.1.0 github.com/openshift-online/ocm-api-model/clientapi v0.0.435 github.com/openshift-online/ocm-sdk-go v0.1.480 @@ -32,6 +31,7 @@ require ( github.com/go-logr/stdr v1.2.2 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/protobuf v1.5.4 // indirect + github.com/google/uuid v1.6.0 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect github.com/hashicorp/go-version v1.7.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect diff --git a/test-integration/utils/controllertesthelpers/basic_controller.go b/test-integration/utils/controllertesthelpers/basic_controller.go index d14df948f4..fe5e73d7e7 100644 --- a/test-integration/utils/controllertesthelpers/basic_controller.go +++ b/test-integration/utils/controllertesthelpers/basic_controller.go @@ -52,11 +52,10 @@ type BasicControllerTest struct { ControllerInitializerFn ControllerInitializerFunc ControllerVerifierFn ControllerVerifierFunc + WithMock bool } func (tc *BasicControllerTest) RunTest(t *testing.T) { - integrationutils.SkipIfNotSimulationTesting(t) - ctx := context.Background() ctx, cancel := context.WithCancel(ctx) defer cancel() @@ -70,11 +69,19 @@ func (tc *BasicControllerTest) RunTest(t *testing.T) { logger = tc.ControllerKey.AddLoggerValues(logger) ctx = utils.ContextWithLogger(ctx, logger) - cosmosTestInfo, err := integrationutils.NewCosmosFromTestingEnv(ctx, t) + var storageIntegrationTestInfo integrationutils.StorageIntegrationTestInfo + if tc.WithMock { + storageIntegrationTestInfo, err = integrationutils.NewMockCosmosFromTestingEnv(ctx, t) + } else { + storageIntegrationTestInfo, err = integrationutils.NewCosmosFromTestingEnv(ctx, t) + } + require.NoError(t, err) require.NoError(t, err) - defer cosmosTestInfo.Cleanup(utils.ContextWithLogger(context.Background(), slogt.New(t, slogt.JSON()))) - clusterServiceMockInfo := integrationutils.NewClusterServiceMock(t, cosmosTestInfo.ArtifactsDir) + defer storageIntegrationTestInfo.Cleanup(utils.ContextWithLogger(context.Background(), slogt.New(t, slogt.JSON()))) + clusterServiceMockInfo := integrationutils.NewClusterServiceMock(t, storageIntegrationTestInfo.GetArtifactDir()) defer 
clusterServiceMockInfo.Cleanup(utils.ContextWithLogger(context.Background(), slogt.New(t, slogt.JSON()))) + stepInput := databasemutationhelpers.NewCosmosStepInput(storageIntegrationTestInfo) + stepInput.ClusterServiceMockInfo = clusterServiceMockInfo initialCosmosState, err := fs.Sub(testDir, path.Join("00-load-initial-state")) require.NoError(t, err) @@ -84,10 +91,7 @@ func (tc *BasicControllerTest) RunTest(t *testing.T) { initialCosmosState, ) require.NoError(t, err) - input := databasemutationhelpers.StepInput{ - CosmosContainer: cosmosTestInfo.CosmosResourcesContainer(), - } - loadInitialStateStep.RunTest(ctx, t, input) + loadInitialStateStep.RunTest(ctx, t, *stepInput) } initialClusterServiceState, err := fs.Sub(testDir, path.Join("00-loadClusterService-initial_state")) @@ -98,16 +102,12 @@ func (tc *BasicControllerTest) RunTest(t *testing.T) { initialClusterServiceState, ) require.NoError(t, err) - input := databasemutationhelpers.StepInput{ - CosmosContainer: cosmosTestInfo.CosmosResourcesContainer(), - ClusterServiceMockInfo: clusterServiceMockInfo, - } - loadInitialStateStep.RunTest(ctx, t, input) + loadInitialStateStep.RunTest(ctx, t, *stepInput) } controllerInput := &ControllerInitializationInput{ - CosmosClient: cosmosTestInfo.DBClient, - SubscriptionLister: livelisters.NewSubscriptionLiveLister(cosmosTestInfo.DBClient), + CosmosClient: storageIntegrationTestInfo.CosmosClient(), + SubscriptionLister: livelisters.NewSubscriptionLiveLister(storageIntegrationTestInfo.CosmosClient()), ClusterServiceClient: clusterServiceMockInfo.MockClusterServiceClient, } @@ -123,10 +123,7 @@ func (tc *BasicControllerTest) RunTest(t *testing.T) { endState, ) require.NoError(t, err) - input := databasemutationhelpers.StepInput{ - CosmosContainer: cosmosTestInfo.CosmosResourcesContainer(), - } - verifyEndStateStep.RunTest(ctx, t, input) + verifyEndStateStep.RunTest(ctx, t, *stepInput) } tc.ControllerVerifierFn(ctx, t, controllerInstance, testMemory, controllerInput) diff --git a/test-integration/utils/databasemutationhelpers/per_resource_comparer.go b/test-integration/utils/databasemutationhelpers/per_resource_comparer.go index 4605279988..b783918f77 100644 --- a/test-integration/utils/databasemutationhelpers/per_resource_comparer.go +++ b/test-integration/utils/databasemutationhelpers/per_resource_comparer.go @@ -16,6 +16,7 @@ package databasemutationhelpers import ( "encoding/json" + "fmt" "strings" "testing" @@ -24,10 +25,12 @@ import ( "k8s.io/apimachinery/pkg/api/equality" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/utils/ptr" azcorearm "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" "github.com/Azure/ARO-HCP/internal/api" + "github.com/Azure/ARO-HCP/internal/database" ) func ResourceInstanceEquals(t *testing.T, expected, actual any) (string, bool) { @@ -107,3 +110,34 @@ func prepend(first string, rest ...string) []string { } return append([]string{first}, rest...) 
} + +func ResourceName(resource any) string { + switch cast := resource.(type) { + case api.CosmosMetadataAccessor: + return ptr.To(cast.GetResourceID()).String() + case api.CosmosPersistable: + cosmosUID := cast.GetCosmosData().CosmosUID + resourceID, err := api.CosmosIDToResourceID(cosmosUID) + if err != nil { + return cosmosUID + } + return resourceID.String() + case database.TypedDocument: + cosmosUID := cast.ID + resourceID, err := api.CosmosIDToResourceID(cosmosUID) + if err != nil { + return cosmosUID + } + return resourceID.String() + case *database.TypedDocument: + cosmosUID := cast.ID + resourceID, err := api.CosmosIDToResourceID(cosmosUID) + if err != nil { + return cosmosUID + } + return resourceID.String() + + default: + return fmt.Sprintf("%v", resource) + } +} diff --git a/test-integration/utils/databasemutationhelpers/per_resource_crud.go b/test-integration/utils/databasemutationhelpers/per_resource_crud.go index 6f21941d6e..8b24e57e97 100644 --- a/test-integration/utils/databasemutationhelpers/per_resource_crud.go +++ b/test-integration/utils/databasemutationhelpers/per_resource_crud.go @@ -15,247 +15,40 @@ package databasemutationhelpers import ( - "encoding/json" - "fmt" - "path/filepath" "strings" "testing" - "github.com/stretchr/testify/require" - - "k8s.io/apimachinery/pkg/api/equality" - "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" - azcorearm "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" - "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" "github.com/Azure/ARO-HCP/internal/api" - "github.com/Azure/ARO-HCP/internal/api/arm" "github.com/Azure/ARO-HCP/internal/database" ) -type ResourceCRUDTestSpecializer[InternalAPIType any] interface { - ResourceCRUDFromKey(t *testing.T, cosmosContainer *azcosmos.ContainerClient, key CosmosCRUDKey) database.ResourceCRUD[InternalAPIType] - InstanceEquals(expected, actual *InternalAPIType) bool - NameFromInstance(*InternalAPIType) string - WriteCosmosID(newObj, oldObj *InternalAPIType) -} - -type ControllerCRUDSpecializer struct { -} - -var _ ResourceCRUDTestSpecializer[api.Controller] = &ControllerCRUDSpecializer{} - -func (ControllerCRUDSpecializer) ResourceCRUDFromKey(t *testing.T, cosmosContainer *azcosmos.ContainerClient, key CosmosCRUDKey) database.ResourceCRUD[api.Controller] { - parentResourceID, err := azcorearm.ParseResourceID(key.ParentResourceID) - require.NoError(t, err) - controllerResourceType, err := azcorearm.ParseResourceType(filepath.Join(parentResourceID.ResourceType.String(), api.ControllerResourceTypeName)) - require.NoError(t, err) - - return database.NewControllerCRUD(cosmosContainer, parentResourceID, controllerResourceType) -} - -func (ControllerCRUDSpecializer) InstanceEquals(expected, actual *api.Controller) bool { - // clear the fields that don't compare - shallowExpected := *expected - shallowActual := *actual - shallowExpected.CosmosUID = "" - shallowActual.CosmosUID = "" - return equality.Semantic.DeepEqual(shallowExpected, shallowActual) -} - -func (ControllerCRUDSpecializer) NameFromInstance(obj *api.Controller) string { - return obj.ResourceID.Name -} - -func (ControllerCRUDSpecializer) WriteCosmosID(newObj, oldObj *api.Controller) { - newObj.CosmosUID = oldObj.CosmosUID -} - -type OperationCRUDSpecializer struct { -} - -var _ ResourceCRUDTestSpecializer[api.Operation] = &OperationCRUDSpecializer{} - -func (OperationCRUDSpecializer) ResourceCRUDFromKey(t *testing.T, cosmosContainer *azcosmos.ContainerClient, key CosmosCRUDKey) database.ResourceCRUD[api.Operation] { - parentResourceID, 
err := azcorearm.ParseResourceID(key.ParentResourceID) - require.NoError(t, err) - - return database.NewCosmosResourceCRUD[api.Operation, database.Operation](cosmosContainer, parentResourceID, api.OperationStatusResourceType) -} - -func (OperationCRUDSpecializer) InstanceEquals(expected, actual *api.Operation) bool { - // clear the fields that don't compare - shallowExpected := *expected - shallowActual := *actual - return equality.Semantic.DeepEqual(shallowExpected, shallowActual) -} - -func (OperationCRUDSpecializer) NameFromInstance(obj *api.Operation) string { - return obj.OperationID.Name -} - -func (OperationCRUDSpecializer) WriteCosmosID(newObj, oldObj *api.Operation) { - // the cosmosID is derived from the operationID -} - -type NothingCRUDSpecializer struct { -} - -var _ ResourceCRUDTestSpecializer[any] = &NothingCRUDSpecializer{} - -func (NothingCRUDSpecializer) ResourceCRUDFromKey(t *testing.T, cosmosContainer *azcosmos.ContainerClient, key CosmosCRUDKey) database.ResourceCRUD[any] { - panic("unsupported") -} - -func (NothingCRUDSpecializer) InstanceEquals(expected, actual *any) bool { - panic("unsupported") -} - -func (NothingCRUDSpecializer) NameFromInstance(obj *any) string { - panic("unsupported") -} - -func (NothingCRUDSpecializer) WriteCosmosID(newObj, oldObj *any) { - panic("unsupported") -} - -type UntypedCRUDSpecializer struct { -} - -var _ ResourceCRUDTestSpecializer[database.TypedDocument] = &UntypedCRUDSpecializer{} - -func (UntypedCRUDSpecializer) ResourceCRUDFromKey(t *testing.T, cosmosContainer *azcosmos.ContainerClient, key CosmosCRUDKey) database.ResourceCRUD[database.TypedDocument] { - panic("unsupported") -} - -func (UntypedCRUDSpecializer) InstanceEquals(expected, actual *database.TypedDocument) bool { - // clear the fields that don't compare - shallowExpected := *expected - shallowActual := *actual - shallowExpected.ID = "" - shallowExpected.CosmosResourceID = "" - shallowExpected.CosmosSelf = "" - shallowExpected.CosmosETag = "" - shallowExpected.CosmosAttachments = "" - shallowExpected.CosmosTimestamp = 0 - shallowActual.ID = "" - shallowActual.CosmosResourceID = "" - shallowActual.CosmosSelf = "" - shallowActual.CosmosETag = "" - shallowActual.CosmosAttachments = "" - shallowActual.CosmosTimestamp = 0 - - expectedProperties := map[string]any{} - actualProperties := map[string]any{} - if err := json.Unmarshal(shallowExpected.Properties, &expectedProperties); err != nil { - panic(err) - } - if err := json.Unmarshal(shallowActual.Properties, &actualProperties); err != nil { - panic(err) - } - shallowExpected.Properties = nil - shallowActual.Properties = nil - - if !equality.Semantic.DeepEqual(shallowExpected, shallowActual) { - return false - } - - // clear some per-type details - switch strings.ToLower(actual.ResourceType) { - case strings.ToLower(api.ClusterControllerResourceType.String()), - strings.ToLower(api.NodePoolControllerResourceType.String()), - strings.ToLower(api.ExternalAuthControllerResourceType.String()): - - expectedConditions, found, err := unstructured.NestedSlice(expectedProperties, "internalState", "status", "conditions") - if found && err == nil { - for i := range expectedConditions { - delete(expectedConditions[i].(map[string]any), "lastTransitionTime") - } - if err := unstructured.SetNestedSlice(expectedProperties, expectedConditions, "internalState", "status", "conditions"); err != nil { - panic(err) - } - } - - actualConditions, found, err := unstructured.NestedSlice(actualProperties, "internalState", "status", "conditions") - if 
found && err == nil { - for i := range actualConditions { - delete(actualConditions[i].(map[string]any), "lastTransitionTime") - } - if err := unstructured.SetNestedSlice(actualProperties, actualConditions, "internalState", "status", "conditions"); err != nil { - panic(err) - } - } - } - - return equality.Semantic.DeepEqual(expectedProperties, actualProperties) -} - -func (UntypedCRUDSpecializer) NameFromInstance(obj *database.TypedDocument) string { - return obj.ID -} - -func (UntypedCRUDSpecializer) WriteCosmosID(newObj, oldObj *database.TypedDocument) { - newObj.ID = oldObj.ID -} - -type SubscriptionCRUDSpecializer struct { -} - -var _ ResourceCRUDTestSpecializer[arm.Subscription] = &SubscriptionCRUDSpecializer{} - -func (SubscriptionCRUDSpecializer) ResourceCRUDFromKey(t *testing.T, cosmosContainer *azcosmos.ContainerClient, key CosmosCRUDKey) database.ResourceCRUD[arm.Subscription] { - return database.NewSubscriptionCRUD(cosmosContainer) -} - -func (SubscriptionCRUDSpecializer) InstanceEquals(expected, actual *arm.Subscription) bool { - // clear the fields that don't compare - shallowExpected := *expected - shallowActual := *actual - shallowExpected.LastUpdated = 0 - shallowActual.LastUpdated = 0 - return equality.Semantic.DeepEqual(shallowExpected, shallowActual) -} - -func (SubscriptionCRUDSpecializer) NameFromInstance(obj *arm.Subscription) string { - return obj.ResourceID.Name -} - -func (SubscriptionCRUDSpecializer) WriteCosmosID(newObj, oldObj *arm.Subscription) { - newObj.ResourceID = oldObj.ResourceID -} - -type GenericCRUDSpecializer[InternalAPIType any] struct { - ResourceType azcorearm.ResourceType -} - -var _ ResourceCRUDTestSpecializer[any] = &GenericCRUDSpecializer[any]{} - -func (c GenericCRUDSpecializer[InternalAPIType]) ResourceCRUDFromKey(t *testing.T, cosmosContainer *azcosmos.ContainerClient, key CosmosCRUDKey) database.ResourceCRUD[InternalAPIType] { - var instance InternalAPIType - _, ok := any(&instance).(api.CosmosMetadataAccessor) - if !ok { - panic(fmt.Sprintf("must be CosmosMetadataAccessor: %T", &instance)) - } - clusterResourceID := api.Must(azcorearm.ParseResourceID(key.ParentResourceID)) - return database.NewCosmosResourceCRUD[InternalAPIType, database.GenericDocument[InternalAPIType]]( - cosmosContainer, clusterResourceID, c.ResourceType) -} - -func (GenericCRUDSpecializer[InternalAPIType]) InstanceEquals(expected, actual *InternalAPIType) bool { - // clear the fields that don't compare - shallowExpected := *expected - shallowActual := *actual - return equality.Semantic.DeepEqual(shallowExpected, shallowActual) -} - -func (GenericCRUDSpecializer[InternalAPIType]) NameFromInstance(obj *InternalAPIType) string { - if obj == nil { - return "" +func NewCosmosCRUD[InternalAPIType any](t *testing.T, cosmosClient database.DBClient, parentResourceID *azcorearm.ResourceID, resourceType azcorearm.ResourceType) database.ResourceCRUD[InternalAPIType] { + switch { + case strings.EqualFold(resourceType.String(), api.ClusterControllerResourceType.String()): + return any(cosmosClient.HCPClusters(parentResourceID.SubscriptionID, parentResourceID.ResourceGroupName).Controllers(parentResourceID.Name)).(database.ResourceCRUD[InternalAPIType]) + case strings.EqualFold(resourceType.String(), api.ExternalAuthControllerResourceType.String()): + return any(cosmosClient.HCPClusters(parentResourceID.SubscriptionID, parentResourceID.ResourceGroupName).ExternalAuth(parentResourceID.Parent.Name).Controllers(parentResourceID.Name)).(database.ResourceCRUD[InternalAPIType]) + case 
strings.EqualFold(resourceType.String(), api.NodePoolControllerResourceType.String()): + return any(cosmosClient.HCPClusters(parentResourceID.SubscriptionID, parentResourceID.ResourceGroupName).NodePools(parentResourceID.Parent.Name).Controllers(parentResourceID.Name)).(database.ResourceCRUD[InternalAPIType]) + + case strings.EqualFold(resourceType.String(), api.ClusterResourceType.String()): + return any(cosmosClient.HCPClusters(parentResourceID.SubscriptionID, parentResourceID.ResourceGroupName)).(database.ResourceCRUD[InternalAPIType]) + case strings.EqualFold(resourceType.String(), api.ExternalAuthResourceType.String()): + return any(cosmosClient.HCPClusters(parentResourceID.SubscriptionID, parentResourceID.ResourceGroupName).ExternalAuth(parentResourceID.Name)).(database.ResourceCRUD[InternalAPIType]) + case strings.EqualFold(resourceType.String(), api.NodePoolResourceType.String()): + return any(cosmosClient.HCPClusters(parentResourceID.SubscriptionID, parentResourceID.ResourceGroupName).NodePools(parentResourceID.Name)).(database.ResourceCRUD[InternalAPIType]) + + case strings.EqualFold(resourceType.String(), api.OperationStatusResourceType.String()): + return any(cosmosClient.Operations(parentResourceID.SubscriptionID)).(database.ResourceCRUD[InternalAPIType]) + + case strings.EqualFold(resourceType.String(), api.ServiceProviderClusterResourceType.String()): + return any(cosmosClient.ServiceProviderClusters(parentResourceID.SubscriptionID, parentResourceID.ResourceGroupName, parentResourceID.Name)).(database.ResourceCRUD[InternalAPIType]) + + default: + t.Fatalf("unsupported resource type and parent: %q under %v", resourceType, parentResourceID.ResourceType.String()) } - return any(obj).(api.CosmosMetadataAccessor).GetResourceID().Name -} -func (GenericCRUDSpecializer[InternalAPIType]) WriteCosmosID(newObj, oldObj *InternalAPIType) { - // do nothing + panic("unreachable") } diff --git a/test-integration/utils/databasemutationhelpers/resource_crud_test_util.go b/test-integration/utils/databasemutationhelpers/resource_crud_test_util.go index 453d711e90..73a8d5447d 100644 --- a/test-integration/utils/databasemutationhelpers/resource_crud_test_util.go +++ b/test-integration/utils/databasemutationhelpers/resource_crud_test_util.go @@ -31,6 +31,7 @@ import ( "k8s.io/apimachinery/pkg/util/wait" + azcorearm "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" "github.com/Azure/ARO-HCP/internal/api" @@ -41,7 +42,8 @@ import ( ) type ResourceMutationTest struct { - testDir fs.FS + testDir fs.FS + withMock bool steps []IntegrationTestStep } @@ -51,18 +53,19 @@ type IntegrationTestStep interface { RunTest(ctx context.Context, t *testing.T, stepInput StepInput) } -func NewResourceMutationTest[InternalAPIType any](ctx context.Context, specializer ResourceCRUDTestSpecializer[InternalAPIType], testName string, testDir fs.FS) (*ResourceMutationTest, error) { - steps, err := readSteps(ctx, testDir, specializer) +func NewResourceMutationTest[InternalAPIType any](ctx context.Context, testName string, testDir fs.FS, withMock bool) (*ResourceMutationTest, error) { + steps, err := readSteps[InternalAPIType](ctx, testDir) if err != nil { return nil, fmt.Errorf("failed to read steps for test %q: %w", testName, err) } return &ResourceMutationTest{ - testDir: testDir, - steps: steps, + testDir: testDir, + withMock: withMock, + steps: steps, }, nil } -func readSteps[InternalAPIType any](ctx context.Context, testDir fs.FS, specializer 
ResourceCRUDTestSpecializer[InternalAPIType]) ([]IntegrationTestStep, error) { +func readSteps[InternalAPIType any](ctx context.Context, testDir fs.FS) ([]IntegrationTestStep, error) { steps := []IntegrationTestStep{} numLoadClusterServiceSteps := 0 @@ -84,7 +87,7 @@ func readSteps[InternalAPIType any](ctx context.Context, testDir fs.FS, speciali numLoadClusterServiceSteps++ } - testStep, err := NewStep(index, stepType, stepName, testDir, dirEntry.Name(), specializer) + testStep, err := NewStep[InternalAPIType](index, stepType, stepName, testDir, dirEntry.Name()) if err != nil { return nil, fmt.Errorf("failed to create new step %q: %w", dirEntry.Name(), err) } @@ -104,7 +107,7 @@ func (tt *ResourceMutationTest) RunTest(t *testing.T) { defer cancel() ctx = utils.ContextWithLogger(ctx, slogt.New(t, slogt.JSON())) - frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t) + frontend, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t, tt.withMock) require.NoError(t, err) cleanupCtx := context.Background() cleanupCtx = utils.ContextWithLogger(cleanupCtx, slogt.New(t, slogt.JSON())) @@ -122,23 +125,21 @@ func (tt *ResourceMutationTest) RunTest(t *testing.T) { }) require.NoError(t, err) - stepInput := StepInput{ - CosmosContainer: testInfo.CosmosResourcesContainer(), - DBClient: testInfo.DBClient, - FrontendClient: testInfo.Get20240610ClientFactory, - FrontendURL: testInfo.FrontendURL, - ClusterServiceMockInfo: testInfo.ClusterServiceMock, - } + stepInput := NewCosmosStepInput(testInfo) + stepInput.FrontendClient = testInfo.Get20240610ClientFactory + stepInput.FrontendURL = testInfo.FrontendURL + stepInput.ClusterServiceMockInfo = testInfo.ClusterServiceMock + for _, step := range tt.steps { t.Logf("Running step %s", step.StepID()) ctx := t.Context() ctx = utils.ContextWithLogger(ctx, slogt.New(t, slogt.JSON())) - step.RunTest(ctx, t, stepInput) + step.RunTest(ctx, t, *stepInput) } } -func NewStep[InternalAPIType any](indexString, stepType, stepName string, testDir fs.FS, path string, specializer ResourceCRUDTestSpecializer[InternalAPIType]) (IntegrationTestStep, error) { +func NewStep[InternalAPIType any](indexString, stepType, stepName string, testDir fs.FS, path string) (IntegrationTestStep, error) { itoInt, err := strconv.Atoi(indexString) if err != nil { return nil, fmt.Errorf("failed to convert %s to int: %w", indexString, err) @@ -160,22 +161,22 @@ func NewStep[InternalAPIType any](indexString, stepType, stepName string, testDi return NewCosmosCompareStep(stepID, stepDir) case "create": - return newCreateStep(stepID, specializer, stepDir) + return newCreateStep[InternalAPIType](stepID, stepDir) case "replace": - return newReplaceStep(stepID, specializer, stepDir) + return newReplaceStep[InternalAPIType](stepID, stepDir) case "get": - return newGetStep(stepID, specializer, stepDir) + return newGetStep[InternalAPIType](stepID, stepDir) case "getByID": - return newGetByIDStep(stepID, specializer, stepDir) + return newGetByIDStep[InternalAPIType](stepID, stepDir) case "untypedGet": return newUntypedGetStep(stepID, stepDir) case "list": - return newListStep(stepID, specializer, stepDir) + return newListStep[InternalAPIType](stepID, stepDir) case "listActiveOperations": return newListActiveOperationsStep(stepID, stepDir) @@ -187,7 +188,7 @@ func NewStep[InternalAPIType any](indexString, stepType, stepName string, testDi return newUntypedListStep(stepID, stepDir) case "delete": - return newDeleteStep(stepID, specializer, stepDir) + return 
newDeleteStep[InternalAPIType](stepID, stepDir) case "untypedDelete": return newUntypedDeleteStep(stepID, stepDir) @@ -241,7 +242,27 @@ func stringifyResource(controller any) string { } type CosmosCRUDKey struct { - ParentResourceID string `json:"parentResourceId"` + ParentResourceID *azcorearm.ResourceID `json:"parentResourceId"` + ResourceType ResourceType `json:"resourceType"` +} + +type ResourceType struct { + azcorearm.ResourceType +} + +// MarshalText returns a textual representation of the ResourceType +func (o *ResourceType) MarshalText() ([]byte, error) { + return []byte(o.String()), nil +} + +// UnmarshalText decodes the textual representation of a ResourceType +func (o *ResourceType) UnmarshalText(text []byte) error { + newType, err := azcorearm.ParseResourceType(string(text)) + if err != nil { + return err + } + o.ResourceType = newType + return nil } func readResourcesInDir[InternalAPIType any](dir fs.FS) ([]*InternalAPIType, error) { @@ -298,9 +319,19 @@ func readRawBytesInDir(dir fs.FS) ([][]byte, error) { type StepInput struct { CosmosContainer *azcosmos.ContainerClient + ContentLoader integrationutils.ContentLoader + DocumentLister integrationutils.DocumentLister DBClient database.DBClient FrontendClient func(subscriptionID string) *hcpsdk20240610preview.ClientFactory FrontendURL string ClusterServiceMockInfo *integrationutils.ClusterServiceMock } + +func NewCosmosStepInput(storageInfo integrationutils.StorageIntegrationTestInfo) *StepInput { + return &StepInput{ + ContentLoader: storageInfo, + DocumentLister: storageInfo, + DBClient: storageInfo.CosmosClient(), + } +} diff --git a/test-integration/utils/databasemutationhelpers/step_cosmoscompare.go b/test-integration/utils/databasemutationhelpers/step_cosmoscompare.go index ae63879f6d..269b62a190 100644 --- a/test-integration/utils/databasemutationhelpers/step_cosmoscompare.go +++ b/test-integration/utils/databasemutationhelpers/step_cosmoscompare.go @@ -16,14 +16,11 @@ package databasemutationhelpers import ( "context" - "encoding/json" "io/fs" "testing" "github.com/stretchr/testify/require" - "github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos" - "github.com/Azure/ARO-HCP/internal/database" "github.com/Azure/ARO-HCP/internal/utils" ) @@ -53,27 +50,12 @@ func (l *cosmosCompare) StepID() StepID { } func (l *cosmosCompare) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { - // Query all documents in the container - querySQL := "SELECT * FROM c" - queryOptions := &azcosmos.QueryOptions{ - QueryParameters: []azcosmos.QueryParameter{}, - } - - queryPager := stepInput.CosmosContainer.NewQueryItemsPager(querySQL, azcosmos.PartitionKey{}, queryOptions) + var allActual []*database.TypedDocument + var err error - allActual := []*database.TypedDocument{} - for queryPager.More() { - queryResponse, err := queryPager.NextPage(ctx) - require.NoError(t, err) - - for _, item := range queryResponse.Items { - // Parse the document to get its ID for filename - curr := &database.TypedDocument{} - err = json.Unmarshal(item, curr) - require.NoError(t, err) - allActual = append(allActual, curr) - } - } + // Use the DocumentLister interface (works with both Cosmos and mock) + allActual, err = stepInput.DocumentLister.ListAllDocuments(ctx) + require.NoError(t, err) for _, currExpected := range l.expectedContent { found := false diff --git a/test-integration/utils/databasemutationhelpers/step_create.go b/test-integration/utils/databasemutationhelpers/step_create.go index 47ab569fac..eef5f329d8 100644 ---
a/test-integration/utils/databasemutationhelpers/step_create.go +++ b/test-integration/utils/databasemutationhelpers/step_create.go @@ -25,14 +25,13 @@ import ( ) type createStep[InternalAPIType any] struct { - stepID StepID - key CosmosCRUDKey - specializer ResourceCRUDTestSpecializer[InternalAPIType] + stepID StepID + key CosmosCRUDKey resources []*InternalAPIType } -func newCreateStep[InternalAPIType any](stepID StepID, specializer ResourceCRUDTestSpecializer[InternalAPIType], stepDir fs.FS) (*createStep[InternalAPIType], error) { +func newCreateStep[InternalAPIType any](stepID StepID, stepDir fs.FS) (*createStep[InternalAPIType], error) { keyBytes, err := fs.ReadFile(stepDir, "00-key.json") if err != nil { return nil, fmt.Errorf("failed to read key.json: %w", err) @@ -48,10 +47,9 @@ func newCreateStep[InternalAPIType any](stepID StepID, specializer ResourceCRUDT } return &createStep[InternalAPIType]{ - stepID: stepID, - key: key, - specializer: specializer, - resources: resources, + stepID: stepID, + key: key, + resources: resources, }, nil } @@ -62,10 +60,10 @@ func (l *createStep[InternalAPIType]) StepID() StepID { } func (l *createStep[InternalAPIType]) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { - controllerCRUDClient := l.specializer.ResourceCRUDFromKey(t, stepInput.CosmosContainer, l.key) + resourceCRUDClient := NewCosmosCRUD[InternalAPIType](t, stepInput.DBClient, l.key.ParentResourceID, l.key.ResourceType.ResourceType) for _, resource := range l.resources { - _, err := controllerCRUDClient.Create(ctx, resource, nil) + _, err := resourceCRUDClient.Create(ctx, resource, nil) require.NoError(t, err, "failed to create controller") } } diff --git a/test-integration/utils/databasemutationhelpers/step_delete.go b/test-integration/utils/databasemutationhelpers/step_delete.go index e6bcf81df2..57a38cca44 100644 --- a/test-integration/utils/databasemutationhelpers/step_delete.go +++ b/test-integration/utils/databasemutationhelpers/step_delete.go @@ -26,26 +26,18 @@ import ( "github.com/stretchr/testify/require" ) -type CosmosDeleteKey struct { - CosmosCRUDKey `json:",inline"` - - DeleteResourceName string `json:"deleteResourceName"` -} - type deleteStep[InternalAPIType any] struct { - stepID StepID - key CosmosDeleteKey - specializer ResourceCRUDTestSpecializer[InternalAPIType] - + stepID StepID + key CosmosItemKey expectedError string } -func newDeleteStep[InternalAPIType any](stepID StepID, specializer ResourceCRUDTestSpecializer[InternalAPIType], stepDir fs.FS) (*deleteStep[InternalAPIType], error) { +func newDeleteStep[InternalAPIType any](stepID StepID, stepDir fs.FS) (*deleteStep[InternalAPIType], error) { keyBytes, err := fs.ReadFile(stepDir, "00-key.json") if err != nil { return nil, fmt.Errorf("failed to read key.json: %w", err) } - var key CosmosDeleteKey + var key CosmosItemKey if err := json.Unmarshal(keyBytes, &key); err != nil { return nil, fmt.Errorf("failed to unmarshal key.json: %w", err) } @@ -59,7 +51,6 @@ func newDeleteStep[InternalAPIType any](stepID StepID, specializer ResourceCRUDT return &deleteStep[InternalAPIType]{ stepID: stepID, key: key, - specializer: specializer, expectedError: expectedError, }, nil } @@ -71,8 +62,8 @@ func (l *deleteStep[InternalAPIType]) StepID() StepID { } func (l *deleteStep[InternalAPIType]) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { - controllerCRUDClient := l.specializer.ResourceCRUDFromKey(t, stepInput.CosmosContainer, l.key.CosmosCRUDKey) - err := controllerCRUDClient.Delete(ctx, 
l.key.DeleteResourceName) + resourceCRUDClient := NewCosmosCRUD[InternalAPIType](t, stepInput.DBClient, l.key.ResourceID.Parent, l.key.ResourceID.ResourceType) + err := resourceCRUDClient.Delete(ctx, l.key.ResourceID.Name) switch { case len(l.expectedError) > 0: require.ErrorContains(t, err, l.expectedError) diff --git a/test-integration/utils/databasemutationhelpers/step_get.go b/test-integration/utils/databasemutationhelpers/step_get.go index 9c93a04886..e1e2e1dcbe 100644 --- a/test-integration/utils/databasemutationhelpers/step_get.go +++ b/test-integration/utils/databasemutationhelpers/step_get.go @@ -24,23 +24,28 @@ import ( "testing" "github.com/stretchr/testify/require" + + azcorearm "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" ) +type CosmosItemKey struct { + ResourceID *azcorearm.ResourceID `json:"resourceId"` +} + type getStep[InternalAPIType any] struct { - stepID StepID - key CosmosCRUDKey - specializer ResourceCRUDTestSpecializer[InternalAPIType] + stepID StepID + key CosmosItemKey expectedResource *InternalAPIType expectedError string } -func newGetStep[InternalAPIType any](stepID StepID, specializer ResourceCRUDTestSpecializer[InternalAPIType], stepDir fs.FS) (*getStep[InternalAPIType], error) { +func newGetStep[InternalAPIType any](stepID StepID, stepDir fs.FS) (*getStep[InternalAPIType], error) { keyBytes, err := fs.ReadFile(stepDir, "00-key.json") if err != nil { return nil, fmt.Errorf("failed to read key.json: %w", err) } - var key CosmosCRUDKey + var key CosmosItemKey if err := json.Unmarshal(keyBytes, &key); err != nil { return nil, fmt.Errorf("failed to unmarshal key.json: %w", err) } @@ -71,7 +76,6 @@ func newGetStep[InternalAPIType any](stepID StepID, specializer ResourceCRUDTest return &getStep[InternalAPIType]{ stepID: stepID, key: key, - specializer: specializer, expectedResource: expectedResource, expectedError: expectedError, }, nil @@ -84,9 +88,8 @@ func (l *getStep[InternalAPIType]) StepID() StepID { } func (l *getStep[InternalAPIType]) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { - controllerCRUDClient := l.specializer.ResourceCRUDFromKey(t, stepInput.CosmosContainer, l.key) - resourceName := l.specializer.NameFromInstance(l.expectedResource) - actualController, err := controllerCRUDClient.Get(ctx, resourceName) + resourceCRUDClient := NewCosmosCRUD[InternalAPIType](t, stepInput.DBClient, l.key.ResourceID.Parent, l.key.ResourceID.ResourceType) + actualResource, err := resourceCRUDClient.Get(ctx, l.key.ResourceID.Name) switch { case len(l.expectedError) > 0: require.ErrorContains(t, err, l.expectedError) @@ -95,10 +98,10 @@ func (l *getStep[InternalAPIType]) RunTest(ctx context.Context, t *testing.T, st require.NoError(t, err) } - if !l.specializer.InstanceEquals(l.expectedResource, actualController) { - t.Logf("actual:\n%v", stringifyResource(actualController)) + if reason, equals := ResourceInstanceEquals(t, l.expectedResource, actualResource); !equals { + t.Logf("actual:\n%v", stringifyResource(actualResource)) + t.Log(reason) // cmpdiff doesn't handle private fields gracefully - require.Equal(t, l.expectedResource, actualController) - t.Fatal("unexpected") + require.Equal(t, l.expectedResource, actualResource) } } diff --git a/test-integration/utils/databasemutationhelpers/step_getbyid.go b/test-integration/utils/databasemutationhelpers/step_getbyid.go index 59c4e02d82..e219cd65d0 100644 --- a/test-integration/utils/databasemutationhelpers/step_getbyid.go +++ b/test-integration/utils/databasemutationhelpers/step_getbyid.go @@ 
-33,15 +33,14 @@ type GetByIDCRUDKey struct { } type getByIDStep[InternalAPIType any] struct { - stepID StepID - key GetByIDCRUDKey - specializer ResourceCRUDTestSpecializer[InternalAPIType] + stepID StepID + key GetByIDCRUDKey expectedResource *InternalAPIType expectedError string } -func newGetByIDStep[InternalAPIType any](stepID StepID, specializer ResourceCRUDTestSpecializer[InternalAPIType], stepDir fs.FS) (*getByIDStep[InternalAPIType], error) { +func newGetByIDStep[InternalAPIType any](stepID StepID, stepDir fs.FS) (*getByIDStep[InternalAPIType], error) { keyBytes, err := fs.ReadFile(stepDir, "00-key.json") if err != nil { return nil, fmt.Errorf("failed to read key.json: %w", err) @@ -77,7 +76,6 @@ func newGetByIDStep[InternalAPIType any](stepID StepID, specializer ResourceCRUD return &getByIDStep[InternalAPIType]{ stepID: stepID, key: key, - specializer: specializer, expectedResource: expectedResource, expectedError: expectedError, }, nil @@ -90,8 +88,8 @@ func (l *getByIDStep[InternalAPIType]) StepID() StepID { } func (l *getByIDStep[InternalAPIType]) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { - controllerCRUDClient := l.specializer.ResourceCRUDFromKey(t, stepInput.CosmosContainer, l.key.CosmosCRUDKey) - actualController, err := controllerCRUDClient.GetByID(ctx, l.key.CosmosID) + resourceCRUDClient := NewCosmosCRUD[InternalAPIType](t, stepInput.DBClient, l.key.ParentResourceID, l.key.ResourceType.ResourceType) + actualResource, err := resourceCRUDClient.GetByID(ctx, l.key.CosmosID) switch { case len(l.expectedError) > 0: require.ErrorContains(t, err, l.expectedError) @@ -100,10 +98,10 @@ func (l *getByIDStep[InternalAPIType]) RunTest(ctx context.Context, t *testing.T require.NoError(t, err) } - if !l.specializer.InstanceEquals(l.expectedResource, actualController) { - t.Logf("actual:\n%v", stringifyResource(actualController)) + if reason, equals := ResourceInstanceEquals(t, l.expectedResource, actualResource); !equals { + t.Logf("actual:\n%v", stringifyResource(actualResource)) + t.Log(reason) // cmpdiff doesn't handle private fields gracefully - require.Equal(t, l.expectedResource, actualController) - t.Fatal("unexpected") + require.Equal(t, l.expectedResource, actualResource) } } diff --git a/test-integration/utils/databasemutationhelpers/step_list.go b/test-integration/utils/databasemutationhelpers/step_list.go index 9ed6578343..a56889a232 100644 --- a/test-integration/utils/databasemutationhelpers/step_list.go +++ b/test-integration/utils/databasemutationhelpers/step_list.go @@ -25,14 +25,13 @@ import ( ) type listStep[InternalAPIType any] struct { - stepID StepID - key CosmosCRUDKey - specializer ResourceCRUDTestSpecializer[InternalAPIType] + stepID StepID + key CosmosCRUDKey expectedResources []*InternalAPIType } -func newListStep[InternalAPIType any](stepID StepID, specializer ResourceCRUDTestSpecializer[InternalAPIType], stepDir fs.FS) (*listStep[InternalAPIType], error) { +func newListStep[InternalAPIType any](stepID StepID, stepDir fs.FS) (*listStep[InternalAPIType], error) { keyBytes, err := fs.ReadFile(stepDir, "00-key.json") if err != nil { return nil, fmt.Errorf("failed to read key.json: %w", err) @@ -50,7 +49,6 @@ func newListStep[InternalAPIType any](stepID StepID, specializer ResourceCRUDTes return &listStep[InternalAPIType]{ stepID: stepID, key: key, - specializer: specializer, expectedResources: expectedResources, }, nil } @@ -62,8 +60,8 @@ func (l *listStep[InternalAPIType]) StepID() StepID { } func (l *listStep[InternalAPIType]) 
RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { - controllerCRUDClient := l.specializer.ResourceCRUDFromKey(t, stepInput.CosmosContainer, l.key) - actualControllersIterator, err := controllerCRUDClient.List(ctx, nil) + resourceCRUDClient := NewCosmosCRUD[InternalAPIType](t, stepInput.DBClient, l.key.ParentResourceID, l.key.ResourceType.ResourceType) + actualControllersIterator, err := resourceCRUDClient.List(ctx, nil) require.NoError(t, err) actualResources := []*InternalAPIType{} @@ -81,7 +79,7 @@ func (l *listStep[InternalAPIType]) RunTest(ctx context.Context, t *testing.T, s for _, expected := range l.expectedResources { found := false for _, actual := range actualResources { - if l.specializer.InstanceEquals(expected, actual) { + if _, equals := ResourceInstanceEquals(t, expected, actual); equals { found = true break } @@ -89,14 +87,14 @@ func (l *listStep[InternalAPIType]) RunTest(ctx context.Context, t *testing.T, s if !found { t.Logf("actual:\n%v", stringifyResource(actualResources)) } - require.True(t, found, "expected resource not found: %v", l.specializer.NameFromInstance(expected)) + require.True(t, found, "expected resource not found: %v", ResourceName(expected)) } // all the actual must be expected for _, actual := range actualResources { found := false for _, expected := range l.expectedResources { - if l.specializer.InstanceEquals(expected, actual) { + if _, equals := ResourceInstanceEquals(t, expected, actual); equals { found = true break } @@ -104,6 +102,6 @@ func (l *listStep[InternalAPIType]) RunTest(ctx context.Context, t *testing.T, s if !found { t.Logf("expected:\n%v", stringifyResource(l.expectedResources)) } - require.True(t, found, "actual resource not found: %v", l.specializer.NameFromInstance(actual)) + require.True(t, found, "actual resource not found: %v", ResourceName(actual)) } } diff --git a/test-integration/utils/databasemutationhelpers/step_list_active_operations.go b/test-integration/utils/databasemutationhelpers/step_list_active_operations.go index 6742b5c5b6..37aa127764 100644 --- a/test-integration/utils/databasemutationhelpers/step_list_active_operations.go +++ b/test-integration/utils/databasemutationhelpers/step_list_active_operations.go @@ -23,8 +23,6 @@ import ( "github.com/stretchr/testify/require" - azcorearm "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" - "github.com/Azure/ARO-HCP/internal/api" "github.com/Azure/ARO-HCP/internal/database" ) @@ -65,12 +63,10 @@ func (l *listActiveOperationsStep) StepID() StepID { } func (l *listActiveOperationsStep) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { - parentResourceID, err := azcorearm.ParseResourceID(l.key.ParentResourceID) - require.NoError(t, err) + resourceCRUDClient := NewCosmosCRUD[api.Operation](t, stepInput.DBClient, l.key.ParentResourceID, l.key.ResourceType.ResourceType) - operationsCRUD := database.NewOperationCRUD(stepInput.CosmosContainer, parentResourceID.SubscriptionID) + var operationsCRUD = any(resourceCRUDClient).(database.OperationCRUD) actualControllersIterator := operationsCRUD.ListActiveOperations(nil) - require.NoError(t, err) actualControllers := []*api.Operation{} for _, actual := range actualControllersIterator.Items(ctx) { @@ -82,7 +78,6 @@ func (l *listActiveOperationsStep) RunTest(ctx context.Context, t *testing.T, st t.Logf("actual:\n%v", stringifyResource(actualControllers)) } - specializer := OperationCRUDSpecializer{} require.Equal(t, len(l.expectedOperations), len(actualControllers), "unexpected number of resources") // all 
the expected must be present for _, expected := range l.expectedOperations { @@ -98,7 +93,7 @@ func (l *listActiveOperationsStep) RunTest(ctx context.Context, t *testing.T, st if !found { t.Logf("actual:\n%v", stringifyResource(actualControllers)) } - require.True(t, found, "expected resource not found: %v", specializer.NameFromInstance(expected)) + require.True(t, found, "expected resource not found: %v", ResourceName(expected)) } // all the actual must be expected @@ -115,6 +110,6 @@ func (l *listActiveOperationsStep) RunTest(ctx context.Context, t *testing.T, st if !found { t.Logf("expected:\n%v", stringifyResource(l.expectedOperations)) } - require.True(t, found, "actual resource not found: %v", specializer.NameFromInstance(actual)) + require.True(t, found, "actual resource not found: %v", ResourceName(actual)) } } diff --git a/test-integration/utils/databasemutationhelpers/step_load_clusterservice.go b/test-integration/utils/databasemutationhelpers/step_load_clusterservice.go index 3d5b435d2e..d6aa6310c0 100644 --- a/test-integration/utils/databasemutationhelpers/step_load_clusterservice.go +++ b/test-integration/utils/databasemutationhelpers/step_load_clusterservice.go @@ -43,7 +43,7 @@ func (l *loadClusterServiceStep) StepID() StepID { func (l *loadClusterServiceStep) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { if stepInput.ClusterServiceMockInfo == nil { - t.Fatal("FrontendTestInfo must not be nil in loadClusterServiceStep, probably using from the wrong kind of test") + t.Fatal("ClusterServiceMockInfo must not be nil in loadClusterServiceStep, probably using from the wrong kind of test") } require.NoError(t, stepInput.ClusterServiceMockInfo.AddContent(t, l.clusterServiceContent)) } diff --git a/test-integration/utils/databasemutationhelpers/step_load_cosmos.go b/test-integration/utils/databasemutationhelpers/step_load_cosmos.go index 80d89a14c2..46d8d42369 100644 --- a/test-integration/utils/databasemutationhelpers/step_load_cosmos.go +++ b/test-integration/utils/databasemutationhelpers/step_load_cosmos.go @@ -22,7 +22,6 @@ import ( "github.com/stretchr/testify/require" "github.com/Azure/ARO-HCP/internal/utils" - "github.com/Azure/ARO-HCP/test-integration/utils/integrationutils" ) type loadCosmosStep struct { @@ -51,7 +50,8 @@ func (l *loadCosmosStep) StepID() StepID { func (l *loadCosmosStep) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { for _, content := range l.contents { - err := integrationutils.LoadCosmosContent(ctx, stepInput.CosmosContainer, content) + // Use the ContentLoader interface (works with both Cosmos and mock) + err := stepInput.ContentLoader.LoadContent(ctx, content) require.NoError(t, err, "failed to load cosmos content: %v", string(content)) } } diff --git a/test-integration/utils/databasemutationhelpers/step_replace.go b/test-integration/utils/databasemutationhelpers/step_replace.go index 35cd5aa529..0fd526af66 100644 --- a/test-integration/utils/databasemutationhelpers/step_replace.go +++ b/test-integration/utils/databasemutationhelpers/step_replace.go @@ -25,19 +25,18 @@ import ( ) type replaceStep[InternalAPIType any] struct { - stepID StepID - key CosmosCRUDKey - specializer ResourceCRUDTestSpecializer[InternalAPIType] + stepID StepID + key CosmosItemKey resources []*InternalAPIType } -func newReplaceStep[InternalAPIType any](stepID StepID, specializer ResourceCRUDTestSpecializer[InternalAPIType], stepDir fs.FS) (*replaceStep[InternalAPIType], error) { +func newReplaceStep[InternalAPIType any](stepID StepID, 
stepDir fs.FS) (*replaceStep[InternalAPIType], error) { keyBytes, err := fs.ReadFile(stepDir, "00-key.json") if err != nil { return nil, fmt.Errorf("failed to read key.json: %w", err) } - var key CosmosCRUDKey + var key CosmosItemKey if err := json.Unmarshal(keyBytes, &key); err != nil { return nil, fmt.Errorf("failed to unmarshal key.json: %w", err) } @@ -48,10 +47,9 @@ func newReplaceStep[InternalAPIType any](stepID StepID, specializer ResourceCRUD } return &replaceStep[InternalAPIType]{ - stepID: stepID, - key: key, - specializer: specializer, - resources: resources, + stepID: stepID, + key: key, + resources: resources, }, nil } @@ -62,15 +60,10 @@ func (l *replaceStep[InternalAPIType]) StepID() StepID { } func (l *replaceStep[InternalAPIType]) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { - resourceCRUDClient := l.specializer.ResourceCRUDFromKey(t, stepInput.CosmosContainer, l.key) + resourceCRUDClient := NewCosmosCRUD[InternalAPIType](t, stepInput.DBClient, l.key.ResourceID.Parent, l.key.ResourceID.ResourceType) for _, resource := range l.resources { - // find the existing to set the UID for an replace to replace instead of creating a new record. - existing, err := resourceCRUDClient.Get(ctx, l.specializer.NameFromInstance(resource)) - require.NoError(t, err) - l.specializer.WriteCosmosID(resource, existing) - - _, err = resourceCRUDClient.Replace(ctx, resource, nil) - require.NoError(t, err, "failed to replace controller") + _, err := resourceCRUDClient.Replace(ctx, resource, nil) + require.NoError(t, err, "failed to replace resource") } } diff --git a/test-integration/utils/databasemutationhelpers/step_untypeddelete.go b/test-integration/utils/databasemutationhelpers/step_untypeddelete.go index e73e4124e2..2fb866e580 100644 --- a/test-integration/utils/databasemutationhelpers/step_untypeddelete.go +++ b/test-integration/utils/databasemutationhelpers/step_untypeddelete.go @@ -26,21 +26,11 @@ import ( "github.com/stretchr/testify/require" azcorearm "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm" - - "github.com/Azure/ARO-HCP/internal/api" - "github.com/Azure/ARO-HCP/internal/database" ) -type UntypedDeleteKey struct { - UntypedCRUDKey `json:",inline"` - - DeleteResourceID string `json:"deleteResourceId"` -} - type untypedDeleteStep struct { - stepID StepID - key UntypedDeleteKey - specializer ResourceCRUDTestSpecializer[database.TypedDocument] + stepID StepID + key UntypedItemKey expectedError string } @@ -50,7 +40,7 @@ func newUntypedDeleteStep(stepID StepID, stepDir fs.FS) (*untypedDeleteStep, err if err != nil { return nil, fmt.Errorf("failed to read key.json: %w", err) } - var key UntypedDeleteKey + var key UntypedItemKey if err := json.Unmarshal(keyBytes, &key); err != nil { return nil, fmt.Errorf("failed to unmarshal key.json: %w", err) } @@ -64,7 +54,6 @@ func newUntypedDeleteStep(stepID StepID, stepDir fs.FS) (*untypedDeleteStep, err return &untypedDeleteStep{ stepID: stepID, key: key, - specializer: UntypedCRUDSpecializer{}, expectedError: expectedError, }, nil } @@ -76,17 +65,12 @@ func (l *untypedDeleteStep) StepID() StepID { } func (l *untypedDeleteStep) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) { - parentResourceID, err := azcorearm.ParseResourceID(l.key.ParentResourceID) + resourceID, err := azcorearm.ParseResourceID(l.key.ResourceID) require.NoError(t, err) - untypedCRUD := database.NewUntypedCRUD(stepInput.CosmosContainer, *parentResourceID) - for _, childKey := range l.key.Descendents { - childResourceType, err := 
azcorearm.ParseResourceType(childKey.ResourceType)
-		require.NoError(t, err)
-		untypedCRUD, err = untypedCRUD.Child(childResourceType, childKey.ResourceName)
-		require.NoError(t, err)
-	}
-	err = untypedCRUD.Delete(ctx, api.Must(azcorearm.ParseResourceID(l.key.DeleteResourceID)))
+	untypedCRUD, err := stepInput.DBClient.UntypedCRUD(*resourceID.Parent)
+	require.NoError(t, err)
+	err = untypedCRUD.Delete(ctx, resourceID)
 	switch {
 	case len(l.expectedError) > 0:
 		require.ErrorContains(t, err, l.expectedError)
diff --git a/test-integration/utils/databasemutationhelpers/step_untypedget.go b/test-integration/utils/databasemutationhelpers/step_untypedget.go
index 4f8b1fff8d..6d8f0d6844 100644
--- a/test-integration/utils/databasemutationhelpers/step_untypedget.go
+++ b/test-integration/utils/databasemutationhelpers/step_untypedget.go
@@ -30,10 +30,13 @@ import (
 	"github.com/Azure/ARO-HCP/internal/database"
 )
 
+type UntypedItemKey struct {
+	ResourceID string `json:"resourceId"`
+}
+
 type untypedGetStep struct {
-	stepID      StepID
-	key         UntypedCRUDKey
-	specializer ResourceCRUDTestSpecializer[database.TypedDocument]
+	stepID StepID
+	key    UntypedItemKey
 
 	expectedResource *database.TypedDocument
 	expectedError    string
@@ -44,7 +47,7 @@ func newUntypedGetStep(stepID StepID, stepDir fs.FS) (*untypedGetStep, error) {
 	if err != nil {
 		return nil, fmt.Errorf("failed to read key.json: %w", err)
 	}
-	var key UntypedCRUDKey
+	var key UntypedItemKey
 	if err := json.Unmarshal(keyBytes, &key); err != nil {
 		return nil, fmt.Errorf("failed to unmarshal key.json: %w", err)
 	}
@@ -75,7 +78,6 @@ func newUntypedGetStep(stepID StepID, stepDir fs.FS) (*untypedGetStep, error) {
 	return &untypedGetStep{
 		stepID:           stepID,
 		key:              key,
-		specializer:      UntypedCRUDSpecializer{},
 		expectedResource: expectedResource,
 		expectedError:    expectedError,
 	}, nil
 }
@@ -88,17 +90,14 @@ func (l *untypedGetStep) StepID() StepID {
 }
 
 func (l *untypedGetStep) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) {
-	parentResourceID, err := azcorearm.ParseResourceID(l.key.ParentResourceID)
+	resourceID, err := azcorearm.ParseResourceID(l.key.ResourceID)
 	require.NoError(t, err)
 
-	untypedCRUD := database.NewUntypedCRUD(stepInput.CosmosContainer, *parentResourceID)
-	for _, childKey := range l.key.Descendents {
-		childResourceType, err := azcorearm.ParseResourceType(childKey.ResourceType)
-		require.NoError(t, err)
-		untypedCRUD, err = untypedCRUD.Child(childResourceType, childKey.ResourceName)
-		require.NoError(t, err)
-	}
-	actualResource, err := untypedCRUD.Get(ctx, parentResourceID)
+	untypedCRUD, err := stepInput.DBClient.UntypedCRUD(*resourceID.Parent)
+	require.NoError(t, err)
+	untypedCRUD, err = untypedCRUD.Child(resourceID.ResourceType, resourceID.Name)
+	require.NoError(t, err)
+	actualResource, err := untypedCRUD.Get(ctx, resourceID)
 	switch {
 	case len(l.expectedError) > 0:
 		require.ErrorContains(t, err, l.expectedError)
@@ -107,10 +106,10 @@ func (l *untypedGetStep) RunTest(ctx context.Context, t *testing.T, stepInput St
 		require.NoError(t, err)
 	}
 
-	if !l.specializer.InstanceEquals(l.expectedResource, actualResource) {
+	if reason, equals := ResourceInstanceEquals(t, l.expectedResource, actualResource); !equals {
 		t.Logf("actual:\n%v", stringifyResource(actualResource))
+		t.Log(reason)
 		// cmpdiff doesn't handle private fields gracefully
 		require.Equal(t, l.expectedResource, actualResource)
-		t.Fatal("unexpected")
 	}
 }
diff --git a/test-integration/utils/databasemutationhelpers/step_untypedlist.go b/test-integration/utils/databasemutationhelpers/step_untypedlist.go
index f3dd72ab58..1ff5611c19 100644
--- a/test-integration/utils/databasemutationhelpers/step_untypedlist.go
+++ b/test-integration/utils/databasemutationhelpers/step_untypedlist.go
@@ -29,9 +29,8 @@ import (
 )
 
 type untypedListStep struct {
-	stepID      StepID
-	key         UntypedCRUDKey
-	specializer ResourceCRUDTestSpecializer[database.TypedDocument]
+	stepID StepID
+	key    UntypedCRUDKey
 
 	expectedResources []*database.TypedDocument
 }
@@ -54,7 +53,6 @@ func newUntypedListStep(stepID StepID, stepDir fs.FS) (*untypedListStep, error)
 	return &untypedListStep{
 		stepID:            stepID,
 		key:               key,
-		specializer:       UntypedCRUDSpecializer{},
 		expectedResources: expectedResources,
 	}, nil
 }
@@ -66,10 +64,8 @@ func (l *untypedListStep) StepID() StepID {
 }
 
 func (l *untypedListStep) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) {
-	parentResourceID, err := azcorearm.ParseResourceID(l.key.ParentResourceID)
+	untypedCRUD, err := stepInput.DBClient.UntypedCRUD(*l.key.ParentResourceID)
 	require.NoError(t, err)
-
-	untypedCRUD := database.NewUntypedCRUD(stepInput.CosmosContainer, *parentResourceID)
 	for _, childKey := range l.key.Descendents {
 		childResourceType, err := azcorearm.ParseResourceType(childKey.ResourceType)
 		require.NoError(t, err)
@@ -94,7 +90,7 @@ func (l *untypedListStep) RunTest(ctx context.Context, t *testing.T, stepInput S
 	for _, expected := range l.expectedResources {
 		found := false
 		for _, actual := range actualResources {
-			if l.specializer.InstanceEquals(expected, actual) {
+			if _, equals := ResourceInstanceEquals(t, expected, actual); equals {
 				found = true
 				break
 			}
@@ -102,14 +98,14 @@ func (l *untypedListStep) RunTest(ctx context.Context, t *testing.T, stepInput S
 		if !found {
 			t.Logf("actual:\n%v", stringifyResource(actualResources))
 		}
-		require.True(t, found, "expected resource not found: %v", l.specializer.NameFromInstance(expected))
+		require.True(t, found, "expected resource not found: %v", ResourceName(expected))
 	}
 
 	// all the actual must be expected
 	for _, actual := range actualResources {
 		found := false
 		for _, expected := range l.expectedResources {
-			if l.specializer.InstanceEquals(expected, actual) {
+			if _, equals := ResourceInstanceEquals(t, expected, actual); equals {
 				found = true
 				break
 			}
@@ -117,6 +113,6 @@ func (l *untypedListStep) RunTest(ctx context.Context, t *testing.T, stepInput S
 		if !found {
 			t.Logf("expected:\n%v", stringifyResource(l.expectedResources))
 		}
-		require.True(t, found, "actual resource not found: %v", l.specializer.NameFromInstance(actual))
+		require.True(t, found, "actual resource not found: %v", ResourceName(actual))
 	}
 }
diff --git a/test-integration/utils/databasemutationhelpers/step_untypedlistrecursive.go b/test-integration/utils/databasemutationhelpers/step_untypedlistrecursive.go
index e0888af000..505cd734ab 100644
--- a/test-integration/utils/databasemutationhelpers/step_untypedlistrecursive.go
+++ b/test-integration/utils/databasemutationhelpers/step_untypedlistrecursive.go
@@ -40,9 +40,8 @@ type UntypedChild struct {
 }
 
 type untypedListRecursiveStep struct {
-	stepID      StepID
-	key         UntypedCRUDKey
-	specializer ResourceCRUDTestSpecializer[database.TypedDocument]
+	stepID StepID
+	key    UntypedCRUDKey
 
 	expectedResources []*database.TypedDocument
 }
@@ -65,7 +64,6 @@ func newUntypedListRecursiveStep(stepID StepID, stepDir fs.FS) (*untypedListRecu
 	return &untypedListRecursiveStep{
 		stepID:            stepID,
 		key:               key,
-		specializer:       UntypedCRUDSpecializer{},
 		expectedResources: expectedResources,
 	}, nil
 }
@@ -77,10 +75,8 @@ func (l *untypedListRecursiveStep) StepID() StepID {
 }
 
 func (l *untypedListRecursiveStep) RunTest(ctx context.Context, t *testing.T, stepInput StepInput) {
-	parentResourceID, err := azcorearm.ParseResourceID(l.key.ParentResourceID)
+	untypedCRUD, err := stepInput.DBClient.UntypedCRUD(*l.key.ParentResourceID)
 	require.NoError(t, err)
-
-	untypedCRUD := database.NewUntypedCRUD(stepInput.CosmosContainer, *parentResourceID)
 	for _, childKey := range l.key.Descendents {
 		childResourceType, err := azcorearm.ParseResourceType(childKey.ResourceType)
 		require.NoError(t, err)
@@ -105,7 +101,7 @@ func (l *untypedListRecursiveStep) RunTest(ctx context.Context, t *testing.T, st
 	for _, expected := range l.expectedResources {
 		found := false
 		for _, actual := range actualResources {
-			if l.specializer.InstanceEquals(expected, actual) {
+			if _, equals := ResourceInstanceEquals(t, expected, actual); equals {
 				found = true
 				break
 			}
@@ -113,14 +109,14 @@ func (l *untypedListRecursiveStep) RunTest(ctx context.Context, t *testing.T, st
 		if !found {
 			t.Logf("actual:\n%v", stringifyResource(actualResources))
 		}
-		require.True(t, found, "expected resource not found: %v", l.specializer.NameFromInstance(expected))
+		require.True(t, found, "expected resource not found: %v", ResourceName(expected))
 	}
 
 	// all the actual must be expected
 	for _, actual := range actualResources {
 		found := false
 		for _, expected := range l.expectedResources {
-			if l.specializer.InstanceEquals(expected, actual) {
+			if _, equals := ResourceInstanceEquals(t, expected, actual); equals {
 				found = true
 				break
 			}
@@ -128,6 +124,6 @@ func (l *untypedListRecursiveStep) RunTest(ctx context.Context, t *testing.T, st
 		if !found {
 			t.Logf("expected:\n%v", stringifyResource(l.expectedResources))
 		}
-		require.True(t, found, "actual resource not found: %v", l.specializer.NameFromInstance(actual))
+		require.True(t, found, "actual resource not found: %v", ResourceName(actual))
 	}
 }
diff --git a/test-integration/utils/integrationutils/content_loader.go b/test-integration/utils/integrationutils/content_loader.go
new file mode 100644
index 0000000000..3c66f9731a
--- /dev/null
+++ b/test-integration/utils/integrationutils/content_loader.go
@@ -0,0 +1,84 @@
+// Copyright 2025 Microsoft Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package integrationutils
+
+import (
+	"context"
+	"encoding/json"
+
+	"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
+
+	"github.com/Azure/ARO-HCP/internal/database"
+)
+
+// ContentLoader is an interface for loading test content into a database.
+// This abstraction allows tests to run against either a real Cosmos DB
+// or a mock database implementation.
+type ContentLoader interface {
+	// LoadContent loads a single JSON document into the database.
+	LoadContent(ctx context.Context, content []byte) error
+}
+
+// DocumentLister is an interface for listing all documents from a database.
+// This is used by the cosmosCompare step to verify database contents.
+type DocumentLister interface {
+	// ListAllDocuments returns all documents in the database.
+	ListAllDocuments(ctx context.Context) ([]*database.TypedDocument, error)
+}
+
+// CosmosContentLoader implements ContentLoader and DocumentLister using a real Cosmos DB container.
+type CosmosContentLoader struct {
+	container *azcosmos.ContainerClient
+}
+
+// NewCosmosContentLoader creates a new CosmosContentLoader from a Cosmos container.
+func NewCosmosContentLoader(container *azcosmos.ContainerClient) *CosmosContentLoader {
+	return &CosmosContentLoader{container: container}
+}
+
+// LoadContent loads a single JSON document into Cosmos DB.
+func (c *CosmosContentLoader) LoadContent(ctx context.Context, content []byte) error {
+	return LoadCosmosContent(ctx, c.container, content)
+}
+
+// ListAllDocuments returns all documents in the Cosmos container.
+func (c *CosmosContentLoader) ListAllDocuments(ctx context.Context) ([]*database.TypedDocument, error) {
+	querySQL := "SELECT * FROM c"
+	queryOptions := &azcosmos.QueryOptions{
+		QueryParameters: []azcosmos.QueryParameter{},
+	}
+
+	queryPager := c.container.NewQueryItemsPager(querySQL, azcosmos.PartitionKey{}, queryOptions)
+
+	var results []*database.TypedDocument
+	for queryPager.More() {
+		queryResponse, err := queryPager.NextPage(ctx)
+		if err != nil {
+			return nil, err
+		}
+
+		for _, item := range queryResponse.Items {
+			var doc database.TypedDocument
+			if err := json.Unmarshal(item, &doc); err != nil {
+				return nil, err
+			}
+			results = append(results, &doc)
+		}
+	}
+	return results, nil
+}
+
+var _ ContentLoader = &CosmosContentLoader{}
+var _ DocumentLister = &CosmosContentLoader{}
diff --git a/test-integration/utils/integrationutils/cosmos_testinfo.go b/test-integration/utils/integrationutils/cosmos_testinfo.go
index b874d03909..f09db94f69 100644
--- a/test-integration/utils/integrationutils/cosmos_testinfo.go
+++ b/test-integration/utils/integrationutils/cosmos_testinfo.go
@@ -29,8 +29,6 @@ import (
 	"strings"
 	"testing"
 
-	"github.com/google/uuid"
-
 	"k8s.io/apimachinery/pkg/util/rand"
 
 	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
@@ -40,12 +38,20 @@ import (
 	"github.com/Azure/ARO-HCP/frontend/cmd"
 	"github.com/Azure/ARO-HCP/internal/api"
-	"github.com/Azure/ARO-HCP/internal/api/arm"
 	"github.com/Azure/ARO-HCP/internal/database"
 	"github.com/Azure/ARO-HCP/internal/utils"
 )
 
-func NewCosmosFromTestingEnv(ctx context.Context, t *testing.T) (*CosmosIntegrationTestInfo, error) {
+type CosmosIntegrationTestInfo struct {
+	ArtifactsDir string
+
+	CosmosDatabaseClient *azcosmos.DatabaseClient
+	DBClient             database.DBClient
+	cosmosClient         *azcosmos.Client
+	DatabaseName         string
+}
+
+func NewCosmosFromTestingEnv(ctx context.Context, t *testing.T) (StorageIntegrationTestInfo, error) {
 	cosmosClient, err := createCosmosClientFromEnv()
 	if err != nil {
 		return nil, err
@@ -64,7 +70,7 @@ func NewCosmosFromTestingEnv(ctx context.Context, t *testing.T) (*CosmosIntegrat
 		ArtifactsDir:         path.Join(getArtifactDir(), t.Name()),
 		CosmosDatabaseClient: cosmosDatabaseClient,
 		DBClient:             dbClient,
-		CosmosClient:         cosmosClient,
+		cosmosClient:         cosmosClient,
 		DatabaseName:         cosmosDatabaseName,
 	}
 	return testInfo, nil
@@ -96,6 +102,14 @@ func LoadCosmosContentFromFS(ctx context.Context, cosmosContainer *azcosmos.Cont
 	return nil
 }
 
+func (s *CosmosIntegrationTestInfo) ListAllDocuments(ctx context.Context) ([]*database.TypedDocument, error) {
+	return NewCosmosContentLoader(s.CosmosResourcesContainer()).ListAllDocuments(ctx)
+}
+
+func (s *CosmosIntegrationTestInfo) CosmosClient() database.DBClient {
+	return s.DBClient
+}
+
 func LoadCosmosContent(ctx context.Context, cosmosContainer *azcosmos.ContainerClient, content []byte) error {
 	contentMap := map[string]any{}
 	if err := json.Unmarshal(content, &contentMap); err != nil {
@@ -130,7 +144,7 @@ func LoadCosmosContent(ctx context.Context, cosmosContainer *azcosmos.ContainerC
 }
 
 func (s *CosmosIntegrationTestInfo) CosmosResourcesContainer() *azcosmos.ContainerClient {
-	resources, err := s.CosmosClient.NewContainer(s.DatabaseName, "Resources")
+	resources, err := s.cosmosClient.NewContainer(s.DatabaseName, "Resources")
 	if err != nil {
 		panic(err)
 	}
@@ -138,24 +152,6 @@ func (s *CosmosIntegrationTestInfo) CosmosResourcesContain
 	return resources
 }
 
-func (s *CosmosIntegrationTestInfo) CreateNewSubscription(ctx context.Context) (string, *arm.Subscription, error) {
-	subscriptionID := uuid.NewString()
-	return s.CreateSpecificSubscription(ctx, subscriptionID)
-}
-
-func (s *CosmosIntegrationTestInfo) CreateSpecificSubscription(ctx context.Context, subscriptionID string) (string, *arm.Subscription, error) {
-	subscription := &arm.Subscription{
-		ResourceID: api.Must(arm.ToSubscriptionResourceID(subscriptionID)),
-		State:      arm.SubscriptionStateRegistered,
-	}
-	ret, err := s.DBClient.Subscriptions().Create(ctx, subscription, nil)
-	if err != nil {
-		return "", nil, err
-	}
-
-	return subscriptionID, ret, err
-}
-
 func (s *CosmosIntegrationTestInfo) Cleanup(ctx context.Context) {
 	logger := utils.LoggerFromContext(ctx)
 	if err := s.cleanupDatabase(ctx); err != nil {
@@ -171,7 +167,7 @@ func (s *CosmosIntegrationTestInfo) cleanupDatabase(ctx context.Context) error {
 	}
 
 	// Save all database content before deleting
-	if err := s.saveAllDatabaseContent(ctx); err != nil {
+	if err := saveAllDatabaseContent(ctx, s, s.ArtifactsDir); err != nil {
 		logger.Error("Failed to save database content", "error", err)
 		// Continue with deletion even if saving fails
 	}
@@ -190,137 +186,125 @@ func (s *CosmosIntegrationTestInfo) cleanupDatabase(ctx context.Context) error {
 }
 
 // saveAllDatabaseContent reads all records from all containers and saves them to files
-func (s *CosmosIntegrationTestInfo) saveAllDatabaseContent(ctx context.Context) error {
+func saveAllDatabaseContent(ctx context.Context, documentLister DocumentLister, artifactDir string) error {
 	logger := utils.LoggerFromContext(ctx)
 
 	// Create timestamped subdirectory for this database
-	cosmosDir := filepath.Join(s.ArtifactsDir, "cosmos-content")
+	cosmosDir := filepath.Join(artifactDir, "cosmos-content")
 	if err := os.MkdirAll(cosmosDir, 0755); err != nil {
 		return fmt.Errorf("failed to create artifact directory %s: %w", cosmosDir, err)
 	}
 	logger.Info("Saving Cosmos DB content", "cosmosDir", cosmosDir)
 
 	// List all containers in the database
-	containers := []string{"Resources", "Billing", "Locks"}
-	for _, containerName := range containers {
-		if err := s.saveContainerContent(ctx, containerName, cosmosDir); err != nil {
-			logger.Error("Failed to save container content", "error", err, "containerName", containerName)
-			// Continue with other containers
-		}
+	if err := saveContainerContent(ctx, documentLister, cosmosDir); err != nil {
+		logger.Error("Failed to save container content", "error", err)
+		// Continue with other containers
 	}
 
 	return nil
 }
 
 // saveContainerContent saves all documents from a specific container
-func (s *CosmosIntegrationTestInfo) saveContainerContent(ctx context.Context, containerName, outputDir string) error {
+func saveContainerContent(ctx context.Context, documentLister DocumentLister, outputDir string) error {
 	logger := utils.LoggerFromContext(ctx)
 
-	containerClient, err := s.CosmosDatabaseClient.NewContainer(containerName)
-	if err != nil {
-		return fmt.Errorf("failed to get container client for %s: %w", containerName, err)
-	}
-
 	// Create subdirectory for this container
-	containerDir := filepath.Join(outputDir, containerName)
+	containerDir := filepath.Join(outputDir, "Resources")
 	if err := os.MkdirAll(containerDir, 0755); err != nil {
 		return fmt.Errorf("failed to create container directory %s: %w", containerDir, err)
 	}
 
-	// Query all documents in the container
-	querySQL := "SELECT * FROM c"
-	queryOptions := &azcosmos.QueryOptions{
-		QueryParameters: []azcosmos.QueryParameter{},
+	documents, err := documentLister.ListAllDocuments(ctx)
+	if err != nil {
+		return fmt.Errorf("failed to list documents: %w", err)
 	}
 
-	queryPager := containerClient.NewQueryItemsPager(querySQL, azcosmos.PartitionKey{}, queryOptions)
-
 	docCount := 0
-	for queryPager.More() {
-		queryResponse, err := queryPager.NextPage(ctx)
+	for _, currTypedDocument := range documents {
+		item, err := json.MarshalIndent(currTypedDocument, "", " ")
 		if err != nil {
-			return fmt.Errorf("failed to query container %s: %w", containerName, err)
+			logger.Error("Failed to serialize", "error", err)
+			continue
 		}
 
-		for _, item := range queryResponse.Items {
-			// Parse the document to get its ID for filename
-			var docMap map[string]interface{}
-			if err := json.Unmarshal(item, &docMap); err != nil {
-				logger.Error("Failed to parse document in", "error", err, "containerName", containerName)
-				continue
-			}
+		// Parse the document to get its ID for filename
+		var docMap map[string]interface{}
+		if err := json.Unmarshal(item, &docMap); err != nil {
+			logger.Error("Failed to parse document in", "error", err)
+			continue
+		}
 
-			filename := ""
-			resourceType := docMap["resourceType"]
-			var armResourceID *azcorearm.ResourceID
-			var properties map[string]any
-			obj, hasProperties := docMap["properties"]
-			if hasProperties {
-				properties = obj.(map[string]any)
-				if resourceID, hasResourceID := properties["resourceId"]; hasResourceID && resourceID != nil {
-					armResourceID, _ = azcorearm.ParseResourceID(resourceID.(string))
-				}
+		filename := ""
+		resourceType := docMap["resourceType"]
+		var armResourceID *azcorearm.ResourceID
+		var properties map[string]any
+		obj, hasProperties := docMap["properties"]
+		if hasProperties {
+			properties = obj.(map[string]any)
+			if resourceID, hasResourceID := properties["resourceId"]; hasResourceID && resourceID != nil {
+				armResourceID, _ = azcorearm.ParseResourceID(resourceID.(string))
 			}
-			switch {
-			case armResourceID != nil:
+		}
+		switch {
+		case armResourceID != nil:
+			filename = filepath.Join(
+				resourceIDToDir(armResourceID),
+				armResourceID.Name+".json",
+			)
+
+		case strings.EqualFold(resourceType.(string), azcorearm.SubscriptionResourceType.String()):
+			filename = filepath.Join(
+				"subscriptions",
+				fmt.Sprintf("subscription_%s.json", docMap["id"].(string)))
+
+		case strings.EqualFold(resourceType.(string), api.OperationStatusResourceType.String()):
+			externalID := properties["externalId"].(string)
+			if clusterResourceID, _ := azcorearm.ParseResourceID(externalID); clusterResourceID != nil {
+				clusterDir := resourceIDToDir(clusterResourceID)
 				filename = filepath.Join(
-					resourceIDToDir(armResourceID),
-					armResourceID.Name+".json",
+					clusterDir,
+					fmt.Sprintf("hcpoperationstatuses_%v_%v_%v.json", properties["startTime"], properties["request"], docMap["id"]),
 				)
-
-			case strings.EqualFold(resourceType.(string), azcorearm.SubscriptionResourceType.String()):
-				filename = filepath.Join(
-					"subscriptions",
-					fmt.Sprintf("subscription_%s.json", docMap["id"].(string)))
-
-			case strings.EqualFold(resourceType.(string), api.OperationStatusResourceType.String()):
-				externalID := properties["externalId"].(string)
-				if clusterResourceID, _ := azcorearm.ParseResourceID(externalID); clusterResourceID != nil {
-					clusterDir := resourceIDToDir(clusterResourceID)
-					filename = filepath.Join(
-						clusterDir,
-						fmt.Sprintf("hcpoperationstatuses_%v_%v_%v.json", properties["startTime"], properties["request"], docMap["id"]),
-					)
-				}
 			}
+		}
 
-			if len(filename) == 0 {
-				if id, ok := docMap["id"].(string); ok {
-					// Sanitize filename
-					basename := strings.ReplaceAll("unknown-type-"+id+".json", "/", "_")
-					basename = strings.ReplaceAll(basename, "\\", "_")
-					basename = strings.ReplaceAll(basename, ":", "_")
-					filename = filepath.Join("unknown", basename)
-				} else {
-					filename = filepath.Join("unknown", fmt.Sprintf("unknown_%d.json", docCount))
-				}
-			}
-			filename = filepath.Join(containerDir, filename)
-			logger.Info("Saving document", "filename", filename)
-
-			dirName := filepath.Dir(filename)
-			if err := os.MkdirAll(dirName, 0755); err != nil {
-				return fmt.Errorf("failed to create directory %s: %w", dirName, err)
-			}
-			prettyPrint, err := json.MarshalIndent(docMap, "", " ")
-			if err != nil {
-				return fmt.Errorf("failed to marshal document: %w", err)
-			}
-			// Write document to file
-			if err := os.WriteFile(filename, prettyPrint, 0644); err != nil {
-				logger.Error("Failed to write document", "error", err, "filename", filename)
-				continue
+		if len(filename) == 0 {
+			if id, ok := docMap["id"].(string); ok {
+				// Sanitize filename
+				basename := strings.ReplaceAll("unknown-type-"+id+".json", "/", "_")
+				basename = strings.ReplaceAll(basename, "\\", "_")
+				basename = strings.ReplaceAll(basename, ":", "_")
+				filename = filepath.Join("unknown", basename)
+			} else {
+				filename = filepath.Join("unknown", fmt.Sprintf("unknown_%d.json", docCount))
 			}
+		}
+		filename = filepath.Join(containerDir, filename)
+		logger.Info("Saving document", "filename", filename)
 
-			docCount++
+		dirName := filepath.Dir(filename)
+		if err := os.MkdirAll(dirName, 0755); err != nil {
+			return fmt.Errorf("failed to create directory %s: %w", dirName, err)
+		}
+		prettyPrint, err := json.MarshalIndent(docMap, "", " ")
+		if err != nil {
+			return fmt.Errorf("failed to marshal document: %w", err)
 		}
+		// Write document to file
+		if err := os.WriteFile(filename, prettyPrint, 0644); err != nil {
+			logger.Error("Failed to write document", "error", err, "filename", filename)
+			continue
+		}
+
+		docCount++
 	}
 
-	logger.Info("Saved documents from container", "numDocs", docCount, "containerName", containerName)
+	logger.Info("Saved documents from container", "numDocs", docCount)
 	return nil
 }
 
-func (s *CosmosIntegrationTestInfo) CreateInitialCosmosContent(ctx context.Context, createDir fs.FS) error {
+func LoadAllContent(ctx context.Context, contentLoader ContentLoader, createDir fs.FS) error {
 	dirContent, err := fs.ReadDir(createDir, ".")
 	if err != nil {
 		return fmt.Errorf("failed to read dir: %w", err)
 	}
@@ -338,14 +322,14 @@ func (s *CosmosIntegrationTestInfo) CreateInitialCosmosConte
 		if err != nil {
 			return fmt.Errorf("failed to read file %s: %w", dirEntry.Name(), err)
 		}
-		if err := s.createInitialCosmosContent(ctx, fileContent); err != nil {
+		if err := contentLoader.LoadContent(ctx, fileContent); err != nil {
 			return fmt.Errorf("failed to create initial Cosmos content: %w", err)
 		}
 	}
 	return nil
 }
 
-func (s *CosmosIntegrationTestInfo) createInitialCosmosContent(ctx context.Context, content []byte) error {
+func (s *CosmosIntegrationTestInfo) LoadContent(ctx context.Context, content []byte) error {
 	return LoadCosmosContent(ctx, s.CosmosResourcesContainer(), content)
 }
 
@@ -434,3 +418,7 @@ func initializeCosmosDBForFrontend(ctx context.Context, cosmosClient *azcosmos.C
 
 	return cosmosDatabaseClient, nil
 }
+
+func (s *CosmosIntegrationTestInfo) GetArtifactDir() string {
+	return s.ArtifactsDir
+}
diff --git a/test-integration/utils/integrationutils/frontend_testinfo.go b/test-integration/utils/integrationutils/frontend_testinfo.go
index bd640b4c96..eed8d59550 100644
--- a/test-integration/utils/integrationutils/frontend_testinfo.go
+++ b/test-integration/utils/integrationutils/frontend_testinfo.go
@@ -23,7 +23,6 @@ import (
 	azcorearm "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm"
 	"github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud"
 	"github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
-	"github.com/Azure/azure-sdk-for-go/sdk/data/azcosmos"
 
 	"github.com/Azure/ARO-HCP/frontend/pkg/frontend"
 	"github.com/Azure/ARO-HCP/internal/api"
@@ -32,17 +31,18 @@ import (
 	hcpsdk20240610preview "github.com/Azure/ARO-HCP/test/sdk/v20240610preview/resourcemanager/redhatopenshifthcp/armredhatopenshifthcp"
 )
 
-type CosmosIntegrationTestInfo struct {
-	ArtifactsDir string
+type StorageIntegrationTestInfo interface {
+	ContentLoader
+	DocumentLister
+
+	GetArtifactDir() string
+	CosmosClient() database.DBClient
 
-	CosmosDatabaseClient *azcosmos.DatabaseClient
-	DBClient             database.DBClient
-	CosmosClient         *azcosmos.Client
-	DatabaseName         string
+	Cleanup(ctx context.Context)
 }
 
 type FrontendIntegrationTestInfo struct {
-	*CosmosIntegrationTestInfo
+	StorageIntegrationTestInfo
 	*ClusterServiceMock
 
 	ArtifactsDir string
@@ -88,7 +88,7 @@ func (emptySystemData) Do(req *policy.Request) (*http.Response, error) {
 }
 
 func (s *FrontendIntegrationTestInfo) Cleanup(ctx context.Context) {
-	s.CosmosIntegrationTestInfo.Cleanup(ctx)
+	s.StorageIntegrationTestInfo.Cleanup(ctx)
 	s.ClusterServiceMock.Cleanup(ctx)
 }
 
diff --git a/test-integration/utils/integrationutils/mock_cosmos_testinfo.go b/test-integration/utils/integrationutils/mock_cosmos_testinfo.go
new file mode 100644
index 0000000000..5766f1d0c3
--- /dev/null
+++ b/test-integration/utils/integrationutils/mock_cosmos_testinfo.go
@@ -0,0 +1,67 @@
+// Copyright 2025 Microsoft Corporation
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package integrationutils
+
+import (
+	"context"
+	"path"
+	"testing"
+
+	"github.com/Azure/ARO-HCP/internal/database"
+	"github.com/Azure/ARO-HCP/internal/databasetesting"
+	"github.com/Azure/ARO-HCP/internal/utils"
+)
+
+type MockCosmosIntegrationTestInfo struct {
+	ArtifactsDir string
+
+	MockDBClient *databasetesting.MockDBClient
+}
+
+func NewMockCosmosFromTestingEnv(ctx context.Context, t *testing.T) (StorageIntegrationTestInfo, error) {
+	mockDBClient := databasetesting.NewMockDBClient()
+
+	testInfo := &MockCosmosIntegrationTestInfo{
+		ArtifactsDir: path.Join(getArtifactDir(), t.Name()),
+		MockDBClient: mockDBClient,
+	}
+	return testInfo, nil
+}
+
+func (m *MockCosmosIntegrationTestInfo) CosmosClient() database.DBClient {
+	return m.MockDBClient
+}
+
+func (m *MockCosmosIntegrationTestInfo) LoadContent(ctx context.Context, content []byte) error {
+	return m.MockDBClient.LoadContent(ctx, content)
+}
+
+func (m *MockCosmosIntegrationTestInfo) ListAllDocuments(ctx context.Context) ([]*database.TypedDocument, error) {
+	return m.MockDBClient.ListAllDocuments(ctx)
+}
+
+func (m *MockCosmosIntegrationTestInfo) Cleanup(ctx context.Context) {
+	logger := utils.LoggerFromContext(ctx)
+
+	// Save all database content before deleting
+	if err := saveAllDatabaseContent(ctx, m.MockDBClient, m.ArtifactsDir); err != nil {
+		logger.Error("Failed to save database content", "error", err)
+		// Continue with deletion even if saving fails
+	}
+}
+
+func (m *MockCosmosIntegrationTestInfo) GetArtifactDir() string {
+	return m.ArtifactsDir
+}
diff --git a/test-integration/utils/integrationutils/mutation_test_utils.go b/test-integration/utils/integrationutils/mutation_test_utils.go
index 39a8a0dc95..33a9ba6ff1 100644
--- a/test-integration/utils/integrationutils/mutation_test_utils.go
+++ b/test-integration/utils/integrationutils/mutation_test_utils.go
@@ -88,7 +88,7 @@ type GenericMutationTest struct {
 
 func (h *GenericMutationTest) Initialize(ctx context.Context, testInfo *FrontendIntegrationTestInfo) error {
 	if h.initialCosmosState != nil {
-		err := testInfo.CreateInitialCosmosContent(ctx, h.initialCosmosState)
+		err := LoadAllContent(ctx, testInfo, h.initialCosmosState)
 		if err != nil {
 			return err
 		}
diff --git a/test-integration/utils/integrationutils/utils.go b/test-integration/utils/integrationutils/utils.go
index 690639d77a..329f3ce6cb 100644
--- a/test-integration/utils/integrationutils/utils.go
+++ b/test-integration/utils/integrationutils/utils.go
@@ -34,12 +34,28 @@ import (
 	"github.com/Azure/ARO-HCP/internal/utils"
 )
 
+func WithAndWithoutCosmos(t *testing.T, testFn func(t *testing.T, withMock bool)) {
+	t.Run("WithMock", func(t *testing.T) {
+		testFn(t, true)
+	})
+
+	if HasCosmos() {
+		t.Run("WithCosmos", func(t *testing.T) {
+			testFn(t, false)
+		})
+	}
+}
+
 func SkipIfNotSimulationTesting(t *testing.T) {
 	if os.Getenv("FRONTEND_SIMULATION_TESTING") != "true" {
 		t.Skip("Skipping test")
 	}
 }
 
+func HasCosmos() bool {
+	return os.Getenv("FRONTEND_SIMULATION_TESTING") == "true"
+}
+
 var (
 	artifactDir     string
 	artifactDirInit sync.Once
@@ -60,8 +76,20 @@ func getArtifactDir() string {
 	return artifactDir
 }
 
-func NewFrontendFromTestingEnv(ctx context.Context, t *testing.T) (*frontend.Frontend, *FrontendIntegrationTestInfo, error) {
-	cosmosTestEnv, err := NewCosmosFromTestingEnv(ctx, t)
+// NewFrontendFromMockDB creates a new Frontend using a mock database client.
+// This allows tests to run without requiring a Cosmos DB emulator.
+func NewFrontendFromMockDB(ctx context.Context, t *testing.T) (*frontend.Frontend, *FrontendIntegrationTestInfo, error) {
+	return NewFrontendFromTestingEnv(ctx, t, true)
+}
+
+func NewFrontendFromTestingEnv(ctx context.Context, t *testing.T, withMock bool) (*frontend.Frontend, *FrontendIntegrationTestInfo, error) {
+	var storageIntegrationTestInfo StorageIntegrationTestInfo
+	var err error
+	if withMock {
+		storageIntegrationTestInfo, err = NewMockCosmosFromTestingEnv(ctx, t)
+	} else {
+		storageIntegrationTestInfo, err = NewCosmosFromTestingEnv(ctx, t)
+	}
 	if err != nil {
 		return nil, nil, err
 	}
@@ -85,14 +113,14 @@ func NewFrontendFromTestingEnv(ctx context.Context, t *testing.T) (*frontend.Fro
 
 	metricsRegistry := prometheus.NewRegistry()
 
-	clusterServiceMockInfo := NewClusterServiceMock(t, cosmosTestEnv.ArtifactsDir)
+	clusterServiceMockInfo := NewClusterServiceMock(t, storageIntegrationTestInfo.GetArtifactDir())
 
-	aroHCPFrontend := frontend.NewFrontend(logger, listener, metricsListener, metricsRegistry, cosmosTestEnv.DBClient, clusterServiceMockInfo.MockClusterServiceClient, noOpAuditClient, "fake-location")
+	aroHCPFrontend := frontend.NewFrontend(logger, listener, metricsListener, metricsRegistry, storageIntegrationTestInfo.CosmosClient(), clusterServiceMockInfo.MockClusterServiceClient, noOpAuditClient, "fake-location")
 
 	testInfo := &FrontendIntegrationTestInfo{
-		CosmosIntegrationTestInfo: cosmosTestEnv,
-		ClusterServiceMock:        clusterServiceMockInfo,
-		ArtifactsDir:              cosmosTestEnv.ArtifactsDir,
-		FrontendURL:               fmt.Sprintf("http://%s", listener.Addr().String()),
+		StorageIntegrationTestInfo: storageIntegrationTestInfo,
+		ClusterServiceMock:         clusterServiceMockInfo,
+		ArtifactsDir:               storageIntegrationTestInfo.GetArtifactDir(),
+		FrontendURL:                fmt.Sprintf("http://%s", listener.Addr().String()),
 	}
 	return aroHCPFrontend, testInfo, nil
 }
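Reviewer note, not part of the patch: a minimal usage sketch of how the new WithAndWithoutCosmos helper and the withMock parameter of NewFrontendFromTestingEnv appear intended to be combined in a test. The test name and body are hypothetical; only the helper signatures, Cleanup, and FrontendURL come from the changes above.

func TestExample(t *testing.T) {
	integrationutils.WithAndWithoutCosmos(t, func(t *testing.T, withMock bool) {
		ctx := context.Background()
		// Builds the frontend against either the mock DB client or a real Cosmos DB,
		// depending on withMock.
		_, testInfo, err := integrationutils.NewFrontendFromTestingEnv(ctx, t, withMock)
		if err != nil {
			t.Fatal(err)
		}
		defer testInfo.Cleanup(ctx)

		// Exercise the frontend via testInfo.FrontendURL here (illustrative only).
	})
}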