diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index 23d505bc4..4a2f209c4 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -11,12 +11,15 @@ import shutil import tempfile from copy import deepcopy -from typing import Dict, List, Optional, Union, Tuple +from typing import Dict, List, Optional, Tuple, Union import pandas +import yaml from jsonschema import ValidationError, validate +from ads.common import oci_client as oc from ads.common import utils +from ads.common.extended_enum import ExtendedEnumMeta from ads.common.object_storage_details import ObjectStorageDetails from ads.config import ( COMPARTMENT_OCID, @@ -41,7 +44,6 @@ ModelProvenanceNotFoundError, OCIDataScienceModel, ) -from ads.common import oci_client as oc logger = logging.getLogger(__name__) @@ -78,6 +80,345 @@ class InvalidArtifactType(Exception): # pragma: no cover pass +class CustomerNotificationType(str, metaclass=ExtendedEnumMeta): + NONE = "NONE" + ALL = "ALL" + ON_FAILURE = "ON_FAILURE" + ON_SUCCESS = "ON_SUCCESS" + + +class SettingStatus(str, metaclass=ExtendedEnumMeta): + """Enum to represent the status of retention settings.""" + + PENDING = "PENDING" + SUCCEEDED = "SUCCEEDED" + FAILED = "FAILED" + + +class ModelBackupSetting: + """ + Class that represents Model Backup Setting Details Metadata. + + Methods + ------- + to_dict(self) -> Dict: + Serializes the backup settings into a dictionary. + from_dict(cls, data: Dict) -> 'ModelBackupSetting': + Constructs backup settings from a dictionary. + to_json(self) -> str: + Serializes the backup settings into a JSON string. + from_json(cls, json_str: str) -> 'ModelBackupSetting': + Constructs backup settings from a JSON string. + to_yaml(self) -> str: + Serializes the backup settings into a YAML string. + validate(self) -> bool: + Validates the backup settings details. 
+    """
+
+    def __init__(
+        self,
+        is_backup_enabled: Optional[bool] = None,
+        backup_region: Optional[str] = None,
+        customer_notification_type: Optional[CustomerNotificationType] = None,
+    ):
+        self.is_backup_enabled = (
+            is_backup_enabled if is_backup_enabled is not None else False
+        )
+        self.backup_region = backup_region
+        self.customer_notification_type = (
+            customer_notification_type or CustomerNotificationType.NONE
+        )
+
+    def to_dict(self) -> Dict:
+        """Serializes the backup settings into a dictionary."""
+        return {
+            "is_backup_enabled": self.is_backup_enabled,
+            "backup_region": self.backup_region,
+            "customer_notification_type": self.customer_notification_type,
+        }
+
+    @classmethod
+    def from_dict(cls, data: Dict) -> "ModelBackupSetting":
+        """Constructs backup settings from a dictionary."""
+        return cls(
+            is_backup_enabled=data.get("is_backup_enabled"),
+            backup_region=data.get("backup_region"),
+            customer_notification_type=CustomerNotificationType(
+                data.get("customer_notification_type")
+            )
+            if data.get("customer_notification_type") else None,
+        )
+
+    def to_json(self) -> str:
+        """Serializes the backup settings into a JSON string."""
+        return json.dumps(self.to_dict())
+
+    @classmethod
+    def from_json(cls, json_str) -> "ModelBackupSetting":
+        """Constructs backup settings from a JSON string or dictionary."""
+        data = json.loads(json_str) if isinstance(json_str, str) else json_str
+
+        return cls.from_dict(data)
+
+    def to_yaml(self) -> str:
+        """Serializes the backup settings into a YAML string."""
+        return yaml.dump(self.to_dict())
+
+    def validate(self) -> bool:
+        """Validates the backup settings details. 
Returns True if valid, False otherwise.""" + return all([ + isinstance(self.is_backup_enabled, bool), + not self.backup_region or isinstance(self.backup_region, str), + isinstance(self.customer_notification_type, str) and self.customer_notification_type in + CustomerNotificationType.values() + ]) + + def __repr__(self): + return self.to_yaml() + + +class ModelRetentionSetting: + """ + Class that represents Model Retention Setting Details Metadata. + + Methods + ------- + to_dict(self) -> Dict: + Serializes the retention settings into a dictionary. + from_dict(cls, data: Dict) -> 'ModelRetentionSetting': + Constructs retention settings from a dictionary. + to_json(self) -> str: + Serializes the retention settings into a JSON string. + from_json(cls, json_str: str) -> 'ModelRetentionSetting': + Constructs retention settings from a JSON string. + to_yaml(self) -> str: + Serializes the retention settings into a YAML string. + validate(self) -> bool: + Validates the retention settings details. 
+    """
+
+    def __init__(
+        self,
+        archive_after_days: Optional[int] = None,
+        delete_after_days: Optional[int] = None,
+        customer_notification_type: Optional[CustomerNotificationType] = None,
+    ):
+        self.archive_after_days = archive_after_days
+        self.delete_after_days = delete_after_days
+        self.customer_notification_type = (
+            customer_notification_type or CustomerNotificationType.NONE
+        )
+
+    def to_dict(self) -> Dict:
+        """Serializes the retention settings into a dictionary."""
+        return {
+            "archive_after_days": self.archive_after_days,
+            "delete_after_days": self.delete_after_days,
+            "customer_notification_type": self.customer_notification_type,
+        }
+
+    @classmethod
+    def from_dict(cls, data: Dict) -> "ModelRetentionSetting":
+        """Constructs retention settings from a dictionary."""
+        return cls(
+            archive_after_days=data.get("archive_after_days"),
+            delete_after_days=data.get("delete_after_days"),
+            customer_notification_type=CustomerNotificationType(
+                data.get("customer_notification_type")
+            )
+            if data.get("customer_notification_type") else None,
+        )
+
+    def to_json(self) -> str:
+        """Serializes the retention settings into a JSON string."""
+        return json.dumps(self.to_dict())
+
+    @classmethod
+    def from_json(cls, json_str) -> "ModelRetentionSetting":
+        """Constructs retention settings from a JSON string."""
+        data = json.loads(json_str) if isinstance(json_str, str) else json_str
+        return cls.from_dict(data)
+
+    def to_yaml(self) -> str:
+        """Serializes the retention settings into a YAML string."""
+        return yaml.dump(self.to_dict())
+
+    def validate(self) -> bool:
+        """Validates the retention settings details. 
Returns True if valid, False otherwise.""" + return all([ + self.archive_after_days is None or ( + isinstance(self.archive_after_days, int) and self.archive_after_days >= 0), + self.delete_after_days is None or (isinstance(self.delete_after_days, int) and self.delete_after_days >= 0), + isinstance(self.customer_notification_type, str) and self.customer_notification_type in + CustomerNotificationType.values() + ]) + + def __repr__(self): + return self.to_yaml() + + +class ModelRetentionOperationDetails: + """ + Class that represents Model Retention Operation Details Metadata. + + Methods + ------- + to_dict(self) -> Dict: + Serializes the retention operation details into a dictionary. + from_dict(cls, data: Dict) -> 'ModelRetentionOperationDetails': + Constructs retention operation details from a dictionary. + to_json(self) -> str: + Serializes the retention operation details into a JSON string. + from_json(cls, json_str: str) -> 'ModelRetentionOperationDetails': + Constructs retention operation details from a JSON string. + to_yaml(self) -> str: + Serializes the retention operation details into a YAML string. + validate(self) -> bool: + Validates the retention operation details. 
+    """
+
+    def __init__(
+        self,
+        archive_state: Optional[SettingStatus] = None,
+        archive_state_details: Optional[str] = None,
+        delete_state: Optional[SettingStatus] = None,
+        delete_state_details: Optional[str] = None,
+        time_archival_scheduled: Optional[int] = None,
+        time_deletion_scheduled: Optional[int] = None,
+    ):
+        self.archive_state = archive_state
+        self.archive_state_details = archive_state_details
+        self.delete_state = delete_state
+        self.delete_state_details = delete_state_details
+        self.time_archival_scheduled = time_archival_scheduled
+        self.time_deletion_scheduled = time_deletion_scheduled
+
+    def to_dict(self) -> Dict:
+        """Serializes the retention operation details into a dictionary."""
+        return {
+            "archive_state": self.archive_state or None,
+            "archive_state_details": self.archive_state_details,
+            "delete_state": self.delete_state or None,
+            "delete_state_details": self.delete_state_details,
+            "time_archival_scheduled": self.time_archival_scheduled,
+            "time_deletion_scheduled": self.time_deletion_scheduled,
+        }
+
+    @classmethod
+    def from_dict(cls, data: Dict) -> "ModelRetentionOperationDetails":
+        """Constructs retention operation details from a dictionary."""
+        return cls(
+            archive_state=SettingStatus(data.get("archive_state")) if data.get("archive_state") else None,
+            archive_state_details=data.get("archive_state_details"),
+            delete_state=SettingStatus(data.get("delete_state")) if data.get("delete_state") else None,
+            delete_state_details=data.get("delete_state_details"),
+            time_archival_scheduled=data.get("time_archival_scheduled"),
+            time_deletion_scheduled=data.get("time_deletion_scheduled"),
+        )
+
+    def to_json(self) -> str:
+        """Serializes the retention operation details into a JSON string."""
+        return json.dumps(self.to_dict())
+
+    @classmethod
+    def from_json(cls, json_str: str) -> "ModelRetentionOperationDetails":
+        """Constructs retention operation details from a JSON string."""
+        data = json.loads(json_str)
+        return cls.from_dict(data)
+
+    def to_yaml(self) -> str:
+        """Serializes the 
retention operation details into a YAML string.""" + return yaml.dump(self.to_dict()) + + def validate(self) -> bool: + """Validates the retention operation details.""" + return all( + [ + self.archive_state is None or self.archive_state in SettingStatus.values(), + self.delete_state is None or self.delete_state in SettingStatus.values(), + self.time_archival_scheduled is None + or isinstance(self.time_archival_scheduled, int), + self.time_deletion_scheduled is None + or isinstance(self.time_deletion_scheduled, int), + ] + ) + + def __repr__(self): + return self.to_yaml() + + +class ModelBackupOperationDetails: + """ + Class that represents Model Backup Operation Details Metadata. + + Methods + ------- + to_dict(self) -> Dict: + Serializes the backup operation details into a dictionary. + from_dict(cls, data: Dict) -> 'ModelBackupOperationDetails': + Constructs backup operation details from a dictionary. + to_json(self) -> str: + Serializes the backup operation details into a JSON string. + from_json(cls, json_str: str) -> 'ModelBackupOperationDetails': + Constructs backup operation details from a JSON string. + to_yaml(self) -> str: + Serializes the backup operation details into a YAML string. + validate(self) -> bool: + Validates the backup operation details. 
+ """ + + def __init__( + self, + backup_state: Optional[SettingStatus] = None, + backup_state_details: Optional[str] = None, + time_last_backup: Optional[int] = None, + ): + self.backup_state = backup_state + self.backup_state_details = backup_state_details + self.time_last_backup = time_last_backup + + def to_dict(self) -> Dict: + """Serializes the backup operation details into a dictionary.""" + return { + "backup_state": self.backup_state or None, + "backup_state_details": self.backup_state_details, + "time_last_backup": self.time_last_backup, + } + + @classmethod + def from_dict(cls, data: Dict) -> "ModelBackupOperationDetails": + """Constructs backup operation details from a dictionary.""" + return cls( + backup_state=SettingStatus(data.get("backup_state")) or None, + backup_state_details=data.get("backup_state_details"), + time_last_backup=data.get("time_last_backup"), + ) + + def to_json(self) -> str: + """Serializes the backup operation details into a JSON string.""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> "ModelBackupOperationDetails": + """Constructs backup operation details from a JSON string.""" + data = json.loads(json_str) + return cls.from_dict(data) + + def to_yaml(self) -> str: + """Serializes the backup operation details into a YAML string.""" + return yaml.dump(self.to_dict()) + + def validate(self) -> bool: + """Validates the backup operation details.""" + return not ( + (self.backup_state is not None and self.backup_state not in SettingStatus.values()) or + (self.time_last_backup is not None and not isinstance(self.time_last_backup, int)) + ) + + def __repr__(self): + return self.to_yaml() + + class DataScienceModel(Builder): """Represents a Data Science Model. @@ -120,6 +461,14 @@ class DataScienceModel(Builder): Model version id model_file_description: dict Contains object path details for models created by reference. 
+ backup_setting: ModelBackupSetting + The value to assign to the backup_setting property of this CreateModelDetails. + retention_setting: ModelRetentionSetting + The value to assign to the retention_setting property of this CreateModelDetails. + retention_operation_details: ModelRetentionOperationDetails + The value to assign to the retention_operation_details property for the Model. + backup_operation_details: ModelBackupOperationDetails + The value to assign to the backup_operation_details property for the Model. Methods ------- @@ -217,7 +566,12 @@ class DataScienceModel(Builder): CONST_MODEL_VERSION_ID = "versionId" CONST_TIME_CREATED = "timeCreated" CONST_LIFECYCLE_STATE = "lifecycleState" + CONST_LIFECYCLE_DETAILS = "lifecycleDetails" CONST_MODEL_FILE_DESCRIPTION = "modelDescription" + CONST_BACKUP_SETTING = "backupSetting" + CONST_RETENTION_SETTING = "retentionSetting" + CONST_BACKUP_OPERATION_DETAILS = "backupOperationDetails" + CONST_RETENTION_OPERATION_DETAILS = "retentionOperationDetails" attribute_map = { CONST_ID: "id", @@ -239,7 +593,12 @@ class DataScienceModel(Builder): CONST_MODEL_VERSION_ID: "version_id", CONST_TIME_CREATED: "time_created", CONST_LIFECYCLE_STATE: "lifecycle_state", + CONST_LIFECYCLE_DETAILS: "lifecycle_details", CONST_MODEL_FILE_DESCRIPTION: "model_description", + CONST_BACKUP_SETTING: "backup_setting", + CONST_RETENTION_SETTING: "retention_setting", + CONST_BACKUP_OPERATION_DETAILS: "backup_operation_details", + CONST_RETENTION_OPERATION_DETAILS: "retention_operation_details", } def __init__(self, spec: Dict = None, **kwargs) -> None: @@ -308,6 +667,28 @@ def lifecycle_state(self) -> Union[str, None]: return self.dsc_model.status return None + @property + def lifecycle_details(self) -> str: + """ + Gets the lifecycle_details of this DataScienceModel. + Details about the lifecycle state of the model. + + :return: The lifecycle_details of this DataScienceModel. 
+ :rtype: str + """ + return self.get_spec(self.CONST_LIFECYCLE_DETAILS) + + @lifecycle_details.setter + def lifecycle_details(self, lifecycle_details: str) -> "DataScienceModel": + """ + Sets the lifecycle_details of this DataScienceModel. + Details about the lifecycle state of the model. + + :param lifecycle_details: The lifecycle_details of this DataScienceModel. + :type: str + """ + return self.set_spec(self.CONST_LIFECYCLE_DETAILS, lifecycle_details) + @property def kind(self) -> str: """The kind of the object as showing in a YAML.""" @@ -685,6 +1066,85 @@ def with_model_file_description( return self.set_spec(self.CONST_MODEL_FILE_DESCRIPTION, json_data) + @property + def retention_setting(self) -> ModelRetentionSetting: + """ + Gets the retention_setting of this model. + + :return: The retention_setting of this model. + :rtype: RetentionSetting + """ + return self.get_spec(self.CONST_RETENTION_SETTING) + + def with_retention_setting( + self, retention_setting: Union[Dict, ModelRetentionSetting] + ) -> "DataScienceModel": + """ + Sets the retention setting details for the model. + + Parameters + ---------- + retention_setting : Union[Dict, RetentionSetting] + The retention setting details for the model. Can be provided as either a dictionary or + an instance of the `RetentionSetting` class. + + Returns + ------- + DataScienceModel + The `DataScienceModel` instance (self) for method chaining. + """ + return self.set_spec(self.CONST_RETENTION_SETTING, retention_setting) + + @property + def backup_setting(self) -> ModelBackupSetting: + """ + Gets the backup_setting of this model. + + :return: The backup_setting of this model. + :rtype: BackupSetting + """ + return self.get_spec(self.CONST_BACKUP_SETTING) + + def with_backup_setting( + self, backup_setting: Union[Dict, ModelBackupSetting] + ) -> "DataScienceModel": + """ + Sets the model's backup setting details. 
+ + Parameters + ---------- + backup_setting : Union[Dict, BackupSetting] + The backup setting details for the model. This can be passed as either a dictionary or + an instance of the `BackupSetting` class. + + Returns + ------- + DataScienceModel + The `DataScienceModel` instance (self) for method chaining. + """ + + return self.set_spec(self.CONST_BACKUP_SETTING, backup_setting) + + @property + def retention_operation_details(self) -> ModelRetentionOperationDetails: + """ + Gets the retention_operation_details of this Model using the spec constant. + + :return: The retention_operation_details of this Model. + :rtype: ModelRetentionOperationDetails + """ + return self.get_spec(self.CONST_RETENTION_OPERATION_DETAILS) + + @property + def backup_operation_details(self) -> "ModelBackupOperationDetails": + """ + Gets the backup_operation_details of this Model using the spec constant. + + :return: The backup_operation_details of this Model. + :rtype: ModelBackupOperationDetails + """ + return self.get_spec(self.CONST_BACKUP_OPERATION_DETAILS) + def create(self, **kwargs) -> "DataScienceModel": """Creates datascience model. @@ -907,6 +1367,44 @@ def _remove_file_description_artifact(self): if self.local_copy_dir: shutil.rmtree(self.local_copy_dir, ignore_errors=True) + def restore_model( + self, + restore_model_for_hours_specified: Optional[int] = None, + ) -> None: + """ + Restore archived model artifact. + + Parameters + ---------- + + restore_model_for_hours_specified : Optional[int] + Duration in hours for which the archived model is available for access. + + Returns + ------- + None + + Raises + ------ + ValueError + If the model ID is invalid or if any parameters are incorrect. + """ + # Validate model_id + if not self.id: + logger.warn( + "Model needs to be saved to the model catalog before it can be restored." 
+ ) + return + + # Optional: Validate restore_model_for_hours_specified + if restore_model_for_hours_specified is not None and ( + not isinstance(restore_model_for_hours_specified, int) or restore_model_for_hours_specified <= 0): + raise ValueError("restore_model_for_hours_specified must be a positive integer.") + + self.dsc_model.restore_archived_model_artifact( + restore_model_for_hours_specified=restore_model_for_hours_specified, + ) + def download_artifact( self, target_dir: str, @@ -1039,15 +1537,16 @@ def update(self, **kwargs) -> "DataScienceModel": self.dsc_model = self._to_oci_dsc_model(**kwargs).update() logger.debug(f"Updating a model provenance metadata {self.provenance_metadata}") - try: - self.dsc_model.get_model_provenance() - self.dsc_model.update_model_provenance( - self.provenance_metadata._to_oci_metadata() - ) - except ModelProvenanceNotFoundError: - self.dsc_model.create_model_provenance( - self.provenance_metadata._to_oci_metadata() - ) + if self.provenance_metadata: + try: + self.dsc_model.get_model_provenance() + self.dsc_model.update_model_provenance( + self.provenance_metadata._to_oci_metadata() + ) + except ModelProvenanceNotFoundError: + self.dsc_model.create_model_provenance( + self.provenance_metadata._to_oci_metadata() + ) return self.sync() @@ -1219,6 +1718,10 @@ def _update_from_oci_dsc_model( self.CONST_OUTPUT_SCHEMA: [Schema.from_json, json.loads], self.CONST_CUSTOM_METADATA: ModelCustomMetadata._from_oci_metadata, self.CONST_DEFINED_METADATA: ModelTaxonomyMetadata._from_oci_metadata, + self.CONST_BACKUP_SETTING: ModelBackupSetting.to_dict, + self.CONST_RETENTION_SETTING: ModelRetentionSetting.to_dict, + self.CONST_BACKUP_OPERATION_DETAILS: ModelBackupOperationDetails.to_dict, + self.CONST_RETENTION_OPERATION_DETAILS: ModelRetentionOperationDetails.to_dict } # Update the main properties @@ -1533,7 +2036,7 @@ def add_artifact( "Both 'prefix' and 'files' cannot be provided. Please provide only one." 
) - if self.model_file_description == None: + if self.model_file_description is None: self.empty_json = { "version": "1.0", "type": "modelOSSReferenceDescription", @@ -1583,7 +2086,7 @@ def list_obj_versions_unpaginated(): # Fetch object details and put it into the objects variable objectStorageList = [] - if files == None: + if files is None: objectStorageList = list_obj_versions_unpaginated() else: for fileName in files: @@ -1671,7 +2174,7 @@ def remove_artifact( if (not namespace) or (not bucket): raise ValueError("Both 'namespace' and 'bucket' must be provided.") - def findModelIdx(): + def find_model_idx(): for idx, model in enumerate(self.model_file_description["models"]): if ( model["namespace"], @@ -1681,10 +2184,10 @@ def findModelIdx(): return idx return -1 - if self.model_file_description == None: + if self.model_file_description is None: return - modelSearchIdx = findModelIdx() + modelSearchIdx = find_model_idx() if modelSearchIdx == -1: return else: diff --git a/ads/model/service/oci_datascience_model.py b/ads/model/service/oci_datascience_model.py index b5c1541b2..44ba091a6 100644 --- a/ads/model/service/oci_datascience_model.py +++ b/ads/model/service/oci_datascience_model.py @@ -278,6 +278,34 @@ def get_artifact_info(self) -> Dict: raise ModelArtifactNotFoundError() return {} + @check_for_model_id( + msg="Model needs to be restored before the archived artifact content can be accessed." + ) + def restore_archived_model_artifact( + self, restore_model_for_hours_specified: Optional[int] = None + ) -> None: + """Restores the archived model artifact. + + Parameters + ---------- + model_id : str + The unique identifier of the model to restore. + restore_model_for_hours_specified : Optional[int] + The duration (in hours) for which the model should be restored. + + Returns + ------- + None + + Raises + ------ + ModelArtifactNotFoundError + If model artifact not found. 
+ """ + return self.client.restore_archived_model_artifact( + model_id=self.id, + restore_model_for_hours_specified=restore_model_for_hours_specified).headers["opc-work-request-id"] + @check_for_model_id( msg="Model needs to be saved to the Model Catalog before the artifact content can be read." ) diff --git a/docs/source/user_guide/model_catalog/model_catalog.rst b/docs/source/user_guide/model_catalog/model_catalog.rst index aaebaa87c..4a8732915 100644 --- a/docs/source/user_guide/model_catalog/model_catalog.rst +++ b/docs/source/user_guide/model_catalog/model_catalog.rst @@ -1394,6 +1394,10 @@ These are the metadata attributes: * ``schema_output``: Output schema. However, this field can't be updated. * ``time_created``: The date and time that the model artifacts were stored in the model catalog. * ``user_name``: User name of the account that created the entry. +* ``backup_setting``: The backup setting property of the model +* ``retention_setting``: The retention setting property of the model +* ``retention_operation_details``: The retention operation details of the model +* ``backup_operation_details``: The back up operation details of the model The ``provenance_metadata`` attribute returns a `ModelProvenance `__ object. This object has the attributes to access the metadata. @@ -1545,3 +1549,8 @@ In the next example, the model that was stored in the model catalog as part of t .. code-block:: python3 mc.delete_model(mc_model.id) + +Restore Archived Model +********************** + +The ``.restore_model()`` method of Model catalog restores the model for a specified number of hours. Restored models can be downloaded for 1-240 hours, defaulting to 24 hours. 
\ No newline at end of file diff --git a/tests/unitary/default_setup/model/test_datascience_model.py b/tests/unitary/default_setup/model/test_datascience_model.py index 9cdf06ca7..f331576db 100644 --- a/tests/unitary/default_setup/model/test_datascience_model.py +++ b/tests/unitary/default_setup/model/test_datascience_model.py @@ -26,7 +26,7 @@ ModelArtifactSizeError, BucketNotVersionedError, ModelFileDescriptionError, - InvalidArtifactType, + InvalidArtifactType, ModelRetentionSetting, ModelBackupSetting, ) from ads.model.model_metadata import ( ModelCustomMetadata, @@ -44,7 +44,7 @@ from ads.config import AQUA_SERVICE_MODELS_BUCKET as SERVICE_MODELS_BUCKET MODEL_OCID = "ocid1.datasciencemodel.oc1.iad." - + OCI_MODEL_PAYLOAD = { "id": MODEL_OCID, "compartment_id": "ocid1.compartment.oc1..", @@ -72,6 +72,16 @@ {"key": "Hyperparameters"}, {"key": "ArtifactTestResults"}, ], + "backup_setting": { + "is_backup_enabled": True, + "backup_region": "us-phoenix-1", + "customer_notification_type": "ALL" + }, + "retention_setting": { + "archive_after_days": 30, + "delete_after_days": 90, + "customer_notification_type": "ALL" + }, "input_schema": '{"schema": [{"dtype": "int64", "feature_type": "Integer", "name": 0, "domain": {"values": "", "stats": {}, "constraints": []}, "required": true, "description": "0", "order": 0}], "version": "1.1"}', "output_schema": '{"schema": [{"dtype": "int64", "feature_type": "Integer", "name": 0, "domain": {"values": "", "stats": {}, "constraints": []}, "required": true, "description": "0", "order": 0}], "version": "1.1"}', } @@ -148,6 +158,16 @@ "training_id": None, "artifact_dir": "test_script_dir", }, + "backupSetting": { + "is_backup_enabled": True, + "backup_region": "us-phoenix-1", + "customer_notification_type": "ALL" + }, + "retentionSetting": { + "archive_after_days": 30, + "delete_after_days": 90, + "customer_notification_type": "ALL" + }, "artifact": "ocid1.datasciencemodel.oc1.iad..zip", } @@ -307,6 +327,8 @@ def 
test_with_methods_1(self, mock_load_default_properties): .with_defined_metadata_list(self.payload["definedMetadataList"]) .with_provenance_metadata(self.payload["provenanceMetadata"]) .with_artifact(self.payload["artifact"]) + .with_backup_setting(self.payload['backupSetting']) + .with_retention_setting(self.payload['retentionSetting']) ) assert self.prepare_dict(dsc_model.to_dict()["spec"]) == self.prepare_dict( self.payload @@ -334,6 +356,8 @@ def test_with_methods_2(self): ModelProvenanceMetadata.from_dict(self.payload["provenanceMetadata"]) ) .with_artifact(self.payload["artifact"]) + .with_backup_setting(ModelBackupSetting.from_dict(self.payload['backupSetting'])) + .with_retention_setting(ModelRetentionSetting.from_dict(self.payload['retentionSetting'])) ) assert self.prepare_dict(dsc_model.to_dict()["spec"]) == self.prepare_dict( self.payload @@ -617,6 +641,16 @@ def test__update_from_oci_dsc_model( {"key": "Hyperparameters", "value": "new test"}, {"key": "ArtifactTestResults", "value": "new test"}, ], + "backup_setting": { + "is_backup_enabled": True, + "backup_region": "us-phoenix-1", + "customer_notification_type": "ALL", + }, + "retention_setting": { + "archive_after_days": 30, + "delete_after_days": 90, + "customer_notification_type": "ALL", + }, "input_schema": '{"schema": [{"dtype": "int64", "feature_type": "Integer", "name": 1, "domain": {"values": "", "stats": {}, "constraints": []}, "required": true, "description": "0", "order": 0}], "version": "1.1"}', "output_schema": '{"schema": [{"dtype": "int64", "feature_type": "Integer", "name": 1, "domain": {"values": "", "stats": {}, "constraints": []}, "required": true, "description": "0", "order": 0}], "version": "1.1"}', } @@ -681,6 +715,16 @@ def test__update_from_oci_dsc_model( {"key": "ArtifactTestResults", "value": "new test"}, ] }, + "backupSetting": { + "is_backup_enabled": True, + "backup_region": "us-phoenix-1", + "customer_notification_type": "ALL", + }, + "retentionSetting": { + 
"archive_after_days": 30, + "delete_after_days": 90, + "customer_notification_type": "ALL", + }, "provenanceMetadata": { "git_branch": "master", "git_commit": "7c8c8502896ba36837f15037b67e05a3cf9722c7", diff --git a/tests/unitary/default_setup/model/test_model_metadata.py b/tests/unitary/default_setup/model/test_model_metadata.py index f38af703a..6411c2355 100644 --- a/tests/unitary/default_setup/model/test_model_metadata.py +++ b/tests/unitary/default_setup/model/test_model_metadata.py @@ -31,8 +31,10 @@ ModelTaxonomyMetadata, ModelTaxonomyMetadataItem, MetadataTaxonomyKeys, - UseCaseType, + UseCaseType ) +from ads.model.datascience_model import ModelRetentionSetting, CustomerNotificationType, SettingStatus, \ + ModelBackupSetting, ModelRetentionOperationDetails, ModelBackupOperationDetails from oci.data_science.models import Metadata as OciMetadataItem try: @@ -1012,3 +1014,413 @@ def test_to_json_file_success(self): ) open_mock.assert_called_with(mock_file_path, mode="w", **mock_storage_options) open_mock.return_value.write.assert_called_with(metadata_taxonomy.to_json()) + +class TestModelBackupSetting: + """Unit tests for ModelBackupSetting class.""" + + def test_initialization(self): + """Test default initialization of ModelBackupSetting.""" + backup_setting = ModelBackupSetting() + assert backup_setting.is_backup_enabled == False + assert backup_setting.backup_region is None + assert backup_setting.customer_notification_type == CustomerNotificationType.NONE + + # Test with parameters + backup_setting = ModelBackupSetting( + is_backup_enabled=True, + backup_region="us-west-1", + customer_notification_type=CustomerNotificationType.ALL + ) + assert backup_setting.is_backup_enabled == True + assert backup_setting.backup_region == "us-west-1" + assert backup_setting.customer_notification_type == CustomerNotificationType.ALL + + def test_to_dict(self): + """Test conversion to dictionary.""" + backup_setting = ModelBackupSetting( + is_backup_enabled=True, + 
backup_region="us-west-1", + customer_notification_type=CustomerNotificationType.ALL + ) + expected_dict = { + "is_backup_enabled": True, + "backup_region": "us-west-1", + "customer_notification_type": "ALL" + } + assert backup_setting.to_dict() == expected_dict + + def test_from_dict(self): + """Test constructing from dictionary.""" + data = { + "is_backup_enabled": True, + "backup_region": "us-west-1", + "customer_notification_type": "ALL" + } + backup_setting = ModelBackupSetting.from_dict(data) + assert backup_setting.is_backup_enabled == True + assert backup_setting.backup_region == "us-west-1" + assert backup_setting.customer_notification_type == CustomerNotificationType.ALL + + def test_to_json(self): + """Test conversion to JSON.""" + backup_setting = ModelBackupSetting( + is_backup_enabled=True, + backup_region="us-west-1", + customer_notification_type=CustomerNotificationType.ALL + ) + expected_json = json.dumps({ + "is_backup_enabled": True, + "backup_region": "us-west-1", + "customer_notification_type": "ALL" + }) + assert backup_setting.to_json() == expected_json + + def test_from_json(self): + """Test constructing from JSON.""" + json_str = json.dumps({ + "is_backup_enabled": True, + "backup_region": "us-west-1", + "customer_notification_type": "ALL" + }) + backup_setting = ModelBackupSetting.from_json(json_str) + assert backup_setting.is_backup_enabled == True + assert backup_setting.backup_region == "us-west-1" + assert backup_setting.customer_notification_type == CustomerNotificationType.ALL + + def test_to_yaml(self): + """Test conversion to YAML.""" + backup_setting = ModelBackupSetting( + is_backup_enabled=True, + backup_region="us-west-1", + customer_notification_type=CustomerNotificationType.ALL + ) + expected_yaml = yaml.dump({ + "is_backup_enabled": True, + "backup_region": "us-west-1", + "customer_notification_type": "ALL" + }) + assert backup_setting.to_yaml() == expected_yaml + + def test_validate(self): + """Test validation of backup 
settings.""" + # Valid settings + backup_setting = ModelBackupSetting( + is_backup_enabled=True, + backup_region="us-west-1", + customer_notification_type=CustomerNotificationType.ALL + ) + assert backup_setting.validate() == True + + # Invalid settings (wrong types) + backup_setting.is_backup_enabled = "Yes" # Should be boolean + assert backup_setting.validate() == False + + backup_setting.is_backup_enabled = True + backup_setting.backup_region = 123 # Should be a string + assert backup_setting.validate() == False + + backup_setting.backup_region = "us-west-1" + backup_setting.customer_notification_type = "all_notif" # Should be CustomerNotificationType Enum + assert backup_setting.validate() == False + +class TestModelRetentionSetting: + """Test cases for ModelRetentionSetting class.""" + + def test_to_dict(self): + """Test that to_dict method returns the correct dictionary.""" + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=60, customer_notification_type=CustomerNotificationType.ALL) + expected_dict = { + "archive_after_days": 30, + "delete_after_days": 60, + "customer_notification_type": "ALL" + } + assert setting.to_dict() == expected_dict + + def test_from_dict(self): + """Test that from_dict method correctly creates a ModelRetentionSetting object.""" + data = { + "archive_after_days": 30, + "delete_after_days": 60, + "customer_notification_type": "ALL" + } + setting = ModelRetentionSetting.from_dict(data) + assert setting.archive_after_days == 30 + assert setting.delete_after_days == 60 + assert setting.customer_notification_type == CustomerNotificationType.ALL + + def test_to_json(self): + """Test that to_json serializes the settings to a JSON string.""" + setting = ModelRetentionSetting( + archive_after_days=30, + delete_after_days=60, + customer_notification_type=CustomerNotificationType.ALL) + expected_json = json.dumps({ + "archive_after_days": 30, + "delete_after_days": 60, + "customer_notification_type": "ALL" + }) + assert 
setting.to_json() == expected_json + + def test_from_json(self): + """Test that from_json correctly deserializes the settings from a JSON string.""" + json_str = json.dumps({ + "archive_after_days": 30, + "delete_after_days": 60, + "customer_notification_type": "ALL" + }) + setting = ModelRetentionSetting.from_json(json_str) + assert setting.archive_after_days == 30 + assert setting.delete_after_days == 60 + assert setting.customer_notification_type == CustomerNotificationType.ALL + + def test_to_yaml(self): + """Test that to_yaml serializes the settings to a YAML string.""" + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=60, customer_notification_type=CustomerNotificationType.ALL) + expected_yaml = yaml.dump({ + "archive_after_days": 30, + "delete_after_days": 60, + "customer_notification_type": "ALL" + }) + assert setting.to_yaml() == expected_yaml + + def test_validate_valid(self): + """Test that validate method returns True for valid retention settings.""" + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=60, customer_notification_type=CustomerNotificationType.ALL) + assert setting.validate() is True + + def test_validate_invalid_days(self): + """Test that validate returns False for invalid archive or delete days.""" + setting = ModelRetentionSetting(archive_after_days=-1, delete_after_days=60, customer_notification_type=CustomerNotificationType.ALL) + assert setting.validate() is False + + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=-10, customer_notification_type=CustomerNotificationType.ALL) + assert setting.validate() is False + + def test_validate_invalid_customer_notification_type(self): + """Test that validate method returns False for an invalid notification type.""" + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=60, customer_notification_type="INVALID") + assert setting.validate() is False + +class TestModelRetentionOperationDetails: + """Test 
cases for ModelRetentionOperationDetails class.""" + + def test_to_dict(self): + """Test that to_dict method returns the correct dictionary.""" + details = ModelRetentionOperationDetails( + archive_state=SettingStatus.SUCCEEDED, + archive_state_details="Archived successfully", + delete_state=SettingStatus.PENDING, + delete_state_details="Deletion pending", + time_archival_scheduled=1633046400, + time_deletion_scheduled=1635638400 + ) + expected_dict = { + "archive_state": "SUCCEEDED", + "archive_state_details": "Archived successfully", + "delete_state": "PENDING", + "delete_state_details": "Deletion pending", + "time_archival_scheduled": 1633046400, + "time_deletion_scheduled": 1635638400 + } + assert details.to_dict() == expected_dict + + def test_from_dict(self): + """Test that from_dict method correctly creates a ModelRetentionOperationDetails object.""" + data = { + "archive_state": "SUCCEEDED", + "archive_state_details": "Archived successfully", + "delete_state": "PENDING", + "delete_state_details": "Deletion pending", + "time_archival_scheduled": 1633046400, + "time_deletion_scheduled": 1635638400 + } + details = ModelRetentionOperationDetails.from_dict(data) + assert details.archive_state == SettingStatus.SUCCEEDED + assert details.archive_state_details == "Archived successfully" + assert details.delete_state == SettingStatus.PENDING + assert details.delete_state_details == "Deletion pending" + assert details.time_archival_scheduled == 1633046400 + assert details.time_deletion_scheduled == 1635638400 + + def test_to_json(self): + """Test that to_json serializes the details to a JSON string.""" + details = ModelRetentionOperationDetails( + archive_state=SettingStatus.SUCCEEDED, + archive_state_details="Archived successfully", + delete_state=SettingStatus.PENDING, + delete_state_details="Deletion pending", + time_archival_scheduled=1633046400, + time_deletion_scheduled=1635638400 + ) + expected_json = json.dumps({ + "archive_state": "SUCCEEDED", + 
"archive_state_details": "Archived successfully", + "delete_state": "PENDING", + "delete_state_details": "Deletion pending", + "time_archival_scheduled": 1633046400, + "time_deletion_scheduled": 1635638400 + }) + assert details.to_json() == expected_json + + def test_from_json(self): + """Test that from_json correctly deserializes the details from a JSON string.""" + json_str = json.dumps({ + "archive_state": "SUCCEEDED", + "archive_state_details": "Archived successfully", + "delete_state": "PENDING", + "delete_state_details": "Deletion pending", + "time_archival_scheduled": 1633046400, + "time_deletion_scheduled": 1635638400 + }) + details = ModelRetentionOperationDetails.from_json(json_str) + assert details.archive_state == SettingStatus.SUCCEEDED + assert details.archive_state_details == "Archived successfully" + assert details.delete_state == SettingStatus.PENDING + assert details.delete_state_details == "Deletion pending" + assert details.time_archival_scheduled == 1633046400 + assert details.time_deletion_scheduled == 1635638400 + + def test_to_yaml(self): + """Test that to_yaml serializes the details to a YAML string.""" + details = ModelRetentionOperationDetails( + archive_state=SettingStatus.SUCCEEDED, + archive_state_details="Archived successfully", + delete_state=SettingStatus.PENDING, + delete_state_details="Deletion pending", + time_archival_scheduled=1633046400, + time_deletion_scheduled=1635638400 + ) + expected_yaml = yaml.dump({ + "archive_state": "SUCCEEDED", + "archive_state_details": "Archived successfully", + "delete_state": "PENDING", + "delete_state_details": "Deletion pending", + "time_archival_scheduled": 1633046400, + "time_deletion_scheduled": 1635638400 + }) + assert details.to_yaml() == expected_yaml + + def test_validate_valid(self): + """Test that validate method returns True for valid retention operation details.""" + details = ModelRetentionOperationDetails( + archive_state=SettingStatus.SUCCEEDED, + 
delete_state=SettingStatus.PENDING, + time_archival_scheduled=1633046400, + time_deletion_scheduled=1635638400 + ) + assert details.validate() is True + + def test_validate_invalid_state(self): + """Test that validate method returns False for invalid archive or delete state.""" + details = ModelRetentionOperationDetails( + archive_state="INVALID_STATE", # Invalid state + delete_state=SettingStatus.PENDING, + time_archival_scheduled=1633046400, + time_deletion_scheduled=1635638400 + ) + assert details.validate() is False + + def test_validate_invalid_time(self): + """Test that validate method returns False for invalid time values.""" + details = ModelRetentionOperationDetails( + archive_state=SettingStatus.SUCCEEDED, + delete_state=SettingStatus.PENDING, + time_archival_scheduled="invalid_time", # Invalid time + time_deletion_scheduled=1635638400 + ) + assert details.validate() is False + +class TestModelBackupOperationDetails: + """Test cases for ModelBackupOperationDetails class.""" + + def test_to_dict(self): + """Test that to_dict method returns the correct dictionary.""" + details = ModelBackupOperationDetails( + backup_state=SettingStatus.SUCCEEDED, + backup_state_details="Backup completed successfully", + time_last_backup=1633046400 + ) + expected_dict = { + "backup_state": "SUCCEEDED", + "backup_state_details": "Backup completed successfully", + "time_last_backup": 1633046400 + } + assert details.to_dict() == expected_dict + + def test_from_dict(self): + """Test that from_dict method correctly creates a ModelBackupOperationDetails object.""" + data = { + "backup_state": "SUCCEEDED", + "backup_state_details": "Backup completed successfully", + "time_last_backup": 1633046400 + } + details = ModelBackupOperationDetails.from_dict(data) + assert details.backup_state == SettingStatus.SUCCEEDED + assert details.backup_state_details == "Backup completed successfully" + assert details.time_last_backup == 1633046400 + + def test_to_json(self): + """Test that to_json 
serializes the details to a JSON string.""" + details = ModelBackupOperationDetails( + backup_state=SettingStatus.SUCCEEDED, + backup_state_details="Backup completed successfully", + time_last_backup=1633046400 + ) + expected_json = json.dumps({ + "backup_state": "SUCCEEDED", + "backup_state_details": "Backup completed successfully", + "time_last_backup": 1633046400 + }) + assert details.to_json() == expected_json + + def test_from_json(self): + """Test that from_json correctly deserializes the details from a JSON string.""" + json_str = json.dumps({ + "backup_state": "SUCCEEDED", + "backup_state_details": "Backup completed successfully", + "time_last_backup": 1633046400 + }) + details = ModelBackupOperationDetails.from_json(json_str) + assert details.backup_state == SettingStatus.SUCCEEDED + assert details.backup_state_details == "Backup completed successfully" + assert details.time_last_backup == 1633046400 + + def test_to_yaml(self): + """Test that to_yaml serializes the details to a YAML string.""" + details = ModelBackupOperationDetails( + backup_state=SettingStatus.SUCCEEDED, + backup_state_details="Backup completed successfully", + time_last_backup=1633046400 + ) + expected_yaml = yaml.dump({ + "backup_state": "SUCCEEDED", + "backup_state_details": "Backup completed successfully", + "time_last_backup": 1633046400 + }) + assert details.to_yaml() == expected_yaml + + def test_validate_valid(self): + """Test that validate method returns True for valid backup operation details.""" + details = ModelBackupOperationDetails( + backup_state=SettingStatus.SUCCEEDED, + time_last_backup=1633046400 + ) + assert details.validate() is True + + def test_validate_invalid_state(self): + """Test that validate method returns False for an invalid backup state.""" + details = ModelBackupOperationDetails( + backup_state="INVALID_STATE", + time_last_backup=1633046400 + ) + assert details.validate() is False + + def test_validate_invalid_time(self): + """Test that validate method 
returns False for an invalid time value.""" + details = ModelBackupOperationDetails( + backup_state=SettingStatus.SUCCEEDED, + time_last_backup="invalid_time" # Invalid time + ) + assert details.validate() is False + +