From 9916616425ca06cc4012754b4ebc123a6a92b048 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Fri, 4 Oct 2024 09:46:22 +0530 Subject: [PATCH 01/15] added changes for backup and retention --- ads/model/datascience_model.py | 142 ++++++++++++++++++++++++++++++++- ads/model/model_metadata.py | 80 +++++++++++++++++++ 2 files changed, 221 insertions(+), 1 deletion(-) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index 23d505bc4..de00c5aa3 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -36,6 +36,10 @@ ModelCustomMetadataItem, ModelProvenanceMetadata, ModelTaxonomyMetadata, + ModelBackupSetting, + ModelRetentionSetting, + ModelRetentionOperationDetails, + ModelBackupOperationDetails ) from ads.model.service.oci_datascience_model import ( ModelProvenanceNotFoundError, @@ -120,6 +124,19 @@ class DataScienceModel(Builder): Model version id model_file_description: dict Contains object path details for models created by reference. + backup_setting: ModelBackupSetting + The value to assign to the backup_setting property of this CreateModelDetails. + retention_setting: ModelRetentionSetting + The value to assign to the retention_setting property of this CreateModelDetails. + retention_operation_details: ModelRetentionOperationDetails + The value to assign to the retention_operation_details property for the Model. + backup_operation_details: ModelBackupOperationDetails + The value to assign to the backup_operation_details property for the Model. + + + + + Methods ------- @@ -178,7 +195,6 @@ class DataScienceModel(Builder): Sets path details for models created by reference. 
Input can be either a dict, string or json file and the schema is dictated by model_file_description_schema.json - Examples -------- >>> ds_model = (DataScienceModel() @@ -217,7 +233,12 @@ class DataScienceModel(Builder): CONST_MODEL_VERSION_ID = "versionId" CONST_TIME_CREATED = "timeCreated" CONST_LIFECYCLE_STATE = "lifecycleState" + CONST_LIFECYCLE_DETAILS = "lifecycleDetails" CONST_MODEL_FILE_DESCRIPTION = "modelDescription" + CONST_BACKUP_SETTING = "backupSetting" + CONST_RETENTION_SETTING = "retentionSetting" + CONST_BACKUP_OPERATION_DETAILS = "backupOperationDetails" + CONST_RETENTION_OPERATION_DETAILS = "retentionOperationDetails" attribute_map = { CONST_ID: "id", @@ -239,7 +260,12 @@ class DataScienceModel(Builder): CONST_MODEL_VERSION_ID: "version_id", CONST_TIME_CREATED: "time_created", CONST_LIFECYCLE_STATE: "lifecycle_state", + CONST_LIFECYCLE_DETAILS: "lifecycle_details", CONST_MODEL_FILE_DESCRIPTION: "model_description", + CONST_BACKUP_SETTING: "backup_setting", + CONST_RETENTION_SETTING: "retention_setting", + CONST_BACKUP_OPERATION_DETAILS: "backup_operation_details", + CONST_RETENTION_OPERATION_DETAILS: "retention_operation_details" } def __init__(self, spec: Dict = None, **kwargs) -> None: @@ -685,6 +711,114 @@ def with_model_file_description( return self.set_spec(self.CONST_MODEL_FILE_DESCRIPTION, json_data) + @property + def retention_setting(self) -> ModelRetentionSetting: + """ + Gets the retention_setting of this model. + + :return: The retention_setting of this model. + :rtype: RetentionSetting + """ + return self.get_spec(self.CONST_RETENTION_SETTING) + + def with_retention_setting(self, retention_setting: Union[Dict, ModelRetentionSetting]) -> "DataScienceModel": + """ + Sets the retention setting details for the model. + + Parameters + ---------- + retention_setting : Union[Dict, RetentionSetting] + The retention setting details for the model. Can be provided as either a dictionary or + an instance of the `RetentionSetting` class. 
+ + Returns + ------- + DataScienceModel + The `DataScienceModel` instance (self) for method chaining. + """ + if retention_setting and isinstance(retention_setting, dict): + try: + retention_setting = ModelRetentionSetting.from_dict(retention_setting) + except Exception as err: + logger.warn(f"Failed to convert retention_setting from dict: {err}") + + return self.set_spec(self.CONST_RETENTION_SETTING, retention_setting) + + + + @property + def backup_setting(self) -> ModelBackupSetting: + """ + Gets the backup_setting of this model. + + :return: The backup_setting of this model. + :rtype: BackupSetting + """ + return self.get_spec(self.CONST_BACKUP_SETTING) + + def with_backup_setting(self, backup_setting: Union[Dict, ModelBackupSetting]) -> "DataScienceModel": + """ + Sets the model's backup setting details. + + Parameters + ---------- + backup_setting : Union[Dict, BackupSetting] + The backup setting details for the model. This can be passed as either a dictionary or + an instance of the `BackupSetting` class. + + Returns + ------- + DataScienceModel + The `DataScienceModel` instance (self) for method chaining. + """ + if backup_setting and isinstance(backup_setting, dict): + try: + backup_setting = ModelBackupSetting.from_dict(backup_setting) + except Exception as err: + logger.warn(f"Failed to convert backup_setting from dict: {err}") + + return self.set_spec(self.CONST_BACKUP_SETTING, backup_setting) + + @property + def retention_operation_details(self) -> ModelRetentionOperationDetails: + """ + Gets the retention_operation_details of this Model using the spec constant. + + :return: The retention_operation_details of this Model. 
+ :rtype: ModelRetentionOperationDetails + """ + return self.get_spec(self.CONST_RETENTION_OPERATION_DETAILS) + + @retention_operation_details.setter + def retention_operation_details(self, retention_operation_details: ModelRetentionOperationDetails) -> "DataScienceModel": + """ + Sets the retention_operation_details of this Model using the spec constant. + + :param retention_operation_details: The retention_operation_details of this Model. + :type: ModelRetentionOperationDetails + """ + return self.set_spec(self.CONST_RETENTION_OPERATION_DETAILS, retention_operation_details) + + @property + def backup_operation_details(self) -> "ModelBackupOperationDetails": + """ + Gets the backup_operation_details of this Model using the spec constant. + + :return: The backup_operation_details of this Model. + :rtype: ModelBackupOperationDetails + """ + return self.get_spec(self.CONST_BACKUP_OPERATION_DETAILS) + + @backup_operation_details.setter + def backup_operation_details(self, backup_operation_details: "ModelBackupOperationDetails") -> "DataScienceModel": + """ + Sets the backup_operation_details of this Model using the spec constant. + + :param backup_operation_details: The backup_operation_details of this Model. + :type: ModelBackupOperationDetails + """ + return self.set_spec(self.CONST_BACKUP_OPERATION_DETAILS, backup_operation_details) + def create(self, **kwargs) -> "DataScienceModel": """Creates datascience model. 
@@ -900,6 +1034,8 @@ def upload_artifact( artifact_uploader.upload() self._remove_file_description_artifact() + + def _remove_file_description_artifact(self): """Removes temporary model file description artifact for model by reference.""" @@ -1181,6 +1317,8 @@ def _to_oci_dsc_model(self, **kwargs): self.CONST_CUSTOM_METADATA: "_to_oci_metadata", self.CONST_DEFINED_METADATA: "_to_oci_metadata", self.CONST_PROVENANCE_METADATA: "_to_oci_metadata", + self.CONST_BACKUP_SETTING: "to_json", + self.CONST_RETENTION_SETTING: "to_json" } dsc_spec = {} for infra_attr, dsc_attr in self.attribute_map.items(): @@ -1219,6 +1357,8 @@ def _update_from_oci_dsc_model( self.CONST_OUTPUT_SCHEMA: [Schema.from_json, json.loads], self.CONST_CUSTOM_METADATA: ModelCustomMetadata._from_oci_metadata, self.CONST_DEFINED_METADATA: ModelTaxonomyMetadata._from_oci_metadata, + self.CONST_BACKUP_SETTING: ModelBackupSetting.from_json, + self.CONST_RETENTION_SETTING: ModelRetentionSetting.from_json, } # Update the main properties diff --git a/ads/model/model_metadata.py b/ads/model/model_metadata.py index 2667b82ad..f49705813 100644 --- a/ads/model/model_metadata.py +++ b/ads/model/model_metadata.py @@ -1761,3 +1761,83 @@ def __repr__(self): Serialized version of object as a YAML string """ return self.to_yaml() + + +class CustomerNotificationType(str, metaclass=ExtendedEnumMeta): + NONE = "NONE" + ALL = "ALL" + ON_FAILURE = "ON_FAILURE" + ON_SUCCESS = "ON_SUCCESS" + + +class ModelBackupSettingDetailsMetadata: + """ + Class that represents Model Backup Setting Details Metadata. + + Methods + ------- + to_dict(self) -> Dict: + Serializes the backup settings into a dictionary. + from_dict(cls, data: Dict) -> 'ModelBackupSettingDetailsMetadata': + Constructs backup settings from a dictionary. + to_json(self) -> str: + Serializes the backup settings into a JSON string. + from_json(cls, json_str: str) -> 'ModelBackupSettingDetailsMetadata': + Constructs backup settings from a JSON string. 
+ to_yaml(self) -> str: + Serializes the backup settings into a YAML string. + validate(self) -> bool: + Validates the backup settings details. + """ + + def __init__(self, is_backup_enabled: Optional[bool] = None, backup_region: Optional[str] = None, + customer_notification_type: Optional[CustomerNotificationType] = None): + self.is_backup_enabled = is_backup_enabled if is_backup_enabled is not None else False + self.backup_region = backup_region + self.customer_notification_type = customer_notification_type if customer_notification_type is not None else CustomerNotificationType.NONE + + def to_dict(self) -> Dict: + """Serializes the backup settings into a dictionary.""" + return { + "is_backup_enabled": self.is_backup_enabled, + "backup_region": self.backup_region, + "customer_notification_type": self.customer_notification_type.value + } + + @classmethod + def from_dict(cls, data: Dict) -> 'ModelBackupSettingDetailsMetadata': + """Constructs backup settings from a dictionary.""" + return cls( + is_backup_enabled=data.get("is_backup_enabled"), + backup_region=data.get("backup_region"), + customer_notification_type=CustomerNotificationType(data.get("customer_notification_type", CustomerNotificationType.NONE.value)) + ) + + def to_json(self) -> str: + """Serializes the backup settings into a JSON string.""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> 'ModelBackupSettingDetailsMetadata': + """Constructs backup settings from a JSON string.""" + data = json.loads(json_str) + return cls.from_dict(data) + + def to_yaml(self) -> str: + """Serializes the backup settings into a YAML string.""" + return yaml.dump(self.to_dict()) + + def validate(self) -> bool: + """Validates the backup settings details. 
Returns True if valid, False otherwise.""" + if not isinstance(self.is_backup_enabled, bool): + return False + if self.backup_region and not isinstance(self.backup_region, str): + return False + if not isinstance(self.customer_notification_type, CustomerNotificationType): + return False + return True + + def __repr__(self): + return f"ModelBackupSettingDetailsMetadata(is_backup_enabled={self.is_backup_enabled}, backup_region={self.backup_region}, customer_notification_type={self.customer_notification_type})" + + From 428677f4b00d779b4be6bd84f3459e3b4a114346 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Tue, 8 Oct 2024 10:34:04 +0530 Subject: [PATCH 02/15] added changes for backup and retention --- ads/model/model_metadata.py | 257 ++++++++++- .../model/test_datascience_model.py | 48 +- .../model/test_model_metadata.py | 413 ++++++++++++++++++ 3 files changed, 711 insertions(+), 7 deletions(-) diff --git a/ads/model/model_metadata.py b/ads/model/model_metadata.py index f49705813..17cfaf94d 100644 --- a/ads/model/model_metadata.py +++ b/ads/model/model_metadata.py @@ -1770,7 +1770,7 @@ class CustomerNotificationType(str, metaclass=ExtendedEnumMeta): ON_SUCCESS = "ON_SUCCESS" -class ModelBackupSettingDetailsMetadata: +class ModelBackupSetting: """ Class that represents Model Backup Setting Details Metadata. @@ -1778,11 +1778,11 @@ class ModelBackupSettingDetailsMetadata: ------- to_dict(self) -> Dict: Serializes the backup settings into a dictionary. - from_dict(cls, data: Dict) -> 'ModelBackupSettingDetailsMetadata': + from_dict(cls, data: Dict) -> 'ModelBackupSetting': Constructs backup settings from a dictionary. to_json(self) -> str: Serializes the backup settings into a JSON string. - from_json(cls, json_str: str) -> 'ModelBackupSettingDetailsMetadata': + from_json(cls, json_str: str) -> 'ModelBackupSetting': Constructs backup settings from a JSON string. to_yaml(self) -> str: Serializes the backup settings into a YAML string. 
@@ -1805,7 +1805,7 @@ def to_dict(self) -> Dict: } @classmethod - def from_dict(cls, data: Dict) -> 'ModelBackupSettingDetailsMetadata': + def from_dict(cls, data: Dict) -> 'ModelBackupSetting': """Constructs backup settings from a dictionary.""" return cls( is_backup_enabled=data.get("is_backup_enabled"), @@ -1818,7 +1818,7 @@ def to_json(self) -> str: return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> 'ModelBackupSettingDetailsMetadata': + def from_json(cls, json_str: str) -> 'ModelBackupSetting': """Constructs backup settings from a JSON string.""" data = json.loads(json_str) return cls.from_dict(data) @@ -1838,6 +1838,251 @@ def validate(self) -> bool: return True def __repr__(self): - return f"ModelBackupSettingDetailsMetadata(is_backup_enabled={self.is_backup_enabled}, backup_region={self.backup_region}, customer_notification_type={self.customer_notification_type})" + return f"ModelBackupSetting(is_backup_enabled={self.is_backup_enabled}, backup_region={self.backup_region}, customer_notification_type={self.customer_notification_type})" + + +class ModelRetentionSetting: + """ + Class that represents Model Retention Setting Details Metadata. + + Methods + ------- + to_dict(self) -> Dict: + Serializes the retention settings into a dictionary. + from_dict(cls, data: Dict) -> 'ModelRetentionSetting': + Constructs retention settings from a dictionary. + to_json(self) -> str: + Serializes the retention settings into a JSON string. + from_json(cls, json_str: str) -> 'ModelRetentionSetting': + Constructs retention settings from a JSON string. + to_yaml(self) -> str: + Serializes the retention settings into a YAML string. + validate(self) -> bool: + Validates the retention settings details. 
+ """ + + def __init__(self, archive_after_days: Optional[int] = None, delete_after_days: Optional[int] = None, + customer_notification_type: Optional[CustomerNotificationType] = None): + self.archive_after_days = archive_after_days + self.delete_after_days = delete_after_days + self.customer_notification_type = customer_notification_type if customer_notification_type is not None else CustomerNotificationType.NONE + + def to_dict(self) -> Dict: + """Serializes the retention settings into a dictionary.""" + return { + "archive_after_days": self.archive_after_days, + "delete_after_days": self.delete_after_days, + "customer_notification_type": self.customer_notification_type.value + } + + @classmethod + def from_dict(cls, data: Dict) -> 'ModelRetentionSetting': + """Constructs retention settings from a dictionary.""" + return cls( + archive_after_days=data.get("archive_after_days"), + delete_after_days=data.get("delete_after_days"), + customer_notification_type=CustomerNotificationType(data.get("customer_notification_type", CustomerNotificationType.NONE.value)) + ) + + def to_json(self) -> str: + """Serializes the retention settings into a JSON string.""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> 'ModelRetentionSetting': + """Constructs retention settings from a JSON string.""" + data = json.loads(json_str) + return cls.from_dict(data) + + def to_yaml(self) -> str: + """Serializes the retention settings into a YAML string.""" + return yaml.dump(self.to_dict()) + + def validate(self) -> bool: + """Validates the retention settings details. 
Returns True if valid, False otherwise.""" + if self.archive_after_days is not None and (not isinstance(self.archive_after_days, int) or self.archive_after_days < 0): + return False + if self.delete_after_days is not None and (not isinstance(self.delete_after_days, int) or self.delete_after_days < 0): + return False + if not isinstance(self.customer_notification_type, CustomerNotificationType): + return False + return True + + def __repr__(self): + return f"ModelRetentionSetting(archive_after_days={self.archive_after_days}, delete_after_days={self.delete_after_days}, customer_notification_type={self.customer_notification_type})" + + +class SettingStatus(str, ExtendedEnumMeta): + """Enum to represent the status of retention settings.""" + PENDING = "PENDING" + SUCCEEDED = "SUCCEEDED" + FAILED = "FAILED" + +class ModelRetentionOperationDetails: + """ + Class that represents Model Retention Operation Details Metadata. + + Methods + ------- + to_dict(self) -> Dict: + Serializes the retention operation details into a dictionary. + from_dict(cls, data: Dict) -> 'ModelRetentionOperationDetails': + Constructs retention operation details from a dictionary. + to_json(self) -> str: + Serializes the retention operation details into a JSON string. + from_json(cls, json_str: str) -> 'ModelRetentionOperationDetails': + Constructs retention operation details from a JSON string. + to_yaml(self) -> str: + Serializes the retention operation details into a YAML string. + validate(self) -> bool: + Validates the retention operation details. 
+ """ + + def __init__(self, + archive_state: Optional[SettingStatus] = None, + archive_state_details: Optional[str] = None, + delete_state: Optional[SettingStatus] = None, + delete_state_details: Optional[str] = None, + time_archival_scheduled: Optional[int] = None, + time_deletion_scheduled: Optional[int] = None): + self.archive_state = archive_state if archive_state is not None else SettingStatus.PENDING + self.archive_state_details = archive_state_details + self.delete_state = delete_state if delete_state is not None else SettingStatus.PENDING + self.delete_state_details = delete_state_details + self.time_archival_scheduled = time_archival_scheduled + self.time_deletion_scheduled = time_deletion_scheduled + + def to_dict(self) -> Dict: + """Serializes the retention operation details into a dictionary.""" + return { + "archive_state": self.archive_state.value, + "archive_state_details": self.archive_state_details, + "delete_state": self.delete_state.value, + "delete_state_details": self.delete_state_details, + "time_archival_scheduled": self.time_archival_scheduled, + "time_deletion_scheduled": self.time_deletion_scheduled + } + + @classmethod + def from_dict(cls, data: Dict) -> 'ModelRetentionOperationDetails': + """Constructs retention operation details from a dictionary.""" + return cls( + archive_state=SettingStatus(data.get("archive_state", SettingStatus.PENDING.value)), + archive_state_details=data.get("archive_state_details"), + delete_state=SettingStatus(data.get("delete_state", SettingStatus.PENDING.value)), + delete_state_details=data.get("delete_state_details"), + time_archival_scheduled=data.get("time_archival_scheduled"), + time_deletion_scheduled=data.get("time_deletion_scheduled") + ) + + def to_json(self) -> str: + """Serializes the retention operation details into a JSON string.""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> 'ModelRetentionOperationDetails': + """Constructs retention operation 
details from a JSON string.""" + data = json.loads(json_str) + return cls.from_dict(data) + + def to_yaml(self) -> str: + """Serializes the retention operation details into a YAML string.""" + return yaml.dump(self.to_dict()) + + def validate(self) -> bool: + """Validates the retention operation details.""" + if not isinstance(self.archive_state, SettingStatus): + return False + if not isinstance(self.delete_state, SettingStatus): + return False + if self.time_archival_scheduled is not None and not isinstance(self.time_archival_scheduled, int): + return False + if self.time_deletion_scheduled is not None and not isinstance(self.time_deletion_scheduled, int): + return False + return True + + def __repr__(self): + return (f"ModelRetentionOperationDetails(" + f"archive_state={self.archive_state}, " + f"archive_state_details={self.archive_state_details}, " + f"delete_state={self.delete_state}, " + f"delete_state_details={self.delete_state_details}, " + f"time_archival_scheduled={self.time_archival_scheduled}, " + f"time_deletion_scheduled={self.time_deletion_scheduled})") + + +class ModelBackupOperationDetails: + """ + Class that represents Model Backup Operation Details Metadata. + + Methods + ------- + to_dict(self) -> Dict: + Serializes the backup operation details into a dictionary. + from_dict(cls, data: Dict) -> 'ModelBackupOperationDetails': + Constructs backup operation details from a dictionary. + to_json(self) -> str: + Serializes the backup operation details into a JSON string. + from_json(cls, json_str: str) -> 'ModelBackupOperationDetails': + Constructs backup operation details from a JSON string. + to_yaml(self) -> str: + Serializes the backup operation details into a YAML string. + validate(self) -> bool: + Validates the backup operation details. 
+ """ + + def __init__(self, + backup_state: Optional['SettingStatus'] = None, + backup_state_details: Optional[str] = None, + time_last_backed_up: Optional[int] = None): + self.backup_state = backup_state if backup_state is not None else SettingStatus.PENDING + self.backup_state_details = backup_state_details + self.time_last_backed_up = time_last_backed_up + + def to_dict(self) -> Dict: + """Serializes the backup operation details into a dictionary.""" + return { + "backup_state": self.backup_state.value, + "backup_state_details": self.backup_state_details, + "time_last_backed_up": self.time_last_backed_up + } + + @classmethod + def from_dict(cls, data: Dict) -> 'ModelBackupOperationDetails': + """Constructs backup operation details from a dictionary.""" + return cls( + backup_state=SettingStatus(data.get("backup_state", SettingStatus.PENDING.value)), + backup_state_details=data.get("backup_state_details"), + time_last_backed_up=data.get("time_last_backed_up") + ) + + def to_json(self) -> str: + """Serializes the backup operation details into a JSON string.""" + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> 'ModelBackupOperationDetails': + """Constructs backup operation details from a JSON string.""" + data = json.loads(json_str) + return cls.from_dict(data) + + def to_yaml(self) -> str: + """Serializes the backup operation details into a YAML string.""" + return yaml.dump(self.to_dict()) + + def validate(self) -> bool: + """Validates the backup operation details.""" + if not isinstance(self.backup_state, SettingStatus): + return False + if self.time_last_backed_up is not None and not isinstance(self.time_last_backed_up, int): + return False + return True + + def __repr__(self): + return (f"ModelBackupOperationDetails(" + f"backup_state={self.backup_state}, " + f"backup_state_details={self.backup_state_details}, " + f"time_last_backed_up={self.time_last_backed_up})") + diff --git 
a/tests/unitary/default_setup/model/test_datascience_model.py b/tests/unitary/default_setup/model/test_datascience_model.py index 9cdf06ca7..d063bdacc 100644 --- a/tests/unitary/default_setup/model/test_datascience_model.py +++ b/tests/unitary/default_setup/model/test_datascience_model.py @@ -34,6 +34,8 @@ ModelTaxonomyMetadata, ModelCustomMetadataItem, MetadataCustomCategory, + ModelBackupSetting, + ModelRetentionSetting ) from ads.model.service.oci_datascience_model import ( ModelProvenanceNotFoundError, @@ -44,7 +46,7 @@ from ads.config import AQUA_SERVICE_MODELS_BUCKET as SERVICE_MODELS_BUCKET MODEL_OCID = "ocid1.datasciencemodel.oc1.iad." - + OCI_MODEL_PAYLOAD = { "id": MODEL_OCID, "compartment_id": "ocid1.compartment.oc1..", @@ -72,6 +74,16 @@ {"key": "Hyperparameters"}, {"key": "ArtifactTestResults"}, ], + "backupSetting": { + "isBackupEnabled": True, + "backupRegion": "us-phoenix-1", + "customerNotificationType": "ALL" + }, + "retentionSetting": { + "archiveAfterDays": 30, + "deleteAfterDays": 90, + "customerNotificationType": "ALL" + }, "input_schema": '{"schema": [{"dtype": "int64", "feature_type": "Integer", "name": 0, "domain": {"values": "", "stats": {}, "constraints": []}, "required": true, "description": "0", "order": 0}], "version": "1.1"}', "output_schema": '{"schema": [{"dtype": "int64", "feature_type": "Integer", "name": 0, "domain": {"values": "", "stats": {}, "constraints": []}, "required": true, "description": "0", "order": 0}], "version": "1.1"}', } @@ -148,6 +160,16 @@ "training_id": None, "artifact_dir": "test_script_dir", }, + "backupSetting": { + "isBackupEnabled": True, + "backupRegion": "us-phoenix-1", + "customerNotificationType": "ALL" + }, + "retentionSetting": { + "archiveAfterDays": 30, + "deleteAfterDays": 90, + "customerNotificationType": "ALL" + }, "artifact": "ocid1.datasciencemodel.oc1.iad..zip", } @@ -307,6 +329,8 @@ def test_with_methods_1(self, mock_load_default_properties): 
.with_defined_metadata_list(self.payload["definedMetadataList"]) .with_provenance_metadata(self.payload["provenanceMetadata"]) .with_artifact(self.payload["artifact"]) + .with_backup_setting(self.payload['backupSetting']) + .with_retention_setting(self.payload['retentionSetting']) ) assert self.prepare_dict(dsc_model.to_dict()["spec"]) == self.prepare_dict( self.payload @@ -334,6 +358,8 @@ def test_with_methods_2(self): ModelProvenanceMetadata.from_dict(self.payload["provenanceMetadata"]) ) .with_artifact(self.payload["artifact"]) + .with_backup_setting(ModelBackupSetting.from_dict(self.payload['backupSetting'])) + .with_retention_setting(ModelRetentionSetting.from_dict(self.payload['retentionSetting'])) ) assert self.prepare_dict(dsc_model.to_dict()["spec"]) == self.prepare_dict( self.payload @@ -617,6 +643,16 @@ def test__update_from_oci_dsc_model( {"key": "Hyperparameters", "value": "new test"}, {"key": "ArtifactTestResults", "value": "new test"}, ], + "backup_setting": { + "is_backup_enabled": True, + "backup_region": "us-phoenix-1", + "customer_notification_type": "ALL", + }, + "retention_setting": { + "archive_after_days": 30, + "delete_after_days": 90, + "customer_notification_type": "ALL", + }, "input_schema": '{"schema": [{"dtype": "int64", "feature_type": "Integer", "name": 1, "domain": {"values": "", "stats": {}, "constraints": []}, "required": true, "description": "0", "order": 0}], "version": "1.1"}', "output_schema": '{"schema": [{"dtype": "int64", "feature_type": "Integer", "name": 1, "domain": {"values": "", "stats": {}, "constraints": []}, "required": true, "description": "0", "order": 0}], "version": "1.1"}', } @@ -681,6 +717,16 @@ def test__update_from_oci_dsc_model( {"key": "ArtifactTestResults", "value": "new test"}, ] }, + "backupSetting": { + "isBackupEnabled": True, + "backupRegion": "us-phoenix-1", + "customerNotificationType": "ALL", + }, + "retentionSetting": { + "archiveAfterDays": 30, + "deleteAfterDays": 90, + 
"customerNotificationType": "ALL", + }, "provenanceMetadata": { "git_branch": "master", "git_commit": "7c8c8502896ba36837f15037b67e05a3cf9722c7", diff --git a/tests/unitary/default_setup/model/test_model_metadata.py b/tests/unitary/default_setup/model/test_model_metadata.py index f38af703a..6a261c800 100644 --- a/tests/unitary/default_setup/model/test_model_metadata.py +++ b/tests/unitary/default_setup/model/test_model_metadata.py @@ -32,6 +32,12 @@ ModelTaxonomyMetadataItem, MetadataTaxonomyKeys, UseCaseType, + ModelBackupSetting, + ModelBackupOperationDetails, + ModelRetentionSetting, + ModelRetentionOperationDetails, + CustomerNotificationType, + SettingStatus ) from oci.data_science.models import Metadata as OciMetadataItem @@ -1012,3 +1018,410 @@ def test_to_json_file_success(self): ) open_mock.assert_called_with(mock_file_path, mode="w", **mock_storage_options) open_mock.return_value.write.assert_called_with(metadata_taxonomy.to_json()) + +class TestModelBackupSetting: + """Unit tests for ModelBackupSetting class.""" + + def test_initialization(self): + """Test default initialization of ModelBackupSetting.""" + backup_setting = ModelBackupSetting() + assert backup_setting.is_backup_enabled == False + assert backup_setting.backup_region is None + assert backup_setting.customer_notification_type == CustomerNotificationType.NONE + + # Test with parameters + backup_setting = ModelBackupSetting( + is_backup_enabled=True, + backup_region="us-west-1", + customer_notification_type=CustomerNotificationType.ALL + ) + assert backup_setting.is_backup_enabled == True + assert backup_setting.backup_region == "us-west-1" + assert backup_setting.customer_notification_type == CustomerNotificationType.ALL + + def test_to_dict(self): + """Test conversion to dictionary.""" + backup_setting = ModelBackupSetting( + is_backup_enabled=True, + backup_region="us-west-1", + customer_notification_type=CustomerNotificationType.ALL + ) + expected_dict = { + "is_backup_enabled": True, + 
"backup_region": "us-west-1", + "customer_notification_type": "ALL" + } + assert backup_setting.to_dict() == expected_dict + + def test_from_dict(self): + """Test constructing from dictionary.""" + data = { + "is_backup_enabled": True, + "backup_region": "us-west-1", + "customer_notification_type": "ALL" + } + backup_setting = ModelBackupSetting.from_dict(data) + assert backup_setting.is_backup_enabled == True + assert backup_setting.backup_region == "us-west-1" + assert backup_setting.customer_notification_type == CustomerNotificationType.ALL + + def test_to_json(self): + """Test conversion to JSON.""" + backup_setting = ModelBackupSetting( + is_backup_enabled=True, + backup_region="us-west-1", + customer_notification_type=CustomerNotificationType.ALL + ) + expected_json = json.dumps({ + "is_backup_enabled": True, + "backup_region": "us-west-1", + "customer_notification_type": "ALL" + }) + assert backup_setting.to_json() == expected_json + + def test_from_json(self): + """Test constructing from JSON.""" + json_str = json.dumps({ + "is_backup_enabled": True, + "backup_region": "us-west-1", + "customer_notification_type": "ALL" + }) + backup_setting = ModelBackupSetting.from_json(json_str) + assert backup_setting.is_backup_enabled == True + assert backup_setting.backup_region == "us-west-1" + assert backup_setting.customer_notification_type == CustomerNotificationType.ALL + + def test_to_yaml(self): + """Test conversion to YAML.""" + backup_setting = ModelBackupSetting( + is_backup_enabled=True, + backup_region="us-west-1", + customer_notification_type=CustomerNotificationType.ALL + ) + expected_yaml = yaml.dump({ + "is_backup_enabled": True, + "backup_region": "us-west-1", + "customer_notification_type": "ALL" + }) + assert backup_setting.to_yaml() == expected_yaml + + def test_validate(self): + """Test validation of backup settings.""" + # Valid settings + backup_setting = ModelBackupSetting( + is_backup_enabled=True, + backup_region="us-west-1", + 
customer_notification_type=CustomerNotificationType.ALL + ) + assert backup_setting.validate() == True + + # Invalid settings (wrong types) + backup_setting.is_backup_enabled = "Yes" # Should be boolean + assert backup_setting.validate() == False + + backup_setting.is_backup_enabled = True + backup_setting.backup_region = 123 # Should be a string + assert backup_setting.validate() == False + + backup_setting.backup_region = "us-west-1" + backup_setting.customer_notification_type = "all_notif" # Should be CustomerNotificationType Enum + assert backup_setting.validate() == False + +class TestModelRetentionSetting: + """Test cases for ModelRetentionSetting class.""" + + def test_to_dict(self): + """Test that to_dict method returns the correct dictionary.""" + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=60, customer_notification_type=CustomerNotificationType.ALL) + expected_dict = { + "archive_after_days": 30, + "delete_after_days": 60, + "customer_notification_type": "ALL" + } + assert setting.to_dict() == expected_dict + + def test_from_dict(self): + """Test that from_dict method correctly creates a ModelRetentionSetting object.""" + data = { + "archive_after_days": 30, + "delete_after_days": 60, + "customer_notification_type": "ALL" + } + setting = ModelRetentionSetting.from_dict(data) + assert setting.archive_after_days == 30 + assert setting.delete_after_days == 60 + assert setting.customer_notification_type == CustomerNotificationType.ALL + + def test_to_json(self): + """Test that to_json serializes the settings to a JSON string.""" + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=60, customer_notification_type=CustomerNotificationType.EMAIL) + expected_json = json.dumps({ + "archive_after_days": 30, + "delete_after_days": 60, + "customer_notification_type": "ALL" + }) + assert setting.to_json() == expected_json + + def test_from_json(self): + """Test that from_json correctly deserializes the settings from a 
JSON string.""" + json_str = json.dumps({ + "archive_after_days": 30, + "delete_after_days": 60, + "customer_notification_type": "ALL" + }) + setting = ModelRetentionSetting.from_json(json_str) + assert setting.archive_after_days == 30 + assert setting.delete_after_days == 60 + assert setting.customer_notification_type == CustomerNotificationType.ALL + + def test_to_yaml(self): + """Test that to_yaml serializes the settings to a YAML string.""" + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=60, customer_notification_type=CustomerNotificationType.ALL) + expected_yaml = yaml.dump({ + "archive_after_days": 30, + "delete_after_days": 60, + "customer_notification_type": "ALL" + }) + assert setting.to_yaml() == expected_yaml + + def test_validate_valid(self): + """Test that validate method returns True for valid retention settings.""" + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=60, customer_notification_type=CustomerNotificationType.ALL) + assert setting.validate() is True + + def test_validate_invalid_days(self): + """Test that validate returns False for invalid archive or delete days.""" + setting = ModelRetentionSetting(archive_after_days=-1, delete_after_days=60, customer_notification_type=CustomerNotificationType.ALL) + assert setting.validate() is False + + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=-10, customer_notification_type=CustomerNotificationType.ALL) + assert setting.validate() is False + + def test_validate_invalid_customer_notification_type(self): + """Test that validate method returns False for an invalid notification type.""" + setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=60, customer_notification_type="INVALID") + assert setting.validate() is False + +class TestModelRetentionOperationDetails: + """Test cases for ModelRetentionOperationDetails class.""" + + def test_to_dict(self): + """Test that to_dict method returns the correct 
dictionary.""" + details = ModelRetentionOperationDetails( + archive_state=SettingStatus.SUCCEEDED, + archive_state_details="Archived successfully", + delete_state=SettingStatus.PENDING, + delete_state_details="Deletion pending", + time_archival_scheduled=1633046400, + time_deletion_scheduled=1635638400 + ) + expected_dict = { + "archive_state": "SUCCEEDED", + "archive_state_details": "Archived successfully", + "delete_state": "PENDING", + "delete_state_details": "Deletion pending", + "time_archival_scheduled": 1633046400, + "time_deletion_scheduled": 1635638400 + } + assert details.to_dict() == expected_dict + + def test_from_dict(self): + """Test that from_dict method correctly creates a ModelRetentionOperationDetails object.""" + data = { + "archive_state": "SUCCEEDED", + "archive_state_details": "Archived successfully", + "delete_state": "PENDING", + "delete_state_details": "Deletion pending", + "time_archival_scheduled": 1633046400, + "time_deletion_scheduled": 1635638400 + } + details = ModelRetentionOperationDetails.from_dict(data) + assert details.archive_state == SettingStatus.SUCCEEDED + assert details.archive_state_details == "Archived successfully" + assert details.delete_state == SettingStatus.PENDING + assert details.delete_state_details == "Deletion pending" + assert details.time_archival_scheduled == 1633046400 + assert details.time_deletion_scheduled == 1635638400 + + def test_to_json(self): + """Test that to_json serializes the details to a JSON string.""" + details = ModelRetentionOperationDetails( + archive_state=SettingStatus.SUCCEEDED, + archive_state_details="Archived successfully", + delete_state=SettingStatus.PENDING, + delete_state_details="Deletion pending", + time_archival_scheduled=1633046400, + time_deletion_scheduled=1635638400 + ) + expected_json = json.dumps({ + "archive_state": "SUCCEEDED", + "archive_state_details": "Archived successfully", + "delete_state": "PENDING", + "delete_state_details": "Deletion pending", + 
"time_archival_scheduled": 1633046400, + "time_deletion_scheduled": 1635638400 + }) + assert details.to_json() == expected_json + + def test_from_json(self): + """Test that from_json correctly deserializes the details from a JSON string.""" + json_str = json.dumps({ + "archive_state": "SUCCESS", + "archive_state_details": "Archived successfully", + "delete_state": "PENDING", + "delete_state_details": "Deletion pending", + "time_archival_scheduled": 1633046400, + "time_deletion_scheduled": 1635638400 + }) + details = ModelRetentionOperationDetails.from_json(json_str) + assert details.archive_state == SettingStatus.SUCCEEDED + assert details.archive_state_details == "Archived successfully" + assert details.delete_state == SettingStatus.PENDING + assert details.delete_state_details == "Deletion pending" + assert details.time_archival_scheduled == 1633046400 + assert details.time_deletion_scheduled == 1635638400 + + def test_to_yaml(self): + """Test that to_yaml serializes the details to a YAML string.""" + details = ModelRetentionOperationDetails( + archive_state=SettingStatus.SUCCEEDED, + archive_state_details="Archived successfully", + delete_state=SettingStatus.PENDING, + delete_state_details="Deletion pending", + time_archival_scheduled=1633046400, + time_deletion_scheduled=1635638400 + ) + expected_yaml = yaml.dump({ + "archive_state": "SUCCEEDED", + "archive_state_details": "Archived successfully", + "delete_state": "PENDING", + "delete_state_details": "Deletion pending", + "time_archival_scheduled": 1633046400, + "time_deletion_scheduled": 1635638400 + }) + assert details.to_yaml() == expected_yaml + + def test_validate_valid(self): + """Test that validate method returns True for valid retention operation details.""" + details = ModelRetentionOperationDetails( + archive_state=SettingStatus.SUCCEEDED, + delete_state=SettingStatus.PENDING, + time_archival_scheduled=1633046400, + time_deletion_scheduled=1635638400 + ) + assert details.validate() is True + + def 
test_validate_invalid_state(self): + """Test that validate method returns False for invalid archive or delete state.""" + details = ModelRetentionOperationDetails( + archive_state="INVALID_STATE", # Invalid state + delete_state=SettingStatus.PENDING, + time_archival_scheduled=1633046400, + time_deletion_scheduled=1635638400 + ) + assert details.validate() is False + + def test_validate_invalid_time(self): + """Test that validate method returns False for invalid time values.""" + details = ModelRetentionOperationDetails( + archive_state=SettingStatus.SUCCEEDED, + delete_state=SettingStatus.PENDING, + time_archival_scheduled="invalid_time", # Invalid time + time_deletion_scheduled=1635638400 + ) + assert details.validate() is False + +class TestModelBackupOperationDetails: + """Test cases for ModelBackupOperationDetails class.""" + + def test_to_dict(self): + """Test that to_dict method returns the correct dictionary.""" + details = ModelBackupOperationDetails( + backup_state=SettingStatus.SUCCEEDED, + backup_state_details="Backup completed successfully", + time_last_backed_up=1633046400 + ) + expected_dict = { + "backup_state": "SUCCEEDED", + "backup_state_details": "Backup completed successfully", + "time_last_backed_up": 1633046400 + } + assert details.to_dict() == expected_dict + + def test_from_dict(self): + """Test that from_dict method correctly creates a ModelBackupOperationDetails object.""" + data = { + "backup_state": "SUCCEEDED", + "backup_state_details": "Backup completed successfully", + "time_last_backed_up": 1633046400 + } + details = ModelBackupOperationDetails.from_dict(data) + assert details.backup_state == SettingStatus.SUCCEEDED + assert details.backup_state_details == "Backup completed successfully" + assert details.time_last_backed_up == 1633046400 + + def test_to_json(self): + """Test that to_json serializes the details to a JSON string.""" + details = ModelBackupOperationDetails( + backup_state=SettingStatus.SUCCEEDED, + 
backup_state_details="Backup completed successfully", + time_last_backed_up=1633046400 + ) + expected_json = json.dumps({ + "backup_state": "SUCCEEDED", + "backup_state_details": "Backup completed successfully", + "time_last_backed_up": 1633046400 + }) + assert details.to_json() == expected_json + + def test_from_json(self): + """Test that from_json correctly deserializes the details from a JSON string.""" + json_str = json.dumps({ + "backup_state": "SUCCEEDED", + "backup_state_details": "Backup completed successfully", + "time_last_backed_up": 1633046400 + }) + details = ModelBackupOperationDetails.from_json(json_str) + assert details.backup_state == SettingStatus.SUCCEEDED + assert details.backup_state_details == "Backup completed successfully" + assert details.time_last_backed_up == 1633046400 + + def test_to_yaml(self): + """Test that to_yaml serializes the details to a YAML string.""" + details = ModelBackupOperationDetails( + backup_state=SettingStatus.SUCCEEDED, + backup_state_details="Backup completed successfully", + time_last_backed_up=1633046400 + ) + expected_yaml = yaml.dump({ + "backup_state": "SUCCEEDED", + "backup_state_details": "Backup completed successfully", + "time_last_backed_up": 1633046400 + }) + assert details.to_yaml() == expected_yaml + + def test_validate_valid(self): + """Test that validate method returns True for valid backup operation details.""" + details = ModelBackupOperationDetails( + backup_state=SettingStatus.SUCCEEDED, + time_last_backed_up=1633046400 + ) + assert details.validate() is True + + def test_validate_invalid_state(self): + """Test that validate method returns False for an invalid backup state.""" + details = ModelBackupOperationDetails( + backup_state="INVALID_STATE", + time_last_backed_up=1633046400 + ) + assert details.validate() is False + + def test_validate_invalid_time(self): + """Test that validate method returns False for an invalid time value.""" + details = ModelBackupOperationDetails( + 
backup_state=SettingStatus.SUCCEEDED, + time_last_backed_up="invalid_time" # Invalid time + ) + assert details.validate() is False + + From cd5f03c1667903f3f28c9cd8b4f7f15119d58e39 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Thu, 10 Oct 2024 00:37:40 +0530 Subject: [PATCH 03/15] updated the case in test cases --- .../model/test_datascience_model.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/unitary/default_setup/model/test_datascience_model.py b/tests/unitary/default_setup/model/test_datascience_model.py index d063bdacc..12f091baf 100644 --- a/tests/unitary/default_setup/model/test_datascience_model.py +++ b/tests/unitary/default_setup/model/test_datascience_model.py @@ -74,15 +74,15 @@ {"key": "Hyperparameters"}, {"key": "ArtifactTestResults"}, ], - "backupSetting": { - "isBackupEnabled": True, - "backupRegion": "us-phoenix-1", - "customerNotificationType": "ALL" + "backup_setting": { + "is_backup_enabled": True, + "backup_region": "us-phoenix-1", + "customer_notification_type": "ALL" }, - "retentionSetting": { - "archiveAfterDays": 30, - "deleteAfterDays": 90, - "customerNotificationType": "ALL" + "retention_setting": { + "archive_after_days": 30, + "delete_after_days": 90, + "customer_notification_type": "ALL" }, "input_schema": '{"schema": [{"dtype": "int64", "feature_type": "Integer", "name": 0, "domain": {"values": "", "stats": {}, "constraints": []}, "required": true, "description": "0", "order": 0}], "version": "1.1"}', "output_schema": '{"schema": [{"dtype": "int64", "feature_type": "Integer", "name": 0, "domain": {"values": "", "stats": {}, "constraints": []}, "required": true, "description": "0", "order": 0}], "version": "1.1"}', From 2f1d661bc1fe4efdffbcabe24435d1b40ece5297 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Wed, 16 Oct 2024 15:54:06 +0530 Subject: [PATCH 04/15] updated as per comments and added missing api and column --- ads/model/datascience_model.py | 98 
++++++++++++++++++++++++---------- ads/model/model_metadata.py | 67 ++++++++++------------- 2 files changed, 96 insertions(+), 69 deletions(-) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index de00c5aa3..62c31d04d 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -333,6 +333,29 @@ def lifecycle_state(self) -> Union[str, None]: if self.dsc_model: return self.dsc_model.status return None + + @property + def lifecycle_details(self) -> str: + """ + Gets the lifecycle_details of this DataScienceModel. + Details about the lifecycle state of the model. + + :return: The lifecycle_details of this DataScienceModel. + :rtype: str + """ + return self.get_spec(self.CONST_LIFECYCLE_DETAILS) + + @lifecycle_details.setter + def lifecycle_details(self, lifecycle_details: str) -> "DataScienceModel": + """ + Sets the lifecycle_details of this DataScienceModel. + Details about the lifecycle state of the model. + + :param lifecycle_details: The lifecycle_details of this DataScienceModel. + :type: str + """ + return self.set_spec(self.CONST_LIFECYCLE_DETAILS, lifecycle_details) + @property def kind(self) -> str: @@ -737,15 +760,13 @@ def with_retention_setting(self, retention_setting: Union[Dict, ModelRetentionSe The `DataScienceModel` instance (self) for method chaining. """ if retention_setting and isinstance(retention_setting, dict): - try: - retention_setting = ModelRetentionSetting.from_dict(retention_setting) - except Exception as err: - logger.warn(f"Failed to convert retention_setting from dict: {err}") + retention_setting = ModelRetentionSetting.from_dict(retention_setting) return self.set_spec(self.CONST_RETENTION_SETTING, retention_setting) + @property def backup_setting(self) -> ModelBackupSetting: """ @@ -769,15 +790,13 @@ def with_backup_setting(self, backup_setting: Union[Dict, ModelBackupSetting]) - Returns ------- DataScienceModel - The `DataScienceModel` instance (self) for method chaining. 
+ The `DataScienceModel` instance (self) for method chaining. """ if backup_setting and isinstance(backup_setting, dict): - try: - backup_setting = ModelBackupSetting.from_dict(backup_setting) - except Exception as err: - logger.warn(f"Failed to convert backup_setting from dict: {err}") + backup_setting = ModelBackupSetting.from_dict(backup_setting) return self.set_spec(self.CONST_BACKUP_SETTING, backup_setting) + @property def retention_operation_details(self) -> ModelRetentionOperationDetails: @@ -788,16 +807,6 @@ def retention_operation_details(self) -> ModelRetentionOperationDetails: :rtype: ModelRetentionOperationDetails """ return self.get_spec(self.CONST_RETENTION_OPERATION_DETAILS) - - @retention_operation_details.setter - def retention_operation_details(self, retention_operation_details: ModelRetentionOperationDetails) -> "DataScienceModel": - """ - Sets the retention_operation_details of this Model using the spec constant. - - :param retention_operation_details: The retention_operation_details of this Model. - :type: ModelRetentionOperationDetails - """ - return self.set_spec(self.CONST_RETENTION_OPERATION_DETAILS, retention_operation_details) @property def backup_operation_details(self) -> "ModelBackupOperationDetails": @@ -809,16 +818,6 @@ def backup_operation_details(self) -> "ModelBackupOperationDetails": """ return self.get_spec(self.CONST_BACKUP_OPERATION_DETAILS) - @backup_operation_details.setter - def backup_operation_details(self, backup_operation_details: "ModelBackupOperationDetails") -> "DataScienceModel": - """ - Sets the backup_operation_details of this Model using the spec constant. - - :param backup_operation_details: The backup_operation_details of this Model. - :type: ModelBackupOperationDetails - """ - return self.set_spec(self.CONST_BACKUP_OPERATION_DETAILS, backup_operation_details) - def create(self, **kwargs) -> "DataScienceModel": """Creates datascience model. 
@@ -1043,6 +1042,45 @@ def _remove_file_description_artifact(self): if self.local_copy_dir: shutil.rmtree(self.local_copy_dir, ignore_errors=True) + + def restore_model( + self, + model_id: str, + restore_model_for_hours_specified: Optional[int] = None, + ): + """ + Restore archived model artifact. + + Parameters + ---------- + model_id : str + The `OCID` of the model to be restored. + restore_model_for_hours_specified : Optional[int] + Duration in hours for which the archived model is available for access. + + Returns + ------- + None + + Raises + ------ + ValueError + If the model ID is invalid or if any parameters are incorrect. + """ + # Validate model_id + if not model_id or not isinstance(model_id, str): + raise ValueError("model_id must be a non-empty string.") + + # Optional: Validate restore_model_for_hours_specified + if restore_model_for_hours_specified is not None: + if not isinstance(restore_model_for_hours_specified, int) or restore_model_for_hours_specified <= 0: + raise ValueError("restore_model_for_hours_specified must be a positive integer.") + + self.dsc_model.restore_archived_model_artifact( + model_id=model_id, + restore_model_for_hours_specified=restore_model_for_hours_specified) + + def download_artifact( self, target_dir: str, @@ -1298,6 +1336,8 @@ def _init_complex_attributes(self): self.with_provenance_metadata(self.provenance_metadata) self.with_input_schema(self.input_schema) self.with_output_schema(self.output_schema) + self.with_backup_setting(self.backup_setting) + self.with_retention_setting(self.retention_setting) def _to_oci_dsc_model(self, **kwargs): """Creates an `OCIDataScienceModel` instance from the `DataScienceModel`. 
diff --git a/ads/model/model_metadata.py b/ads/model/model_metadata.py index 17cfaf94d..cbddd0618 100644 --- a/ads/model/model_metadata.py +++ b/ads/model/model_metadata.py @@ -1794,7 +1794,7 @@ def __init__(self, is_backup_enabled: Optional[bool] = None, backup_region: Opti customer_notification_type: Optional[CustomerNotificationType] = None): self.is_backup_enabled = is_backup_enabled if is_backup_enabled is not None else False self.backup_region = backup_region - self.customer_notification_type = customer_notification_type if customer_notification_type is not None else CustomerNotificationType.NONE + self.customer_notification_type = customer_notification_type or CustomerNotificationType.NONE def to_dict(self) -> Dict: """Serializes the backup settings into a dictionary.""" @@ -1810,7 +1810,7 @@ def from_dict(cls, data: Dict) -> 'ModelBackupSetting': return cls( is_backup_enabled=data.get("is_backup_enabled"), backup_region=data.get("backup_region"), - customer_notification_type=CustomerNotificationType(data.get("customer_notification_type", CustomerNotificationType.NONE.value)) + customer_notification_type = CustomerNotificationType(data.get("customer_notification_type")) or None ) def to_json(self) -> str: @@ -1838,7 +1838,8 @@ def validate(self) -> bool: return True def __repr__(self): - return f"ModelBackupSetting(is_backup_enabled={self.is_backup_enabled}, backup_region={self.backup_region}, customer_notification_type={self.customer_notification_type})" + return self.to_yaml() + class ModelRetentionSetting: @@ -1865,7 +1866,7 @@ def __init__(self, archive_after_days: Optional[int] = None, delete_after_days: customer_notification_type: Optional[CustomerNotificationType] = None): self.archive_after_days = archive_after_days self.delete_after_days = delete_after_days - self.customer_notification_type = customer_notification_type if customer_notification_type is not None else CustomerNotificationType.NONE + self.customer_notification_type = 
customer_notification_type or CustomerNotificationType.NONE def to_dict(self) -> Dict: """Serializes the retention settings into a dictionary.""" @@ -1881,7 +1882,7 @@ def from_dict(cls, data: Dict) -> 'ModelRetentionSetting': return cls( archive_after_days=data.get("archive_after_days"), delete_after_days=data.get("delete_after_days"), - customer_notification_type=CustomerNotificationType(data.get("customer_notification_type", CustomerNotificationType.NONE.value)) + customer_notification_type = CustomerNotificationType(data.get("customer_notification_type")) or None ) def to_json(self) -> str: @@ -1909,7 +1910,7 @@ def validate(self) -> bool: return True def __repr__(self): - return f"ModelRetentionSetting(archive_after_days={self.archive_after_days}, delete_after_days={self.delete_after_days}, customer_notification_type={self.customer_notification_type})" + return self.to_yaml() class SettingStatus(str, ExtendedEnumMeta): @@ -1945,9 +1946,9 @@ def __init__(self, delete_state_details: Optional[str] = None, time_archival_scheduled: Optional[int] = None, time_deletion_scheduled: Optional[int] = None): - self.archive_state = archive_state if archive_state is not None else SettingStatus.PENDING + self.archive_state = archive_state self.archive_state_details = archive_state_details - self.delete_state = delete_state if delete_state is not None else SettingStatus.PENDING + self.delete_state = delete_state self.delete_state_details = delete_state_details self.time_archival_scheduled = time_archival_scheduled self.time_deletion_scheduled = time_deletion_scheduled @@ -1955,9 +1956,9 @@ def __init__(self, def to_dict(self) -> Dict: """Serializes the retention operation details into a dictionary.""" return { - "archive_state": self.archive_state.value, + "archive_state": self.archive_state.value or None, "archive_state_details": self.archive_state_details, - "delete_state": self.delete_state.value, + "delete_state": self.delete_state.value or None, "delete_state_details": 
self.delete_state_details, "time_archival_scheduled": self.time_archival_scheduled, "time_deletion_scheduled": self.time_deletion_scheduled @@ -1967,9 +1968,9 @@ def to_dict(self) -> Dict: def from_dict(cls, data: Dict) -> 'ModelRetentionOperationDetails': """Constructs retention operation details from a dictionary.""" return cls( - archive_state=SettingStatus(data.get("archive_state", SettingStatus.PENDING.value)), + archive_state = SettingStatus(data.get("archive_state")) or None, archive_state_details=data.get("archive_state_details"), - delete_state=SettingStatus(data.get("delete_state", SettingStatus.PENDING.value)), + delete_state = SettingStatus(data.get("delete_state")) or None, delete_state_details=data.get("delete_state_details"), time_archival_scheduled=data.get("time_archival_scheduled"), time_deletion_scheduled=data.get("time_deletion_scheduled") @@ -1991,24 +1992,16 @@ def to_yaml(self) -> str: def validate(self) -> bool: """Validates the retention operation details.""" - if not isinstance(self.archive_state, SettingStatus): - return False - if not isinstance(self.delete_state, SettingStatus): - return False - if self.time_archival_scheduled is not None and not isinstance(self.time_archival_scheduled, int): - return False - if self.time_deletion_scheduled is not None and not isinstance(self.time_deletion_scheduled, int): - return False - return True + return all([ + self.archive_state is None or isinstance(self.archive_state, SettingStatus), + self.delete_state is None or isinstance(self.delete_state, SettingStatus), + self.time_archival_scheduled is None or isinstance(self.time_archival_scheduled, int), + self.time_deletion_scheduled is None or isinstance(self.time_deletion_scheduled, int), + ]) + def __repr__(self): - return (f"ModelRetentionOperationDetails(" - f"archive_state={self.archive_state}, " - f"archive_state_details={self.archive_state_details}, " - f"delete_state={self.delete_state}, " - 
f"delete_state_details={self.delete_state_details}, " - f"time_archival_scheduled={self.time_archival_scheduled}, " - f"time_deletion_scheduled={self.time_deletion_scheduled})") + return self.to_yaml() class ModelBackupOperationDetails: @@ -2032,17 +2025,17 @@ class ModelBackupOperationDetails: """ def __init__(self, - backup_state: Optional['SettingStatus'] = None, + backup_state: Optional[SettingStatus] = None, backup_state_details: Optional[str] = None, time_last_backed_up: Optional[int] = None): - self.backup_state = backup_state if backup_state is not None else SettingStatus.PENDING + self.backup_state = backup_state self.backup_state_details = backup_state_details self.time_last_backed_up = time_last_backed_up def to_dict(self) -> Dict: """Serializes the backup operation details into a dictionary.""" return { - "backup_state": self.backup_state.value, + "backup_state": self.backup_state.value or None, "backup_state_details": self.backup_state_details, "time_last_backed_up": self.time_last_backed_up } @@ -2051,7 +2044,7 @@ def to_dict(self) -> Dict: def from_dict(cls, data: Dict) -> 'ModelBackupOperationDetails': """Constructs backup operation details from a dictionary.""" return cls( - backup_state=SettingStatus(data.get("backup_state", SettingStatus.PENDING.value)), + backup_state=SettingStatus(data.get("backup_state")) or None, backup_state_details=data.get("backup_state_details"), time_last_backed_up=data.get("time_last_backed_up") ) @@ -2072,17 +2065,11 @@ def to_yaml(self) -> str: def validate(self) -> bool: """Validates the backup operation details.""" - if not isinstance(self.backup_state, SettingStatus): + if self.backup_state is not None and not isinstance(self.backup_state, SettingStatus): return False if self.time_last_backed_up is not None and not isinstance(self.time_last_backed_up, int): return False return True def __repr__(self): - return (f"ModelBackupOperationDetails(" - f"backup_state={self.backup_state}, " - 
f"backup_state_details={self.backup_state_details}, " - f"time_last_backed_up={self.time_last_backed_up})") - - - + return self.to_yaml() \ No newline at end of file From 754adf8c02e138645e46df519acdee41c6ad9239 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Fri, 18 Oct 2024 14:22:25 +0530 Subject: [PATCH 05/15] updated issues with test cases --- ads/model/model_metadata.py | 27 ++++++++++++++++++- .../model/test_datascience_model.py | 2 +- 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/ads/model/model_metadata.py b/ads/model/model_metadata.py index cbddd0618..60fa762da 100644 --- a/ads/model/model_metadata.py +++ b/ads/model/model_metadata.py @@ -1769,6 +1769,31 @@ class CustomerNotificationType(str, metaclass=ExtendedEnumMeta): ON_FAILURE = "ON_FAILURE" ON_SUCCESS = "ON_SUCCESS" + _value_map = { + "NONE": NONE, + "ALL": ALL, + "ON_FAILURE": ON_FAILURE, + "ON_SUCCESS": ON_SUCCESS, + } + + @classmethod + def create(cls, key): + if key in cls._value_map: + return cls._value_map[key] + raise ValueError(f"Invalid CustomerNotificationType: {key}") + + @classmethod + def from_string(cls, value): + for member in cls: + if member.value == value: + return member + raise ValueError(f"Invalid CustomerNotificationType: {value}") + + @property + def value(self): + return str(self) + + class ModelBackupSetting: """ @@ -1913,7 +1938,7 @@ def __repr__(self): return self.to_yaml() -class SettingStatus(str, ExtendedEnumMeta): +class SettingStatus(str, metaclass=ExtendedEnumMeta): """Enum to represent the status of retention settings.""" PENDING = "PENDING" SUCCEEDED = "SUCCEEDED" diff --git a/tests/unitary/default_setup/model/test_datascience_model.py b/tests/unitary/default_setup/model/test_datascience_model.py index 12f091baf..58fb5a7ce 100644 --- a/tests/unitary/default_setup/model/test_datascience_model.py +++ b/tests/unitary/default_setup/model/test_datascience_model.py @@ -35,7 +35,7 @@ ModelCustomMetadataItem, MetadataCustomCategory, ModelBackupSetting, - 
ModelRetentionSetting + ModelRetentionSetting, ) from ads.model.service.oci_datascience_model import ( ModelProvenanceNotFoundError, From 4547ae3ee751a787589cfa1d569cd6e97dc755f0 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Mon, 21 Oct 2024 15:24:32 +0530 Subject: [PATCH 06/15] updated test cases --- ads/model/datascience_model.py | 69 ++++--- ads/model/generic_model.py | 14 +- ads/model/model_metadata.py | 173 ++++++++++++------ .../model/test_datascience_model.py | 24 +-- .../model/test_model_metadata.py | 7 +- 5 files changed, 171 insertions(+), 116 deletions(-) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index 62c31d04d..24a1eb6df 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -39,7 +39,7 @@ ModelBackupSetting, ModelRetentionSetting, ModelRetentionOperationDetails, - ModelBackupOperationDetails + ModelBackupOperationDetails, ) from ads.model.service.oci_datascience_model import ( ModelProvenanceNotFoundError, @@ -135,7 +135,7 @@ class DataScienceModel(Builder): - + Methods @@ -265,7 +265,7 @@ class DataScienceModel(Builder): CONST_BACKUP_SETTING: "backup_setting", CONST_RETENTION_SETTING: "retention_setting", CONST_BACKUP_OPERATION_DETAILS: "backup_operation_details", - CONST_RETENTION_OPERATION_DETAILS: "retention_operation_details" + CONST_RETENTION_OPERATION_DETAILS: "retention_operation_details", } def __init__(self, spec: Dict = None, **kwargs) -> None: @@ -333,7 +333,7 @@ def lifecycle_state(self) -> Union[str, None]: if self.dsc_model: return self.dsc_model.status return None - + @property def lifecycle_details(self) -> str: """ @@ -356,7 +356,6 @@ def lifecycle_details(self, lifecycle_details: str) -> "DataScienceModel": """ return self.set_spec(self.CONST_LIFECYCLE_DETAILS, lifecycle_details) - @property def kind(self) -> str: """The kind of the object as showing in a YAML.""" @@ -743,15 +742,17 @@ def retention_setting(self) -> ModelRetentionSetting: :rtype: RetentionSetting 
""" return self.get_spec(self.CONST_RETENTION_SETTING) - - def with_retention_setting(self, retention_setting: Union[Dict, ModelRetentionSetting]) -> "DataScienceModel": + + def with_retention_setting( + self, retention_setting: Union[Dict, ModelRetentionSetting] + ) -> "DataScienceModel": """ Sets the retention setting details for the model. Parameters ---------- retention_setting : Union[Dict, RetentionSetting] - The retention setting details for the model. Can be provided as either a dictionary or + The retention setting details for the model. Can be provided as either a dictionary or an instance of the `RetentionSetting` class. Returns @@ -759,14 +760,8 @@ def with_retention_setting(self, retention_setting: Union[Dict, ModelRetentionSe DataScienceModel The `DataScienceModel` instance (self) for method chaining. """ - if retention_setting and isinstance(retention_setting, dict): - retention_setting = ModelRetentionSetting.from_dict(retention_setting) - return self.set_spec(self.CONST_RETENTION_SETTING, retention_setting) - - - @property def backup_setting(self) -> ModelBackupSetting: """ @@ -776,8 +771,10 @@ def backup_setting(self) -> ModelBackupSetting: :rtype: BackupSetting """ return self.get_spec(self.CONST_BACKUP_SETTING) - - def with_backup_setting(self, backup_setting: Union[Dict, ModelBackupSetting]) -> "DataScienceModel": + + def with_backup_setting( + self, backup_setting: Union[Dict, ModelBackupSetting] + ) -> "DataScienceModel": """ Sets the model's backup setting details. @@ -792,12 +789,9 @@ def with_backup_setting(self, backup_setting: Union[Dict, ModelBackupSetting]) - DataScienceModel The `DataScienceModel` instance (self) for method chaining. 
""" - if backup_setting and isinstance(backup_setting, dict): - backup_setting = ModelBackupSetting.from_dict(backup_setting) return self.set_spec(self.CONST_BACKUP_SETTING, backup_setting) - @property def retention_operation_details(self) -> ModelRetentionOperationDetails: """ @@ -807,7 +801,7 @@ def retention_operation_details(self) -> ModelRetentionOperationDetails: :rtype: ModelRetentionOperationDetails """ return self.get_spec(self.CONST_RETENTION_OPERATION_DETAILS) - + @property def backup_operation_details(self) -> "ModelBackupOperationDetails": """ @@ -1033,8 +1027,6 @@ def upload_artifact( artifact_uploader.upload() self._remove_file_description_artifact() - - def _remove_file_description_artifact(self): """Removes temporary model file description artifact for model by reference.""" @@ -1042,11 +1034,10 @@ def _remove_file_description_artifact(self): if self.local_copy_dir: shutil.rmtree(self.local_copy_dir, ignore_errors=True) - def restore_model( - self, - model_id: str, - restore_model_for_hours_specified: Optional[int] = None, + self, + model_id: str, + restore_model_for_hours_specified: Optional[int] = None, ): """ Restore archived model artifact. @@ -1073,13 +1064,18 @@ def restore_model( # Optional: Validate restore_model_for_hours_specified if restore_model_for_hours_specified is not None: - if not isinstance(restore_model_for_hours_specified, int) or restore_model_for_hours_specified <= 0: - raise ValueError("restore_model_for_hours_specified must be a positive integer.") + if ( + not isinstance(restore_model_for_hours_specified, int) + or restore_model_for_hours_specified <= 0 + ): + raise ValueError( + "restore_model_for_hours_specified must be a positive integer." 
+ ) self.dsc_model.restore_archived_model_artifact( - model_id=model_id, - restore_model_for_hours_specified=restore_model_for_hours_specified) - + model_id=model_id, + restore_model_for_hours_specified=restore_model_for_hours_specified, + ) def download_artifact( self, @@ -1357,8 +1353,6 @@ def _to_oci_dsc_model(self, **kwargs): self.CONST_CUSTOM_METADATA: "_to_oci_metadata", self.CONST_DEFINED_METADATA: "_to_oci_metadata", self.CONST_PROVENANCE_METADATA: "_to_oci_metadata", - self.CONST_BACKUP_SETTING: "to_json", - self.CONST_RETENTION_SETTING: "to_json" } dsc_spec = {} for infra_attr, dsc_attr in self.attribute_map.items(): @@ -1375,6 +1369,8 @@ def _to_oci_dsc_model(self, **kwargs): dsc_spec[dsc_attr] = value dsc_spec.update(**kwargs) + print("Model Dsc spec") + print(dsc_spec) return OCIDataScienceModel(**dsc_spec) def _update_from_oci_dsc_model( @@ -1397,12 +1393,15 @@ def _update_from_oci_dsc_model( self.CONST_OUTPUT_SCHEMA: [Schema.from_json, json.loads], self.CONST_CUSTOM_METADATA: ModelCustomMetadata._from_oci_metadata, self.CONST_DEFINED_METADATA: ModelTaxonomyMetadata._from_oci_metadata, - self.CONST_BACKUP_SETTING: ModelBackupSetting.from_json, - self.CONST_RETENTION_SETTING: ModelRetentionSetting.from_json, + self.CONST_BACKUP_SETTING: ModelBackupSetting.to_dict, + self.CONST_RETENTION_SETTING: ModelRetentionSetting.to_dict, } # Update the main properties + self.dsc_model = dsc_model + print("Model Details from here") + print(dsc_model) for infra_attr, dsc_attr in self.attribute_map.items(): value = utils.get_value(dsc_model, dsc_attr) if value: diff --git a/ads/model/generic_model.py b/ads/model/generic_model.py index 7a44b82de..225b9750c 100644 --- a/ads/model/generic_model.py +++ b/ads/model/generic_model.py @@ -1422,9 +1422,9 @@ def from_model_artifact( ) model.update_summary_status( detail=PREPARE_STATUS_POPULATE_METADATA_DETAIL, - status=ModelState.AVAILABLE.value - if reload - else ModelState.NOTAPPLICABLE.value, + status=( + 
ModelState.AVAILABLE.value if reload else ModelState.NOTAPPLICABLE.value + ), ) return model @@ -1706,9 +1706,11 @@ def from_model_catalog( ) result_model.update_summary_status( detail=SAVE_STATUS_INTROSPECT_TEST_DETAIL, - status=ModelState.AVAILABLE.value - if not result_model.ignore_conda_error - else ModelState.NOTAVAILABLE.value, + status=( + ModelState.AVAILABLE.value + if not result_model.ignore_conda_error + else ModelState.NOTAVAILABLE.value + ), ) return result_model diff --git a/ads/model/model_metadata.py b/ads/model/model_metadata.py index 60fa762da..58f99c14b 100644 --- a/ads/model/model_metadata.py +++ b/ads/model/model_metadata.py @@ -1789,12 +1789,15 @@ def from_string(cls, value): return member raise ValueError(f"Invalid CustomerNotificationType: {value}") + @classmethod + def is_valid(cls, value): + return value in (cls.NONE, cls.ALL, cls.ON_FAILURE, cls.ON_SUCCESS) + @property def value(self): return str(self) - class ModelBackupSetting: """ Class that represents Model Backup Setting Details Metadata. @@ -1815,27 +1818,38 @@ class ModelBackupSetting: Validates the backup settings details. 
""" - def __init__(self, is_backup_enabled: Optional[bool] = None, backup_region: Optional[str] = None, - customer_notification_type: Optional[CustomerNotificationType] = None): - self.is_backup_enabled = is_backup_enabled if is_backup_enabled is not None else False + def __init__( + self, + is_backup_enabled: Optional[bool] = None, + backup_region: Optional[str] = None, + customer_notification_type: Optional[CustomerNotificationType] = None, + ): + self.is_backup_enabled = ( + is_backup_enabled if is_backup_enabled is not None else False + ) self.backup_region = backup_region - self.customer_notification_type = customer_notification_type or CustomerNotificationType.NONE + self.customer_notification_type = ( + customer_notification_type or CustomerNotificationType.NONE + ) def to_dict(self) -> Dict: """Serializes the backup settings into a dictionary.""" return { "is_backup_enabled": self.is_backup_enabled, "backup_region": self.backup_region, - "customer_notification_type": self.customer_notification_type.value + "customer_notification_type": self.customer_notification_type, } @classmethod - def from_dict(cls, data: Dict) -> 'ModelBackupSetting': + def from_dict(cls, data: Dict) -> "ModelBackupSetting": """Constructs backup settings from a dictionary.""" return cls( is_backup_enabled=data.get("is_backup_enabled"), backup_region=data.get("backup_region"), - customer_notification_type = CustomerNotificationType(data.get("customer_notification_type")) or None + customer_notification_type=CustomerNotificationType( + data.get("customer_notification_type") + ) + or None, ) def to_json(self) -> str: @@ -1843,9 +1857,13 @@ def to_json(self) -> str: return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> 'ModelBackupSetting': - """Constructs backup settings from a JSON string.""" - data = json.loads(json_str) + def from_json(cls, json_str) -> "ModelBackupSetting": + """Constructs backup settings from a JSON string or dictionary.""" + if 
isinstance(json_str, str): + data = json.loads(json_str) + else: + data = json_str # Assume it's already a dictionary or appropriate type + return cls.from_dict(data) def to_yaml(self) -> str: @@ -1858,14 +1876,14 @@ def validate(self) -> bool: return False if self.backup_region and not isinstance(self.backup_region, str): return False - if not isinstance(self.customer_notification_type, CustomerNotificationType): + if not isinstance(self.customer_notification_type, str) \ + or not CustomerNotificationType.is_valid(self.customer_notification_type): return False return True def __repr__(self): return self.to_yaml() - class ModelRetentionSetting: """ @@ -1887,27 +1905,36 @@ class ModelRetentionSetting: Validates the retention settings details. """ - def __init__(self, archive_after_days: Optional[int] = None, delete_after_days: Optional[int] = None, - customer_notification_type: Optional[CustomerNotificationType] = None): + def __init__( + self, + archive_after_days: Optional[int] = None, + delete_after_days: Optional[int] = None, + customer_notification_type: Optional[CustomerNotificationType] = None, + ): self.archive_after_days = archive_after_days self.delete_after_days = delete_after_days - self.customer_notification_type = customer_notification_type or CustomerNotificationType.NONE + self.customer_notification_type = ( + customer_notification_type or CustomerNotificationType.NONE + ) def to_dict(self) -> Dict: """Serializes the retention settings into a dictionary.""" return { "archive_after_days": self.archive_after_days, "delete_after_days": self.delete_after_days, - "customer_notification_type": self.customer_notification_type.value + "customer_notification_type": self.customer_notification_type, } @classmethod - def from_dict(cls, data: Dict) -> 'ModelRetentionSetting': + def from_dict(cls, data: Dict) -> "ModelRetentionSetting": """Constructs retention settings from a dictionary.""" return cls( archive_after_days=data.get("archive_after_days"), 
delete_after_days=data.get("delete_after_days"), - customer_notification_type = CustomerNotificationType(data.get("customer_notification_type")) or None + customer_notification_type=CustomerNotificationType( + data.get("customer_notification_type") + ) + or None, ) def to_json(self) -> str: @@ -1915,9 +1942,12 @@ def to_json(self) -> str: return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> 'ModelRetentionSetting': + def from_json(cls, json_str) -> "ModelRetentionSetting": """Constructs retention settings from a JSON string.""" - data = json.loads(json_str) + if isinstance(json_str, str): + data = json.loads(json_str) + else: + data = json_str return cls.from_dict(data) def to_yaml(self) -> str: @@ -1926,24 +1956,36 @@ def to_yaml(self) -> str: def validate(self) -> bool: """Validates the retention settings details. Returns True if valid, False otherwise.""" - if self.archive_after_days is not None and (not isinstance(self.archive_after_days, int) or self.archive_after_days < 0): + if self.archive_after_days is not None and ( + not isinstance(self.archive_after_days, int) or self.archive_after_days < 0 + ): return False - if self.delete_after_days is not None and (not isinstance(self.delete_after_days, int) or self.delete_after_days < 0): + if self.delete_after_days is not None and ( + not isinstance(self.delete_after_days, int) or self.delete_after_days < 0 + ): return False - if not isinstance(self.customer_notification_type, CustomerNotificationType): + if not isinstance(self.customer_notification_type, str) or not \ + CustomerNotificationType.is_valid(self.customer_notification_type): return False return True def __repr__(self): return self.to_yaml() - + class SettingStatus(str, metaclass=ExtendedEnumMeta): """Enum to represent the status of retention settings.""" + PENDING = "PENDING" SUCCEEDED = "SUCCEEDED" FAILED = "FAILED" + @classmethod + def is_valid(cls, state: str) -> bool: + """Validates the given state against allowed 
SettingStatus values.""" + return state in (cls.PENDING, cls.SUCCEEDED, cls.FAILED) + + class ModelRetentionOperationDetails: """ Class that represents Model Retention Operation Details Metadata. @@ -1964,13 +2006,15 @@ class ModelRetentionOperationDetails: Validates the retention operation details. """ - def __init__(self, - archive_state: Optional[SettingStatus] = None, - archive_state_details: Optional[str] = None, - delete_state: Optional[SettingStatus] = None, - delete_state_details: Optional[str] = None, - time_archival_scheduled: Optional[int] = None, - time_deletion_scheduled: Optional[int] = None): + def __init__( + self, + archive_state: Optional[SettingStatus] = None, + archive_state_details: Optional[str] = None, + delete_state: Optional[SettingStatus] = None, + delete_state_details: Optional[str] = None, + time_archival_scheduled: Optional[int] = None, + time_deletion_scheduled: Optional[int] = None, + ): self.archive_state = archive_state self.archive_state_details = archive_state_details self.delete_state = delete_state @@ -1981,24 +2025,24 @@ def __init__(self, def to_dict(self) -> Dict: """Serializes the retention operation details into a dictionary.""" return { - "archive_state": self.archive_state.value or None, + "archive_state": self.archive_state or None, "archive_state_details": self.archive_state_details, - "delete_state": self.delete_state.value or None, + "delete_state": self.delete_state or None, "delete_state_details": self.delete_state_details, "time_archival_scheduled": self.time_archival_scheduled, - "time_deletion_scheduled": self.time_deletion_scheduled + "time_deletion_scheduled": self.time_deletion_scheduled, } @classmethod - def from_dict(cls, data: Dict) -> 'ModelRetentionOperationDetails': + def from_dict(cls, data: Dict) -> "ModelRetentionOperationDetails": """Constructs retention operation details from a dictionary.""" return cls( - archive_state = SettingStatus(data.get("archive_state")) or None, + 
archive_state=SettingStatus(data.get("archive_state")) or None, archive_state_details=data.get("archive_state_details"), - delete_state = SettingStatus(data.get("delete_state")) or None, + delete_state=SettingStatus(data.get("delete_state")) or None, delete_state_details=data.get("delete_state_details"), time_archival_scheduled=data.get("time_archival_scheduled"), - time_deletion_scheduled=data.get("time_deletion_scheduled") + time_deletion_scheduled=data.get("time_deletion_scheduled"), ) def to_json(self) -> str: @@ -2006,7 +2050,7 @@ def to_json(self) -> str: return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> 'ModelRetentionOperationDetails': + def from_json(cls, json_str: str) -> "ModelRetentionOperationDetails": """Constructs retention operation details from a JSON string.""" data = json.loads(json_str) return cls.from_dict(data) @@ -2017,17 +2061,20 @@ def to_yaml(self) -> str: def validate(self) -> bool: """Validates the retention operation details.""" - return all([ - self.archive_state is None or isinstance(self.archive_state, SettingStatus), - self.delete_state is None or isinstance(self.delete_state, SettingStatus), - self.time_archival_scheduled is None or isinstance(self.time_archival_scheduled, int), - self.time_deletion_scheduled is None or isinstance(self.time_deletion_scheduled, int), - ]) - + return all( + [ + self.archive_state is None or SettingStatus.is_valid(self.archive_state), + self.delete_state is None or SettingStatus.is_valid(self.delete_state), + self.time_archival_scheduled is None + or isinstance(self.time_archival_scheduled, int), + self.time_deletion_scheduled is None + or isinstance(self.time_deletion_scheduled, int), + ] + ) def __repr__(self): return self.to_yaml() - + class ModelBackupOperationDetails: """ @@ -2049,29 +2096,31 @@ class ModelBackupOperationDetails: Validates the backup operation details. 
""" - def __init__(self, - backup_state: Optional[SettingStatus] = None, - backup_state_details: Optional[str] = None, - time_last_backed_up: Optional[int] = None): - self.backup_state = backup_state + def __init__( + self, + backup_state: Optional[SettingStatus] = None, + backup_state_details: Optional[str] = None, + time_last_backed_up: Optional[int] = None, + ): + self.backup_state = backup_state self.backup_state_details = backup_state_details self.time_last_backed_up = time_last_backed_up def to_dict(self) -> Dict: """Serializes the backup operation details into a dictionary.""" return { - "backup_state": self.backup_state.value or None, + "backup_state": self.backup_state or None, "backup_state_details": self.backup_state_details, - "time_last_backed_up": self.time_last_backed_up + "time_last_backed_up": self.time_last_backed_up, } @classmethod - def from_dict(cls, data: Dict) -> 'ModelBackupOperationDetails': + def from_dict(cls, data: Dict) -> "ModelBackupOperationDetails": """Constructs backup operation details from a dictionary.""" return cls( backup_state=SettingStatus(data.get("backup_state")) or None, backup_state_details=data.get("backup_state_details"), - time_last_backed_up=data.get("time_last_backed_up") + time_last_backed_up=data.get("time_last_backed_up"), ) def to_json(self) -> str: @@ -2079,7 +2128,7 @@ def to_json(self) -> str: return json.dumps(self.to_dict()) @classmethod - def from_json(cls, json_str: str) -> 'ModelBackupOperationDetails': + def from_json(cls, json_str: str) -> "ModelBackupOperationDetails": """Constructs backup operation details from a JSON string.""" data = json.loads(json_str) return cls.from_dict(data) @@ -2090,11 +2139,13 @@ def to_yaml(self) -> str: def validate(self) -> bool: """Validates the backup operation details.""" - if self.backup_state is not None and not isinstance(self.backup_state, SettingStatus): + if self.backup_state is not None and not SettingStatus.is_valid(self.backup_state): return False - if 
self.time_last_backed_up is not None and not isinstance(self.time_last_backed_up, int): + if self.time_last_backed_up is not None and not isinstance( + self.time_last_backed_up, int + ): return False return True def __repr__(self): - return self.to_yaml() \ No newline at end of file + return self.to_yaml() diff --git a/tests/unitary/default_setup/model/test_datascience_model.py b/tests/unitary/default_setup/model/test_datascience_model.py index 58fb5a7ce..69d778278 100644 --- a/tests/unitary/default_setup/model/test_datascience_model.py +++ b/tests/unitary/default_setup/model/test_datascience_model.py @@ -161,14 +161,14 @@ "artifact_dir": "test_script_dir", }, "backupSetting": { - "isBackupEnabled": True, - "backupRegion": "us-phoenix-1", - "customerNotificationType": "ALL" + "is_backup_enabled": True, + "backup_region": "us-phoenix-1", + "customer_notification_type": "ALL" }, "retentionSetting": { - "archiveAfterDays": 30, - "deleteAfterDays": 90, - "customerNotificationType": "ALL" + "archive_after_days": 30, + "delete_after_days": 90, + "customer_notification_type": "ALL" }, "artifact": "ocid1.datasciencemodel.oc1.iad..zip", } @@ -718,14 +718,14 @@ def test__update_from_oci_dsc_model( ] }, "backupSetting": { - "isBackupEnabled": True, - "backupRegion": "us-phoenix-1", - "customerNotificationType": "ALL", + "is_backup_enabled": True, + "backup_region": "us-phoenix-1", + "customer_notification_type": "ALL", }, "retentionSetting": { - "archiveAfterDays": 30, - "deleteAfterDays": 90, - "customerNotificationType": "ALL", + "archive_after_days": 30, + "delete_after_days": 90, + "customer_notification_type": "ALL", }, "provenanceMetadata": { "git_branch": "master", diff --git a/tests/unitary/default_setup/model/test_model_metadata.py b/tests/unitary/default_setup/model/test_model_metadata.py index 6a261c800..fd9b530ab 100644 --- a/tests/unitary/default_setup/model/test_model_metadata.py +++ b/tests/unitary/default_setup/model/test_model_metadata.py @@ -1154,7 +1154,10 
@@ def test_from_dict(self): def test_to_json(self): """Test that to_json serializes the settings to a JSON string.""" - setting = ModelRetentionSetting(archive_after_days=30, delete_after_days=60, customer_notification_type=CustomerNotificationType.EMAIL) + setting = ModelRetentionSetting( + archive_after_days=30, + delete_after_days=60, + customer_notification_type=CustomerNotificationType.ALL) expected_json = json.dumps({ "archive_after_days": 30, "delete_after_days": 60, @@ -1266,7 +1269,7 @@ def test_to_json(self): def test_from_json(self): """Test that from_json correctly deserializes the details from a JSON string.""" json_str = json.dumps({ - "archive_state": "SUCCESS", + "archive_state": "SUCCEEDED", "archive_state_details": "Archived successfully", "delete_state": "PENDING", "delete_state_details": "Deletion pending", From d04df619f4ae07db1ea07ff3f77fac3dbd8f113c Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Mon, 21 Oct 2024 16:17:23 +0530 Subject: [PATCH 07/15] added restore model --- ads/model/service/oci_datascience_model.py | 34 ++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/ads/model/service/oci_datascience_model.py b/ads/model/service/oci_datascience_model.py index b5c1541b2..bb85b31d5 100644 --- a/ads/model/service/oci_datascience_model.py +++ b/ads/model/service/oci_datascience_model.py @@ -278,6 +278,40 @@ def get_artifact_info(self) -> Dict: raise ModelArtifactNotFoundError() return {} + @check_for_model_id( + msg="Model needs to be restored before the archived artifact content can be accessed." + ) + def restore_archived_model_artifact( + self, model_id: str, restore_model_for_hours_specified: Optional[int] = None + ) -> None: + """Restores the archived model artifact. + + Parameters + ---------- + model_id : str + The unique identifier of the model to restore. + restore_model_for_hours_specified : Optional[int] + The duration (in hours) for which the model should be restored. 
+ + Returns + ------- + None + + Raises + ------ + ModelArtifactNotFoundError + If model artifact not found. + """ + try: + # Call the underlying client method to restore the model artifact + return self.client.restore_archived_model_artifact( + model_id=model_id, + restore_model_for_hours_specified=restore_model_for_hours_specified, + ).data.content + except ServiceError as ex: + if ex.status == 404: + raise ModelArtifactNotFoundError() + @check_for_model_id( msg="Model needs to be saved to the Model Catalog before the artifact content can be read." ) From 4acfcd4d6b8067802c009913567a37323664ae54 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Wed, 23 Oct 2024 18:38:50 +0530 Subject: [PATCH 08/15] Changes as per comments --- ads/model/datascience_model.py | 505 +++++++++++++++--- ads/model/model_metadata.py | 497 ++--------------- ads/model/service/oci_datascience_model.py | 12 +- .../model/test_datascience_model.py | 4 +- .../model/test_model_metadata.py | 10 +- 5 files changed, 491 insertions(+), 537 deletions(-) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index 24a1eb6df..3fa0c468d 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -14,9 +14,11 @@ from typing import Dict, List, Optional, Union, Tuple import pandas +import yaml from jsonschema import ValidationError, validate from ads.common import utils +from ads.common.extended_enum import ExtendedEnumMeta from ads.common.object_storage_details import ObjectStorageDetails from ads.config import ( COMPARTMENT_OCID, @@ -35,12 +37,7 @@ ModelCustomMetadata, ModelCustomMetadataItem, ModelProvenanceMetadata, - ModelTaxonomyMetadata, - ModelBackupSetting, - ModelRetentionSetting, - ModelRetentionOperationDetails, - ModelBackupOperationDetails, -) + ModelTaxonomyMetadata, ) from ads.model.service.oci_datascience_model import ( ModelProvenanceNotFoundError, OCIDataScienceModel, @@ -49,7 +46,6 @@ logger = logging.getLogger(__name__) - 
_MAX_ARTIFACT_SIZE_IN_BYTES = 2147483648  # 2GB
MODEL_BY_REFERENCE_VERSION = "1.0"
MODEL_BY_REFERENCE_JSON_FILE_NAME = "model_description.json"


class BucketNotVersionedError(Exception):  # pragma: no cover
    def __init__(
        self,
        msg="Model artifact bucket is not versioned. Enable versioning on the bucket to proceed with model creation by reference.",
    ):
        super().__init__(msg)


class InvalidArtifactType(Exception):  # pragma: no cover
    pass


class CustomerNotificationType(str, metaclass=ExtendedEnumMeta):
    """Enum-like set of customer notification preferences for model
    backup/retention operations (NONE, ALL, ON_FAILURE, ON_SUCCESS)."""

    NONE = "NONE"
    ALL = "ALL"
    ON_FAILURE = "ON_FAILURE"
    ON_SUCCESS = "ON_SUCCESS"

    @classmethod
    def is_valid(cls, value):
        """Returns True if `value` is one of the supported notification types."""
        return value in (cls.NONE, cls.ALL, cls.ON_FAILURE, cls.ON_SUCCESS)

    @property
    def value(self):
        # Instances are plain strings; expose the string itself as `.value`
        # for parity with the OCI SDK enum interface.
        return str(self)


class SettingStatus(str, metaclass=ExtendedEnumMeta):
    """Enum to represent the status of backup/retention operations."""

    PENDING = "PENDING"
    SUCCEEDED = "SUCCEEDED"
    FAILED = "FAILED"

    @classmethod
    def is_valid(cls, state: str) -> bool:
        """Validates the given state against allowed SettingStatus values."""
        return state in (cls.PENDING, cls.SUCCEEDED, cls.FAILED)


class ModelBackupSetting:
    """
    Class that represents Model Backup Setting Details Metadata.

    Methods
    -------
    to_dict(self) -> Dict:
        Serializes the backup settings into a dictionary.
    from_dict(cls, data: Dict) -> 'ModelBackupSetting':
        Constructs backup settings from a dictionary.
    to_json(self) -> str:
        Serializes the backup settings into a JSON string.
    from_json(cls, json_str) -> 'ModelBackupSetting':
        Constructs backup settings from a JSON string or dictionary.
    to_yaml(self) -> str:
        Serializes the backup settings into a YAML string.
    validate(self) -> bool:
        Validates the backup settings details.
    """

    def __init__(
        self,
        is_backup_enabled: Optional[bool] = None,
        backup_region: Optional[str] = None,
        customer_notification_type: Optional[CustomerNotificationType] = None,
    ):
        # Backup is disabled unless explicitly requested.
        self.is_backup_enabled = (
            is_backup_enabled if is_backup_enabled is not None else False
        )
        self.backup_region = backup_region
        # Default to NONE so downstream serialization always has a valid value.
        self.customer_notification_type = (
            customer_notification_type or CustomerNotificationType.NONE
        )

    def to_dict(self) -> Dict:
        """Serializes the backup settings into a dictionary."""
        return {
            "is_backup_enabled": self.is_backup_enabled,
            "backup_region": self.backup_region,
            "customer_notification_type": self.customer_notification_type,
        }

    @classmethod
    def from_dict(cls, data: Dict) -> "ModelBackupSetting":
        """Constructs backup settings from a dictionary.

        A missing or None "customer_notification_type" falls back to the
        constructor default (NONE) instead of being coerced to the literal
        string "None".
        """
        notification_type = data.get("customer_notification_type")
        return cls(
            is_backup_enabled=data.get("is_backup_enabled"),
            backup_region=data.get("backup_region"),
            customer_notification_type=(
                CustomerNotificationType(notification_type)
                if notification_type
                else None
            ),
        )

    def to_json(self) -> str:
        """Serializes the backup settings into a JSON string."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str) -> "ModelBackupSetting":
        """Constructs backup settings from a JSON string or dictionary."""
        if isinstance(json_str, str):
            data = json.loads(json_str)
        else:
            data = json_str  # Assume it's already a dictionary.
        return cls.from_dict(data)

    def to_yaml(self) -> str:
        """Serializes the backup settings into a YAML string."""
        return yaml.dump(self.to_dict())

    def validate(self) -> bool:
        """Validates the backup settings details. Returns True if valid, False otherwise."""
        if not isinstance(self.is_backup_enabled, bool):
            return False
        if self.backup_region and not isinstance(self.backup_region, str):
            return False
        if not isinstance(
            self.customer_notification_type, str
        ) or not CustomerNotificationType.is_valid(self.customer_notification_type):
            return False
        return True

    def __repr__(self):
        return self.to_yaml()


class ModelRetentionSetting:
    """
    Class that represents Model Retention Setting Details Metadata.

    Methods
    -------
    to_dict(self) -> Dict:
        Serializes the retention settings into a dictionary.
    from_dict(cls, data: Dict) -> 'ModelRetentionSetting':
        Constructs retention settings from a dictionary.
    to_json(self) -> str:
        Serializes the retention settings into a JSON string.
    from_json(cls, json_str) -> 'ModelRetentionSetting':
        Constructs retention settings from a JSON string or dictionary.
    to_yaml(self) -> str:
        Serializes the retention settings into a YAML string.
    validate(self) -> bool:
        Validates the retention settings details.
    """

    def __init__(
        self,
        archive_after_days: Optional[int] = None,
        delete_after_days: Optional[int] = None,
        customer_notification_type: Optional[CustomerNotificationType] = None,
    ):
        self.archive_after_days = archive_after_days
        self.delete_after_days = delete_after_days
        # Default to NONE so downstream serialization always has a valid value.
        self.customer_notification_type = (
            customer_notification_type or CustomerNotificationType.NONE
        )

    def to_dict(self) -> Dict:
        """Serializes the retention settings into a dictionary."""
        return {
            "archive_after_days": self.archive_after_days,
            "delete_after_days": self.delete_after_days,
            "customer_notification_type": self.customer_notification_type,
        }

    @classmethod
    def from_dict(cls, data: Dict) -> "ModelRetentionSetting":
        """Constructs retention settings from a dictionary.

        A missing or None "customer_notification_type" falls back to the
        constructor default (NONE) instead of being coerced to the literal
        string "None".
        """
        notification_type = data.get("customer_notification_type")
        return cls(
            archive_after_days=data.get("archive_after_days"),
            delete_after_days=data.get("delete_after_days"),
            customer_notification_type=(
                CustomerNotificationType(notification_type)
                if notification_type
                else None
            ),
        )

    def to_json(self) -> str:
        """Serializes the retention settings into a JSON string."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str) -> "ModelRetentionSetting":
        """Constructs retention settings from a JSON string or dictionary."""
        if isinstance(json_str, str):
            data = json.loads(json_str)
        else:
            data = json_str  # Assume it's already a dictionary.
        return cls.from_dict(data)

    def to_yaml(self) -> str:
        """Serializes the retention settings into a YAML string."""
        return yaml.dump(self.to_dict())

    def validate(self) -> bool:
        """Validates the retention settings details. Returns True if valid, False otherwise."""
        if self.archive_after_days is not None and (
            not isinstance(self.archive_after_days, int) or self.archive_after_days < 0
        ):
            return False
        if self.delete_after_days is not None and (
            not isinstance(self.delete_after_days, int) or self.delete_after_days < 0
        ):
            return False
        if not isinstance(
            self.customer_notification_type, str
        ) or not CustomerNotificationType.is_valid(self.customer_notification_type):
            return False
        return True

    def __repr__(self):
        return self.to_yaml()


class ModelRetentionOperationDetails:
    """
    Class that represents Model Retention Operation Details Metadata.

    Methods
    -------
    to_dict(self) -> Dict:
        Serializes the retention operation details into a dictionary.
    from_dict(cls, data: Dict) -> 'ModelRetentionOperationDetails':
        Constructs retention operation details from a dictionary.
    to_json(self) -> str:
        Serializes the retention operation details into a JSON string.
    from_json(cls, json_str: str) -> 'ModelRetentionOperationDetails':
        Constructs retention operation details from a JSON string.
    to_yaml(self) -> str:
        Serializes the retention operation details into a YAML string.
    validate(self) -> bool:
        Validates the retention operation details.
    """

    def __init__(
        self,
        archive_state: Optional[SettingStatus] = None,
        archive_state_details: Optional[str] = None,
        delete_state: Optional[SettingStatus] = None,
        delete_state_details: Optional[str] = None,
        time_archival_scheduled: Optional[int] = None,
        time_deletion_scheduled: Optional[int] = None,
    ):
        self.archive_state = archive_state
        self.archive_state_details = archive_state_details
        self.delete_state = delete_state
        self.delete_state_details = delete_state_details
        self.time_archival_scheduled = time_archival_scheduled
        self.time_deletion_scheduled = time_deletion_scheduled

    def to_dict(self) -> Dict:
        """Serializes the retention operation details into a dictionary."""
        return {
            "archive_state": self.archive_state or None,
            "archive_state_details": self.archive_state_details,
            "delete_state": self.delete_state or None,
            "delete_state_details": self.delete_state_details,
            "time_archival_scheduled": self.time_archival_scheduled,
            "time_deletion_scheduled": self.time_deletion_scheduled,
        }

    @classmethod
    def from_dict(cls, data: Dict) -> "ModelRetentionOperationDetails":
        """Constructs retention operation details from a dictionary.

        Missing or None state values stay None instead of being coerced to
        the literal string "None".
        """
        raw_archive_state = data.get("archive_state")
        raw_delete_state = data.get("delete_state")
        return cls(
            archive_state=(
                SettingStatus(raw_archive_state) if raw_archive_state else None
            ),
            archive_state_details=data.get("archive_state_details"),
            delete_state=SettingStatus(raw_delete_state) if raw_delete_state else None,
            delete_state_details=data.get("delete_state_details"),
            time_archival_scheduled=data.get("time_archival_scheduled"),
            time_deletion_scheduled=data.get("time_deletion_scheduled"),
        )

    def to_json(self) -> str:
        """Serializes the retention operation details into a JSON string."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> "ModelRetentionOperationDetails":
        """Constructs retention operation details from a JSON string."""
        data = json.loads(json_str)
        return cls.from_dict(data)

    def to_yaml(self) -> str:
        """Serializes the retention operation details into a YAML string."""
        return yaml.dump(self.to_dict())

    def validate(self) -> bool:
        """Validates the retention operation details."""
        return all(
            [
                self.archive_state is None or SettingStatus.is_valid(self.archive_state),
                self.delete_state is None or SettingStatus.is_valid(self.delete_state),
                self.time_archival_scheduled is None
                or isinstance(self.time_archival_scheduled, int),
                self.time_deletion_scheduled is None
                or isinstance(self.time_deletion_scheduled, int),
            ]
        )

    def __repr__(self):
        return self.to_yaml()


class ModelBackupOperationDetails:
    """
    Class that represents Model Backup Operation Details Metadata.

    Methods
    -------
    to_dict(self) -> Dict:
        Serializes the backup operation details into a dictionary.
    from_dict(cls, data: Dict) -> 'ModelBackupOperationDetails':
        Constructs backup operation details from a dictionary.
    to_json(self) -> str:
        Serializes the backup operation details into a JSON string.
    from_json(cls, json_str: str) -> 'ModelBackupOperationDetails':
        Constructs backup operation details from a JSON string.
    to_yaml(self) -> str:
        Serializes the backup operation details into a YAML string.
    validate(self) -> bool:
        Validates the backup operation details.
    """

    def __init__(
        self,
        backup_state: Optional[SettingStatus] = None,
        backup_state_details: Optional[str] = None,
        time_last_backed_up: Optional[int] = None,
    ):
        self.backup_state = backup_state
        self.backup_state_details = backup_state_details
        self.time_last_backed_up = time_last_backed_up

    def to_dict(self) -> Dict:
        """Serializes the backup operation details into a dictionary."""
        return {
            "backup_state": self.backup_state or None,
            "backup_state_details": self.backup_state_details,
            "time_last_backed_up": self.time_last_backed_up,
        }

    @classmethod
    def from_dict(cls, data: Dict) -> "ModelBackupOperationDetails":
        """Constructs backup operation details from a dictionary.

        A missing or None "backup_state" stays None instead of being coerced
        to the literal string "None".
        """
        raw_backup_state = data.get("backup_state")
        return cls(
            backup_state=SettingStatus(raw_backup_state) if raw_backup_state else None,
            backup_state_details=data.get("backup_state_details"),
            time_last_backed_up=data.get("time_last_backed_up"),
        )

    def to_json(self) -> str:
        """Serializes the backup operation details into a JSON string."""
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> "ModelBackupOperationDetails":
        """Constructs backup operation details from a JSON string."""
        data = json.loads(json_str)
        return cls.from_dict(data)

    def to_yaml(self) -> str:
        """Serializes the backup operation details into a YAML string."""
        return yaml.dump(self.to_dict())

    def validate(self) -> bool:
        """Validates the backup operation details."""
        if self.backup_state is not None and not SettingStatus.is_valid(
            self.backup_state
        ):
            return False
        if self.time_last_backed_up is not None and not isinstance(
            self.time_last_backed_up, int
        ):
            return False
        return True

    def __repr__(self):
        return self.to_yaml()
@@ -133,11 +497,6 @@ class DataScienceModel(Builder): backup_operation_details: ModelBackupOperationDetails The value to assign to the backup_operation_details property for the Model. - - - - - Methods ------- create(self, **kwargs) -> "DataScienceModel" @@ -464,7 +823,7 @@ def defined_tags(self) -> Dict[str, Dict[str, object]]: return self.get_spec(self.CONST_DEFINED_TAG) def with_defined_tags( - self, **kwargs: Dict[str, Dict[str, object]] + self, **kwargs: Dict[str, Dict[str, object]] ) -> "DataScienceModel": """Sets defined tags. @@ -545,7 +904,7 @@ def defined_metadata_list(self) -> ModelTaxonomyMetadata: return self.get_spec(self.CONST_DEFINED_METADATA) def with_defined_metadata_list( - self, metadata: Union[ModelTaxonomyMetadata, Dict] + self, metadata: Union[ModelTaxonomyMetadata, Dict] ) -> "DataScienceModel": """Sets model taxonomy (defined) metadata. @@ -569,7 +928,7 @@ def custom_metadata_list(self) -> ModelCustomMetadata: return self.get_spec(self.CONST_CUSTOM_METADATA) def with_custom_metadata_list( - self, metadata: Union[ModelCustomMetadata, Dict] + self, metadata: Union[ModelCustomMetadata, Dict] ) -> "DataScienceModel": """Sets model custom metadata. @@ -593,7 +952,7 @@ def provenance_metadata(self) -> ModelProvenanceMetadata: return self.get_spec(self.CONST_PROVENANCE_METADATA) def with_provenance_metadata( - self, metadata: Union[ModelProvenanceMetadata, Dict] + self, metadata: Union[ModelProvenanceMetadata, Dict] ) -> "DataScienceModel": """Sets model provenance metadata. 
@@ -686,7 +1045,7 @@ def model_file_description(self) -> dict: return self.get_spec(self.CONST_MODEL_FILE_DESCRIPTION) def with_model_file_description( - self, json_dict: dict = None, json_string: str = None, json_uri: str = None + self, json_dict: dict = None, json_string: str = None, json_uri: str = None ): """Sets the json file description for model passed by reference Parameters @@ -744,7 +1103,7 @@ def retention_setting(self) -> ModelRetentionSetting: return self.get_spec(self.CONST_RETENTION_SETTING) def with_retention_setting( - self, retention_setting: Union[Dict, ModelRetentionSetting] + self, retention_setting: Union[Dict, ModelRetentionSetting] ) -> "DataScienceModel": """ Sets the retention setting details for the model. @@ -773,7 +1132,7 @@ def backup_setting(self) -> ModelBackupSetting: return self.get_spec(self.CONST_BACKUP_SETTING) def with_backup_setting( - self, backup_setting: Union[Dict, ModelBackupSetting] + self, backup_setting: Union[Dict, ModelBackupSetting] ) -> "DataScienceModel": """ Sets the model's backup setting details. @@ -924,15 +1283,15 @@ def create(self, **kwargs) -> "DataScienceModel": return self def upload_artifact( - self, - bucket_uri: Optional[str] = None, - auth: Optional[Dict] = None, - region: Optional[str] = None, - overwrite_existing_artifact: Optional[bool] = True, - remove_existing_artifact: Optional[bool] = True, - timeout: Optional[int] = None, - parallel_process_count: int = utils.DEFAULT_PARALLEL_PROCESS_COUNT, - model_by_reference: Optional[bool] = False, + self, + bucket_uri: Optional[str] = None, + auth: Optional[Dict] = None, + region: Optional[str] = None, + overwrite_existing_artifact: Optional[bool] = True, + remove_existing_artifact: Optional[bool] = True, + timeout: Optional[int] = None, + parallel_process_count: int = utils.DEFAULT_PARALLEL_PROCESS_COUNT, + model_by_reference: Optional[bool] = False, ) -> None: """Uploads model artifacts to the model catalog. 
@@ -1002,7 +1361,7 @@ def upload_artifact( bucket_uri = self.artifact if not model_by_reference and ( - bucket_uri or utils.folder_size(self.artifact) > _MAX_ARTIFACT_SIZE_IN_BYTES + bucket_uri or utils.folder_size(self.artifact) > _MAX_ARTIFACT_SIZE_IN_BYTES ): if not bucket_uri: raise ModelArtifactSizeError( @@ -1034,10 +1393,11 @@ def _remove_file_description_artifact(self): if self.local_copy_dir: shutil.rmtree(self.local_copy_dir, ignore_errors=True) + @classmethod def restore_model( - self, - model_id: str, - restore_model_for_hours_specified: Optional[int] = None, + cls, + model_id: str, + restore_model_for_hours_specified: Optional[int] = None, ): """ Restore archived model artifact. @@ -1046,7 +1406,7 @@ def restore_model( ---------- model_id : str The `OCID` of the model to be restored. - restore_model_for_hours_specified : Optional[int] + restore_model_for_hours_specified : Optional[int] Duration in hours for which the archived model is available for access. Returns @@ -1065,28 +1425,28 @@ def restore_model( # Optional: Validate restore_model_for_hours_specified if restore_model_for_hours_specified is not None: if ( - not isinstance(restore_model_for_hours_specified, int) - or restore_model_for_hours_specified <= 0 + not isinstance(restore_model_for_hours_specified, int) + or restore_model_for_hours_specified <= 0 ): raise ValueError( "restore_model_for_hours_specified must be a positive integer." 
) - self.dsc_model.restore_archived_model_artifact( + cls.dsc_model.restore_archived_model_artifact( model_id=model_id, restore_model_for_hours_specified=restore_model_for_hours_specified, ) def download_artifact( - self, - target_dir: str, - auth: Optional[Dict] = None, - force_overwrite: Optional[bool] = False, - bucket_uri: Optional[str] = None, - region: Optional[str] = None, - overwrite_existing_artifact: Optional[bool] = True, - remove_existing_artifact: Optional[bool] = True, - timeout: Optional[int] = None, + self, + target_dir: str, + auth: Optional[Dict] = None, + force_overwrite: Optional[bool] = False, + bucket_uri: Optional[str] = None, + region: Optional[str] = None, + overwrite_existing_artifact: Optional[bool] = True, + remove_existing_artifact: Optional[bool] = True, + timeout: Optional[int] = None, ): """Downloads model artifacts from the model catalog. @@ -1161,9 +1521,9 @@ def download_artifact( ) if ( - artifact_size > _MAX_ARTIFACT_SIZE_IN_BYTES - or bucket_uri - or model_by_reference + artifact_size > _MAX_ARTIFACT_SIZE_IN_BYTES + or bucket_uri + or model_by_reference ): artifact_downloader = LargeArtifactDownloader( dsc_model=self.dsc_model, @@ -1222,8 +1582,8 @@ def update(self, **kwargs) -> "DataScienceModel": return self.sync() def delete( - self, - delete_associated_model_deployment: Optional[bool] = False, + self, + delete_associated_model_deployment: Optional[bool] = False, ) -> "DataScienceModel": """Removes model from the model catalog. @@ -1242,7 +1602,7 @@ def delete( @classmethod def list( - cls, compartment_id: str = None, project_id: str = None, **kwargs + cls, compartment_id: str = None, project_id: str = None, **kwargs ) -> List["DataScienceModel"]: """Lists datascience models in a given compartment. 
@@ -1269,7 +1629,7 @@ def list( @classmethod def list_df( - cls, compartment_id: str = None, project_id: str = None, **kwargs + cls, compartment_id: str = None, project_id: str = None, **kwargs ) -> "pandas.DataFrame": """Lists datascience models in a given compartment. @@ -1289,7 +1649,7 @@ def list_df( """ records = [] for model in OCIDataScienceModel.list_resource( - compartment_id, project_id=project_id, **kwargs + compartment_id, project_id=project_id, **kwargs ): records.append( { @@ -1369,12 +1729,10 @@ def _to_oci_dsc_model(self, **kwargs): dsc_spec[dsc_attr] = value dsc_spec.update(**kwargs) - print("Model Dsc spec") - print(dsc_spec) return OCIDataScienceModel(**dsc_spec) def _update_from_oci_dsc_model( - self, dsc_model: OCIDataScienceModel + self, dsc_model: OCIDataScienceModel ) -> "DataScienceModel": """Update the properties from an OCIDataScienceModel object. @@ -1398,10 +1756,7 @@ def _update_from_oci_dsc_model( } # Update the main properties - self.dsc_model = dsc_model - print("Model Details from here") - print(dsc_model) for infra_attr, dsc_attr in self.attribute_map.items(): value = utils.get_value(dsc_model, dsc_attr) if value: @@ -1648,12 +2003,12 @@ def _download_file_description_artifact(self) -> Tuple[Union[str, List[str]], in return bucket_uri[0] if len(bucket_uri) == 1 else bucket_uri, artifact_size def add_artifact( - self, - uri: Optional[str] = None, - namespace: Optional[str] = None, - bucket: Optional[str] = None, - prefix: Optional[str] = None, - files: Optional[List[str]] = None, + self, + uri: Optional[str] = None, + namespace: Optional[str] = None, + bucket: Optional[str] = None, + prefix: Optional[str] = None, + files: Optional[List[str]] = None, ): """ Adds information about objects in a specified bucket to the model description JSON. 
@@ -1802,11 +2157,11 @@ def list_obj_versions_unpaginated(): self.set_spec(self.CONST_MODEL_FILE_DESCRIPTION, tmp_model_file_description) def remove_artifact( - self, - uri: Optional[str] = None, - namespace: Optional[str] = None, - bucket: Optional[str] = None, - prefix: Optional[str] = None, + self, + uri: Optional[str] = None, + namespace: Optional[str] = None, + bucket: Optional[str] = None, + prefix: Optional[str] = None, ): """ Removes information about objects in a specified bucket or using a specified URI from the model description JSON. @@ -1853,9 +2208,9 @@ def remove_artifact( def findModelIdx(): for idx, model in enumerate(self.model_file_description["models"]): if ( - model["namespace"], - model["bucketName"], - (model["prefix"] if ("prefix" in model) else None), + model["namespace"], + model["bucketName"], + (model["prefix"] if ("prefix" in model) else None), ) == (namespace, bucket, "" if not prefix else prefix): return idx return -1 diff --git a/ads/model/model_metadata.py b/ads/model/model_metadata.py index 58f99c14b..fd0059d64 100644 --- a/ads/model/model_metadata.py +++ b/ads/model/model_metadata.py @@ -31,7 +31,6 @@ except: from yaml import Dumper as dumper - logging.basicConfig(stream=sys.stdout, level=logging.INFO) logger = logging.getLogger("ADS") @@ -256,9 +255,9 @@ def to_json(self): return json.dumps(self.to_dict()) def to_json_file( - self, - file_path: str, - storage_options: dict = None, + self, + file_path: str, + storage_options: dict = None, ) -> None: """Saves the metadata item value to a local file or object storage. 
@@ -313,9 +312,9 @@ def to_json_file( storage_options = factory.default_storage_options or {"config": {}} with fsspec.open( - file_path, - mode="w", - **(storage_options), + file_path, + mode="w", + **(storage_options), ) as f: f.write(json.dumps(self.value)) @@ -400,9 +399,9 @@ class ModelTaxonomyMetadataItem(ModelMetadataItem): _FIELDS = ["key", "value"] def __init__( - self, - key: str, - value: str = None, + self, + key: str, + value: str = None, ): self.key = key self.value = value @@ -500,17 +499,17 @@ def validate(self) -> bool: If invalid Framework provided. """ if ( - self.key.lower() == MetadataTaxonomyKeys.USE_CASE_TYPE.lower() - and self.value - and (not isinstance(self.value, str) or self.value not in UseCaseType) + self.key.lower() == MetadataTaxonomyKeys.USE_CASE_TYPE.lower() + and self.value + and (not isinstance(self.value, str) or self.value not in UseCaseType) ): raise ValueError( f"Invalid value of `UseCaseType`. Choose from {UseCaseType.values()}." ) if ( - self.key.lower() == MetadataTaxonomyKeys.FRAMEWORK.lower() - and self.value - and (not isinstance(self.value, str) or self.value not in Framework) + self.key.lower() == MetadataTaxonomyKeys.FRAMEWORK.lower() + and self.value + and (not isinstance(self.value, str) or self.value not in Framework) ): raise ValueError( f"Invalid value of `Framework`. Choose from {Framework.values()}." 
@@ -557,11 +556,11 @@ class ModelCustomMetadataItem(ModelTaxonomyMetadataItem): _FIELDS = ["key", "value", "description", "category"] def __init__( - self, - key: str, - value: str = None, - description: str = None, - category: str = None, + self, + key: str, + value: str = None, + description: str = None, + category: str = None, ): super().__init__(key=key, value=value) self.description = description @@ -689,8 +688,8 @@ def validate(self) -> bool: raise MetadataValueTooLong(self.key, len(value)) if ( - self.description - and len(self.description) > METADATA_DESCRIPTION_LENGTH_LIMIT + self.description + and len(self.description) > METADATA_DESCRIPTION_LENGTH_LIMIT ): raise MetadataDescriptionTooLong(self.key, len(self.description)) @@ -730,7 +729,7 @@ def __init__(self): self._items = set() def get( - self, key: str, value: Optional[Any] = _sentinel + self, key: str, value: Optional[Any] = _sentinel ) -> Union[ModelMetadataItem, Any]: """Returns the model metadata item by provided key. @@ -891,9 +890,9 @@ def _to_oci_metadata(self): return [item._to_oci_metadata() for item in self._items] def to_json_file( - self, - file_path: str, - storage_options: dict = None, + self, + file_path: str, + storage_options: dict = None, ) -> None: """Saves the metadata to a local file or object storage. @@ -948,9 +947,9 @@ def to_json_file( storage_options = factory.default_storage_options or {"config": {}} with fsspec.open( - file_path, - mode="w", - **(storage_options), + file_path, + mode="w", + **(storage_options), ) as f: f.write(self.to_json()) @@ -1082,12 +1081,12 @@ def __init__(self): self._items = set() def add( - self, - key: str, - value: str, - description: str = "", - category: str = MetadataCustomCategory.OTHER, - replace: bool = False, + self, + key: str, + value: str, + description: str = "", + category: str = MetadataCustomCategory.OTHER, + replace: bool = False, ) -> None: """Adds a new model metadata item. Overrides the existing one if replace flag is True. 
@@ -1396,10 +1395,10 @@ def from_dict(cls, data: Dict) -> "ModelCustomMetadata": In case of the wrong input data format. """ if ( - not data - or not isinstance(data, Dict) - or not "data" in data - or not isinstance(data["data"], List) + not data + or not isinstance(data, Dict) + or not "data" in data + or not isinstance(data["data"], List) ): raise ValueError( "An error occurred when attempting to deserialize the model custom metadata from a dictionary. " @@ -1548,10 +1547,10 @@ def from_dict(cls, data: Dict) -> "ModelTaxonomyMetadata": In case of the wrong input data format. """ if ( - not data - or not isinstance(data, Dict) - or not "data" in data - or not isinstance(data["data"], List) + not data + or not isinstance(data, Dict) + or not "data" in data + or not isinstance(data["data"], List) ): raise ValueError( "An error occurred when attempting to deserialize the model taxonomy metadata from a dictionary. " @@ -1586,10 +1585,10 @@ class ModelProvenanceMetadata(DataClassSerializable): @classmethod def fetch_training_code_details( - cls, - training_script_path: str = None, - training_id: str = None, - artifact_dir: str = None, + cls, + training_script_path: str = None, + training_id: str = None, + artifact_dir: str = None, ): """Fetches the training code details: repo, git_branch, git_commit, repository_url, training_script_path and training_id. 
@@ -1672,15 +1671,15 @@ def assert_path_not_dirty(self, path: str, ignore: bool): if self.repo is not None and not ignore: path_abs = os.path.abspath(path) if ( - os.path.commonpath([path_abs, self.repo.working_dir]) - == self.repo.working_dir + os.path.commonpath([path_abs, self.repo.working_dir]) + == self.repo.working_dir ): path_relpath = os.path.relpath(path_abs, self.repo.working_dir) if self.repo.is_dirty(path=path_relpath) or any( - [ - os.path.commonpath([path_relpath, untracked]) == path_relpath - for untracked in self.repo.untracked_files - ] + [ + os.path.commonpath([path_relpath, untracked]) == path_relpath + for untracked in self.repo.untracked_files + ] ): raise ChangesNotCommitted(path_abs) @@ -1703,7 +1702,7 @@ def _to_oci_metadata(self) -> oci.data_science.models.ModelProvenance: @classmethod def _from_oci_metadata( - cls, model_provenance: oci.data_science.models.ModelProvenance + cls, model_provenance: oci.data_science.models.ModelProvenance ) -> "ModelProvenanceMetadata": """Creates a new model provenance metadata item from the `oci.data_science.models.ModelProvenance` object. 
@@ -1761,391 +1760,3 @@ def __repr__(self): Serialized version of object as a YAML string """ return self.to_yaml() - - -class CustomerNotificationType(str, metaclass=ExtendedEnumMeta): - NONE = "NONE" - ALL = "ALL" - ON_FAILURE = "ON_FAILURE" - ON_SUCCESS = "ON_SUCCESS" - - _value_map = { - "NONE": NONE, - "ALL": ALL, - "ON_FAILURE": ON_FAILURE, - "ON_SUCCESS": ON_SUCCESS, - } - - @classmethod - def create(cls, key): - if key in cls._value_map: - return cls._value_map[key] - raise ValueError(f"Invalid CustomerNotificationType: {key}") - - @classmethod - def from_string(cls, value): - for member in cls: - if member.value == value: - return member - raise ValueError(f"Invalid CustomerNotificationType: {value}") - - @classmethod - def is_valid(cls, value): - return value in (cls.NONE, cls.ALL, cls.ON_FAILURE, cls.ON_SUCCESS) - - @property - def value(self): - return str(self) - - -class ModelBackupSetting: - """ - Class that represents Model Backup Setting Details Metadata. - - Methods - ------- - to_dict(self) -> Dict: - Serializes the backup settings into a dictionary. - from_dict(cls, data: Dict) -> 'ModelBackupSetting': - Constructs backup settings from a dictionary. - to_json(self) -> str: - Serializes the backup settings into a JSON string. - from_json(cls, json_str: str) -> 'ModelBackupSetting': - Constructs backup settings from a JSON string. - to_yaml(self) -> str: - Serializes the backup settings into a YAML string. - validate(self) -> bool: - Validates the backup settings details. 
- """ - - def __init__( - self, - is_backup_enabled: Optional[bool] = None, - backup_region: Optional[str] = None, - customer_notification_type: Optional[CustomerNotificationType] = None, - ): - self.is_backup_enabled = ( - is_backup_enabled if is_backup_enabled is not None else False - ) - self.backup_region = backup_region - self.customer_notification_type = ( - customer_notification_type or CustomerNotificationType.NONE - ) - - def to_dict(self) -> Dict: - """Serializes the backup settings into a dictionary.""" - return { - "is_backup_enabled": self.is_backup_enabled, - "backup_region": self.backup_region, - "customer_notification_type": self.customer_notification_type, - } - - @classmethod - def from_dict(cls, data: Dict) -> "ModelBackupSetting": - """Constructs backup settings from a dictionary.""" - return cls( - is_backup_enabled=data.get("is_backup_enabled"), - backup_region=data.get("backup_region"), - customer_notification_type=CustomerNotificationType( - data.get("customer_notification_type") - ) - or None, - ) - - def to_json(self) -> str: - """Serializes the backup settings into a JSON string.""" - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str) -> "ModelBackupSetting": - """Constructs backup settings from a JSON string or dictionary.""" - if isinstance(json_str, str): - data = json.loads(json_str) - else: - data = json_str # Assume it's already a dictionary or appropriate type - - return cls.from_dict(data) - - def to_yaml(self) -> str: - """Serializes the backup settings into a YAML string.""" - return yaml.dump(self.to_dict()) - - def validate(self) -> bool: - """Validates the backup settings details. 
Returns True if valid, False otherwise.""" - if not isinstance(self.is_backup_enabled, bool): - return False - if self.backup_region and not isinstance(self.backup_region, str): - return False - if not isinstance(self.customer_notification_type, str) \ - or not CustomerNotificationType.is_valid(self.customer_notification_type): - return False - return True - - def __repr__(self): - return self.to_yaml() - - -class ModelRetentionSetting: - """ - Class that represents Model Retention Setting Details Metadata. - - Methods - ------- - to_dict(self) -> Dict: - Serializes the retention settings into a dictionary. - from_dict(cls, data: Dict) -> 'ModelRetentionSetting': - Constructs retention settings from a dictionary. - to_json(self) -> str: - Serializes the retention settings into a JSON string. - from_json(cls, json_str: str) -> 'ModelRetentionSetting': - Constructs retention settings from a JSON string. - to_yaml(self) -> str: - Serializes the retention settings into a YAML string. - validate(self) -> bool: - Validates the retention settings details. 
- """ - - def __init__( - self, - archive_after_days: Optional[int] = None, - delete_after_days: Optional[int] = None, - customer_notification_type: Optional[CustomerNotificationType] = None, - ): - self.archive_after_days = archive_after_days - self.delete_after_days = delete_after_days - self.customer_notification_type = ( - customer_notification_type or CustomerNotificationType.NONE - ) - - def to_dict(self) -> Dict: - """Serializes the retention settings into a dictionary.""" - return { - "archive_after_days": self.archive_after_days, - "delete_after_days": self.delete_after_days, - "customer_notification_type": self.customer_notification_type, - } - - @classmethod - def from_dict(cls, data: Dict) -> "ModelRetentionSetting": - """Constructs retention settings from a dictionary.""" - return cls( - archive_after_days=data.get("archive_after_days"), - delete_after_days=data.get("delete_after_days"), - customer_notification_type=CustomerNotificationType( - data.get("customer_notification_type") - ) - or None, - ) - - def to_json(self) -> str: - """Serializes the retention settings into a JSON string.""" - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str) -> "ModelRetentionSetting": - """Constructs retention settings from a JSON string.""" - if isinstance(json_str, str): - data = json.loads(json_str) - else: - data = json_str - return cls.from_dict(data) - - def to_yaml(self) -> str: - """Serializes the retention settings into a YAML string.""" - return yaml.dump(self.to_dict()) - - def validate(self) -> bool: - """Validates the retention settings details. 
Returns True if valid, False otherwise.""" - if self.archive_after_days is not None and ( - not isinstance(self.archive_after_days, int) or self.archive_after_days < 0 - ): - return False - if self.delete_after_days is not None and ( - not isinstance(self.delete_after_days, int) or self.delete_after_days < 0 - ): - return False - if not isinstance(self.customer_notification_type, str) or not \ - CustomerNotificationType.is_valid(self.customer_notification_type): - return False - return True - - def __repr__(self): - return self.to_yaml() - - -class SettingStatus(str, metaclass=ExtendedEnumMeta): - """Enum to represent the status of retention settings.""" - - PENDING = "PENDING" - SUCCEEDED = "SUCCEEDED" - FAILED = "FAILED" - - @classmethod - def is_valid(cls, state: str) -> bool: - """Validates the given state against allowed SettingStatus values.""" - return state in (cls.PENDING, cls.SUCCEEDED, cls.FAILED) - - -class ModelRetentionOperationDetails: - """ - Class that represents Model Retention Operation Details Metadata. - - Methods - ------- - to_dict(self) -> Dict: - Serializes the retention operation details into a dictionary. - from_dict(cls, data: Dict) -> 'ModelRetentionOperationDetails': - Constructs retention operation details from a dictionary. - to_json(self) -> str: - Serializes the retention operation details into a JSON string. - from_json(cls, json_str: str) -> 'ModelRetentionOperationDetails': - Constructs retention operation details from a JSON string. - to_yaml(self) -> str: - Serializes the retention operation details into a YAML string. - validate(self) -> bool: - Validates the retention operation details. 
- """ - - def __init__( - self, - archive_state: Optional[SettingStatus] = None, - archive_state_details: Optional[str] = None, - delete_state: Optional[SettingStatus] = None, - delete_state_details: Optional[str] = None, - time_archival_scheduled: Optional[int] = None, - time_deletion_scheduled: Optional[int] = None, - ): - self.archive_state = archive_state - self.archive_state_details = archive_state_details - self.delete_state = delete_state - self.delete_state_details = delete_state_details - self.time_archival_scheduled = time_archival_scheduled - self.time_deletion_scheduled = time_deletion_scheduled - - def to_dict(self) -> Dict: - """Serializes the retention operation details into a dictionary.""" - return { - "archive_state": self.archive_state or None, - "archive_state_details": self.archive_state_details, - "delete_state": self.delete_state or None, - "delete_state_details": self.delete_state_details, - "time_archival_scheduled": self.time_archival_scheduled, - "time_deletion_scheduled": self.time_deletion_scheduled, - } - - @classmethod - def from_dict(cls, data: Dict) -> "ModelRetentionOperationDetails": - """Constructs retention operation details from a dictionary.""" - return cls( - archive_state=SettingStatus(data.get("archive_state")) or None, - archive_state_details=data.get("archive_state_details"), - delete_state=SettingStatus(data.get("delete_state")) or None, - delete_state_details=data.get("delete_state_details"), - time_archival_scheduled=data.get("time_archival_scheduled"), - time_deletion_scheduled=data.get("time_deletion_scheduled"), - ) - - def to_json(self) -> str: - """Serializes the retention operation details into a JSON string.""" - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> "ModelRetentionOperationDetails": - """Constructs retention operation details from a JSON string.""" - data = json.loads(json_str) - return cls.from_dict(data) - - def to_yaml(self) -> str: - """Serializes the 
retention operation details into a YAML string.""" - return yaml.dump(self.to_dict()) - - def validate(self) -> bool: - """Validates the retention operation details.""" - return all( - [ - self.archive_state is None or SettingStatus.is_valid(self.archive_state), - self.delete_state is None or SettingStatus.is_valid(self.delete_state), - self.time_archival_scheduled is None - or isinstance(self.time_archival_scheduled, int), - self.time_deletion_scheduled is None - or isinstance(self.time_deletion_scheduled, int), - ] - ) - - def __repr__(self): - return self.to_yaml() - - -class ModelBackupOperationDetails: - """ - Class that represents Model Backup Operation Details Metadata. - - Methods - ------- - to_dict(self) -> Dict: - Serializes the backup operation details into a dictionary. - from_dict(cls, data: Dict) -> 'ModelBackupOperationDetails': - Constructs backup operation details from a dictionary. - to_json(self) -> str: - Serializes the backup operation details into a JSON string. - from_json(cls, json_str: str) -> 'ModelBackupOperationDetails': - Constructs backup operation details from a JSON string. - to_yaml(self) -> str: - Serializes the backup operation details into a YAML string. - validate(self) -> bool: - Validates the backup operation details. 
- """ - - def __init__( - self, - backup_state: Optional[SettingStatus] = None, - backup_state_details: Optional[str] = None, - time_last_backed_up: Optional[int] = None, - ): - self.backup_state = backup_state - self.backup_state_details = backup_state_details - self.time_last_backed_up = time_last_backed_up - - def to_dict(self) -> Dict: - """Serializes the backup operation details into a dictionary.""" - return { - "backup_state": self.backup_state or None, - "backup_state_details": self.backup_state_details, - "time_last_backed_up": self.time_last_backed_up, - } - - @classmethod - def from_dict(cls, data: Dict) -> "ModelBackupOperationDetails": - """Constructs backup operation details from a dictionary.""" - return cls( - backup_state=SettingStatus(data.get("backup_state")) or None, - backup_state_details=data.get("backup_state_details"), - time_last_backed_up=data.get("time_last_backed_up"), - ) - - def to_json(self) -> str: - """Serializes the backup operation details into a JSON string.""" - return json.dumps(self.to_dict()) - - @classmethod - def from_json(cls, json_str: str) -> "ModelBackupOperationDetails": - """Constructs backup operation details from a JSON string.""" - data = json.loads(json_str) - return cls.from_dict(data) - - def to_yaml(self) -> str: - """Serializes the backup operation details into a YAML string.""" - return yaml.dump(self.to_dict()) - - def validate(self) -> bool: - """Validates the backup operation details.""" - if self.backup_state is not None and not SettingStatus.is_valid(self.backup_state): - return False - if self.time_last_backed_up is not None and not isinstance( - self.time_last_backed_up, int - ): - return False - return True - - def __repr__(self): - return self.to_yaml() diff --git a/ads/model/service/oci_datascience_model.py b/ads/model/service/oci_datascience_model.py index bb85b31d5..a61bb8565 100644 --- a/ads/model/service/oci_datascience_model.py +++ b/ads/model/service/oci_datascience_model.py @@ -302,15 +302,9 
@@ def restore_archived_model_artifact( ModelArtifactNotFoundError If model artifact not found. """ - try: - # Call the underlying client method to restore the model artifact - return self.client.restore_archived_model_artifact( - model_id=model_id, - restore_model_for_hours_specified=restore_model_for_hours_specified, - ).data.content - except ServiceError as ex: - if ex.status == 404: - raise ModelArtifactNotFoundError() + return self.client.restore_archived_model_artifact( + model_id=model_id, + restore_model_for_hours_specified=restore_model_for_hours_specified).headers["opc-work-request-id"] @check_for_model_id( msg="Model needs to be saved to the Model Catalog before the artifact content can be read." diff --git a/tests/unitary/default_setup/model/test_datascience_model.py b/tests/unitary/default_setup/model/test_datascience_model.py index 69d778278..f331576db 100644 --- a/tests/unitary/default_setup/model/test_datascience_model.py +++ b/tests/unitary/default_setup/model/test_datascience_model.py @@ -26,7 +26,7 @@ ModelArtifactSizeError, BucketNotVersionedError, ModelFileDescriptionError, - InvalidArtifactType, + InvalidArtifactType, ModelRetentionSetting, ModelBackupSetting, ) from ads.model.model_metadata import ( ModelCustomMetadata, @@ -34,8 +34,6 @@ ModelTaxonomyMetadata, ModelCustomMetadataItem, MetadataCustomCategory, - ModelBackupSetting, - ModelRetentionSetting, ) from ads.model.service.oci_datascience_model import ( ModelProvenanceNotFoundError, diff --git a/tests/unitary/default_setup/model/test_model_metadata.py b/tests/unitary/default_setup/model/test_model_metadata.py index fd9b530ab..b6356a100 100644 --- a/tests/unitary/default_setup/model/test_model_metadata.py +++ b/tests/unitary/default_setup/model/test_model_metadata.py @@ -31,14 +31,10 @@ ModelTaxonomyMetadata, ModelTaxonomyMetadataItem, MetadataTaxonomyKeys, - UseCaseType, - ModelBackupSetting, - ModelBackupOperationDetails, - ModelRetentionSetting, - ModelRetentionOperationDetails, - 
CustomerNotificationType, - SettingStatus + UseCaseType ) +from ads.model.datascience_model import ModelRetentionSetting, CustomerNotificationType, SettingStatus, \ + ModelBackupSetting, ModelRetentionOperationDetails, ModelBackupOperationDetails from oci.data_science.models import Metadata as OciMetadataItem try: From 4ce3fcf3195919e15392b3cf18b8456080a03ac2 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Thu, 24 Oct 2024 01:21:01 +0530 Subject: [PATCH 09/15] updated restore method and model catalog doc --- ads/model/datascience_model.py | 20 +++++++++---------- .../model_catalog/model_catalog.rst | 9 +++++++++ 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index 3fa0c468d..e9b6eb460 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -1393,19 +1393,16 @@ def _remove_file_description_artifact(self): if self.local_copy_dir: shutil.rmtree(self.local_copy_dir, ignore_errors=True) - @classmethod def restore_model( - cls, - model_id: str, + self, restore_model_for_hours_specified: Optional[int] = None, - ): + ) -> None: """ Restore archived model artifact. Parameters ---------- - model_id : str - The `OCID` of the model to be restored. + restore_model_for_hours_specified : Optional[int] Duration in hours for which the archived model is available for access. @@ -1419,8 +1416,11 @@ def restore_model( If the model ID is invalid or if any parameters are incorrect. """ # Validate model_id - if not model_id or not isinstance(model_id, str): - raise ValueError("model_id must be a non-empty string.") + if not self.id: + logger.warn( + "Model needs to be saved to the model catalog before it can be restored." + ) + return # Optional: Validate restore_model_for_hours_specified if restore_model_for_hours_specified is not None: @@ -1432,8 +1432,8 @@ def restore_model( "restore_model_for_hours_specified must be a positive integer." 
) - cls.dsc_model.restore_archived_model_artifact( - model_id=model_id, + self.dsc_model.restore_archived_model_artifact( + model_id=self.id, restore_model_for_hours_specified=restore_model_for_hours_specified, ) diff --git a/docs/source/user_guide/model_catalog/model_catalog.rst b/docs/source/user_guide/model_catalog/model_catalog.rst index aaebaa87c..4a8732915 100644 --- a/docs/source/user_guide/model_catalog/model_catalog.rst +++ b/docs/source/user_guide/model_catalog/model_catalog.rst @@ -1394,6 +1394,10 @@ These are the metadata attributes: * ``schema_output``: Output schema. However, this field can't be updated. * ``time_created``: The date and time that the model artifacts were stored in the model catalog. * ``user_name``: User name of the account that created the entry. +* ``backup_setting``: The backup setting property of the model. +* ``retention_setting``: The retention setting property of the model. +* ``retention_operation_details``: The retention operation details of the model. +* ``backup_operation_details``: The backup operation details of the model. The ``provenance_metadata`` attribute returns a `ModelProvenance `__ object. This object has the attributes to access the metadata. @@ -1545,3 +1549,8 @@ In the next example, the model that was stored in the model catalog as part of t .. code-block:: python3 mc.delete_model(mc_model.id) + +Restore Archived Model +********************** + +The ``.restore_model()`` method of the model catalog restores the model for a specified number of hours. Restored models can be downloaded for 1-240 hours, defaulting to 24 hours. 
\ No newline at end of file From 651a0c01c209863673e4b22cb53e2002962cf783 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Thu, 24 Oct 2024 18:06:14 +0530 Subject: [PATCH 10/15] updated changes as per comments --- ads/model/datascience_model.py | 111 +++++++----------- ads/model/generic_model.py | 14 +-- ads/model/model_metadata.py | 109 ++++++++--------- ads/model/service/oci_datascience_model.py | 23 ++-- .../model/test_model_metadata.py | 26 ++-- 5 files changed, 124 insertions(+), 159 deletions(-) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index e9b6eb460..e664a6867 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8; -*- # Copyright (c) 2022, 2024 Oracle and/or its affiliates. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ @@ -11,19 +10,22 @@ import shutil import tempfile from copy import deepcopy -from typing import Dict, List, Optional, Union, Tuple +from typing import Dict, List, Optional, Tuple, Union import pandas import yaml from jsonschema import ValidationError, validate +from ads.common import oci_client as oc from ads.common import utils from ads.common.extended_enum import ExtendedEnumMeta from ads.common.object_storage_details import ObjectStorageDetails +from ads.config import ( + AQUA_SERVICE_MODELS_BUCKET as SERVICE_MODELS_BUCKET, +) from ads.config import ( COMPARTMENT_OCID, PROJECT_OCID, - AQUA_SERVICE_MODELS_BUCKET as SERVICE_MODELS_BUCKET, ) from ads.feature_engineering.schema import Schema from ads.jobs.builders.base import Builder @@ -37,12 +39,12 @@ ModelCustomMetadata, ModelCustomMetadataItem, ModelProvenanceMetadata, - ModelTaxonomyMetadata, ) + ModelTaxonomyMetadata, +) from ads.model.service.oci_datascience_model import ( ModelProvenanceNotFoundError, OCIDataScienceModel, ) -from ads.common import oci_client as oc logger = logging.getLogger(__name__) @@ 
-84,14 +86,6 @@ class CustomerNotificationType(str, metaclass=ExtendedEnumMeta): ON_FAILURE = "ON_FAILURE" ON_SUCCESS = "ON_SUCCESS" - @classmethod - def is_valid(cls, value): - return value in (cls.NONE, cls.ALL, cls.ON_FAILURE, cls.ON_SUCCESS) - - @property - def value(self): - return str(self) - class SettingStatus(str, metaclass=ExtendedEnumMeta): """Enum to represent the status of retention settings.""" @@ -100,11 +94,6 @@ class SettingStatus(str, metaclass=ExtendedEnumMeta): SUCCEEDED = "SUCCEEDED" FAILED = "FAILED" - @classmethod - def is_valid(cls, state: str) -> bool: - """Validates the given state against allowed SettingStatus values.""" - return state in (cls.PENDING, cls.SUCCEEDED, cls.FAILED) - class ModelBackupSetting: """ @@ -167,10 +156,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str) -> "ModelBackupSetting": """Constructs backup settings from a JSON string or dictionary.""" - if isinstance(json_str, str): - data = json.loads(json_str) - else: - data = json_str + data = json.loads(json_str) if isinstance(json_str, str) else json_str return cls.from_dict(data) @@ -180,14 +166,12 @@ def to_yaml(self) -> str: def validate(self) -> bool: """Validates the backup settings details. 
Returns True if valid, False otherwise.""" - if not isinstance(self.is_backup_enabled, bool): - return False - if self.backup_region and not isinstance(self.backup_region, str): - return False - if not isinstance(self.customer_notification_type, str) \ - or not CustomerNotificationType.is_valid(self.customer_notification_type): - return False - return True + return all([ + isinstance(self.is_backup_enabled, bool), + not self.backup_region or isinstance(self.backup_region, str), + isinstance(self.customer_notification_type, str) and self.customer_notification_type in + CustomerNotificationType.values() + ]) def __repr__(self): return self.to_yaml() @@ -252,10 +236,7 @@ def to_json(self) -> str: @classmethod def from_json(cls, json_str) -> "ModelRetentionSetting": """Constructs retention settings from a JSON string.""" - if isinstance(json_str, str): - data = json.loads(json_str) - else: - data = json_str + data = json.loads(json_str) if isinstance(json_str, str) else json_str return cls.from_dict(data) def to_yaml(self) -> str: @@ -264,18 +245,13 @@ def to_yaml(self) -> str: def validate(self) -> bool: """Validates the retention settings details. 
Returns True if valid, False otherwise.""" - if self.archive_after_days is not None and ( - not isinstance(self.archive_after_days, int) or self.archive_after_days < 0 - ): - return False - if self.delete_after_days is not None and ( - not isinstance(self.delete_after_days, int) or self.delete_after_days < 0 - ): - return False - if not isinstance(self.customer_notification_type, str) or not \ - CustomerNotificationType.is_valid(self.customer_notification_type): - return False - return True + return all([ + self.archive_after_days is None or ( + isinstance(self.archive_after_days, int) and self.archive_after_days >= 0), + self.delete_after_days is None or (isinstance(self.delete_after_days, int) and self.delete_after_days >= 0), + isinstance(self.customer_notification_type, str) and self.customer_notification_type in + CustomerNotificationType.values() + ]) def __repr__(self): return self.to_yaml() @@ -358,8 +334,8 @@ def validate(self) -> bool: """Validates the retention operation details.""" return all( [ - self.archive_state is None or SettingStatus.is_valid(self.archive_state), - self.delete_state is None or SettingStatus.is_valid(self.delete_state), + self.archive_state is None or self.archive_state in SettingStatus.values(), + self.delete_state is None or self.delete_state in SettingStatus.values(), self.time_archival_scheduled is None or isinstance(self.time_archival_scheduled, int), self.time_deletion_scheduled is None @@ -395,18 +371,18 @@ def __init__( self, backup_state: Optional[SettingStatus] = None, backup_state_details: Optional[str] = None, - time_last_backed_up: Optional[int] = None, + time_last_backup: Optional[int] = None, ): self.backup_state = backup_state self.backup_state_details = backup_state_details - self.time_last_backed_up = time_last_backed_up + self.time_last_backup = time_last_backup def to_dict(self) -> Dict: """Serializes the backup operation details into a dictionary.""" return { "backup_state": self.backup_state or None, 
"backup_state_details": self.backup_state_details, - "time_last_backed_up": self.time_last_backed_up, + "time_last_backup": self.time_last_backup, } @classmethod @@ -415,7 +391,7 @@ def from_dict(cls, data: Dict) -> "ModelBackupOperationDetails": return cls( backup_state=SettingStatus(data.get("backup_state")) or None, backup_state_details=data.get("backup_state_details"), - time_last_backed_up=data.get("time_last_backed_up"), + time_last_backup=data.get("time_last_backup"), ) def to_json(self) -> str: @@ -434,13 +410,10 @@ def to_yaml(self) -> str: def validate(self) -> bool: """Validates the backup operation details.""" - if self.backup_state is not None and not SettingStatus.is_valid(self.backup_state): - return False - if self.time_last_backed_up is not None and not isinstance( - self.time_last_backed_up, int - ): - return False - return True + return not ( + (self.backup_state is not None and not self.backup_state in SettingStatus.values()) or + (self.time_last_backup is not None and not isinstance(self.time_last_backup, int)) + ) def __repr__(self): return self.to_yaml() @@ -1068,7 +1041,7 @@ def with_model_file_description( elif json_string: json_data = json.loads(json_string) elif json_uri: - with open(json_uri, "r") as json_file: + with open(json_uri) as json_file: json_data = json.load(json_file) else: raise ValueError("Must provide either a valid json string or URI location.") @@ -1423,17 +1396,11 @@ def restore_model( return # Optional: Validate restore_model_for_hours_specified - if restore_model_for_hours_specified is not None: - if ( - not isinstance(restore_model_for_hours_specified, int) - or restore_model_for_hours_specified <= 0 - ): - raise ValueError( - "restore_model_for_hours_specified must be a positive integer." 
- ) + if restore_model_for_hours_specified is not None and ( + not isinstance(restore_model_for_hours_specified, int) or restore_model_for_hours_specified <= 0): + raise ValueError("restore_model_for_hours_specified must be a positive integer.") self.dsc_model.restore_archived_model_artifact( - model_id=self.id, restore_model_for_hours_specified=restore_model_for_hours_specified, ) @@ -1692,8 +1659,8 @@ def _init_complex_attributes(self): self.with_provenance_metadata(self.provenance_metadata) self.with_input_schema(self.input_schema) self.with_output_schema(self.output_schema) - self.with_backup_setting(self.backup_setting) - self.with_retention_setting(self.retention_setting) + # self.with_backup_setting(self.backup_setting) + # self.with_retention_setting(self.retention_setting) def _to_oci_dsc_model(self, **kwargs): """Creates an `OCIDataScienceModel` instance from the `DataScienceModel`. @@ -1753,6 +1720,8 @@ def _update_from_oci_dsc_model( self.CONST_DEFINED_METADATA: ModelTaxonomyMetadata._from_oci_metadata, self.CONST_BACKUP_SETTING: ModelBackupSetting.to_dict, self.CONST_RETENTION_SETTING: ModelRetentionSetting.to_dict, + self.CONST_BACKUP_OPERATION_DETAILS: ModelBackupOperationDetails.to_dict, + self.CONST_RETENTION_OPERATION_DETAILS: ModelRetentionOperationDetails.to_dict } # Update the main properties diff --git a/ads/model/generic_model.py b/ads/model/generic_model.py index 225b9750c..7a44b82de 100644 --- a/ads/model/generic_model.py +++ b/ads/model/generic_model.py @@ -1422,9 +1422,9 @@ def from_model_artifact( ) model.update_summary_status( detail=PREPARE_STATUS_POPULATE_METADATA_DETAIL, - status=( - ModelState.AVAILABLE.value if reload else ModelState.NOTAPPLICABLE.value - ), + status=ModelState.AVAILABLE.value + if reload + else ModelState.NOTAPPLICABLE.value, ) return model @@ -1706,11 +1706,9 @@ def from_model_catalog( ) result_model.update_summary_status( detail=SAVE_STATUS_INTROSPECT_TEST_DETAIL, - status=( - ModelState.AVAILABLE.value - if not 
result_model.ignore_conda_error - else ModelState.NOTAVAILABLE.value - ), + status=ModelState.AVAILABLE.value + if not result_model.ignore_conda_error + else ModelState.NOTAVAILABLE.value, ) return result_model diff --git a/ads/model/model_metadata.py b/ads/model/model_metadata.py index fd0059d64..2667b82ad 100644 --- a/ads/model/model_metadata.py +++ b/ads/model/model_metadata.py @@ -31,6 +31,7 @@ except: from yaml import Dumper as dumper + logging.basicConfig(stream=sys.stdout, level=logging.INFO) logger = logging.getLogger("ADS") @@ -255,9 +256,9 @@ def to_json(self): return json.dumps(self.to_dict()) def to_json_file( - self, - file_path: str, - storage_options: dict = None, + self, + file_path: str, + storage_options: dict = None, ) -> None: """Saves the metadata item value to a local file or object storage. @@ -312,9 +313,9 @@ def to_json_file( storage_options = factory.default_storage_options or {"config": {}} with fsspec.open( - file_path, - mode="w", - **(storage_options), + file_path, + mode="w", + **(storage_options), ) as f: f.write(json.dumps(self.value)) @@ -399,9 +400,9 @@ class ModelTaxonomyMetadataItem(ModelMetadataItem): _FIELDS = ["key", "value"] def __init__( - self, - key: str, - value: str = None, + self, + key: str, + value: str = None, ): self.key = key self.value = value @@ -499,17 +500,17 @@ def validate(self) -> bool: If invalid Framework provided. """ if ( - self.key.lower() == MetadataTaxonomyKeys.USE_CASE_TYPE.lower() - and self.value - and (not isinstance(self.value, str) or self.value not in UseCaseType) + self.key.lower() == MetadataTaxonomyKeys.USE_CASE_TYPE.lower() + and self.value + and (not isinstance(self.value, str) or self.value not in UseCaseType) ): raise ValueError( f"Invalid value of `UseCaseType`. Choose from {UseCaseType.values()}." 
) if ( - self.key.lower() == MetadataTaxonomyKeys.FRAMEWORK.lower() - and self.value - and (not isinstance(self.value, str) or self.value not in Framework) + self.key.lower() == MetadataTaxonomyKeys.FRAMEWORK.lower() + and self.value + and (not isinstance(self.value, str) or self.value not in Framework) ): raise ValueError( f"Invalid value of `Framework`. Choose from {Framework.values()}." @@ -556,11 +557,11 @@ class ModelCustomMetadataItem(ModelTaxonomyMetadataItem): _FIELDS = ["key", "value", "description", "category"] def __init__( - self, - key: str, - value: str = None, - description: str = None, - category: str = None, + self, + key: str, + value: str = None, + description: str = None, + category: str = None, ): super().__init__(key=key, value=value) self.description = description @@ -688,8 +689,8 @@ def validate(self) -> bool: raise MetadataValueTooLong(self.key, len(value)) if ( - self.description - and len(self.description) > METADATA_DESCRIPTION_LENGTH_LIMIT + self.description + and len(self.description) > METADATA_DESCRIPTION_LENGTH_LIMIT ): raise MetadataDescriptionTooLong(self.key, len(self.description)) @@ -729,7 +730,7 @@ def __init__(self): self._items = set() def get( - self, key: str, value: Optional[Any] = _sentinel + self, key: str, value: Optional[Any] = _sentinel ) -> Union[ModelMetadataItem, Any]: """Returns the model metadata item by provided key. @@ -890,9 +891,9 @@ def _to_oci_metadata(self): return [item._to_oci_metadata() for item in self._items] def to_json_file( - self, - file_path: str, - storage_options: dict = None, + self, + file_path: str, + storage_options: dict = None, ) -> None: """Saves the metadata to a local file or object storage. 
@@ -947,9 +948,9 @@ def to_json_file( storage_options = factory.default_storage_options or {"config": {}} with fsspec.open( - file_path, - mode="w", - **(storage_options), + file_path, + mode="w", + **(storage_options), ) as f: f.write(self.to_json()) @@ -1081,12 +1082,12 @@ def __init__(self): self._items = set() def add( - self, - key: str, - value: str, - description: str = "", - category: str = MetadataCustomCategory.OTHER, - replace: bool = False, + self, + key: str, + value: str, + description: str = "", + category: str = MetadataCustomCategory.OTHER, + replace: bool = False, ) -> None: """Adds a new model metadata item. Overrides the existing one if replace flag is True. @@ -1395,10 +1396,10 @@ def from_dict(cls, data: Dict) -> "ModelCustomMetadata": In case of the wrong input data format. """ if ( - not data - or not isinstance(data, Dict) - or not "data" in data - or not isinstance(data["data"], List) + not data + or not isinstance(data, Dict) + or not "data" in data + or not isinstance(data["data"], List) ): raise ValueError( "An error occurred when attempting to deserialize the model custom metadata from a dictionary. " @@ -1547,10 +1548,10 @@ def from_dict(cls, data: Dict) -> "ModelTaxonomyMetadata": In case of the wrong input data format. """ if ( - not data - or not isinstance(data, Dict) - or not "data" in data - or not isinstance(data["data"], List) + not data + or not isinstance(data, Dict) + or not "data" in data + or not isinstance(data["data"], List) ): raise ValueError( "An error occurred when attempting to deserialize the model taxonomy metadata from a dictionary. 
" @@ -1585,10 +1586,10 @@ class ModelProvenanceMetadata(DataClassSerializable): @classmethod def fetch_training_code_details( - cls, - training_script_path: str = None, - training_id: str = None, - artifact_dir: str = None, + cls, + training_script_path: str = None, + training_id: str = None, + artifact_dir: str = None, ): """Fetches the training code details: repo, git_branch, git_commit, repository_url, training_script_path and training_id. @@ -1671,15 +1672,15 @@ def assert_path_not_dirty(self, path: str, ignore: bool): if self.repo is not None and not ignore: path_abs = os.path.abspath(path) if ( - os.path.commonpath([path_abs, self.repo.working_dir]) - == self.repo.working_dir + os.path.commonpath([path_abs, self.repo.working_dir]) + == self.repo.working_dir ): path_relpath = os.path.relpath(path_abs, self.repo.working_dir) if self.repo.is_dirty(path=path_relpath) or any( - [ - os.path.commonpath([path_relpath, untracked]) == path_relpath - for untracked in self.repo.untracked_files - ] + [ + os.path.commonpath([path_relpath, untracked]) == path_relpath + for untracked in self.repo.untracked_files + ] ): raise ChangesNotCommitted(path_abs) @@ -1702,7 +1703,7 @@ def _to_oci_metadata(self) -> oci.data_science.models.ModelProvenance: @classmethod def _from_oci_metadata( - cls, model_provenance: oci.data_science.models.ModelProvenance + cls, model_provenance: oci.data_science.models.ModelProvenance ) -> "ModelProvenanceMetadata": """Creates a new model provenance metadata item from the `oci.data_science.models.ModelProvenance` object. diff --git a/ads/model/service/oci_datascience_model.py b/ads/model/service/oci_datascience_model.py index a61bb8565..c7bff4332 100644 --- a/ads/model/service/oci_datascience_model.py +++ b/ads/model/service/oci_datascience_model.py @@ -1,24 +1,14 @@ #!/usr/bin/env python -# -*- coding: utf-8; -*- # Copyright (c) 2022, 2024 Oracle and/or its affiliates. 
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ import logging -import time from functools import wraps from io import BytesIO from typing import Callable, Dict, List, Optional import oci.data_science -from ads.common import utils -from ads.common.object_storage_details import ObjectStorageDetails -from ads.common.oci_datascience import OCIDataScienceMixin -from ads.common.oci_mixin import OCIWorkRequestMixin -from ads.common.oci_resource import SEARCH_TYPE, OCIResource -from ads.common.utils import extract_region -from ads.common.work_request import DataScienceWorkRequest -from ads.model.deployment import ModelDeployment from oci.data_science.models import ( ArtifactExportDetailsObjectStorage, ArtifactImportDetailsObjectStorage, @@ -26,10 +16,17 @@ ExportModelArtifactDetails, ImportModelArtifactDetails, UpdateModelDetails, - WorkRequest, ) from oci.exceptions import ServiceError +from ads.common.object_storage_details import ObjectStorageDetails +from ads.common.oci_datascience import OCIDataScienceMixin +from ads.common.oci_mixin import OCIWorkRequestMixin +from ads.common.oci_resource import SEARCH_TYPE, OCIResource +from ads.common.utils import extract_region +from ads.common.work_request import DataScienceWorkRequest +from ads.model.deployment import ModelDeployment + logger = logging.getLogger(__name__) _REQUEST_INTERVAL_IN_SEC = 3 @@ -282,7 +279,7 @@ def get_artifact_info(self) -> Dict: msg="Model needs to be restored before the archived artifact content can be accessed." ) def restore_archived_model_artifact( - self, model_id: str, restore_model_for_hours_specified: Optional[int] = None + self, restore_model_for_hours_specified: Optional[int] = None ) -> None: """Restores the archived model artifact. @@ -303,7 +300,7 @@ def restore_archived_model_artifact( If model artifact not found. 
""" return self.client.restore_archived_model_artifact( - model_id=model_id, + model_id=self.id, restore_model_for_hours_specified=restore_model_for_hours_specified).headers["opc-work-request-id"] @check_for_model_id( diff --git a/tests/unitary/default_setup/model/test_model_metadata.py b/tests/unitary/default_setup/model/test_model_metadata.py index b6356a100..6411c2355 100644 --- a/tests/unitary/default_setup/model/test_model_metadata.py +++ b/tests/unitary/default_setup/model/test_model_metadata.py @@ -1338,12 +1338,12 @@ def test_to_dict(self): details = ModelBackupOperationDetails( backup_state=SettingStatus.SUCCEEDED, backup_state_details="Backup completed successfully", - time_last_backed_up=1633046400 + time_last_backup=1633046400 ) expected_dict = { "backup_state": "SUCCEEDED", "backup_state_details": "Backup completed successfully", - "time_last_backed_up": 1633046400 + "time_last_backup": 1633046400 } assert details.to_dict() == expected_dict @@ -1352,24 +1352,24 @@ def test_from_dict(self): data = { "backup_state": "SUCCEEDED", "backup_state_details": "Backup completed successfully", - "time_last_backed_up": 1633046400 + "time_last_backup": 1633046400 } details = ModelBackupOperationDetails.from_dict(data) assert details.backup_state == SettingStatus.SUCCEEDED assert details.backup_state_details == "Backup completed successfully" - assert details.time_last_backed_up == 1633046400 + assert details.time_last_backup == 1633046400 def test_to_json(self): """Test that to_json serializes the details to a JSON string.""" details = ModelBackupOperationDetails( backup_state=SettingStatus.SUCCEEDED, backup_state_details="Backup completed successfully", - time_last_backed_up=1633046400 + time_last_backup=1633046400 ) expected_json = json.dumps({ "backup_state": "SUCCEEDED", "backup_state_details": "Backup completed successfully", - "time_last_backed_up": 1633046400 + "time_last_backup": 1633046400 }) assert details.to_json() == expected_json @@ -1378,24 +1378,24 
@@ def test_from_json(self): json_str = json.dumps({ "backup_state": "SUCCEEDED", "backup_state_details": "Backup completed successfully", - "time_last_backed_up": 1633046400 + "time_last_backup": 1633046400 }) details = ModelBackupOperationDetails.from_json(json_str) assert details.backup_state == SettingStatus.SUCCEEDED assert details.backup_state_details == "Backup completed successfully" - assert details.time_last_backed_up == 1633046400 + assert details.time_last_backup == 1633046400 def test_to_yaml(self): """Test that to_yaml serializes the details to a YAML string.""" details = ModelBackupOperationDetails( backup_state=SettingStatus.SUCCEEDED, backup_state_details="Backup completed successfully", - time_last_backed_up=1633046400 + time_last_backup=1633046400 ) expected_yaml = yaml.dump({ "backup_state": "SUCCEEDED", "backup_state_details": "Backup completed successfully", - "time_last_backed_up": 1633046400 + "time_last_backup": 1633046400 }) assert details.to_yaml() == expected_yaml @@ -1403,7 +1403,7 @@ def test_validate_valid(self): """Test that validate method returns True for valid backup operation details.""" details = ModelBackupOperationDetails( backup_state=SettingStatus.SUCCEEDED, - time_last_backed_up=1633046400 + time_last_backup=1633046400 ) assert details.validate() is True @@ -1411,7 +1411,7 @@ def test_validate_invalid_state(self): """Test that validate method returns False for an invalid backup state.""" details = ModelBackupOperationDetails( backup_state="INVALID_STATE", - time_last_backed_up=1633046400 + time_last_backup=1633046400 ) assert details.validate() is False @@ -1419,7 +1419,7 @@ def test_validate_invalid_time(self): """Test that validate method returns False for an invalid time value.""" details = ModelBackupOperationDetails( backup_state=SettingStatus.SUCCEEDED, - time_last_backed_up="invalid_time" # Invalid time + time_last_backup="invalid_time" # Invalid time ) assert details.validate() is False From 
cf65397499f143b6fa71a4f6c78c0498d0b7a210 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Thu, 24 Oct 2024 22:50:34 +0530 Subject: [PATCH 11/15] updated with formatter changes --- ads/model/datascience_model.py | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index e664a6867..8fba8e48f 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -411,7 +411,7 @@ def to_yaml(self) -> str: def validate(self) -> bool: """Validates the backup operation details.""" return not ( - (self.backup_state is not None and not self.backup_state in SettingStatus.values()) or + (self.backup_state is not None and self.backup_state not in SettingStatus.values()) or (self.time_last_backup is not None and not isinstance(self.time_last_backup, int)) ) @@ -1536,15 +1536,16 @@ def update(self, **kwargs) -> "DataScienceModel": self.dsc_model = self._to_oci_dsc_model(**kwargs).update() logger.debug(f"Updating a model provenance metadata {self.provenance_metadata}") - try: - self.dsc_model.get_model_provenance() - self.dsc_model.update_model_provenance( - self.provenance_metadata._to_oci_metadata() - ) - except ModelProvenanceNotFoundError: - self.dsc_model.create_model_provenance( - self.provenance_metadata._to_oci_metadata() - ) + if self.provenance_metadata: + try: + self.dsc_model.get_model_provenance() + self.dsc_model.update_model_provenance( + self.provenance_metadata._to_oci_metadata() + ) + except ModelProvenanceNotFoundError: + self.dsc_model.create_model_provenance( + self.provenance_metadata._to_oci_metadata() + ) return self.sync() @@ -2036,7 +2037,7 @@ def add_artifact( "Both 'prefix' and 'files' cannot be provided. Please provide only one." 
) - if self.model_file_description == None: + if self.model_file_description is None: self.empty_json = { "version": "1.0", "type": "modelOSSReferenceDescription", @@ -2086,7 +2087,7 @@ def list_obj_versions_unpaginated(): # Fetch object details and put it into the objects variable objectStorageList = [] - if files == None: + if files is None: objectStorageList = list_obj_versions_unpaginated() else: for fileName in files: @@ -2174,7 +2175,7 @@ def remove_artifact( if (not namespace) or (not bucket): raise ValueError("Both 'namespace' and 'bucket' must be provided.") - def findModelIdx(): + def find_model_idx(): for idx, model in enumerate(self.model_file_description["models"]): if ( model["namespace"], @@ -2184,10 +2185,10 @@ def findModelIdx(): return idx return -1 - if self.model_file_description == None: + if self.model_file_description is None: return - modelSearchIdx = findModelIdx() + modelSearchIdx = find_model_idx() if modelSearchIdx == -1: return else: From b9d923de41222cd797859c35bb107afc80b3ece2 Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Fri, 25 Oct 2024 06:58:05 +0530 Subject: [PATCH 12/15] updated with formatter changes --- ads/model/datascience_model.py | 121 ++++++++++++++++----------------- 1 file changed, 59 insertions(+), 62 deletions(-) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index 8fba8e48f..375b9a9d1 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -20,12 +20,10 @@ from ads.common import utils from ads.common.extended_enum import ExtendedEnumMeta from ads.common.object_storage_details import ObjectStorageDetails -from ads.config import ( - AQUA_SERVICE_MODELS_BUCKET as SERVICE_MODELS_BUCKET, -) from ads.config import ( COMPARTMENT_OCID, PROJECT_OCID, + AQUA_SERVICE_MODELS_BUCKET as SERVICE_MODELS_BUCKET, ) from ads.feature_engineering.schema import Schema from ads.jobs.builders.base import Builder @@ -48,6 +46,7 @@ logger = logging.getLogger(__name__) + 
_MAX_ARTIFACT_SIZE_IN_BYTES = 2147483648 # 2GB MODEL_BY_REFERENCE_VERSION = "1.0" MODEL_BY_REFERENCE_JSON_FILE_NAME = "model_description.json" @@ -65,8 +64,8 @@ def __init__(self, max_artifact_size: str): class BucketNotVersionedError(Exception): # pragma: no cover def __init__( - self, - msg="Model artifact bucket is not versioned. Enable versioning on the bucket to proceed with model creation by reference.", + self, + msg="Model artifact bucket is not versioned. Enable versioning on the bucket to proceed with model creation by reference.", ): super().__init__(msg) @@ -527,6 +526,7 @@ class DataScienceModel(Builder): Sets path details for models created by reference. Input can be either a dict, string or json file and the schema is dictated by model_file_description_schema.json + Examples -------- >>> ds_model = (DataScienceModel() @@ -796,7 +796,7 @@ def defined_tags(self) -> Dict[str, Dict[str, object]]: return self.get_spec(self.CONST_DEFINED_TAG) def with_defined_tags( - self, **kwargs: Dict[str, Dict[str, object]] + self, **kwargs: Dict[str, Dict[str, object]] ) -> "DataScienceModel": """Sets defined tags. @@ -877,7 +877,7 @@ def defined_metadata_list(self) -> ModelTaxonomyMetadata: return self.get_spec(self.CONST_DEFINED_METADATA) def with_defined_metadata_list( - self, metadata: Union[ModelTaxonomyMetadata, Dict] + self, metadata: Union[ModelTaxonomyMetadata, Dict] ) -> "DataScienceModel": """Sets model taxonomy (defined) metadata. @@ -901,7 +901,7 @@ def custom_metadata_list(self) -> ModelCustomMetadata: return self.get_spec(self.CONST_CUSTOM_METADATA) def with_custom_metadata_list( - self, metadata: Union[ModelCustomMetadata, Dict] + self, metadata: Union[ModelCustomMetadata, Dict] ) -> "DataScienceModel": """Sets model custom metadata. 
@@ -925,7 +925,7 @@ def provenance_metadata(self) -> ModelProvenanceMetadata: return self.get_spec(self.CONST_PROVENANCE_METADATA) def with_provenance_metadata( - self, metadata: Union[ModelProvenanceMetadata, Dict] + self, metadata: Union[ModelProvenanceMetadata, Dict] ) -> "DataScienceModel": """Sets model provenance metadata. @@ -1018,7 +1018,7 @@ def model_file_description(self) -> dict: return self.get_spec(self.CONST_MODEL_FILE_DESCRIPTION) def with_model_file_description( - self, json_dict: dict = None, json_string: str = None, json_uri: str = None + self, json_dict: dict = None, json_string: str = None, json_uri: str = None ): """Sets the json file description for model passed by reference Parameters @@ -1041,7 +1041,7 @@ def with_model_file_description( elif json_string: json_data = json.loads(json_string) elif json_uri: - with open(json_uri) as json_file: + with open(json_uri, "r") as json_file: json_data = json.load(json_file) else: raise ValueError("Must provide either a valid json string or URI location.") @@ -1256,15 +1256,15 @@ def create(self, **kwargs) -> "DataScienceModel": return self def upload_artifact( - self, - bucket_uri: Optional[str] = None, - auth: Optional[Dict] = None, - region: Optional[str] = None, - overwrite_existing_artifact: Optional[bool] = True, - remove_existing_artifact: Optional[bool] = True, - timeout: Optional[int] = None, - parallel_process_count: int = utils.DEFAULT_PARALLEL_PROCESS_COUNT, - model_by_reference: Optional[bool] = False, + self, + bucket_uri: Optional[str] = None, + auth: Optional[Dict] = None, + region: Optional[str] = None, + overwrite_existing_artifact: Optional[bool] = True, + remove_existing_artifact: Optional[bool] = True, + timeout: Optional[int] = None, + parallel_process_count: int = utils.DEFAULT_PARALLEL_PROCESS_COUNT, + model_by_reference: Optional[bool] = False, ) -> None: """Uploads model artifacts to the model catalog. 
@@ -1334,7 +1334,7 @@ def upload_artifact( bucket_uri = self.artifact if not model_by_reference and ( - bucket_uri or utils.folder_size(self.artifact) > _MAX_ARTIFACT_SIZE_IN_BYTES + bucket_uri or utils.folder_size(self.artifact) > _MAX_ARTIFACT_SIZE_IN_BYTES ): if not bucket_uri: raise ModelArtifactSizeError( @@ -1405,15 +1405,15 @@ def restore_model( ) def download_artifact( - self, - target_dir: str, - auth: Optional[Dict] = None, - force_overwrite: Optional[bool] = False, - bucket_uri: Optional[str] = None, - region: Optional[str] = None, - overwrite_existing_artifact: Optional[bool] = True, - remove_existing_artifact: Optional[bool] = True, - timeout: Optional[int] = None, + self, + target_dir: str, + auth: Optional[Dict] = None, + force_overwrite: Optional[bool] = False, + bucket_uri: Optional[str] = None, + region: Optional[str] = None, + overwrite_existing_artifact: Optional[bool] = True, + remove_existing_artifact: Optional[bool] = True, + timeout: Optional[int] = None, ): """Downloads model artifacts from the model catalog. 
@@ -1488,9 +1488,9 @@ def download_artifact( ) if ( - artifact_size > _MAX_ARTIFACT_SIZE_IN_BYTES - or bucket_uri - or model_by_reference + artifact_size > _MAX_ARTIFACT_SIZE_IN_BYTES + or bucket_uri + or model_by_reference ): artifact_downloader = LargeArtifactDownloader( dsc_model=self.dsc_model, @@ -1536,22 +1536,21 @@ def update(self, **kwargs) -> "DataScienceModel": self.dsc_model = self._to_oci_dsc_model(**kwargs).update() logger.debug(f"Updating a model provenance metadata {self.provenance_metadata}") - if self.provenance_metadata: - try: - self.dsc_model.get_model_provenance() - self.dsc_model.update_model_provenance( - self.provenance_metadata._to_oci_metadata() - ) - except ModelProvenanceNotFoundError: - self.dsc_model.create_model_provenance( - self.provenance_metadata._to_oci_metadata() - ) + try: + self.dsc_model.get_model_provenance() + self.dsc_model.update_model_provenance( + self.provenance_metadata._to_oci_metadata() + ) + except ModelProvenanceNotFoundError: + self.dsc_model.create_model_provenance( + self.provenance_metadata._to_oci_metadata() + ) return self.sync() def delete( - self, - delete_associated_model_deployment: Optional[bool] = False, + self, + delete_associated_model_deployment: Optional[bool] = False, ) -> "DataScienceModel": """Removes model from the model catalog. @@ -1570,7 +1569,7 @@ def delete( @classmethod def list( - cls, compartment_id: str = None, project_id: str = None, **kwargs + cls, compartment_id: str = None, project_id: str = None, **kwargs ) -> List["DataScienceModel"]: """Lists datascience models in a given compartment. @@ -1597,7 +1596,7 @@ def list( @classmethod def list_df( - cls, compartment_id: str = None, project_id: str = None, **kwargs + cls, compartment_id: str = None, project_id: str = None, **kwargs ) -> "pandas.DataFrame": """Lists datascience models in a given compartment. 
@@ -1617,7 +1616,7 @@ def list_df( """ records = [] for model in OCIDataScienceModel.list_resource( - compartment_id, project_id=project_id, **kwargs + compartment_id, project_id=project_id, **kwargs ): records.append( { @@ -1660,8 +1659,6 @@ def _init_complex_attributes(self): self.with_provenance_metadata(self.provenance_metadata) self.with_input_schema(self.input_schema) self.with_output_schema(self.output_schema) - # self.with_backup_setting(self.backup_setting) - # self.with_retention_setting(self.retention_setting) def _to_oci_dsc_model(self, **kwargs): """Creates an `OCIDataScienceModel` instance from the `DataScienceModel`. @@ -1700,7 +1697,7 @@ def _to_oci_dsc_model(self, **kwargs): return OCIDataScienceModel(**dsc_spec) def _update_from_oci_dsc_model( - self, dsc_model: OCIDataScienceModel + self, dsc_model: OCIDataScienceModel ) -> "DataScienceModel": """Update the properties from an OCIDataScienceModel object. @@ -1973,12 +1970,12 @@ def _download_file_description_artifact(self) -> Tuple[Union[str, List[str]], in return bucket_uri[0] if len(bucket_uri) == 1 else bucket_uri, artifact_size def add_artifact( - self, - uri: Optional[str] = None, - namespace: Optional[str] = None, - bucket: Optional[str] = None, - prefix: Optional[str] = None, - files: Optional[List[str]] = None, + self, + uri: Optional[str] = None, + namespace: Optional[str] = None, + bucket: Optional[str] = None, + prefix: Optional[str] = None, + files: Optional[List[str]] = None, ): """ Adds information about objects in a specified bucket to the model description JSON. 
@@ -2127,11 +2124,11 @@ def list_obj_versions_unpaginated(): self.set_spec(self.CONST_MODEL_FILE_DESCRIPTION, tmp_model_file_description) def remove_artifact( - self, - uri: Optional[str] = None, - namespace: Optional[str] = None, - bucket: Optional[str] = None, - prefix: Optional[str] = None, + self, + uri: Optional[str] = None, + namespace: Optional[str] = None, + bucket: Optional[str] = None, + prefix: Optional[str] = None, ): """ Removes information about objects in a specified bucket or using a specified URI from the model description JSON. From e4a4e0fd1811a34ffb49e9377a4397918e9d02ff Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Fri, 25 Oct 2024 06:59:19 +0530 Subject: [PATCH 13/15] updated with formatter changes --- ads/model/datascience_model.py | 1 + 1 file changed, 1 insertion(+) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index 375b9a9d1..bc01076da 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# -*- coding: utf-8; -*- # Copyright (c) 2022, 2024 Oracle and/or its affiliates. 
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ From 119e5abc9844c787938644a823d132d0ab7c3a1a Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Fri, 25 Oct 2024 07:04:32 +0530 Subject: [PATCH 14/15] updated with formatter changes --- ads/model/datascience_model.py | 6 +++--- ads/model/service/oci_datascience_model.py | 19 +++++++++++-------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index bc01076da..4b2a9f7b8 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -2176,9 +2176,9 @@ def remove_artifact( def find_model_idx(): for idx, model in enumerate(self.model_file_description["models"]): if ( - model["namespace"], - model["bucketName"], - (model["prefix"] if ("prefix" in model) else None), + model["namespace"], + model["bucketName"], + (model["prefix"] if ("prefix" in model) else None), ) == (namespace, bucket, "" if not prefix else prefix): return idx return -1 diff --git a/ads/model/service/oci_datascience_model.py b/ads/model/service/oci_datascience_model.py index c7bff4332..44ba091a6 100644 --- a/ads/model/service/oci_datascience_model.py +++ b/ads/model/service/oci_datascience_model.py @@ -1,14 +1,24 @@ #!/usr/bin/env python +# -*- coding: utf-8; -*- # Copyright (c) 2022, 2024 Oracle and/or its affiliates. 
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/ import logging +import time from functools import wraps from io import BytesIO from typing import Callable, Dict, List, Optional import oci.data_science +from ads.common import utils +from ads.common.object_storage_details import ObjectStorageDetails +from ads.common.oci_datascience import OCIDataScienceMixin +from ads.common.oci_mixin import OCIWorkRequestMixin +from ads.common.oci_resource import SEARCH_TYPE, OCIResource +from ads.common.utils import extract_region +from ads.common.work_request import DataScienceWorkRequest +from ads.model.deployment import ModelDeployment from oci.data_science.models import ( ArtifactExportDetailsObjectStorage, ArtifactImportDetailsObjectStorage, @@ -16,17 +26,10 @@ ExportModelArtifactDetails, ImportModelArtifactDetails, UpdateModelDetails, + WorkRequest, ) from oci.exceptions import ServiceError -from ads.common.object_storage_details import ObjectStorageDetails -from ads.common.oci_datascience import OCIDataScienceMixin -from ads.common.oci_mixin import OCIWorkRequestMixin -from ads.common.oci_resource import SEARCH_TYPE, OCIResource -from ads.common.utils import extract_region -from ads.common.work_request import DataScienceWorkRequest -from ads.model.deployment import ModelDeployment - logger = logging.getLogger(__name__) _REQUEST_INTERVAL_IN_SEC = 3 From 3d092ee33b7b3be889e1f714b53b842b66466feb Mon Sep 17 00:00:00 2001 From: Ishaan Arora Date: Fri, 25 Oct 2024 09:34:41 +0530 Subject: [PATCH 15/15] added the check for Model Provenance again --- ads/model/datascience_model.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/ads/model/datascience_model.py b/ads/model/datascience_model.py index 4b2a9f7b8..4a2f209c4 100644 --- a/ads/model/datascience_model.py +++ b/ads/model/datascience_model.py @@ -1537,15 +1537,16 @@ def update(self, **kwargs) -> "DataScienceModel": self.dsc_model = 
self._to_oci_dsc_model(**kwargs).update() logger.debug(f"Updating a model provenance metadata {self.provenance_metadata}") - try: - self.dsc_model.get_model_provenance() - self.dsc_model.update_model_provenance( - self.provenance_metadata._to_oci_metadata() - ) - except ModelProvenanceNotFoundError: - self.dsc_model.create_model_provenance( - self.provenance_metadata._to_oci_metadata() - ) + if self.provenance_metadata: + try: + self.dsc_model.get_model_provenance() + self.dsc_model.update_model_provenance( + self.provenance_metadata._to_oci_metadata() + ) + except ModelProvenanceNotFoundError: + self.dsc_model.create_model_provenance( + self.provenance_metadata._to_oci_metadata() + ) return self.sync()