diff --git a/.github/workflows/python_ci.yaml b/.github/workflows/python_ci.yaml index a240294d4..dee4a54f8 100644 --- a/.github/workflows/python_ci.yaml +++ b/.github/workflows/python_ci.yaml @@ -27,7 +27,7 @@ jobs: - name: Pip install run: | python -m pip install --upgrade pip - pip install '.[development,openssl,tdms,rosbags]' + pip install '.[development,openssl,tdms,rosbags,hdf5]' - name: Lint run: | ruff check diff --git a/protos/sift/common/type/v1/channel_enum_type.proto b/protos/sift/common/type/v1/channel_enum_type.proto index c67a2e867..8c5850251 100644 --- a/protos/sift/common/type/v1/channel_enum_type.proto +++ b/protos/sift/common/type/v1/channel_enum_type.proto @@ -9,4 +9,5 @@ import "google/api/field_behavior.proto"; message ChannelEnumType { string name = 1 [(google.api.field_behavior) = REQUIRED]; uint32 key = 2 [(google.api.field_behavior) = REQUIRED]; + bool is_signed = 3 [(google.api.field_behavior) = OPTIONAL]; } diff --git a/protos/sift/protobuf_descriptors/v2/protobuf_descriptors.proto b/protos/sift/protobuf_descriptors/v2/protobuf_descriptors.proto index d0991671f..ad57560e6 100644 --- a/protos/sift/protobuf_descriptors/v2/protobuf_descriptors.proto +++ b/protos/sift/protobuf_descriptors/v2/protobuf_descriptors.proto @@ -95,6 +95,7 @@ message IncompatibleProtobufField { string field_number = 5; string reason = 6; string details = 7; + string field_kind = 8; } message CheckProtobufDescriptorCompatibilityResponse { diff --git a/protos/sift/rule_evaluation/v1/rule_evaluation.proto b/protos/sift/rule_evaluation/v1/rule_evaluation.proto index 5aab94553..6c7dee7c0 100644 --- a/protos/sift/rule_evaluation/v1/rule_evaluation.proto +++ b/protos/sift/rule_evaluation/v1/rule_evaluation.proto @@ -129,8 +129,18 @@ message EvaluateRulesFromRuleConfigs { repeated sift.rules.v1.UpdateRuleRequest configs = 1 [(google.api.field_behavior) = REQUIRED]; } +message RulePreviewOutput { + string rule_name = 1 [(google.api.field_behavior) = REQUIRED]; + string rule_id = 2; + string rule_version_id = 3; + string asset_id = 4 [(google.api.field_behavior) = REQUIRED]; + int32 exit_code = 5 [(google.api.field_behavior) = REQUIRED]; + string stdout = 6; + string stderr = 7; +} message EvaluateRulesPreviewResponse { int32 created_annotation_count = 1 [(google.api.field_behavior) = REQUIRED]; repeated sift.rules.v1.DryRunAnnotation dry_run_annotations = 2; + repeated RulePreviewOutput rule_outputs = 3; } diff --git a/protos/sift/rules/v1/rules.proto b/protos/sift/rules/v1/rules.proto index 88bbb992e..12a65128c 100644 --- a/protos/sift/rules/v1/rules.proto +++ b/protos/sift/rules/v1/rules.proto @@ -656,6 +656,8 @@ message RuleConditionExpression { oneof expression { SingleChannelComparisonExpression single_channel_comparison = 1 [deprecated = true]; CalculatedChannelConfig calculated_channel = 2; + // Experimental - not currently enabled + PythonCode python_code = 3; } } @@ -693,6 +695,15 @@ message ChannelReference { // Deprecated - use name instead. 
If provided, name will be joined with the component as `component.name` string component = 2 [(google.api.field_behavior) = REQUIRED]; } +message PythonChannelReference { + string reference = 1 [(google.api.field_behavior) = REQUIRED]; + string name = 2 [(google.api.field_behavior) = REQUIRED]; +} +message PythonCode { + repeated PythonChannelReference channel_references = 1 [(google.api.field_behavior) = REQUIRED]; + string code = 2 [(google.api.field_behavior) = REQUIRED]; + string dependencies = 3 [(google.api.field_behavior) = OPTIONAL]; +} message RuleActionConfiguration { oneof configuration { diff --git a/protos/sift/runs/v2/runs.proto b/protos/sift/runs/v2/runs.proto index 9717b0710..e64ca1c92 100644 --- a/protos/sift/runs/v2/runs.proto +++ b/protos/sift/runs/v2/runs.proto @@ -53,6 +53,18 @@ service RunService { }; } + // Creates an adhoc run. This is used to create a run after the data has been ingested. + rpc CreateAdhocRun(CreateAdhocRunRequest) returns (CreateAdhocRunResponse) { + option (google.api.http) = { + post: "/api/v2/runs:adhoc" + body: "*" + }; + option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = { + summary: "CreateAdhocRun" + description: "Create an adhoc run." + }; + } + // Updates an existing run using the list of fields specified in `update_mask`. rpc UpdateRun(UpdateRunRequest) returns (UpdateRunResponse) { option (google.api.http) = { @@ -118,6 +130,7 @@ message Run { repeated sift.metadata.v1.MetadataValue metadata = 15 [(google.api.field_behavior) = REQUIRED]; repeated string asset_ids = 16 [(google.api.field_behavior) = REQUIRED]; optional google.protobuf.Timestamp archived_date = 17 [(google.api.field_behavior) = OPTIONAL]; + bool is_adhoc = 18 [(google.api.field_behavior) = REQUIRED]; } // The request for a call to `RunService_GetRun` to retrieve run. @@ -148,8 +161,8 @@ message ListRunsRequest { string page_token = 2 [(google.api.field_behavior) = OPTIONAL]; // A [Common Expression Language (CEL)](https://github.com/google/cel-spec) filter string. - // Available fields to filter by are `run_id`, `organization_id`, `name`, `description`, `created_by_user_id`, `modified_by_user_id`, - // `created_date`, `modified_date`, `start_time`, `stop_time`, `client_key`, `is_pinned`, `asset_id`, `asset_name`, `archived_date`, + // Available fields to filter by are `run_id`, `organization_id`, `asset_id`, `asset_name`, `client_key`, `name`, `description`, `created_by_user_id`, `modified_by_user_id`, + // `created_date`, `modified_date`, `start_time`, `stop_time`, `tag_id`, `asset_tag_id`, `duration`, `annotation_comments_count`, `annotation_state`, `archived_date`, // and `metadata`. Metadata can be used in filters by using `metadata.{metadata_key_name}` as the field name. // For further information about how to use CELs, please refer to [this guide](https://github.com/google/cel-spec/blob/master/doc/langdef.md#standard-definitions). // For more information about the fields used for filtering, please refer to [this definition](/docs/api/grpc/protocol-buffers/runs#run). Optional. @@ -203,11 +216,39 @@ message CreateRunRequest { repeated sift.metadata.v1.MetadataValue metadata = 9 [(google.api.field_behavior) = OPTIONAL]; } -// The response of a call to `RunService_CreateRuns` containing the newly created run. message CreateRunResponse { Run run = 1 [(google.api.field_behavior) = REQUIRED]; } +// The request for a call to `RunService_CreateAdhocRun` to create an adhoc run.
+message CreateAdhocRunRequest { + // The name that will be assigned to the new run. + string name = 1 [(google.api.field_behavior) = REQUIRED]; + // A description about the new run. + string description = 2 [(google.api.field_behavior) = REQUIRED]; + // The time at which data ingestion began for this new run. It must be before the `stop_time`. + google.protobuf.Timestamp start_time = 3 [(google.api.field_behavior) = REQUIRED]; + // The time at which data ingestion concluded for this new run. + google.protobuf.Timestamp stop_time = 4 [(google.api.field_behavior) = REQUIRED]; + // A list of asset IDs to associate with the new run. + repeated string asset_ids = 5 [(google.api.field_behavior) = REQUIRED]; + // Tags to associate with the new run. + repeated string tags = 6 [(google.api.field_behavior) = OPTIONAL]; + // The metadata values associated with this run. + repeated sift.metadata.v1.MetadataValue metadata = 7 [(google.api.field_behavior) = OPTIONAL]; + // An arbitrary user-chosen key that uniquely identifies this run. Optional, though it is recommended to provide one. + optional string client_key = 8 [ + (google.api.field_behavior) = OPTIONAL + ]; +} + + +// The response of a call to `RunService_CreateAdhocRun` containing the newly created adhoc run. +message CreateAdhocRunResponse { + Run run = 1 [(google.api.field_behavior) = REQUIRED]; +} + + // The request for a call to `RunService_UpdateRun` to update an existing run. message UpdateRunRequest { // The run to update. The run's `run_id` field is used to identify the run to update diff --git a/python/CHANGELOG.md b/python/CHANGELOG.md index a9fbecc1d..e5bc6d9de 100644 --- a/python/CHANGELOG.md +++ b/python/CHANGELOG.md @@ -3,6 +3,28 @@ All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/). +## [v0.8.4] - August 18, 2025 +- [Add experimental protos for development](https://github.com/sift-stack/sift/pull/291) + +## [v0.8.3] - August 11, 2025 +- [Fix windows utf-8 encoding bug with Hdf5UploadService](https://github.com/sift-stack/sift/pull/289) + +## [v0.8.2] - August 1, 2025 +- [Use name only in ChannelReference creation](https://github.com/sift-stack/sift/pull/284) + +## [v0.8.1] - July 31, 2025 +- [Catch PermissionError when removing temp files](https://github.com/sift-stack/sift/pull/282) + +## [v0.8.0] - July 29, 2025 ### What's New #### HDF5 Upload Service Adds support for uploading HDF5 files to Sift for ingestion through the addition of the `Hdf5UploadService` and `Hdf5Config`. See `examples/data_import/hdf5` for an example of how to upload HDF5 files.
+ +### Full Changelog +- [Add HDF5 upload service](https://github.com/sift-stack/sift/pull/261) +- [Fixes bug when updating rules where the rule_id is not always passed to Sift](https://github.com/sift-stack/sift/pull/281) + ## [v0.7.0] - June 24, 2025 ### What's New #### AssetService and Metadata Support diff --git a/python/examples/data_import/hdf5/main.py b/python/examples/data_import/hdf5/main.py new file mode 100644 index 000000000..d85889aa3 --- /dev/null +++ b/python/examples/data_import/hdf5/main.py @@ -0,0 +1,72 @@ +import os + +import h5py +from dotenv import load_dotenv +from sift_py.data_import.config import Hdf5Config +from sift_py.data_import.hdf5 import Hdf5UploadService +from sift_py.rest import SiftRestConfig + +if __name__ == "__main__": + """ + Example of uploading an HDF5 file into Sift. + """ + + load_dotenv() + + sift_uri = os.getenv("SIFT_API_URI") + assert sift_uri, "expected 'SIFT_API_URI' environment variable to be set" + + apikey = os.getenv("SIFT_API_KEY") + assert apikey, "expected 'SIFT_API_KEY' environment variable to be set" + + asset_name = os.getenv("ASSET_NAME") + assert asset_name, "expected 'ASSET_NAME' environment variable to be set" + + # Create an HDF5 configuration to define the data to be ingested + hdf5_config_dict = { + "asset_name": asset_name, + "time": { + "format": "TIME_FORMAT_ABSOLUTE_DATETIME", + }, + "data": [], + } + + # For this example, each HDF5 dataset uses the common '/timestamp' dataset + # Each is of type double and contains its channel name in the 'Name' attribute + with h5py.File("sample_data.h5", "r") as f: + for dset in f.values(): + # Skip adding the timestamp dataset + if dset.name == "/timestamp": + continue + + hdf5_config_dict["data"].append( + { + "name": dset.attrs["Name"], + "time_dataset": "/timestamp", + "value_dataset": dset.name, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + } + ) + + hdf5_config = Hdf5Config(hdf5_config_dict) + + rest_config: SiftRestConfig = { + "uri": sift_uri, + "apikey": apikey, + } + + hdf5_upload_service = Hdf5UploadService(rest_config) + import_service = hdf5_upload_service.upload( + "sample_data.h5", + hdf5_config, + ) + + # Wait until the data import is completed. + # The HDF5 upload service may split the upload into multiple parts + data_imports = import_service.wait_until_all_complete() + + # Print the data import details and final status.
+ for data_import in data_imports: + print(data_import.model_dump_json(indent=1)) + + print("Upload example complete!") diff --git a/python/examples/data_import/hdf5/requirements.txt b/python/examples/data_import/hdf5/requirements.txt new file mode 100644 index 000000000..04a1ef53b --- /dev/null +++ b/python/examples/data_import/hdf5/requirements.txt @@ -0,0 +1,2 @@ +python-dotenv +sift-stack-py[hdf5] \ No newline at end of file diff --git a/python/examples/data_import/hdf5/sample_data.h5 b/python/examples/data_import/hdf5/sample_data.h5 new file mode 100644 index 000000000..24a268a76 Binary files /dev/null and b/python/examples/data_import/hdf5/sample_data.h5 differ diff --git a/python/lib/sift/common/type/v1/channel_enum_type_pb2.py b/python/lib/sift/common/type/v1/channel_enum_type_pb2.py index b42e7b5b4..561a29394 100644 --- a/python/lib/sift/common/type/v1/channel_enum_type_pb2.py +++ b/python/lib/sift/common/type/v1/channel_enum_type_pb2.py @@ -15,7 +15,7 @@ from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n+sift/common/type/v1/channel_enum_type.proto\x12\x13sift.common.type.v1\x1a\x1fgoogle/api/field_behavior.proto\"A\n\x0f\x43hannelEnumType\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x04name\x12\x15\n\x03key\x18\x02 \x01(\rB\x03\xe0\x41\x02R\x03keyB\x9e\x01\n\x17\x63om.sift.common.type.v1B\x14\x43hannelEnumTypeProtoP\x01\xa2\x02\x03SCT\xaa\x02\x13Sift.Common.Type.V1\xca\x02\x13Sift\\Common\\Type\\V1\xe2\x02\x1fSift\\Common\\Type\\V1\\GPBMetadata\xea\x02\x16Sift::Common::Type::V1b\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n+sift/common/type/v1/channel_enum_type.proto\x12\x13sift.common.type.v1\x1a\x1fgoogle/api/field_behavior.proto\"c\n\x0f\x43hannelEnumType\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x04name\x12\x15\n\x03key\x18\x02 \x01(\rB\x03\xe0\x41\x02R\x03key\x12 \n\tis_signed\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01R\x08isSignedB\x9e\x01\n\x17\x63om.sift.common.type.v1B\x14\x43hannelEnumTypeProtoP\x01\xa2\x02\x03SCT\xaa\x02\x13Sift.Common.Type.V1\xca\x02\x13Sift\\Common\\Type\\V1\xe2\x02\x1fSift\\Common\\Type\\V1\\GPBMetadata\xea\x02\x16Sift::Common::Type::V1b\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -27,6 +27,8 @@ _globals['_CHANNELENUMTYPE'].fields_by_name['name']._serialized_options = b'\340A\002' _globals['_CHANNELENUMTYPE'].fields_by_name['key']._loaded_options = None _globals['_CHANNELENUMTYPE'].fields_by_name['key']._serialized_options = b'\340A\002' + _globals['_CHANNELENUMTYPE'].fields_by_name['is_signed']._loaded_options = None + _globals['_CHANNELENUMTYPE'].fields_by_name['is_signed']._serialized_options = b'\340A\001' _globals['_CHANNELENUMTYPE']._serialized_start=101 - _globals['_CHANNELENUMTYPE']._serialized_end=166 + _globals['_CHANNELENUMTYPE']._serialized_end=200 # @@protoc_insertion_point(module_scope) diff --git a/python/lib/sift/common/type/v1/channel_enum_type_pb2.pyi b/python/lib/sift/common/type/v1/channel_enum_type_pb2.pyi index 8d4b7abb0..a752c7e4f 100644 --- a/python/lib/sift/common/type/v1/channel_enum_type_pb2.pyi +++ b/python/lib/sift/common/type/v1/channel_enum_type_pb2.pyi @@ -16,14 +16,17 @@ class ChannelEnumType(google.protobuf.message.Message): NAME_FIELD_NUMBER: builtins.int KEY_FIELD_NUMBER: builtins.int + IS_SIGNED_FIELD_NUMBER: builtins.int name: builtins.str key: builtins.int + is_signed: builtins.bool def __init__( self, *, name: 
builtins.str = ..., key: builtins.int = ..., + is_signed: builtins.bool = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["key", b"key", "name", b"name"]) -> None: ... + def ClearField(self, field_name: typing.Literal["is_signed", b"is_signed", "key", b"key", "name", b"name"]) -> None: ... global___ChannelEnumType = ChannelEnumType diff --git a/python/lib/sift/protobuf_descriptors/v2/protobuf_descriptors_pb2.py b/python/lib/sift/protobuf_descriptors/v2/protobuf_descriptors_pb2.py index bbf40550c..15f9014df 100644 --- a/python/lib/sift/protobuf_descriptors/v2/protobuf_descriptors_pb2.py +++ b/python/lib/sift/protobuf_descriptors/v2/protobuf_descriptors_pb2.py @@ -18,7 +18,7 @@ from protoc_gen_openapiv2.options import annotations_pb2 as protoc__gen__openapiv2_dot_options_dot_annotations__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n7sift/protobuf_descriptors/v2/protobuf_descriptors.proto\x12\x1csift.protobuf_descriptors.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\xdc\x01\n DeleteProtobufDescriptorsRequest\x12\x37\n\x16message_type_full_name\x18\x01 \x01(\tB\x02\x18\x01R\x13messageTypeFullName\x12 \n\tnamespace\x18\x02 \x01(\tB\x02\x18\x01R\tnamespace\x12\'\n\x0forganization_id\x18\x03 \x01(\tR\x0eorganizationId\x12\x34\n\x16protobuf_descriptor_id\x18\x04 \x01(\tR\x14protobufDescriptorId\"#\n!DeleteProtobufDescriptorsResponse\"\xc3\x01\n\x1c\x41\x64\x64ProtobufDescriptorRequest\x12\x61\n\x13protobuf_descriptor\x18\x01 \x01(\x0b\x32\x30.sift.protobuf_descriptors.v2.ProtobufDescriptorR\x12protobufDescriptor\x12@\n\x1c\x66orce_duplicate_registration\x18\x02 \x01(\x08R\x1a\x66orceDuplicateRegistration\"\x82\x01\n\x1d\x41\x64\x64ProtobufDescriptorResponse\x12\x61\n\x13protobuf_descriptor\x18\x01 \x01(\x0b\x32\x30.sift.protobuf_descriptors.v2.ProtobufDescriptorR\x12protobufDescriptor\"\x90\x01\n+CheckProtobufDescriptorCompatibilityRequest\x12\x61\n\x13protobuf_descriptor\x18\x01 \x01(\x0b\x32\x30.sift.protobuf_descriptors.v2.ProtobufDescriptorR\x12protobufDescriptor\"\xae\x02\n\x19IncompatibleProtobufField\x12\x34\n\x16protobuf_descriptor_id\x18\x01 \x01(\tR\x14protobufDescriptorId\x12*\n\x11message_full_name\x18\x02 \x01(\tR\x0fmessageFullName\x12,\n\x12\x64\x65sired_field_name\x18\x03 \x01(\tR\x10\x64\x65siredFieldName\x12,\n\x12\x63urrent_field_name\x18\x04 \x01(\tR\x10\x63urrentFieldName\x12!\n\x0c\x66ield_number\x18\x05 \x01(\tR\x0b\x66ieldNumber\x12\x16\n\x06reason\x18\x06 \x01(\tR\x06reason\x12\x18\n\x07\x64\x65tails\x18\x07 \x01(\tR\x07\x64\x65tails\"\xda\x01\n,CheckProtobufDescriptorCompatibilityResponse\x12\x19\n\x08is_valid\x18\x01 \x01(\x08R\x07isValid\x12\x8e\x01\n\'incompatible_protobuf_descriptor_fields\x18\x02 \x03(\x0b\x32\x37.sift.protobuf_descriptors.v2.IncompatibleProtobufFieldR$incompatibleProtobufDescriptorFields\"\xb9\x02\n\x12ProtobufDescriptor\x12\x33\n\x16message_type_full_name\x18\x01 \x01(\tR\x13messageTypeFullName\x12.\n\x13\x66ile_descriptor_set\x18\x02 \x01(\x0cR\x11\x66ileDescriptorSet\x12&\n\x0fproto_file_name\x18\x03 \x01(\tR\rprotoFileName\x12\x1c\n\tnamespace\x18\x04 \x01(\tR\tnamespace\x12\x34\n\x16protobuf_descriptor_id\x18\x05 \x01(\tR\x14protobufDescriptorId\x12\x42\n\x0c\x63reated_date\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03R\x0b\x63reatedDate\"\xa3\x01\n\x1eListProtobufDescriptorsRequest\x12 \n\tpage_size\x18\x01 
\x01(\rB\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x03 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\x12\x1e\n\x08order_by\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x07orderBy\"\xae\x01\n\x1fListProtobufDescriptorsResponse\x12\x63\n\x14protobuf_descriptors\x18\x01 \x03(\x0b\x32\x30.sift.protobuf_descriptors.v2.ProtobufDescriptorR\x13protobufDescriptors\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken2\x8f\r\n\x19ProtobufDescriptorService\x12\xbd\x02\n\x15\x41\x64\x64ProtobufDescriptor\x12:.sift.protobuf_descriptors.v2.AddProtobufDescriptorRequest\x1a;.sift.protobuf_descriptors.v2.AddProtobufDescriptorResponse\"\xaa\x01\x92\x41\x7f\x12\x15\x41\x64\x64ProtobufDescriptor\x1a\x33Used to register a protobuf message to be ingested.*1ProtobufDescriptorService_AddProtobufDescriptorV2\x82\xd3\xe4\x93\x02\"\" /api/v2/protobuf-descriptors:add\x12\xb9\x03\n$CheckProtobufDescriptorCompatibility\x12I.sift.protobuf_descriptors.v2.CheckProtobufDescriptorCompatibilityRequest\x1aJ.sift.protobuf_descriptors.v2.CheckProtobufDescriptorCompatibilityResponse\"\xf9\x01\x92\x41\xbd\x01\x12$CheckProtobufDescriptorCompatibility\x1aSUsed to check if a protobuf descriptor is compatible with the existing descriptors.*@ProtobufDescriptorService_CheckProtobufDescriptorCompatibilityV2\x82\xd3\xe4\x93\x02\x32\"0/api/v2/protobuf-descriptors:check-compatibility\x12\xfb\x02\n\x19\x44\x65leteProtobufDescriptors\x12>.sift.protobuf_descriptors.v2.DeleteProtobufDescriptorsRequest\x1a?.sift.protobuf_descriptors.v2.DeleteProtobufDescriptorsResponse\"\xdc\x01\x92\x41\xb4\x01\x12\x19\x44\x65leteProtobufDescriptors\x1a`Delete protobuf descriptors of that match the provided `namespace` and `message_type_full_name`.*5ProtobufDescriptorService_DeleteProtobufDescriptorsV2\x82\xd3\xe4\x93\x02\x1e*\x1c/api/v2/protobuf-descriptors\x12\xc8\x02\n\x17ListProtobufDescriptors\x12<.sift.protobuf_descriptors.v2.ListProtobufDescriptorsRequest\x1a=.sift.protobuf_descriptors.v2.ListProtobufDescriptorsResponse\"\xaf\x01\x92\x41\x87\x01\x12\x17ListProtobufDescriptors\x1a\x37Retrieve protobuf descriptors using an optional filter.*3ProtobufDescriptorService_ListProtobufDescriptorsV2\x82\xd3\xe4\x93\x02\x1e\x12\x1c/api/v2/protobuf-descriptors\x1a\xac\x01\x92\x41\xa8\x01\x12[Service to programmatically interact with protobuf descriptors used for protobuf ingestion.\x1aI\n(Read more about what protobuf ingestion.\x12\x1d/ingestion/protobuf_ingestionB\xec\x01\n com.sift.protobuf_descriptors.v2B\x18ProtobufDescriptorsProtoP\x01\xa2\x02\x03SPX\xaa\x02\x1bSift.ProtobufDescriptors.V2\xca\x02\x1bSift\\ProtobufDescriptors\\V2\xe2\x02\'Sift\\ProtobufDescriptors\\V2\\GPBMetadata\xea\x02\x1dSift::ProtobufDescriptors::V2\x92\x41\x1f\x12\x1d\n\x1bProtobuf Descriptor Serviceb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n7sift/protobuf_descriptors/v2/protobuf_descriptors.proto\x12\x1csift.protobuf_descriptors.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\"\xdc\x01\n DeleteProtobufDescriptorsRequest\x12\x37\n\x16message_type_full_name\x18\x01 \x01(\tB\x02\x18\x01R\x13messageTypeFullName\x12 \n\tnamespace\x18\x02 \x01(\tB\x02\x18\x01R\tnamespace\x12\'\n\x0forganization_id\x18\x03 \x01(\tR\x0eorganizationId\x12\x34\n\x16protobuf_descriptor_id\x18\x04 
\x01(\tR\x14protobufDescriptorId\"#\n!DeleteProtobufDescriptorsResponse\"\xc3\x01\n\x1c\x41\x64\x64ProtobufDescriptorRequest\x12\x61\n\x13protobuf_descriptor\x18\x01 \x01(\x0b\x32\x30.sift.protobuf_descriptors.v2.ProtobufDescriptorR\x12protobufDescriptor\x12@\n\x1c\x66orce_duplicate_registration\x18\x02 \x01(\x08R\x1a\x66orceDuplicateRegistration\"\x82\x01\n\x1d\x41\x64\x64ProtobufDescriptorResponse\x12\x61\n\x13protobuf_descriptor\x18\x01 \x01(\x0b\x32\x30.sift.protobuf_descriptors.v2.ProtobufDescriptorR\x12protobufDescriptor\"\x90\x01\n+CheckProtobufDescriptorCompatibilityRequest\x12\x61\n\x13protobuf_descriptor\x18\x01 \x01(\x0b\x32\x30.sift.protobuf_descriptors.v2.ProtobufDescriptorR\x12protobufDescriptor\"\xcd\x02\n\x19IncompatibleProtobufField\x12\x34\n\x16protobuf_descriptor_id\x18\x01 \x01(\tR\x14protobufDescriptorId\x12*\n\x11message_full_name\x18\x02 \x01(\tR\x0fmessageFullName\x12,\n\x12\x64\x65sired_field_name\x18\x03 \x01(\tR\x10\x64\x65siredFieldName\x12,\n\x12\x63urrent_field_name\x18\x04 \x01(\tR\x10\x63urrentFieldName\x12!\n\x0c\x66ield_number\x18\x05 \x01(\tR\x0b\x66ieldNumber\x12\x16\n\x06reason\x18\x06 \x01(\tR\x06reason\x12\x18\n\x07\x64\x65tails\x18\x07 \x01(\tR\x07\x64\x65tails\x12\x1d\n\nfield_kind\x18\x08 \x01(\tR\tfieldKind\"\xda\x01\n,CheckProtobufDescriptorCompatibilityResponse\x12\x19\n\x08is_valid\x18\x01 \x01(\x08R\x07isValid\x12\x8e\x01\n\'incompatible_protobuf_descriptor_fields\x18\x02 \x03(\x0b\x32\x37.sift.protobuf_descriptors.v2.IncompatibleProtobufFieldR$incompatibleProtobufDescriptorFields\"\xb9\x02\n\x12ProtobufDescriptor\x12\x33\n\x16message_type_full_name\x18\x01 \x01(\tR\x13messageTypeFullName\x12.\n\x13\x66ile_descriptor_set\x18\x02 \x01(\x0cR\x11\x66ileDescriptorSet\x12&\n\x0fproto_file_name\x18\x03 \x01(\tR\rprotoFileName\x12\x1c\n\tnamespace\x18\x04 \x01(\tR\tnamespace\x12\x34\n\x16protobuf_descriptor_id\x18\x05 \x01(\tR\x14protobufDescriptorId\x12\x42\n\x0c\x63reated_date\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03R\x0b\x63reatedDate\"\xa3\x01\n\x1eListProtobufDescriptorsRequest\x12 \n\tpage_size\x18\x01 \x01(\rB\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x03 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\x12\x1e\n\x08order_by\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x07orderBy\"\xae\x01\n\x1fListProtobufDescriptorsResponse\x12\x63\n\x14protobuf_descriptors\x18\x01 \x03(\x0b\x32\x30.sift.protobuf_descriptors.v2.ProtobufDescriptorR\x13protobufDescriptors\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken2\x8f\r\n\x19ProtobufDescriptorService\x12\xbd\x02\n\x15\x41\x64\x64ProtobufDescriptor\x12:.sift.protobuf_descriptors.v2.AddProtobufDescriptorRequest\x1a;.sift.protobuf_descriptors.v2.AddProtobufDescriptorResponse\"\xaa\x01\x92\x41\x7f\x12\x15\x41\x64\x64ProtobufDescriptor\x1a\x33Used to register a protobuf message to be ingested.*1ProtobufDescriptorService_AddProtobufDescriptorV2\x82\xd3\xe4\x93\x02\"\" /api/v2/protobuf-descriptors:add\x12\xb9\x03\n$CheckProtobufDescriptorCompatibility\x12I.sift.protobuf_descriptors.v2.CheckProtobufDescriptorCompatibilityRequest\x1aJ.sift.protobuf_descriptors.v2.CheckProtobufDescriptorCompatibilityResponse\"\xf9\x01\x92\x41\xbd\x01\x12$CheckProtobufDescriptorCompatibility\x1aSUsed to check if a protobuf descriptor is compatible with the existing 
descriptors.*@ProtobufDescriptorService_CheckProtobufDescriptorCompatibilityV2\x82\xd3\xe4\x93\x02\x32\"0/api/v2/protobuf-descriptors:check-compatibility\x12\xfb\x02\n\x19\x44\x65leteProtobufDescriptors\x12>.sift.protobuf_descriptors.v2.DeleteProtobufDescriptorsRequest\x1a?.sift.protobuf_descriptors.v2.DeleteProtobufDescriptorsResponse\"\xdc\x01\x92\x41\xb4\x01\x12\x19\x44\x65leteProtobufDescriptors\x1a`Delete protobuf descriptors of that match the provided `namespace` and `message_type_full_name`.*5ProtobufDescriptorService_DeleteProtobufDescriptorsV2\x82\xd3\xe4\x93\x02\x1e*\x1c/api/v2/protobuf-descriptors\x12\xc8\x02\n\x17ListProtobufDescriptors\x12<.sift.protobuf_descriptors.v2.ListProtobufDescriptorsRequest\x1a=.sift.protobuf_descriptors.v2.ListProtobufDescriptorsResponse\"\xaf\x01\x92\x41\x87\x01\x12\x17ListProtobufDescriptors\x1a\x37Retrieve protobuf descriptors using an optional filter.*3ProtobufDescriptorService_ListProtobufDescriptorsV2\x82\xd3\xe4\x93\x02\x1e\x12\x1c/api/v2/protobuf-descriptors\x1a\xac\x01\x92\x41\xa8\x01\x12[Service to programmatically interact with protobuf descriptors used for protobuf ingestion.\x1aI\n(Read more about what protobuf ingestion.\x12\x1d/ingestion/protobuf_ingestionB\xec\x01\n com.sift.protobuf_descriptors.v2B\x18ProtobufDescriptorsProtoP\x01\xa2\x02\x03SPX\xaa\x02\x1bSift.ProtobufDescriptors.V2\xca\x02\x1bSift\\ProtobufDescriptors\\V2\xe2\x02\'Sift\\ProtobufDescriptors\\V2\\GPBMetadata\xea\x02\x1dSift::ProtobufDescriptors::V2\x92\x41\x1f\x12\x1d\n\x1bProtobuf Descriptor Serviceb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -61,15 +61,15 @@ _globals['_CHECKPROTOBUFDESCRIPTORCOMPATIBILITYREQUEST']._serialized_start=825 _globals['_CHECKPROTOBUFDESCRIPTORCOMPATIBILITYREQUEST']._serialized_end=969 _globals['_INCOMPATIBLEPROTOBUFFIELD']._serialized_start=972 - _globals['_INCOMPATIBLEPROTOBUFFIELD']._serialized_end=1274 - _globals['_CHECKPROTOBUFDESCRIPTORCOMPATIBILITYRESPONSE']._serialized_start=1277 - _globals['_CHECKPROTOBUFDESCRIPTORCOMPATIBILITYRESPONSE']._serialized_end=1495 - _globals['_PROTOBUFDESCRIPTOR']._serialized_start=1498 - _globals['_PROTOBUFDESCRIPTOR']._serialized_end=1811 - _globals['_LISTPROTOBUFDESCRIPTORSREQUEST']._serialized_start=1814 - _globals['_LISTPROTOBUFDESCRIPTORSREQUEST']._serialized_end=1977 - _globals['_LISTPROTOBUFDESCRIPTORSRESPONSE']._serialized_start=1980 - _globals['_LISTPROTOBUFDESCRIPTORSRESPONSE']._serialized_end=2154 - _globals['_PROTOBUFDESCRIPTORSERVICE']._serialized_start=2157 - _globals['_PROTOBUFDESCRIPTORSERVICE']._serialized_end=3836 + _globals['_INCOMPATIBLEPROTOBUFFIELD']._serialized_end=1305 + _globals['_CHECKPROTOBUFDESCRIPTORCOMPATIBILITYRESPONSE']._serialized_start=1308 + _globals['_CHECKPROTOBUFDESCRIPTORCOMPATIBILITYRESPONSE']._serialized_end=1526 + _globals['_PROTOBUFDESCRIPTOR']._serialized_start=1529 + _globals['_PROTOBUFDESCRIPTOR']._serialized_end=1842 + _globals['_LISTPROTOBUFDESCRIPTORSREQUEST']._serialized_start=1845 + _globals['_LISTPROTOBUFDESCRIPTORSREQUEST']._serialized_end=2008 + _globals['_LISTPROTOBUFDESCRIPTORSRESPONSE']._serialized_start=2011 + _globals['_LISTPROTOBUFDESCRIPTORSRESPONSE']._serialized_end=2185 + _globals['_PROTOBUFDESCRIPTORSERVICE']._serialized_start=2188 + _globals['_PROTOBUFDESCRIPTORSERVICE']._serialized_end=3867 # @@protoc_insertion_point(module_scope) diff --git a/python/lib/sift/protobuf_descriptors/v2/protobuf_descriptors_pb2.pyi b/python/lib/sift/protobuf_descriptors/v2/protobuf_descriptors_pb2.pyi 
index ed75df1be..8ad737a35 100644 --- a/python/lib/sift/protobuf_descriptors/v2/protobuf_descriptors_pb2.pyi +++ b/python/lib/sift/protobuf_descriptors/v2/protobuf_descriptors_pb2.pyi @@ -115,6 +115,7 @@ class IncompatibleProtobufField(google.protobuf.message.Message): FIELD_NUMBER_FIELD_NUMBER: builtins.int REASON_FIELD_NUMBER: builtins.int DETAILS_FIELD_NUMBER: builtins.int + FIELD_KIND_FIELD_NUMBER: builtins.int protobuf_descriptor_id: builtins.str message_full_name: builtins.str desired_field_name: builtins.str @@ -122,6 +123,7 @@ class IncompatibleProtobufField(google.protobuf.message.Message): field_number: builtins.str reason: builtins.str details: builtins.str + field_kind: builtins.str def __init__( self, *, @@ -132,8 +134,9 @@ class IncompatibleProtobufField(google.protobuf.message.Message): field_number: builtins.str = ..., reason: builtins.str = ..., details: builtins.str = ..., + field_kind: builtins.str = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["current_field_name", b"current_field_name", "desired_field_name", b"desired_field_name", "details", b"details", "field_number", b"field_number", "message_full_name", b"message_full_name", "protobuf_descriptor_id", b"protobuf_descriptor_id", "reason", b"reason"]) -> None: ... + def ClearField(self, field_name: typing.Literal["current_field_name", b"current_field_name", "desired_field_name", b"desired_field_name", "details", b"details", "field_kind", b"field_kind", "field_number", b"field_number", "message_full_name", b"message_full_name", "protobuf_descriptor_id", b"protobuf_descriptor_id", "reason", b"reason"]) -> None: ... global___IncompatibleProtobufField = IncompatibleProtobufField diff --git a/python/lib/sift/rule_evaluation/v1/rule_evaluation_pb2.py b/python/lib/sift/rule_evaluation/v1/rule_evaluation_pb2.py index 032dcff2b..d7109cf2f 100644 --- a/python/lib/sift/rule_evaluation/v1/rule_evaluation_pb2.py +++ b/python/lib/sift/rule_evaluation/v1/rule_evaluation_pb2.py @@ -20,7 +20,7 @@ from sift.rules.v1 import rules_pb2 as sift_dot_rules_dot_v1_dot_rules__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-sift/rule_evaluation/v1/rule_evaluation.proto\x12\x17sift.rule_evaluation.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\x1a-sift/common/type/v1/resource_identifier.proto\x1a\x19sift/rules/v1/rules.proto\"\xe2\x05\n\x14\x45valuateRulesRequest\x12;\n\x03run\x18\x01 \x01(\x0b\x32\'.sift.common.type.v1.ResourceIdentifierH\x00R\x03run\x12\x42\n\x06\x61ssets\x18\x02 \x01(\x0b\x32(.sift.rule_evaluation.v1.AssetsTimeRangeH\x00R\x06\x61ssets\x12M\n\x0erun_time_range\x18\t \x01(\x0b\x32%.sift.rule_evaluation.v1.RunTimeRangeH\x00R\x0crunTimeRange\x12U\n\x05rules\x18\x03 \x01(\x0b\x32=.sift.rule_evaluation.v1.EvaluateRulesFromCurrentRuleVersionsH\x01R\x05rules\x12]\n\rrule_versions\x18\x04 \x01(\x0b\x32\x36.sift.rule_evaluation.v1.EvaluateRulesFromRuleVersionsH\x01R\x0cruleVersions\x12\x63\n\x0freport_template\x18\x05 \x01(\x0b\x32\x38.sift.rule_evaluation.v1.EvaluateRulesFromReportTemplateH\x01R\x0ereportTemplate\x12\x66\n\x12\x61nnotation_options\x18\x06 \x01(\x0b\x32\x37.sift.rule_evaluation.v1.EvaluateRulesAnnotationOptionsR\x11\x61nnotationOptions\x12,\n\x0forganization_id\x18\x07 \x01(\tB\x03\xe0\x41\x01R\x0eorganizationId\x12)\n\x0breport_name\x18\x08 
\x01(\tB\x03\xe0\x41\x01H\x02R\nreportName\x88\x01\x01\x42\x06\n\x04timeB\x06\n\x04modeB\x0e\n\x0c_report_name\"\xeb\x01\n\x0cRunTimeRange\x12\x39\n\x03run\x18\x01 \x01(\x0b\x32\'.sift.common.type.v1.ResourceIdentifierR\x03run\x12\x43\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01H\x00R\tstartTime\x88\x01\x01\x12?\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01H\x01R\x07\x65ndTime\x88\x01\x01\x42\r\n\x0b_start_timeB\x0b\n\t_end_time\"\xcf\x01\n\x0f\x41ssetsTimeRange\x12@\n\x06\x61ssets\x18\x01 \x01(\x0b\x32#.sift.common.type.v1.NamedResourcesB\x03\xe0\x41\x02R\x06\x61ssets\x12>\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\tstartTime\x12:\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x07\x65ndTime\"k\n$EvaluateRulesFromCurrentRuleVersions\x12\x43\n\x05rules\x18\x01 \x01(\x0b\x32(.sift.common.type.v1.ResourceIdentifiersB\x03\xe0\x41\x02R\x05rules\"x\n\x1f\x45valuateRulesFromReportTemplate\x12U\n\x0freport_template\x18\x01 \x01(\x0b\x32\'.sift.common.type.v1.ResourceIdentifierB\x03\xe0\x41\x02R\x0ereportTemplate\"N\n\x1d\x45valuateRulesFromRuleVersions\x12-\n\x10rule_version_ids\x18\x01 \x03(\tB\x03\xe0\x41\x02R\x0eruleVersionIds\"^\n\x1e\x45valuateRulesAnnotationOptions\x12<\n\x04tags\x18\x01 \x01(\x0b\x32#.sift.common.type.v1.NamedResourcesB\x03\xe0\x41\x02R\x04tags\"\xb7\x01\n\x15\x45valuateRulesResponse\x12=\n\x18\x63reated_annotation_count\x18\x01 \x01(\x05\x42\x03\xe0\x41\x02R\x16\x63reatedAnnotationCount\x12%\n\treport_id\x18\x02 \x01(\tB\x03\xe0\x41\x01H\x00R\x08reportId\x88\x01\x01\x12\x1f\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x01H\x01R\x05jobId\x88\x01\x01\x42\x0c\n\n_report_idB\t\n\x07_job_id\"\xde\x04\n\x1b\x45valuateRulesPreviewRequest\x12;\n\x03run\x18\x01 \x01(\x0b\x32\'.sift.common.type.v1.ResourceIdentifierH\x00R\x03run\x12M\n\x0erun_time_range\x18\x08 \x01(\x0b\x32%.sift.rule_evaluation.v1.RunTimeRangeH\x00R\x0crunTimeRange\x12U\n\x05rules\x18\x03 \x01(\x0b\x32=.sift.rule_evaluation.v1.EvaluateRulesFromCurrentRuleVersionsH\x01R\x05rules\x12]\n\rrule_versions\x18\x04 \x01(\x0b\x32\x36.sift.rule_evaluation.v1.EvaluateRulesFromRuleVersionsH\x01R\x0cruleVersions\x12\x63\n\x0freport_template\x18\x05 \x01(\x0b\x32\x38.sift.rule_evaluation.v1.EvaluateRulesFromReportTemplateH\x01R\x0ereportTemplate\x12Z\n\x0crule_configs\x18\x06 \x01(\x0b\x32\x35.sift.rule_evaluation.v1.EvaluateRulesFromRuleConfigsH\x01R\x0bruleConfigs\x12,\n\x0forganization_id\x18\x07 \x01(\tB\x03\xe0\x41\x01R\x0eorganizationIdB\x06\n\x04timeB\x06\n\x04mode\"_\n\x1c\x45valuateRulesFromRuleConfigs\x12?\n\x07\x63onfigs\x18\x01 \x03(\x0b\x32 .sift.rules.v1.UpdateRuleRequestB\x03\xe0\x41\x02R\x07\x63onfigs\"\xae\x01\n\x1c\x45valuateRulesPreviewResponse\x12=\n\x18\x63reated_annotation_count\x18\x01 \x01(\x05\x42\x03\xe0\x41\x02R\x16\x63reatedAnnotationCount\x12O\n\x13\x64ry_run_annotations\x18\x02 \x03(\x0b\x32\x1f.sift.rules.v1.DryRunAnnotationR\x11\x64ryRunAnnotations2\xd8\x06\n\x15RuleEvaluationService\x12\xc3\x02\n\rEvaluateRules\x12-.sift.rule_evaluation.v1.EvaluateRulesRequest\x1a..sift.rule_evaluation.v1.EvaluateRulesResponse\"\xd2\x01\x92\x41\xa7\x01\x12\rEvaluateRules\x1a\x95\x01\x45valuate rules from a designated source against a run or asset and return the total amount of annotations created and the ID of the generated 
report.\x82\xd3\xe4\x93\x02!\"\x1c/api/v1/rules/evaluate-rules:\x01*\x12\xda\x02\n\x14\x45valuateRulesPreview\x12\x34.sift.rule_evaluation.v1.EvaluateRulesPreviewRequest\x1a\x35.sift.rule_evaluation.v1.EvaluateRulesPreviewResponse\"\xd4\x01\x92\x41\xa1\x01\x12\x14\x45valuateRulesPreview\x1a\x88\x01Perform a dry run evaluation for existing rules or rule configurations against a run and return the annotations that would be generated.\x82\xd3\xe4\x93\x02)\"$/api/v1/rules/evaluate-rules:preview:\x01*\x1a\x9b\x01\x92\x41\x97\x01\x12\x1aService to evaluate rules.\x1ay\n\x1fRead more about what rules are.\x12Vhttps://customer.support.siftstack.com/servicedesk/customer/portal/2/article/265421102B\xca\x01\n\x1b\x63om.sift.rule_evaluation.v1B\x13RuleEvaluationProtoP\x01\xa2\x02\x03SRX\xaa\x02\x16Sift.RuleEvaluation.V1\xca\x02\x16Sift\\RuleEvaluation\\V1\xe2\x02\"Sift\\RuleEvaluation\\V1\\GPBMetadata\xea\x02\x18Sift::RuleEvaluation::V1\x92\x41\x1b\x12\x19\n\x17Rule Evaluation Serviceb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n-sift/rule_evaluation/v1/rule_evaluation.proto\x12\x17sift.rule_evaluation.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\x1a-sift/common/type/v1/resource_identifier.proto\x1a\x19sift/rules/v1/rules.proto\"\xe2\x05\n\x14\x45valuateRulesRequest\x12;\n\x03run\x18\x01 \x01(\x0b\x32\'.sift.common.type.v1.ResourceIdentifierH\x00R\x03run\x12\x42\n\x06\x61ssets\x18\x02 \x01(\x0b\x32(.sift.rule_evaluation.v1.AssetsTimeRangeH\x00R\x06\x61ssets\x12M\n\x0erun_time_range\x18\t \x01(\x0b\x32%.sift.rule_evaluation.v1.RunTimeRangeH\x00R\x0crunTimeRange\x12U\n\x05rules\x18\x03 \x01(\x0b\x32=.sift.rule_evaluation.v1.EvaluateRulesFromCurrentRuleVersionsH\x01R\x05rules\x12]\n\rrule_versions\x18\x04 \x01(\x0b\x32\x36.sift.rule_evaluation.v1.EvaluateRulesFromRuleVersionsH\x01R\x0cruleVersions\x12\x63\n\x0freport_template\x18\x05 \x01(\x0b\x32\x38.sift.rule_evaluation.v1.EvaluateRulesFromReportTemplateH\x01R\x0ereportTemplate\x12\x66\n\x12\x61nnotation_options\x18\x06 \x01(\x0b\x32\x37.sift.rule_evaluation.v1.EvaluateRulesAnnotationOptionsR\x11\x61nnotationOptions\x12,\n\x0forganization_id\x18\x07 \x01(\tB\x03\xe0\x41\x01R\x0eorganizationId\x12)\n\x0breport_name\x18\x08 \x01(\tB\x03\xe0\x41\x01H\x02R\nreportName\x88\x01\x01\x42\x06\n\x04timeB\x06\n\x04modeB\x0e\n\x0c_report_name\"\xeb\x01\n\x0cRunTimeRange\x12\x39\n\x03run\x18\x01 \x01(\x0b\x32\'.sift.common.type.v1.ResourceIdentifierR\x03run\x12\x43\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01H\x00R\tstartTime\x88\x01\x01\x12?\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01H\x01R\x07\x65ndTime\x88\x01\x01\x42\r\n\x0b_start_timeB\x0b\n\t_end_time\"\xcf\x01\n\x0f\x41ssetsTimeRange\x12@\n\x06\x61ssets\x18\x01 \x01(\x0b\x32#.sift.common.type.v1.NamedResourcesB\x03\xe0\x41\x02R\x06\x61ssets\x12>\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\tstartTime\x12:\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x07\x65ndTime\"k\n$EvaluateRulesFromCurrentRuleVersions\x12\x43\n\x05rules\x18\x01 \x01(\x0b\x32(.sift.common.type.v1.ResourceIdentifiersB\x03\xe0\x41\x02R\x05rules\"x\n\x1f\x45valuateRulesFromReportTemplate\x12U\n\x0freport_template\x18\x01 
\x01(\x0b\x32\'.sift.common.type.v1.ResourceIdentifierB\x03\xe0\x41\x02R\x0ereportTemplate\"N\n\x1d\x45valuateRulesFromRuleVersions\x12-\n\x10rule_version_ids\x18\x01 \x03(\tB\x03\xe0\x41\x02R\x0eruleVersionIds\"^\n\x1e\x45valuateRulesAnnotationOptions\x12<\n\x04tags\x18\x01 \x01(\x0b\x32#.sift.common.type.v1.NamedResourcesB\x03\xe0\x41\x02R\x04tags\"\xb7\x01\n\x15\x45valuateRulesResponse\x12=\n\x18\x63reated_annotation_count\x18\x01 \x01(\x05\x42\x03\xe0\x41\x02R\x16\x63reatedAnnotationCount\x12%\n\treport_id\x18\x02 \x01(\tB\x03\xe0\x41\x01H\x00R\x08reportId\x88\x01\x01\x12\x1f\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x01H\x01R\x05jobId\x88\x01\x01\x42\x0c\n\n_report_idB\t\n\x07_job_id\"\xde\x04\n\x1b\x45valuateRulesPreviewRequest\x12;\n\x03run\x18\x01 \x01(\x0b\x32\'.sift.common.type.v1.ResourceIdentifierH\x00R\x03run\x12M\n\x0erun_time_range\x18\x08 \x01(\x0b\x32%.sift.rule_evaluation.v1.RunTimeRangeH\x00R\x0crunTimeRange\x12U\n\x05rules\x18\x03 \x01(\x0b\x32=.sift.rule_evaluation.v1.EvaluateRulesFromCurrentRuleVersionsH\x01R\x05rules\x12]\n\rrule_versions\x18\x04 \x01(\x0b\x32\x36.sift.rule_evaluation.v1.EvaluateRulesFromRuleVersionsH\x01R\x0cruleVersions\x12\x63\n\x0freport_template\x18\x05 \x01(\x0b\x32\x38.sift.rule_evaluation.v1.EvaluateRulesFromReportTemplateH\x01R\x0ereportTemplate\x12Z\n\x0crule_configs\x18\x06 \x01(\x0b\x32\x35.sift.rule_evaluation.v1.EvaluateRulesFromRuleConfigsH\x01R\x0bruleConfigs\x12,\n\x0forganization_id\x18\x07 \x01(\tB\x03\xe0\x41\x01R\x0eorganizationIdB\x06\n\x04timeB\x06\n\x04mode\"_\n\x1c\x45valuateRulesFromRuleConfigs\x12?\n\x07\x63onfigs\x18\x01 \x03(\x0b\x32 .sift.rules.v1.UpdateRuleRequestB\x03\xe0\x41\x02R\x07\x63onfigs\"\xe8\x01\n\x11RulePreviewOutput\x12 \n\trule_name\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x08ruleName\x12\x17\n\x07rule_id\x18\x02 \x01(\tR\x06ruleId\x12&\n\x0frule_version_id\x18\x03 \x01(\tR\rruleVersionId\x12\x1e\n\x08\x61sset_id\x18\x04 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId\x12 \n\texit_code\x18\x05 \x01(\x05\x42\x03\xe0\x41\x02R\x08\x65xitCode\x12\x16\n\x06stdout\x18\x06 \x01(\tR\x06stdout\x12\x16\n\x06stderr\x18\x07 \x01(\tR\x06stderr\"\xfd\x01\n\x1c\x45valuateRulesPreviewResponse\x12=\n\x18\x63reated_annotation_count\x18\x01 \x01(\x05\x42\x03\xe0\x41\x02R\x16\x63reatedAnnotationCount\x12O\n\x13\x64ry_run_annotations\x18\x02 \x03(\x0b\x32\x1f.sift.rules.v1.DryRunAnnotationR\x11\x64ryRunAnnotations\x12M\n\x0crule_outputs\x18\x03 \x03(\x0b\x32*.sift.rule_evaluation.v1.RulePreviewOutputR\x0bruleOutputs2\xd8\x06\n\x15RuleEvaluationService\x12\xc3\x02\n\rEvaluateRules\x12-.sift.rule_evaluation.v1.EvaluateRulesRequest\x1a..sift.rule_evaluation.v1.EvaluateRulesResponse\"\xd2\x01\x92\x41\xa7\x01\x12\rEvaluateRules\x1a\x95\x01\x45valuate rules from a designated source against a run or asset and return the total amount of annotations created and the ID of the generated report.\x82\xd3\xe4\x93\x02!\"\x1c/api/v1/rules/evaluate-rules:\x01*\x12\xda\x02\n\x14\x45valuateRulesPreview\x12\x34.sift.rule_evaluation.v1.EvaluateRulesPreviewRequest\x1a\x35.sift.rule_evaluation.v1.EvaluateRulesPreviewResponse\"\xd4\x01\x92\x41\xa1\x01\x12\x14\x45valuateRulesPreview\x1a\x88\x01Perform a dry run evaluation for existing rules or rule configurations against a run and return the annotations that would be generated.\x82\xd3\xe4\x93\x02)\"$/api/v1/rules/evaluate-rules:preview:\x01*\x1a\x9b\x01\x92\x41\x97\x01\x12\x1aService to evaluate rules.\x1ay\n\x1fRead more about what rules 
are.\x12Vhttps://customer.support.siftstack.com/servicedesk/customer/portal/2/article/265421102B\xca\x01\n\x1b\x63om.sift.rule_evaluation.v1B\x13RuleEvaluationProtoP\x01\xa2\x02\x03SRX\xaa\x02\x16Sift.RuleEvaluation.V1\xca\x02\x16Sift\\RuleEvaluation\\V1\xe2\x02\"Sift\\RuleEvaluation\\V1\\GPBMetadata\xea\x02\x18Sift::RuleEvaluation::V1\x92\x41\x1b\x12\x19\n\x17Rule Evaluation Serviceb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -60,6 +60,12 @@ _globals['_EVALUATERULESPREVIEWREQUEST'].fields_by_name['organization_id']._serialized_options = b'\340A\001' _globals['_EVALUATERULESFROMRULECONFIGS'].fields_by_name['configs']._loaded_options = None _globals['_EVALUATERULESFROMRULECONFIGS'].fields_by_name['configs']._serialized_options = b'\340A\002' + _globals['_RULEPREVIEWOUTPUT'].fields_by_name['rule_name']._loaded_options = None + _globals['_RULEPREVIEWOUTPUT'].fields_by_name['rule_name']._serialized_options = b'\340A\002' + _globals['_RULEPREVIEWOUTPUT'].fields_by_name['asset_id']._loaded_options = None + _globals['_RULEPREVIEWOUTPUT'].fields_by_name['asset_id']._serialized_options = b'\340A\002' + _globals['_RULEPREVIEWOUTPUT'].fields_by_name['exit_code']._loaded_options = None + _globals['_RULEPREVIEWOUTPUT'].fields_by_name['exit_code']._serialized_options = b'\340A\002' _globals['_EVALUATERULESPREVIEWRESPONSE'].fields_by_name['created_annotation_count']._loaded_options = None _globals['_EVALUATERULESPREVIEWRESPONSE'].fields_by_name['created_annotation_count']._serialized_options = b'\340A\002' _globals['_RULEEVALUATIONSERVICE']._loaded_options = None @@ -88,8 +94,10 @@ _globals['_EVALUATERULESPREVIEWREQUEST']._serialized_end=2681 _globals['_EVALUATERULESFROMRULECONFIGS']._serialized_start=2683 _globals['_EVALUATERULESFROMRULECONFIGS']._serialized_end=2778 - _globals['_EVALUATERULESPREVIEWRESPONSE']._serialized_start=2781 - _globals['_EVALUATERULESPREVIEWRESPONSE']._serialized_end=2955 - _globals['_RULEEVALUATIONSERVICE']._serialized_start=2958 - _globals['_RULEEVALUATIONSERVICE']._serialized_end=3814 + _globals['_RULEPREVIEWOUTPUT']._serialized_start=2781 + _globals['_RULEPREVIEWOUTPUT']._serialized_end=3013 + _globals['_EVALUATERULESPREVIEWRESPONSE']._serialized_start=3016 + _globals['_EVALUATERULESPREVIEWRESPONSE']._serialized_end=3269 + _globals['_RULEEVALUATIONSERVICE']._serialized_start=3272 + _globals['_RULEEVALUATIONSERVICE']._serialized_end=4128 # @@protoc_insertion_point(module_scope) diff --git a/python/lib/sift/rule_evaluation/v1/rule_evaluation_pb2.pyi b/python/lib/sift/rule_evaluation/v1/rule_evaluation_pb2.pyi index 6a7fc2665..1fbe917c5 100644 --- a/python/lib/sift/rule_evaluation/v1/rule_evaluation_pb2.pyi +++ b/python/lib/sift/rule_evaluation/v1/rule_evaluation_pb2.pyi @@ -280,21 +280,58 @@ class EvaluateRulesFromRuleConfigs(google.protobuf.message.Message): global___EvaluateRulesFromRuleConfigs = EvaluateRulesFromRuleConfigs +@typing.final +class RulePreviewOutput(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + RULE_NAME_FIELD_NUMBER: builtins.int + RULE_ID_FIELD_NUMBER: builtins.int + RULE_VERSION_ID_FIELD_NUMBER: builtins.int + ASSET_ID_FIELD_NUMBER: builtins.int + EXIT_CODE_FIELD_NUMBER: builtins.int + STDOUT_FIELD_NUMBER: builtins.int + STDERR_FIELD_NUMBER: builtins.int + rule_name: builtins.str + rule_id: builtins.str + rule_version_id: builtins.str + asset_id: builtins.str + exit_code: builtins.int + stdout: builtins.str + stderr: builtins.str + def __init__( + self, + *, + 
rule_name: builtins.str = ..., + rule_id: builtins.str = ..., + rule_version_id: builtins.str = ..., + asset_id: builtins.str = ..., + exit_code: builtins.int = ..., + stdout: builtins.str = ..., + stderr: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["asset_id", b"asset_id", "exit_code", b"exit_code", "rule_id", b"rule_id", "rule_name", b"rule_name", "rule_version_id", b"rule_version_id", "stderr", b"stderr", "stdout", b"stdout"]) -> None: ... + +global___RulePreviewOutput = RulePreviewOutput + @typing.final class EvaluateRulesPreviewResponse(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor CREATED_ANNOTATION_COUNT_FIELD_NUMBER: builtins.int DRY_RUN_ANNOTATIONS_FIELD_NUMBER: builtins.int + RULE_OUTPUTS_FIELD_NUMBER: builtins.int created_annotation_count: builtins.int @property def dry_run_annotations(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[sift.rules.v1.rules_pb2.DryRunAnnotation]: ... + @property + def rule_outputs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RulePreviewOutput]: ... def __init__( self, *, created_annotation_count: builtins.int = ..., dry_run_annotations: collections.abc.Iterable[sift.rules.v1.rules_pb2.DryRunAnnotation] | None = ..., + rule_outputs: collections.abc.Iterable[global___RulePreviewOutput] | None = ..., ) -> None: ... - def ClearField(self, field_name: typing.Literal["created_annotation_count", b"created_annotation_count", "dry_run_annotations", b"dry_run_annotations"]) -> None: ... + def ClearField(self, field_name: typing.Literal["created_annotation_count", b"created_annotation_count", "dry_run_annotations", b"dry_run_annotations", "rule_outputs", b"rule_outputs"]) -> None: ... 
global___EvaluateRulesPreviewResponse = EvaluateRulesPreviewResponse diff --git a/python/lib/sift/rules/v1/rules_pb2.py b/python/lib/sift/rules/v1/rules_pb2.py index 4180e2aa3..843f84461 100644 --- a/python/lib/sift/rules/v1/rules_pb2.py +++ b/python/lib/sift/rules/v1/rules_pb2.py @@ -21,7 +21,7 @@ from sift.common.type.v1 import user_defined_functions_pb2 as sift_dot_common_dot_type_dot_v1_dot_user__defined__functions__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19sift/rules/v1/rules.proto\x12\rsift.rules.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\x1a%sift/annotations/v1/annotations.proto\x1a-sift/common/type/v1/resource_identifier.proto\x1a\x30sift/common/type/v1/user_defined_functions.proto\"\x99\x07\n\x04Rule\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\x12 \n\x08\x61sset_id\x18\x02 \x01(\tB\x05\x18\x01\xe0\x41\x02R\x07\x61ssetId\x12\x17\n\x04name\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x04name\x12%\n\x0b\x64\x65scription\x18\x04 \x01(\tB\x03\xe0\x41\x02R\x0b\x64\x65scription\x12\"\n\nis_enabled\x18\x06 \x01(\x08\x42\x03\xe0\x41\x02R\tisEnabled\x12\x42\n\x0c\x63reated_date\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0b\x63reatedDate\x12\x44\n\rmodified_date\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0cmodifiedDate\x12\x30\n\x12\x63reated_by_user_id\x18\t \x01(\tB\x03\xe0\x41\x02R\x0f\x63reatedByUserId\x12\x32\n\x13modified_by_user_id\x18\n \x01(\tB\x03\xe0\x41\x02R\x10modifiedByUserId\x12,\n\x0forganization_id\x18\x0b \x01(\tB\x03\xe0\x41\x02R\x0eorganizationId\x12\x41\n\nconditions\x18\x0c \x03(\x0b\x32\x1c.sift.rules.v1.RuleConditionB\x03\xe0\x41\x02R\nconditions\x12\x42\n\x0crule_version\x18\r \x01(\x0b\x32\x1a.sift.rules.v1.RuleVersionB\x03\xe0\x41\x02R\x0bruleVersion\x12\"\n\nclient_key\x18\x0e \x01(\tB\x03\xe0\x41\x01R\tclientKey\x12[\n\x13\x61sset_configuration\x18\x0f \x01(\x0b\x32%.sift.rules.v1.RuleAssetConfigurationB\x03\xe0\x41\x02R\x12\x61ssetConfiguration\x12W\n\x13\x63ontextual_channels\x18\x10 \x01(\x0b\x32!.sift.rules.v1.ContextualChannelsB\x03\xe0\x41\x02R\x12\x63ontextualChannels\x12\x42\n\x0c\x64\x65leted_date\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01R\x0b\x64\x65letedDate\x12$\n\x0bis_external\x18\x12 \x01(\x08\x42\x03\xe0\x41\x02R\nisExternalJ\x04\x08\x05\x10\x06\"\x9b\x04\n\rRuleCondition\x12/\n\x11rule_condition_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x0fruleConditionId\x12\x1c\n\x07rule_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\x12K\n\nexpression\x18\x03 \x01(\x0b\x32&.sift.rules.v1.RuleConditionExpressionB\x03\xe0\x41\x02R\nexpression\x12\x42\n\x0c\x63reated_date\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0b\x63reatedDate\x12\x44\n\rmodified_date\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0cmodifiedDate\x12\x30\n\x12\x63reated_by_user_id\x18\x07 \x01(\tB\x03\xe0\x41\x02R\x0f\x63reatedByUserId\x12\x32\n\x13modified_by_user_id\x18\x08 \x01(\tB\x03\xe0\x41\x02R\x10modifiedByUserId\x12\x38\n\x07\x61\x63tions\x18\t \x03(\x0b\x32\x19.sift.rules.v1.RuleActionB\x03\xe0\x41\x02R\x07\x61\x63tions\x12>\n\x19rule_condition_version_id\x18\n \x01(\tB\x03\xe0\x41\x02R\x16ruleConditionVersionIdJ\x04\x08\x04\x10\x05\"\xa6\x04\n\nRuleAction\x12)\n\x0erule_action_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x0cruleActionId\x12/\n\x11rule_condition_id\x18\x02 
\x01(\tB\x03\xe0\x41\x02R\x0fruleConditionId\x12?\n\x0b\x61\x63tion_type\x18\x03 \x01(\x0e\x32\x19.sift.rules.v1.ActionKindB\x03\xe0\x41\x02R\nactionType\x12Q\n\rconfiguration\x18\x04 \x01(\x0b\x32&.sift.rules.v1.RuleActionConfigurationB\x03\xe0\x41\x02R\rconfiguration\x12\x42\n\x0c\x63reated_date\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0b\x63reatedDate\x12\x44\n\rmodified_date\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0cmodifiedDate\x12\x30\n\x12\x63reated_by_user_id\x18\x07 \x01(\tB\x03\xe0\x41\x02R\x0f\x63reatedByUserId\x12\x32\n\x13modified_by_user_id\x18\x08 \x01(\tB\x03\xe0\x41\x02R\x10modifiedByUserId\x12\x38\n\x16rule_action_version_id\x18\t \x01(\tB\x03\xe0\x41\x02R\x13ruleActionVersionId\"N\n\x16RuleAssetConfiguration\x12\x1b\n\tasset_ids\x18\x01 \x03(\tR\x08\x61ssetIds\x12\x17\n\x07tag_ids\x18\x02 \x03(\tR\x06tagIds\"V\n\x12\x43ontextualChannels\x12@\n\x08\x63hannels\x18\x01 \x03(\x0b\x32\x1f.sift.rules.v1.ChannelReferenceB\x03\xe0\x41\x02R\x08\x63hannels\"\xb6\x01\n\x1f\x41ssetExpressionValidationResult\x12\x1e\n\x08\x61sset_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId\x12\"\n\nasset_name\x18\x02 \x01(\tB\x03\xe0\x41\x02R\tassetName\x12%\n\x0c\x61sset_tag_id\x18\x03 \x01(\tB\x03\xe0\x41\x01R\nassetTagId\x12\x1e\n\x05\x65rror\x18\x04 \x01(\tB\x03\xe0\x41\x01H\x00R\x05\x65rror\x88\x01\x01\x42\x08\n\x06_error\"\xc6\x03\n\x12SearchRulesRequest\x12\x19\n\x05limit\x18\x01 \x01(\rH\x00R\x05limit\x88\x01\x01\x12\x16\n\x06offset\x18\x02 \x01(\rR\x06offset\x12\x35\n\x05order\x18\x03 \x01(\x0e\x32\x1a.sift.rules.v1.SearchOrderH\x01R\x05order\x88\x01\x01\x12!\n\x0cname_matches\x18\x04 \x01(\tR\x0bnameMatches\x12%\n\x0e\x63\x61se_sensitive\x18\x05 \x01(\x08R\rcaseSensitive\x12\x16\n\x06regexp\x18\x06 \x01(\x08R\x06regexp\x12\x1e\n\x08order_by\x18\x07 \x01(\tH\x02R\x07orderBy\x88\x01\x01\x12\x19\n\x08rule_ids\x18\x08 \x03(\tR\x07ruleIds\x12\x1b\n\tasset_ids\x18\t \x03(\tR\x08\x61ssetIds\x12\'\n\x0finclude_deleted\x18\n \x01(\x08R\x0eincludeDeleted\x12\x42\n\nasset_tags\x18\x0b \x01(\x0b\x32#.sift.common.type.v1.NamedResourcesR\tassetTagsB\x08\n\x06_limitB\x08\n\x06_orderB\x0b\n\t_order_by\"`\n\x13SearchRulesResponse\x12\x19\n\x05\x63ount\x18\x01 \x01(\rB\x03\xe0\x41\x02R\x05\x63ount\x12.\n\x05rules\x18\x02 \x03(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x05rules\"R\n\x0eGetRuleRequest\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x01R\x06ruleId\x12\"\n\nclient_key\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tclientKey\"?\n\x0fGetRuleResponse\x12,\n\x04rule\x18\x01 \x01(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x04rule\"\\\n\x14\x42\x61tchGetRulesRequest\x12\x1e\n\x08rule_ids\x18\x01 \x03(\tB\x03\xe0\x41\x01R\x07ruleIds\x12$\n\x0b\x63lient_keys\x18\x02 \x03(\tB\x03\xe0\x41\x01R\nclientKeys\"G\n\x15\x42\x61tchGetRulesResponse\x12.\n\x05rules\x18\x01 \x03(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x05rules\"R\n\x11\x43reateRuleRequest\x12=\n\x06update\x18\x01 \x01(\x0b\x32 .sift.rules.v1.UpdateRuleRequestB\x03\xe0\x41\x02R\x06update\"2\n\x12\x43reateRuleResponse\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\"\xdd\x04\n\x11UpdateRuleRequest\x12\x1c\n\x07rule_id\x18\x01 \x01(\tH\x00R\x06ruleId\x88\x01\x01\x12\x17\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x04name\x12%\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x0b\x64\x65scription\x12 \n\x08\x61sset_id\x18\x04 \x01(\tB\x05\x18\x01\xe0\x41\x01R\x07\x61ssetId\x12\"\n\nis_enabled\x18\x05 
\x01(\x08\x42\x03\xe0\x41\x02R\tisEnabled\x12J\n\nconditions\x18\x06 \x03(\x0b\x32%.sift.rules.v1.UpdateConditionRequestB\x03\xe0\x41\x02R\nconditions\x12\'\n\x0forganization_id\x18\x07 \x01(\tR\x0eorganizationId\x12#\n\rversion_notes\x18\x08 \x01(\tR\x0cversionNotes\x12\"\n\nclient_key\x18\t \x01(\tH\x01R\tclientKey\x88\x01\x01\x12V\n\x13\x61sset_configuration\x18\n \x01(\x0b\x32%.sift.rules.v1.RuleAssetConfigurationR\x12\x61ssetConfiguration\x12R\n\x13\x63ontextual_channels\x18\x0b \x01(\x0b\x32!.sift.rules.v1.ContextualChannelsR\x12\x63ontextualChannels\x12\x1f\n\x0bis_external\x18\x0c \x01(\x08R\nisExternalB\n\n\x08_rule_idB\r\n\x0b_client_key\"\xf5\x01\n\x16UpdateConditionRequest\x12/\n\x11rule_condition_id\x18\x01 \x01(\tH\x00R\x0fruleConditionId\x88\x01\x01\x12K\n\nexpression\x18\x03 \x01(\x0b\x32&.sift.rules.v1.RuleConditionExpressionB\x03\xe0\x41\x02R\nexpression\x12\x41\n\x07\x61\x63tions\x18\x04 \x03(\x0b\x32\".sift.rules.v1.UpdateActionRequestB\x03\xe0\x41\x02R\x07\x61\x63tionsB\x14\n\x12_rule_condition_idJ\x04\x08\x02\x10\x03\"\xe7\x01\n\x13UpdateActionRequest\x12)\n\x0erule_action_id\x18\x01 \x01(\tH\x00R\x0cruleActionId\x88\x01\x01\x12?\n\x0b\x61\x63tion_type\x18\x02 \x01(\x0e\x32\x19.sift.rules.v1.ActionKindB\x03\xe0\x41\x02R\nactionType\x12Q\n\rconfiguration\x18\x03 \x01(\x0b\x32&.sift.rules.v1.RuleActionConfigurationB\x03\xe0\x41\x02R\rconfigurationB\x11\n\x0f_rule_action_id\"2\n\x12UpdateRuleResponse\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\"\x83\x02\n\x10ValidationResult\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x01R\x06ruleId\x12\"\n\nclient_key\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tclientKey\x12\x82\x01\n#asset_expression_validation_results\x18\x03 \x03(\x0b\x32..sift.rules.v1.AssetExpressionValidationResultB\x03\xe0\x41\x02R assetExpressionValidationResults\x12\x1e\n\x05\x65rror\x18\x04 \x01(\tB\x03\xe0\x41\x01H\x00R\x05\x65rror\x88\x01\x01\x42\x08\n\x06_error\"\xcb\x01\n\x17\x42\x61tchUpdateRulesRequest\x12;\n\x05rules\x18\x01 \x03(\x0b\x32 .sift.rules.v1.UpdateRuleRequestB\x03\xe0\x41\x02R\x05rules\x12(\n\rvalidate_only\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01R\x0cvalidateOnly\x12I\n\x1eoverride_expression_validation\x18\x03 \x01(\x08\x42\x03\xe0\x41\x02R\x1coverrideExpressionValidation\"\x9d\x04\n\x18\x42\x61tchUpdateRulesResponse\x12\x1d\n\x07success\x18\x01 \x01(\x08\x42\x03\xe0\x41\x02R\x07success\x12\x33\n\x13rules_created_count\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02R\x11rulesCreatedCount\x12\x33\n\x13rules_updated_count\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02R\x11rulesUpdatedCount\x12(\n\rvalidate_only\x18\x04 \x01(\x08\x42\x03\xe0\x41\x02R\x0cvalidateOnly\x12S\n\x12validation_results\x18\x05 \x03(\x0b\x32\x1f.sift.rules.v1.ValidationResultB\x03\xe0\x41\x02R\x11validationResults\x12v\n\x18\x63reated_rule_identifiers\x18\x06 \x03(\x0b\x32\x37.sift.rules.v1.BatchUpdateRulesResponse.RuleIdentifiersB\x03\xe0\x41\x02R\x16\x63reatedRuleIdentifiers\x1a\x80\x01\n\x0fRuleIdentifiers\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\x12\x17\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x04name\x12\'\n\nclient_key\x18\x03 \x01(\tB\x03\xe0\x41\x01H\x00R\tclientKey\x88\x01\x01\x42\r\n\x0b_client_key\"U\n\x11\x44\x65leteRuleRequest\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x01R\x06ruleId\x12\"\n\nclient_key\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tclientKey\"\x14\n\x12\x44\x65leteRuleResponse\"_\n\x17\x42\x61tchDeleteRulesRequest\x12\x1e\n\x08rule_ids\x18\x01 \x03(\tB\x03\xe0\x41\x01R\x07ruleIds\x12$\n\x0b\x63lient_keys\x18\x02 
\x03(\tB\x03\xe0\x41\x01R\nclientKeys\"\x1a\n\x18\x42\x61tchDeleteRulesResponse\"W\n\x13UndeleteRuleRequest\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x01R\x06ruleId\x12\"\n\nclient_key\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tclientKey\"\x16\n\x14UndeleteRuleResponse\"a\n\x19\x42\x61tchUndeleteRulesRequest\x12\x1e\n\x08rule_ids\x18\x01 \x03(\tB\x03\xe0\x41\x01R\x07ruleIds\x12$\n\x0b\x63lient_keys\x18\x02 \x03(\tB\x03\xe0\x41\x01R\nclientKeys\"\x1c\n\x1a\x42\x61tchUndeleteRulesResponse\"C\n\x1dViewHumanFriendlyRulesRequest\x12\x1e\n\x08\x61sset_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId:\x02\x18\x01\"H\n\x1eViewHumanFriendlyRulesResponse\x12\"\n\nrules_json\x18\x01 \x01(\tB\x03\xe0\x41\x02R\trulesJson:\x02\x18\x01\"\x97\x01\n\x1fUpdateHumanFriendlyRulesRequest\x12\x1e\n\x08\x61sset_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId\x12\"\n\nrules_json\x18\x02 \x01(\tB\x03\xe0\x41\x02R\trulesJson\x12,\n\x0forganization_id\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x0eorganizationId:\x02\x18\x01\"\x8c\x01\n UpdateHumanFriendlyRulesResponse\x12\x1d\n\x07success\x18\x01 \x01(\x08\x42\x03\xe0\x41\x02R\x07success\x12$\n\x0brules_count\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02R\nrulesCount\x12\x1f\n\x08messages\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x08messages:\x02\x18\x01\"6\n\x14ViewJsonRulesRequest\x12\x1e\n\x08\x61sset_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId\";\n\x15ViewJsonRulesResponse\x12\"\n\nrules_json\x18\x01 \x01(\tB\x03\xe0\x41\x02R\trulesJson\"\x84\x01\n\x10JsonRulesRequest\x12\x1e\n\x08\x61sset_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId\x12\"\n\nrules_json\x18\x02 \x01(\tB\x03\xe0\x41\x02R\trulesJson\x12,\n\x0forganization_id\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x0eorganizationId\"\xc1\x02\n\x11JsonRulesResponse\x12\x1d\n\x07success\x18\x01 \x01(\x08\x42\x03\xe0\x41\x02R\x07success\x12/\n\x11total_rules_count\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02R\x0ftotalRulesCount\x12\x33\n\x13rules_created_count\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02R\x11rulesCreatedCount\x12\x33\n\x13rules_updated_count\x18\x04 \x01(\x05\x42\x03\xe0\x41\x02R\x11rulesUpdatedCount\x12\x33\n\x13rules_deleted_count\x18\x05 \x01(\x05\x42\x03\xe0\x41\x02R\x11rulesDeletedCount\x12*\n\x0e\x65rror_messages\x18\x06 \x01(\tH\x00R\rerrorMessages\x88\x01\x01\x42\x11\n\x0f_error_messages\"Z\n\x18ValidateJsonRulesRequest\x12>\n\x07request\x18\x01 \x01(\x0b\x32\x1f.sift.rules.v1.JsonRulesRequestB\x03\xe0\x41\x02R\x07request\"^\n\x19ValidateJsonRulesResponse\x12\x41\n\x08response\x18\x01 \x01(\x0b\x32 .sift.rules.v1.JsonRulesResponseB\x03\xe0\x41\x02R\x08response\"X\n\x16UpdateJsonRulesRequest\x12>\n\x07request\x18\x01 \x01(\x0b\x32\x1f.sift.rules.v1.JsonRulesRequestB\x03\xe0\x41\x02R\x07request\"\\\n\x17UpdateJsonRulesResponse\x12\x41\n\x08response\x18\x01 \x01(\x0b\x32 .sift.rules.v1.JsonRulesResponseB\x03\xe0\x41\x02R\x08response\"\x95\x01\n\x10ListRulesRequest\x12 \n\tpage_size\x18\x01 \x01(\rB\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x03 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\x12\x1e\n\x08order_by\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x07orderBy\"k\n\x11ListRulesResponse\x12.\n\x05rules\x18\x01 \x03(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x05rules\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"\x95\x01\n\x17ListRuleVersionsRequest\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\x12 \n\tpage_size\x18\x02 \x01(\rB\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x03 
\x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x16\n\x06\x66ilter\x18\x04 \x01(\tR\x06\x66ilter\"\x9a\x03\n\x0bRuleVersion\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\x12+\n\x0frule_version_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\rruleVersionId\x12\x1d\n\x07version\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x07version\x12\x42\n\x0c\x63reated_date\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0b\x63reatedDate\x12\x30\n\x12\x63reated_by_user_id\x18\x05 \x01(\tB\x03\xe0\x41\x02R\x0f\x63reatedByUserId\x12(\n\rversion_notes\x18\x06 \x01(\tB\x03\xe0\x41\x02R\x0cversionNotes\x12=\n\x18generated_change_message\x18\x07 \x01(\tB\x03\xe0\x41\x02R\x16generatedChangeMessage\x12\x42\n\x0c\x64\x65leted_date\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01R\x0b\x64\x65letedDate\"\x88\x01\n\x18ListRuleVersionsResponse\x12\x44\n\rrule_versions\x18\x01 \x03(\x0b\x32\x1a.sift.rules.v1.RuleVersionB\x03\xe0\x41\x02R\x0cruleVersions\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"D\n\x15GetRuleVersionRequest\x12+\n\x0frule_version_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\rruleVersionId\"F\n\x16GetRuleVersionResponse\x12,\n\x04rule\x18\x01 \x01(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x04rule\"L\n\x1b\x42\x61tchGetRuleVersionsRequest\x12-\n\x10rule_version_ids\x18\x01 \x03(\tB\x03\xe0\x41\x02R\x0eruleVersionIds\"N\n\x1c\x42\x61tchGetRuleVersionsResponse\x12.\n\x05rules\x18\x01 \x03(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x05rules\"\xf4\x01\n\x17RuleConditionExpression\x12r\n\x19single_channel_comparison\x18\x01 \x01(\x0b\x32\x30.sift.rules.v1.SingleChannelComparisonExpressionB\x02\x18\x01H\x00R\x17singleChannelComparison\x12W\n\x12\x63\x61lculated_channel\x18\x02 \x01(\x0b\x32&.sift.rules.v1.CalculatedChannelConfigH\x00R\x11\x63\x61lculatedChannelB\x0c\n\nexpression\"\xcb\x02\n!SingleChannelComparisonExpression\x12\x30\n\x11\x63hannel_component\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x10\x63hannelComponent\x12&\n\x0c\x63hannel_name\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x0b\x63hannelName\x12G\n\ncomparator\x18\x03 \x01(\x0e\x32\".sift.rules.v1.ConditionComparatorB\x03\xe0\x41\x02R\ncomparator\x12\x18\n\x06\x64ouble\x18\x04 \x01(\x01H\x00R\x06\x64ouble\x12\x18\n\x06string\x18\x05 \x01(\tH\x00R\x06string\x12\x42\n\nlast_value\x18\x06 \x01(\x0b\x32!.sift.rules.v1.LastValueThresholdH\x00R\tlastValueB\x0b\n\tthreshold\"\x14\n\x12LastValueThreshold\"\xfb\x02\n\x17\x43\x61lculatedChannelConfig\x12q\n\x12\x63hannel_references\x18\x01 \x03(\x0b\x32=.sift.rules.v1.CalculatedChannelConfig.ChannelReferencesEntryB\x03\xe0\x41\x02R\x11\x63hannelReferences\x12#\n\nexpression\x18\x02 \x01(\tB\x03\xe0\x41\x02R\nexpression\x12\x61\n\x15\x66unction_dependencies\x18\x03 \x03(\x0b\x32\'.sift.common.type.v1.FunctionDependencyB\x03\xe0\x41\x01R\x14\x66unctionDependencies\x1a\x65\n\x16\x43hannelReferencesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32\x1f.sift.rules.v1.ChannelReferenceR\x05value:\x02\x38\x01\"N\n\x10\x43hannelReference\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x04name\x12!\n\tcomponent\x18\x02 \x01(\tB\x03\xe0\x41\x02R\tcomponent\"\xd0\x01\n\x17RuleActionConfiguration\x12T\n\x0cnotification\x18\x01 \x01(\x0b\x32..sift.rules.v1.NotificationActionConfigurationH\x00R\x0cnotification\x12N\n\nannotation\x18\x02 \x01(\x0b\x32,.sift.rules.v1.AnnotationActionConfigurationH\x00R\nannotationB\x0f\n\rconfiguration\"T\n\x1fNotificationActionConfiguration\x12\x31\n\x12recipient_user_ids\x18\x01 
\x03(\tB\x03\xe0\x41\x02R\x10recipientUserIds\"\xd7\x01\n\x1d\x41nnotationActionConfiguration\x12\x1c\n\x07tag_ids\x18\x01 \x03(\tB\x03\xe0\x41\x02R\x06tagIds\x12L\n\x0f\x61nnotation_type\x18\x02 \x01(\x0e\x32#.sift.annotations.v1.AnnotationTypeR\x0e\x61nnotationType\x12\x32\n\x13\x61ssigned_to_user_id\x18\x03 \x01(\tH\x00R\x10\x61ssignedToUserId\x88\x01\x01\x42\x16\n\x14_assigned_to_user_id\"\x8e\x02\n\x14\x45valuateRulesRequest\x12\x1e\n\x08rule_ids\x18\x01 \x03(\tB\x03\xe0\x41\x02R\x07ruleIds\x12X\n\x12\x61nnotation_options\x18\x02 \x01(\x0b\x32).sift.rules.v1.EvaluatedAnnotationOptionsR\x11\x61nnotationOptions\x12\x17\n\x06run_id\x18\x03 \x01(\tH\x00R\x05runId\x12>\n\ntime_range\x18\x04 \x01(\x0b\x32\x1d.sift.rules.v1.TimeRangeQueryH\x00R\ttimeRange\x12\x17\n\x07\x64ry_run\x18\x05 \x01(\x08R\x06\x64ryRun:\x02\x18\x01\x42\x06\n\x04time\"5\n\x1a\x45valuatedAnnotationOptions\x12\x17\n\x07tag_ids\x18\x01 \x03(\tR\x06tagIds\"\x82\x01\n\x0eTimeRangeQuery\x12\x39\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\"\xfd\x01\n\x15\x45valuateRulesResponse\x12\x38\n\x18\x63reated_annotation_count\x18\x01 \x01(\x05R\x16\x63reatedAnnotationCount\x12O\n\x13\x64ry_run_annotations\x18\x02 \x03(\x0b\x32\x1f.sift.rules.v1.DryRunAnnotationR\x11\x64ryRunAnnotations\x12\x1a\n\x06job_id\x18\x03 \x01(\tH\x00R\x05jobId\x88\x01\x01\x12 \n\treport_id\x18\x04 \x01(\tH\x01R\x08reportId\x88\x01\x01:\x02\x18\x01\x42\t\n\x07_job_idB\x0c\n\n_report_id\"\xf7\x01\n\x10\x44ryRunAnnotation\x12!\n\x0c\x63ondition_id\x18\x01 \x01(\tR\x0b\x63onditionId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12>\n\nstart_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\tstartTime\x12:\n\x08\x65nd_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x07\x65ndTime\x12\x30\n\x14\x63ondition_version_id\x18\x05 \x01(\tR\x12\x63onditionVersionId*\\\n\x0bSearchOrder\x12 \n\x18SEARCH_ORDER_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\x14\n\x10SEARCH_ORDER_ASC\x10\x01\x12\x15\n\x11SEARCH_ORDER_DESC\x10\x02*\\\n\nActionKind\x12\x1f\n\x17\x41\x43TION_KIND_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\x10\n\x0cNOTIFICATION\x10\x01\x12\x0e\n\nANNOTATION\x10\x02\x12\x0b\n\x07WEBHOOK\x10\x03*\xad\x01\n\x13\x43onditionComparator\x12(\n CONDITION_COMPARATOR_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\r\n\tNOT_EQUAL\x10\x06\x32\xd0 \n\x0bRuleService\x12\xb9\x01\n\x0bSearchRules\x12!.sift.rules.v1.SearchRulesRequest\x1a\".sift.rules.v1.SearchRulesResponse\"c\x92\x41\x41\x12\x0bSearchRules\x1a\x32Queries rules based on provided search parameters.\x82\xd3\xe4\x93\x02\x19\"\x14/api/v1/rules/search:\x01*\x12\x94\x01\n\x07GetRule\x12\x1d.sift.rules.v1.GetRuleRequest\x1a\x1e.sift.rules.v1.GetRuleResponse\"J\x92\x41\x32\x12\x07GetRule\x1a\'Retrieves the latest version of a rule.\x82\xd3\xe4\x93\x02\x0f\x12\r/api/v1/rules\x12\xaa\x01\n\rBatchGetRules\x12#.sift.rules.v1.BatchGetRulesRequest\x1a$.sift.rules.v1.BatchGetRulesResponse\"N\x92\x41*\x12\rBatchGetRules\x1a\x19Retrieves multiple rules.\x82\xd3\xe4\x93\x02\x1b\"\x16/api/v1/rules:batchGet:\x01*\x12\x8b\x01\n\nCreateRule\x12 .sift.rules.v1.CreateRuleRequest\x1a!.sift.rules.v1.CreateRuleResponse\"8\x92\x41\x1d\x12\nCreateRule\x1a\x0f\x43reates a 
rule.\x82\xd3\xe4\x93\x02\x12\"\r/api/v1/rules:\x01*\x12\x95\x01\n\nUpdateRule\x12 .sift.rules.v1.UpdateRuleRequest\x1a!.sift.rules.v1.UpdateRuleResponse\"B\x92\x41\'\x12\nUpdateRule\x1a\x19Updates an existing rule.\x82\xd3\xe4\x93\x02\x12\x1a\r/api/v1/rules:\x01*\x12\xda\x01\n\x10\x42\x61tchUpdateRules\x12&.sift.rules.v1.BatchUpdateRulesRequest\x1a\'.sift.rules.v1.BatchUpdateRulesResponse\"u\x92\x41N\x12\x10\x42\x61tchUpdateRules\x1a:Updates existing rules or creates rules that do not exist.\x82\xd3\xe4\x93\x02\x1e\x1a\x19/api/v1/rules:batchUpdate:\x01*\x12\x92\x01\n\nDeleteRule\x12 .sift.rules.v1.DeleteRuleRequest\x1a!.sift.rules.v1.DeleteRuleResponse\"?\x92\x41\x1d\x12\nDeleteRule\x1a\x0f\x44\x65letes a rule.\x82\xd3\xe4\x93\x02\x19\"\x14/api/v1/rules/delete:\x01*\x12\xb7\x01\n\x10\x42\x61tchDeleteRules\x12&.sift.rules.v1.BatchDeleteRulesRequest\x1a\'.sift.rules.v1.BatchDeleteRulesResponse\"R\x92\x41+\x12\x10\x42\x61tchDeleteRules\x1a\x17\x44\x65letes multiple rules.\x82\xd3\xe4\x93\x02\x1e\"\x19/api/v1/rules/batchDelete:\x01*\x12\x9e\x01\n\x0cUndeleteRule\x12\".sift.rules.v1.UndeleteRuleRequest\x1a#.sift.rules.v1.UndeleteRuleResponse\"E\x92\x41!\x12\x0cUndeleteRule\x1a\x11Undeletes a rule.\x82\xd3\xe4\x93\x02\x1b\"\x16/api/v1/rules/undelete:\x01*\x12\xc3\x01\n\x12\x42\x61tchUndeleteRules\x12(.sift.rules.v1.BatchUndeleteRulesRequest\x1a).sift.rules.v1.BatchUndeleteRulesResponse\"X\x92\x41/\x12\x12\x42\x61tchUndeleteRules\x1a\x19Undeletes multiple rules.\x82\xd3\xe4\x93\x02 \"\x1b/api/v1/rules/batchUndelete:\x01*\x12\xdf\x01\n\rEvaluateRules\x12#.sift.rules.v1.EvaluateRulesRequest\x1a$.sift.rules.v1.EvaluateRulesResponse\"\x82\x01\x88\x02\x01\x92\x41[\x12\rEvaluateRules\x1aJEvaluates the provided rules and generate annotations based on the result.\x82\xd3\xe4\x93\x02\x1b\"\x16/api/v1/rules/evaluate:\x01*\x12\x84\x02\n\x16ViewHumanFriendlyRules\x12,.sift.rules.v1.ViewHumanFriendlyRulesRequest\x1a-.sift.rules.v1.ViewHumanFriendlyRulesResponse\"\x8c\x01\x88\x02\x01\x92\x41_\x12\x16ViewHumanFriendlyRules\x1a\x45Retrieve a JSON object containing all of the rules for a given asset.\x82\xd3\xe4\x93\x02!\x12\x1f/api/v1/rules:viewHumanFriendly\x12\xd6\x01\n\rViewJsonRules\x12#.sift.rules.v1.ViewJsonRulesRequest\x1a$.sift.rules.v1.ViewJsonRulesResponse\"z\x88\x02\x01\x92\x41V\x12\rViewJsonRules\x1a\x45Retrieve a JSON object containing all of the rules for a given asset.\x82\xd3\xe4\x93\x02\x18\x12\x16/api/v1/rules:viewJson\x12\x94\x02\n\x18UpdateHumanFriendlyRules\x12..sift.rules.v1.UpdateHumanFriendlyRulesRequest\x1a/.sift.rules.v1.UpdateHumanFriendlyRulesResponse\"\x96\x01\x88\x02\x01\x92\x41\x64\x12\x18UpdateHumanFriendlyRules\x1aHBatch update rules given the `rules_json` which is a JSON list of rules.\x82\xd3\xe4\x93\x02&\"!/api/v1/rules:updateHumanFriendly:\x01*\x12\x80\x02\n\x11ValidateJsonRules\x12\'.sift.rules.v1.ValidateJsonRulesRequest\x1a(.sift.rules.v1.ValidateJsonRulesResponse\"\x97\x01\x88\x02\x01\x92\x41l\x12\x11ValidateJsonRules\x1aWValidate a batch update for rules given the `rules_json` which is a JSON list of rules.\x82\xd3\xe4\x93\x02\x1f\"\x1a/api/v1/rules:validateJson:\x01*\x12\xe7\x01\n\x0fUpdateJsonRules\x12%.sift.rules.v1.UpdateJsonRulesRequest\x1a&.sift.rules.v1.UpdateJsonRulesResponse\"\x84\x01\x88\x02\x01\x92\x41[\x12\x0fUpdateJsonRules\x1aHBatch update rules given the `rules_json` which is a JSON list of rules.\x82\xd3\xe4\x93\x02\x1d\"\x18/api/v1/rules:updateJson:\x01*\x12\x94\x01\n\tListRules\x12\x1f.sift.rules.v1.ListRulesRequest\x1a 
.sift.rules.v1.ListRulesResponse\"D\x92\x41\'\x12\tListRules\x1a\x1aRetrieves a list of rules.\x82\xd3\xe4\x93\x02\x14\x12\x12/api/v1/rules:list\x12\xd9\x01\n\x10ListRuleVersions\x12&.sift.rules.v1.ListRuleVersionsRequest\x1a\'.sift.rules.v1.ListRuleVersionsResponse\"t\x92\x41I\x12\x10ListRuleVersions\x1a\x35Retrieves a list of rule versions for the given rule.\x82\xd3\xe4\x93\x02\"\x12 /api/v1/rules/{rule_id}/versions\x12\xb8\x01\n\x0eGetRuleVersion\x12$.sift.rules.v1.GetRuleVersionRequest\x1a%.sift.rules.v1.GetRuleVersionResponse\"Y\x92\x41\x39\x12\x0eGetRuleVersion\x1a\'Retrieves a specific version of a rule.\x82\xd3\xe4\x93\x02\x17\x12\x15/api/v1/rules:version\x12\xdf\x01\n\x14\x42\x61tchGetRuleVersions\x12*.sift.rules.v1.BatchGetRuleVersionsRequest\x1a+.sift.rules.v1.BatchGetRuleVersionsResponse\"n\x92\x41\x42\x12\x14\x42\x61tchGetRuleVersions\x1a*Retrieves multiple rules by rule versions.\x82\xd3\xe4\x93\x02#\"\x1e/api/v1/rules:batchGetVersions:\x01*\x1a\xb1\x01\x92\x41\xad\x01\x12\x30Service to programmatically interact with rules.\x1ay\n\x1fRead more about what rules are.\x12Vhttps://customer.support.siftstack.com/servicedesk/customer/portal/2/article/265421102B\x88\x01\n\x11\x63om.sift.rules.v1B\nRulesProtoP\x01\xa2\x02\x03SRX\xaa\x02\rSift.Rules.V1\xca\x02\rSift\\Rules\\V1\xe2\x02\x19Sift\\Rules\\V1\\GPBMetadata\xea\x02\x0fSift::Rules::V1\x92\x41\x10\x12\x0e\n\x0cRule Serviceb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19sift/rules/v1/rules.proto\x12\rsift.rules.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a.protoc-gen-openapiv2/options/annotations.proto\x1a%sift/annotations/v1/annotations.proto\x1a-sift/common/type/v1/resource_identifier.proto\x1a\x30sift/common/type/v1/user_defined_functions.proto\"\x99\x07\n\x04Rule\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\x12 \n\x08\x61sset_id\x18\x02 \x01(\tB\x05\x18\x01\xe0\x41\x02R\x07\x61ssetId\x12\x17\n\x04name\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x04name\x12%\n\x0b\x64\x65scription\x18\x04 \x01(\tB\x03\xe0\x41\x02R\x0b\x64\x65scription\x12\"\n\nis_enabled\x18\x06 \x01(\x08\x42\x03\xe0\x41\x02R\tisEnabled\x12\x42\n\x0c\x63reated_date\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0b\x63reatedDate\x12\x44\n\rmodified_date\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0cmodifiedDate\x12\x30\n\x12\x63reated_by_user_id\x18\t \x01(\tB\x03\xe0\x41\x02R\x0f\x63reatedByUserId\x12\x32\n\x13modified_by_user_id\x18\n \x01(\tB\x03\xe0\x41\x02R\x10modifiedByUserId\x12,\n\x0forganization_id\x18\x0b \x01(\tB\x03\xe0\x41\x02R\x0eorganizationId\x12\x41\n\nconditions\x18\x0c \x03(\x0b\x32\x1c.sift.rules.v1.RuleConditionB\x03\xe0\x41\x02R\nconditions\x12\x42\n\x0crule_version\x18\r \x01(\x0b\x32\x1a.sift.rules.v1.RuleVersionB\x03\xe0\x41\x02R\x0bruleVersion\x12\"\n\nclient_key\x18\x0e \x01(\tB\x03\xe0\x41\x01R\tclientKey\x12[\n\x13\x61sset_configuration\x18\x0f \x01(\x0b\x32%.sift.rules.v1.RuleAssetConfigurationB\x03\xe0\x41\x02R\x12\x61ssetConfiguration\x12W\n\x13\x63ontextual_channels\x18\x10 \x01(\x0b\x32!.sift.rules.v1.ContextualChannelsB\x03\xe0\x41\x02R\x12\x63ontextualChannels\x12\x42\n\x0c\x64\x65leted_date\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01R\x0b\x64\x65letedDate\x12$\n\x0bis_external\x18\x12 \x01(\x08\x42\x03\xe0\x41\x02R\nisExternalJ\x04\x08\x05\x10\x06\"\x9b\x04\n\rRuleCondition\x12/\n\x11rule_condition_id\x18\x01 
\x01(\tB\x03\xe0\x41\x02R\x0fruleConditionId\x12\x1c\n\x07rule_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\x12K\n\nexpression\x18\x03 \x01(\x0b\x32&.sift.rules.v1.RuleConditionExpressionB\x03\xe0\x41\x02R\nexpression\x12\x42\n\x0c\x63reated_date\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0b\x63reatedDate\x12\x44\n\rmodified_date\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0cmodifiedDate\x12\x30\n\x12\x63reated_by_user_id\x18\x07 \x01(\tB\x03\xe0\x41\x02R\x0f\x63reatedByUserId\x12\x32\n\x13modified_by_user_id\x18\x08 \x01(\tB\x03\xe0\x41\x02R\x10modifiedByUserId\x12\x38\n\x07\x61\x63tions\x18\t \x03(\x0b\x32\x19.sift.rules.v1.RuleActionB\x03\xe0\x41\x02R\x07\x61\x63tions\x12>\n\x19rule_condition_version_id\x18\n \x01(\tB\x03\xe0\x41\x02R\x16ruleConditionVersionIdJ\x04\x08\x04\x10\x05\"\xa6\x04\n\nRuleAction\x12)\n\x0erule_action_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x0cruleActionId\x12/\n\x11rule_condition_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x0fruleConditionId\x12?\n\x0b\x61\x63tion_type\x18\x03 \x01(\x0e\x32\x19.sift.rules.v1.ActionKindB\x03\xe0\x41\x02R\nactionType\x12Q\n\rconfiguration\x18\x04 \x01(\x0b\x32&.sift.rules.v1.RuleActionConfigurationB\x03\xe0\x41\x02R\rconfiguration\x12\x42\n\x0c\x63reated_date\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0b\x63reatedDate\x12\x44\n\rmodified_date\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0cmodifiedDate\x12\x30\n\x12\x63reated_by_user_id\x18\x07 \x01(\tB\x03\xe0\x41\x02R\x0f\x63reatedByUserId\x12\x32\n\x13modified_by_user_id\x18\x08 \x01(\tB\x03\xe0\x41\x02R\x10modifiedByUserId\x12\x38\n\x16rule_action_version_id\x18\t \x01(\tB\x03\xe0\x41\x02R\x13ruleActionVersionId\"N\n\x16RuleAssetConfiguration\x12\x1b\n\tasset_ids\x18\x01 \x03(\tR\x08\x61ssetIds\x12\x17\n\x07tag_ids\x18\x02 \x03(\tR\x06tagIds\"V\n\x12\x43ontextualChannels\x12@\n\x08\x63hannels\x18\x01 \x03(\x0b\x32\x1f.sift.rules.v1.ChannelReferenceB\x03\xe0\x41\x02R\x08\x63hannels\"\xb6\x01\n\x1f\x41ssetExpressionValidationResult\x12\x1e\n\x08\x61sset_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId\x12\"\n\nasset_name\x18\x02 \x01(\tB\x03\xe0\x41\x02R\tassetName\x12%\n\x0c\x61sset_tag_id\x18\x03 \x01(\tB\x03\xe0\x41\x01R\nassetTagId\x12\x1e\n\x05\x65rror\x18\x04 \x01(\tB\x03\xe0\x41\x01H\x00R\x05\x65rror\x88\x01\x01\x42\x08\n\x06_error\"\xc6\x03\n\x12SearchRulesRequest\x12\x19\n\x05limit\x18\x01 \x01(\rH\x00R\x05limit\x88\x01\x01\x12\x16\n\x06offset\x18\x02 \x01(\rR\x06offset\x12\x35\n\x05order\x18\x03 \x01(\x0e\x32\x1a.sift.rules.v1.SearchOrderH\x01R\x05order\x88\x01\x01\x12!\n\x0cname_matches\x18\x04 \x01(\tR\x0bnameMatches\x12%\n\x0e\x63\x61se_sensitive\x18\x05 \x01(\x08R\rcaseSensitive\x12\x16\n\x06regexp\x18\x06 \x01(\x08R\x06regexp\x12\x1e\n\x08order_by\x18\x07 \x01(\tH\x02R\x07orderBy\x88\x01\x01\x12\x19\n\x08rule_ids\x18\x08 \x03(\tR\x07ruleIds\x12\x1b\n\tasset_ids\x18\t \x03(\tR\x08\x61ssetIds\x12\'\n\x0finclude_deleted\x18\n \x01(\x08R\x0eincludeDeleted\x12\x42\n\nasset_tags\x18\x0b \x01(\x0b\x32#.sift.common.type.v1.NamedResourcesR\tassetTagsB\x08\n\x06_limitB\x08\n\x06_orderB\x0b\n\t_order_by\"`\n\x13SearchRulesResponse\x12\x19\n\x05\x63ount\x18\x01 \x01(\rB\x03\xe0\x41\x02R\x05\x63ount\x12.\n\x05rules\x18\x02 \x03(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x05rules\"R\n\x0eGetRuleRequest\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x01R\x06ruleId\x12\"\n\nclient_key\x18\x02 
\x01(\tB\x03\xe0\x41\x01R\tclientKey\"?\n\x0fGetRuleResponse\x12,\n\x04rule\x18\x01 \x01(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x04rule\"\\\n\x14\x42\x61tchGetRulesRequest\x12\x1e\n\x08rule_ids\x18\x01 \x03(\tB\x03\xe0\x41\x01R\x07ruleIds\x12$\n\x0b\x63lient_keys\x18\x02 \x03(\tB\x03\xe0\x41\x01R\nclientKeys\"G\n\x15\x42\x61tchGetRulesResponse\x12.\n\x05rules\x18\x01 \x03(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x05rules\"R\n\x11\x43reateRuleRequest\x12=\n\x06update\x18\x01 \x01(\x0b\x32 .sift.rules.v1.UpdateRuleRequestB\x03\xe0\x41\x02R\x06update\"2\n\x12\x43reateRuleResponse\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\"\xdd\x04\n\x11UpdateRuleRequest\x12\x1c\n\x07rule_id\x18\x01 \x01(\tH\x00R\x06ruleId\x88\x01\x01\x12\x17\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x04name\x12%\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x0b\x64\x65scription\x12 \n\x08\x61sset_id\x18\x04 \x01(\tB\x05\x18\x01\xe0\x41\x01R\x07\x61ssetId\x12\"\n\nis_enabled\x18\x05 \x01(\x08\x42\x03\xe0\x41\x02R\tisEnabled\x12J\n\nconditions\x18\x06 \x03(\x0b\x32%.sift.rules.v1.UpdateConditionRequestB\x03\xe0\x41\x02R\nconditions\x12\'\n\x0forganization_id\x18\x07 \x01(\tR\x0eorganizationId\x12#\n\rversion_notes\x18\x08 \x01(\tR\x0cversionNotes\x12\"\n\nclient_key\x18\t \x01(\tH\x01R\tclientKey\x88\x01\x01\x12V\n\x13\x61sset_configuration\x18\n \x01(\x0b\x32%.sift.rules.v1.RuleAssetConfigurationR\x12\x61ssetConfiguration\x12R\n\x13\x63ontextual_channels\x18\x0b \x01(\x0b\x32!.sift.rules.v1.ContextualChannelsR\x12\x63ontextualChannels\x12\x1f\n\x0bis_external\x18\x0c \x01(\x08R\nisExternalB\n\n\x08_rule_idB\r\n\x0b_client_key\"\xf5\x01\n\x16UpdateConditionRequest\x12/\n\x11rule_condition_id\x18\x01 \x01(\tH\x00R\x0fruleConditionId\x88\x01\x01\x12K\n\nexpression\x18\x03 \x01(\x0b\x32&.sift.rules.v1.RuleConditionExpressionB\x03\xe0\x41\x02R\nexpression\x12\x41\n\x07\x61\x63tions\x18\x04 \x03(\x0b\x32\".sift.rules.v1.UpdateActionRequestB\x03\xe0\x41\x02R\x07\x61\x63tionsB\x14\n\x12_rule_condition_idJ\x04\x08\x02\x10\x03\"\xe7\x01\n\x13UpdateActionRequest\x12)\n\x0erule_action_id\x18\x01 \x01(\tH\x00R\x0cruleActionId\x88\x01\x01\x12?\n\x0b\x61\x63tion_type\x18\x02 \x01(\x0e\x32\x19.sift.rules.v1.ActionKindB\x03\xe0\x41\x02R\nactionType\x12Q\n\rconfiguration\x18\x03 \x01(\x0b\x32&.sift.rules.v1.RuleActionConfigurationB\x03\xe0\x41\x02R\rconfigurationB\x11\n\x0f_rule_action_id\"2\n\x12UpdateRuleResponse\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\"\x83\x02\n\x10ValidationResult\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x01R\x06ruleId\x12\"\n\nclient_key\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tclientKey\x12\x82\x01\n#asset_expression_validation_results\x18\x03 \x03(\x0b\x32..sift.rules.v1.AssetExpressionValidationResultB\x03\xe0\x41\x02R assetExpressionValidationResults\x12\x1e\n\x05\x65rror\x18\x04 \x01(\tB\x03\xe0\x41\x01H\x00R\x05\x65rror\x88\x01\x01\x42\x08\n\x06_error\"\xcb\x01\n\x17\x42\x61tchUpdateRulesRequest\x12;\n\x05rules\x18\x01 \x03(\x0b\x32 .sift.rules.v1.UpdateRuleRequestB\x03\xe0\x41\x02R\x05rules\x12(\n\rvalidate_only\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01R\x0cvalidateOnly\x12I\n\x1eoverride_expression_validation\x18\x03 \x01(\x08\x42\x03\xe0\x41\x02R\x1coverrideExpressionValidation\"\x9d\x04\n\x18\x42\x61tchUpdateRulesResponse\x12\x1d\n\x07success\x18\x01 \x01(\x08\x42\x03\xe0\x41\x02R\x07success\x12\x33\n\x13rules_created_count\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02R\x11rulesCreatedCount\x12\x33\n\x13rules_updated_count\x18\x03 
\x01(\x05\x42\x03\xe0\x41\x02R\x11rulesUpdatedCount\x12(\n\rvalidate_only\x18\x04 \x01(\x08\x42\x03\xe0\x41\x02R\x0cvalidateOnly\x12S\n\x12validation_results\x18\x05 \x03(\x0b\x32\x1f.sift.rules.v1.ValidationResultB\x03\xe0\x41\x02R\x11validationResults\x12v\n\x18\x63reated_rule_identifiers\x18\x06 \x03(\x0b\x32\x37.sift.rules.v1.BatchUpdateRulesResponse.RuleIdentifiersB\x03\xe0\x41\x02R\x16\x63reatedRuleIdentifiers\x1a\x80\x01\n\x0fRuleIdentifiers\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\x12\x17\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x04name\x12\'\n\nclient_key\x18\x03 \x01(\tB\x03\xe0\x41\x01H\x00R\tclientKey\x88\x01\x01\x42\r\n\x0b_client_key\"U\n\x11\x44\x65leteRuleRequest\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x01R\x06ruleId\x12\"\n\nclient_key\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tclientKey\"\x14\n\x12\x44\x65leteRuleResponse\"_\n\x17\x42\x61tchDeleteRulesRequest\x12\x1e\n\x08rule_ids\x18\x01 \x03(\tB\x03\xe0\x41\x01R\x07ruleIds\x12$\n\x0b\x63lient_keys\x18\x02 \x03(\tB\x03\xe0\x41\x01R\nclientKeys\"\x1a\n\x18\x42\x61tchDeleteRulesResponse\"W\n\x13UndeleteRuleRequest\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x01R\x06ruleId\x12\"\n\nclient_key\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tclientKey\"\x16\n\x14UndeleteRuleResponse\"a\n\x19\x42\x61tchUndeleteRulesRequest\x12\x1e\n\x08rule_ids\x18\x01 \x03(\tB\x03\xe0\x41\x01R\x07ruleIds\x12$\n\x0b\x63lient_keys\x18\x02 \x03(\tB\x03\xe0\x41\x01R\nclientKeys\"\x1c\n\x1a\x42\x61tchUndeleteRulesResponse\"C\n\x1dViewHumanFriendlyRulesRequest\x12\x1e\n\x08\x61sset_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId:\x02\x18\x01\"H\n\x1eViewHumanFriendlyRulesResponse\x12\"\n\nrules_json\x18\x01 \x01(\tB\x03\xe0\x41\x02R\trulesJson:\x02\x18\x01\"\x97\x01\n\x1fUpdateHumanFriendlyRulesRequest\x12\x1e\n\x08\x61sset_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId\x12\"\n\nrules_json\x18\x02 \x01(\tB\x03\xe0\x41\x02R\trulesJson\x12,\n\x0forganization_id\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x0eorganizationId:\x02\x18\x01\"\x8c\x01\n UpdateHumanFriendlyRulesResponse\x12\x1d\n\x07success\x18\x01 \x01(\x08\x42\x03\xe0\x41\x02R\x07success\x12$\n\x0brules_count\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02R\nrulesCount\x12\x1f\n\x08messages\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x08messages:\x02\x18\x01\"6\n\x14ViewJsonRulesRequest\x12\x1e\n\x08\x61sset_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId\";\n\x15ViewJsonRulesResponse\x12\"\n\nrules_json\x18\x01 \x01(\tB\x03\xe0\x41\x02R\trulesJson\"\x84\x01\n\x10JsonRulesRequest\x12\x1e\n\x08\x61sset_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x07\x61ssetId\x12\"\n\nrules_json\x18\x02 \x01(\tB\x03\xe0\x41\x02R\trulesJson\x12,\n\x0forganization_id\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x0eorganizationId\"\xc1\x02\n\x11JsonRulesResponse\x12\x1d\n\x07success\x18\x01 \x01(\x08\x42\x03\xe0\x41\x02R\x07success\x12/\n\x11total_rules_count\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02R\x0ftotalRulesCount\x12\x33\n\x13rules_created_count\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02R\x11rulesCreatedCount\x12\x33\n\x13rules_updated_count\x18\x04 \x01(\x05\x42\x03\xe0\x41\x02R\x11rulesUpdatedCount\x12\x33\n\x13rules_deleted_count\x18\x05 \x01(\x05\x42\x03\xe0\x41\x02R\x11rulesDeletedCount\x12*\n\x0e\x65rror_messages\x18\x06 \x01(\tH\x00R\rerrorMessages\x88\x01\x01\x42\x11\n\x0f_error_messages\"Z\n\x18ValidateJsonRulesRequest\x12>\n\x07request\x18\x01 \x01(\x0b\x32\x1f.sift.rules.v1.JsonRulesRequestB\x03\xe0\x41\x02R\x07request\"^\n\x19ValidateJsonRulesResponse\x12\x41\n\x08response\x18\x01 \x01(\x0b\x32 
.sift.rules.v1.JsonRulesResponseB\x03\xe0\x41\x02R\x08response\"X\n\x16UpdateJsonRulesRequest\x12>\n\x07request\x18\x01 \x01(\x0b\x32\x1f.sift.rules.v1.JsonRulesRequestB\x03\xe0\x41\x02R\x07request\"\\\n\x17UpdateJsonRulesResponse\x12\x41\n\x08response\x18\x01 \x01(\x0b\x32 .sift.rules.v1.JsonRulesResponseB\x03\xe0\x41\x02R\x08response\"\x95\x01\n\x10ListRulesRequest\x12 \n\tpage_size\x18\x01 \x01(\rB\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x02 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x1b\n\x06\x66ilter\x18\x03 \x01(\tB\x03\xe0\x41\x01R\x06\x66ilter\x12\x1e\n\x08order_by\x18\x04 \x01(\tB\x03\xe0\x41\x01R\x07orderBy\"k\n\x11ListRulesResponse\x12.\n\x05rules\x18\x01 \x03(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x05rules\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"\x95\x01\n\x17ListRuleVersionsRequest\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\x12 \n\tpage_size\x18\x02 \x01(\rB\x03\xe0\x41\x01R\x08pageSize\x12\"\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01R\tpageToken\x12\x16\n\x06\x66ilter\x18\x04 \x01(\tR\x06\x66ilter\"\x9a\x03\n\x0bRuleVersion\x12\x1c\n\x07rule_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06ruleId\x12+\n\x0frule_version_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\rruleVersionId\x12\x1d\n\x07version\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x07version\x12\x42\n\x0c\x63reated_date\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x0b\x63reatedDate\x12\x30\n\x12\x63reated_by_user_id\x18\x05 \x01(\tB\x03\xe0\x41\x02R\x0f\x63reatedByUserId\x12(\n\rversion_notes\x18\x06 \x01(\tB\x03\xe0\x41\x02R\x0cversionNotes\x12=\n\x18generated_change_message\x18\x07 \x01(\tB\x03\xe0\x41\x02R\x16generatedChangeMessage\x12\x42\n\x0c\x64\x65leted_date\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01R\x0b\x64\x65letedDate\"\x88\x01\n\x18ListRuleVersionsResponse\x12\x44\n\rrule_versions\x18\x01 \x03(\x0b\x32\x1a.sift.rules.v1.RuleVersionB\x03\xe0\x41\x02R\x0cruleVersions\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken\"D\n\x15GetRuleVersionRequest\x12+\n\x0frule_version_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\rruleVersionId\"F\n\x16GetRuleVersionResponse\x12,\n\x04rule\x18\x01 \x01(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x04rule\"L\n\x1b\x42\x61tchGetRuleVersionsRequest\x12-\n\x10rule_version_ids\x18\x01 \x03(\tB\x03\xe0\x41\x02R\x0eruleVersionIds\"N\n\x1c\x42\x61tchGetRuleVersionsResponse\x12.\n\x05rules\x18\x01 \x03(\x0b\x32\x13.sift.rules.v1.RuleB\x03\xe0\x41\x02R\x05rules\"\xb2\x02\n\x17RuleConditionExpression\x12r\n\x19single_channel_comparison\x18\x01 \x01(\x0b\x32\x30.sift.rules.v1.SingleChannelComparisonExpressionB\x02\x18\x01H\x00R\x17singleChannelComparison\x12W\n\x12\x63\x61lculated_channel\x18\x02 \x01(\x0b\x32&.sift.rules.v1.CalculatedChannelConfigH\x00R\x11\x63\x61lculatedChannel\x12<\n\x0bpython_code\x18\x03 \x01(\x0b\x32\x19.sift.rules.v1.PythonCodeH\x00R\npythonCodeB\x0c\n\nexpression\"\xcb\x02\n!SingleChannelComparisonExpression\x12\x30\n\x11\x63hannel_component\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x10\x63hannelComponent\x12&\n\x0c\x63hannel_name\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x0b\x63hannelName\x12G\n\ncomparator\x18\x03 \x01(\x0e\x32\".sift.rules.v1.ConditionComparatorB\x03\xe0\x41\x02R\ncomparator\x12\x18\n\x06\x64ouble\x18\x04 \x01(\x01H\x00R\x06\x64ouble\x12\x18\n\x06string\x18\x05 \x01(\tH\x00R\x06string\x12\x42\n\nlast_value\x18\x06 
\x01(\x0b\x32!.sift.rules.v1.LastValueThresholdH\x00R\tlastValueB\x0b\n\tthreshold\"\x14\n\x12LastValueThreshold\"\xfb\x02\n\x17\x43\x61lculatedChannelConfig\x12q\n\x12\x63hannel_references\x18\x01 \x03(\x0b\x32=.sift.rules.v1.CalculatedChannelConfig.ChannelReferencesEntryB\x03\xe0\x41\x02R\x11\x63hannelReferences\x12#\n\nexpression\x18\x02 \x01(\tB\x03\xe0\x41\x02R\nexpression\x12\x61\n\x15\x66unction_dependencies\x18\x03 \x03(\x0b\x32\'.sift.common.type.v1.FunctionDependencyB\x03\xe0\x41\x01R\x14\x66unctionDependencies\x1a\x65\n\x16\x43hannelReferencesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32\x1f.sift.rules.v1.ChannelReferenceR\x05value:\x02\x38\x01\"N\n\x10\x43hannelReference\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x04name\x12!\n\tcomponent\x18\x02 \x01(\tB\x03\xe0\x41\x02R\tcomponent\"T\n\x16PythonChannelReference\x12!\n\treference\x18\x01 \x01(\tB\x03\xe0\x41\x02R\treference\x12\x17\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x04name\"\xa9\x01\n\nPythonCode\x12Y\n\x12\x63hannel_references\x18\x01 \x03(\x0b\x32%.sift.rules.v1.PythonChannelReferenceB\x03\xe0\x41\x02R\x11\x63hannelReferences\x12\x17\n\x04\x63ode\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x04\x63ode\x12\'\n\x0c\x64\x65pendencies\x18\x03 \x01(\tB\x03\xe0\x41\x01R\x0c\x64\x65pendencies\"\xd0\x01\n\x17RuleActionConfiguration\x12T\n\x0cnotification\x18\x01 \x01(\x0b\x32..sift.rules.v1.NotificationActionConfigurationH\x00R\x0cnotification\x12N\n\nannotation\x18\x02 \x01(\x0b\x32,.sift.rules.v1.AnnotationActionConfigurationH\x00R\nannotationB\x0f\n\rconfiguration\"T\n\x1fNotificationActionConfiguration\x12\x31\n\x12recipient_user_ids\x18\x01 \x03(\tB\x03\xe0\x41\x02R\x10recipientUserIds\"\xd7\x01\n\x1d\x41nnotationActionConfiguration\x12\x1c\n\x07tag_ids\x18\x01 \x03(\tB\x03\xe0\x41\x02R\x06tagIds\x12L\n\x0f\x61nnotation_type\x18\x02 \x01(\x0e\x32#.sift.annotations.v1.AnnotationTypeR\x0e\x61nnotationType\x12\x32\n\x13\x61ssigned_to_user_id\x18\x03 \x01(\tH\x00R\x10\x61ssignedToUserId\x88\x01\x01\x42\x16\n\x14_assigned_to_user_id\"\x8e\x02\n\x14\x45valuateRulesRequest\x12\x1e\n\x08rule_ids\x18\x01 \x03(\tB\x03\xe0\x41\x02R\x07ruleIds\x12X\n\x12\x61nnotation_options\x18\x02 \x01(\x0b\x32).sift.rules.v1.EvaluatedAnnotationOptionsR\x11\x61nnotationOptions\x12\x17\n\x06run_id\x18\x03 \x01(\tH\x00R\x05runId\x12>\n\ntime_range\x18\x04 \x01(\x0b\x32\x1d.sift.rules.v1.TimeRangeQueryH\x00R\ttimeRange\x12\x17\n\x07\x64ry_run\x18\x05 \x01(\x08R\x06\x64ryRun:\x02\x18\x01\x42\x06\n\x04time\"5\n\x1a\x45valuatedAnnotationOptions\x12\x17\n\x07tag_ids\x18\x01 \x03(\tR\x06tagIds\"\x82\x01\n\x0eTimeRangeQuery\x12\x39\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\tstartTime\x12\x35\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x07\x65ndTime\"\xfd\x01\n\x15\x45valuateRulesResponse\x12\x38\n\x18\x63reated_annotation_count\x18\x01 \x01(\x05R\x16\x63reatedAnnotationCount\x12O\n\x13\x64ry_run_annotations\x18\x02 \x03(\x0b\x32\x1f.sift.rules.v1.DryRunAnnotationR\x11\x64ryRunAnnotations\x12\x1a\n\x06job_id\x18\x03 \x01(\tH\x00R\x05jobId\x88\x01\x01\x12 \n\treport_id\x18\x04 \x01(\tH\x01R\x08reportId\x88\x01\x01:\x02\x18\x01\x42\t\n\x07_job_idB\x0c\n\n_report_id\"\xf7\x01\n\x10\x44ryRunAnnotation\x12!\n\x0c\x63ondition_id\x18\x01 \x01(\tR\x0b\x63onditionId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12>\n\nstart_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\tstartTime\x12:\n\x08\x65nd_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02R\x07\x65ndTime\x12\x30\n\x14\x63ondition_version_id\x18\x05 \x01(\tR\x12\x63onditionVersionId*\\\n\x0bSearchOrder\x12 \n\x18SEARCH_ORDER_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\x14\n\x10SEARCH_ORDER_ASC\x10\x01\x12\x15\n\x11SEARCH_ORDER_DESC\x10\x02*\\\n\nActionKind\x12\x1f\n\x17\x41\x43TION_KIND_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\x10\n\x0cNOTIFICATION\x10\x01\x12\x0e\n\nANNOTATION\x10\x02\x12\x0b\n\x07WEBHOOK\x10\x03*\xad\x01\n\x13\x43onditionComparator\x12(\n CONDITION_COMPARATOR_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\r\n\tNOT_EQUAL\x10\x06\x32\xd0 \n\x0bRuleService\x12\xb9\x01\n\x0bSearchRules\x12!.sift.rules.v1.SearchRulesRequest\x1a\".sift.rules.v1.SearchRulesResponse\"c\x92\x41\x41\x12\x0bSearchRules\x1a\x32Queries rules based on provided search parameters.\x82\xd3\xe4\x93\x02\x19\"\x14/api/v1/rules/search:\x01*\x12\x94\x01\n\x07GetRule\x12\x1d.sift.rules.v1.GetRuleRequest\x1a\x1e.sift.rules.v1.GetRuleResponse\"J\x92\x41\x32\x12\x07GetRule\x1a\'Retrieves the latest version of a rule.\x82\xd3\xe4\x93\x02\x0f\x12\r/api/v1/rules\x12\xaa\x01\n\rBatchGetRules\x12#.sift.rules.v1.BatchGetRulesRequest\x1a$.sift.rules.v1.BatchGetRulesResponse\"N\x92\x41*\x12\rBatchGetRules\x1a\x19Retrieves multiple rules.\x82\xd3\xe4\x93\x02\x1b\"\x16/api/v1/rules:batchGet:\x01*\x12\x8b\x01\n\nCreateRule\x12 .sift.rules.v1.CreateRuleRequest\x1a!.sift.rules.v1.CreateRuleResponse\"8\x92\x41\x1d\x12\nCreateRule\x1a\x0f\x43reates a rule.\x82\xd3\xe4\x93\x02\x12\"\r/api/v1/rules:\x01*\x12\x95\x01\n\nUpdateRule\x12 .sift.rules.v1.UpdateRuleRequest\x1a!.sift.rules.v1.UpdateRuleResponse\"B\x92\x41\'\x12\nUpdateRule\x1a\x19Updates an existing rule.\x82\xd3\xe4\x93\x02\x12\x1a\r/api/v1/rules:\x01*\x12\xda\x01\n\x10\x42\x61tchUpdateRules\x12&.sift.rules.v1.BatchUpdateRulesRequest\x1a\'.sift.rules.v1.BatchUpdateRulesResponse\"u\x92\x41N\x12\x10\x42\x61tchUpdateRules\x1a:Updates existing rules or creates rules that do not exist.\x82\xd3\xe4\x93\x02\x1e\x1a\x19/api/v1/rules:batchUpdate:\x01*\x12\x92\x01\n\nDeleteRule\x12 .sift.rules.v1.DeleteRuleRequest\x1a!.sift.rules.v1.DeleteRuleResponse\"?\x92\x41\x1d\x12\nDeleteRule\x1a\x0f\x44\x65letes a rule.\x82\xd3\xe4\x93\x02\x19\"\x14/api/v1/rules/delete:\x01*\x12\xb7\x01\n\x10\x42\x61tchDeleteRules\x12&.sift.rules.v1.BatchDeleteRulesRequest\x1a\'.sift.rules.v1.BatchDeleteRulesResponse\"R\x92\x41+\x12\x10\x42\x61tchDeleteRules\x1a\x17\x44\x65letes multiple rules.\x82\xd3\xe4\x93\x02\x1e\"\x19/api/v1/rules/batchDelete:\x01*\x12\x9e\x01\n\x0cUndeleteRule\x12\".sift.rules.v1.UndeleteRuleRequest\x1a#.sift.rules.v1.UndeleteRuleResponse\"E\x92\x41!\x12\x0cUndeleteRule\x1a\x11Undeletes a rule.\x82\xd3\xe4\x93\x02\x1b\"\x16/api/v1/rules/undelete:\x01*\x12\xc3\x01\n\x12\x42\x61tchUndeleteRules\x12(.sift.rules.v1.BatchUndeleteRulesRequest\x1a).sift.rules.v1.BatchUndeleteRulesResponse\"X\x92\x41/\x12\x12\x42\x61tchUndeleteRules\x1a\x19Undeletes multiple rules.\x82\xd3\xe4\x93\x02 \"\x1b/api/v1/rules/batchUndelete:\x01*\x12\xdf\x01\n\rEvaluateRules\x12#.sift.rules.v1.EvaluateRulesRequest\x1a$.sift.rules.v1.EvaluateRulesResponse\"\x82\x01\x88\x02\x01\x92\x41[\x12\rEvaluateRules\x1aJEvaluates the provided rules and generate annotations based on the 
result.\x82\xd3\xe4\x93\x02\x1b\"\x16/api/v1/rules/evaluate:\x01*\x12\x84\x02\n\x16ViewHumanFriendlyRules\x12,.sift.rules.v1.ViewHumanFriendlyRulesRequest\x1a-.sift.rules.v1.ViewHumanFriendlyRulesResponse\"\x8c\x01\x88\x02\x01\x92\x41_\x12\x16ViewHumanFriendlyRules\x1a\x45Retrieve a JSON object containing all of the rules for a given asset.\x82\xd3\xe4\x93\x02!\x12\x1f/api/v1/rules:viewHumanFriendly\x12\xd6\x01\n\rViewJsonRules\x12#.sift.rules.v1.ViewJsonRulesRequest\x1a$.sift.rules.v1.ViewJsonRulesResponse\"z\x88\x02\x01\x92\x41V\x12\rViewJsonRules\x1a\x45Retrieve a JSON object containing all of the rules for a given asset.\x82\xd3\xe4\x93\x02\x18\x12\x16/api/v1/rules:viewJson\x12\x94\x02\n\x18UpdateHumanFriendlyRules\x12..sift.rules.v1.UpdateHumanFriendlyRulesRequest\x1a/.sift.rules.v1.UpdateHumanFriendlyRulesResponse\"\x96\x01\x88\x02\x01\x92\x41\x64\x12\x18UpdateHumanFriendlyRules\x1aHBatch update rules given the `rules_json` which is a JSON list of rules.\x82\xd3\xe4\x93\x02&\"!/api/v1/rules:updateHumanFriendly:\x01*\x12\x80\x02\n\x11ValidateJsonRules\x12\'.sift.rules.v1.ValidateJsonRulesRequest\x1a(.sift.rules.v1.ValidateJsonRulesResponse\"\x97\x01\x88\x02\x01\x92\x41l\x12\x11ValidateJsonRules\x1aWValidate a batch update for rules given the `rules_json` which is a JSON list of rules.\x82\xd3\xe4\x93\x02\x1f\"\x1a/api/v1/rules:validateJson:\x01*\x12\xe7\x01\n\x0fUpdateJsonRules\x12%.sift.rules.v1.UpdateJsonRulesRequest\x1a&.sift.rules.v1.UpdateJsonRulesResponse\"\x84\x01\x88\x02\x01\x92\x41[\x12\x0fUpdateJsonRules\x1aHBatch update rules given the `rules_json` which is a JSON list of rules.\x82\xd3\xe4\x93\x02\x1d\"\x18/api/v1/rules:updateJson:\x01*\x12\x94\x01\n\tListRules\x12\x1f.sift.rules.v1.ListRulesRequest\x1a .sift.rules.v1.ListRulesResponse\"D\x92\x41\'\x12\tListRules\x1a\x1aRetrieves a list of rules.\x82\xd3\xe4\x93\x02\x14\x12\x12/api/v1/rules:list\x12\xd9\x01\n\x10ListRuleVersions\x12&.sift.rules.v1.ListRuleVersionsRequest\x1a\'.sift.rules.v1.ListRuleVersionsResponse\"t\x92\x41I\x12\x10ListRuleVersions\x1a\x35Retrieves a list of rule versions for the given rule.\x82\xd3\xe4\x93\x02\"\x12 /api/v1/rules/{rule_id}/versions\x12\xb8\x01\n\x0eGetRuleVersion\x12$.sift.rules.v1.GetRuleVersionRequest\x1a%.sift.rules.v1.GetRuleVersionResponse\"Y\x92\x41\x39\x12\x0eGetRuleVersion\x1a\'Retrieves a specific version of a rule.\x82\xd3\xe4\x93\x02\x17\x12\x15/api/v1/rules:version\x12\xdf\x01\n\x14\x42\x61tchGetRuleVersions\x12*.sift.rules.v1.BatchGetRuleVersionsRequest\x1a+.sift.rules.v1.BatchGetRuleVersionsResponse\"n\x92\x41\x42\x12\x14\x42\x61tchGetRuleVersions\x1a*Retrieves multiple rules by rule versions.\x82\xd3\xe4\x93\x02#\"\x1e/api/v1/rules:batchGetVersions:\x01*\x1a\xb1\x01\x92\x41\xad\x01\x12\x30Service to programmatically interact with rules.\x1ay\n\x1fRead more about what rules are.\x12Vhttps://customer.support.siftstack.com/servicedesk/customer/portal/2/article/265421102B\x88\x01\n\x11\x63om.sift.rules.v1B\nRulesProtoP\x01\xa2\x02\x03SRX\xaa\x02\rSift.Rules.V1\xca\x02\rSift\\Rules\\V1\xe2\x02\x19Sift\\Rules\\V1\\GPBMetadata\xea\x02\x0fSift::Rules::V1\x92\x41\x10\x12\x0e\n\x0cRule Serviceb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -317,6 +317,16 @@ _globals['_CHANNELREFERENCE'].fields_by_name['name']._serialized_options = b'\340A\002' _globals['_CHANNELREFERENCE'].fields_by_name['component']._loaded_options = None _globals['_CHANNELREFERENCE'].fields_by_name['component']._serialized_options = b'\340A\002' + 
_globals['_PYTHONCHANNELREFERENCE'].fields_by_name['reference']._loaded_options = None + _globals['_PYTHONCHANNELREFERENCE'].fields_by_name['reference']._serialized_options = b'\340A\002' + _globals['_PYTHONCHANNELREFERENCE'].fields_by_name['name']._loaded_options = None + _globals['_PYTHONCHANNELREFERENCE'].fields_by_name['name']._serialized_options = b'\340A\002' + _globals['_PYTHONCODE'].fields_by_name['channel_references']._loaded_options = None + _globals['_PYTHONCODE'].fields_by_name['channel_references']._serialized_options = b'\340A\002' + _globals['_PYTHONCODE'].fields_by_name['code']._loaded_options = None + _globals['_PYTHONCODE'].fields_by_name['code']._serialized_options = b'\340A\002' + _globals['_PYTHONCODE'].fields_by_name['dependencies']._loaded_options = None + _globals['_PYTHONCODE'].fields_by_name['dependencies']._serialized_options = b'\340A\001' _globals['_NOTIFICATIONACTIONCONFIGURATION'].fields_by_name['recipient_user_ids']._loaded_options = None _globals['_NOTIFICATIONACTIONCONFIGURATION'].fields_by_name['recipient_user_ids']._serialized_options = b'\340A\002' _globals['_ANNOTATIONACTIONCONFIGURATION'].fields_by_name['tag_ids']._loaded_options = None @@ -373,12 +383,12 @@ _globals['_RULESERVICE'].methods_by_name['GetRuleVersion']._serialized_options = b'\222A9\022\016GetRuleVersion\032\'Retrieves a specific version of a rule.\202\323\344\223\002\027\022\025/api/v1/rules:version' _globals['_RULESERVICE'].methods_by_name['BatchGetRuleVersions']._loaded_options = None _globals['_RULESERVICE'].methods_by_name['BatchGetRuleVersions']._serialized_options = b'\222AB\022\024BatchGetRuleVersions\032*Retrieves multiple rules by rule versions.\202\323\344\223\002#\"\036/api/v1/rules:batchGetVersions:\001*' - _globals['_SEARCHORDER']._serialized_start=11533 - _globals['_SEARCHORDER']._serialized_end=11625 - _globals['_ACTIONKIND']._serialized_start=11627 - _globals['_ACTIONKIND']._serialized_end=11719 - _globals['_CONDITIONCOMPARATOR']._serialized_start=11722 - _globals['_CONDITIONCOMPARATOR']._serialized_end=11895 + _globals['_SEARCHORDER']._serialized_start=11853 + _globals['_SEARCHORDER']._serialized_end=11945 + _globals['_ACTIONKIND']._serialized_start=11947 + _globals['_ACTIONKIND']._serialized_end=12039 + _globals['_CONDITIONCOMPARATOR']._serialized_start=12042 + _globals['_CONDITIONCOMPARATOR']._serialized_end=12215 _globals['_RULE']._serialized_start=325 _globals['_RULE']._serialized_end=1246 _globals['_RULECONDITION']._serialized_start=1249 @@ -482,33 +492,37 @@ _globals['_BATCHGETRULEVERSIONSRESPONSE']._serialized_start=8906 _globals['_BATCHGETRULEVERSIONSRESPONSE']._serialized_end=8984 _globals['_RULECONDITIONEXPRESSION']._serialized_start=8987 - _globals['_RULECONDITIONEXPRESSION']._serialized_end=9231 - _globals['_SINGLECHANNELCOMPARISONEXPRESSION']._serialized_start=9234 - _globals['_SINGLECHANNELCOMPARISONEXPRESSION']._serialized_end=9565 - _globals['_LASTVALUETHRESHOLD']._serialized_start=9567 - _globals['_LASTVALUETHRESHOLD']._serialized_end=9587 - _globals['_CALCULATEDCHANNELCONFIG']._serialized_start=9590 - _globals['_CALCULATEDCHANNELCONFIG']._serialized_end=9969 - _globals['_CALCULATEDCHANNELCONFIG_CHANNELREFERENCESENTRY']._serialized_start=9868 - _globals['_CALCULATEDCHANNELCONFIG_CHANNELREFERENCESENTRY']._serialized_end=9969 - _globals['_CHANNELREFERENCE']._serialized_start=9971 - _globals['_CHANNELREFERENCE']._serialized_end=10049 - _globals['_RULEACTIONCONFIGURATION']._serialized_start=10052 - _globals['_RULEACTIONCONFIGURATION']._serialized_end=10260 
- _globals['_NOTIFICATIONACTIONCONFIGURATION']._serialized_start=10262 - _globals['_NOTIFICATIONACTIONCONFIGURATION']._serialized_end=10346 - _globals['_ANNOTATIONACTIONCONFIGURATION']._serialized_start=10349 - _globals['_ANNOTATIONACTIONCONFIGURATION']._serialized_end=10564 - _globals['_EVALUATERULESREQUEST']._serialized_start=10567 - _globals['_EVALUATERULESREQUEST']._serialized_end=10837 - _globals['_EVALUATEDANNOTATIONOPTIONS']._serialized_start=10839 - _globals['_EVALUATEDANNOTATIONOPTIONS']._serialized_end=10892 - _globals['_TIMERANGEQUERY']._serialized_start=10895 - _globals['_TIMERANGEQUERY']._serialized_end=11025 - _globals['_EVALUATERULESRESPONSE']._serialized_start=11028 - _globals['_EVALUATERULESRESPONSE']._serialized_end=11281 - _globals['_DRYRUNANNOTATION']._serialized_start=11284 - _globals['_DRYRUNANNOTATION']._serialized_end=11531 - _globals['_RULESERVICE']._serialized_start=11898 - _globals['_RULESERVICE']._serialized_end=16074 + _globals['_RULECONDITIONEXPRESSION']._serialized_end=9293 + _globals['_SINGLECHANNELCOMPARISONEXPRESSION']._serialized_start=9296 + _globals['_SINGLECHANNELCOMPARISONEXPRESSION']._serialized_end=9627 + _globals['_LASTVALUETHRESHOLD']._serialized_start=9629 + _globals['_LASTVALUETHRESHOLD']._serialized_end=9649 + _globals['_CALCULATEDCHANNELCONFIG']._serialized_start=9652 + _globals['_CALCULATEDCHANNELCONFIG']._serialized_end=10031 + _globals['_CALCULATEDCHANNELCONFIG_CHANNELREFERENCESENTRY']._serialized_start=9930 + _globals['_CALCULATEDCHANNELCONFIG_CHANNELREFERENCESENTRY']._serialized_end=10031 + _globals['_CHANNELREFERENCE']._serialized_start=10033 + _globals['_CHANNELREFERENCE']._serialized_end=10111 + _globals['_PYTHONCHANNELREFERENCE']._serialized_start=10113 + _globals['_PYTHONCHANNELREFERENCE']._serialized_end=10197 + _globals['_PYTHONCODE']._serialized_start=10200 + _globals['_PYTHONCODE']._serialized_end=10369 + _globals['_RULEACTIONCONFIGURATION']._serialized_start=10372 + _globals['_RULEACTIONCONFIGURATION']._serialized_end=10580 + _globals['_NOTIFICATIONACTIONCONFIGURATION']._serialized_start=10582 + _globals['_NOTIFICATIONACTIONCONFIGURATION']._serialized_end=10666 + _globals['_ANNOTATIONACTIONCONFIGURATION']._serialized_start=10669 + _globals['_ANNOTATIONACTIONCONFIGURATION']._serialized_end=10884 + _globals['_EVALUATERULESREQUEST']._serialized_start=10887 + _globals['_EVALUATERULESREQUEST']._serialized_end=11157 + _globals['_EVALUATEDANNOTATIONOPTIONS']._serialized_start=11159 + _globals['_EVALUATEDANNOTATIONOPTIONS']._serialized_end=11212 + _globals['_TIMERANGEQUERY']._serialized_start=11215 + _globals['_TIMERANGEQUERY']._serialized_end=11345 + _globals['_EVALUATERULESRESPONSE']._serialized_start=11348 + _globals['_EVALUATERULESRESPONSE']._serialized_end=11601 + _globals['_DRYRUNANNOTATION']._serialized_start=11604 + _globals['_DRYRUNANNOTATION']._serialized_end=11851 + _globals['_RULESERVICE']._serialized_start=12218 + _globals['_RULESERVICE']._serialized_end=16394 # @@protoc_insertion_point(module_scope) diff --git a/python/lib/sift/rules/v1/rules_pb2.pyi b/python/lib/sift/rules/v1/rules_pb2.pyi index 6d9be171d..4eebde0fc 100644 --- a/python/lib/sift/rules/v1/rules_pb2.pyi +++ b/python/lib/sift/rules/v1/rules_pb2.pyi @@ -1334,19 +1334,25 @@ class RuleConditionExpression(google.protobuf.message.Message): SINGLE_CHANNEL_COMPARISON_FIELD_NUMBER: builtins.int CALCULATED_CHANNEL_FIELD_NUMBER: builtins.int + PYTHON_CODE_FIELD_NUMBER: builtins.int @property def single_channel_comparison(self) -> 
global___SingleChannelComparisonExpression: ... @property def calculated_channel(self) -> global___CalculatedChannelConfig: ... + @property + def python_code(self) -> global___PythonCode: + """Experimental - not currently enabled""" + def __init__( self, *, single_channel_comparison: global___SingleChannelComparisonExpression | None = ..., calculated_channel: global___CalculatedChannelConfig | None = ..., + python_code: global___PythonCode | None = ..., ) -> None: ... - def HasField(self, field_name: typing.Literal["calculated_channel", b"calculated_channel", "expression", b"expression", "single_channel_comparison", b"single_channel_comparison"]) -> builtins.bool: ... - def ClearField(self, field_name: typing.Literal["calculated_channel", b"calculated_channel", "expression", b"expression", "single_channel_comparison", b"single_channel_comparison"]) -> None: ... - def WhichOneof(self, oneof_group: typing.Literal["expression", b"expression"]) -> typing.Literal["single_channel_comparison", "calculated_channel"] | None: ... + def HasField(self, field_name: typing.Literal["calculated_channel", b"calculated_channel", "expression", b"expression", "python_code", b"python_code", "single_channel_comparison", b"single_channel_comparison"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["calculated_channel", b"calculated_channel", "expression", b"expression", "python_code", b"python_code", "single_channel_comparison", b"single_channel_comparison"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["expression", b"expression"]) -> typing.Literal["single_channel_comparison", "calculated_channel", "python_code"] | None: ... global___RuleConditionExpression = RuleConditionExpression @@ -1458,6 +1464,46 @@ class ChannelReference(google.protobuf.message.Message): global___ChannelReference = ChannelReference +@typing.final +class PythonChannelReference(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REFERENCE_FIELD_NUMBER: builtins.int + NAME_FIELD_NUMBER: builtins.int + reference: builtins.str + name: builtins.str + def __init__( + self, + *, + reference: builtins.str = ..., + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name", "reference", b"reference"]) -> None: ... + +global___PythonChannelReference = PythonChannelReference + +@typing.final +class PythonCode(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + CHANNEL_REFERENCES_FIELD_NUMBER: builtins.int + CODE_FIELD_NUMBER: builtins.int + DEPENDENCIES_FIELD_NUMBER: builtins.int + code: builtins.str + dependencies: builtins.str + @property + def channel_references(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PythonChannelReference]: ... + def __init__( + self, + *, + channel_references: collections.abc.Iterable[global___PythonChannelReference] | None = ..., + code: builtins.str = ..., + dependencies: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["channel_references", b"channel_references", "code", b"code", "dependencies", b"dependencies"]) -> None: ... 
+ +global___PythonCode = PythonCode + @typing.final class RuleActionConfiguration(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor diff --git a/python/lib/sift_py/_internal/time.py b/python/lib/sift_py/_internal/time.py index 8ff926e9c..cd277a6bc 100644 --- a/python/lib/sift_py/_internal/time.py +++ b/python/lib/sift_py/_internal/time.py @@ -29,7 +29,7 @@ def to_timestamp_nanos(arg: Union[TimestampPb, pd.Timestamp, datetime, str, int] return cast(pd.Timestamp, pd.Timestamp(arg)) -def to_timestamp_pb(arg: Union[datetime, str, int]) -> TimestampPb: +def to_timestamp_pb(arg: Union[datetime, str, int, float]) -> TimestampPb: """ Mainly used for testing at the moment. If using this for non-testing purposes should probably make this more robust and support nano-second precision. @@ -40,7 +40,7 @@ def to_timestamp_pb(arg: Union[datetime, str, int]) -> TimestampPb: if isinstance(arg, datetime): ts.FromDatetime(arg) return ts - elif isinstance(arg, int): + elif isinstance(arg, (int, float)): ts.FromDatetime(datetime.fromtimestamp(arg)) return ts else: diff --git a/python/lib/sift_py/data_import/_config.py b/python/lib/sift_py/data_import/_config.py index 354cb99b5..bd19c5fe5 100644 --- a/python/lib/sift_py/data_import/_config.py +++ b/python/lib/sift_py/data_import/_config.py @@ -20,94 +20,9 @@ class ConfigBaseModel(BaseModel): model_config = ConfigDict(extra="forbid") -class CsvConfigImpl(ConfigBaseModel): - """ - Defines the CSV config spec. - """ - - asset_name: str - run_name: str = "" - run_id: str = "" - first_data_row: int - time_column: TimeColumn - data_columns: Dict[int, DataColumn] - - @model_validator(mode="after") - def validate_config(self) -> Self: - if not self.data_columns: - raise PydanticCustomError("invalid_config_error", "Empty 'data_columns'") - - if self.run_name and self.run_id: - raise PydanticCustomError( - "invalid_config_error", "Only specify run_name or run_id, not both." - ) - - return self - - -class EnumType(ConfigBaseModel, ChannelEnumType): +class ConfigDataModel(ConfigBaseModel): """ - Defines an enum entry in the CSV config. - """ - - -class BitFieldElement(ConfigBaseModel, ChannelBitFieldElement): - """ - Defines a bit field element entry in the CSV config. - """ - - -class TimeColumn(ConfigBaseModel): - """ - Defines a time column entry in the CSV config. - """ - - format: Union[str, TimeFormatType] - column_number: int - relative_start_time: Optional[str] = None - - @field_validator("format", mode="before") - @classmethod - def convert_format(cls, raw: Union[str, TimeFormatType]) -> str: - """ - Converts the provided format value to a string. - """ - if isinstance(raw, TimeFormatType): - return raw.as_human_str() - elif isinstance(raw, str): - value = TimeFormatType.from_str(raw) - if value is not None: - return value.as_human_str() - - raise PydanticCustomError("invalid_config_error", f"Invalid time format: {raw}.") - - @model_validator(mode="after") - def validate_time(self) -> Self: - """ - Validates the provided time format. - """ - format = TimeFormatType.from_str(self.format) # type: ignore - if format is None: - raise PydanticCustomError( - "invalid_config_error", f"Invalid time format: {self.format}." 
- ) - - if format.is_relative(): - if self.relative_start_time is None: - raise PydanticCustomError("invalid_config_error", "Missing 'relative_start_time'") - else: - if self.relative_start_time is not None: - raise PydanticCustomError( - "invalid_config_error", - "'relative_start_time' specified for non relative time format.", - ) - - return self - - -class DataColumn(ConfigBaseModel): - """ - Defines a data column entry in the CSV config. + Base DataModel with common functionality """ name: str @@ -185,3 +100,142 @@ def validate_bit_fields(self) -> Self: ) return self + + +class ConfigTimeModel(ConfigBaseModel): + """ + Base TimeModel with common functionality + """ + + format: Union[str, TimeFormatType] + relative_start_time: Optional[str] = None + + @field_validator("format", mode="before") + @classmethod + def convert_format(cls, raw: Union[str, TimeFormatType]) -> str: + """ + Converts the provided format value to a string. + """ + if isinstance(raw, TimeFormatType): + return raw.as_human_str() + elif isinstance(raw, str): + value = TimeFormatType.from_str(raw) + if value is not None: + return value.as_human_str() + + raise PydanticCustomError("invalid_config_error", f"Invalid time format: {raw}.") + + @model_validator(mode="after") + def validate_time(self) -> Self: + """ + Validates the provided time format. + """ + format = TimeFormatType.from_str(self.format) # type: ignore + if format is None: + raise PydanticCustomError( + "invalid_config_error", f"Invalid time format: {self.format}." + ) + + if format.is_relative(): + if self.relative_start_time is None: + raise PydanticCustomError("invalid_config_error", "Missing 'relative_start_time'") + else: + if self.relative_start_time is not None: + raise PydanticCustomError( + "invalid_config_error", + "'relative_start_time' specified for non relative time format.", + ) + + return self + + +class CsvConfigImpl(ConfigBaseModel): + """ + Defines the CSV config spec. + """ + + asset_name: str + run_name: str = "" + run_id: str = "" + first_data_row: int + time_column: TimeColumn + data_columns: Dict[int, DataColumn] + + @model_validator(mode="after") + def validate_config(self) -> Self: + if not self.data_columns: + raise PydanticCustomError("invalid_config_error", "Empty 'data_columns'") + + if self.run_name and self.run_id: + raise PydanticCustomError( + "invalid_config_error", "Only specify run_name or run_id, not both." + ) + + return self + + +class Hdf5ConfigImpl(ConfigBaseModel): + """ + Defines the HDF5 config spec + """ + + asset_name: str + run_name: str = "" + run_id: str = "" + time: TimeCfg + data: List[Hdf5DataCfg] + + @model_validator(mode="after") + def validate_config(self) -> Self: + if not self.data: + raise PydanticCustomError("invalid_config_error", "Empty 'data'") + + if self.run_name and self.run_id: + raise PydanticCustomError( + "invalid_config_error", "Only specify run_name or run_id, not both." + ) + + return self + + +class EnumType(ConfigBaseModel, ChannelEnumType): + """ + Defines an enum entry in the CSV config. + """ + + +class BitFieldElement(ConfigBaseModel, ChannelBitFieldElement): + """ + Defines a bit field element entry in the CSV config. + """ + + +class TimeColumn(ConfigTimeModel): + """ + Defines a time column entry in the CSV config. + """ + + column_number: int + + +class DataColumn(ConfigDataModel): + """ + Defines a data column entry in the CSV config. + """ + + +class TimeCfg(ConfigTimeModel): + """ + Defines a time entry in the generic file config. 
+ """ + + +class Hdf5DataCfg(ConfigDataModel): + """ + Defines a data entry in the HDF5 config. + """ + + time_dataset: str + time_column: int = 1 + value_dataset: str + value_column: int = 1 diff --git a/python/lib/sift_py/data_import/_config_test.py b/python/lib/sift_py/data_import/_config_test.py index be6c360e3..b0f20d9da 100644 --- a/python/lib/sift_py/data_import/_config_test.py +++ b/python/lib/sift_py/data_import/_config_test.py @@ -1,6 +1,8 @@ +import pydantic_core import pytest -from sift_py.data_import.config import CsvConfig +from sift_py.data_import._config import ConfigDataModel, ConfigTimeModel +from sift_py.data_import.config import CsvConfig, Hdf5Config from sift_py.data_import.time_format import TimeFormatType from sift_py.error import SiftAPIDeprecationWarning from sift_py.ingestion.channel import ChannelDataType @@ -24,12 +26,38 @@ def csv_config_data(): } +@pytest.fixture +def hdf5_config_data(): + return { + "asset_name": "test_asset", + "time": { + "format": "TIME_FORMAT_ABSOLUTE_DATETIME", + }, + "data": [ + { + "name": "channel1", + "time_dataset": "/channel1", + "value_dataset": "/channel1", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + ], + } + + def test_empty_data_columns(csv_config_data: dict): csv_config_data["data_columns"] = {} with pytest.raises(Exception, match="Empty 'data_columns'"): CsvConfig(csv_config_data) +def test_empty_data_columns_hdf5(hdf5_config_data: dict): + hdf5_config_data["data"] = [] + with pytest.raises(Exception, match="Empty 'data'"): + Hdf5Config(hdf5_config_data) + + def test_run_name_and_run_id(csv_config_data: dict): csv_config_data["run_name"] = "Run Title" csv_config_data["run_id"] = "1c5546b4-ee53-460b-9205-4dc3980c200f" @@ -37,6 +65,13 @@ def test_run_name_and_run_id(csv_config_data: dict): CsvConfig(csv_config_data) +def test_run_name_and_run_id_hdf5(hdf5_config_data: dict): + hdf5_config_data["run_name"] = "Run Title" + hdf5_config_data["run_id"] = "1c5546b4-ee53-460b-9205-4dc3980c200f" + with pytest.raises(Exception, match="Only specify run_name or run_id, not both"): + Hdf5Config(hdf5_config_data) + + def test_data_column_validation(csv_config_data: dict): csv_config_data["data_columns"] = { 1: { @@ -68,6 +103,59 @@ def test_data_column_validation(csv_config_data: dict): assert cfg._csv_config.data_columns[1].name == "component.channel" +def test_data_column_validation_hdf5(hdf5_config_data: dict): + hdf5_config_data["data"] = [ + { + "name": "channel", + "data_type": "INVALID_DATA_TYPE", + "time_dataset": "channel", + "value_dataset": "channel", + } + ] + + with pytest.raises(Exception, match="Invalid data_type:"): + Hdf5Config(hdf5_config_data) + + hdf5_config_data["data"] = [ + { + "name": "channel", + "data_type": complex, + "time_dataset": "channel", + "value_dataset": "channel", + } + ] + with pytest.raises(Exception, match="Invalid data_type:"): + Hdf5Config(hdf5_config_data) + + hdf5_config_data["data"] = [ + { + "name": "channel_bool", + "data_type": ChannelDataType.BOOL, + "time_dataset": "channel", + "value_dataset": "channel", + }, + { + "name": "channel_double", + "data_type": ChannelDataType.DOUBLE, + "time_dataset": "channel", + "value_dataset": "channel", + }, + { + "name": "channel_int", + "data_type": ChannelDataType.INT_64, + "time_dataset": "channel", + "value_dataset": "channel", + }, + { + "name": "channel_str", + "data_type": ChannelDataType.STRING, + "time_dataset": "channel", + "value_dataset": "channel", + }, + ] + Hdf5Config(hdf5_config_data) + + def 
test_enums(csv_config_data: dict): csv_config_data["data_columns"] = { 1: { @@ -108,6 +196,52 @@ def test_enums(csv_config_data: dict): CsvConfig(csv_config_data) +def test_enums_hdf5(hdf5_config_data: dict): + hdf5_config_data["data"] = [ + { + "name": "channel", + "data_type": "CHANNEL_DATA_TYPE_INT_32", + "enum_types": [ + {"key": 1, "name": "value_1"}, + {"key": 2, "name": "value_2"}, + ], + "time_dataset": "channel", + "value_dataset": "channel", + } + ] + with pytest.raises(Exception, match="Enums can only be specified"): + Hdf5Config(hdf5_config_data) + + hdf5_config_data["data"] = [ + { + "name": "channel", + "data_type": "CHANNEL_DATA_TYPE_ENUM", + "enum_types": [ + {"key": 1, "name": "value_1", "extra_key": "value"}, + {"key": 2, "name": "value_2"}, + ], + "time_dataset": "channel", + "value_dataset": "channel", + } + ] + with pytest.raises(Exception, match="validation error"): + Hdf5Config(hdf5_config_data) + + hdf5_config_data["data"] = [ + { + "name": "channel", + "data_type": "CHANNEL_DATA_TYPE_ENUM", + "enum_types": [ + {"key": 1, "name": "value_1"}, + {"key": 2, "name": "value_2"}, + ], + "time_dataset": "channel", + "value_dataset": "channel", + } + ] + Hdf5Config(hdf5_config_data) + + def test_bit_field(csv_config_data: dict): csv_config_data["data_columns"] = { 1: { @@ -150,6 +284,54 @@ def test_bit_field(csv_config_data: dict): CsvConfig(csv_config_data) +def test_bit_field_hdf5(hdf5_config_data: dict): + hdf5_config_data["data"] = [ + { + "name": "channel", + "data_type": "CHANNEL_DATA_TYPE_INT_32", + "bit_field_elements": [ + {"index": 1, "name": "bit_field_name_1", "bit_count": 4}, + ], + "time_dataset": "channel", + "value_dataset": "channel", + } + ] + with pytest.raises(Exception, match="Bit fields can only be specified"): + Hdf5Config(hdf5_config_data) + + hdf5_config_data["data"] = [ + { + "name": "channel", + "data_type": "CHANNEL_DATA_TYPE_INT_32", + "bit_field_elements": [ + { + "index": 1, + "name": "bit_field_name_1", + "bit_count": 4, + "extra_key": "value", + }, + ], + "time_dataset": "channel", + "value_dataset": "channel", + } + ] + with pytest.raises(Exception, match="validation error"): + Hdf5Config(hdf5_config_data) + + hdf5_config_data["data"] = [ + { + "name": "channel", + "data_type": "CHANNEL_DATA_TYPE_BIT_FIELD", + "bit_field_elements": [ + {"index": 1, "name": "bit_field_name_1", "bit_count": 4}, + ], + "time_dataset": "channel", + "value_dataset": "channel", + } + ] + Hdf5Config(hdf5_config_data) + + def test_time_column(csv_config_data: dict): csv_config_data["time_column"] = { "format": "INVALID_TIME_FORMAT", @@ -180,3 +362,53 @@ def test_time_column(csv_config_data: dict): "column_number": 1, } CsvConfig(csv_config_data) + + +def test_time_column_hdf5(hdf5_config_data: dict): + hdf5_config_data["time"] = { + "format": "INVALID_TIME_FORMAT", + } + with pytest.raises(Exception, match="Invalid time format"): + Hdf5Config(hdf5_config_data) + + hdf5_config_data["time"] = { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + } + with pytest.raises(Exception, match="Missing 'relative_start_time'"): + Hdf5Config(hdf5_config_data) + + hdf5_config_data["time"] = { + "format": "TIME_FORMAT_ABSOLUTE_UNIX_SECONDS", + "relative_start_time": "100", + } + with pytest.raises( + Exception, match="'relative_start_time' specified for non relative time format." 
+ ): + Hdf5Config(hdf5_config_data) + + hdf5_config_data["time"] = { + "format": TimeFormatType.ABSOLUTE_DATETIME, + } + Hdf5Config(hdf5_config_data) + + +def test_config_time_model_extra_field(): + time_cfg = { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": 123456789, + "extra_field": 0, + } + + with pytest.raises( + pydantic_core._pydantic_core.ValidationError, match="Extra inputs are not permitted" + ): + ConfigTimeModel(**time_cfg) + + +def test_config_data_model_extra_field(): + data_cfg = {"name": "testname", "data_type": float, "extra_field": 0} + + with pytest.raises( + pydantic_core._pydantic_core.ValidationError, match="Extra inputs are not permitted" + ): + ConfigDataModel(**data_cfg) diff --git a/python/lib/sift_py/data_import/_hdf5_test.py b/python/lib/sift_py/data_import/_hdf5_test.py new file mode 100644 index 000000000..f31f3879a --- /dev/null +++ b/python/lib/sift_py/data_import/_hdf5_test.py @@ -0,0 +1,1015 @@ +from typing import Dict + +import h5py # type: ignore +import numpy as np +import polars as pl # type: ignore +import pytest +from pytest_mock import MockFixture + +from sift_py.data_import._config import Hdf5DataCfg +from sift_py.data_import.config import Hdf5Config +from sift_py.data_import.hdf5 import ( + Hdf5UploadService, + _convert_hdf5_to_dataframes, + _convert_signed_enums, + _create_csv_config, + _extract_hdf5_data_to_dataframe, + _merge_timeseries_dataframes, + _split_hdf5_configs, +) + + +class MockHdf5File: + def __init__(self, data_dict: Dict): + self.data_dict = data_dict + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + pass + + def __getitem__(self, key): + return MockHdf5Dataset(self.data_dict[key]) + + def __contains__(self, key): + return key in self.data_dict + + +class MockHdf5Dataset: + def __init__(self, data): + self.data = data + + def __getitem__(self, key): + return self.data[key] + + +@pytest.fixture +def rest_config(): + return { + "uri": "some_uri.com", + "apikey": "123456789", + } + + +@pytest.fixture +def hdf5_config(): + return Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": "2025-01-01T01:00:00Z", + }, + "data": [ + { + "name": "DoubleChannel", + "time_dataset": "/DoubleChannel", + "value_dataset": "/DoubleChannel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + { + "name": "DoubleChannelInGroup", + "time_dataset": "/testgrp/DoubleChannelInGroup", + "value_dataset": "/testgrp/DoubleChannelInGroup", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + { + "name": "StringChannel1", + "time_dataset": "/StringChannel1", + "value_dataset": "/StringChannel1", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_STRING", + }, + { + "name": "BinaryStringChannel2", + "time_dataset": "/BinaryStringChannel2", + "value_dataset": "/BinaryStringChannel2", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_STRING", + }, + { + "name": "EnumChannel", + "time_dataset": "/EnumChannel", + "value_dataset": "/EnumChannel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_ENUM", + "enum_types": [ + {"key": 1, "name": "On"}, + {"key": 0, "name": "Off"}, + ], + }, + { + "name": "BitFieldChannel", + "time_dataset": "/BitFieldChannel", + "value_dataset": "/BitFieldChannel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_BIT_FIELD", + 
"bit_field_elements": [ + {"index": 0, "name": "flag1", "bit_count": 4}, + {"index": 4, "name": "flag2", "bit_count": 4}, + ], + }, + { + "name": "BoolChannel", + "time_dataset": "/BoolChannel", + "value_dataset": "/BoolChannel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_BOOL", + }, + { + "name": "FloatChannel", + "time_dataset": "/FloatChannel", + "value_dataset": "/FloatChannel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_FLOAT", + }, + { + "name": "Int32Channel", + "time_dataset": "/Int32Channel", + "value_dataset": "/Int32Channel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_INT_32", + }, + { + "name": "Int64Channel", + "time_dataset": "/Int64Channel", + "value_dataset": "/Int64Channel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_INT_64", + }, + { + "name": "UInt32Channel", + "time_dataset": "/UInt32Channel", + "value_dataset": "/UInt32Channel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_UINT_32", + }, + { + "name": "UInt64Channel", + "time_dataset": "/UInt64Channel", + "value_dataset": "/UInt64Channel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_UINT_64", + }, + ], + } + ) + + +@pytest.fixture +def hdf5_data_dict(): + return { + "/DoubleChannel": np.array( + list(zip([0, 1, 2], [1.0, 2.0, 3.0])), dtype=[("time", np.int64), ("value", np.float64)] + ), + "/testgrp/DoubleChannelInGroup": np.array( + list(zip([4, 5, 6], [-1.0, -2.0, -3.0])), + dtype=[("time", np.int64), ("value", np.float64)], + ), + "/StringChannel1": np.array( + list(zip([0, 1, 2], ["a", "b", "c"])), + dtype=[("time", np.int64), ("value", h5py.string_dtype("utf-8"))], + ), + "/BinaryStringChannel2": np.array( + list(zip([0, 1, 2], [b"a", b"b", b"c"])), + dtype=[("time", np.int64), ("value", h5py.string_dtype("ascii"))], + ), + "/EnumChannel": np.array( + list(zip([0, 1, 2], [1, 0, 1])), dtype=[("time", np.int64), ("value", np.int32)] + ), + "/BitFieldChannel": np.array( + list(zip([0, 1, 2], [15, 240, 15])), dtype=[("time", np.int64), ("value", np.int32)] + ), + "/BoolChannel": np.array( + list(zip([0, 1, 2], [True, False, True])), + dtype=[("time", np.int64), ("value", np.bool_)], + ), + "/FloatChannel": np.array( + list(zip([0, 1, 2], [1.1, 2.2, 3.3])), dtype=[("time", np.int64), ("value", np.float32)] + ), + "/Int32Channel": np.array( + list(zip([0, 1, 2], [10, 20, 30])), dtype=[("time", np.int64), ("value", np.int32)] + ), + "/Int64Channel": np.array( + list(zip([0, 1, 2], [10000000000, 20000000000, 30000000000])), + dtype=[("time", np.int64), ("value", np.int64)], + ), + "/UInt32Channel": np.array( + list(zip([0, 1, 2], [1000, 2000, 3000])), + dtype=[("time", np.int64), ("value", np.uint32)], + ), + "/UInt64Channel": np.array( + list(zip([0, 1, 2], [1000000000000, 2000000000000, 3000000000000])), + dtype=[("time", np.int64), ("value", np.uint64)], + ), + } + + +def test_hdf5_upload_service_valid_path(mocker: MockFixture, rest_config, hdf5_config): + mock_path_is_file = mocker.patch("pathlib.Path.is_file") + mock_path_is_file.return_value = False + + with pytest.raises(Exception, match="does not point to a regular file"): + svc = Hdf5UploadService(rest_config) + svc.upload(path="badpath.h5", hdf5_config=hdf5_config) + + +def test_split_hdf5_configs_splits_strings(hdf5_config): + configs = _split_hdf5_configs(hdf5_config) + # Should split into 1 non-string and 2 string configs (StringChannel1 and StringChannel2) + string_configs 
= [ + cfg for cfg in configs if cfg._hdf5_config.data[0].data_type == "CHANNEL_DATA_TYPE_STRING" + ] + non_string_configs = [ + cfg for cfg in configs if cfg._hdf5_config.data[0].data_type != "CHANNEL_DATA_TYPE_STRING" + ] + assert len(configs) == 3 + assert len(string_configs) == 2 + assert len(non_string_configs) == 1 + + +def test_create_csv_config(mocker: MockFixture, hdf5_config): + # Use a reverse list to make sure the order has changed + data_cols = [d_cfg.name for d_cfg in hdf5_config._hdf5_config.data][::-1] + columns = ["timestamp"] + data_cols + merged_df = pl.DataFrame({col: [] for col in columns}) + + csv_cfg = _create_csv_config(hdf5_config, merged_df) + csv_cfg_dict = csv_cfg.to_dict() + assert "time_column" in csv_cfg_dict + assert "data_columns" in csv_cfg_dict + assert len(csv_cfg_dict["data_columns"]) == 12 + + for csv_col, df_col in zip(csv_cfg_dict["data_columns"].values(), merged_df.columns[1:]): + assert csv_col["name"] == df_col + + +def test_convert_hdf5_to_dataframes(mocker: MockFixture, hdf5_config, hdf5_data_dict): + mocker.patch("h5py.File", return_value=MockHdf5File(hdf5_data_dict)) + + expected_col_count = len(hdf5_data_dict) + 1 + time_stamps = [] + for data in hdf5_data_dict.values(): + for row in data: + time_stamps.append(row[0]) + expected_row_count = len(set(time_stamps)) + + df = _convert_hdf5_to_dataframes("mock.h5", hdf5_config) + + # Dataframe should have cols == parameter count + 1 (timestamps) and rows == unique timestamps + assert df.shape == (expected_row_count, expected_col_count) + + +def test_two_dataset_extraction(): + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": "2025-01-01T01:00:00Z", + }, + "data": [ + { + "name": "Channel1", + "time_dataset": "/Channel1_Time", + "value_dataset": "/Channel1_Value", + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + ], + } + ) + + data_dict = { + "/Channel1_Time": np.array([0, 1, 2], dtype=np.int64), + "/Channel1_Value": np.array([1.0, 2.0, 3.0], dtype=np.float64), + } + + mock_file = MockHdf5File(data_dict) + + for data_cfg in hdf5_config._hdf5_config.data: + df = _extract_hdf5_data_to_dataframe( + mock_file, data_cfg.time_dataset, data_cfg.time_column, [data_cfg] + ) + assert df.shape == (3, 2) + assert df.columns[1] == data_cfg.name + assert (np.array(df[df.columns[0]]) == data_dict["/Channel1_Time"]).all() + assert (np.array(df[df.columns[1]]) == data_dict["/Channel1_Value"]).all() + + +def test_multi_col_dataset_extraction(): + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": "2025-01-01T01:00:00Z", + }, + "data": [ + { + "name": "Channel1", + "time_dataset": "/Channel1", + "value_dataset": "/Channel1", + "time_column": 4, + "value_column": 3, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + ], + } + ) + + data_dict = { + "/Channel1": [ + np.array([9, 9, 9], dtype=np.int64), + np.array([9, 9, 9], dtype=np.int64), + np.array([1.0, 2.0, 3.0], dtype=np.float64), + np.array([0, 1, 2], dtype=np.int64), + ], + } + + mock_file = MockHdf5File(data_dict) + + for data_cfg in hdf5_config._hdf5_config.data: + df = _extract_hdf5_data_to_dataframe( + mock_file, data_cfg.time_dataset, data_cfg.time_column, [data_cfg] + ) + assert df.shape == (3, 2) + assert df.columns[1] == data_cfg.name + assert (np.array(df[df.columns[0]]) == data_dict["/Channel1"][3]).all() + assert (np.array(df[df.columns[1]]) == data_dict["/Channel1"][2]).all() 
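For reference, the MockHdf5File objects in these tests stand in for real h5py files. A minimal sketch of writing an equivalent file with h5py (the output file name is illustrative; dataset names and values mirror the fixtures above):

import h5py
import numpy as np

with h5py.File("example.h5", "w") as f:
    # Compound dataset: field 1 is time, field 2 is value, matching the structured fixtures above.
    f.create_dataset(
        "/DoubleChannel",
        data=np.array(
            list(zip([0, 1, 2], [1.0, 2.0, 3.0])),
            dtype=[("time", np.int64), ("value", np.float64)],
        ),
    )
    # Separate time and value datasets, as exercised by test_two_dataset_extraction above.
    f.create_dataset("/Channel1_Time", data=np.array([0, 1, 2], dtype=np.int64))
    f.create_dataset("/Channel1_Value", data=np.array([1.0, 2.0, 3.0], dtype=np.float64))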
+ + +def test_string_conversion(): + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": "2025-01-01T01:00:00Z", + }, + "data": [ + { + "name": "StringChannel1", + "time_dataset": "/StringChannel1", + "value_dataset": "/StringChannel1", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_STRING", + }, + { + "name": "BinaryStringChannel2", + "time_dataset": "/BinaryStringChannel2", + "value_dataset": "/BinaryStringChannel2", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_STRING", + }, + ], + } + ) + + data_dict = { + "/StringChannel1": np.array( + list(zip([0, 1, 2], ["a", "b", "cat"])), + dtype=[("time", np.int64), ("value", h5py.string_dtype("utf-8"))], + ), + "/BinaryStringChannel2": np.array( + list(zip([0, 1, 2], [b"a", b"b", b"cat"])), + dtype=[("time", np.int64), ("value", h5py.string_dtype("ascii"))], + ), + } + + mock_file = MockHdf5File(data_dict) + + for data_cfg in hdf5_config._hdf5_config.data: + df = _extract_hdf5_data_to_dataframe( + mock_file, data_cfg.time_dataset, data_cfg.time_column, [data_cfg] + ) + assert (np.array(df[data_cfg.name]) == np.array(["a", "b", "cat"])).all() + + +def test_bitfield_conversion(): + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": "2025-01-01T01:00:00Z", + }, + "data": [ + { + "name": "bitfield1", + "time_dataset": "/bitChannel1", + "value_dataset": "/bitChannel1", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_BIT_FIELD", + "bit_field_elements": [ + {"index": 0, "name": "flag1", "bit_count": 4}, + {"index": 4, "name": "flag2", "bit_count": 4}, + ], + } + ], + } + ) + + data_dict = { + "/bitChannel1": np.array( + list(zip([0, 1, 2], [0, 2_147_483_647, 15])), + dtype=[("time", np.int64), ("value", np.int32)], + ), + } + + mock_file = MockHdf5File(data_dict) + + for data_cfg in hdf5_config._hdf5_config.data: + df = _extract_hdf5_data_to_dataframe( + mock_file, data_cfg.time_dataset, data_cfg.time_column, [data_cfg] + ) + assert (np.array(df["timestamp"]) == np.array([0, 1, 2])).all() + assert (np.array(df[data_cfg.name]) == np.array([0, 2_147_483_647, 15])).all() + + +def test_enum_conversion(): + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": "2025-01-01T01:00:00Z", + }, + "data": [ + { + "name": "EnumChannel", + "time_dataset": "/EnumChannel", + "value_dataset": "/EnumChannel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_ENUM", + "enum_types": [ + {"key": 1, "name": "On"}, + {"key": 0, "name": "Off"}, + {"key": 2_147_483_647, "name": "Invalid"}, + ], + }, + ], + } + ) + + data_dict = { + "/EnumChannel": np.array( + list(zip([0, 1, 2], [1, 0, 2_147_483_647])), + dtype=[("time", np.int64), ("value", np.int32)], + ), + } + + mock_file = MockHdf5File(data_dict) + + for data_cfg in hdf5_config._hdf5_config.data: + df = _extract_hdf5_data_to_dataframe( + mock_file, data_cfg.time_dataset, data_cfg.time_column, [data_cfg] + ) + assert (np.array(df["timestamp"]) == np.array([0, 1, 2])).all() + assert (np.array(df[data_cfg.name]) == np.array([1, 0, 2_147_483_647])).all() + + +def test_time_value_len_diff(): + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": "2025-01-01T01:00:00Z", + }, + 
"data": [ + { + "name": "DoubleChannel", + "time_dataset": "/time", + "value_dataset": "/data", + "time_column": 1, + "value_column": 1, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + ], + } + ) + + data_dict = { + "/time": np.array([0, 1, 2], dtype=np.int64), + "/data": np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float64), + } + + mock_file = MockHdf5File(data_dict) + + for data_cfg in hdf5_config._hdf5_config.data: + with pytest.raises(Exception, match="time and value columns have different lengths"): + _extract_hdf5_data_to_dataframe( + mock_file, data_cfg.time_dataset, data_cfg.time_column, [data_cfg] + ) + + +def test_hdf5_to_dataframe_conversion(mocker: MockFixture, hdf5_config, hdf5_data_dict): + mocker.patch("h5py.File", return_value=MockHdf5File(hdf5_data_dict)) + name_dataframe_map = {data.name: data.value_dataset for data in hdf5_config._hdf5_config.data} + + df: pl.DataFrame = _convert_hdf5_to_dataframes("mock.h5", hdf5_config) + + for name, value_dataset in name_dataframe_map.items(): + assert name in df.columns + + # Remove nulls since they won't be in original data + data = df[name].filter(df[name].is_not_null()) + assert len(data) == len(hdf5_data_dict[value_dataset]) + + +def test_bad_time_col(mocker: MockFixture): + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": "2025-01-01T01:00:00Z", + }, + "data": [ + { + "name": "DoubleChannel", + "time_dataset": "/DoubleChannel", + "value_dataset": "/DoubleChannel", + "time_column": 2, + "value_column": 1, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + ], + } + ) + + data_dict = { + "/DoubleChannel": np.array([0, 1, 2], dtype=np.int64), + } + + mocker.patch("h5py.File", return_value=MockHdf5File(data_dict)) + + with pytest.raises(Exception, match="time_column=2 out of range"): + _convert_hdf5_to_dataframes("mock.h5", hdf5_config) + + +def test_bad_val_col(mocker: MockFixture): + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": "2025-01-01T01:00:00Z", + }, + "data": [ + { + "name": "DoubleChannel", + "time_dataset": "/DoubleChannel", + "value_dataset": "/DoubleChannel", + "time_column": 1, + "value_column": 2, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + ], + } + ) + + data_dict = { + "/DoubleChannel": np.array([0, 1, 2], dtype=np.int64), + } + + mocker.patch("h5py.File", return_value=MockHdf5File(data_dict)) + + with pytest.raises(Exception, match="value_column=2 out of range"): + _convert_hdf5_to_dataframes("mock.h5", hdf5_config) + + +def test_missing_time_data(mocker: MockFixture): + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + "relative_start_time": "2025-01-01T01:00:00Z", + }, + "data": [ + { + "name": "DoubleChannel", + "time_dataset": "/DoubleChannelTime", + "value_dataset": "/DoubleChannelValue", + "time_column": 1, + "value_column": 1, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + ], + } + ) + + data_dict = { + "/DoubleChannelValue": np.array([0, 1, 2], dtype=np.int64), + } + + mocker.patch("h5py.File", return_value=MockHdf5File(data_dict)) + + with pytest.raises(Exception, match="HDF5 file does not contain dataset"): + _convert_hdf5_to_dataframes("mock.h5", hdf5_config) + + +def test_missing_value_data(mocker: MockFixture): + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_RELATIVE_SECONDS", + 
"relative_start_time": "2025-01-01T01:00:00Z", + }, + "data": [ + { + "name": "DoubleChannel", + "time_dataset": "/DoubleChannelTime", + "value_dataset": "/DoubleChannelValue", + "time_column": 1, + "value_column": 1, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + ], + } + ) + + data_dict = { + "/DoubleChannelTime": np.array([0, 1, 2], dtype=np.int64), + } + + mocker.patch("h5py.File", return_value=MockHdf5File(data_dict)) + + with pytest.raises(Exception, match="HDF5 file does not contain dataset"): + _convert_hdf5_to_dataframes("mock.h5", hdf5_config) + + +def test_hdf5_upload(mocker: MockFixture, hdf5_config, hdf5_data_dict, rest_config): + mock_path_is_file = mocker.patch("pathlib.Path.is_file") + mock_path_is_file.return_value = True + + mocker.patch("h5py.File", return_value=MockHdf5File(hdf5_data_dict)) + + mock_csv_upload = mocker.patch("sift_py.data_import.csv.CsvUploadService.upload") + + svc = Hdf5UploadService(rest_config) + svc.upload( + "mock.h5", + hdf5_config, + ) + + assert mock_csv_upload.call_count == 3 + + +def test_hdf5_upload_string_timestamps(mocker: MockFixture, hdf5_config, rest_config): + mock_path_is_file = mocker.patch("pathlib.Path.is_file") + mock_path_is_file.return_value = True + + data_dict = { + "/timestamps": np.array( + [ + b"2024-10-07 17:00:09.982126", + b"2024-10-07 17:00:10.022126", + b"2024-10-07 17:00:10.062126", + ] + ), + "/DoubleChannel": np.array([0, 1, 2], dtype=np.int64), + } + + hdf5_config = Hdf5Config( + { + "asset_name": "TestAsset", + "time": { + "format": "TIME_FORMAT_ABSOLUTE_DATETIME", + }, + "data": [ + { + "name": "DoubleChannel", + "time_dataset": "/timestamps", + "value_dataset": "/DoubleChannel", + "time_column": 1, + "value_column": 1, + "data_type": "CHANNEL_DATA_TYPE_DOUBLE", + }, + ], + } + ) + + mocker.patch("h5py.File", return_value=MockHdf5File(data_dict)) + + mock_csv_upload = mocker.patch("sift_py.data_import.csv.CsvUploadService.upload") + + svc = Hdf5UploadService(rest_config) + svc.upload( + "mock.h5", + hdf5_config, + ) + + mock_csv_upload.assert_called() + + +def test_merge_timeseries_dataframes_no_duplicates(): + """Test merging dataframes with no duplicate channels""" + df1 = pl.DataFrame({"timestamp": [0, 1, 2], "channel1": [1.0, 2.0, 3.0]}) + df2 = pl.DataFrame({"timestamp": [1, 2, 3], "channel2": [4.0, 5.0, 6.0]}) + + result = _merge_timeseries_dataframes(df1, df2) + + assert result.shape == (4, 3) + assert "timestamp" in result.columns + assert "channel1" in result.columns + assert "channel2" in result.columns + result = result.sort("timestamp") + assert result["timestamp"].to_list() == [0, 1, 2, 3] + assert result["channel1"].to_list() == [1.0, 2.0, 3.0, None] + assert result["channel2"].to_list() == [None, 4.0, 5.0, 6.0] + + +def test_merge_timeseries_dataframes_with_duplicates(): + """Test merging dataframes with duplicate channel names""" + df1 = pl.DataFrame( + {"timestamp": [0, 1, 2], "channel1": [1.0, 2.0, 3.0], "common_channel": [10.0, 20.0, 30.0]} + ) + df2 = pl.DataFrame( + {"timestamp": [1, 2, 3], "channel2": [4.0, 5.0, 6.0], "common_channel": [40.0, 50.0, 60.0]} + ) + + result = _merge_timeseries_dataframes(df1, df2) + + assert result.shape == (4, 4) + assert "timestamp" in result.columns + assert "channel1" in result.columns + assert "channel2" in result.columns + assert "common_channel" in result.columns + + result = result.sort("timestamp") + + # Check that values are coalesced properly + common_values = result["common_channel"].to_list() + assert common_values == [10.0, 20.0, 30.0, 60.0] + + 
+def test_merge_timeseries_dataframes_with_nulls(): + """Test merging dataframes where one has null values""" + df1 = pl.DataFrame( + {"timestamp": [0, 1, 2], "channel1": [1.0, None, 3.0], "common_channel": [10.0, None, 30.0]} + ) + df2 = pl.DataFrame( + {"timestamp": [1, 2, 3], "channel2": [4.0, 5.0, 6.0], "common_channel": [40.0, 50.0, 60.0]} + ) + + result = _merge_timeseries_dataframes(df1, df2) + + assert result.shape == (4, 4) + + timestamps = result["timestamp"].to_list() + common_values = result["common_channel"].to_list() + + # At timestamp 1: df1 has null, so should use df2 value (40.0) + assert common_values[timestamps.index(1)] == 40.0 + # At timestamp 2: df1 has 30.0, so should use df1 value + assert common_values[timestamps.index(2)] == 30.0 + + +def test_merge_timeseries_dataframes_empty_dataframes(): + """Test merging empty dataframes""" + df1 = pl.DataFrame({"timestamp": [], "channel1": []}) + df2 = pl.DataFrame({"timestamp": [], "channel2": []}) + + result = _merge_timeseries_dataframes(df1, df2) + + assert result.shape == (0, 3) + assert "timestamp" in result.columns + assert "channel1" in result.columns + assert "channel2" in result.columns + + +def test_merge_timeseries_dataframes_multiple_duplicates(): + """Test merging dataframes with multiple duplicate channel names""" + df1 = pl.DataFrame( + { + "timestamp": [0, 1, 2], + "channel1": [1.0, 2.0, 3.0], + "dup1": [10.0, 20.0, 30.0], + "dup2": [100.0, 200.0, 300.0], + } + ) + df2 = pl.DataFrame( + { + "timestamp": [1, 2, 3], + "channel2": [4.0, 5.0, 6.0], + "dup1": [40.0, 50.0, 60.0], + "dup2": [400.0, 500.0, 600.0], + } + ) + + result = _merge_timeseries_dataframes(df1, df2) + + assert result.shape == (4, 5) + expected_columns = {"timestamp", "channel1", "channel2", "dup1", "dup2"} + assert set(result.columns) == expected_columns + + # At timestamp 0: should have df1 values only + assert result.filter(pl.col("timestamp") == 0)["dup1"].item() == 10.0 + assert result.filter(pl.col("timestamp") == 0)["dup2"].item() == 100.0 + + # At timestamp 3: should have df2 values only + assert result.filter(pl.col("timestamp") == 3)["dup1"].item() == 60.0 + assert result.filter(pl.col("timestamp") == 3)["dup2"].item() == 600.0 + + +def test_merge_timeseries_dataframes_different_dtypes(): + """Test merging dataframes with different data types""" + df1 = pl.DataFrame( + {"timestamp": [0, 1, 2], "int_channel": [1, 2, 3], "common_channel": [10.0, 20.0, 30.0]} + ) + df2 = pl.DataFrame( + { + "timestamp": [1, 2, 3], + "string_channel": ["a", "b", "c"], + "common_channel": [40.0, 50.0, 60.0], + } + ) + + result = _merge_timeseries_dataframes(df1, df2) + + assert result.shape == (4, 4) + assert "int_channel" in result.columns + assert "string_channel" in result.columns + assert "common_channel" in result.columns + result = result.sort("timestamp") + assert result["string_channel"].to_list() == [None, "a", "b", "c"] + assert result["common_channel"].to_list() == [10.0, 20.0, 30.0, 60.0] + + +def test_convert_signed_enums(): + data_cfg = Hdf5DataCfg( + name="TestEnum", + time_dataset="/time", + value_dataset="/values", + data_type="CHANNEL_DATA_TYPE_ENUM", + enum_types=[ + {"name": "Off", "key": -1, "is_signed": True}, + {"name": "On", "key": 1, "is_signed": True}, + ], + ) + + # Create test data with signed enum values + test_data = pl.Series("test", [-1, 1, -1]) + + result = _convert_signed_enums(data_cfg, test_data) + + # Check that the signed enum key was converted: -1 + 2^32 = 4294967295 + assert data_cfg.enum_types[0].key == 4294967295 + 
assert data_cfg.enum_types[1].key == 1 # Positive key unchanged + + # Check that the data was converted to uint32 + assert result.dtype == pl.UInt32 + expected_values = np.array([4294967295, 1, 4294967295], dtype=np.uint32) + assert np.array_equal(result.to_numpy(), expected_values) + + +def test_convert_signed_enums_no_signed_keys(): + """Test _convert_signed_enums with no signed enum keys""" + + data_cfg = Hdf5DataCfg( + name="TestEnum", + time_dataset="/time", + value_dataset="/values", + data_type="CHANNEL_DATA_TYPE_ENUM", + enum_types=[ + {"name": "Off", "key": 0, "is_signed": True}, + {"name": "On", "key": 1, "is_signed": True}, + ], + ) + + test_data = pl.Series("test", [0, 1, 0]) + + result = _convert_signed_enums(data_cfg, test_data) + + # Keys should remain unchanged + assert data_cfg.enum_types[0].key == 0 + assert data_cfg.enum_types[1].key == 1 + + assert result.dtype == pl.UInt32 + assert np.array_equal(result.to_numpy(), test_data.to_numpy()) + + +def test_convert_signed_enums_collision_error(): + """Test _convert_signed_enums raises error when conversion would cause collision""" + + # Create a scenario where converting -1 to unsigned (4294967295) would collide + data_cfg = Hdf5DataCfg( + name="TestEnum", + time_dataset="/time", + value_dataset="/values", + data_type="CHANNEL_DATA_TYPE_ENUM", + enum_types=[ + {"name": "Negative", "key": -1, "is_signed": True}, + {"name": "Collision", "key": 4294967295, "is_signed": True}, # This would collide + ], + ) + + test_data = pl.Series("test", [-1, 4294967295]) + + with pytest.raises( + Exception, match="Converting key -1 to unsigned int collides with existing key 4294967295" + ): + _convert_signed_enums(data_cfg, test_data) + + +def test_convert_signed_enums_multiple_negative_keys(): + """Test _convert_signed_enums with multiple negative signed enum keys""" + + data_cfg = Hdf5DataCfg( + name="TestEnum", + time_dataset="/time", + value_dataset="/values", + data_type="CHANNEL_DATA_TYPE_ENUM", + enum_types=[ + {"name": "NegOne", "key": -1, "is_signed": True}, + {"name": "NegTwo", "key": -2, "is_signed": True}, + {"name": "Zero", "key": 0, "is_signed": True}, + {"name": "PosOne", "key": 1, "is_signed": True}, + ], + ) + + test_data = pl.Series("test", [-1, -2, 0, 1]) + + result = _convert_signed_enums(data_cfg, test_data) + + # Check conversions: -1 -> 4294967295, -2 -> 4294967294 + assert data_cfg.enum_types[0].key == 4294967295 + assert data_cfg.enum_types[1].key == 4294967294 + assert data_cfg.enum_types[2].key == 0 # Unchanged + assert data_cfg.enum_types[3].key == 1 # Unchanged + + # Data should be converted to uint32 + assert result.dtype == pl.UInt32 + expected_values = np.array([4294967295, 4294967294, 0, 1]) + assert np.array_equal(result.to_numpy(), expected_values) + + +def test_convert_signed_enums_edge_case_min_int32(): + """Test _convert_signed_enums with minimum int32 value""" + + min_int32 = -2147483648 + + data_cfg = Hdf5DataCfg( + name="TestEnum", + time_dataset="/time", + value_dataset="/values", + data_type="CHANNEL_DATA_TYPE_ENUM", + enum_types=[ + {"name": "MinInt32", "key": min_int32, "is_signed": True}, + ], + ) + + test_data = pl.Series("test", [min_int32]) + + result = _convert_signed_enums(data_cfg, test_data) + + # min_int32 + 2^32 = -2147483648 + 4294967296 = 2147483648 + expected_unsigned_key = min_int32 + (1 << 32) + assert data_cfg.enum_types[0].key == expected_unsigned_key + + # Data should be converted to uint32 + assert result.dtype == pl.UInt32 + expected_values = np.array([expected_unsigned_key]) + 
assert np.array_equal(result.to_numpy(), expected_values) + + +def test_convert_signed_enums_overflow(): + data_cfg = Hdf5DataCfg( + name="TestEnum", + time_dataset="/time", + value_dataset="/values", + data_type="CHANNEL_DATA_TYPE_ENUM", + enum_types=[ + # Min int32 is -2_147_483_648 + {"name": "Off", "key": -2_147_483_649, "is_signed": True}, + {"name": "On", "key": 1, "is_signed": True}, + ], + ) + + # Create test data with signed enum values + test_data = pl.Series("test", [-2_147_483_649, 1, -2_147_483_649]) + + with pytest.raises(Exception, match="below valid int32 range"): + _convert_signed_enums(data_cfg, test_data) diff --git a/python/lib/sift_py/data_import/_status_test.py b/python/lib/sift_py/data_import/_status_test.py index 9060ec92a..3976e074e 100644 --- a/python/lib/sift_py/data_import/_status_test.py +++ b/python/lib/sift_py/data_import/_status_test.py @@ -85,6 +85,77 @@ def test_get_status(mocker: MockFixture, data_import_data: dict): service.get_data_import() +def test_get_status_multiple(mocker: MockFixture, data_import_data: dict): + mock_session = mocker.patch("sift_py.rest.requests.Session", autospec=True) + mock_requests_get = mock_session.return_value.get + + def mock_get_resp(*args, **kwargs): + if "123-succeed" in kwargs["url"]: + data_import_data["dataImport"]["status"] = "DATA_IMPORT_STATUS_SUCCEEDED" + return MockResponse(status_code=200, text=json.dumps(data_import_data)) + elif "123-pend" in kwargs["url"]: + data_import_data["dataImport"]["status"] = "DATA_IMPORT_STATUS_PENDING" + return MockResponse(status_code=200, text=json.dumps(data_import_data)) + elif "123-prog" in kwargs["url"]: + data_import_data["dataImport"]["status"] = "DATA_IMPORT_STATUS_IN_PROGRESS" + return MockResponse(status_code=200, text=json.dumps(data_import_data)) + elif "123-fail" in kwargs["url"]: + data_import_data["dataImport"]["status"] = "DATA_IMPORT_STATUS_FAILED" + return MockResponse(status_code=200, text=json.dumps(data_import_data)) + elif "123-invalid" in kwargs["url"]: + data_import_data["dataImport"]["status"] = "INVALID_STATUS" + return MockResponse(status_code=200, text=json.dumps(data_import_data)) + else: + raise Exception("Unexpected url") + + mock_requests_get.side_effect = mock_get_resp + + service = DataImportService(rest_config, "123-succeed") + service.extend(DataImportService(rest_config, "123-succeed")) + assert service.get_data_import(idx=0).status == DataImportStatusType.SUCCEEDED + assert service.get_data_import(idx=1).status == DataImportStatusType.SUCCEEDED + + service = DataImportService(rest_config, "123-succeed") + service.extend(DataImportService(rest_config, "123-pend")) + assert service.get_data_import(idx=0).status == DataImportStatusType.SUCCEEDED + assert service.get_data_import(idx=1).status == DataImportStatusType.PENDING + + service = DataImportService(rest_config, "123-prog") + service.extend(DataImportService(rest_config, "123-pend")) + assert service.get_data_import(idx=0).status == DataImportStatusType.IN_PROGRESS + assert service.get_data_import(idx=1).status == DataImportStatusType.PENDING + + service = DataImportService(rest_config, "123-fail") + service.extend(DataImportService(rest_config, "123-succeed")) + assert service.get_data_import(idx=0).status == DataImportStatusType.FAILED + assert service.get_data_import(idx=1).status == DataImportStatusType.SUCCEEDED + + service = DataImportService(rest_config, "123-succeed") + service.extend(DataImportService(rest_config, "123-invalid")) + service.get_data_import(idx=0) + with 
pytest.raises(Exception, match="Invalid data import status"): + service.get_data_import(idx=1) + + +def test_many_imports(mocker: MockFixture, data_import_data: dict): + mock_session = mocker.patch("sift_py.rest.requests.Session", autospec=True) + mock_requests_get = mock_session.return_value.get + + data_import_data["dataImport"]["status"] = "DATA_IMPORT_STATUS_SUCCEEDED" + mock_requests_get.return_value = MockResponse( + status_code=200, text=json.dumps(data_import_data) + ) + + service = DataImportService(rest_config, "123-123-123") + for idx in range(10): + service.extend(DataImportService(rest_config, f"123-123-{idx}")) + + data_imports = list(service.get_data_imports()) + assert len(data_imports) == 11 + for data_import in data_imports: + assert data_import.status == DataImportStatusType.SUCCEEDED + + def test_wait_success(mocker: MockFixture, data_import_data: dict): mock_time_sleep = mocker.patch("sift_py.data_import.status.time.sleep") mock_session = mocker.patch("sift_py.rest.requests.Session", autospec=True) @@ -118,6 +189,134 @@ def test_wait_success(mocker: MockFixture, data_import_data: dict): assert service.wait_until_complete().status == DataImportStatusType.SUCCEEDED mock_time_sleep.assert_any_call(1) mock_time_sleep.assert_any_call(2) + assert mock_time_sleep.call_count == 2 + + +def test_wait_success_all_single(mocker: MockFixture, data_import_data: dict): + mock_time_sleep = mocker.patch("sift_py.data_import.status.time.sleep") + mock_session = mocker.patch("sift_py.rest.requests.Session", autospec=True) + mock_requests_get = mock_session.return_value.get + + succeeded = deepcopy(data_import_data) + succeeded["dataImport"]["status"] = "DATA_IMPORT_STATUS_SUCCEEDED" + + pending = deepcopy(data_import_data) + pending["dataImport"]["status"] = "DATA_IMPORT_STATUS_PENDING" + + in_progress = deepcopy(data_import_data) + in_progress["dataImport"]["status"] = "DATA_IMPORT_STATUS_IN_PROGRESS" + + mock_requests_get.side_effect = [ + MockResponse( + status_code=200, + text=json.dumps(pending), + ), + MockResponse( + status_code=200, + text=json.dumps(in_progress), + ), + MockResponse( + status_code=200, + text=json.dumps(succeeded), + ), + ] + + service = DataImportService(rest_config, "123-123-123") + data_imports = service.wait_until_all_complete() + assert data_imports[0].status == DataImportStatusType.SUCCEEDED + assert len(data_imports) == 1 + mock_time_sleep.assert_any_call(1) + mock_time_sleep.assert_any_call(2) + assert mock_time_sleep.call_count == 2 + + +def test_wait_success_multiple(mocker: MockFixture, data_import_data: dict): + mock_time_sleep = mocker.patch("sift_py.data_import.status.time.sleep") + mock_session = mocker.patch("sift_py.rest.requests.Session", autospec=True) + mock_requests_get = mock_session.return_value.get + + succeeded = deepcopy(data_import_data) + succeeded["dataImport"]["status"] = "DATA_IMPORT_STATUS_SUCCEEDED" + + pending = deepcopy(data_import_data) + pending["dataImport"]["status"] = "DATA_IMPORT_STATUS_PENDING" + + in_progress = deepcopy(data_import_data) + in_progress["dataImport"]["status"] = "DATA_IMPORT_STATUS_IN_PROGRESS" + + mock_requests_get.side_effect = [ + MockResponse( + status_code=200, + text=json.dumps(pending), + ), + MockResponse( + status_code=200, + text=json.dumps(in_progress), + ), + MockResponse( + status_code=200, + text=json.dumps(succeeded), + ), + ] + + service = DataImportService(rest_config, "123-123-123") + service.extend(DataImportService(rest_config, "456-456-456")) + assert 
service.wait_until_complete(idx=1).status == DataImportStatusType.SUCCEEDED + mock_time_sleep.assert_any_call(1) + mock_time_sleep.assert_any_call(2) + assert mock_time_sleep.call_count == 2 + + +def test_wait_success_all(mocker: MockFixture, data_import_data: dict): + mock_time_sleep = mocker.patch("sift_py.data_import.status.time.sleep") + mock_session = mocker.patch("sift_py.rest.requests.Session", autospec=True) + mock_requests_get = mock_session.return_value.get + + succeeded = deepcopy(data_import_data) + succeeded["dataImport"]["status"] = "DATA_IMPORT_STATUS_SUCCEEDED" + + pending = deepcopy(data_import_data) + pending["dataImport"]["status"] = "DATA_IMPORT_STATUS_PENDING" + + in_progress = deepcopy(data_import_data) + in_progress["dataImport"]["status"] = "DATA_IMPORT_STATUS_IN_PROGRESS" + + mock_requests_get.side_effect = [ + MockResponse( + status_code=200, + text=json.dumps(pending), + ), + MockResponse( + status_code=200, + text=json.dumps(in_progress), + ), + MockResponse( + status_code=200, + text=json.dumps(succeeded), + ), + MockResponse( + status_code=200, + text=json.dumps(pending), + ), + MockResponse( + status_code=200, + text=json.dumps(in_progress), + ), + MockResponse( + status_code=200, + text=json.dumps(succeeded), + ), + ] + + service = DataImportService(rest_config, "123-123-123") + service.extend(DataImportService(rest_config, "456-456-456")) + data_imports = service.wait_until_all_complete() + assert len(data_imports) == 2 + assert data_imports[0].status == DataImportStatusType.SUCCEEDED + assert data_imports[1].status == DataImportStatusType.SUCCEEDED + mock_time_sleep.assert_any_call(1) + mock_time_sleep.assert_any_call(2) + assert mock_time_sleep.call_count == 4 def test_wait_failure(mocker: MockFixture, data_import_data: dict): @@ -152,6 +351,56 @@ def test_wait_failure(mocker: MockFixture, data_import_data: dict): assert service.wait_until_complete().status == DataImportStatusType.FAILED +def test_wait_failure_all(mocker: MockFixture, data_import_data: dict): + mock_session = mocker.patch("sift_py.rest.requests.Session", autospec=True) + mock_requests_get = mock_session.return_value.get + + failed = deepcopy(data_import_data) + failed["dataImport"]["status"] = "DATA_IMPORT_STATUS_FAILED" + + pending = deepcopy(data_import_data) + pending["dataImport"]["status"] = "DATA_IMPORT_STATUS_PENDING" + + in_progress = deepcopy(data_import_data) + in_progress["dataImport"]["status"] = "DATA_IMPORT_STATUS_IN_PROGRESS" + + succeeded = deepcopy(data_import_data) + succeeded["dataImport"]["status"] = "DATA_IMPORT_STATUS_SUCCEEDED" + + mock_requests_get.side_effect = [ + MockResponse( + status_code=200, + text=json.dumps(pending), + ), + MockResponse( + status_code=200, + text=json.dumps(in_progress), + ), + MockResponse( + status_code=200, + text=json.dumps(succeeded), + ), + MockResponse( + status_code=200, + text=json.dumps(pending), + ), + MockResponse( + status_code=200, + text=json.dumps(in_progress), + ), + MockResponse( + status_code=200, + text=json.dumps(failed), + ), + ] + + service = DataImportService(rest_config, "123-123-123") + service.extend(DataImportService(rest_config, "456-456-456")) + data_imports = service.wait_until_all_complete() + assert data_imports[0].status == DataImportStatusType.SUCCEEDED + assert data_imports[1].status == DataImportStatusType.FAILED + + def test_wait_max_polling_interval(mocker: MockFixture, data_import_data: dict): mock_time_sleep = mocker.patch("sift_py.data_import.status.time.sleep") mock_session = 
mocker.patch("sift_py.rest.requests.Session", autospec=True) @@ -179,3 +428,52 @@ def test_wait_max_polling_interval(mocker: MockFixture, data_import_data: dict): service = DataImportService(rest_config, "123-123-123") assert service.wait_until_complete().status == DataImportStatusType.SUCCEEDED mock_time_sleep.assert_called_with(60) + + +def test_wait_max_polling_interval_all(mocker: MockFixture, data_import_data: dict): + mock_time_sleep = mocker.patch("sift_py.data_import.status.time.sleep") + mock_session = mocker.patch("sift_py.rest.requests.Session", autospec=True) + mock_requests_get = mock_session.return_value.get + + succeeded = deepcopy(data_import_data) + succeeded["dataImport"]["status"] = "DATA_IMPORT_STATUS_SUCCEEDED" + + in_progress = deepcopy(data_import_data) + in_progress["dataImport"]["status"] = "DATA_IMPORT_STATUS_IN_PROGRESS" + + mock_requests_get.side_effect = ( + [ + MockResponse( + status_code=200, + text=json.dumps(in_progress), + ) + for _ in range(60) + ] + + [ + MockResponse( + status_code=200, + text=json.dumps(succeeded), + ) + ] + + [ + MockResponse( + status_code=200, + text=json.dumps(in_progress), + ) + for _ in range(60) + ] + + [ + MockResponse( + status_code=200, + text=json.dumps(succeeded), + ) + ] + ) + + service = DataImportService(rest_config, "123-123-123") + service.extend(DataImportService(rest_config, "456-456-456")) + data_imports = service.wait_until_all_complete() + assert data_imports[0].status == DataImportStatusType.SUCCEEDED + assert data_imports[1].status == DataImportStatusType.SUCCEEDED + mock_time_sleep.assert_called_with(60) + assert mock_time_sleep.call_count == 120 diff --git a/python/lib/sift_py/data_import/config.py b/python/lib/sift_py/data_import/config.py index d41354c71..1ffc55d20 100644 --- a/python/lib/sift_py/data_import/config.py +++ b/python/lib/sift_py/data_import/config.py @@ -1,6 +1,6 @@ from typing import Any, Dict -from sift_py.data_import._config import CsvConfigImpl +from sift_py.data_import._config import CsvConfigImpl, Hdf5ConfigImpl class CsvConfig: @@ -17,3 +17,19 @@ def to_json(self) -> str: def to_dict(self) -> Dict[str, Any]: return self._csv_config.model_dump() + + +class Hdf5Config: + """ + Defines the HDF5 config for data imports. + """ + + def __init__(self, config_info: Dict[str, Any]): + self._config_info = config_info + self._hdf5_config = Hdf5ConfigImpl(**self._config_info) + + def to_json(self) -> str: + return self._hdf5_config.model_dump_json() + + def to_dict(self) -> Dict[str, Any]: + return self._hdf5_config.model_dump() diff --git a/python/lib/sift_py/data_import/hdf5.py b/python/lib/sift_py/data_import/hdf5.py new file mode 100644 index 000000000..eea9cb54b --- /dev/null +++ b/python/lib/sift_py/data_import/hdf5.py @@ -0,0 +1,463 @@ +import json +import uuid +from collections import defaultdict +from contextlib import ExitStack +from pathlib import Path +from typing import Dict, List, Tuple, Union, cast +from urllib.parse import urljoin + +import numpy as np + +try: + import h5py # type: ignore +except ImportError as e: + raise RuntimeError( + "The h5py package is required to use the HDF5 upload service. " + "Please include this dependency in your project by specifying `sift-stack-py[hdf5]`." + ) from e + +try: + import polars as pl # type: ignore +except ImportError as e: + raise RuntimeError( + "The polars package is required to use the HDF5 upload service. " + "Please include this dependency in your project by specifying `sift-stack-py[hdf5]`." 
+    ) from e
+
+from sift_py.data_import._config import Hdf5DataCfg
+from sift_py.data_import.config import CsvConfig, Hdf5Config
+from sift_py.data_import.csv import CsvUploadService
+from sift_py.data_import.status import DataImportService
+from sift_py.data_import.tempfile import NamedTemporaryFile
+from sift_py.rest import SiftRestConfig
+
+
+class Hdf5UploadService:
+    """
+    Service to upload HDF5 files.
+    """
+
+    _RUN_PATH = "/api/v2/runs"
+    _csv_upload_service: CsvUploadService
+    _prev_run_id: str
+
+    def __init__(self, rest_conf: SiftRestConfig):
+        self._csv_upload_service = CsvUploadService(rest_conf)
+        self._prev_run_id = ""
+
+    def upload(
+        self,
+        path: Union[str, Path],
+        hdf5_config: Hdf5Config,
+        show_progress: bool = True,
+    ) -> DataImportService:
+        """
+        Uploads the HDF5 file pointed to by `path` using a custom HDF5 config.
+
+        Args:
+            path: The path to the HDF5 file.
+            hdf5_config: The HDF5 config.
+            show_progress: Whether to show the status bar or not.
+
+        Returns:
+            DataImportService used to get the status of the import.
+        """
+
+        posix_path = Path(path) if isinstance(path, str) else path
+
+        if not posix_path.is_file():
+            raise Exception(f"Provided path, '{path}', does not point to a regular file.")
+
+        # Prefer to combine data into a single CSV for upload. However, empty data points for
+        # string channels would be ingested as empty strings, which necessitates a separate file
+        # for each string dataframe. Split hdf5_config into separate configs: one per string
+        # channel, plus a single config for all other data.
+        split_configs = _split_hdf5_configs(hdf5_config)
+
+        # NamedTemporaryFiles are deleted upon exiting the with block.
+        # ExitStack ensures all temp files stay open through the upload and are closed upon
+        # exiting the block, or if the program exits early.
+        with ExitStack() as stack:
+            # First convert each split config to a CSV file.
+            csv_items: List[Tuple[str, CsvConfig]] = []
+            for config in split_configs:
+                temp_file = stack.enter_context(NamedTemporaryFile(mode="w", suffix=".csv"))
+                csv_config = _convert_to_csv_file(
+                    path,
+                    temp_file.name,
+                    config,
+                )
+                csv_items.append((temp_file.name, csv_config))
+
+            if not csv_items:
+                raise Exception("No data found for upload during processing of file")
+
+            # If a config defines a run_name and is split up, multiple runs would be created.
+            # Instead, generate a run_id now and use that instead of a run_name.
+            # This is done here, rather than before the config split, so that no run is created
+            # if any problems arise before we are ready to upload.
+            # The active run_id is copied to _prev_run_id for user reference.
+            if hdf5_config._hdf5_config.run_name != "":
+                run_id = self._create_run(hdf5_config._hdf5_config.run_name)
+                for _, csv_config in csv_items:
+                    csv_config._csv_config.run_name = ""
+                    csv_config._csv_config.run_id = run_id
+
+                self._prev_run_id = run_id
+            elif hdf5_config._hdf5_config.run_id != "":
+                self._prev_run_id = hdf5_config._hdf5_config.run_id
+            else:
+                self._prev_run_id = ""
+
+            # Upload each file
+            import_service = None
+            for filename, csv_config in csv_items:
+                new_import_service = self._csv_upload_service.upload(
+                    filename, csv_config, show_progress=show_progress
+                )
+                if import_service is None:
+                    import_service = new_import_service
+                else:
+                    import_service.extend(new_import_service)
+
+            if import_service is not None:
+                return import_service
+            else:
+                raise Exception("No data uploaded by service")
+
+    def get_previous_upload_run_id(self) -> str:
+        """Return the run_id used in the previous upload."""
+        return self._prev_run_id
+
+    def _create_run(self, run_name: str) -> str:
+        """Create a new run using the REST service and return its run_id."""
+        run_uri = urljoin(self._csv_upload_service._base_uri, self._RUN_PATH)
+
+        # Since CsvUploadService is already a RestService, we can reuse its session here.
+        response = self._csv_upload_service._session.post(
+            url=run_uri,
+            headers={
+                "Content-Encoding": "application/json",
+            },
+            data=json.dumps(
+                {
+                    "name": run_name,
+                    "description": "",
+                }
+            ),
+        )
+        if response.status_code != 200:
+            raise Exception(
+                f"Run creation failed with status code {response.status_code}. {response.text}"
+            )
+
+        try:
+            run_info = response.json()
+        except (json.decoder.JSONDecodeError, KeyError):
+            raise Exception(f"Invalid response: {response.text}")
+
+        if "run" not in run_info:
+            raise Exception("Response missing key: run")
+        if "runId" not in run_info["run"]:
+            raise Exception("Response missing key: runId")
+
+        return run_info["run"]["runId"]
+
+
+def _convert_to_csv_file(
+    src_path: Union[str, Path],
+    dst_file: str,
+    hdf5_config: Hdf5Config,
+) -> CsvConfig:
+    """Converts the HDF5 file to a temporary CSV on disk that we will upload.
+
+    Args:
+        src_path: The source path to the HDF5 file.
+        dst_file: The output CSV file path.
+        hdf5_config: The HDF5 config.
+
+    Returns:
+        The CSV config for the import.
+    """
+
+    merged_df = _convert_hdf5_to_dataframes(src_path, hdf5_config)
+    csv_cfg = _create_csv_config(hdf5_config, merged_df)
+    # polars write_csv requires a path, not a TextIO, to work on Windows.
+    merged_df.write_csv(dst_file)
+
+    return csv_cfg
+
+
+def _convert_hdf5_to_dataframes(
+    src_path: Union[str, Path], hdf5_config: Hdf5Config
+) -> pl.DataFrame:
+    """Convert the HDF5 file to a polars DataFrame.
+
+    Args:
+        src_path: The source path to the HDF5 file.
+        hdf5_config: The HDF5 config.
+
+    Returns:
+        A polars DataFrame containing the data.
+ """ + # Group data configs by matching time arrays to optimize downstream data processing + data_cfg_ts_map: Dict[Tuple[str, int], List[Hdf5DataCfg]] = defaultdict(list) + for data_cfg in hdf5_config._hdf5_config.data: + map_tuple = (data_cfg.time_dataset, data_cfg.time_column) + data_cfg_ts_map[map_tuple].append(data_cfg) + + data_frames = [] + # Using swmr=True allows opening of HDF5 files written in SWMR mode which may have not been properly closed, but may be otherwise valid + with h5py.File(src_path, "r", libver="latest", swmr=True) as h5f: + for (time_path, time_col), data_cfgs in data_cfg_ts_map.items(): + df = _extract_hdf5_data_to_dataframe(h5f, time_path, time_col, data_cfgs) + data_frames.append(df) + + # Merge polars dataframes by joining pairs, then merging those pairs until one dataframe remains + # More optimized than joining one by one + # pl.concat(data_frames, how="align") in practice can lead to a fatal crash with larger files + # https://github.com/pola-rs/polars/issues/14591 + while len(data_frames) > 1: + next_round = [] + for i in range(0, len(data_frames), 2): + if i + 1 < len(data_frames): + df1 = data_frames[i] + df2 = data_frames[i + 1] + merged = _merge_timeseries_dataframes(df1, df2) + next_round.append(merged) + else: + next_round.append(data_frames[i]) + data_frames = next_round + merged_df = data_frames[0].sort("timestamp") + return merged_df + + +def _merge_timeseries_dataframes(df1: pl.DataFrame, df2: pl.DataFrame) -> pl.DataFrame: + """Merge two timeseries dataframes together. Handles duplicate channels""" + + df1_channels = [col for col in df1.columns if col != "timestamp"] + df2_channels = [col for col in df2.columns if col != "timestamp"] + dup_channels = set(df1_channels) & set(df2_channels) + + if dup_channels: + # Create a unique id to mark duplicate channels + uid = uuid.uuid4() + + df2_renamed = df2.clone() + for col in dup_channels: + df2_renamed = df2_renamed.rename({col: f"{col}_{uid}"}) + + merged_df = df1.join(df2_renamed, on="timestamp", how="full", coalesce=True) + + # Merge duplicate column data + for col in dup_channels: + temp_col_name = f"{col}_{uid}" + merged_df = merged_df.with_columns( + pl.coalesce([pl.col(col), pl.col(temp_col_name)]).alias(col) + ).drop(temp_col_name) + + else: + merged_df = df1.join(df2, on="timestamp", how="full", coalesce=True) + + return merged_df + + +def _extract_hdf5_data_to_dataframe( + hdf5_file: h5py.File, + time_path: str, + time_col: int, + hdf5_data_configs: List[Hdf5DataCfg], +) -> pl.DataFrame: + """Extract data from an hdf5_file to a polars DataFrame. 
+ + Args: + hdf5_file: HDF5 File + time_path: HDF5 time array path + time_col: HDF5 time array col (1-indexed) + hdf5_data_configs: List of HDF5 data configs being extracted + + Returns: + A multi-column polars DataFrame containing the timestamps and associated channels + """ + + if time_path not in hdf5_file: + raise Exception(f"HDF5 file does not contain dataset {time_path}") + time_dataset = cast(h5py.Dataset, hdf5_file[time_path]) + df_time = pl.DataFrame(time_dataset[:]) + time_idx = time_col - 1 + + if df_time.shape[1] <= time_idx: + raise Exception(f"{time_path}: time_column={time_col} out of range") + time_series = df_time[df_time.columns[time_idx]] + + # HDF5 string data may come in as binary, so convert + if time_series.dtype == pl.Binary: + time_series = time_series.cast(pl.String) + + data_frame = pl.DataFrame(data={"timestamp": time_series}) + + for hdf5_data_config in hdf5_data_configs: + if hdf5_data_config.value_dataset not in hdf5_file: + raise Exception(f"HDF5 file does not contain dataset {hdf5_data_config.value_dataset}") + + # Should always be true due to calling code + assert time_path == hdf5_data_config.time_dataset, ( + f"Working time dataset {time_path} does not match data cfg defined dataset {hdf5_data_config.time_dataset}" + ) + assert time_col == hdf5_data_config.time_column, ( + f"Working time col {time_col} does not match data cfg defined col {hdf5_data_config.time_column}" + ) + + value_dataset = cast(h5py.Dataset, hdf5_file[hdf5_data_config.value_dataset]) + + # Convert the full value dataset to a dataframe + # This will make it easier to work with any nested columns from a numpy structured array + df_value = pl.DataFrame(value_dataset[:]) + val_idx = hdf5_data_config.value_column - 1 + + if df_value.shape[1] <= val_idx: + raise Exception( + f"{hdf5_data_config.name}: value_column={hdf5_data_config.value_column} out of range for {hdf5_data_config.value_dataset}" + ) + value_series = df_value[df_value.columns[val_idx]] + + if len(time_series) != len(value_series): + raise Exception( + f"{hdf5_data_config.name}: time and value columns have different lengths ({len(time_series)} vs {len(value_series)})" + ) + + # HDF5 string data may come in as binary, so convert + if value_series.dtype == pl.Binary: + value_series = value_series.cast(pl.String) + + # Handle signed enums + # TODO: Remove once properly handled upon ingestion + if hdf5_data_config.data_type == "CHANNEL_DATA_TYPE_ENUM" and any( + [enum_type.is_signed for enum_type in hdf5_data_config.enum_types] + ): + value_series = _convert_signed_enums(hdf5_data_config, value_series) + + data_frame = data_frame.with_columns(value_series.alias(hdf5_data_config.name)) + + return data_frame + + +def _convert_signed_enums(data_cfg: Hdf5DataCfg, data: pl.Series) -> pl.Series: + """ + Convert signed enums to unsigned ints for ingestion + Ignores keys >= 0, such as those which may have been converted previously by the user + Raises an exception if the conversion would collide with an existing key, + or if a negative key cannot be represented as a uint32 + """ + cur_enum_keys = set([enum_type.key for enum_type in data_cfg.enum_types]) + + for enum_type in data_cfg.enum_types: + if not enum_type.is_signed or enum_type.key >= 0: + continue + if enum_type.key < -2_147_483_648: + raise Exception( + f"{data_cfg.name}: Cannot convert key {enum_type.key} to uint32 due to being below valid int32 range" + ) + unsigned_key = enum_type.key + (1 << 32) + if unsigned_key in cur_enum_keys: + raise Exception( +
f"{data_cfg.name}: Converting key {enum_type.key} to unsigned int collides with existing key {unsigned_key}" + ) + enum_type.key = unsigned_key + + # Numpy astype will wrap negative values + return pl.Series(data.to_numpy().astype(np.uint32)) + + +def _create_csv_config(hdf5_config: Hdf5Config, merged_df: pl.DataFrame) -> CsvConfig: + """Construct a CsvConfig from a Hdf5Config + + Args: + hdf5_config: The HDF5 config + merged_df: The merged dataFrame of data + + Returns: + The CSV config. + """ + + csv_config_dict = { + "asset_name": hdf5_config._hdf5_config.asset_name, + "run_name": hdf5_config._hdf5_config.run_name, + "run_id": hdf5_config._hdf5_config.run_id, + "first_data_row": 2, # Row 1 is headers + "time_column": { + "format": hdf5_config._hdf5_config.time.format, + "column_number": 1, + "relative_start_time": hdf5_config._hdf5_config.time.relative_start_time, + }, + } + + # Map each data config to its channel name + config_map = {d_cfg.name: d_cfg for d_cfg in hdf5_config._hdf5_config.data} + + assert merged_df.columns[0] == "timestamp", ( + f"Unexpected merged DataFrame layout. Expected first column to be timestamp, not {merged_df.columns[0]}" + ) + + data_columns = {} + for idx, channel_name in enumerate(merged_df.columns[1:]): + data_cfg = config_map[channel_name] + col_num = idx + 2 # 1-indexed and col 1 is time col + data_columns[col_num] = { + "name": data_cfg.name, + "data_type": data_cfg.data_type, + "units": data_cfg.units, + "description": data_cfg.description, + "enum_types": data_cfg.enum_types, + "bit_field_elements": data_cfg.bit_field_elements, + } + + csv_config_dict["data_columns"] = data_columns + + return CsvConfig(csv_config_dict) + + +def _split_hdf5_configs(hdf5_config: Hdf5Config) -> List[Hdf5Config]: + """ + Split up hdf5_config into separate configs used to generate each CSV file + Needed as string channels cannot be merged without creating empty string data points in the app + + Args: + hdf5_config: The HDF5 config. 
+ + Returns: + List of HDF5Configs for later CSV conversion + """ + + # Combined config for non string types + non_string_config_dict = { + "asset_name": hdf5_config._hdf5_config.asset_name, + "run_name": hdf5_config._hdf5_config.run_name, + "run_id": hdf5_config._hdf5_config.run_id, + "time": hdf5_config._hdf5_config.time, + "data": [ + data_cfg + for data_cfg in hdf5_config._hdf5_config.data + if data_cfg.data_type != "CHANNEL_DATA_TYPE_STRING" + ], + } + + filtered_hdf5_configs = [] + + # Avoid adding combined config if no non-string data present + if non_string_config_dict["data"]: + filtered_hdf5_configs.append(Hdf5Config(non_string_config_dict)) + + for data_cfg in hdf5_config._hdf5_config.data: + if data_cfg.data_type != "CHANNEL_DATA_TYPE_STRING": + continue + string_config = Hdf5Config( + { + "asset_name": hdf5_config._hdf5_config.asset_name, + "run_name": hdf5_config._hdf5_config.run_name, + "run_id": hdf5_config._hdf5_config.run_id, + "time": hdf5_config._hdf5_config.time, + "data": [data_cfg], + } + ) + filtered_hdf5_configs.append(string_config) + + return filtered_hdf5_configs diff --git a/python/lib/sift_py/data_import/status.py b/python/lib/sift_py/data_import/status.py index ad721ad11..065f899d2 100644 --- a/python/lib/sift_py/data_import/status.py +++ b/python/lib/sift_py/data_import/status.py @@ -1,7 +1,7 @@ import time from datetime import datetime from enum import Enum -from typing import Optional, Union +from typing import Generator, List, Optional, Union from urllib.parse import urljoin from pydantic import BaseModel, ConfigDict, field_validator @@ -65,34 +65,48 @@ class DataImportService(_RestService): """ STATUS_PATH = "/api/v1/data-imports" - _data_import_id: str + _data_import_ids: List[str] + _status_uri: str # TODO: rename restconf to rest_conf for consistency between services def __init__(self, restconf: SiftRestConfig, data_import_id: str): super().__init__(rest_conf=restconf) - self._data_import_id = data_import_id + self._data_import_ids = [data_import_id] self._status_uri = urljoin(self._base_uri, self.STATUS_PATH) - def get_data_import(self) -> DataImport: + def extend(self, other: Self): """ - Returns information about the data import. + Add an existing data import service to track a batch data import + """ + self._data_import_ids.extend(other._data_import_ids) + + def get_data_import(self, idx: int = 0) -> DataImport: + """ + Returns information about the data import. Provides the first data import if multiple provided through `extend` and `idx` not passed + + - `idx`: Optional idx of the desired DataImport to access """ response = self._session.get( - url=f"{self._status_uri}/{self._data_import_id}", + url=f"{self._status_uri}/{self._data_import_ids[idx]}", ) response.raise_for_status() data = response.json().get("dataImport") data_import = DataImport(**data) return data_import - def wait_until_complete(self) -> DataImport: + def get_data_imports(self) -> Generator[DataImport, None, None]: + for idx in range(len(self._data_import_ids)): + yield self.get_data_import(idx=idx) + + def wait_until_complete(self, idx: int = 0) -> DataImport: """ Blocks until the data import is completed. Check the status to determine if the import was successful or not. 
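# A usage sketch of the batched import tracking added here; the two service objects
# are assumed to come from separate upload calls elsewhere in this package.
from typing import List

from sift_py.data_import.status import DataImport, DataImportService


def track_batch(first: DataImportService, second: DataImportService) -> List[DataImport]:
    first.extend(second)  # `first` now tracks the imports from both uploads
    for data_import in first.get_data_imports():
        print(data_import.status)  # current status of each tracked import
    return first.wait_until_all_complete()  # block until every import finishes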
+ Waits on only the first data import if multiple were provided through `extend` and `idx` is not passed """ polling_interval = 1 while True: - data_import = self.get_data_import() + data_import = self.get_data_import(idx=idx) status: DataImportStatusType = data_import.status # type: ignore if status in [ DataImportStatusType.SUCCEEDED, @@ -108,3 +122,9 @@ def wait_until_complete(self) -> DataImport: raise Exception(f"Unknown status: {status}") time.sleep(polling_interval) polling_interval = min(polling_interval * 2, 60) + + def wait_until_all_complete(self) -> List[DataImport]: + """ + Blocks until all data imports are complete. + """ + return [self.wait_until_complete(idx=idx) for idx in range(len(self._data_import_ids))] diff --git a/python/lib/sift_py/data_import/tempfile.py b/python/lib/sift_py/data_import/tempfile.py index bbde8c345..2f1bbfe6a 100644 --- a/python/lib/sift_py/data_import/tempfile.py +++ b/python/lib/sift_py/data_import/tempfile.py @@ -35,5 +35,5 @@ def __exit__(self, exc_type, exc_value, traceback): try: os.remove(self.name) os.rmdir(self.temp_dir) - except FileNotFoundError: + except (FileNotFoundError, PermissionError): pass diff --git a/python/lib/sift_py/ingestion/_internal/channel.py b/python/lib/sift_py/ingestion/_internal/channel.py index fe9f3e709..116e10c5c 100644 --- a/python/lib/sift_py/ingestion/_internal/channel.py +++ b/python/lib/sift_py/ingestion/_internal/channel.py @@ -2,11 +2,5 @@ def channel_reference_from_fqn(fqn: str) -> ChannelReference: - parts = fqn.split(".") - - if len(parts) == 1: - return ChannelReference(name=parts[0]) - - component_parts = parts[: len(parts) - 1] - - return ChannelReference(name=parts[-1], component=".".join(component_parts)) + # Components are deprecated, so use the full channel name as the name + return ChannelReference(name=fqn) diff --git a/python/lib/sift_py/ingestion/channel.py b/python/lib/sift_py/ingestion/channel.py index b894fb1f4..c1ab71dc1 100644 --- a/python/lib/sift_py/ingestion/channel.py +++ b/python/lib/sift_py/ingestion/channel.py @@ -179,17 +179,23 @@ def from_pb(cls, message: ChannelBitFieldElementPb) -> Self: class ChannelEnumType(AsProtobuf): name: str key: int + is_signed: Optional[bool] = None - def __init__(self, name: str, key: int): + def __init__(self, name: str, key: int, is_signed: Optional[bool] = None): self.name = name self.key = key + self.is_signed = is_signed def as_pb(self, klass: Type[ChannelEnumTypePb]) -> ChannelEnumTypePb: - return klass(name=self.name, key=self.key) + return klass( + name=self.name, + key=self.key, + is_signed=False if self.is_signed is None else self.is_signed, + ) @classmethod def from_pb(cls, message: ChannelEnumTypePb) -> Self: - return cls(name=message.name, key=message.key) + return cls(name=message.name, key=message.key, is_signed=message.is_signed) class ChannelDataTypeStrRep(Enum): diff --git a/python/lib/sift_py/rule/_service_test.py b/python/lib/sift_py/rule/_service_test.py index 7d6fbcfee..85877a9c8 100644 --- a/python/lib/sift_py/rule/_service_test.py +++ b/python/lib/sift_py/rule/_service_test.py @@ -184,7 +184,12 @@ def test_rule_service_attach_asset(): asset_names=["abc"], ) asset = Asset(name="asset", asset_id="asset-id", organization_id="org-id") - with mock.patch("sift_py.rule.service.RuleServiceStub", return_value=mock.MagicMock()): + with mock.patch( + "sift_py.rule.service.RuleServiceStub", return_value=mock.MagicMock() + ) as mock_stub: + # Need to return a rule_id string when calling get_rule + mock_instance = mock_stub.return_value +
mock_instance.GetRule.return_value.rule.rule_id = "" rule_service = RuleService(MockChannel()) with mock.patch.object(RuleService, "_get_assets", return_value=[asset]): returned_config = rule_service.attach_asset(rule_config, ["asset"]) @@ -203,7 +208,12 @@ def test_rule_service_detach_asset(): ) asset_abc = Asset(name="abc", asset_id="abc-id", organization_id="org-id") asset_def = Asset(name="def", asset_id="def-id", organization_id="org-id") - with mock.patch("sift_py.rule.service.RuleServiceStub", return_value=mock.MagicMock()): + with mock.patch( + "sift_py.rule.service.RuleServiceStub", return_value=mock.MagicMock() + ) as mock_stub: + # Need to return a rule_id string when calling get_rule + mock_instance = mock_stub.return_value + mock_instance.GetRule.return_value.rule.rule_id = "" rule_service = RuleService(MockChannel()) with mock.patch.object(RuleService, "_get_assets", return_value=[asset_abc, asset_def]): returned_config = rule_service.detach_asset(rule_config, ["abc"]) diff --git a/python/lib/sift_py/rule/config.py b/python/lib/sift_py/rule/config.py index 22c429be2..3b38e3f48 100644 --- a/python/lib/sift_py/rule/config.py +++ b/python/lib/sift_py/rule/config.py @@ -38,6 +38,7 @@ class RuleConfig(AsJson): asset_names: List[str] contextual_channels: List[str] is_external: bool + _rule_id: Optional[str] # Allow passing of rule_id when existing config retrieved from API def __init__( self, @@ -65,6 +66,7 @@ def __init__( self.description = description self.expression = self.__class__.interpolate_sub_expressions(expression, sub_expressions) self.is_external = is_external + self._rule_id = None def as_json(self) -> Any: """ diff --git a/python/lib/sift_py/rule/service.py b/python/lib/sift_py/rule/service.py index 3fee3690c..efe25337d 100644 --- a/python/lib/sift_py/rule/service.py +++ b/python/lib/sift_py/rule/service.py @@ -267,6 +267,11 @@ def _attach_or_detach_asset( if isinstance(rule, str): rule = cast(RuleConfig, self.get_rule(rule)) + elif not rule._rule_id and rule.rule_client_key: + # Populate the provided rule with its rule_id + # Needed to fix a bug when updating an existing rule without an id + existing_rule = cast(RuleConfig, self.get_rule(cast(str, rule.rule_client_key))) + rule._rule_id = existing_rule._rule_id if attach: if not rule.asset_names: @@ -469,8 +474,15 @@ def _update_req_from_rule_config( ident = channel_reference_from_fqn(channel) contextual_channel_names.append(ident) + # Pass rule_id from config if retrieved from an existing rule + # Will be overwritten by rule if passed to function + # Fixes a bug with a missing rule_id in UpdateRuleRequest for existing rules + if config._rule_id: + rule_id = config._rule_id + else: + rule_id = "" + organization_id = "" - rule_id = "" if rule: rule_id = rule.rule_id organization_id = rule.organization_id @@ -559,6 +571,9 @@ def get_rule(self, rule: str) -> Optional[RuleConfig]: expression=expression, ) + # rule_id currently required for an existing rule + rule_config._rule_id = rule_pb.rule_id if rule_pb.rule_id else None + return rule_config def _get_rule_from_client_key(self, client_key: str) -> Optional[Rule]: diff --git a/python/lib/sift_py/rule_evaluation/_service_test.py b/python/lib/sift_py/rule_evaluation/_service_test.py index aa1a2a6ce..51e40c7eb 100644 --- a/python/lib/sift_py/rule_evaluation/_service_test.py +++ b/python/lib/sift_py/rule_evaluation/_service_test.py @@ -45,12 +45,16 @@ def test_evaluate_and_preview_rule_identifiers_against_run(rule_evaluation_servi assert request.run.id == run_id assert
request.rules.rules.ids.ids[0] == rule_identifiers[0].rule_id assert request.rules.rules.ids.ids[1] == rule_identifiers[1].rule_id + assert request.run_time_range.start_time.seconds == 0 + assert request.run_time_range.end_time.seconds == 0 rule_evaluation_service.preview_against_run(run_id, rule_identifiers) request = mock_stub.mock_calls[1].args[0] assert request.run.id == run_id assert request.rules.rules.ids.ids[0] == rule_identifiers[0].rule_id assert request.rules.rules.ids.ids[1] == rule_identifiers[1].rule_id + assert request.run_time_range.start_time.seconds == 0 + assert request.run_time_range.end_time.seconds == 0 def test_evaluate_and_preview_report_template_against_run(rule_evaluation_service): @@ -68,11 +72,51 @@ def test_evaluate_and_preview_report_template_against_run(rule_evaluation_servic assert request.report_name == report_name assert request.run.id == run_id assert request.report_template.report_template.id == report_template.template_id + assert request.run_time_range.start_time.seconds == 0 + assert request.run_time_range.end_time.seconds == 0 rule_evaluation_service.preview_against_run(run_id, report_template) request = mock_stub.mock_calls[1].args[0] assert request.run.id == run_id assert request.report_template.report_template.id == report_template.template_id + assert request.run_time_range.start_time.seconds == 0 + assert request.run_time_range.end_time.seconds == 0 + + +def test_evaluate_and_preview_report_template_against_run_with_start_end_times( + rule_evaluation_service, +): + mock_stub = MagicMock() + rule_evaluation_service._rule_evaluation_stub = mock_stub + mock_stub.EvaluateRules.return_value = EvaluateRulesResponse(report_id="test_report_id") + + run_id = "test_run_id" + report_name = "test_report" + report_template = ReportTemplateConfig(name=report_name, template_id="template-id") + start_time = datetime(2025, 1, 1, 1, 1, 1, tzinfo=timezone.utc) + end_time = datetime(2025, 1, 2, 1, 1, 59, tzinfo=timezone.utc) + + report = rule_evaluation_service.evaluate_against_run( + run_id, + report_template, + report_name, + start_time=start_time, + end_time=end_time, + ) + request = mock_stub.mock_calls[0].args[0] + assert report.report_id == "test_report_id" + assert request.report_name == report_name + assert request.run_time_range.run.id == run_id + assert request.run_time_range.start_time.seconds == int(start_time.timestamp()) + assert request.run_time_range.end_time.seconds == int(end_time.timestamp()) + assert request.report_template.report_template.id == report_template.template_id + + rule_evaluation_service.preview_against_run(run_id, report_template, start_time, end_time) + request = mock_stub.mock_calls[1].args[0] + assert request.report_template.report_template.id == report_template.template_id + assert request.run_time_range.run.id == run_id + assert request.run_time_range.start_time.seconds == int(start_time.timestamp()) + assert request.run_time_range.end_time.seconds == int(end_time.timestamp()) def test_evaluate_and_preview_rule_configs_against_run(rule_evaluation_service): @@ -106,12 +150,65 @@ def test_evaluate_and_preview_rule_configs_against_run(rule_evaluation_service): assert request.run.id == run_id assert request.rules.rules.client_keys.client_keys[0] == rule_configs[0].rule_client_key assert request.rules.rules.client_keys.client_keys[1] == rule_configs[1].rule_client_key + assert request.run_time_range.start_time.seconds == 0 + assert request.run_time_range.end_time.seconds == 0 rule_evaluation_service.preview_against_run(run_id, 
rule_configs) request = mock_stub.mock_calls[1].args[0] assert request.run.id == run_id assert request.rules.rules.client_keys.client_keys[0] == rule_configs[0].rule_client_key assert request.rules.rules.client_keys.client_keys[1] == rule_configs[1].rule_client_key + assert request.run_time_range.start_time.seconds == 0 + assert request.run_time_range.end_time.seconds == 0 + + +def test_evaluate_and_preview_rule_configs_against_run_with_start_end_times( + rule_evaluation_service, +): + mock_stub = MagicMock() + rule_evaluation_service._rule_evaluation_stub = mock_stub + mock_stub.EvaluateRules.return_value = EvaluateRulesResponse(report_id="test_report_id") + + run_id = "test_run_id" + report_name = "test_report" + rule_configs = [ + RuleConfig( + name="rule1", + rule_client_key="key1", + channel_references=[], + expression="$1 == 1", + action=RuleActionCreateDataReviewAnnotation(), + ), + RuleConfig( + name="rule2", + rule_client_key="key2", + channel_references=[], + expression="$2 == 2", + action=RuleActionCreateDataReviewAnnotation(), + ), + ] + start_time = datetime(2025, 1, 1, 1, 1, 1, tzinfo=timezone.utc) + end_time = datetime(2025, 1, 2, 1, 1, 59, tzinfo=timezone.utc) + + report = rule_evaluation_service.evaluate_against_run( + run_id, rule_configs, report_name, start_time, end_time + ) + request = mock_stub.mock_calls[0].args[0] + assert report.report_id == "test_report_id" + assert request.report_name == report_name + assert request.rules.rules.client_keys.client_keys[0] == rule_configs[0].rule_client_key + assert request.rules.rules.client_keys.client_keys[1] == rule_configs[1].rule_client_key + assert request.run_time_range.run.id == run_id + assert request.run_time_range.start_time.seconds == int(start_time.timestamp()) + assert request.run_time_range.end_time.seconds == int(end_time.timestamp()) + + rule_evaluation_service.preview_against_run(run_id, rule_configs, start_time, end_time) + request = mock_stub.mock_calls[1].args[0] + assert request.rules.rules.client_keys.client_keys[0] == rule_configs[0].rule_client_key + assert request.rules.rules.client_keys.client_keys[1] == rule_configs[1].rule_client_key + assert request.run_time_range.run.id == run_id + assert request.run_time_range.start_time.seconds == int(start_time.timestamp()) + assert request.run_time_range.end_time.seconds == int(end_time.timestamp()) def test_evaluate_rules_against_assets(rule_evaluation_service): diff --git a/python/lib/sift_py/rule_evaluation/service.py b/python/lib/sift_py/rule_evaluation/service.py index 677cf6670..d53431687 100644 --- a/python/lib/sift_py/rule_evaluation/service.py +++ b/python/lib/sift_py/rule_evaluation/service.py @@ -20,6 +20,7 @@ EvaluateRulesPreviewResponse, EvaluateRulesRequest, EvaluateRulesResponse, + RunTimeRange, ) from sift.rule_evaluation.v1.rule_evaluation_pb2_grpc import RuleEvaluationServiceStub from sift_py._internal.time import to_timestamp_pb @@ -49,6 +50,8 @@ def evaluate_against_run( run_id: str, rules: Union[ReportTemplateConfig, List[RuleConfig], List[RuleIdentifier]], report_name: str = "", + start_time: Optional[Union[datetime, str, int, float]] = None, + end_time: Optional[Union[datetime, str, int, float]] = None, ) -> ReportService: """Evaluate a set of rules against a run. @@ -57,16 +60,19 @@ def evaluate_against_run( rules: Either a ReportTemplateConfig, a list of RuleConfigs, or a list of RuleIdentifiers (typically from `RuleService.create_external_rules`). report_name: Optional report name. 
+ start_time: Optional start time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). + end_time: Optional end time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). Returns: A ReportService object that can be use to get the status of the executed report. """ - eval_kwargs = self._get_rules_kwargs(rules) + rules_kwargs = self._get_rules_kwargs(rules) + run_kwargs = self._get_run_kwargs(run_id, start_time, end_time) req = EvaluateRulesRequest( report_name=report_name, - run=ResourceIdentifier(id=run_id), - **eval_kwargs, + **rules_kwargs, + **run_kwargs, ) res = cast(EvaluateRulesResponse, self._rule_evaluation_stub.EvaluateRules(req)) @@ -75,8 +81,8 @@ def evaluate_against_run( def evaluate_against_assets( self, asset_names: List[str], - start_time: Union[datetime, str, int], - end_time: Union[datetime, str, int], + start_time: Union[datetime, str, int, float], + end_time: Union[datetime, str, int, float], rules: Union[ReportTemplateConfig, List[RuleConfig], List[RuleIdentifier]], report_name: str = "", ) -> ReportService: @@ -84,8 +90,8 @@ def evaluate_against_assets( Args: asset_names: The list of assets to run against. - start_time: The start time to evaluate. - end_time: The end time to evaluate. + start_time: The start time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). + end_time: The end time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). rules: Either a ReportTemplateConfig, a list of RuleConfigs, or a list of RuleIdentifiers (typically from `RuleService.create_external_rules`). report_name: Optional report name. @@ -98,12 +104,12 @@ def evaluate_against_assets( start_time=to_timestamp_pb(start_time), end_time=to_timestamp_pb(end_time), ) - eval_kwargs = self._get_rules_kwargs(rules) + rules_kwargs = self._get_rules_kwargs(rules) req = EvaluateRulesRequest( report_name=report_name, assets=asset_time_range, - **eval_kwargs, + **rules_kwargs, ) res = cast(EvaluateRulesResponse, self._rule_evaluation_stub.EvaluateRules(req)) @@ -113,6 +119,8 @@ def preview_against_run( self, run_id: str, rules: Union[ReportTemplateConfig, List[RuleConfig], List[RuleIdentifier]], + start_time: Optional[Union[datetime, str, int, float]] = None, + end_time: Optional[Union[datetime, str, int, float]] = None, ) -> EvaluateRulesPreviewResponse: """Preview the evaluation of a set of rules against a run. @@ -120,15 +128,18 @@ def preview_against_run( run_id: The Run ID to run against. rules: Either a ReportTemplateConfig, a list of RuleConfigs, or a list of RuleIdentifiers (typically from `RuleService.create_external_rules`). + start_time: Optional start time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). + end_time: Optional end time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). Returns: The EvaluateRulesPreviewResponse object. """ eval_kwargs = self._get_rules_kwargs(rules) + run_kwargs = self._get_run_kwargs(run_id, start_time, end_time) req = EvaluateRulesPreviewRequest( - run=ResourceIdentifier(id=run_id), **eval_kwargs, + **run_kwargs, ) return self._rule_evaluation_stub.EvaluateRulesPreview(req) @@ -138,6 +149,8 @@ def evaluate_external_rules( run_id: str, rules: List[RuleConfig], report_name: str = "", + start_time: Optional[Union[datetime, str, int, float]] = None, + end_time: Optional[Union[datetime, str, int, float]] = None, ) -> ReportService: """Evaluate a set of external rules against a run. 
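# A usage sketch of the new optional evaluation window; the service object, run ID,
# and rule list below are placeholders, and only the keyword arguments are taken from
# the signatures above.
from datetime import datetime, timezone


def evaluate_window(svc, run_id, rules):
    start = datetime(2025, 1, 1, tzinfo=timezone.utc)
    end = datetime(2025, 1, 2, tzinfo=timezone.utc)
    # Restrict both the report evaluation and the preview to [start, end].
    report = svc.evaluate_against_run(run_id, rules, "windowed-report", start_time=start, end_time=end)
    preview = svc.preview_against_run(run_id, rules, start_time=start, end_time=end)
    return report, preview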
@@ -145,12 +158,14 @@ def evaluate_external_rules( run_id: The Run ID to run against. rules: A list of RuleConfigs. These must be external rules. report_name: Optional report name. + start_time: Optional start time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). + end_time: Optional end time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). Returns: A Report object that can be use to get the status of the executed report. """ rule_ids = self._rule_service.create_external_rules(rules) - return self.evaluate_against_run(run_id, rule_ids, report_name) + return self.evaluate_against_run(run_id, rule_ids, report_name, start_time, end_time) def evaluate_external_rules_from_yaml( self, @@ -158,6 +173,8 @@ def evaluate_external_rules_from_yaml( paths: List[Path], named_expressions: Optional[Dict[str, str]] = None, report_name: str = "", + start_time: Optional[Union[datetime, str, int, float]] = None, + end_time: Optional[Union[datetime, str, int, float]] = None, ) -> ReportService: """Evaluate a set of external rules from a YAML config against a run. @@ -165,35 +182,43 @@ def evaluate_external_rules_from_yaml( run_id: The Run ID to run against. paths: The YAML paths to load rules from. report_name: Optional report name. + start_time: Optional start time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). + end_time: Optional end time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). Returns: A Report object that can be use to get the status of the executed report. """ rule_ids = self._rule_service.create_external_rules_from_yaml(paths, named_expressions) - return self.evaluate_against_run(run_id, rule_ids, report_name) + return self.evaluate_against_run(run_id, rule_ids, report_name, start_time, end_time) def preview_external_rules( self, run_id: str, rules: List[RuleConfig], + start_time: Optional[Union[datetime, str, int, float]] = None, + end_time: Optional[Union[datetime, str, int, float]] = None, ) -> EvaluateRulesPreviewResponse: """Preview the evaluation a set of external rules against a run. Args: run_id: The Run ID to run against. rules: A list of RuleConfigs. These must be external rules. + start_time: Optional start time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). + end_time: Optional end time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). Returns: The EvaluateRulesPreviewResponse object. """ rule_ids = self._rule_service.create_external_rules(rules) - return self.preview_against_run(run_id, rule_ids) + return self.preview_against_run(run_id, rule_ids, start_time, end_time) def preview_external_rules_from_yaml( self, run_id: str, paths: List[Path], named_expressions: Optional[Dict[str, str]] = None, + start_time: Optional[Union[datetime, str, int, float]] = None, + end_time: Optional[Union[datetime, str, int, float]] = None, ) -> EvaluateRulesPreviewResponse: """Preview the evaluation a set of external rules from a YAML config against a run. @@ -201,12 +226,14 @@ def preview_external_rules_from_yaml( run_id: The Run ID to run against. paths: The YAML paths to load rules from. named_expressions: The named expressions to substitute in the rules. + start_time: Optional start time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). + end_time: Optional end time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). Returns: The EvaluateRulesPreviewResponse object. 
""" rule_ids = self._rule_service.create_external_rules_from_yaml(paths, named_expressions) - return self.preview_against_run(run_id, rule_ids) + return self.preview_against_run(run_id, rule_ids, start_time, end_time) def _get_rules_kwargs( self, rules: Union[ReportTemplateConfig, List[RuleConfig], List[RuleIdentifier]] @@ -259,3 +286,32 @@ def _get_rules_kwargs( } raise ValueError("Invalid rules argument") + + def _get_run_kwargs( + self, + run_id: str, + start_time: Optional[Union[datetime, str, int, float]] = None, + end_time: Optional[Union[datetime, str, int, float]] = None, + ) -> dict: + """Returns the Run specific keyword arguments for a EvalutateRules request based on the input type. + + Args: + run_id: The Run ID to run against. + start_time: Optional start time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). + end_time: Optional end time to evaluate (datetime, ISO 8601 formatted string, or POSIX timestamp). + + Returns: + dict: The keyword arguments. + """ + run = ResourceIdentifier(id=run_id) + + if start_time or end_time: + return { + "run_time_range": RunTimeRange( + run=run, + start_time=to_timestamp_pb(start_time) if start_time else None, + end_time=to_timestamp_pb(end_time) if end_time else None, + ) + } + else: + return {"run": run} diff --git a/python/pyproject.toml b/python/pyproject.toml index 921a7a856..c55580b85 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "sift_stack_py" -version = "0.7.0" +version = "0.8.4" description = "Python client library for the Sift API" requires-python = ">=3.8" readme = { file = "README.md", content-type = "text/markdown" } @@ -58,6 +58,7 @@ build = ["pdoc==14.5.0", "build==1.2.1"] openssl = ["pyOpenSSL<24.0.0", "types-pyOpenSSL<24.0.0", "cffi~=1.14"] tdms = ["npTDMS~=1.9"] rosbags = ["rosbags~=0.0"] +hdf5 = ["h5py~=3.11", "polars~=1.8"] [build-system] requires = ["setuptools"] diff --git a/rust/CHANGELOG.md b/rust/CHANGELOG.md index 23546d6f2..90196e98c 100644 --- a/rust/CHANGELOG.md +++ b/rust/CHANGELOG.md @@ -3,6 +3,11 @@ All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/). 
+## [v0.5.0] - August 14, 2025 + +- [Add ability to attach and detach runs to SiftStream](https://github.com/sift-stack/sift/pull/293) +- [Add better stream error handling to SiftStream to avoid checkpoint misses](https://github.com/sift-stack/sift/pull/292) + ## [v0.4.2] - July 17, 2025 - [Additional network hiccup resiliency for SiftStream](https://github.com/sift-stack/sift/pull/272) diff --git a/rust/Cargo.lock b/rust/Cargo.lock index 1ed1fcc42..143d9d963 100644 --- a/rust/Cargo.lock +++ b/rust/Cargo.lock @@ -1526,8 +1526,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "sift-stream-bindings" -version = "0.1.0" +version = "0.1.2" dependencies = [ + "pbjson-types", "pyo3", "pyo3-async-runtimes", "pyo3-stub-gen", @@ -1538,7 +1539,7 @@ dependencies = [ [[package]] name = "sift_connect" -version = "0.4.2" +version = "0.5.0" dependencies = [ "dirs", "sift_error", @@ -1549,14 +1550,14 @@ dependencies = [ [[package]] name = "sift_error" -version = "0.4.2" +version = "0.5.0" dependencies = [ "indoc", ] [[package]] name = "sift_rs" -version = "0.4.2" +version = "0.5.0" dependencies = [ "async-trait", "chrono", @@ -1575,7 +1576,7 @@ dependencies = [ [[package]] name = "sift_stream" -version = "0.4.2" +version = "0.5.0" dependencies = [ "async-trait", "bytesize", diff --git a/rust/Cargo.toml b/rust/Cargo.toml index 20b48b082..768297107 100644 --- a/rust/Cargo.toml +++ b/rust/Cargo.toml @@ -11,7 +11,7 @@ members = [ [workspace.package] authors = ["Sift Software Engineers "] -version = "0.4.2" +version = "0.5.0" edition = "2024" categories = ["aerospace", "science::robotics"] homepage = "https://github.com/sift-stack/sift/tree/main/rust" @@ -25,9 +25,9 @@ chrono = { version = "0.4.39", default-features = false, features = ["clock"] } pbjson-types = "^0.7" tonic = { version = "^0.12" } -sift_connect = { version = "0.4.2", path = "crates/sift_connect" } -sift_rs = { version = "0.4.2", path = "crates/sift_rs" } -sift_error = { version = "0.4.2", path = "crates/sift_error" } -sift_stream = { version = "0.4.2", path = "crates/sift_stream" } +sift_connect = { version = "0.5.0", path = "crates/sift_connect" } +sift_rs = { version = "0.5.0", path = "crates/sift_rs" } +sift_error = { version = "0.5.0", path = "crates/sift_error" } +sift_stream = { version = "0.5.0", path = "crates/sift_stream" } sift_stream_bindings = { version = "0.1.0", path = "crates/sift_stream_bindings" } diff --git a/rust/crates/sift_stream/src/stream/builder.rs b/rust/crates/sift_stream/src/stream/builder.rs index bf755077a..13ccc36ff 100644 --- a/rust/crates/sift_stream/src/stream/builder.rs +++ b/rust/crates/sift_stream/src/stream/builder.rs @@ -1,7 +1,9 @@ use super::{ - RetryPolicy, SiftStream, SiftStreamMode, flow::validate_flows, + RetryPolicy, SiftStream, SiftStreamMode, + flow::validate_flows, mode::ingestion_config::IngestionConfigMode, mode::ingestion_config::IngestionConfigModeBackupsManager, + run::{load_run_by_form, load_run_by_id}, }; use crate::backup::{DiskBackupsManager, InMemoryBackupsManager}; use sift_connect::{Credentials, SiftChannel, SiftChannelBuilder}; @@ -9,14 +11,12 @@ use sift_error::prelude::*; use sift_rs::{ ingestion_configs::v2::{FlowConfig, IngestionConfig as IngestionConfigPb}, ping::v1::{PingRequest, ping_service_client::PingServiceClient}, - runs::v2::Run, wrappers::{ assets::{AssetServiceWrapper, new_asset_service}, ingestion_configs::{IngestionConfigServiceWrapper, new_ingestion_config_service}, - runs::{RunServiceWrapper, new_run_service}, }, 
}; -use std::{collections::HashSet, marker::PhantomData, path::PathBuf, time::Duration}; +use std::{marker::PhantomData, path::PathBuf, time::Duration}; /// The default checkpoint interval (1 minute) to use if left unspecified. pub const DEFAULT_CHECKPOINT_INTERVAL: Duration = Duration::from_secs(60); @@ -97,7 +97,7 @@ pub enum RecoveryStrategy { /// expected to be unique across the user's organization; it's used to uniquely identify a /// particular ingestion config which defines the schema of an asset's telemetry. See the /// [top-level documentation](crate#ingestion-configs) for further details. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct IngestionConfigForm { pub asset_name: String, pub client_key: String, @@ -108,7 +108,7 @@ pub struct IngestionConfigForm { /// is used in [SiftStreamBuilder::attach_run]. Note that if there is an existing run with the /// given `client_key`, any other fields that are updated in this [RunForm] will be updated in /// Sift, with the exception of `Option` fields that are `None`. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct RunForm { pub name: String, pub client_key: String, @@ -179,112 +179,6 @@ where self.enable_tls = false; self } - - /// Retrieves a run by run ID. - async fn load_run_by_id(grpc_channel: SiftChannel, run_id: &str) -> Result { - let mut run_service = new_run_service(grpc_channel); - let run = run_service.try_get_run_by_id(run_id).await?; - - #[cfg(feature = "tracing")] - tracing::info!( - run_id = run.run_id, - run_name = run.name, - "successfully retrieve run by ID", - ); - - Ok(run) - } - - /// Retrieves a run or creates a run. If the run exists, this method will also update the run - /// if the `run_form` has changed since the last time it was used. - async fn load_run_by_form(grpc_channel: SiftChannel, run_form: RunForm) -> Result { - #[cfg(feature = "tracing")] - tracing::info_span!("load_run_by_form"); - - let mut run_service = new_run_service(grpc_channel); - - let RunForm { - name, - description, - tags, - client_key, - } = run_form; - - match run_service.try_get_run_by_client_key(&client_key).await { - Err(e) if e.kind() == ErrorKind::NotFoundError => { - let run = run_service - .try_create_run( - &name, - &client_key, - &description.unwrap_or_default(), - tags.unwrap_or_default().as_slice(), - ) - .await?; - - #[cfg(feature = "tracing")] - tracing::info!(run_id = run.run_id, run_name = run.name, "created new run"); - - Ok(run) - } - Err(e) => Err(e), - - Ok(mut run) => { - #[cfg(feature = "tracing")] - tracing::info!( - run_id = run.run_id, - run_name = run.name, - "an existing run was found with the provided client-key" - ); - - // An existing run was found; see if we need to update it. 
- let mut update_mask = Vec::new(); - - if name != run.name { - update_mask.push("name".to_string()); - run.name = name; - } - - if description.as_ref().is_some_and(|d| d != &run.description) { - update_mask.push("description".to_string()); - run.description = description.unwrap_or_default(); - } - match tags { - Some(new_tags) if run.tags.is_empty() => { - update_mask.push("tags".to_string()); - run.tags = new_tags; - } - Some(new_tags) => { - let new_tags_set = HashSet::<&String>::from_iter(new_tags.iter()); - let current_tags_set = HashSet::from_iter(run.tags.iter()); - let difference = new_tags_set.difference(¤t_tags_set); - - if difference.count() == 0 { - update_mask.push("tags".to_string()); - run.tags = new_tags; - } - } - _ => (), - } - - if update_mask.is_empty() { - return Ok(run); - } - - #[cfg(feature = "tracing")] - tracing::info!( - "updating run fields as some fields have changed: {}", - update_mask.join(", ") - ); - - let updated_run = run_service.try_update_run(run, &update_mask).await?; - - #[cfg(feature = "tracing")] - tracing::info!("successfully updated run"); - - Ok(updated_run) - } - } - } } /// Builds a [SiftStream] specifically for ingestion-config based streaming. @@ -368,9 +262,9 @@ impl SiftStreamBuilder { let run = { if let Some(run_id) = run_id.as_ref() { - Some(Self::load_run_by_id(channel.clone(), run_id).await?) + Some(load_run_by_id(channel.clone(), run_id).await?) } else if let Some(selector) = run { - Some(Self::load_run_by_form(channel.clone(), selector).await?) + Some(load_run_by_form(channel.clone(), selector).await?) } else { None } diff --git a/rust/crates/sift_stream/src/stream/mod.rs b/rust/crates/sift_stream/src/stream/mod.rs index af887b4e4..7552e3e24 100644 --- a/rust/crates/sift_stream/src/stream/mod.rs +++ b/rust/crates/sift_stream/src/stream/mod.rs @@ -15,6 +15,9 @@ use mode::ingestion_config::IngestionConfigMode; pub mod retry; pub use retry::RetryPolicy; +/// Concerned with accessing or creating runs for [SiftStream] +pub mod run; + /// Concerned with constructing values of time that make up the time-series sent ot Sift. 
pub mod time; diff --git a/rust/crates/sift_stream/src/stream/mode/ingestion_config.rs b/rust/crates/sift_stream/src/stream/mode/ingestion_config.rs index fc513ba2a..bd09c0cfd 100644 --- a/rust/crates/sift_stream/src/stream/mode/ingestion_config.rs +++ b/rust/crates/sift_stream/src/stream/mode/ingestion_config.rs @@ -1,7 +1,10 @@ use super::super::{ RetryPolicy, SiftStream, SiftStreamMode, channel::ChannelValue, time::TimeValue, }; -use crate::backup::{BackupsManager, DiskBackupsManager, InMemoryBackupsManager}; +use crate::{ + backup::{BackupsManager, DiskBackupsManager, InMemoryBackupsManager}, + stream::run::{RunSelector, load_run_by_form, load_run_by_id}, +}; use futures_core::Stream; use prost::Message; use sift_connect::SiftChannel; @@ -79,6 +82,7 @@ pub enum IngestionConfigModeBackupsManager { enum StreamMessage { Request(IngestWithConfigDataStreamRequest), CheckpointSignal, + ErrorSignal, } impl SiftStreamMode for IngestionConfigMode {} @@ -311,40 +315,64 @@ impl SiftStream { match data_tx.send(StreamMessage::Request(req.clone())).await { Ok(_) => Ok(()), - Err(SendError(_)) => match self.mode.streaming_task.take() { - None => { - self.restart_stream_and_backups_manager(false).await?; - Box::pin(self.send_impl(req)).await - } - - Some(streaming_task) => match streaming_task.await { - Ok(Ok(_)) => { - self.restart_stream_and_backups_manager(false).await?; - Box::pin(self.send_impl(req)).await - } - Ok(Err(err)) => { + Err(SendError(_)) => { + #[cfg(feature = "tracing")] + tracing::debug!( + sift_stream_id = self.mode.sift_stream_id.to_string(), + "returned Err(SendError) during data_tx.send()" + ); + match self.mode.streaming_task.take() { + None => { #[cfg(feature = "tracing")] - tracing::warn!( + tracing::debug!( sift_stream_id = self.mode.sift_stream_id.to_string(), - error = format!("{err:?}"), - "encountered an error while streaming to Sift" + "No streaming task was taken. Awaiting restart_stream_and_backups_manager()" ); - - self.retry(req, err).await + self.restart_stream_and_backups_manager(false).await?; + Box::pin(self.send_impl(req)).await } - Err(err) => { + + Some(streaming_task) => { #[cfg(feature = "tracing")] - tracing::warn!( + tracing::debug!( sift_stream_id = self.mode.sift_stream_id.to_string(), - error = format!("{err:?}"), - "something went wrong while waiting for response from Sift" + "Awaiting streaming_task" ); + match streaming_task.await { + Ok(Ok(_)) => { + #[cfg(feature = "tracing")] + tracing::debug!( + sift_stream_id = self.mode.sift_stream_id.to_string(), + "Streaming_task returned Ok(). 
Awaiting restart_stream_and_backups_manager()" + ); + self.restart_stream_and_backups_manager(false).await?; + Box::pin(self.send_impl(req)).await + } + Ok(Err(err)) => { + #[cfg(feature = "tracing")] + tracing::warn!( + sift_stream_id = self.mode.sift_stream_id.to_string(), + error = format!("{err:?}"), + "encountered an error while streaming to Sift" + ); + + self.retry(req, err).await + } + Err(err) => { + #[cfg(feature = "tracing")] + tracing::warn!( + sift_stream_id = self.mode.sift_stream_id.to_string(), + error = format!("{err:?}"), + "something went wrong while waiting for response from Sift" + ); - self.retry(req, Error::new(ErrorKind::StreamError, err)) - .await + self.retry(req, Error::new(ErrorKind::StreamError, err)) + .await + } + } } - }, - }, + } + } } } @@ -366,6 +394,7 @@ impl SiftStream { .and_modify(|flows| flows.push(flow_config.clone())) .or_insert_with(|| vec![flow_config.clone()]); + #[cfg(feature = "tracing")] tracing::info!( sift_stream_id = self.mode.sift_stream_id.to_string(), flow = flow_config.name, @@ -704,6 +733,27 @@ impl SiftStream { Ok(()) } + /// Attach a run to the stream. Any data provided through [SiftStream::send] after return + /// of this function will be associated with the run. + pub async fn attach_run(&mut self, run_selector: RunSelector) -> Result<()> { + let run = match run_selector { + RunSelector::ById(run_id) => load_run_by_id(self.grpc_channel.clone(), &run_id).await?, + RunSelector::ByForm(run_form) => { + load_run_by_form(self.grpc_channel.clone(), run_form).await? + } + }; + + self.mode.run = Some(run); + + Ok(()) + } + + /// Detach the run, if any, associated with the stream. Any data provided through [SiftStream::send] after + /// this function is called will not be associated with a run. + pub fn detach_run(&mut self) { + self.mode.run = None; + } + /// This will conclude the stream and return when Sift has sent its final response. It is /// important that this method be called in order to obtain the final checkpoint /// acknowledgement from Sift, otherwise some tail-end data may fail to send. @@ -784,6 +834,7 @@ impl SiftStream { let force_checkpoint = Arc::new(Notify::new()); let force_checkpoint_c = force_checkpoint.clone(); + let data_tx_c = data_tx.clone(); let checkpoint_task = tokio::spawn(async move { let mut checkpoint_timer = { let mut timer = tokio::time::interval(checkpoint_interval); @@ -839,7 +890,16 @@ impl SiftStream { ); Ok(res) } - Err(err) => Err(err), + Err(err) => { + #[cfg(feature = "tracing")] + tracing::info!( + sift_stream_id = sift_stream_id.to_string(), + "received error from Sift: {:?}", + err + ); + let _ = data_tx_c.send(StreamMessage::ErrorSignal).await; + Err(err) + } } } _ = force_checkpoint.notified() => { @@ -980,6 +1040,16 @@ impl Stream for DataStream { // Checkpoint was requested.. conclude stream Poll::Ready(None) } + StreamMessage::ErrorSignal => { + #[cfg(feature = "tracing")] + tracing::debug!( + sift_stream_id = self.sift_stream_id.to_string(), + "error signal received", + ); + + // Had error response.. conclude stream + Poll::Ready(None) + } }, Poll::Ready(None) => { // All senders dropped.. conclude stream diff --git a/rust/crates/sift_stream/src/stream/retry.rs b/rust/crates/sift_stream/src/stream/retry.rs index c17dc97d0..b867674d7 100644 --- a/rust/crates/sift_stream/src/stream/retry.rs +++ b/rust/crates/sift_stream/src/stream/retry.rs @@ -3,7 +3,7 @@ use std::time::Duration; /// A retry policy that is used to configure the retry behavior of a Sift stream. 
Most users should /// opt to use the default retry policy provided by [RetryPolicy::default], however, they are able /// to completely configure their own. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct RetryPolicy { pub max_attempts: u8, pub initial_backoff: Duration, diff --git a/rust/crates/sift_stream/src/stream/run.rs b/rust/crates/sift_stream/src/stream/run.rs new file mode 100644 index 000000000..1dd944b85 --- /dev/null +++ b/rust/crates/sift_stream/src/stream/run.rs @@ -0,0 +1,119 @@ +use super::builder::RunForm; +use sift_connect::SiftChannel; +use sift_error::prelude::*; +use sift_rs::{ + runs::v2::Run, + wrappers::runs::{RunServiceWrapper, new_run_service}, +}; +use std::collections::HashSet; + +pub enum RunSelector { + ById(String), + ByForm(RunForm), +} + +/// Retrieves a run by run ID. +pub(super) async fn load_run_by_id(grpc_channel: SiftChannel, run_id: &str) -> Result { + let mut run_service = new_run_service(grpc_channel); + let run = run_service.try_get_run_by_id(run_id).await?; + + #[cfg(feature = "tracing")] + tracing::info!( + run_id = run.run_id, + run_name = run.name, + "successfully retrieve run by ID", + ); + + Ok(run) +} + +/// Retrieves a run or creates a run. If the run exists, this method will also update the run +/// if the `run_form` has changed since the last time it was used. +pub(super) async fn load_run_by_form(grpc_channel: SiftChannel, run_form: RunForm) -> Result { + #[cfg(feature = "tracing")] + tracing::info_span!("load_run_by_form"); + + let mut run_service = new_run_service(grpc_channel); + + let RunForm { + name, + description, + tags, + client_key, + } = run_form; + + match run_service.try_get_run_by_client_key(&client_key).await { + Err(e) if e.kind() == ErrorKind::NotFoundError => { + let run = run_service + .try_create_run( + &name, + &client_key, + &description.unwrap_or_default(), + tags.unwrap_or_default().as_slice(), + ) + .await?; + + #[cfg(feature = "tracing")] + tracing::info!(run_id = run.run_id, run_name = run.name, "created new run"); + + Ok(run) + } + Err(e) => Err(e), + + Ok(mut run) => { + #[cfg(feature = "tracing")] + tracing::info!( + run_id = run.run_id, + run_name = run.name, + "an existing run was found with the provided client-key" + ); + + // An existing run was found; see if we need to update it. 
+ let mut update_mask = Vec::new(); + + if name != run.name { + update_mask.push("name".to_string()); + run.name = name; + } + + if description.as_ref().is_some_and(|d| d != &run.description) { + update_mask.push("description".to_string()); + run.description = description.unwrap_or_default(); + } + match tags { + Some(new_tags) if run.tags.is_empty() => { + update_mask.push("tags".to_string()); + run.tags = new_tags; + } + Some(new_tags) => { + let new_tags_set = HashSet::<&String>::from_iter(new_tags.iter()); + let current_tags_set = HashSet::from_iter(run.tags.iter()); + let difference = new_tags_set.difference(¤t_tags_set); + + if difference.count() == 0 { + update_mask.push("tags".to_string()); + run.tags = new_tags; + } + } + _ => (), + } + + if update_mask.is_empty() { + return Ok(run); + } + + #[cfg(feature = "tracing")] + tracing::info!( + "updating run fields as some fields have changed: {}", + update_mask.join(", ") + ); + + let updated_run = run_service.try_update_run(run, &update_mask).await?; + + #[cfg(feature = "tracing")] + tracing::info!("successfully updated run"); + + Ok(updated_run) + } + } +} diff --git a/rust/crates/sift_stream_bindings/Cargo.toml b/rust/crates/sift_stream_bindings/Cargo.toml index a7bd74f65..fb62c3978 100644 --- a/rust/crates/sift_stream_bindings/Cargo.toml +++ b/rust/crates/sift_stream_bindings/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sift-stream-bindings" -version = "0.1.0" +version = "0.1.2" edition = { workspace = true } authors = { workspace = true } homepage = { workspace = true } @@ -25,3 +25,4 @@ sift_rs = { workspace = true } sift_stream = { workspace = true } sift_error = { workspace = true } pyo3-async-runtimes = { version = "0.25.0", features = ["tokio-runtime"] } +pbjson-types = { workspace = true } diff --git a/rust/crates/sift_stream_bindings/sift_stream_bindings.pyi b/rust/crates/sift_stream_bindings/sift_stream_bindings.pyi index 818e31321..788bc20cd 100644 --- a/rust/crates/sift_stream_bindings/sift_stream_bindings.pyi +++ b/rust/crates/sift_stream_bindings/sift_stream_bindings.pyi @@ -10,6 +10,7 @@ __all__ = [ "DurationPy", "FlowConfigPy", "FlowPy", + "IngestWithConfigDataChannelValuePy", "IngestWithConfigDataStreamRequestPy", "IngestionConfigFormPy", "RecoveryStrategyPy", @@ -29,7 +30,9 @@ class ChannelBitFieldElementPy: name: builtins.str index: builtins.int bit_count: builtins.int - def __new__(cls, name:builtins.str, index:builtins.int, bit_count:builtins.int) -> ChannelBitFieldElementPy: ... + def __new__( + cls, name: builtins.str, index: builtins.int, bit_count: builtins.int + ) -> ChannelBitFieldElementPy: ... @typing.final class ChannelConfigPy: @@ -39,95 +42,155 @@ class ChannelConfigPy: data_type: ChannelDataTypePy enum_types: builtins.list[ChannelEnumTypePy] bit_field_elements: builtins.list[ChannelBitFieldElementPy] - def __new__(cls, name:builtins.str, unit:builtins.str, description:builtins.str, data_type:ChannelDataTypePy, enum_types:typing.Sequence[ChannelEnumTypePy], bit_field_elements:typing.Sequence[ChannelBitFieldElementPy]) -> ChannelConfigPy: ... + def __new__( + cls, + name: builtins.str, + unit: builtins.str, + description: builtins.str, + data_type: ChannelDataTypePy, + enum_types: typing.Sequence[ChannelEnumTypePy], + bit_field_elements: typing.Sequence[ChannelBitFieldElementPy], + ) -> ChannelConfigPy: ... @typing.final class ChannelEnumTypePy: name: builtins.str key: builtins.int - def __new__(cls, name:builtins.str, key:builtins.int) -> ChannelEnumTypePy: ... 
+ def __new__(cls, name: builtins.str, key: builtins.int) -> ChannelEnumTypePy: ... @typing.final class ChannelValuePy: @staticmethod - def bool(name:builtins.str, value:builtins.bool) -> ChannelValuePy: ... + def bool(name: builtins.str, value: builtins.bool) -> ChannelValuePy: ... @staticmethod - def string(name:builtins.str, value:builtins.str) -> ChannelValuePy: ... + def string(name: builtins.str, value: builtins.str) -> ChannelValuePy: ... @staticmethod - def float(name:builtins.str, value:builtins.float) -> ChannelValuePy: ... + def float(name: builtins.str, value: builtins.float) -> ChannelValuePy: ... @staticmethod - def double(name:builtins.str, value:builtins.float) -> ChannelValuePy: ... + def double(name: builtins.str, value: builtins.float) -> ChannelValuePy: ... @staticmethod - def int32(name:builtins.str, value:builtins.int) -> ChannelValuePy: ... + def int32(name: builtins.str, value: builtins.int) -> ChannelValuePy: ... @staticmethod - def uint32(name:builtins.str, value:builtins.int) -> ChannelValuePy: ... + def uint32(name: builtins.str, value: builtins.int) -> ChannelValuePy: ... @staticmethod - def int64(name:builtins.str, value:builtins.int) -> ChannelValuePy: ... + def int64(name: builtins.str, value: builtins.int) -> ChannelValuePy: ... @staticmethod - def uint64(name:builtins.str, value:builtins.int) -> ChannelValuePy: ... + def uint64(name: builtins.str, value: builtins.int) -> ChannelValuePy: ... @staticmethod - def enum_value(name:builtins.str, value:ChannelEnumTypePy) -> ChannelValuePy: ... + def enum_value(name: builtins.str, value: ChannelEnumTypePy) -> ChannelValuePy: ... @staticmethod - def bitfield(name:builtins.str, value:typing.Sequence[ChannelBitFieldElementPy]) -> ChannelValuePy: ... + def bitfield( + name: builtins.str, value: typing.Sequence[builtins.int] + ) -> ChannelValuePy: ... @typing.final class ChannelValueTypePy: @staticmethod - def bool(value:builtins.bool) -> ChannelValueTypePy: ... + def bool(value: builtins.bool) -> ChannelValueTypePy: ... @staticmethod - def string(value:builtins.str) -> ChannelValueTypePy: ... + def string(value: builtins.str) -> ChannelValueTypePy: ... @staticmethod - def float(value:builtins.float) -> ChannelValueTypePy: ... + def float(value: builtins.float) -> ChannelValueTypePy: ... @staticmethod - def double(value:builtins.float) -> ChannelValueTypePy: ... + def double(value: builtins.float) -> ChannelValueTypePy: ... @staticmethod - def int32(value:builtins.int) -> ChannelValueTypePy: ... + def int32(value: builtins.int) -> ChannelValueTypePy: ... @staticmethod - def uint32(value:builtins.int) -> ChannelValueTypePy: ... + def uint32(value: builtins.int) -> ChannelValueTypePy: ... @staticmethod - def int64(value:builtins.int) -> ChannelValueTypePy: ... + def int64(value: builtins.int) -> ChannelValueTypePy: ... @staticmethod - def uint64(value:builtins.int) -> ChannelValueTypePy: ... + def uint64(value: builtins.int) -> ChannelValueTypePy: ... @staticmethod - def enum_value(value:builtins.int) -> ChannelValueTypePy: ... + def enum_value(value: builtins.int) -> ChannelValueTypePy: ... @staticmethod - def bitfield(value:typing.Sequence[builtins.int]) -> ChannelValueTypePy: ... + def bitfield(value: typing.Sequence[builtins.int]) -> ChannelValueTypePy: ... @staticmethod - def bytes(value:typing.Sequence[builtins.int]) -> ChannelValueTypePy: ... + def bytes(value: typing.Sequence[builtins.int]) -> ChannelValueTypePy: ... + @staticmethod + def empty() -> ChannelValueTypePy: ... 
@typing.final class DurationPy: secs: builtins.int nanos: builtins.int - def __new__(cls, secs:builtins.int, nanos:builtins.int) -> DurationPy: ... + def __new__(cls, secs: builtins.int, nanos: builtins.int) -> DurationPy: ... @typing.final class FlowConfigPy: name: builtins.str channels: builtins.list[ChannelConfigPy] - def __new__(cls, name:builtins.str, channels:typing.Sequence[ChannelConfigPy]) -> FlowConfigPy: ... + def __new__( + cls, name: builtins.str, channels: typing.Sequence[ChannelConfigPy] + ) -> FlowConfigPy: ... @typing.final class FlowPy: - def __new__(cls, flow_name:builtins.str, timestamp:TimeValuePy, values:typing.Sequence[ChannelValuePy]) -> FlowPy: ... + def __new__( + cls, + flow_name: builtins.str, + timestamp: TimeValuePy, + values: typing.Sequence[ChannelValuePy], + ) -> FlowPy: ... + +@typing.final +class IngestWithConfigDataChannelValuePy: + @staticmethod + def bool(value: builtins.bool) -> IngestWithConfigDataChannelValuePy: ... + @staticmethod + def string(value: builtins.str) -> IngestWithConfigDataChannelValuePy: ... + @staticmethod + def float(value: builtins.float) -> IngestWithConfigDataChannelValuePy: ... + @staticmethod + def double(value: builtins.float) -> IngestWithConfigDataChannelValuePy: ... + @staticmethod + def int32(value: builtins.int) -> IngestWithConfigDataChannelValuePy: ... + @staticmethod + def uint32(value: builtins.int) -> IngestWithConfigDataChannelValuePy: ... + @staticmethod + def int64(value: builtins.int) -> IngestWithConfigDataChannelValuePy: ... + @staticmethod + def uint64(value: builtins.int) -> IngestWithConfigDataChannelValuePy: ... + @staticmethod + def enum_value(value: builtins.int) -> IngestWithConfigDataChannelValuePy: ... + @staticmethod + def bitfield( + value: typing.Sequence[builtins.int], + ) -> IngestWithConfigDataChannelValuePy: ... + @staticmethod + def empty() -> IngestWithConfigDataChannelValuePy: ... @typing.final class IngestWithConfigDataStreamRequestPy: ingestion_config_id: builtins.str flow: builtins.str timestamp: typing.Optional[TimeValuePy] - channel_values: builtins.list[ChannelValuePy] + channel_values: builtins.list[IngestWithConfigDataChannelValuePy] run_id: builtins.str end_stream_on_validation_error: builtins.bool organization_id: builtins.str - def __new__(cls, ingestion_config_id:builtins.str, flow:builtins.str, timestamp:typing.Optional[TimeValuePy], channel_values:typing.Sequence[ChannelValuePy], run_id:builtins.str, end_stream_on_validation_error:builtins.bool, organization_id:builtins.str) -> IngestWithConfigDataStreamRequestPy: ... + def __new__( + cls, + ingestion_config_id: builtins.str, + flow: builtins.str, + timestamp: typing.Optional[TimeValuePy], + channel_values: typing.Sequence[IngestWithConfigDataChannelValuePy], + run_id: builtins.str, + end_stream_on_validation_error: builtins.bool, + organization_id: builtins.str, + ) -> IngestWithConfigDataStreamRequestPy: ... @typing.final class IngestionConfigFormPy: asset_name: builtins.str flows: builtins.list[FlowConfigPy] client_key: builtins.str - def __new__(cls, asset_name:builtins.str, client_key:builtins.str, flows:typing.Sequence[FlowConfigPy]) -> IngestionConfigFormPy: ... + def __new__( + cls, + asset_name: builtins.str, + client_key: builtins.str, + flows: typing.Sequence[FlowConfigPy], + ) -> IngestionConfigFormPy: ... 
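# A sketch of building a raw ingest request with the new
# IngestWithConfigDataChannelValuePy helper; the IDs below are placeholders, and
# keyword-argument construction is assumed to be accepted by these bindings.
from sift_stream_bindings import (
    IngestWithConfigDataChannelValuePy,
    IngestWithConfigDataStreamRequestPy,
    TimeValuePy,
)

request = IngestWithConfigDataStreamRequestPy(
    ingestion_config_id="ingestion-config-id",
    flow="my-flow",
    timestamp=TimeValuePy.from_timestamp_millis(1_755_000_000_000),
    channel_values=[
        IngestWithConfigDataChannelValuePy.double(1.23),
        IngestWithConfigDataChannelValuePy.empty(),  # channel with no reading this tick
    ],
    run_id="",
    end_stream_on_validation_error=False,
    organization_id="",
)
# A list of such requests can then be passed to SiftStreamPy.send_requests([...]).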
 @typing.final
 class RecoveryStrategyPy:
@@ -136,13 +199,26 @@ class RecoveryStrategyPy:
     max_buffer_size: typing.Optional[builtins.int]
     backups_dir: typing.Optional[builtins.str]
     max_backups_file_size: typing.Optional[builtins.int]
-    def __new__(cls, strategy_type:builtins.str, retry_policy:typing.Optional[RetryPolicyPy], max_buffer_size:typing.Optional[builtins.int], backups_dir:typing.Optional[builtins.str], max_backups_file_size:typing.Optional[builtins.int]) -> RecoveryStrategyPy: ...
-    @staticmethod
-    def retry_only(retry_policy:RetryPolicyPy) -> RecoveryStrategyPy: ...
-    @staticmethod
-    def retry_with_in_memory_backups(retry_policy:RetryPolicyPy, max_buffer_size:typing.Optional[builtins.int]) -> RecoveryStrategyPy: ...
-    @staticmethod
-    def retry_with_disk_backups(retry_policy:RetryPolicyPy, backups_dir:typing.Optional[builtins.str], max_backups_file_size:typing.Optional[builtins.int]) -> RecoveryStrategyPy: ...
+    def __new__(
+        cls,
+        strategy_type: builtins.str,
+        retry_policy: typing.Optional[RetryPolicyPy],
+        max_buffer_size: typing.Optional[builtins.int],
+        backups_dir: typing.Optional[builtins.str],
+        max_backups_file_size: typing.Optional[builtins.int],
+    ) -> RecoveryStrategyPy: ...
+    @staticmethod
+    def retry_only(retry_policy: RetryPolicyPy) -> RecoveryStrategyPy: ...
+    @staticmethod
+    def retry_with_in_memory_backups(
+        retry_policy: RetryPolicyPy, max_buffer_size: typing.Optional[builtins.int]
+    ) -> RecoveryStrategyPy: ...
+    @staticmethod
+    def retry_with_disk_backups(
+        retry_policy: RetryPolicyPy,
+        backups_dir: typing.Optional[builtins.str],
+        max_backups_file_size: typing.Optional[builtins.int],
+    ) -> RecoveryStrategyPy: ...
     @staticmethod
     def default() -> RecoveryStrategyPy: ...
     @staticmethod
@@ -156,7 +232,13 @@ class RetryPolicyPy:
     initial_backoff: DurationPy
     max_backoff: DurationPy
     backoff_multiplier: builtins.int
-    def __new__(cls, max_attempts:builtins.int, initial_backoff:DurationPy, max_backoff:DurationPy, backoff_multiplier:builtins.int) -> RetryPolicyPy: ...
+    def __new__(
+        cls,
+        max_attempts: builtins.int,
+        initial_backoff: DurationPy,
+        max_backoff: DurationPy,
+        backoff_multiplier: builtins.int,
+    ) -> RetryPolicyPy: ...
     @staticmethod
     def default() -> RetryPolicyPy: ...

@@ -166,7 +248,13 @@ class RunFormPy:
     client_key: builtins.str
     description: typing.Optional[builtins.str]
     tags: typing.Optional[builtins.list[builtins.str]]
-    def __new__(cls, name:builtins.str, client_key:builtins.str, description:typing.Optional[builtins.str], tags:typing.Optional[typing.Sequence[builtins.str]]) -> RunFormPy: ...
+    def __new__(
+        cls,
+        name: builtins.str,
+        client_key: builtins.str,
+        description: typing.Optional[builtins.str],
+        tags: typing.Optional[typing.Sequence[builtins.str]],
+    ) -> RunFormPy: ...

 @typing.final
 class SiftStreamBuilderPy:
@@ -178,28 +266,32 @@ class SiftStreamBuilderPy:
     checkpoint_interval: DurationPy
     run: typing.Optional[RunFormPy]
     run_id: typing.Optional[builtins.str]
-    def __new__(cls, uri:builtins.str, apikey:builtins.str) -> SiftStreamBuilderPy: ...
+    def __new__(
+        cls, uri: builtins.str, apikey: builtins.str
+    ) -> SiftStreamBuilderPy: ...
     def build(self) -> typing.Any: ...

 @typing.final
 class SiftStreamPy:
-    def send(self, flow:FlowPy) -> typing.Any: ...
-    def send_requests(self, requests:typing.Sequence[IngestWithConfigDataStreamRequestPy]) -> typing.Any: ...
+    def send(self, flow: FlowPy) -> typing.Any: ...
+    def send_requests(
+        self, requests: typing.Sequence[IngestWithConfigDataStreamRequestPy]
+    ) -> typing.Any: ...
     def finish(self) -> typing.Any: ...

 @typing.final
 class TimeValuePy:
     def __new__(cls) -> TimeValuePy: ...
     @staticmethod
-    def from_timestamp(secs:builtins.int, nsecs:builtins.int) -> TimeValuePy: ...
+    def from_timestamp(secs: builtins.int, nsecs: builtins.int) -> TimeValuePy: ...
     @staticmethod
-    def from_timestamp_millis(millis:builtins.int) -> TimeValuePy: ...
+    def from_timestamp_millis(millis: builtins.int) -> TimeValuePy: ...
     @staticmethod
-    def from_timestamp_micros(micros:builtins.int) -> TimeValuePy: ...
+    def from_timestamp_micros(micros: builtins.int) -> TimeValuePy: ...
     @staticmethod
-    def from_timestamp_nanos(nanos:builtins.int) -> TimeValuePy: ...
+    def from_timestamp_nanos(nanos: builtins.int) -> TimeValuePy: ...
     @staticmethod
-    def from_rfc3339(val:builtins.str) -> TimeValuePy: ...
+    def from_rfc3339(val: builtins.str) -> TimeValuePy: ...

 @typing.final
 class ChannelDataTypePy(Enum):
diff --git a/rust/crates/sift_stream_bindings/src/lib.rs b/rust/crates/sift_stream_bindings/src/lib.rs
index a6a5c4bfb..b238b377f 100644
--- a/rust/crates/sift_stream_bindings/src/lib.rs
+++ b/rust/crates/sift_stream_bindings/src/lib.rs
@@ -25,5 +25,6 @@ fn sift_stream_bindings(m: &Bound<'_, PyModule>) -> PyResult<()> {
     m.add_class::()?;
     m.add_class::()?;
     m.add_class::()?;
+    m.add_class::<IngestWithConfigDataChannelValuePy>()?;
     Ok(())
 }
diff --git a/rust/crates/sift_stream_bindings/src/stream/channel.rs b/rust/crates/sift_stream_bindings/src/stream/channel.rs
index 08c7c6d00..e61536cda 100644
--- a/rust/crates/sift_stream_bindings/src/stream/channel.rs
+++ b/rust/crates/sift_stream_bindings/src/stream/channel.rs
@@ -1,3 +1,4 @@
+use pbjson_types::Empty;
 use pyo3::prelude::*;
 use pyo3_stub_gen::derive::*;
 use sift_rs::common::r#type::v1::{ChannelBitFieldElement, ChannelDataType, ChannelEnumType};
@@ -270,11 +271,11 @@ impl ChannelValuePy {
     }

     #[staticmethod]
-    pub fn bitfield(name: &str, value: Vec<ChannelBitFieldElementPy>) -> Self {
+    pub fn bitfield(name: &str, value: Vec<u8>) -> Self {
         Self {
             inner: ChannelValue {
                 name: name.to_string(),
-                value: Value::BitField(value.into_iter().map(|e| e.index as u8).collect()),
+                value: Value::BitField(value),
             },
         }
     }
@@ -359,4 +360,11 @@ impl ChannelValueTypePy {
             inner: ChannelValueType::Bytes(value),
         }
     }
+
+    #[staticmethod]
+    pub fn empty() -> Self {
+        Self {
+            inner: ChannelValueType::Empty(Empty {}),
+        }
+    }
 }
diff --git a/rust/crates/sift_stream_bindings/src/stream/request.rs b/rust/crates/sift_stream_bindings/src/stream/request.rs
index a9f2e3006..5734ab904 100644
--- a/rust/crates/sift_stream_bindings/src/stream/request.rs
+++ b/rust/crates/sift_stream_bindings/src/stream/request.rs
@@ -1,4 +1,4 @@
-use crate::stream::channel::{ChannelValuePy, ChannelValueTypePy};
+use crate::stream::channel::ChannelValueTypePy;
 use crate::stream::time::TimeValuePy;
 use pyo3::prelude::*;
 use pyo3_stub_gen::derive::*;
@@ -16,7 +16,7 @@ pub struct IngestWithConfigDataStreamRequestPy {
     #[pyo3(get, set)]
     pub timestamp: Option<TimeValuePy>,
     #[pyo3(get, set)]
-    pub channel_values: Vec<ChannelValuePy>,
+    pub channel_values: Vec<IngestWithConfigDataChannelValuePy>,
     #[pyo3(get, set)]
     pub run_id: String,
     #[pyo3(get, set)]
@@ -25,6 +25,14 @@ pub struct IngestWithConfigDataStreamRequestPy {
     pub organization_id: String,
 }

+#[gen_stub_pyclass]
+#[pyclass]
+#[derive(Clone)]
+pub struct IngestWithConfigDataChannelValuePy {
+    #[pyo3(get, set)]
+    pub r#type: ChannelValueTypePy,
+}
+
 // Trait Implementations
 impl From<IngestWithConfigDataStreamRequestPy> for IngestWithConfigDataStreamRequest {
     fn from(request: IngestWithConfigDataStreamRequestPy) -> Self {
@@ -36,7 +44,7 @@ impl From for IngestWithConfigDataStreamReq
                 .channel_values
                 .into_iter()
                 .map(|v| IngestWithConfigDataChannelValue {
-                    r#type: Some(ChannelValueTypePy::from(v.inner.value).into()),
+                    r#type: Some(v.r#type.into()),
                 })
                 .collect(),
             run_id: request.run_id,
@@ -55,7 +63,7 @@ impl IngestWithConfigDataStreamRequestPy {
         ingestion_config_id: String,
         flow: String,
         timestamp: Option<TimeValuePy>,
-        channel_values: Vec<ChannelValuePy>,
+        channel_values: Vec<IngestWithConfigDataChannelValuePy>,
        run_id: String,
        end_stream_on_validation_error: bool,
        organization_id: String,
@@ -71,3 +79,84 @@ impl IngestWithConfigDataStreamRequestPy {
         }
     }
 }
+
+#[gen_stub_pymethods]
+#[pymethods]
+impl IngestWithConfigDataChannelValuePy {
+    #[staticmethod]
+    pub fn bool(value: bool) -> Self {
+        Self {
+            r#type: ChannelValueTypePy::bool(value),
+        }
+    }
+
+    #[staticmethod]
+    pub fn string(value: String) -> Self {
+        Self {
+            r#type: ChannelValueTypePy::string(value),
+        }
+    }
+
+    #[staticmethod]
+    pub fn float(value: f32) -> Self {
+        Self {
+            r#type: ChannelValueTypePy::float(value),
+        }
+    }
+
+    #[staticmethod]
+    pub fn double(value: f64) -> Self {
+        Self {
+            r#type: ChannelValueTypePy::double(value),
+        }
+    }
+
+    #[staticmethod]
+    pub fn int32(value: i32) -> Self {
+        Self {
+            r#type: ChannelValueTypePy::int32(value),
+        }
+    }
+
+    #[staticmethod]
+    pub fn uint32(value: u32) -> Self {
+        Self {
+            r#type: ChannelValueTypePy::uint32(value),
+        }
+    }
+
+    #[staticmethod]
+    pub fn int64(value: i64) -> Self {
+        Self {
+            r#type: ChannelValueTypePy::int64(value),
+        }
+    }
+
+    #[staticmethod]
+    pub fn uint64(value: u64) -> Self {
+        Self {
+            r#type: ChannelValueTypePy::uint64(value),
+        }
+    }
+
+    #[staticmethod]
+    pub fn enum_value(value: u32) -> Self {
+        Self {
+            r#type: ChannelValueTypePy::enum_value(value),
+        }
+    }
+
+    #[staticmethod]
+    pub fn bitfield(value: Vec<u8>) -> Self {
+        Self {
+            r#type: ChannelValueTypePy::bitfield(value),
+        }
+    }
+
+    #[staticmethod]
+    pub fn empty() -> Self {
+        Self {
+            r#type: ChannelValueTypePy::empty(),
+        }
+    }
+}
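A minimal usage sketch of the new IngestWithConfigDataChannelValuePy API, based only on the stub signatures above. The `sift_stream_bindings` import path, the awaited build/send calls, and every identifier (URI, API key, ingestion config ID, flow name) are illustrative assumptions, not part of this change.

# Illustrative sketch only -- assumes the extension module is importable as
# `sift_stream_bindings` and that build()/send_requests()/finish() return
# awaitables, which the stubs above type as `typing.Any`.
import asyncio

from sift_stream_bindings import (
    IngestWithConfigDataChannelValuePy,
    IngestWithConfigDataStreamRequestPy,
    SiftStreamBuilderPy,
    TimeValuePy,
)


async def main() -> None:
    # Hypothetical endpoint and credentials.
    builder = SiftStreamBuilderPy("https://api.example.test", "EXAMPLE_API_KEY")
    stream = await builder.build()

    request = IngestWithConfigDataStreamRequestPy(
        ingestion_config_id="example-ingestion-config-id",  # hypothetical ID
        flow="telemetry",                                    # hypothetical flow name
        timestamp=TimeValuePy.from_timestamp_nanos(1_700_000_000_000_000_000),
        channel_values=[
            IngestWithConfigDataChannelValuePy.double(21.5),
            IngestWithConfigDataChannelValuePy.uint32(3),
            IngestWithConfigDataChannelValuePy.empty(),  # channel with no value this tick
        ],
        run_id="",
        end_stream_on_validation_error=False,
        organization_id="",
    )

    # Batched request-level ingestion, using the typed channel values above.
    await stream.send_requests([request])
    await stream.finish()


asyncio.run(main())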