21 commits
8cedb01  fix(sift-stream-bindings): Pass bitfield values as vectors (#277) (Jul 23, 2025)
5dbbd84  chore(sift-stream-bindings): Version bump for v0.1.1 (#278) (Jul 23, 2025)
ac5fc43  python(chore): Update channel_enum_type proto (#276) (nathan-sift, Jul 25, 2025)
32a0d34  python(feat): Add HDF5 upload service (#261) (nathan-sift, Jul 29, 2025)
79da996  feat(sift-stream-bindings): Add IngestWithConfigDataChannelValuePy wr… (Jul 30, 2025)
812f114  python(fix) Include rule_id when updating rule (#281) (nathan-sift, Jul 30, 2025)
7d47761  python(chore): v0.8.0 prep (#279) (nathan-sift, Jul 30, 2025)
9780ea4  python(feat): Add support for start and end times to rule evaluation … (marc-sift, Jul 31, 2025)
e17e043  python(bug): Catch PermissionError when removing temp files. (#282) (marc-sift, Jul 31, 2025)
e9be6fb  python(chore): v0.8.1 prep (#283) (marc-sift, Jul 31, 2025)
f1b2815  python(fix): Use name only in ChannelReference creation (#284) (nathan-sift, Aug 1, 2025)
49d1843  v0.8.2 prep (#285) (nathan-sift, Aug 1, 2025)
a23f26f  protos: Add 'field_kind' to the IncompatibleProtobufField message (#287) (tsift, Aug 7, 2025)
779feee  python(fix): hdf5 windows fix (#289) (nathan-sift, Aug 11, 2025)
cb216eb  python(chore): v0.8.3 prep (#290) (nathan-sift, Aug 11, 2025)
59305ae  rust(fix): Add ErrorSignal to avoid checkpoint misses (#292) (nathan-sift, Aug 13, 2025)
a4153e0  python(chore): Python rule protos (#291) (nathan-sift, Aug 13, 2025)
ac8ed17  rust(feat): Attach and detach runs in SiftStream (#293) (nathan-sift, Aug 14, 2025)
180e150  rust(build): 0.5.0 prep (#294) (nathan-sift, Aug 14, 2025)
d089526  python(chore): v0.8.4 prep (#295) (nathan-sift, Aug 18, 2025)
48b8363  Add Adhoc Run Creation Endpoint (#297) (bgfreeman7, Aug 21, 2025)
2 changes: 1 addition & 1 deletion .github/workflows/python_ci.yaml
@@ -27,7 +27,7 @@ jobs:
- name: Pip install
run: |
python -m pip install --upgrade pip
pip install '.[development,openssl,tdms,rosbags]'
pip install '.[development,openssl,tdms,rosbags,hdf5]'
- name: Lint
run: |
ruff check
Expand Down
1 change: 1 addition & 0 deletions protos/sift/common/type/v1/channel_enum_type.proto
@@ -9,4 +9,5 @@ import "google/api/field_behavior.proto";
message ChannelEnumType {
string name = 1 [(google.api.field_behavior) = REQUIRED];
uint32 key = 2 [(google.api.field_behavior) = REQUIRED];
bool is_signed = 3 [(google.api.field_behavior) = OPTIONAL];
}
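For reference, a minimal sketch of setting the new field through the generated Python stubs this PR also regenerates; the import path is inferred from python/lib/sift/common/type/v1/channel_enum_type_pb2.py, and the name and key values are placeholders.

```python
from sift.common.type.v1.channel_enum_type_pb2 import ChannelEnumType

# Build an enum type entry; is_signed is the newly added field and defaults to
# False when omitted.
enum_type = ChannelEnumType(name="MOTOR_STATE", key=1, is_signed=True)
print(enum_type)
```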
@@ -95,6 +95,7 @@ message IncompatibleProtobufField {
string field_number = 5;
string reason = 6;
string details = 7;
string field_kind = 8;
}

message CheckProtobufDescriptorCompatibilityResponse {
10 changes: 10 additions & 0 deletions protos/sift/rule_evaluation/v1/rule_evaluation.proto
@@ -129,8 +129,18 @@ message EvaluateRulesFromRuleConfigs {
repeated sift.rules.v1.UpdateRuleRequest configs = 1 [(google.api.field_behavior) = REQUIRED];
}

message RulePreviewOutput {
string rule_name = 1 [(google.api.field_behavior) = REQUIRED];
string rule_id = 2;
string rule_version_id = 3;
string asset_id = 4 [(google.api.field_behavior) = REQUIRED];
int32 exit_code = 5 [(google.api.field_behavior) = REQUIRED];
string stdout = 6;
string stderr = 7;
}

message EvaluateRulesPreviewResponse {
int32 created_annotation_count = 1 [(google.api.field_behavior) = REQUIRED];
repeated sift.rules.v1.DryRunAnnotation dry_run_annotations = 2;
repeated RulePreviewOutput rule_outputs = 3;
}
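As an illustration of how a caller might consume the new rule_outputs field, here is a hedged sketch; it assumes the generated stubs land at sift.rule_evaluation.v1.rule_evaluation_pb2 (a module path mirroring the proto path, not shown in this diff), and the response is whatever a preview RPC returned.

```python
from sift.rule_evaluation.v1.rule_evaluation_pb2 import EvaluateRulesPreviewResponse


def report_rule_outputs(response: EvaluateRulesPreviewResponse) -> None:
    """Summarize per-rule preview results, flagging any rule whose evaluation failed."""
    print(f"created annotations: {response.created_annotation_count}")
    for output in response.rule_outputs:
        status = "ok" if output.exit_code == 0 else f"failed (exit code {output.exit_code})"
        print(f"{output.rule_name} on asset {output.asset_id}: {status}")
        if output.stderr:
            print(f"  stderr: {output.stderr}")
```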
11 changes: 11 additions & 0 deletions protos/sift/rules/v1/rules.proto
@@ -656,6 +656,8 @@ message RuleConditionExpression {
oneof expression {
SingleChannelComparisonExpression single_channel_comparison = 1 [deprecated = true];
CalculatedChannelConfig calculated_channel = 2;
// Experimental - not currently enabled
PythonCode python_code = 3;
}
}

@@ -693,6 +695,15 @@ message ChannelReference {
// Deprecated - use name instead. If provided, name will be joined with the component as `component.name`
string component = 2 [(google.api.field_behavior) = REQUIRED];
}
message PythonChannelReference {
string reference = 1 [(google.api.field_behavior) = REQUIRED];
string name = 2 [(google.api.field_behavior) = REQUIRED];
}
message PythonCode {
repeated PythonChannelReference channel_references = 1 [(google.api.field_behavior) = REQUIRED];
string code = 2 [(google.api.field_behavior) = REQUIRED];
string dependencies = 3 [(google.api.field_behavior) = OPTIONAL];
}

message RuleActionConfiguration {
oneof configuration {
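The python_code expression above is marked experimental and not currently enabled, so the following is illustration only: a sketch of constructing the new messages from generated stubs, assuming they live at sift.rules.v1.rules_pb2. The reference token, channel name, and code body are placeholders, and how references are consumed by the code is not specified in this diff.

```python
from sift.rules.v1.rules_pb2 import (
    PythonChannelReference,
    PythonCode,
    RuleConditionExpression,
)

# Bind a channel to a reference token and attach a placeholder code body.
expression = RuleConditionExpression(
    python_code=PythonCode(
        channel_references=[PythonChannelReference(reference="$1", name="voltage")],
        code="# placeholder rule body",
        dependencies="",  # optional
    )
)
print(expression)
```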
47 changes: 44 additions & 3 deletions protos/sift/runs/v2/runs.proto
@@ -53,6 +53,18 @@ service RunService {
};
}

// Creates an adhoc run. This is used to create a run after the data has already been ingested.
rpc CreateAdhocRun(CreateAdhocRunRequest) returns (CreateAdhocRunResponse) {
option (google.api.http) = {
post: "/api/v2/runs:adhoc"
body: "*"
};
option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_operation) = {
summary: "CreateAdhocRun"
description: "Create an adhoc run."
};
}

// Updates an existing run using the list of fields specified in `update_mask`.
rpc UpdateRun(UpdateRunRequest) returns (UpdateRunResponse) {
option (google.api.http) = {
@@ -118,6 +130,7 @@ message Run {
repeated sift.metadata.v1.MetadataValue metadata = 15 [(google.api.field_behavior) = REQUIRED];
repeated string asset_ids = 16 [(google.api.field_behavior) = REQUIRED];
optional google.protobuf.Timestamp archived_date = 17 [(google.api.field_behavior) = OPTIONAL];
bool is_adhoc = 18 [(google.api.field_behavior) = REQUIRED];
}

// The request for a call to `RunService_GetRun` to retrieve run.
@@ -148,8 +161,8 @@ message ListRunsRequest {
string page_token = 2 [(google.api.field_behavior) = OPTIONAL];

// A [Common Expression Language (CEL)](https://github.com/google/cel-spec) filter string.
// Available fields to filter by are `run_id`, `organization_id`, `name`, `description`, `created_by_user_id`, `modified_by_user_id`,
// `created_date`, `modified_date`, `start_time`, `stop_time`, `client_key`, `is_pinned`, `asset_id`, `asset_name`, `archived_date`,
// Available fields to filter by are `run_id`, `organization_id`, `asset_id`, `asset_name`, `client_key`, `name`, `description`, `created_by_user_id`, `modified_by_user_id`,
// `created_date`, `modified_date`, `start_time`, `stop_time`, `tag_id`, `asset_tag_id`, `duration`, `annotation_comments_count`, `annotation_state`, `archived_date`,
// and `metadata`. Metadata can be used in filters by using `metadata.{metadata_key_name}` as the field name.
// For further information about how to use CELs, please refer to [this guide](https://github.com/google/cel-spec/blob/master/doc/langdef.md#standard-definitions).
// For more information about the fields used for filtering, please refer to [this definition](/docs/api/grpc/protocol-buffers/runs#run). Optional.
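As a concrete illustration of the documented filter fields, a CEL string combining a run field with a metadata key might look like the sketch below; the asset name and metadata key are placeholders.

```python
# A sketch of a CEL filter string over the fields documented above; values are placeholders.
filter_str = 'asset_name == "my-asset" && metadata.operator == "alice"'
```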
@@ -203,11 +216,39 @@ message CreateRunRequest {
repeated sift.metadata.v1.MetadataValue metadata = 9 [(google.api.field_behavior) = OPTIONAL];
}

// The response of a call to `RunService_CreateRun` containing the newly created run.
message CreateRunResponse {
Run run = 1 [(google.api.field_behavior) = REQUIRED];
}

// The request for a call to `RunService_CreateAdhocRun` to create an adhoc run.
message CreateAdhocRunRequest {
// The name that will be assigned to the new run.
string name = 1 [(google.api.field_behavior) = REQUIRED];
// A description of the new run.
string description = 2 [(google.api.field_behavior) = REQUIRED];
// The time at which data ingestion began for this new run. It must be before the `stop_time`.
google.protobuf.Timestamp start_time = 3 [(google.api.field_behavior) = REQUIRED];
// The time at which data ingestion concluded for this new run.
google.protobuf.Timestamp stop_time = 4 [(google.api.field_behavior) = REQUIRED];
// A list of asset IDs to associate with the new run.
repeated string asset_ids = 5 [(google.api.field_behavior) = REQUIRED];
// Tags to associate with the new run.
repeated string tags = 6 [(google.api.field_behavior) = OPTIONAL];
// The metadata values associated with this run.
repeated sift.metadata.v1.MetadataValue metadata = 7 [(google.api.field_behavior) = OPTIONAL];
// An arbitrary user-chosen key that uniquely identifies this run. Optional, though it is recommended to provide one.
optional string client_key = 8 [
(google.api.field_behavior) = OPTIONAL
];
}


// The response of a call to `RunService_CreateAdhocRun` containing the newly created adhoc run.
message CreateAdhocRunResponse {
Run run = 1 [(google.api.field_behavior) = REQUIRED];
}


// The request for a call to `RunService_UpdateRun` to update an existing run.
message UpdateRunRequest {
// The run to update. The run's `run_id` field is used to identify the run to update
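To show how the new endpoint might be exercised over its REST mapping (POST /api/v2/runs:adhoc), here is a hedged sketch. The bearer-token auth header and camelCase JSON field names follow common grpc-gateway conventions and are assumptions, not something this diff confirms; the run name, times, and asset ID are placeholders.

```python
import os

import requests

base_url = os.environ["SIFT_API_URI"]  # your Sift REST endpoint (placeholder)
api_key = os.environ["SIFT_API_KEY"]

# Fields mirror CreateAdhocRunRequest; start_time must be before stop_time.
payload = {
    "name": "post-ingestion-run",
    "description": "Run created after the data was ingested",
    "startTime": "2025-08-01T00:00:00Z",
    "stopTime": "2025-08-01T01:00:00Z",
    "assetIds": ["<asset-id>"],  # placeholder asset ID
    "tags": ["adhoc"],
}

resp = requests.post(
    f"{base_url}/api/v2/runs:adhoc",
    json=payload,
    headers={"Authorization": f"Bearer {api_key}"},
)
resp.raise_for_status()
print(resp.json()["run"])  # the created Run, including the new is_adhoc flag
```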
22 changes: 22 additions & 0 deletions python/CHANGELOG.md
@@ -3,6 +3,28 @@ All notable changes to this project will be documented in this file.

This project adheres to [Semantic Versioning](http://semver.org/).

## [v0.8.4] - August 18, 2025
- [Add experimental protos for development](https://github.com/sift-stack/sift/pull/291)

## [v0.8.3] - August 11, 2025
- [Fix windows utf-8 encoding bug with Hdf5UploadService](https://github.com/sift-stack/sift/pull/289)

## [v0.8.2] - August 1, 2025
- [Use name only in ChannelReference creation](https://github.com/sift-stack/sift/pull/284)

## [v0.8.1] - July 31, 2025
- [Catch PermissionError when removing temp files](https://github.com/sift-stack/sift/pull/282)
- [Add support for start and end times to rule evaluation](https://github.com/sift-stack/sift/pull/268)

## [v0.8.0] - July 29, 2025
### What's New
#### HDF5 Upload Service
Adds support for uploading HDF5 files to Sift for ingestion through the addition of the `Hdf5UploadService` and `Hdf5Config`. See `examples/data_import/hdf5` for an example of how to upload HDF5 files.

### Full Changelog
- [Add HDF5 upload service](https://github.com/sift-stack/sift/pull/261)
- [Fix a bug where the rule_id was not always passed to Sift when updating rules](https://github.com/sift-stack/sift/pull/281)

## [v0.7.0] - June 24, 2025
### What's New
#### AssetService and Metadata Support
72 changes: 72 additions & 0 deletions python/examples/data_import/hdf5/main.py
@@ -0,0 +1,72 @@
import os

import h5py
from dotenv import load_dotenv
from sift_py.data_import.config import Hdf5Config
from sift_py.data_import.hdf5 import Hdf5UploadService
from sift_py.rest import SiftRestConfig

if __name__ == "__main__":
"""
    Example of uploading an HDF5 file into Sift.
"""

load_dotenv()

sift_uri = os.getenv("SIFT_API_URI")
assert sift_uri, "expected 'SIFT_API_URI' environment variable to be set"

apikey = os.getenv("SIFT_API_KEY")
assert apikey, "expected 'SIFT_API_KEY' environment variable to be set"

asset_name = os.getenv("ASSET_NAME")
assert asset_name, "expected 'ASSET_NAME' environment variable to be set"

# Create an HDF5 configuration file to define the data to be ingested
hdf5_config_dict = {
"asset_name": asset_name,
"time": {
"format": "TIME_FORMAT_ABSOLUTE_DATETIME",
},
"data": [],
}

# For this example, each HDF5 dataset uses the common '/timestamp' dataset
# Each is of type double and contains its channel name in the 'Name' attribute
with h5py.File("sample_data.h5", "r") as f:
for dset in f.values():
# Skip adding the timestamp dataset
if dset.name == "/timestamp":
continue

hdf5_config_dict["data"].append(
{
"name": dset.attrs["Name"],
"time_dataset": "/timestamp",
"value_dataset": dset.name,
"data_type": "CHANNEL_DATA_TYPE_DOUBLE",
}
)

hdf5_config = Hdf5Config(hdf5_config_dict)

rest_config: SiftRestConfig = {
"uri": sift_uri,
"apikey": apikey,
}

hdf5_upload_service = Hdf5UploadService(rest_config)
import_service = hdf5_upload_service.upload(
"sample_data.h5",
hdf5_config,
)

# Wait until the data import is completed.
# The hdf5 upload service may split the upload into multiple parts
data_imports = import_service.wait_until_all_complete()

# Print the data import details and final status.
for data_import in data_imports:
print(data_import.model_dump_json(indent=1))

print("Upload example complete!")
2 changes: 2 additions & 0 deletions python/examples/data_import/hdf5/requirements.txt
@@ -0,0 +1,2 @@
python-dotenv
sift-stack-py[hdf5]
Binary file added python/examples/data_import/hdf5/sample_data.h5
Binary file not shown.
6 changes: 4 additions & 2 deletions python/lib/sift/common/type/v1/channel_enum_type_pb2.py

Some generated files are not rendered by default.

5 changes: 4 additions & 1 deletion python/lib/sift/common/type/v1/channel_enum_type_pb2.pyi
@@ -16,14 +16,17 @@ class ChannelEnumType(google.protobuf.message.Message):

NAME_FIELD_NUMBER: builtins.int
KEY_FIELD_NUMBER: builtins.int
IS_SIGNED_FIELD_NUMBER: builtins.int
name: builtins.str
key: builtins.int
is_signed: builtins.bool
def __init__(
self,
*,
name: builtins.str = ...,
key: builtins.int = ...,
is_signed: builtins.bool = ...,
) -> None: ...
def ClearField(self, field_name: typing.Literal["key", b"key", "name", b"name"]) -> None: ...
def ClearField(self, field_name: typing.Literal["is_signed", b"is_signed", "key", b"key", "name", b"name"]) -> None: ...

global___ChannelEnumType = ChannelEnumType