1 change: 1 addition & 0 deletions Cargo.lock
@@ -9359,6 +9359,7 @@ dependencies = [
"re_types",
"re_types_core",
"tempfile",
"tokio",
"tonic",
"url",
]
1 change: 1 addition & 0 deletions crates/store/re_redap_tests/Cargo.toml
@@ -43,6 +43,7 @@ insta.workspace = true
itertools.workspace = true
prost-types.workspace = true
tempfile.workspace = true
tokio.workspace = true
tonic.workspace = true
url.workspace = true

2 changes: 2 additions & 0 deletions crates/store/re_redap_tests/src/tests/mod.rs
@@ -94,6 +94,8 @@ define_redap_tests! {
    register_partition::register_and_scan_simple_dataset_with_properties,
    register_partition::register_and_scan_simple_dataset_with_properties_out_of_order,
    register_partition::register_with_prefix,
    register_partition::register_partition_bumps_timestamp,
    update_entry::update_entry_tests,
    update_entry::update_entry_bumps_timestamp,
    write_table::write_table
}
83 changes: 83 additions & 0 deletions crates/store/re_redap_tests/src/tests/register_partition.rs
@@ -313,6 +313,89 @@ pub async fn register_and_scan_empty_dataset(service: impl RerunCloudService) {
    scan_dataset_manifest_and_snapshot(&service, dataset_name, "empty").await;
}

pub async fn register_partition_bumps_timestamp(service: impl RerunCloudService) {
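    // Helper: read the dataset entry back and return its `updated_at` timestamp
    // as nanoseconds since the Unix epoch.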
    async fn get_dataset_updated_at_nanos(
        service: &impl RerunCloudService,
        dataset_name: &str,
    ) -> i64 {
        service
            .read_dataset_entry(
                tonic::Request::new(ReadDatasetEntryRequest {})
                    .with_entry_name(dataset_name)
                    .unwrap(),
            )
            .await
            .unwrap()
            .into_inner()
            .dataset
            .unwrap()
            .details
            .as_ref()
            .unwrap()
            .updated_at
            .as_ref()
            .map(|ts| ts.seconds * 1_000_000_000 + ts.nanos as i64)
            .unwrap()
    }

    let dataset_name = "timestamp_test_dataset";

    //
    // Create a dataset
    //

    service.create_dataset_entry_with_name(dataset_name).await;

    let initial_updated_at_nanos = get_dataset_updated_at_nanos(&service, dataset_name).await;

    // Small delay to ensure timestamp difference
    tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;

    //
    // Register a partition - this should update the timestamp
    //

    let data_sources_def = DataSourcesDefinition::new_with_tuid_prefix(
        1,
        [LayerDefinition::simple("partition1", &["my/entity"])],
    );

    service
        .register_with_dataset_name(dataset_name, data_sources_def.to_data_sources())
        .await;

    let after_register_updated_at_nanos =
        get_dataset_updated_at_nanos(&service, dataset_name).await;

    assert!(
        after_register_updated_at_nanos > initial_updated_at_nanos,
        "Timestamp should be updated after registering partition. Initial: {initial_updated_at_nanos}, After register: {after_register_updated_at_nanos}"
    );

    // Small delay to ensure timestamp difference
    tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;

    //
    // Register another layer to the same partition - this should also update the timestamp
    //

    let layer_data_sources_def = DataSourcesDefinition::new_with_tuid_prefix(
        2,
        [LayerDefinition::simple("partition1", &["another/entity"]).layer_name("layer2")],
    );

    service
        .register_with_dataset_name(dataset_name, layer_data_sources_def.to_data_sources())
        .await;

    let after_layer_updated_at_nanos = get_dataset_updated_at_nanos(&service, dataset_name).await;

    assert!(
        after_layer_updated_at_nanos > after_register_updated_at_nanos,
        "Timestamp should be updated after adding a layer. After register: {after_register_updated_at_nanos}, After layer: {after_layer_updated_at_nanos}"
    );
}

// ---

async fn scan_partition_table_and_snapshot(
65 changes: 65 additions & 0 deletions crates/store/re_redap_tests/src/tests/update_entry.rs
@@ -179,6 +179,71 @@ pub async fn update_entry_tests(service: impl RerunCloudService) {
    );
}

pub async fn update_entry_bumps_timestamp(service: impl RerunCloudService) {
    //
    // Create a dataset
    //

    let dataset_name = "timestamp_test_dataset";
    let dataset_entry = create_dataset_entry(&service, dataset_name).await.unwrap();

    let dataset_id = dataset_entry.details.id;
    let initial_updated_at = dataset_entry.details.updated_at;

    // Small delay to ensure timestamp difference
    tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;

    //
    // Rename the dataset - this should update the timestamp
    //

    let new_name = "renamed_dataset";
    let response = update_entry(
        &service,
        UpdateEntryRequest {
            id: dataset_id,
            entry_details_update: EntryDetailsUpdate {
                name: Some(new_name.to_owned()),
            },
        },
    )
    .await
    .unwrap();

    let after_rename_updated_at = response.entry_details.updated_at;

    assert!(
        after_rename_updated_at > initial_updated_at,
        "Timestamp should be updated after rename. Initial: {initial_updated_at:?}, After rename: {after_rename_updated_at:?}"
    );

    // Small delay to ensure timestamp difference
    tokio::time::sleep(tokio::time::Duration::from_millis(10)).await;

    //
    // Rename to the same name - this should NOT update the timestamp
    //

    let response2 = update_entry(
        &service,
        UpdateEntryRequest {
            id: dataset_id,
            entry_details_update: EntryDetailsUpdate {
                name: Some(new_name.to_owned()),
            },
        },
    )
    .await
    .unwrap();

    let after_second_rename_updated_at = response2.entry_details.updated_at;

    assert_eq!(
        after_second_rename_updated_at, after_rename_updated_at,
        "Timestamp should NOT be updated when renaming to the same name. After first rename: {after_rename_updated_at:?}, After second rename: {after_second_rename_updated_at:?}"
    );
}

// ---

async fn create_dataset_entry(