Skip to content

Commit

Permalink
refactor ResolvedDocumentId to not use references (#25374)
Browse files Browse the repository at this point in the history
Using references with types that implement `Copy` makes the types annoying to work with, since you have to dereference them all over the place.
This allows us to later change methods like `ResolvedDocument::id` to construct a new id from its constituent parts, without changing the callers.

Also, replace `DocumentIdV6` with the equivalent `DeveloperDocumentId`.

GitOrigin-RevId: fae8397cb5660ea40aa7b7805ad8c9fd67ca41c5
  • Loading branch information
ldanilek authored and Convex, Inc. committed May 4, 2024
1 parent 91061e6 commit 2693de9
Show file tree
Hide file tree
Showing 78 changed files with 426 additions and 393 deletions.
12 changes: 8 additions & 4 deletions crates/application/src/application_function_runner/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ use usage_tracking::{
};
use value::{
heap_size::HeapSize,
id_v6::DocumentIdV6,
id_v6::DeveloperDocumentId,
};
use vector::{
PublicVectorSearchQueryResult,
Expand Down Expand Up @@ -1979,7 +1979,7 @@ impl<RT: Runtime> ActionCallbacks for ApplicationFunctionRunner<RT> {
&self,
identity: Identity,
entry: FileStorageEntry,
) -> anyhow::Result<DocumentIdV6> {
) -> anyhow::Result<DeveloperDocumentId> {
let mut tx = self.database.begin(identity).await?;
let id = self.file_storage.store_file_entry(&mut tx, entry).await?;
self.database
Expand Down Expand Up @@ -2010,7 +2010,7 @@ impl<RT: Runtime> ActionCallbacks for ApplicationFunctionRunner<RT> {
udf_args: Vec<JsonValue>,
scheduled_ts: UnixTimestamp,
context: ExecutionContext,
) -> anyhow::Result<DocumentIdV6> {
) -> anyhow::Result<DeveloperDocumentId> {
let mut tx = self.database.begin(identity).await?;
let (udf_path, udf_args) = validate_schedule_args(
udf_path,
Expand All @@ -2034,7 +2034,11 @@ impl<RT: Runtime> ActionCallbacks for ApplicationFunctionRunner<RT> {
Ok(virtual_id)
}

async fn cancel_job(&self, identity: Identity, virtual_id: DocumentIdV6) -> anyhow::Result<()> {
async fn cancel_job(
&self,
identity: Identity,
virtual_id: DeveloperDocumentId,
) -> anyhow::Result<()> {
let mut tx = self.database.begin(identity).await?;
VirtualSchedulerModel::new(&mut tx)
.cancel(virtual_id)
Expand Down
20 changes: 12 additions & 8 deletions crates/application/src/export_worker.rs
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ use usage_tracking::{
};
use value::{
export::ValueFormat,
id_v6::DocumentIdV6,
id_v6::DeveloperDocumentId,
TableId,
TableNumber,
VirtualTableMapping,
Expand Down Expand Up @@ -502,8 +502,10 @@ impl<RT: Runtime> ExportWorker<RT> {
pin_mut!(stream);
while let Some((doc, _ts)) = stream.try_next().await? {
let file_storage_entry = ParsedDocument::<FileStorageEntry>::try_from(doc)?;
let virtual_storage_id =
DocumentIdV6::new(virtual_table_id, file_storage_entry.id().internal_id());
let virtual_storage_id = DeveloperDocumentId::new(
virtual_table_id,
file_storage_entry.id().internal_id(),
);
let creation_time = f64::from(
file_storage_entry
.creation_time()
Expand All @@ -528,8 +530,10 @@ impl<RT: Runtime> ExportWorker<RT> {
pin_mut!(stream);
while let Some((doc, _ts)) = stream.try_next().await? {
let file_storage_entry = ParsedDocument::<FileStorageEntry>::try_from(doc)?;
let virtual_storage_id =
DocumentIdV6::new(virtual_table_id, file_storage_entry.id().internal_id());
let virtual_storage_id = DeveloperDocumentId::new(
virtual_table_id,
file_storage_entry.id().internal_id(),
);
// Add an extension, which isn't necessary for anything and might be incorrect,
// but allows the file to be viewed at a glance in most cases.
let extension_guess = file_storage_entry
Expand All @@ -550,7 +554,7 @@ impl<RT: Runtime> ExportWorker<RT> {
.with_context(|| {
format!(
"file missing from storage: {} with key {:?}",
file_storage_entry.id_v6().encode(),
file_storage_entry.developer_id().encode(),
file_storage_entry.storage_key,
)
})?;
Expand Down Expand Up @@ -581,7 +585,7 @@ impl<RT: Runtime> ExportWorker<RT> {
);
pin_mut!(stream);
while let Some((doc, _ts)) = stream.try_next().await? {
generated_schema.insert(doc.value(), doc.id_v6());
generated_schema.insert(doc.value(), doc.developer_id());
}
}

Expand Down Expand Up @@ -985,7 +989,7 @@ mod tests {
};
let doc = UserFacingModel::new(&mut tx).get(id, None).await?.unwrap();
let doc = doc.to_resolved(&tx.table_mapping().inject_table_id())?;
let id_v6 = doc.id_v6().encode();
let id_v6 = doc.developer_id().encode();
expected_export_entries.insert(
format!("table_{i}/documents.jsonl"),
format!(
Expand Down
8 changes: 4 additions & 4 deletions crates/application/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -248,7 +248,7 @@ use usage_tracking::{
UsageCounter,
};
use value::{
id_v6::DocumentIdV6,
id_v6::DeveloperDocumentId,
sha256::Sha256Digest,
ConvexValue,
Namespace,
Expand Down Expand Up @@ -681,7 +681,7 @@ impl<RT: Runtime> Application<RT> {
&self,
identity: Identity,
snapshot: Option<Timestamp>,
cursor: Option<DocumentIdV6>,
cursor: Option<DeveloperDocumentId>,
table_filter: Option<TableName>,
) -> anyhow::Result<SnapshotPage> {
self.database
Expand Down Expand Up @@ -1681,7 +1681,7 @@ impl<RT: Runtime> Application<RT> {
mode: ImportMode,
upload_token: ClientDrivenUploadToken,
part_tokens: Vec<ClientDrivenUploadPartToken>,
) -> anyhow::Result<DocumentIdV6> {
) -> anyhow::Result<DeveloperDocumentId> {
if !identity.is_admin() {
anyhow::bail!(ErrorMetadata::forbidden(
"InvalidImport",
Expand Down Expand Up @@ -2012,7 +2012,7 @@ impl<RT: Runtime> Application<RT> {
content_type: Option<ContentType>,
expected_sha256: Option<Sha256Digest>,
body: impl Stream<Item = anyhow::Result<Bytes>> + Send,
) -> anyhow::Result<DocumentIdV6> {
) -> anyhow::Result<DeveloperDocumentId> {
let storage_id = self
.file_storage
.store_file(
Expand Down
35 changes: 18 additions & 17 deletions crates/application/src/snapshot_import.rs
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ use usage_tracking::{
UsageCounter,
};
use value::{
id_v6::DocumentIdV6,
id_v6::DeveloperDocumentId,
sha256::Sha256Digest,
val,
ConvexObject,
Expand Down Expand Up @@ -776,7 +776,7 @@ enum ImportUnit {
Object(JsonValue),
NewTable(TableName),
GeneratedSchema(TableName, GeneratedSchema<ProdConfigWithOptionalFields>),
StorageFileChunk(DocumentIdV6, Bytes),
StorageFileChunk(DeveloperDocumentId, Bytes),
}

static GENERATED_SCHEMA_PATTERN: LazyLock<Regex> =
Expand Down Expand Up @@ -949,12 +949,13 @@ where
continue;
}
let entry_reader = zip_reader.entry_reader(i).await?;
let storage_id = DocumentIdV6::decode(storage_id_str).map_err(|e| {
ErrorMetadata::bad_request(
"InvalidStorageId",
format!("_storage id '{storage_id_str}' invalid: {e}"),
)
})?;
let storage_id =
DeveloperDocumentId::decode(storage_id_str).map_err(|e| {
ErrorMetadata::bad_request(
"InvalidStorageId",
format!("_storage id '{storage_id_str}' invalid: {e}"),
)
})?;
tracing::info!(
"importing zip file containing storage file {}",
storage_id.encode()
Expand Down Expand Up @@ -1079,7 +1080,7 @@ async fn parse_generated_schema<'a, T: ShapeConfig, R: AsyncRead + Unpin>(
let export_context = ExportContext::try_from(value.clone())
.map_err(|e| ImportError::InvalidConvexValue(lineno, e))?;
overrides.insert(
DocumentIdV6::decode(key)
DeveloperDocumentId::decode(key)
.map_err(|e| ImportError::InvalidConvexValue(lineno, e.into()))?,
export_context,
);
Expand Down Expand Up @@ -1108,7 +1109,7 @@ pub async fn upload_import_file<RT: Runtime>(
format: ImportFormat,
mode: ImportMode,
body_stream: BoxStream<'_, anyhow::Result<Bytes>>,
) -> anyhow::Result<DocumentIdV6> {
) -> anyhow::Result<DeveloperDocumentId> {
if !identity.is_admin() {
anyhow::bail!(ImportError::Unauthorized);
}
Expand All @@ -1122,7 +1123,7 @@ pub async fn store_uploaded_import<RT: Runtime>(
format: ImportFormat,
mode: ImportMode,
object_key: ObjectKey,
) -> anyhow::Result<DocumentIdV6> {
) -> anyhow::Result<DeveloperDocumentId> {
let (_, id, _) = application
.database
.execute_with_overloaded_retries(
Expand All @@ -1147,7 +1148,7 @@ pub async fn store_uploaded_import<RT: Runtime>(
pub async fn perform_import<RT: Runtime>(
application: &Application<RT>,
identity: Identity,
import_id: DocumentIdV6,
import_id: DeveloperDocumentId,
) -> anyhow::Result<()> {
if !identity.is_admin() {
anyhow::bail!(ImportError::Unauthorized);
Expand Down Expand Up @@ -1186,7 +1187,7 @@ fn wrap_import_err(e: anyhow::Error) -> anyhow::Error {
async fn wait_for_import_worker<RT: Runtime>(
application: &Application<RT>,
identity: Identity,
import_id: DocumentIdV6,
import_id: DeveloperDocumentId,
) -> anyhow::Result<ParsedDocument<SnapshotImport>> {
let snapshot_import = loop {
let mut tx = application.begin(identity.clone()).await?;
Expand Down Expand Up @@ -1669,7 +1670,7 @@ async fn import_storage_table<RT: Runtime>(
lineno += 1;
let metadata: FileStorageZipMetadata = serde_json::from_value(exported_value)
.map_err(|e| ImportError::InvalidConvexValue(lineno, e.into()))?;
let id = DocumentIdV6::decode(&metadata.id)
let id = DeveloperDocumentId::decode(&metadata.id)
.map_err(|e| ImportError::InvalidConvexValue(lineno, e.into()))?;
anyhow::ensure!(
*id.table() == virtual_table_number,
Expand Down Expand Up @@ -2206,7 +2207,7 @@ async fn table_number_for_import(
let JsonValue::String(id) = first_id else {
return None;
};
let id_v6 = DocumentIdV6::decode(id).ok()?;
let id_v6 = DeveloperDocumentId::decode(id).ok()?;
Some(*id_v6.table())
},
ImportUnit::NewTable(_) => None,
Expand Down Expand Up @@ -2351,7 +2352,7 @@ mod tests {
use value::{
assert_obj,
assert_val,
id_v6::DocumentIdV6,
id_v6::DeveloperDocumentId,
ConvexObject,
FieldName,
TableName,
Expand Down Expand Up @@ -2987,7 +2988,7 @@ a
let identity = new_admin_id();

let storage_id = "kg21pzwemsm55e1fnt2kcsvgjh6h6gtf";
let storage_idv6 = DocumentIdV6::decode(storage_id)?;
let storage_idv6 = DeveloperDocumentId::decode(storage_id)?;

let objects = stream::iter(vec![
Ok(ImportUnit::NewTable("_storage".parse()?)),
Expand Down
4 changes: 2 additions & 2 deletions crates/common/src/bootstrap_model/schema.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ use std::str::FromStr;

use errors::ErrorMetadata;
use value::{
id_v6::DocumentIdV6,
id_v6::DeveloperDocumentId,
GenericDocumentId,
ResolvedDocumentId,
TableId,
Expand All @@ -23,7 +23,7 @@ pub fn parse_schema_id(
Ok(s) => s.map_table(table_mapping.inject_table_number()),
Err(_) => {
// Try parsing as an IDv6 ID
let id = DocumentIdV6::decode(schema_id)?;
let id = DeveloperDocumentId::decode(schema_id)?;
id.to_resolved(&table_mapping.inject_table_id())
},
}
Expand Down
Loading

0 comments on commit 2693de9

Please sign in to comment.