bugfix: update should allow the user to set time_stamp and location
skeptrunedev authored and densumesh committed May 8, 2024
1 parent 0d59052 commit 89e3715
Showing 9 changed files with 51 additions and 40 deletions.
4 changes: 2 additions & 2 deletions search/src/HomeSearch.tsx
@@ -22,11 +22,11 @@ export const HomeSearch = () => {
   const groupUnique = params.get("groupUnique") === "true" || false;
   const slimChunks = params.get("slimChunks") === "true" || false;
   const getTotalPages = params.get("getTotalPages") === "true" || false;
-  const highlightResults = params.get("highlightResults") === "true" || false;
+  const highlightResults = params.get("highlightResults") === "true" || true;
   const highlightDelimiters = params
     .get("highlightDelimiters")
     ?.split(",")
-    .filter((delimiter) => delimiter !== "") ?? ["?", ",", ".", "!"];
+    .filter((delimiter) => delimiter !== "") ?? ["?", ".", "!"];
 
   return (
     <div class="flex min-h-screen flex-col bg-white dark:bg-shark-800 dark:text-white">
2 changes: 1 addition & 1 deletion search/src/components/GroupPage.tsx
@@ -112,7 +112,7 @@ export const GroupPage = (props: GroupPageProps) => {
     setGetTotalPages(location.query.getTotalPages === "true");
     setHighlightResults(location.query.highlightResults === "true");
     setHighlightDelimiters(
-      location.query.highlightDelimiters?.split(",") ?? ["?", ",", ".", "!"],
+      location.query.highlightDelimiters?.split(",") ?? ["?", ".", "!"],
     );
   });
 
11 changes: 10 additions & 1 deletion search/src/components/ResultsPage.tsx
@@ -72,6 +72,7 @@ const ResultsPage = (props: ResultsPageProps) => {
   const [noResults, setNoResults] = createSignal(false);
   const [filters, setFilters] = createSignal<Filters>({} as Filters);
   const [totalPages, setTotalPages] = createSignal(0);
+  const [triggerSearch, setTriggerSearch] = createSignal(false);
 
   const fetchChunkCollections = () => {
     if (!$currentUser?.()) return;
@@ -130,6 +131,8 @@ const ResultsPage = (props: ResultsPageProps) => {
     const dataset = $dataset?.();
     if (!dataset) return;
 
+    triggerSearch();
+
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
     const requestBody: any = {
       query: props.query,
@@ -140,7 +143,7 @@ const ResultsPage = (props: ResultsPageProps) => {
       slim_chunks: props.slimChunks ?? false,
       get_total_pages: props.getTotalPages ?? false,
       highlight_results: props.highlightResults ?? true,
-      highlight_delimiters: props.highlightDelimiters ?? ["?", ",", ".", "!"],
+      highlight_delimiters: props.highlightDelimiters ?? ["?", ".", "!"],
     };
 
     let searchRoute = "chunk/search";
@@ -250,6 +253,12 @@ const ResultsPage = (props: ResultsPageProps) => {
       return filtersKey;
     }, "");
 
+  createEffect(() => {
+    window.addEventListener("triggerSearch", () => {
+      setTriggerSearch((prev) => !prev);
+    });
+  });
+
   createEffect(() => {
     if (!openChat()) {
       setSelectedIds((prev) => (prev.length < 10 ? prev : []));
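Aside (not part of the diff): the new triggerSearch signal works because Solid's createEffect tracks any signal read while it runs. Calling triggerSearch() inside the search effect subscribes that effect to the signal, so flipping the boolean from the window-level "triggerSearch" listener re-runs the search even when the query and filters are unchanged. A minimal sketch of the same pattern, with illustrative names (SearchPane and refetch are not from this commit):

import { createSignal, createEffect, onCleanup } from "solid-js";

// Illustrative sketch of the signal-toggle pattern used in the diff above.
export const SearchPane = () => {
  const [refetch, setRefetch] = createSignal(false);

  createEffect(() => {
    refetch(); // reading the signal subscribes this effect to it
    // ...issue the search request here; it re-runs on every toggle...
  });

  const handler = () => setRefetch((prev) => !prev);
  window.addEventListener("triggerSearch", handler);
  onCleanup(() => window.removeEventListener("triggerSearch", handler));

  return null;
};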
12 changes: 7 additions & 5 deletions search/src/components/SearchForm.tsx
@@ -63,7 +63,7 @@ const SearchForm = (props: {
   );
   const [highlightDelimiters, setHighlightDelimiters] = createSignal(
     // eslint-disable-next-line solid/reactivity
-    props.highlightDelimiters ?? ["?", ",", ".", "!"],
+    props.highlightDelimiters ?? ["?", ".", "!"],
   );
 
   const resizeTextarea = (textarea: HTMLTextAreaElement | null) => {
@@ -176,14 +176,14 @@ const SearchForm = (props: {
 
     if (!isDeleting && currentCharIndex === currentText.length) {
       isDeleting = true;
-      timeoutRefOne = setTimeout(typeText, 1000);
+      timeoutRefOne = setTimeout(typeText, 1000) as unknown as number;
     } else if (isDeleting && currentCharIndex === 0) {
       isDeleting = false;
       currentTextIndex = (currentTextIndex + 1) % textArray.length;
-      timeoutRefTwo = setTimeout(typeText, typingSpeed);
+      timeoutRefTwo = setTimeout(typeText, typingSpeed) as unknown as number;
     } else {
       const speed = isDeleting ? deleteSpeed : typingSpeed;
-      timeoutRefThree = setTimeout(typeText, speed);
+      timeoutRefThree = setTimeout(typeText, speed) as unknown as number;
     }
   };
 
@@ -241,6 +241,8 @@ const SearchForm = (props: {
             ((e.ctrlKey || e.metaKey) && e.key === "Enter") ||
             (!e.shiftKey && e.key === "Enter")
           ) {
+            window.dispatchEvent(new Event("triggerSearch"));
+
             onSubmit(e);
           }
         }}
@@ -415,7 +417,7 @@ const SearchForm = (props: {
             setSlimChunks(false);
             setGetTotalPages(false);
             setHighlightResults(true);
-            setHighlightDelimiters(["?", ",", ".", "!"]);
+            setHighlightDelimiters(["?", ".", "!"]);
             setState(false);
             onSubmit(e);
           }}
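Aside (not part of the diff): the `as unknown as number` casts above are needed because, with Node type definitions in scope, `setTimeout` is typed as returning `NodeJS.Timeout`, while the `timeoutRef*` variables are typed as `number` (the browser return type). A hedged alternative, sketched below, is to call `window.setTimeout`, whose DOM typing already returns `number`; this is only an option, not what the commit does.

// Sketch only: window.setTimeout is typed to return number in lib.dom.d.ts,
// so no double cast is needed even when @types/node is also installed.
let timeoutRef: number | undefined;

const schedule = (fn: () => void, delayMs: number) => {
  timeoutRef = window.setTimeout(fn, delayMs);
};

schedule(() => console.log("tick"), 1000);
if (timeoutRef !== undefined) {
  window.clearTimeout(timeoutRef);
}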
2 changes: 1 addition & 1 deletion search/src/pages/Search.tsx
@@ -34,7 +34,7 @@ export const Search = () => {
     setGetTotalPages(location.query.getTotalPages === "true" || false);
     setHighlightResults(location.query.highlightResults === "true" || false);
     setHighlightDelimiters(
-      location.query.highlightDelimiters?.split(",") ?? ["?", ",", ".", "!"],
+      location.query.highlightDelimiters?.split(",") ?? ["?", ".", "!"],
     );
   });
 
4 changes: 2 additions & 2 deletions server/Cargo.lock

Some generated files are not rendered by default.

8 changes: 5 additions & 3 deletions server/src/bin/ingestion-worker.rs
@@ -30,7 +30,7 @@ use trieve_server::operators::parse_operator::{
     average_embeddings, coarse_doc_chunker, convert_html_to_text,
 };
 use trieve_server::operators::qdrant_operator::{
-    bulk_create_new_qdrant_points_query, update_qdrant_point_query,
+    bulk_upsert_qdrant_points_query, update_qdrant_point_query,
 };
 use trieve_server::operators::search_operator::global_unfiltered_top_match_query;
 use trieve_server::{establish_connection, get_env};
@@ -584,7 +584,7 @@ pub async fn bulk_upload_chunks(
     );
 
     let create_point_result =
-        bulk_create_new_qdrant_points_query(qdrant_points, dataset_config.clone()).await;
+        bulk_upsert_qdrant_points_query(qdrant_points, dataset_config.clone()).await;
 
     insert_tx.finish();
 
@@ -848,7 +848,9 @@ async fn upload_chunk(
         "calling_bulk_create_new_qdrant_points_query",
     );
 
-    if let Err(e) = bulk_create_new_qdrant_points_query(vec![point], dataset_config).await {
+    if let Err(e) = bulk_upsert_qdrant_points_query(vec![point], dataset_config).await {
+        log::error!("Failed to create qdrant point: {:?}", e);
+
         bulk_revert_insert_chunk_metadata_query(vec![inserted_chunk.id], web_pool.clone())
             .await?;
 
46 changes: 22 additions & 24 deletions server/src/operators/chunk_operator.rs
@@ -764,31 +764,29 @@ pub async fn update_chunk_metadata_query(
 
     let mut conn = pool.get().await.unwrap();
 
-    let updated_chunk = conn
-        .transaction::<_, diesel::result::Error, _>(|conn| {
-            async move {
-                let updated_chunk: ChunkMetadata = diesel::update(
-                    chunk_metadata_columns::chunk_metadata
-                        .filter(chunk_metadata_columns::id.eq(chunk_data.id))
-                        .filter(chunk_metadata_columns::dataset_id.eq(dataset_uuid)),
-                )
-                .set((
-                    chunk_metadata_columns::link.eq(chunk_data.link),
-                    chunk_metadata_columns::chunk_html.eq(chunk_data.chunk_html),
-                    chunk_metadata_columns::content.eq(chunk_data.content),
-                    chunk_metadata_columns::metadata.eq(chunk_data.metadata),
-                    chunk_metadata_columns::tag_set.eq(chunk_data.tag_set),
-                    chunk_metadata_columns::weight.eq(chunk_data.weight),
-                ))
-                .get_result::<ChunkMetadata>(conn)
-                .await?;
+    let updated_chunk: ChunkMetadata = diesel::update(
+        chunk_metadata_columns::chunk_metadata
+            .filter(chunk_metadata_columns::id.eq(chunk_data.id))
+            .filter(chunk_metadata_columns::dataset_id.eq(dataset_uuid)),
+    )
+    .set((
+        chunk_metadata_columns::link.eq(chunk_data.link),
+        chunk_metadata_columns::chunk_html.eq(chunk_data.chunk_html),
+        chunk_metadata_columns::content.eq(chunk_data.content),
+        chunk_metadata_columns::metadata.eq(chunk_data.metadata),
+        chunk_metadata_columns::tag_set.eq(chunk_data.tag_set),
+        chunk_metadata_columns::tracking_id.eq(chunk_data.tracking_id),
+        chunk_metadata_columns::time_stamp.eq(chunk_data.time_stamp),
+        chunk_metadata_columns::location.eq(chunk_data.location),
+        chunk_metadata_columns::weight.eq(chunk_data.weight),
+    ))
+    .get_result::<ChunkMetadata>(&mut conn)
+    .await
+    .map_err(|e| {
+        log::error!("Failed to update chunk_metadata: {:?}", e);
 
-                Ok(updated_chunk)
-            }
-            .scope_boxed()
-        })
-        .await
-        .map_err(|_| ServiceError::BadRequest("Failed to update chunk metadata".to_string()))?;
+        ServiceError::BadRequest("Failed to update chunk metadata".to_string())
+    })?;
 
     if let Some(group_ids) = group_ids {
         let group_id1 = group_ids.clone();
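The chunk_operator.rs change above is the core of the bugfix: tracking_id, time_stamp, and location are now included in the diesel UPDATE's .set(...) tuple, so values supplied on update are persisted instead of being dropped, and the transaction wrapper around the single statement is removed in favor of a direct query with logging on error. As a purely hypothetical illustration of what this enables for API callers, the sketch below sends an update that sets those fields; the route, headers, and payload shape are assumptions inferred from the column names, not taken from this commit.

// Hypothetical sketch: endpoint path, headers, and field shapes are assumptions
// inferred from the column names in the diff, not from this commit.
const updateChunkTimestampAndLocation = async (
  apiHost: string,
  datasetId: string,
  apiKey: string,
  chunkId: string,
) => {
  const response = await fetch(`${apiHost}/api/chunk`, {
    method: "PUT",
    headers: {
      "Content-Type": "application/json",
      "TR-Dataset": datasetId,
      Authorization: apiKey,
    },
    body: JSON.stringify({
      chunk_id: chunkId,
      time_stamp: "2024-05-08T00:00:00Z",
      location: { lat: 45.51, lon: -122.68 },
    }),
  });
  if (!response.ok) {
    throw new Error(`Chunk update failed with status ${response.status}`);
  }
};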
2 changes: 1 addition & 1 deletion server/src/operators/qdrant_operator.rs
@@ -273,7 +273,7 @@ pub async fn create_new_qdrant_collection_query(
 }
 
 #[tracing::instrument(skip(points))]
-pub async fn bulk_create_new_qdrant_points_query(
+pub async fn bulk_upsert_qdrant_points_query(
     points: Vec<PointStruct>,
     config: ServerDatasetConfiguration,
 ) -> Result<(), ServiceError> {
