Commit a7bda16

cleanup: remove unneeded params and functions
1 parent 8f053d8 commit a7bda16

3 files changed: +4 -82 lines

server/src/handlers/chunk_handler.rs

Lines changed: 1 addition & 10 deletions
@@ -1145,8 +1145,6 @@ pub struct SearchChunkData {
     pub highlight_results: Option<bool>,
     /// Set highlight_delimiters to a list of strings to use as delimiters for highlighting. If not specified, this defaults to ["?", ",", ".", "!"].
     pub highlight_delimiters: Option<Vec<String>>,
-    /// Turn on quote words and negated words to search for exact phrases and exclude words from the search results. Default is false.
-    pub quote_negated_words: Option<bool>,
     /// Set score_threshold to a float to filter out chunks with a score below the threshold.
     pub score_threshold: Option<f32>,
     /// Set slim_chunks to true to avoid returning the content and chunk_html of the chunks. This is useful for when you want to reduce amount of data over the wire for latency improvement. Default is false.
@@ -1265,14 +1263,7 @@ pub async fn search_chunk(

     let page = data.page.unwrap_or(1);

-    let mut parsed_query = ParsedQuery {
-        query: data.query.clone(),
-        quote_words: None,
-        negated_words: None,
-    };
-    if data.quote_negated_words.unwrap_or(false) {
-        parsed_query = parse_query(data.query.clone());
-    }
+    let parsed_query = parse_query(data.query.clone());

     let tx_ctx = sentry::TransactionContext::new("search", "search_chunks");
     let transaction = sentry::start_transaction(tx_ctx);
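
Note on the change above: parse_query now runs on every request, so quote and negation syntax in the query string is always interpreted, and the quote_negated_words opt-in flag is removed from SearchChunkData. The diff does not show parse_query itself; the following is only a rough, assumed sketch of what such a parser could look like. The field names mirror the ParsedQuery construction removed above, but the parsing rules (double quotes for phrases, '-' prefix for negation) are assumptions, not this repository's implementation.

// Illustrative sketch only, not the repository's parse_query.
#[derive(Debug)]
pub struct ParsedQuery {
    pub query: String,
    pub quote_words: Option<Vec<String>>,
    pub negated_words: Option<Vec<String>>,
}

pub fn parse_query(query: String) -> ParsedQuery {
    // Text between pairs of double quotes is treated as an exact phrase
    // (kept with its surrounding quotes, matching how downstream code trims them).
    let quote_words: Vec<String> = query
        .split('"')
        .skip(1)
        .step_by(2)
        .map(|phrase| format!("\"{}\"", phrase))
        .collect();

    // Whitespace-separated tokens starting with '-' are treated as negated words.
    let negated_words: Vec<String> = query
        .split_whitespace()
        .filter(|token| token.starts_with('-'))
        .map(|token| token.to_string())
        .collect();

    ParsedQuery {
        query,
        quote_words: (!quote_words.is_empty()).then_some(quote_words),
        negated_words: (!negated_words.is_empty()).then_some(negated_words),
    }
}

Under these assumed rules, parse_query("rust -python \"exact phrase\"".to_string()) would yield quote_words of ["\"exact phrase\""] and negated_words of ["-python"].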

server/src/handlers/group_handler.rs

Lines changed: 3 additions & 22 deletions
@@ -1,6 +1,6 @@
 use super::{
     auth_handler::{AdminOnly, LoggedUser},
-    chunk_handler::{parse_query, ChunkFilter, ParsedQuery, ScoreChunkDTO, SearchChunkData},
+    chunk_handler::{parse_query, ChunkFilter, ScoreChunkDTO, SearchChunkData},
 };
 use crate::{
     data::models::{
@@ -1101,8 +1101,6 @@ pub struct SearchWithinGroupData {
     pub highlight_results: Option<bool>,
     /// Set highlight_delimiters to a list of strings to use as delimiters for highlighting. If not specified, this defaults to ["?", ",", ".", "!"].
     pub highlight_delimiters: Option<Vec<String>>,
-    /// Turn on quote words and negated words to search for exact phrases and exclude words from the search results. Default is false.
-    pub quote_negated_words: Option<bool>,
     /// Set score_threshold to a float to filter out chunks with a score below the threshold.
     pub score_threshold: Option<f32>,
     /// Set slim_chunks to true to avoid returning the content and chunk_html of the chunks. This is useful for when you want to reduce amount of data over the wire for latency improvement. Default is false.
@@ -1122,7 +1120,6 @@ impl From<SearchWithinGroupData> for SearchChunkData {
             get_collisions: Some(false),
             highlight_results: data.highlight_results,
             highlight_delimiters: data.highlight_delimiters,
-            quote_negated_words: data.quote_negated_words,
             score_threshold: data.score_threshold,
             slim_chunks: data.slim_chunks,
         }
@@ -1194,14 +1191,7 @@ pub async fn search_within_group(
         }
     };

-    let mut parsed_query = ParsedQuery {
-        query: data.query.clone(),
-        quote_words: None,
-        negated_words: None,
-    };
-    if data.quote_negated_words.unwrap_or(false) {
-        parsed_query = parse_query(data.query.clone());
-    }
+    let parsed_query = parse_query(data.query.clone());

     let result_chunks = match data.search_type.as_str() {
         "fulltext" => {
@@ -1286,8 +1276,6 @@ pub struct SearchOverGroupsData {
     pub highlight_results: Option<bool>,
     /// Set highlight_delimiters to a list of strings to use as delimiters for highlighting. If not specified, this defaults to ["?", ",", ".", "!"].
     pub highlight_delimiters: Option<Vec<String>>,
-    /// Turn on quote words and negated words to search for exact phrases and exclude words from the search results. Default is false.
-    pub quote_negated_words: Option<bool>,
     /// Set score_threshold to a float to filter out chunks with a score below the threshold.
     pub score_threshold: Option<f32>,
     // Group_size is the number of chunks to fetch for each group.
@@ -1337,14 +1325,7 @@ pub async fn search_over_groups(
     //search over the links as well
     let page = data.page.unwrap_or(1);

-    let mut parsed_query = ParsedQuery {
-        query: data.query.clone(),
-        quote_words: None,
-        negated_words: None,
-    };
-    if data.quote_negated_words.unwrap_or(false) {
-        parsed_query = parse_query(data.query.clone());
-    }
+    let parsed_query = parse_query(data.query.clone());

     let result_chunks = match data.search_type.as_str() {
         "fulltext" => {

server/src/operators/search_operator.rs

Lines changed: 0 additions & 50 deletions
@@ -1941,53 +1941,3 @@ pub async fn hybrid_search_over_groups(

     Ok(result_chunks)
 }
-
-#[tracing::instrument(skip(pool))]
-pub async fn get_qdrant_point_ids_from_pg_for_quote_negated_words(
-    quote_words: Option<Vec<String>>,
-    negated_words: Option<Vec<String>>,
-    dataset_id: uuid::Uuid,
-    pool: web::Data<Pool>,
-) -> Result<Vec<uuid::Uuid>, ServiceError> {
-    use crate::data::schema::chunk_metadata::dsl as chunk_metadata_columns;
-    use diesel::prelude::*;
-    use diesel_async::RunQueryDsl;
-
-    let mut conn = pool.get().await.unwrap();
-    let mut query = chunk_metadata_columns::chunk_metadata
-        .select(chunk_metadata_columns::qdrant_point_id)
-        .filter(chunk_metadata_columns::qdrant_point_id.is_not_null())
-        .filter(chunk_metadata_columns::dataset_id.eq(dataset_id))
-        .into_boxed();
-
-    if let Some(quote_words) = quote_words {
-        for word in quote_words.iter() {
-            let word_without_quotes = word.trim_matches('\"');
-            query = query.filter(
-                chunk_metadata_columns::chunk_html.ilike(format!("%{}%", word_without_quotes)),
-            );
-        }
-    }
-
-    if let Some(negated_words) = negated_words {
-        for word in negated_words.iter() {
-            let word_without_negation = word.trim_matches('-');
-            query = query.filter(
-                chunk_metadata_columns::chunk_html
-                    .not_ilike(format!("%{}%", word_without_negation)),
-            );
-        }
-    }
-
-    let matching_qdrant_point_ids: Vec<Option<uuid::Uuid>> =
-        query.load(&mut conn).await.map_err(|_| {
-            ServiceError::BadRequest("Failed to load full-text searched chunks".to_string())
-        })?;
-
-    let matching_qdrant_point_ids = matching_qdrant_point_ids
-        .into_iter()
-        .flatten()
-        .collect::<Vec<uuid::Uuid>>();
-
-    Ok(matching_qdrant_point_ids)
-}
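
The deleted helper relied on diesel's into_boxed() so that a variable number of ilike / not_ilike filters could be chained onto a single query at runtime. For readers unfamiliar with that pattern, here is a minimal, self-contained sketch of the same technique against an assumed table definition; it is an illustration (assuming diesel's postgres and uuid features), not code from this repository.

// Minimal sketch of the boxed-query pattern; the table! definition is assumed.
use diesel::prelude::*;

diesel::table! {
    chunk_metadata (id) {
        id -> Uuid,
        chunk_html -> Nullable<Text>,
        dataset_id -> Uuid,
    }
}

// Boxing erases the concrete where-clause type, so filters can be appended in
// a loop while the query variable keeps a single, nameable type.
fn filtered_chunks(
    dataset: uuid::Uuid,
    quote_words: &[String],
    negated_words: &[String],
) -> chunk_metadata::BoxedQuery<'static, diesel::pg::Pg> {
    use chunk_metadata::dsl::*;

    let mut query = chunk_metadata
        .filter(dataset_id.eq(dataset))
        .into_boxed();

    for word in quote_words {
        // each quoted phrase must appear (case-insensitive substring match)
        query = query.filter(chunk_html.ilike(format!("%{}%", word.trim_matches('"'))));
    }
    for word in negated_words {
        // each negated word must not appear
        query = query.filter(chunk_html.not_ilike(format!("%{}%", word.trim_matches('-'))));
    }

    query
}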
