
Commit c37b093

fix: stats response
FIX: stats updated correctly when migrated from standalone mode
CHORE: remove dead code
1 parent 6167e49 commit c37b093

File tree

4 files changed: +28, -33 lines

server/src/handlers/http/logstream.rs
server/src/handlers/http/modal/query_server.rs
server/src/metadata.rs
server/src/storage/object_storage.rs

server/src/handlers/http/logstream.rs

Lines changed: 15 additions & 9 deletions

```diff
@@ -33,7 +33,7 @@ use crate::{metadata, validator};
 
 use self::error::{CreateStreamError, StreamError};
 
-use super::modal::query_server::{self, IngestionStats, QueriedStats, StorageStats};
+use super::modal::query_server::{self, IngestionStats, QueriedStats, QueryServer, StorageStats};
 
 pub async fn delete(req: HttpRequest) -> Result<impl Responder, StreamError> {
     let stream_name: String = req.match_info().get("logstream").unwrap().parse().unwrap();
@@ -284,19 +284,19 @@ pub async fn get_stats(req: HttpRequest) -> Result<impl Responder, StreamError>
     let stats = stats::get_current_stats(&stream_name, "json")
         .ok_or(StreamError::StreamNotFound(stream_name.clone()))?;
 
-    if CONFIG.parseable.mode == Mode::Query {
-        let stats = query_server::QueryServer::fetch_stats_from_ingesters(&stream_name).await?;
-        let stats = serde_json::to_value(stats).unwrap();
-        return Ok((web::Json(stats), StatusCode::OK));
-    }
+    let ingestor_stats = if CONFIG.parseable.mode == Mode::Query {
+        Some(query_server::QueryServer::fetch_stats_from_ingesters(&stream_name).await?)
+    } else {
+        None
+    };
 
     let hash_map = STREAM_INFO.read().unwrap();
     let stream_meta = &hash_map
         .get(&stream_name)
         .ok_or(StreamError::StreamNotFound(stream_name.clone()))?;
 
     let time = Utc::now();
-    let qstats = match &stream_meta.first_event_at {
+    let stats = match &stream_meta.first_event_at {
         Some(first_event_at) => {
             let ingestion_stats = IngestionStats::new(
                 stats.events,
@@ -336,10 +336,16 @@ pub async fn get_stats(req: HttpRequest) -> Result<impl Responder, StreamError>
             )
         }
     };
+    let stats = if let Some(mut ingestor_stats) = ingestor_stats {
+        ingestor_stats.push(stats);
+        QueryServer::merge_quried_stats(ingestor_stats)
+    } else {
+        stats
+    };
 
-    let out_stats = serde_json::to_value(qstats).unwrap();
+    let stats = serde_json::to_value(stats).unwrap();
 
-    Ok((web::Json(out_stats), StatusCode::OK))
+    Ok((web::Json(stats), StatusCode::OK))
 }
 
 // Check if the first_event_at is empty
```
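
The diff above changes how get_stats answers in a distributed deployment: previously a query-mode node short-circuited and returned only the merged ingester stats, dropping the stats it computes locally from STREAM_INFO; now it collects the per-ingester stats, appends its own QueriedStats, and merges once. Below is a minimal, self-contained sketch of that flow. NodeStats, fetch_stats_from_ingesters, and merge_stats here are hypothetical stand-ins, not the real QueriedStats or QueryServer API; only the control flow mirrors the handler.

```rust
// Hypothetical stand-ins for QueriedStats, QueryServer::fetch_stats_from_ingesters
// and QueryServer::merge_quried_stats; only the control flow mirrors the handler.
#[derive(Default, Clone, Copy, Debug, PartialEq)]
struct NodeStats {
    events: u64,
}

// Stand-in for the HTTP fan-out to every ingester node.
fn fetch_stats_from_ingesters() -> Vec<NodeStats> {
    vec![NodeStats { events: 4 }, NodeStats { events: 6 }]
}

// Stand-in for the merge step: fold per-node counters into cluster totals.
fn merge_stats(all: Vec<NodeStats>) -> NodeStats {
    all.into_iter()
        .fold(NodeStats::default(), |acc, s| NodeStats { events: acc.events + s.events })
}

fn main() {
    let is_query_mode = true; // CONFIG.parseable.mode == Mode::Query in the real code
    let local_stats = NodeStats { events: 3 }; // built from STREAM_INFO in the real handler

    // Collect ingester stats only when running as a query node.
    let ingestor_stats = if is_query_mode { Some(fetch_stats_from_ingesters()) } else { None };

    // New behaviour: append the query node's own stats before merging,
    // so the response covers the whole cluster (13 events, not 10).
    let stats = if let Some(mut all) = ingestor_stats {
        all.push(local_stats);
        merge_stats(all)
    } else {
        local_stats
    };
    assert_eq!(stats.events, 13);
}
```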

server/src/handlers/http/modal/query_server.rs

Lines changed: 1 addition & 16 deletions

```diff
@@ -235,7 +235,6 @@ impl QueryServer {
 
         migration::run_migration(&CONFIG).await?;
 
-        // when do we do this
         let storage = CONFIG.storage().get_object_store();
         if let Err(e) = metadata::STREAM_INFO.load(&*storage).await {
             log::warn!("could not populate local metadata. {:?}", e);
@@ -253,25 +252,12 @@ impl QueryServer {
             analytics::init_analytics_scheduler();
         }
 
-        // spawn the sync thread
-        // tokio::spawn(Self::sync_ingester_metadata());
-
         self.start(prometheus, CONFIG.parseable.openid.clone())
             .await?;
 
         Ok(())
     }
 
-    #[allow(dead_code)]
-    async fn sync_ingester_metadata() {
-        let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(60 / 10));
-        loop {
-            interval.tick().await;
-            // dbg!("Tick");
-            Self::get_ingester_info().await.unwrap();
-        }
-    }
-
     // forward the request to all ingesters to keep them in sync
     pub async fn sync_streams_with_ingesters(stream_name: &str) -> Result<(), StreamError> {
         let ingester_infos = Self::get_ingester_info().await.map_err(|err| {
@@ -326,7 +312,7 @@ impl QueryServer {
     /// get the cumulative stats from all ingesters
     pub async fn fetch_stats_from_ingesters(
        stream_name: &str,
-    ) -> Result<QueriedStats, StreamError> {
+    ) -> Result<Vec<QueriedStats>, StreamError> {
        let mut stats = Vec::new();
 
        let ingester_infos = Self::get_ingester_info().await.map_err(|err| {
@@ -373,7 +359,6 @@ impl QueryServer {
                }
            }
        }
-        let stats = Self::merge_quried_stats(stats);
 
        Ok(stats)
    }
```
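
fetch_stats_from_ingesters now hands back the raw per-ingester Vec<QueriedStats> and no longer merges internally, which is what lets the handler above fold in the query node's own stats. The merge_quried_stats helper itself is not part of this diff; as an assumption about its role, merging per-node stats amounts to summing the additive counters, roughly as in this sketch (MiniQueriedStats is a hypothetical, simplified stand-in; non-additive fields such as the stream name or first_event_at are deliberately left out).

```rust
// Hypothetical, simplified model of per-node stats; the real QueriedStats
// carries more fields (stream name, timestamps, ingestion/storage breakdowns).
#[derive(Default, Clone, Copy, Debug, PartialEq)]
struct MiniQueriedStats {
    events: u64,          // total ingested events on the node
    ingestion_size: u64,  // bytes received by the node
    storage_size: u64,    // bytes persisted to object storage by the node
}

// Assumed shape of the merge: sum each additive counter across nodes.
fn merge_queried_stats(all: Vec<MiniQueriedStats>) -> MiniQueriedStats {
    all.into_iter().fold(MiniQueriedStats::default(), |acc, s| MiniQueriedStats {
        events: acc.events + s.events,
        ingestion_size: acc.ingestion_size + s.ingestion_size,
        storage_size: acc.storage_size + s.storage_size,
    })
}

fn main() {
    let per_node = vec![
        MiniQueriedStats { events: 10, ingestion_size: 1_000, storage_size: 700 },
        MiniQueriedStats { events: 5, ingestion_size: 500, storage_size: 350 },
    ];
    let total = merge_queried_stats(per_node);
    assert_eq!(
        total,
        MiniQueriedStats { events: 15, ingestion_size: 1_500, storage_size: 1_050 }
    );
}
```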

server/src/metadata.rs

Lines changed: 1 addition & 1 deletion

```diff
@@ -168,7 +168,7 @@ impl StreamInfo {
 
         for stream in storage.list_streams().await? {
             let alerts = storage.get_alerts(&stream.name).await?;
-            let schema = storage.get_schema_for_the_first_time(&stream.name).await?;
+            let schema = storage.get_schema_on_server_start(&stream.name).await?;
             let meta = storage.get_stream_metadata(&stream.name).await?;
 
             let schema = update_schema_from_staging(&stream.name, schema);
```

server/src/storage/object_storage.rs

Lines changed: 11 additions & 7 deletions

```diff
@@ -173,7 +173,7 @@ pub trait ObjectStorage: Sync + 'static {
             .await
     }
 
-    async fn get_schema_for_the_first_time(
+    async fn get_schema_on_server_start(
         &self,
         stream_name: &str,
     ) -> Result<Schema, ObjectStorageError> {
@@ -218,12 +218,16 @@ pub trait ObjectStorage: Sync + 'static {
                     STREAM_METADATA_FILE_NAME,
                 ]))
                 .await?;
-                self.put_stream_manifest(
-                    stream_name,
-                    &serde_json::from_slice::<ObjectStoreFormat>(&bytes)
-                        .expect("parseable config is valid json"),
-                )
-                .await?;
+
+                let mut config = serde_json::from_slice::<ObjectStoreFormat>(&bytes)
+                    .expect("parseable config is valid json");
+
+                if CONFIG.parseable.mode == Mode::Ingest {
+                    config.stats = Stats::default();
+                    config.snapshot.manifest_list = vec![];
+                }
+
+                self.put_stream_manifest(stream_name, &config).await?;
                 bytes
             }
         };
```
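
The object_storage.rs hunk is where the standalone-to-distributed migration fix lands: when the stored stream config is read back on server start, an ingest-mode node now resets its stats and snapshot manifest list, which, per the commit message, keeps stats consistent after a standalone deployment is migrated to distributed mode. Below is a minimal sketch of that conditional reset, using hypothetical simplified types (Mode, StreamConfig) rather than the real ObjectStoreFormat, Stats, and Snapshot.

```rust
// Minimal mirror of the conditional reset above, with hypothetical simplified
// types instead of the real ObjectStoreFormat/Stats/Snapshot.
#[derive(Debug, PartialEq)]
enum Mode {
    Ingest,
    Query,
}

#[derive(Debug, PartialEq, Default)]
struct StreamConfig {
    events: u64,                 // stand-in for config.stats
    manifest_list: Vec<String>,  // stand-in for config.snapshot.manifest_list
}

// On server start, an ingest-mode node wipes stats and manifests carried over
// from a standalone deployment; other modes keep the stored values.
fn prepare_stream_config(mut config: StreamConfig, mode: &Mode) -> StreamConfig {
    if *mode == Mode::Ingest {
        config.events = 0;
        config.manifest_list.clear();
    }
    config
}

fn main() {
    let migrated = StreamConfig {
        events: 42,
        manifest_list: vec!["manifest-0001.json".to_string()],
    };
    let reset = prepare_stream_config(migrated, &Mode::Ingest);
    assert_eq!(reset, StreamConfig::default());

    let kept = prepare_stream_config(
        StreamConfig { events: 42, manifest_list: vec![] },
        &Mode::Query,
    );
    assert_eq!(kept.events, 42);
}
```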
