Commit

Fixed issue where we were sending two queries instead of one
kyle-sammons committed Nov 29, 2023
1 parent 4c1a1c7 commit 79dd4b9
Showing 4 changed files with 158 additions and 88 deletions.
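In short, the diff below replaces the separate logs and histogram SceneQueryRunners in explore.tsx with a single runner whose one 'logs' query also carries the date_histogram bucket aggregation, and both panels now read from that runner through their own SceneDataTransformer. A minimal sketch of the resulting wiring, assuming imports and panel options that the diff elides (the bucket-agg settings shown here are illustrative):

```typescript
import { SceneQueryRunner, SceneDataTransformer, PanelBuilders } from '@grafana/scenes';

// Sketch of the post-fix wiring: one query runner, two consumers.
// The bucketAggs settings below are assumed; the diff only shows the start of that array.
const queryRunner = new SceneQueryRunner({
  datasource: { uid: '${datasource}' },
  queries: [
    {
      refId: 'A',
      query: '${query:raw}',
      queryType: 'lucene',
      metrics: [{ id: '1', type: 'logs' }],
      bucketAggs: [{ id: '2', type: 'date_histogram', field: '_timesinceepoch', settings: { interval: 'auto' } }],
      timeField: '_timesinceepoch',
    },
  ],
  maxDataPoints: 30,
});

// Both panels share the same runner, so only one request goes out.
const logsPanel = PanelBuilders.logs().setTitle('Logs');
const histogramPanel = PanelBuilders.timeseries().setTitle('Histogram');
logsPanel.setData(new SceneDataTransformer({ $data: queryRunner, transformations: [] }));
histogramPanel.setData(new SceneDataTransformer({ $data: queryRunner, transformations: [] }));
```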
2 changes: 1 addition & 1 deletion package.json
@@ -100,5 +100,5 @@
"engines": {
"node": ">=16"
},
"packageManager": "yarn@"
"packageManager": "yarn@1.22.21"
}
3 changes: 2 additions & 1 deletion src/datasource/OpenSearchResponse.ts
@@ -40,8 +40,9 @@ export class OpenSearchResponse {
}

switch (metric.type) {
case 'logs':
case 'count': {
newSeries = { datapoints: [], metric: 'count', props, refId: target.refId };
newSeries = { datapoints: [], metric: metric.type, props, refId: target.refId };
for (let i = 0; i < esAgg.buckets.length; i++) {
const bucket = esAgg.buckets[i];
const value = bucket.doc_count;
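For context on the OpenSearchResponse change above: a 'logs' metric now takes the same bucket-walking path as 'count', but the series is labelled with metric.type instead of a hard-coded 'count'. A rough sketch of the resulting series object (the push of [value, bucket.key] happens just below the shown hunk, so the exact datapoint layout is assumed):

```typescript
// Illustrative shape only; timestamps and counts are made up.
const newSeries = {
  datapoints: [
    [120, 1701216000000], // [doc_count, bucket key in ms]
    [98, 1701216030000],
  ],
  metric: 'logs', // previously always 'count'
  props: {},
  refId: 'A',
};
```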
111 changes: 111 additions & 0 deletions src/datasource/QueryBuilder.ts
@@ -444,6 +444,117 @@ export class QueryBuilder {
// todo - use the limit variable to allow users to set this
query = this.documentQuery(query, size || 500);

// If there are no bucket aggs to do here, then we don't need to do anything
// else
if (target.bucketAggs.length === 0) {
return {
...query,
}

}

let i, j, pv, nestedAggs;

nestedAggs = query;


for (i = 0; i < target.bucketAggs.length; i++) {
const aggDef = target.bucketAggs[i];
const esAgg: any = {};

switch (aggDef.type) {
case 'date_histogram': {
esAgg['date_histogram'] = this.getDateHistogramAgg(aggDef);
break;
}
case 'histogram': {
esAgg['histogram'] = this.getHistogramAgg(aggDef);
break;
}
case 'filters': {
esAgg['filters'] = { filters: this.getFiltersAgg(aggDef) };
break;
}
case 'terms': {
this.buildTermsAgg(aggDef, esAgg, target);
break;
}
case 'geohash_grid': {
esAgg['geohash_grid'] = {
field: aggDef.field,
precision: aggDef.settings?.precision,
};
break;
}
}

nestedAggs.aggs = nestedAggs.aggs || {};
nestedAggs.aggs[aggDef.id] = esAgg;
nestedAggs = esAgg;
}

nestedAggs.aggs = {};

for (i = 0; i < target.metrics.length; i++) {
metric = target.metrics[i];
if (metric.type === 'count' || metric.type === "logs") {
continue;
}

const aggField: any = {};
let metricAgg: any = null;

if (isPipelineAggregation(metric)) {
if (isPipelineAggregationWithMultipleBucketPaths(metric)) {
if (metric.pipelineVariables) {
metricAgg = {
buckets_path: {},
};

for (j = 0; j < metric.pipelineVariables.length; j++) {
pv = metric.pipelineVariables[j];

if (pv.name && pv.pipelineAgg && /^\d*$/.test(pv.pipelineAgg)) {
const appliedAgg = findMetricById(target.metrics, pv.pipelineAgg);
if (appliedAgg) {
if (appliedAgg.type === 'count') {
metricAgg.buckets_path[pv.name] = '_count';
} else {
metricAgg.buckets_path[pv.name] = pv.pipelineAgg;
}
}
}
}
} else {
continue;
}
} else {
if (metric.field && /^\d*$/.test(metric.field)) {
const appliedAgg = findMetricById(target.metrics, metric.field);
if (appliedAgg) {
if (appliedAgg.type === 'count') {
metricAgg = { buckets_path: '_count' };
} else {
metricAgg = { buckets_path: metric.field };
}
}
} else {
continue;
}
}
} else if (isMetricAggregationWithField(metric)) {
metricAgg = { field: metric.field };
}

metricAgg = {
...metricAgg,
...(isMetricAggregationWithSettings(metric) && metric.settings),
};

aggField[metric.type] = metricAgg;
nestedAggs.aggs[metric.id] = aggField;
}

return {
...query
};
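To make the added QueryBuilder block concrete: when the target has a 'logs' metric plus bucket aggs, the builder now nests the bucket aggs onto the document query so a single request returns both the documents and the histogram buckets. A hedged sketch of the kind of body this can produce (the documentQuery portion sits outside this hunk, so its size/query/sort fields are assumed here):

```typescript
// Assumed request body for a 'logs' metric with one date_histogram bucket agg.
// Only the aggs nesting is taken from the diff; the size/query/sort portion is illustrative.
const exampleBody = {
  size: 500,
  query: {
    bool: {
      filter: [{ range: { _timesinceepoch: { gte: '$timeFrom', lte: '$timeTo', format: 'epoch_millis' } } }],
    },
  },
  sort: { _timesinceepoch: { order: 'desc', unmapped_type: 'boolean' } },
  aggs: {
    '2': {
      date_histogram: {
        field: '_timesinceepoch',
        interval: '$__interval',
        min_doc_count: 0,
        extended_bounds: { min: '$timeFrom', max: '$timeTo' },
        format: 'epoch_millis',
      },
      // 'count'/'logs' metrics are skipped in the metrics loop, so the innermost aggs stays empty.
      aggs: {},
    },
  },
};
```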
130 changes: 44 additions & 86 deletions src/pages/explore.tsx
@@ -134,12 +134,11 @@ class ResultStats extends SceneObjectBase<ResultsStatsState> {

interface KaldbQueryState extends SceneObjectState {
query: string;
timeseriesLoading: boolean;
logsLoading: boolean;
loading: boolean;
}

const KaldbQueryRenderer = ({ model }: SceneComponentProps<KaldbQuery>) => {
const { timeseriesLoading, logsLoading } = model.useState();
const { loading } = model.useState();

return (
<>
@@ -155,12 +154,11 @@ const KaldbQueryRenderer = ({ model }: SceneComponentProps<KaldbQuery>) => {
onChange={(e) => model.onTextChange(e.currentTarget.value)}
/>
</InlineField>
{timeseriesLoading || logsLoading ? (
{loading ? (
<Button
icon="fa fa-spinner"
onClick={() => {
logsQueryRunner.cancelQuery();
histogramQueryRunner.cancelQuery();
queryRunner.cancelQuery();
}}
variant="destructive"
>
@@ -431,8 +429,7 @@ class KaldbQuery extends SceneObjectBase<KaldbQueryState> {
constructor(state?: Partial<KaldbQueryState>) {
super({
query: '',
timeseriesLoading: false,
logsLoading: false,
loading: false,
...state,
});
}
@@ -481,17 +478,12 @@ class KaldbQuery extends SceneObjectBase<KaldbQueryState> {
this.doQuery();
};

setLogsLoading = (loading: boolean) => {
setLoading = (loading: boolean) => {
this.setState({
logsLoading: loading,
loading: loading,
});
};

setTimeseriesLoading = (loading: boolean) => {
this.setState({
timeseriesLoading: loading,
});
};
}

const histogramNodeStats = new NodeStats();
@@ -604,46 +596,6 @@ const logsPanel = PanelBuilders.logs()
)
.setTitle('Logs');

const logsQueryRunner = new SceneQueryRunner({
datasource: {
uid: '${datasource}',
},
queries: [
{
refId: 'A',
query: '${query:raw}',
queryType: 'lucene',
metrics: [
{
id: '1',
type: 'logs',
},
],
bucketAggs: [],
// todo - this should use the config value for timestamp
timeField: '_timesinceepoch',
},
],
});

logsQueryRunner.subscribeToEvent(SceneObjectStateChangedEvent, (event) => {
if (typeof event.payload.newState !== 'undefined') {
if (event.payload.newState['data'].state === 'Done') {
queryComponent.setLogsLoading(false);
fieldComponent.setLoading(false);
} else if (event.payload.newState['data'].state === 'Loading') {
queryComponent.setLogsLoading(true);
fieldComponent.setLoading(true);
} else if (event.payload.newState['data'].state === 'Error') {
queryComponent.setLogsLoading(false);
logsNodeStats.setCount(-1, -1);
fieldComponent.setFields([]);
fieldComponent.setTopTenMostPopularFields([]);
fieldComponent.setLoading(false);
}
}
});

/*
* Calculates the frequency map for a list of values.
* The map returned is in sorted descending order
@@ -764,29 +716,7 @@ const logsResultTransformation: CustomTransformOperator = () => (source: Observa
);
};

logsPanel.setData(
new SceneDataTransformer({
$data: logsQueryRunner,
transformations: [
logsResultTransformation,
{
id: 'organize',
options: {
excludeByName: {},
indexByName: {
// todo - this should use the config value for timestamp
_timesinceepoch: 0,
// todo - this should use the config value "message field name"
_source: 1,
},
renameByName: {},
},
},
],
})
);

const histogramQueryRunner = new SceneQueryRunner({
const queryRunner = new SceneQueryRunner({
datasource: {
uid: '${datasource}',
},
@@ -798,7 +728,7 @@ const histogramQueryRunner = new SceneQueryRunner({
metrics: [
{
id: '1',
type: 'count',
type: 'logs',
},
],
bucketAggs: [
@@ -819,21 +749,49 @@
maxDataPoints: 30,
});

histogramQueryRunner.subscribeToEvent(SceneObjectStateChangedEvent, (event) => {
queryRunner.subscribeToEvent(SceneObjectStateChangedEvent, (event) => {
if (typeof event.payload.newState !== 'undefined') {
if (event.payload.newState['data'].state === 'Done') {
queryComponent.setTimeseriesLoading(false);
queryComponent.setLoading(false);
fieldComponent.setLoading(false);
} else if (event.payload.newState['data'].state === 'Loading') {
resultsCounter.setResults(-1);
queryComponent.setTimeseriesLoading(true);
queryComponent.setLoading(true);
fieldComponent.setLoading(true);
} else if (event.payload.newState['data'].state === 'Error') {
queryComponent.setTimeseriesLoading(false);
queryComponent.setLoading(false);
fieldComponent.setFields([]);
fieldComponent.setTopTenMostPopularFields([])
fieldComponent.setLoading(false);
logsNodeStats.setCount(-1, -1);
resultsCounter.setResults(-1);
histogramNodeStats.setCount(-1, -1);
}
}
});

logsPanel.setData(
new SceneDataTransformer({
$data: queryRunner,
transformations: [
logsResultTransformation,
{
id: 'organize',
options: {
excludeByName: {},
indexByName: {
// todo - this should use the config value for timestamp
_timesinceepoch: 0,
// todo - this should use the config value "message field name"
_source: 1,
},
renameByName: {},
},
},
],
})
);

const histogramPanel = PanelBuilders.timeseries()
.setCustomFieldConfig('drawStyle', DrawStyle.Bars)
.setCustomFieldConfig('fillOpacity', 100)
@@ -855,8 +813,8 @@ const histogramResultTransformation: CustomTransformOperator = () => (source: Ob
map((data: DataFrame[]) => {
if (data.length > 0 && data[0].meta['shards']) {
let counter = 0;
for (let i = data[0].fields[1].values['buffer'].length - 1; i >= 0; i--) {
counter += data[0].fields[1].values['buffer'][i];
for (let i = data[1].fields[1].values['buffer'].length - 1; i >= 0; i--) {
counter += data[1].fields[1].values['buffer'][i];
}
resultsCounter.setResults(counter);
histogramNodeStats.setCount(data[0].meta['shards'].total, data[0].meta['shards'].failed);
@@ -868,7 +826,7 @@

histogramPanel.setData(
new SceneDataTransformer({
$data: histogramQueryRunner,
$data: queryRunner,
transformations: [histogramResultTransformation],
})
);
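One assumption the data[0] -> data[1] change in histogramResultTransformation encodes: with the combined query, the response is expected to deliver the log rows as the first frame and the date_histogram doc counts as the second. A small helper restating that reading (frame layout inferred from the diff, not from datasource documentation):

```typescript
import { DataFrame } from '@grafana/data';

// Inferred frame layout after the merge:
//   data[0] -> log rows from the 'logs' metric
//   data[1] -> doc-count buckets from the date_histogram agg
function totalHits(data: DataFrame[]): number {
  if (data.length < 2) {
    return 0;
  }
  let counter = 0;
  const buffer = data[1].fields[1].values['buffer'];
  for (let i = buffer.length - 1; i >= 0; i--) {
    counter += buffer[i];
  }
  return counter;
}
```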
