-
Notifications
You must be signed in to change notification settings - Fork 3
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
fix(DiffFlameGraph): Fix the "Explain Flame Graph" (AI) feature #129
Changes from all commits
0e11d5e
d686d32
b560caf
0b10c93
2a53689
e839994
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,165 @@ | ||
import { css } from '@emotion/css'; | ||
import { GrafanaTheme2 } from '@grafana/data'; | ||
import { SceneComponentProps, sceneGraph, SceneObjectBase, SceneObjectState } from '@grafana/scenes'; | ||
import { Alert, Button, IconButton, Spinner, useStyles2 } from '@grafana/ui'; | ||
import { getProfileMetric, ProfileMetricId } from '@shared/infrastructure/profile-metrics/getProfileMetric'; | ||
import { DomainHookReturnValue } from '@shared/types/DomainHookReturnValue'; | ||
import { InlineBanner } from '@shared/ui/InlineBanner'; | ||
import { Panel } from '@shared/ui/Panel/Panel'; | ||
import React from 'react'; | ||
|
||
import { ProfilesDataSourceVariable } from '../../domain/variables/ProfilesDataSourceVariable'; | ||
import { getSceneVariableValue } from '../../helpers/getSceneVariableValue'; | ||
import { AiReply } from './components/AiReply'; | ||
import { FollowUpForm } from './components/FollowUpForm'; | ||
import { useOpenAiChatCompletions } from './domain/useOpenAiChatCompletions'; | ||
import { FetchParams, useFetchDotProfiles } from './infrastructure/useFetchDotProfiles'; | ||
|
||
// Empty scene state: SceneAiPanel keeps no persistent scene-level state of its
// own — everything it renders is derived in useSceneAiPanel from scene
// variables and the fetch params passed to Component.
interface SceneAiPanelState extends SceneObjectState {}
export class SceneAiPanel extends SceneObjectBase<SceneAiPanelState> { | ||
constructor() { | ||
super({ key: 'ai-panel' }); | ||
} | ||
|
||
validateFetchParams(isDiff: boolean, fetchParams: FetchParams) { | ||
let params = fetchParams; | ||
let error; | ||
|
||
if (isDiff && fetchParams.length !== 2) { | ||
error = new Error( | ||
`Invalid number of fetch parameters for analyzing the diff flame graph (${fetchParams.length})!` | ||
); | ||
params = []; | ||
} else if (!isDiff && fetchParams.length !== 1) { | ||
error = new Error(`Invalid number of fetch parameters for analyzing the flame graph (${fetchParams.length})!`); | ||
params = []; | ||
} | ||
|
||
return { params, error }; | ||
} | ||
|
||
useSceneAiPanel = (isDiff: boolean, fetchParams: FetchParams): DomainHookReturnValue => { | ||
const dataSourceUid = sceneGraph.findByKeyAndType(this, 'dataSource', ProfilesDataSourceVariable).useState() | ||
.value as string; | ||
|
||
const { params, error: validationError } = this.validateFetchParams(isDiff, fetchParams); | ||
|
||
const { error: fetchError, isFetching, profiles } = useFetchDotProfiles(dataSourceUid, params); | ||
|
||
const profileMetricId = getSceneVariableValue(this, 'profileMetricId'); | ||
const profileType = getProfileMetric(profileMetricId as ProfileMetricId).type; | ||
|
||
const { reply, error: llmError, retry } = useOpenAiChatCompletions(profileType, profiles); | ||
|
||
return { | ||
data: { | ||
validationError, | ||
isLoading: isFetching || (!isFetching && !fetchError && !llmError && !reply.text.trim()), | ||
fetchError, | ||
llmError, | ||
reply, | ||
shouldDisplayReply: Boolean(reply?.hasStarted || reply?.hasFinished), | ||
shouldDisplayFollowUpForm: !fetchError && !llmError && Boolean(reply?.hasFinished), | ||
}, | ||
actions: { | ||
retry, | ||
submitFollowupQuestion(question: string) { | ||
reply.askFollowupQuestion(question); | ||
}, | ||
}, | ||
}; | ||
}; | ||
|
||
static Component = ({ | ||
model, | ||
isDiff, | ||
fetchParams, | ||
onClose, | ||
}: SceneComponentProps<SceneAiPanel> & { | ||
isDiff: boolean; | ||
fetchParams: FetchParams; | ||
onClose: () => void; | ||
}) => { | ||
const styles = useStyles2(getStyles); | ||
const { data, actions } = model.useSceneAiPanel(isDiff, fetchParams); | ||
|
||
return ( | ||
<Panel | ||
className={styles.sidePanel} | ||
title="Flame graph analysis" | ||
isLoading={data.isLoading} | ||
headerActions={ | ||
<IconButton | ||
title="Close panel" | ||
name="times-circle" | ||
variant="secondary" | ||
aria-label="close" | ||
onClick={onClose} | ||
/> | ||
} | ||
dataTestId="ai-panel" | ||
> | ||
<div className={styles.content}> | ||
{data.validationError && ( | ||
<InlineBanner severity="error" title="Validation error!" errors={[data.validationError]} /> | ||
)} | ||
|
||
{data.fetchError && ( | ||
<InlineBanner | ||
severity="error" | ||
title="Error while loading profile data!" | ||
message="Sorry for any inconvenience, please try again later." | ||
errors={[data.fetchError]} | ||
/> | ||
)} | ||
|
||
{data.shouldDisplayReply && <AiReply reply={data.reply} />} | ||
|
||
{data.isLoading && ( | ||
<> | ||
<Spinner inline /> | ||
Analyzing... | ||
</> | ||
)} | ||
|
||
{data.llmError && ( | ||
<Alert title="An error occured while generating content using OpenAI!" severity="warning"> | ||
<div> | ||
<div> | ||
<p>{data.llmError.message}</p> | ||
<p> | ||
Sorry for any inconvenience, please retry or if the problem persists, contact your organization | ||
admin. | ||
</p> | ||
</div> | ||
</div> | ||
<Button className={styles.retryButton} variant="secondary" fill="outline" onClick={() => actions.retry()}> | ||
Retry | ||
</Button> | ||
</Alert> | ||
)} | ||
|
||
{data.shouldDisplayFollowUpForm && <FollowUpForm onSubmit={actions.submitFollowupQuestion} />} | ||
</div> | ||
</Panel> | ||
); | ||
}; | ||
} | ||
|
||
// Styles for the AI side panel.
// NOTE(review): `title` appears unused by SceneAiPanel.Component — confirm
// there are no other consumers before removing it.
const getStyles = (theme: GrafanaTheme2) => ({
  // Panel takes the right half of the split view; the 8px margin plus the
  // calc() keeps both halves at an exact 50/50 split.
  sidePanel: css`
    flex: 1 0 50%;
    margin-left: 8px;
    max-width: calc(50% - 4px);
  `,
  title: css`
    margin: -4px 0 4px 0;
  `,
  content: css`
    padding: ${theme.spacing(1)};
  `,
  // Pushes the "Retry" button to the right edge of the Alert body.
  retryButton: css`
    float: right;
  `,
});
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,53 @@ | ||
import { css } from '@emotion/css'; | ||
import { IconName } from '@grafana/data'; | ||
import { Button, useStyles2 } from '@grafana/ui'; | ||
import { reportInteraction } from '@shared/domain/reportInteraction'; | ||
import React, { ReactNode } from 'react'; | ||
|
||
import { useFetchLlmPluginStatus } from './infrastructure/useFetchLlmPluginStatus'; | ||
|
||
type AIButtonProps = {
  // Button label content.
  children: ReactNode;
  // Click handler; invoked after the interaction has been reported.
  onClick: (event: React.MouseEvent<HTMLButtonElement>) => void;
  // Extra disabled condition; the button is also disabled whenever the
  // Grafana LLM plugin is not enabled.
  disabled?: boolean;
  // Analytics event name forwarded to reportInteraction on every click.
  interactionName: string;
};
export function AIButton({ children, onClick, disabled, interactionName }: AIButtonProps) { | ||
const styles = useStyles2(getStyles); | ||
const { isEnabled, error, isFetching } = useFetchLlmPluginStatus(); | ||
|
||
let icon: IconName = 'ai'; | ||
let title = ''; | ||
|
||
if (error) { | ||
icon = 'shield-exclamation'; | ||
title = 'Grafana LLM plugin missing or not configured!'; | ||
} else if (isFetching) { | ||
icon = 'fa fa-spinner'; | ||
title = 'Checking the status of the Grafana LLM plugin...'; | ||
} | ||
|
||
return ( | ||
<Button | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I agree with this move. |
||
className={styles.aiButton} | ||
size="md" | ||
fill="text" | ||
icon={icon} | ||
title={isEnabled ? 'Ask FlameGrot AI' : title} | ||
disabled={!isEnabled || disabled} | ||
onClick={(event: React.MouseEvent<HTMLButtonElement>) => { | ||
reportInteraction(interactionName); | ||
onClick(event); | ||
}} | ||
> | ||
{children} | ||
</Button> | ||
); | ||
} | ||
|
||
// Tightens the button's horizontal padding so it sits flush with its
// toolbar neighbours.
const getStyles = () => ({
  aiButton: css`
    padding: 0 4px;
  `,
});
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
import { llms } from '@grafana/experimental'; | ||
import { useQuery } from '@tanstack/react-query'; | ||
|
||
export function useFetchLlmPluginStatus() { | ||
const { data, isFetching, error } = useQuery({ | ||
queryKey: ['llm'], | ||
queryFn: () => llms.openai.enabled(), | ||
}); | ||
|
||
if (error) { | ||
console.error('Error while checking the status of the Grafana LLM plugin!'); | ||
console.error(error); | ||
} | ||
|
||
return { isEnabled: Boolean(data), isFetching, error }; | ||
} |
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,113 @@ | ||
import { css } from '@emotion/css'; | ||
import { useStyles2 } from '@grafana/ui'; | ||
import Markdown from 'markdown-to-jsx'; | ||
import React, { ReactNode } from 'react'; | ||
|
||
import { OpenAiReply } from '../domain/useOpenAiChatCompletions'; | ||
|
||
// yeah, I know... | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. 😁 |
||
const setNativeValue = (element: Element, value: string) => { | ||
const valueSetter = Object!.getOwnPropertyDescriptor(element, 'value')!.set; | ||
const prototypeValueSetter = Object!.getOwnPropertyDescriptor(Object.getPrototypeOf(element), 'value')!.set; | ||
|
||
if (valueSetter && valueSetter !== prototypeValueSetter) { | ||
prototypeValueSetter!.call(element, value); | ||
} else { | ||
valueSetter!.call(element, value); | ||
} | ||
}; | ||
|
||
const onClickSearchTerm = (event: any) => { | ||
const searchInputElement = document.querySelector('[placeholder^="Search"]'); | ||
|
||
if (searchInputElement === null) { | ||
console.error('Cannot find search input element!'); | ||
return; | ||
} | ||
|
||
const value = event.target.textContent.trim(); | ||
|
||
setNativeValue(searchInputElement, value); | ||
|
||
searchInputElement.dispatchEvent(new Event('input', { bubbles: true })); | ||
}; | ||
|
||
/**
 * Renders inline code from the AI reply as a clickable flame-graph search
 * trigger. Multi-line code blocks are left as plain <code>, since only
 * single-line terms are plausible node names worth searching for.
 */
const SearchTerm = ({ children }: { children: ReactNode }) => {
  const styles = useStyles2(getStyles);

  const isMultilineBlock = typeof children === 'string' && children.includes('\n');

  return isMultilineBlock ? (
    <code>{children}</code>
  ) : (
    <code className={styles.searchLink} title="Search for this node" onClick={onClickSearchTerm}>
      {children}
    </code>
  );
};
// markdown-to-jsx options: route every rendered <code> element through
// SearchTerm so single-line code spans become clickable search links.
const MARKDOWN_OPTIONS = {
  overrides: {
    code: {
      component: SearchTerm,
    },
  },
};

type AiReplyProps = {
  // Streaming reply state produced by useOpenAiChatCompletions.
  reply: OpenAiReply['reply'];
};
export function AiReply({ reply }: AiReplyProps) { | ||
const styles = useStyles2(getStyles); | ||
|
||
return ( | ||
<div className={styles.container}> | ||
{reply?.messages | ||
?.filter((message) => message.role !== 'system') | ||
.map((message) => ( | ||
<> | ||
<div className={styles.reply}> | ||
<Markdown options={MARKDOWN_OPTIONS}>{message.content}</Markdown> | ||
</div> | ||
<hr /> | ||
</> | ||
))} | ||
|
||
<div className={styles.reply}> | ||
<Markdown options={MARKDOWN_OPTIONS}>{reply.text}</Markdown> | ||
</div> | ||
</div> | ||
); | ||
} | ||
|
||
// Styles for the AI reply and its clickable search-term code spans.
const getStyles = () => ({
  container: css`
    width: 100%;
    height: 100%;
  `,
  reply: css`
    font-size: 13px;

    & ol,
    & ul {
      margin: 0 0 16px 24px;
    }
  `,
  // NOTE(review): hard-coded orange rather than a theme color, and
  // `rgb(255, 136, 51, 0.8)` mixes legacy comma syntax with an alpha channel
  // (browsers accept it, but `rgba()` would be conventional) — confirm.
  searchLink: css`
    color: rgb(255, 136, 51);
    border: 1px solid transparent;
    padding: 2px 4px;
    cursor: pointer;
    font-size: 13px;

    &:hover,
    &:focus,
    &:active {
      box-sizing: border-box;
      border: 1px solid rgb(255, 136, 51, 0.8);
      border-radius: 4px;
    }
  `,
});
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This component, as well as other files have been copied from the "shared" folder and customized for the Scenes app. Once we remove the code for the legacy Comparison pages (which is still present), we'll clean up everything.