// Copyright 2024 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import '../../ui/legacy/legacy.js';
import * as Common from '../../core/common/common.js';
import * as Host from '../../core/host/host.js';
import * as i18n from '../../core/i18n/i18n.js';
import type * as Platform from '../../core/platform/platform.js';
import * as Root from '../../core/root/root.js';
import * as SDK from '../../core/sdk/sdk.js';
import * as Protocol from '../../generated/protocol.js';
import * as AiAssistanceModel from '../../models/ai_assistance/ai_assistance.js';
import * as Workspace from '../../models/workspace/workspace.js';
import * as Buttons from '../../ui/components/buttons/buttons.js';
import * as Snackbars from '../../ui/components/snackbars/snackbars.js';
import * as UI from '../../ui/legacy/legacy.js';
import * as Lit from '../../ui/lit/lit.js';
import * as VisualLogging from '../../ui/visual_logging/visual_logging.js';
import * as ElementsPanel from '../elements/elements.js';
import * as NetworkForward from '../network/forward/forward.js';
import * as NetworkPanel from '../network/network.js';
import * as SourcesPanel from '../sources/sources.js';
import * as TimelinePanel from '../timeline/timeline.js';
import * as TimelineUtils from '../timeline/utils/utils.js';
import aiAssistancePanelStyles from './aiAssistancePanel.css.js';
import {
type ChatMessage,
ChatMessageEntity,
ChatView,
type ImageInputData,
type ModelChatMessage,
type Props as ChatViewProps,
State as ChatViewState,
type Step
} from './components/ChatView.js';
import {ExploreWidget} from './components/ExploreWidget.js';
import {isAiAssistancePatchingEnabled} from './PatchWidget.js';
const {html} = Lit;
const AI_ASSISTANCE_SEND_FEEDBACK = 'https://crbug.com/364805393' as Platform.DevToolsPath.UrlString;
const AI_ASSISTANCE_HELP = 'https://developer.chrome.com/docs/devtools/ai-assistance';
const SCREENSHOT_QUALITY = 100;
const SHOW_LOADING_STATE_TIMEOUT = 100;
const JPEG_MIME_TYPE = 'image/jpeg';
const UIStrings = {
/**
*@description AI assistance UI text for creating a new chat.
*/
newChat: 'New chat',
/**
*@description AI assistance UI tooltip text for the help button.
*/
help: 'Help',
/**
*@description AI assistant UI tooltip text for the settings button (gear icon).
*/
settings: 'Settings',
/**
*@description AI assistant UI tooltip text for sending feedback.
*/
sendFeedback: 'Send feedback',
/**
*@description Announcement text for screen readers when a new chat is created.
*/
newChatCreated: 'New chat created',
/**
*@description Announcement text for screen readers when the chat is deleted.
*/
chatDeleted: 'Chat deleted',
/**
*@description AI assistance UI text for selecting a history entry.
*/
history: 'History',
/**
*@description AI assistance UI text for deleting the current chat session from local history.
*/
deleteChat: 'Delete local chat',
/**
*@description AI assistance UI text that deletes all local history entries.
*/
clearChatHistory: 'Clear local chats',
/**
*@description AI assistance UI text explaining that the user has no past conversations.
*/
noPastConversations: 'No past conversations',
/**
* @description Placeholder text for an inactive text field. When active, it's used for the user's input to the GenAI assistance.
*/
followTheSteps: 'Follow the steps above to ask a question',
/**
*@description Disclaimer text right after the chat input.
*/
inputDisclaimerForEmptyState: 'This is an experimental AI feature and won\'t always get it right.',
/**
*@description Notification shown to the user whenever DevTools receives an external request.
*/
externalRequestReceived: '`DevTools` received an external request',
} as const;
/*
* Strings that don't need to be translated at this time.
*/
const UIStringsNotTranslate = {
/**
*@description Announcement text for screen readers when the conversation starts.
*/
answerLoading: 'Answer loading',
/**
*@description Announcement text for screen readers when the answer comes.
*/
answerReady: 'Answer ready',
/**
* @description Placeholder text for the input shown when the conversation is blocked because a cross-origin context was selected.
*/
crossOriginError: 'To talk about data from another origin, start a new chat',
/**
*@description Placeholder text for the chat UI input.
*/
inputPlaceholderForStyling: 'Ask a question about the selected element',
/**
*@description Placeholder text for the chat UI input.
*/
inputPlaceholderForNetwork: 'Ask a question about the selected network request',
/**
*@description Placeholder text for the chat UI input.
*/
inputPlaceholderForFile: 'Ask a question about the selected file',
/**
*@description Placeholder text for the chat UI input.
*/
inputPlaceholderForPerformance: 'Ask a question about the selected item and its call tree',
/**
*@description Placeholder text for the chat UI input.
*/
inputPlaceholderForPerformanceWithNoRecording: 'Record a performance trace and select an item to ask a question',
/**
*@description Placeholder text for the chat UI input when there is no context selected.
*/
inputPlaceholderForStylingNoContext: 'Select an element to ask a question',
/**
*@description Placeholder text for the chat UI input when there is no context selected.
*/
inputPlaceholderForNetworkNoContext: 'Select a network request to ask a question',
/**
*@description Placeholder text for the chat UI input when there is no context selected.
*/
inputPlaceholderForFileNoContext: 'Select a file to ask a question',
/**
*@description Placeholder text for the chat UI input when there is no context selected.
*/
inputPlaceholderForPerformanceNoContext: 'Select an item to ask a question',
/**
*@description Placeholder text for the chat UI input.
*/
inputPlaceholderForPerformanceInsights: 'Ask a question about the selected performance insight',
/**
*@description Placeholder text for the chat UI input.
*/
inputPlaceholderForPerformanceInsightsNoContext: 'Select a performance insight to ask a question',
/**
*@description Disclaimer text right after the chat input.
*/
inputDisclaimerForStyling:
'Chat messages and any data the inspected page can access via Web APIs are sent to Google and may be seen by human reviewers to improve this feature. This is an experimental AI feature and won’t always get it right.',
/**
*@description Disclaimer text right after the chat input.
*/
inputDisclaimerForStylingEnterpriseNoLogging:
'Chat messages and any data the inspected page can access via Web APIs are sent to Google. The content you submit and that is generated by this feature will not be used to improve Google’s AI models. This is an experimental AI feature and won’t always get it right.',
/**
*@description Disclaimer text right after the chat input.
*/
inputDisclaimerForNetwork:
'Chat messages and the selected network request are sent to Google and may be seen by human reviewers to improve this feature. This is an experimental AI feature and won’t always get it right.',
/**
*@description Disclaimer text right after the chat input.
*/
inputDisclaimerForNetworkEnterpriseNoLogging:
'Chat messages and the selected network request are sent to Google. The content you submit and that is generated by this feature will not be used to improve Google’s AI models. This is an experimental AI feature and won’t always get it right.',
/**
*@description Disclaimer text right after the chat input.
*/
inputDisclaimerForFile:
'Chat messages and the selected file are sent to Google and may be seen by human reviewers to improve this feature. This is an experimental AI feature and won\'t always get it right.',
/**
*@description Disclaimer text right after the chat input.
*/
inputDisclaimerForFileEnterpriseNoLogging:
'Chat messages and the selected file are sent to Google. The content you submit and that is generated by this feature will not be used to improve Google’s AI models. This is an experimental AI feature and won’t always get it right.',
/**
*@description Disclaimer text right after the chat input.
*/
inputDisclaimerForPerformance:
'Chat messages and trace data from your performance trace are sent to Google and may be seen by human reviewers to improve this feature. This is an experimental AI feature and won\'t always get it right.',
/**
*@description Disclaimer text right after the chat input.
*/
inputDisclaimerForPerformanceEnterpriseNoLogging:
'Chat messages and data from your performance trace are sent to Google. The content you submit and that is generated by this feature will not be used to improve Google’s AI models. This is an experimental AI feature and won’t always get it right.',
/**
* @description Message displayed in a toast when taking a screenshot of the page fails.
*/
screenshotFailureMessage: 'Failed to take a screenshot. Please try again.',
/**
* @description Message displayed in a toast when uploading an image file as input fails.
*/
uploadImageFailureMessage: 'Failed to upload image. Please try again.',
/**
* @description Error message shown when AI assistance is not enabled in DevTools settings.
*/
enableInSettings: 'For AI features to be available, you need to enable AI assistance in DevTools settings.',
} as const;
const str_ = i18n.i18n.registerUIStrings('panels/ai_assistance/AiAssistancePanel.ts', UIStrings);
const i18nString = i18n.i18n.getLocalizedString.bind(undefined, str_);
const lockedString = i18n.i18n.lockedString;
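// Filters the selected node down to element nodes; text nodes, comments, etc. yield null.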
function selectedElementFilter(maybeNode: SDK.DOMModel.DOMNode|null): SDK.DOMModel.DOMNode|null {
if (maybeNode) {
return maybeNode.nodeType() === Node.ELEMENT_NODE ? maybeNode : null;
}
return null;
}
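// Returns suggestions provided by the selected context when available,
// otherwise a default set for the given conversation type.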
async function getEmptyStateSuggestions(
context: AiAssistanceModel.ConversationContext<unknown>|null,
conversationType?: AiAssistanceModel.ConversationType): Promise<AiAssistanceModel.ConversationSuggestion[]> {
if (context) {
const specialSuggestions = await context.getSuggestions();
if (specialSuggestions) {
return specialSuggestions;
}
}
if (!conversationType) {
return [];
}
switch (conversationType) {
case AiAssistanceModel.ConversationType.STYLING:
return [
{title: 'What can you help me with?', jslogContext: 'styling-default'},
{title: 'Why isn’t this element visible?', jslogContext: 'styling-default'},
{title: 'How do I center this element?', jslogContext: 'styling-default'},
];
case AiAssistanceModel.ConversationType.FILE:
return [
{title: 'What does this script do?', jslogContext: 'file-default'},
{title: 'Is the script optimized for performance?', jslogContext: 'file-default'},
{title: 'Does the script handle user input safely?', jslogContext: 'file-default'},
];
case AiAssistanceModel.ConversationType.NETWORK:
return [
{title: 'Why is this network request taking so long?', jslogContext: 'network-default'},
{title: 'Are there any security headers present?', jslogContext: 'network-default'},
{title: 'Why is the request failing?', jslogContext: 'network-default'},
];
case AiAssistanceModel.ConversationType.PERFORMANCE:
return [
{title: 'What\'s the purpose of this work?', jslogContext: 'performance-default'},
{title: 'Where is time being spent?', jslogContext: 'performance-default'},
{title: 'How can I optimize this?', jslogContext: 'performance-default'},
];
case AiAssistanceModel.ConversationType.PERFORMANCE_INSIGHT:
return [
{title: 'Help me optimize my page load performance', jslogContext: 'performance-insights-default'},
];
}
}
interface ToolbarViewInput {
onNewChatClick: () => void;
populateHistoryMenu: (contextMenu: UI.ContextMenu.ContextMenu) => void;
onDeleteClick: () => void;
onHelpClick: () => void;
onSettingsClick: () => void;
showDeleteHistoryAction: boolean;
showChatActions: boolean;
}
export type ViewInput = ChatViewProps&ToolbarViewInput;
export interface PanelViewOutput {
chatView?: ChatView;
}
type View = (input: ViewInput, output: PanelViewOutput, target: HTMLElement) => void;
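// Renders the panel toolbar: new chat, history, and delete-chat actions on the left;
// send feedback, help, and settings on the right.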
function toolbarView(input: ToolbarViewInput): Lit.LitTemplate {
// clang-format off
return html`
<div class="toolbar-container" role="toolbar" .jslogContext=${VisualLogging.toolbar()}>
<devtools-toolbar class="freestyler-left-toolbar" role="presentation">
${input.showChatActions
? html`<devtools-button
title=${i18nString(UIStrings.newChat)}
aria-label=${i18nString(UIStrings.newChat)}
.iconName=${'plus'}
.jslogContext=${'freestyler.new-chat'}
.variant=${Buttons.Button.Variant.TOOLBAR}
@click=${input.onNewChatClick}></devtools-button>
<div class="toolbar-divider"></div>
<devtools-menu-button
title=${i18nString(UIStrings.history)}
aria-label=${i18nString(UIStrings.history)}
.iconName=${'history'}
.jslogContext=${'freestyler.history'}
.populateMenuCall=${input.populateHistoryMenu}></devtools-menu-button>`
: Lit.nothing}
${input.showDeleteHistoryAction
? html`<devtools-button
title=${i18nString(UIStrings.deleteChat)}
aria-label=${i18nString(UIStrings.deleteChat)}
.iconName=${'bin'}
.jslogContext=${'freestyler.delete'}
.variant=${Buttons.Button.Variant.TOOLBAR}
@click=${input.onDeleteClick}></devtools-button>`
: Lit.nothing}
</devtools-toolbar>
<devtools-toolbar class="freestyler-right-toolbar" role="presentation">
<x-link
class="toolbar-feedback-link devtools-link"
title=${UIStrings.sendFeedback}
href=${AI_ASSISTANCE_SEND_FEEDBACK}
jslog=${VisualLogging.link().track({click: true, keydown:'Enter|Space'}).context('freestyler.send-feedback')}
>${UIStrings.sendFeedback}</x-link>
<div class="toolbar-divider"></div>
<devtools-button
title=${i18nString(UIStrings.help)}
aria-label=${i18nString(UIStrings.help)}
.iconName=${'help'}
.jslogContext=${'freestyler.help'}
.variant=${Buttons.Button.Variant.TOOLBAR}
@click=${input.onHelpClick}></devtools-button>
<devtools-button
title=${i18nString(UIStrings.settings)}
aria-label=${i18nString(UIStrings.settings)}
.iconName=${'gear'}
.jslogContext=${'freestyler.settings'}
.variant=${Buttons.Button.Variant.TOOLBAR}
@click=${input.onSettingsClick}></devtools-button>
</devtools-toolbar>
</div>
`;
// clang-format on
}
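// Default view function: renders the toolbar plus either the chat view or the
// explore widget, depending on the chat UI state.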
function defaultView(input: ViewInput, output: PanelViewOutput, target: HTMLElement): void {
// clang-format off
Lit.render(
html`
${toolbarView(input)}
<div class="ai-assistance-view-container">
${input.state !== ChatViewState.EXPLORE_VIEW
? html` <devtools-ai-chat-view
.props=${input}
${Lit.Directives.ref((el: Element | undefined) => {
if (!el || !(el instanceof ChatView)) {
return;
}
output.chatView = el;
})}
></devtools-ai-chat-view>`
: html`<devtools-widget
class="explore"
.widgetConfig=${UI.Widget.widgetConfig(ExploreWidget)}
></devtools-widget>`}
</div>
`,
target,
{ host: input },
);
// clang-format on
}
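// Helpers that wrap nullable DevTools objects into their AI assistance context counterparts.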
function createNodeContext(node: SDK.DOMModel.DOMNode|null): AiAssistanceModel.NodeContext|null {
if (!node) {
return null;
}
return new AiAssistanceModel.NodeContext(node);
}
function createFileContext(file: Workspace.UISourceCode.UISourceCode|null): AiAssistanceModel.FileContext|null {
if (!file) {
return null;
}
return new AiAssistanceModel.FileContext(file);
}
function createRequestContext(request: SDK.NetworkRequest.NetworkRequest|null): AiAssistanceModel.RequestContext|null {
if (!request) {
return null;
}
return new AiAssistanceModel.RequestContext(request);
}
function createCallTreeContext(callTree: TimelineUtils.AICallTree.AICallTree|null): AiAssistanceModel.CallTreeContext|
null {
if (!callTree) {
return null;
}
return new AiAssistanceModel.CallTreeContext(callTree);
}
function createPerfInsightContext(insight: TimelineUtils.InsightAIContext.ActiveInsight|null):
AiAssistanceModel.InsightContext|null {
if (!insight) {
return null;
}
return new AiAssistanceModel.InsightContext(insight);
}
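// Maps an agent instance to its conversation type; throws if the agent class is unknown.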
function agentToConversationType(agent: AiAssistanceModel.AiAgent<unknown>): AiAssistanceModel.ConversationType {
if (agent instanceof AiAssistanceModel.StylingAgent) {
return AiAssistanceModel.ConversationType.STYLING;
}
if (agent instanceof AiAssistanceModel.NetworkAgent) {
return AiAssistanceModel.ConversationType.NETWORK;
}
if (agent instanceof AiAssistanceModel.FileAgent) {
return AiAssistanceModel.ConversationType.FILE;
}
if (agent instanceof AiAssistanceModel.PerformanceAgent) {
return AiAssistanceModel.ConversationType.PERFORMANCE;
}
if (agent instanceof AiAssistanceModel.PerformanceInsightsAgent) {
return AiAssistanceModel.ConversationType.PERFORMANCE_INSIGHT;
}
throw new Error('Provided agent does not have a corresponding conversation type');
}
// TODO(crbug.com/416134018): Add piercing of shadow roots and handling of child frames
async function inspectElementBySelector(selector: string): Promise<void> {
const primaryPageTarget = SDK.TargetManager.TargetManager.instance().primaryPageTarget();
const runtimeModel = primaryPageTarget?.model(SDK.RuntimeModel.RuntimeModel);
const executionContext = runtimeModel?.defaultExecutionContext();
if (!executionContext) {
throw new Error('Could not find execution context for executing code');
}
// `inspect()` is not available in `callFunctionOn()`, but it is in `evaluate()`.
// We therefore get a reference to `inspect()` via `evaluate()` and then pass
// this reference as an argument to `callFunctionOn()`.
const inspectReference = await executionContext.evaluate(
{
expression: 'window.inspect',
includeCommandLineAPI: true,
returnByValue: false,
},
/* userGesture */ false,
/* awaitPromise */ false,
);
if ('error' in inspectReference || inspectReference.exceptionDetails) {
throw new Error('Cannot find \'window.inspect\'');
}
const inspectResult = await executionContext.callFunctionOn({
functionDeclaration: 'async function (inspect, selector) { return inspect(document.querySelector(selector)); }',
arguments: [{objectId: inspectReference.object.objectId}, {value: selector}],
userGesture: false,
awaitPromise: true,
returnByValue: false,
});
if ('error' in inspectResult || inspectResult.exceptionDetails ||
SDK.RemoteObject.RemoteObject.isNullOrUndefined(inspectResult.object)) {
throw new Error(`'document.querySelector()' could not find matching element for '${selector}' selector`);
}
}
let panelInstance: AiAssistancePanel;
export class AiAssistancePanel extends UI.Panel.Panel {
static panelName = 'freestyler';
// Node.js debugging does not have the Elements panel, so this action might not exist.
#toggleSearchElementAction?: UI.ActionRegistration.Action;
#aidaClient: Host.AidaClient.AidaClient;
#viewOutput: PanelViewOutput = {};
#serverSideLoggingEnabled = isAiAssistanceServerSideLoggingEnabled();
#aiAssistanceEnabledSetting: Common.Settings.Setting<boolean>|undefined;
#changeManager = new AiAssistanceModel.ChangeManager();
#mutex = new Common.Mutex.Mutex();
#conversationAgent?: AiAssistanceModel.AiAgent<unknown>;
#conversation?: AiAssistanceModel.Conversation;
#historicalConversations: AiAssistanceModel.Conversation[] = [];
#selectedFile: AiAssistanceModel.FileContext|null = null;
#selectedElement: AiAssistanceModel.NodeContext|null = null;
#selectedCallTree: AiAssistanceModel.CallTreeContext|null = null;
#selectedPerformanceInsight: AiAssistanceModel.InsightContext|null = null;
#selectedRequest: AiAssistanceModel.RequestContext|null = null;
// Messages displayed in the `ChatView` component.
#messages: ChatMessage[] = [];
// Indicates whether the new conversation context is blocked due to cross-origin restrictions.
// This happens when the conversation's context has a different
// origin than the selected context.
#blockedByCrossOrigin = false;
// Whether the UI should show loading or not.
#isLoading = false;
// Selected conversation context. The reason we keep this as a
// state field rather than using `#getConversationContext` is that
// there is a case where the context differs from the selectedElement (or other selected context type).
// Specifically, it allows restoring the previous context when a new selection is cross-origin.
// See `#onContextSelectionChanged` for details.
#selectedContext: AiAssistanceModel.ConversationContext<unknown>|null = null;
// Stores the availability status of the `AidaClient` and the reason for unavailability, if any.
#aidaAvailability: Host.AidaClient.AidaAccessPreconditions;
// Info about the currently logged-in user.
#userInfo: {
accountImage?: string,
accountFullName?: string,
};
#imageInput?: ImageInputData;
// Used to disable the send button when there is no text input.
#isTextInputEmpty = true;
#timelinePanelInstance: TimelinePanel.TimelinePanel.TimelinePanel|null = null;
constructor(private view: View = defaultView, {aidaClient, aidaAvailability, syncInfo}: {
aidaClient: Host.AidaClient.AidaClient,
aidaAvailability: Host.AidaClient.AidaAccessPreconditions,
syncInfo: Host.InspectorFrontendHostAPI.SyncInformation,
}) {
super(AiAssistancePanel.panelName);
this.registerRequiredCSS(aiAssistancePanelStyles);
this.#aiAssistanceEnabledSetting = this.#getAiAssistanceEnabledSetting();
this.#aidaClient = aidaClient;
this.#aidaAvailability = aidaAvailability;
this.#userInfo = {
accountImage: syncInfo.accountImage,
accountFullName: syncInfo.accountFullName,
};
this.#historicalConversations = AiAssistanceModel.AiHistoryStorage.instance().getHistory().map(item => {
return new AiAssistanceModel.Conversation(item.type, item.history, item.id, true, item.isExternal);
});
if (UI.ActionRegistry.ActionRegistry.instance().hasAction('elements.toggle-element-search')) {
this.#toggleSearchElementAction =
UI.ActionRegistry.ActionRegistry.instance().getAction('elements.toggle-element-search');
}
}
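// Derives the chat UI state from Aida availability, the consent setting,
// age restrictions, and whether a conversation is active.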
#getChatUiState(): ChatViewState {
const blockedByAge = Root.Runtime.hostConfig.aidaAvailability?.blockedByAge === true;
// Special case: unavailability is handled downstream, so keep showing the chat view.
if (this.#aidaAvailability !== Host.AidaClient.AidaAccessPreconditions.AVAILABLE) {
return ChatViewState.CHAT_VIEW;
}
if (!this.#aiAssistanceEnabledSetting?.getIfNotDisabled() || blockedByAge) {
return ChatViewState.CONSENT_VIEW;
}
if (this.#conversation?.type) {
return ChatViewState.CHAT_VIEW;
}
return ChatViewState.EXPLORE_VIEW;
}
#getAiAssistanceEnabledSetting(): Common.Settings.Setting<boolean>|undefined {
try {
return Common.Settings.moduleSetting('ai-assistance-enabled') as Common.Settings.Setting<boolean>;
} catch {
return;
}
}
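// Instantiates the agent matching the given conversation type; styling uses the
// function-calling variant when that feature is enabled.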
#createAgent(conversationType: AiAssistanceModel.ConversationType): AiAssistanceModel.AiAgent<unknown> {
const options = {
aidaClient: this.#aidaClient,
serverSideLoggingEnabled: this.#serverSideLoggingEnabled,
};
let agent: AiAssistanceModel.AiAgent<unknown>;
switch (conversationType) {
case AiAssistanceModel.ConversationType.STYLING: {
agent = new AiAssistanceModel.StylingAgent({
...options,
changeManager: this.#changeManager,
});
if (isAiAssistanceStylingWithFunctionCallingEnabled()) {
agent = new AiAssistanceModel.StylingAgentWithFunctionCalling({
...options,
changeManager: this.#changeManager,
});
}
break;
}
case AiAssistanceModel.ConversationType.NETWORK: {
agent = new AiAssistanceModel.NetworkAgent(options);
break;
}
case AiAssistanceModel.ConversationType.FILE: {
agent = new AiAssistanceModel.FileAgent(options);
break;
}
case AiAssistanceModel.ConversationType.PERFORMANCE: {
agent = new AiAssistanceModel.PerformanceAgent(options);
break;
}
case AiAssistanceModel.ConversationType.PERFORMANCE_INSIGHT: {
agent = new AiAssistanceModel.PerformanceInsightsAgent(options);
break;
}
}
return agent;
}
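/**
 * Returns the panel singleton, creating it on first use (or when `forceNew`
 * is set) after resolving Aida availability and sync information.
 */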
static async instance(opts: {
forceNew: boolean|null,
}|undefined = {forceNew: null}): Promise<AiAssistancePanel> {
const {forceNew} = opts;
if (!panelInstance || forceNew) {
const aidaClient = new Host.AidaClient.AidaClient();
const syncInfoPromise = new Promise<Host.InspectorFrontendHostAPI.SyncInformation>(
resolve => Host.InspectorFrontendHost.InspectorFrontendHostInstance.getSyncInformation(resolve));
const [aidaAvailability, syncInfo] =
await Promise.all([Host.AidaClient.AidaClient.checkAccessPreconditions(), syncInfoPromise]);
panelInstance = new AiAssistancePanel(defaultView, {aidaClient, aidaAvailability, syncInfo});
}
return panelInstance;
}
/**
* Called when the TimelinePanel instance changes. We use this to listen to
* whether the user is viewing a trace or not, and update the
* placeholder text in the panel accordingly. We do this because if the user
* has an active trace, we show different text than if they are viewing
* the performance panel but have no trace imported.
*/
#bindTimelineTraceListener(): void {
const timelinePanel = UI.Context.Context.instance().flavor(TimelinePanel.TimelinePanel.TimelinePanel);
// Avoid binding multiple times.
if (timelinePanel === this.#timelinePanelInstance) {
return;
}
// Ensure we clear up any listener from the old TimelinePanel instance.
this.#timelinePanelInstance?.removeEventListener(
TimelinePanel.TimelinePanel.Events.IS_VIEWING_TRACE, this.requestUpdate, this);
this.#timelinePanelInstance = timelinePanel;
if (this.#timelinePanelInstance) {
this.#timelinePanelInstance.addEventListener(
TimelinePanel.TimelinePanel.Events.IS_VIEWING_TRACE, this.requestUpdate, this);
}
}
// We select the default agent based on the open panels if
// there isn't any active conversation.
#selectDefaultAgentIfNeeded(): void {
// If there already is an agent and if it is not empty,
// we don't automatically change the agent. In addition to this,
// we don't change the current agent when there is a message in flight.
if ((this.#conversationAgent && this.#conversation && !this.#conversation.isEmpty) || this.#isLoading) {
return;
}
const {hostConfig} = Root.Runtime;
const isElementsPanelVisible =
Boolean(UI.Context.Context.instance().flavor(ElementsPanel.ElementsPanel.ElementsPanel));
const isNetworkPanelVisible = Boolean(UI.Context.Context.instance().flavor(NetworkPanel.NetworkPanel.NetworkPanel));
const isSourcesPanelVisible = Boolean(UI.Context.Context.instance().flavor(SourcesPanel.SourcesPanel.SourcesPanel));
const isPerformancePanelVisible =
Boolean(UI.Context.Context.instance().flavor(TimelinePanel.TimelinePanel.TimelinePanel));
// Check if the user has an insight expanded in the performance panel sidebar.
// If they have, we default to the Insights agent; otherwise we fall back to
// the regular Performance agent.
// Note that we do not listen to this flavor changing; this code is here to
// ensure that by default we do not pick the Insights agent if the user has
// just imported a trace and not done anything else. It doesn't make sense
// to select the Insights AI agent in that case.
const userHasExpandedPerfInsight =
Boolean(UI.Context.Context.instance().flavor(TimelinePanel.TimelinePanel.SelectedInsight));
let targetConversationType: AiAssistanceModel.ConversationType|undefined = undefined;
if (isElementsPanelVisible && hostConfig.devToolsFreestyler?.enabled) {
targetConversationType = AiAssistanceModel.ConversationType.STYLING;
} else if (isNetworkPanelVisible && hostConfig.devToolsAiAssistanceNetworkAgent?.enabled) {
targetConversationType = AiAssistanceModel.ConversationType.NETWORK;
} else if (isSourcesPanelVisible && hostConfig.devToolsAiAssistanceFileAgent?.enabled) {
targetConversationType = AiAssistanceModel.ConversationType.FILE;
} else if (
isPerformancePanelVisible && hostConfig.devToolsAiAssistancePerformanceAgent?.enabled &&
hostConfig.devToolsAiAssistancePerformanceAgent?.insightsEnabled && userHasExpandedPerfInsight) {
targetConversationType = AiAssistanceModel.ConversationType.PERFORMANCE_INSIGHT;
} else if (isPerformancePanelVisible && hostConfig.devToolsAiAssistancePerformanceAgent?.enabled) {
targetConversationType = AiAssistanceModel.ConversationType.PERFORMANCE;
}
if (this.#conversation?.type === targetConversationType) {
// The check above ensures that any active agent's conversation is empty,
// so we can simply reuse it.
return;
}
const agent = targetConversationType ? this.#createAgent(targetConversationType) : undefined;
this.#updateConversationState(agent);
}
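// Central state update: a new agent archives the current conversation and starts a
// fresh one tracked in history; a conversation passed without an agent is shown as-is
// (e.g. when restoring from history).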
#updateConversationState(input?: AiAssistanceModel.AiAgent<unknown>|AiAssistanceModel.Conversation): void {
const agent = input instanceof AiAssistanceModel.AiAgent ? input : undefined;
const conversation = input instanceof AiAssistanceModel.Conversation ? input : undefined;
if (this.#conversationAgent !== agent) {
// Cancel any previous conversation
this.#cancel();
this.#messages = [];
this.#isLoading = false;
this.#conversation?.archiveConversation();
this.#conversationAgent = agent;
// If we get a new agent, we need to
// create a new conversation alongside it.
if (agent) {
this.#conversation = new AiAssistanceModel.Conversation(
agentToConversationType(agent),
[],
agent.id,
false,
);
this.#historicalConversations.push(this.#conversation);
}
}
if (!agent) {
this.#conversation = undefined;
// We need to run doConversation separately
this.#messages = [];
// If no new agent is provided but a
// conversation is, update the state with
// that historical conversation.
if (conversation) {
this.#conversation = conversation;
}
}
if (!this.#conversationAgent && !this.#conversation) {
this.#selectDefaultAgentIfNeeded();
}
this.#onContextSelectionChanged();
this.requestUpdate();
}
override wasShown(): void {
super.wasShown();
this.#viewOutput.chatView?.restoreScrollPosition();
this.#viewOutput.chatView?.focusTextInput();
void this.#handleAidaAvailabilityChange();
this.#selectedElement =
createNodeContext(selectedElementFilter(UI.Context.Context.instance().flavor(SDK.DOMModel.DOMNode)));
this.#selectedRequest =
createRequestContext(UI.Context.Context.instance().flavor(SDK.NetworkRequest.NetworkRequest));
this.#selectedCallTree =
createCallTreeContext(UI.Context.Context.instance().flavor(TimelineUtils.AICallTree.AICallTree));
this.#selectedPerformanceInsight =
createPerfInsightContext(UI.Context.Context.instance().flavor(TimelineUtils.InsightAIContext.ActiveInsight));
this.#selectedFile = createFileContext(UI.Context.Context.instance().flavor(Workspace.UISourceCode.UISourceCode));
this.#updateConversationState(this.#conversationAgent);
this.#aiAssistanceEnabledSetting?.addChangeListener(this.requestUpdate, this);
Host.AidaClient.HostConfigTracker.instance().addEventListener(
Host.AidaClient.Events.AIDA_AVAILABILITY_CHANGED, this.#handleAidaAvailabilityChange);
this.#toggleSearchElementAction?.addEventListener(UI.ActionRegistration.Events.TOGGLED, this.requestUpdate, this);
UI.Context.Context.instance().addFlavorChangeListener(SDK.DOMModel.DOMNode, this.#handleDOMNodeFlavorChange);
UI.Context.Context.instance().addFlavorChangeListener(
SDK.NetworkRequest.NetworkRequest, this.#handleNetworkRequestFlavorChange);
UI.Context.Context.instance().addFlavorChangeListener(
TimelineUtils.AICallTree.AICallTree, this.#handleTraceEntryNodeFlavorChange);
UI.Context.Context.instance().addFlavorChangeListener(
Workspace.UISourceCode.UISourceCode, this.#handleUISourceCodeFlavorChange);
UI.Context.Context.instance().addFlavorChangeListener(
TimelineUtils.InsightAIContext.ActiveInsight, this.#handlePerfInsightFlavorChange);
UI.Context.Context.instance().addFlavorChangeListener(
ElementsPanel.ElementsPanel.ElementsPanel, this.#selectDefaultAgentIfNeeded, this);
UI.Context.Context.instance().addFlavorChangeListener(
NetworkPanel.NetworkPanel.NetworkPanel, this.#selectDefaultAgentIfNeeded, this);
UI.Context.Context.instance().addFlavorChangeListener(
SourcesPanel.SourcesPanel.SourcesPanel, this.#selectDefaultAgentIfNeeded, this);
UI.Context.Context.instance().addFlavorChangeListener(
TimelinePanel.TimelinePanel.TimelinePanel, this.#selectDefaultAgentIfNeeded, this);
SDK.TargetManager.TargetManager.instance().addModelListener(
SDK.DOMModel.DOMModel, SDK.DOMModel.Events.AttrModified, this.#handleDOMNodeAttrChange, this);
SDK.TargetManager.TargetManager.instance().addModelListener(
SDK.DOMModel.DOMModel, SDK.DOMModel.Events.AttrRemoved, this.#handleDOMNodeAttrChange, this);
SDK.TargetManager.TargetManager.instance().addModelListener(
SDK.ResourceTreeModel.ResourceTreeModel, SDK.ResourceTreeModel.Events.PrimaryPageChanged,
this.#onPrimaryPageChanged, this);
// Listen to changes in the Timeline Panel state. We also call the
// function immediately in case the Performance panel is already shown
// when AI Assistance is loaded.
UI.Context.Context.instance().addFlavorChangeListener(
TimelinePanel.TimelinePanel.TimelinePanel, this.#bindTimelineTraceListener, this);
this.#bindTimelineTraceListener();
Host.userMetrics.actionTaken(Host.UserMetrics.Action.AiAssistancePanelOpened);
}
override willHide(): void {
this.#aiAssistanceEnabledSetting?.removeChangeListener(this.requestUpdate, this);
Host.AidaClient.HostConfigTracker.instance().removeEventListener(
Host.AidaClient.Events.AIDA_AVAILABILITY_CHANGED, this.#handleAidaAvailabilityChange);
this.#toggleSearchElementAction?.removeEventListener(
UI.ActionRegistration.Events.TOGGLED, this.requestUpdate, this);
UI.Context.Context.instance().removeFlavorChangeListener(SDK.DOMModel.DOMNode, this.#handleDOMNodeFlavorChange);
UI.Context.Context.instance().removeFlavorChangeListener(
SDK.NetworkRequest.NetworkRequest, this.#handleNetworkRequestFlavorChange);
UI.Context.Context.instance().removeFlavorChangeListener(
TimelineUtils.AICallTree.AICallTree, this.#handleTraceEntryNodeFlavorChange);
UI.Context.Context.instance().removeFlavorChangeListener(
TimelineUtils.InsightAIContext.ActiveInsight, this.#handlePerfInsightFlavorChange);
UI.Context.Context.instance().removeFlavorChangeListener(
Workspace.UISourceCode.UISourceCode, this.#handleUISourceCodeFlavorChange);
UI.Context.Context.instance().removeFlavorChangeListener(
ElementsPanel.ElementsPanel.ElementsPanel, this.#selectDefaultAgentIfNeeded, this);
UI.Context.Context.instance().removeFlavorChangeListener(
NetworkPanel.NetworkPanel.NetworkPanel, this.#selectDefaultAgentIfNeeded, this);
UI.Context.Context.instance().removeFlavorChangeListener(
SourcesPanel.SourcesPanel.SourcesPanel, this.#selectDefaultAgentIfNeeded, this);
UI.Context.Context.instance().removeFlavorChangeListener(
TimelinePanel.TimelinePanel.TimelinePanel, this.#selectDefaultAgentIfNeeded, this);
UI.Context.Context.instance().removeFlavorChangeListener(
TimelinePanel.TimelinePanel.TimelinePanel, this.#bindTimelineTraceListener, this);
SDK.TargetManager.TargetManager.instance().removeModelListener(
SDK.DOMModel.DOMModel,
SDK.DOMModel.Events.AttrModified,
this.#handleDOMNodeAttrChange,
this,
);
SDK.TargetManager.TargetManager.instance().removeModelListener(
SDK.DOMModel.DOMModel,
SDK.DOMModel.Events.AttrRemoved,
this.#handleDOMNodeAttrChange,
this,
);
SDK.TargetManager.TargetManager.instance().removeModelListener(
SDK.ResourceTreeModel.ResourceTreeModel, SDK.ResourceTreeModel.Events.PrimaryPageChanged,
this.#onPrimaryPageChanged, this);
if (this.#timelinePanelInstance) {
this.#timelinePanelInstance.removeEventListener(
TimelinePanel.TimelinePanel.Events.IS_VIEWING_TRACE, this.requestUpdate, this);
this.#timelinePanelInstance = null;
}
}
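// Re-checks Aida availability when the host config changes and, if it changed,
// refreshes the cached user info and re-renders.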
#handleAidaAvailabilityChange = async(): Promise<void> => {
const currentAidaAvailability = await Host.AidaClient.AidaClient.checkAccessPreconditions();
if (currentAidaAvailability !== this.#aidaAvailability) {
this.#aidaAvailability = currentAidaAvailability;
const syncInfo = await new Promise<Host.InspectorFrontendHostAPI.SyncInformation>(
resolve => Host.InspectorFrontendHost.InspectorFrontendHostInstance.getSyncInformation(resolve));
this.#userInfo = {
accountImage: syncInfo.accountImage,
accountFullName: syncInfo.accountFullName,
};
this.requestUpdate();
}
};
#handleDOMNodeFlavorChange = (ev: Common.EventTarget.EventTargetEvent<SDK.DOMModel.DOMNode>): void => {
if (this.#selectedElement?.getItem() === ev.data) {
return;
}
this.#selectedElement = createNodeContext(selectedElementFilter(ev.data));
this.#updateConversationState(this.#conversationAgent);
};
#handleDOMNodeAttrChange =
(ev: Common.EventTarget.EventTargetEvent<{node: SDK.DOMModel.DOMNode, name: string}>): void => {
if (this.#selectedElement?.getItem() === ev.data.node) {
if (ev.data.name === 'class' || ev.data.name === 'id') {
this.requestUpdate();
}
}
};
#handleNetworkRequestFlavorChange =
(ev: Common.EventTarget.EventTargetEvent<SDK.NetworkRequest.NetworkRequest>): void => {
if (this.#selectedRequest?.getItem() === ev.data) {
return;
}
this.#selectedRequest = Boolean(ev.data) ? new AiAssistanceModel.RequestContext(ev.data) : null;
this.#updateConversationState(this.#conversationAgent);
};
#handleTraceEntryNodeFlavorChange =
(ev: Common.EventTarget.EventTargetEvent<TimelineUtils.AICallTree.AICallTree>): void => {
if (this.#selectedCallTree?.getItem() === ev.data) {
return;
}
this.#selectedCallTree = Boolean(ev.data) ? new AiAssistanceModel.CallTreeContext(ev.data) : null;
this.#updateConversationState(this.#conversationAgent);
};
#handlePerfInsightFlavorChange =
(ev: Common.EventTarget.EventTargetEvent<TimelineUtils.InsightAIContext.ActiveInsight>): void => {
if (this.#selectedPerformanceInsight?.getItem() === ev.data) {
return;
}
this.#selectedPerformanceInsight = Boolean(ev.data) ? new AiAssistanceModel.InsightContext(ev.data) : null;
this.#updateConversationState(this.#conversationAgent);
};
#handleUISourceCodeFlavorChange =
(ev: Common.EventTarget.EventTargetEvent<Workspace.UISourceCode.UISourceCode>): void => {
const newFile = ev.data;
if (!newFile) {
return;
}
if (this.#selectedFile?.getItem() === newFile) {
return;
}
this.#selectedFile = new AiAssistanceModel.FileContext(ev.data);
this.#updateConversationState(this.#conversationAgent);
};
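// Clears any pending image input when the inspected page navigates.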
#onPrimaryPageChanged(): void {
if (!this.#imageInput) {
return;
}
this.#imageInput = undefined;
this.requestUpdate();
}
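// Formats the current agent's changes for the patch widget; returns undefined when
// patching is disabled, there is no agent, or the conversation is read-only.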
#getChangeSummary(): string|undefined {
if (!isAiAssistancePatchingEnabled() || !this.#conversationAgent || this.#conversation?.isReadOnly) {
return;
}
return this.#changeManager.formatChangesForPatching(this.#conversationAgent.id, /* includeSourceLocation= */ true);
}
override async performUpdate(): Promise<void> {
const emptyStateSuggestions = await getEmptyStateSuggestions(this.#selectedContext, this.#conversation?.type);
this.view(
{
state: this.#getChatUiState(),
blockedByCrossOrigin: this.#blockedByCrossOrigin,
aidaAvailability: this.#aidaAvailability,
isLoading: this.#isLoading,
messages: this.#messages,
selectedContext: this.#selectedContext,
conversationType: this.#conversation?.type,
isReadOnly: this.#conversation?.isReadOnly ?? false,
changeSummary: this.#getChangeSummary(),
inspectElementToggled: this.#toggleSearchElementAction?.toggled() ?? false,
userInfo: this.#userInfo,
canShowFeedbackForm: this.#serverSideLoggingEnabled,
multimodalInputEnabled: isAiAssistanceMultimodalInputEnabled() &&
this.#conversation?.type === AiAssistanceModel.ConversationType.STYLING,
imageInput: this.#imageInput,
showDeleteHistoryAction: Boolean(this.#conversation && !this.#conversation.isEmpty),
showChatActions: this.#shouldShowChatActions(),
isTextInputDisabled: this.#isTextInputDisabled(),
emptyStateSuggestions,
inputPlaceholder: this.#getChatInputPlaceholder(),
disclaimerText: this.#getDisclaimerText(),
isTextInputEmpty: this.#isTextInputEmpty,
changeManager: this.#changeManager,
uploadImageInputEnabled: isAiAssistanceMultimodalUploadInputEnabled() &&
this.#conversation?.type === AiAssistanceModel.ConversationType.STYLING,
onNewChatClick: this.#handleNewChatRequest.bind(this),
populateHistoryMenu: this.#populateHistoryMenu.bind(this),
onDeleteClick: this.#onDeleteClicked.bind(this),
onHelpClick: () => {
UI.UIUtils.openInNewTab(AI_ASSISTANCE_HELP);
},
onSettingsClick: () => {
void UI.ViewManager.ViewManager.instance().showView('chrome-ai');
},
onTextSubmit: async (
text: string, imageInput?: Host.AidaClient.Part,
multimodalInputType?: AiAssistanceModel.MultimodalInputType) => {
this.#imageInput = undefined;
this.#isTextInputEmpty = true;
Host.userMetrics.actionTaken(Host.UserMetrics.Action.AiAssistanceQuerySubmitted);
await this.#startConversation(text, imageInput, multimodalInputType);
},
onInspectElementClick: this.#handleSelectElementClick.bind(this),
onFeedbackSubmit: this.#handleFeedbackSubmit.bind(this),
onCancelClick: this.#cancel.bind(this),
onContextClick: this.#handleContextClick.bind(this),
onNewConversation: this.#handleNewChatRequest.bind(this),
onTakeScreenshot: isAiAssistanceMultimodalInputEnabled() ? this.#handleTakeScreenshot.bind(this) : undefined,
onRemoveImageInput: isAiAssistanceMultimodalInputEnabled() ? this.#handleRemoveImageInput.bind(this) :
undefined,
onTextInputChange: this.#handleTextInputChange.bind(this),
onLoadImage: isAiAssistanceMultimodalUploadInputEnabled() ? this.#handleLoadImage.bind(this) : undefined,
},
this.#viewOutput, this.contentElement);
}
#handleSelectElementClick(): void {
void this.#toggleSearchElementAction?.execute();
}
#isTextInputDisabled(): boolean {
// If the `aiAssistanceSetting` is not enabled
// or if the user is blocked by age, the text input is disabled.
const aiAssistanceSetting = this.#aiAssistanceEnabledSetting?.getIfNotDisabled();
const isBlockedByAge = Root.Runtime.hostConfig.aidaAvailability?.blockedByAge === true;
if (!aiAssistanceSetting || isBlockedByAge) {
return true;
}
// If Aida is not available, the text input is disabled.
const isAidaAvailable = this.#aidaAvailability === Host.AidaClient.AidaAccessPreconditions.AVAILABLE;
if (!isAidaAvailable) {
return true;
}
// If sending a new message is blocked by a cross-origin context,
// the text input is disabled.
if (this.#blockedByCrossOrigin) {
return true;
}
// If there is no current conversation or no selected context,
// the text input is disabled.
if (!this.#conversation || !this.#selectedContext) {
return true;
}
return false;
}
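// Chat actions (new chat, history) are hidden when AI assistance is disabled, the user
// is blocked by age, or there is no signed-in account / sync is paused.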
#shouldShowChatActions(): boolean {
const aiAssistanceSetting = this.#aiAssistanceEnabledSetting?.getIfNotDisabled();
const isBlockedByAge = Root.Runtime.hostConfig.aidaAvailability?.blockedByAge === true;
if (!aiAssistanceSetting || isBlockedByAge) {
return false;
}
if (this.#aidaAvailability === Host.AidaClient.AidaAccessPreconditions.NO_ACCOUNT_EMAIL ||
this.#aidaAvailability === Host.AidaClient.AidaAccessPreconditions.SYNC_IS_PAUSED) {
return false;
}
return true;
}
#getChatInputPlaceholder(): Platform.UIString.LocalizedString {
const state = this.#getChatUiState();
if (state === ChatViewState.CONSENT_VIEW || !this.#conversation) {
return i18nString(UIStrings.followTheSteps);
}
if (this.#blockedByCrossOrigin) {
return lockedString(UIStringsNotTranslate.crossOriginError);
}
switch (this.#conversation.type) {
case AiAssistanceModel.ConversationType.STYLING:
return this.#selectedContext ? lockedString(UIStringsNotTranslate.inputPlaceholderForStyling) :
lockedString(UIStringsNotTranslate.inputPlaceholderForStylingNoContext);
case AiAssistanceModel.ConversationType.FILE:
return this.#selectedContext ? lockedString(UIStringsNotTranslate.inputPlaceholderForFile) :
lockedString(UIStringsNotTranslate.inputPlaceholderForFileNoContext);
case AiAssistanceModel.ConversationType.NETWORK:
return this.#selectedContext ? lockedString(UIStringsNotTranslate.inputPlaceholderForNetwork) :
lockedString(UIStringsNotTranslate.inputPlaceholderForNetworkNoContext);
case AiAssistanceModel.ConversationType.PERFORMANCE: {
const perfPanel = UI.Context.Context.instance().flavor(TimelinePanel.TimelinePanel.TimelinePanel);
if (perfPanel?.hasActiveTrace()) {
return this.#selectedContext ? lockedString(UIStringsNotTranslate.inputPlaceholderForPerformance) :
lockedString(UIStringsNotTranslate.inputPlaceholderForPerformanceNoContext);
}
return lockedString(UIStringsNotTranslate.inputPlaceholderForPerformanceWithNoRecording);
}
case AiAssistanceModel.ConversationType.PERFORMANCE_INSIGHT:
return this.#selectedContext ?
lockedString(UIStringsNotTranslate.inputPlaceholderForPerformanceInsights) :
lockedString(UIStringsNotTranslate.inputPlaceholderForPerformanceInsightsNoContext);
}
}
#getDisclaimerText(): Platform.UIString.LocalizedString {
const state = this.#getChatUiState();
if (state === ChatViewState.CONSENT_VIEW || !this.#conversation || this.#conversation.isReadOnly) {
return i18nString(UIStrings.inputDisclaimerForEmptyState);
}
const noLogging = Root.Runtime.hostConfig.aidaAvailability?.enterprisePolicyValue ===
Root.Runtime.GenAiEnterprisePolicyValue.ALLOW_WITHOUT_LOGGING;
switch (this.#conversation.type) {
case AiAssistanceModel.ConversationType.STYLING:
if (noLogging) {
return lockedString(UIStringsNotTranslate.inputDisclaimerForStylingEnterpriseNoLogging);
}
return lockedString(UIStringsNotTranslate.inputDisclaimerForStyling);
case AiAssistanceModel.ConversationType.FILE:
if (noLogging) {
return lockedString(UIStringsNotTranslate.inputDisclaimerForFileEnterpriseNoLogging);
}
return lockedString(UIStringsNotTranslate.inputDisclaimerForFile);
case AiAssistanceModel.ConversationType.NETWORK:
if (noLogging) {
return lockedString(UIStringsNotTranslate.inputDisclaimerForNetworkEnterpriseNoLogging);
}
return lockedString(UIStringsNotTranslate.inputDisclaimerForNetwork);
// It is deliberate that both Performance agents use the same disclaimer
// text and this has been approved by Privacy.