voice - cleanup voice chat actions (#212653)

Benjamin Pasero 2024-05-14 10:19:21 +02:00 committed by GitHub
parent 49bac27eb0
commit 7aa7be5f5c
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
12 changed files with 280 additions and 379 deletions

View file

@@ -51,7 +51,7 @@ import { IChatVariablesService } from 'vs/workbench/contrib/chat/common/chatVari
import { ChatWidgetHistoryService, IChatWidgetHistoryService } from 'vs/workbench/contrib/chat/common/chatWidgetHistoryService';
import { ILanguageModelsService, LanguageModelsService } from 'vs/workbench/contrib/chat/common/languageModels';
import { ILanguageModelStatsService, LanguageModelStatsService } from 'vs/workbench/contrib/chat/common/languageModelStats';
import { IVoiceChatService, VoiceChatService } from 'vs/workbench/contrib/chat/common/voiceChat';
import { IVoiceChatService, VoiceChatService } from 'vs/workbench/contrib/chat/common/voiceChatService';
import { IEditorResolverService, RegisteredEditorPriority } from 'vs/workbench/services/editor/common/editorResolverService';
import { LifecyclePhase } from 'vs/workbench/services/lifecycle/common/lifecycle';
import '../common/chatColors';

View file

@@ -128,6 +128,7 @@ export type IChatWidgetViewContext = IChatViewViewContext | IChatResourceViewCon
export interface IChatWidget {
readonly onDidChangeViewModel: Event<void>;
readonly onDidAcceptInput: Event<void>;
readonly onDidHideInput: Event<void>;
readonly onDidSubmitAgent: Event<{ agent: IChatAgentData; slashCommand?: IChatAgentCommand }>;
readonly onDidChangeParsedInput: Event<void>;
readonly location: ChatAgentLocation;
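Note: the new onDidHideInput event gives consumers a hook for when a widget's input disappears (the voice chat session controllers later in this commit use it to stop recording). A minimal, hedged consumer sketch; watchForHide and stopVoiceSession are hypothetical names, not part of this change:

import { IDisposable } from 'vs/base/common/lifecycle';
import { IChatWidget } from 'vs/workbench/contrib/chat/browser/chat';

// Hypothetical consumer: end an in-flight voice recording once the chat input hides.
function watchForHide(widget: IChatWidget, stopVoiceSession: () => void): IDisposable {
    return widget.onDidHideInput(() => stopVoiceSession());
}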

View file

@@ -75,6 +75,12 @@ export class ChatEditor extends EditorPane {
this.widget.setVisible(true);
}
protected override setEditorVisible(visible: boolean): void {
super.setEditorVisible(visible);
this.widget?.setVisible(visible);
}
public override focus(): void {
super.focus();

View file

@@ -96,6 +96,9 @@ export class ChatWidget extends Disposable implements IChatWidget {
private _onDidAcceptInput = this._register(new Emitter<void>());
readonly onDidAcceptInput = this._onDidAcceptInput.event;
private _onDidHideInput = this._register(new Emitter<void>());
readonly onDidHideInput = this._onDidHideInput.event;
private _onDidChangeParsedInput = this._register(new Emitter<void>());
readonly onDidChangeParsedInput = this._onDidChangeParsedInput.event;
@@ -388,6 +391,7 @@ export class ChatWidget extends Disposable implements IChatWidget {
}
setVisible(visible: boolean): void {
const wasVisible = this._visible;
this._visible = visible;
this.visibleChangeCount++;
this.renderer.setVisible(visible);
@@ -400,6 +404,8 @@ export class ChatWidget extends Disposable implements IChatWidget {
this.onDidChangeItems(true);
}
}, 0));
} else if (wasVisible) {
this._onDidHideInput.fire();
}
}
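Note: setVisible now records the previous visibility so onDidHideInput fires only on an actual visible-to-hidden transition, not on every setVisible(false) call. The same edge-trigger pattern, reduced to a hedged stand-alone sketch (names simplified, not the widget's full implementation):

import { Emitter } from 'vs/base/common/event';

// Simplified edge-trigger: fire only when the state flips from visible to hidden.
class VisibilityTracker {
    private visible = false;
    private readonly didHide = new Emitter<void>();
    readonly onDidHide = this.didHide.event;

    setVisible(visible: boolean): void {
        const wasVisible = this.visible;
        this.visible = visible;
        if (wasVisible && !visible) {
            this.didHide.fire();
        }
    }
}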

View file

@@ -3,10 +3,12 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { localize } from 'vs/nls';
import { CancellationToken } from 'vs/base/common/cancellation';
import { Emitter, Event } from 'vs/base/common/event';
import { Disposable, DisposableStore } from 'vs/base/common/lifecycle';
import { rtrim } from 'vs/base/common/strings';
import { IContextKeyService, RawContextKey } from 'vs/platform/contextkey/common/contextkey';
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
import { IChatAgentService } from 'vs/workbench/contrib/chat/common/chatAgents';
import { IChatModel } from 'vs/workbench/contrib/chat/common/chatModel';
@@ -58,6 +60,8 @@ enum PhraseTextType {
AGENT_AND_COMMAND = 3
}
export const VoiceChatInProgress = new RawContextKey<boolean>('voiceChatInProgress', false, { type: 'boolean', description: localize('voiceChatInProgress', "A speech-to-text session is in progress for chat.") });
export class VoiceChatService extends Disposable implements IVoiceChatService {
readonly _serviceBrand: undefined;
@@ -77,9 +81,13 @@ export class VoiceChatService extends Disposable implements IVoiceChatService {
private static readonly CHAT_AGENT_ALIAS = new Map<string, string>([['vscode', 'code']]);
private readonly voiceChatInProgress = VoiceChatInProgress.bindTo(this.contextKeyService);
private activeVoiceChatSessions = 0;
constructor(
@ISpeechService private readonly speechService: ISpeechService,
@IChatAgentService private readonly chatAgentService: IChatAgentService
@IChatAgentService private readonly chatAgentService: IChatAgentService,
@IContextKeyService private readonly contextKeyService: IContextKeyService
) {
super();
}
@@ -116,7 +124,19 @@ export class VoiceChatService extends Disposable implements IVoiceChatService {
async createVoiceChatSession(token: CancellationToken, options: IVoiceChatSessionOptions): Promise<IVoiceChatSession> {
const disposables = new DisposableStore();
disposables.add(token.onCancellationRequested(() => disposables.dispose()));
const onSessionStoppedOrCanceled = (dispose: boolean) => {
this.activeVoiceChatSessions--;
if (this.activeVoiceChatSessions === 0) {
this.voiceChatInProgress.reset();
}
if (dispose) {
disposables.dispose();
}
};
disposables.add(token.onCancellationRequested(() => onSessionStoppedOrCanceled(true)));
let detectedAgent = false;
let detectedSlashCommand = false;
@@ -124,6 +144,10 @@ export class VoiceChatService extends Disposable implements IVoiceChatService {
const emitter = disposables.add(new Emitter<IVoiceChatTextEvent>());
const session = await this.speechService.createSpeechToTextSession(token, 'chat');
if (token.isCancellationRequested) {
onSessionStoppedOrCanceled(true);
}
const phrases = this.createPhrases(options.model);
disposables.add(session.onDidChange(e => {
switch (e.status) {
@@ -193,6 +217,15 @@ export class VoiceChatService extends Disposable implements IVoiceChatService {
break;
}
}
case SpeechToTextStatus.Started:
this.activeVoiceChatSessions++;
this.voiceChatInProgress.set(true);
emitter.fire(e);
break;
case SpeechToTextStatus.Stopped:
onSessionStoppedOrCanceled(false);
emitter.fire(e);
break;
default:
emitter.fire(e);
break;
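Note: the service now keeps a simple reference count of overlapping sessions and drives the new global VoiceChatInProgress context key from it: increment on SpeechToTextStatus.Started, decrement on stop or cancellation, and reset the key only once the count reaches zero. The counting pattern, stripped down to a hedged sketch (class and member names are illustrative, and it assumes start/stop calls stay balanced, as the service above guarantees):

import { IContextKey } from 'vs/platform/contextkey/common/contextkey';

// Illustrative reference counter behind a single boolean context key.
class VoiceSessionCounter {
    private active = 0;

    constructor(private readonly inProgress: IContextKey<boolean>) { }

    onSessionStarted(): void {
        this.active++;
        this.inProgress.set(true);
    }

    onSessionStoppedOrCanceled(): void {
        this.active--;
        if (this.active === 0) {
            this.inProgress.reset(); // only clear once the last session ends
        }
    }
}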

View file

@@ -11,7 +11,6 @@ import { Color } from 'vs/base/common/color';
import { Event } from 'vs/base/common/event';
import { KeyCode, KeyMod } from 'vs/base/common/keyCodes';
import { Disposable, DisposableStore, MutableDisposable, toDisposable } from 'vs/base/common/lifecycle';
import { ThemeIcon } from 'vs/base/common/themables';
import { isNumber } from 'vs/base/common/types';
import { getCodeEditor } from 'vs/editor/browser/editorBrowser';
import { EditorContextKeys } from 'vs/editor/common/editorContextKeys';
@@ -36,12 +35,12 @@ import { ACTIVITY_BAR_BADGE_BACKGROUND } from 'vs/workbench/common/theme';
import { AccessibilityVoiceSettingId, SpeechTimeoutDefault, accessibilityConfigurationNodeBase } from 'vs/workbench/contrib/accessibility/browser/accessibilityConfiguration';
import { CHAT_CATEGORY } from 'vs/workbench/contrib/chat/browser/actions/chatActions';
import { IChatExecuteActionContext } from 'vs/workbench/contrib/chat/browser/actions/chatExecuteActions';
import { CHAT_VIEW_ID, IChatWidget, IChatWidgetService, IQuickChatService, showChatView } from 'vs/workbench/contrib/chat/browser/chat';
import { IChatWidget, IChatWidgetService, IQuickChatService, showChatView } from 'vs/workbench/contrib/chat/browser/chat';
import { ChatAgentLocation, IChatAgentService } from 'vs/workbench/contrib/chat/common/chatAgents';
import { CONTEXT_CHAT_REQUEST_IN_PROGRESS, CONTEXT_IN_CHAT_INPUT, CONTEXT_CHAT_ENABLED, CONTEXT_RESPONSE, CONTEXT_RESPONSE_FILTERED } from 'vs/workbench/contrib/chat/common/chatContextKeys';
import { IChatService, KEYWORD_ACTIVIATION_SETTING_ID } from 'vs/workbench/contrib/chat/common/chatService';
import { KEYWORD_ACTIVIATION_SETTING_ID } from 'vs/workbench/contrib/chat/common/chatService';
import { isResponseVM } from 'vs/workbench/contrib/chat/common/chatViewModel';
import { IVoiceChatService } from 'vs/workbench/contrib/chat/common/voiceChat';
import { IVoiceChatService, VoiceChatInProgress as GlobalVoiceChatInProgress } from 'vs/workbench/contrib/chat/common/voiceChatService';
import { IExtensionsWorkbenchService } from 'vs/workbench/contrib/extensions/common/extensions';
import { InlineChatController } from 'vs/workbench/contrib/inlineChat/browser/inlineChatController';
import { CTX_INLINE_CHAT_FOCUSED, CTX_INLINE_CHAT_HAS_ACTIVE_REQUEST } from 'vs/workbench/contrib/inlineChat/common/inlineChat';
@@ -59,21 +58,20 @@ import { IAccessibilityService } from 'vs/platform/accessibility/common/accessib
//#region Speech to Text
const CONTEXT_VOICE_CHAT_GETTING_READY = new RawContextKey<boolean>('voiceChatGettingReady', false, { type: 'boolean', description: localize('voiceChatGettingReady', "True when getting ready for receiving voice input from the microphone for voice chat.") });
const CONTEXT_VOICE_CHAT_IN_PROGRESS = new RawContextKey<boolean>('voiceChatInProgress', false, { type: 'boolean', description: localize('voiceChatInProgress', "True when voice recording from microphone is in progress for voice chat.") });
type VoiceChatSessionContext = 'view' | 'inline' | 'terminal' | 'quick' | 'editor';
const VoiceChatSessionContexts: VoiceChatSessionContext[] = ['view', 'inline', 'terminal', 'quick', 'editor'];
const CONTEXT_QUICK_VOICE_CHAT_IN_PROGRESS = new RawContextKey<boolean>('quickVoiceChatInProgress', false, { type: 'boolean', description: localize('quickVoiceChatInProgress', "True when voice recording from microphone is in progress for quick chat.") });
const CONTEXT_INLINE_VOICE_CHAT_IN_PROGRESS = new RawContextKey<boolean>('inlineVoiceChatInProgress', false, { type: 'boolean', description: localize('inlineVoiceChatInProgress', "True when voice recording from microphone is in progress for inline chat.") });
const CONTEXT_VOICE_CHAT_IN_TERMINAL_IN_PROGRESS = new RawContextKey<boolean>('voiceChatInTerminalInProgress', false, { type: 'boolean', description: localize('voiceChatInTerminalInProgress', "True when voice recording from microphone is in progress for terminal chat.") });
const CONTEXT_VOICE_CHAT_IN_VIEW_IN_PROGRESS = new RawContextKey<boolean>('voiceChatInViewInProgress', false, { type: 'boolean', description: localize('voiceChatInViewInProgress', "True when voice recording from microphone is in progress in the chat view.") });
const CONTEXT_VOICE_CHAT_IN_EDITOR_IN_PROGRESS = new RawContextKey<boolean>('voiceChatInEditorInProgress', false, { type: 'boolean', description: localize('voiceChatInEditorInProgress', "True when voice recording from microphone is in progress in the chat editor.") });
const TerminalChatExecute = MenuId.for('terminalChatInput'); // unfortunately, terminal decided to go with their own menu (https://github.com/microsoft/vscode/issues/208789)
// Global Context Keys (set on global context key service)
const CanVoiceChat = ContextKeyExpr.and(CONTEXT_CHAT_ENABLED, HasSpeechProvider);
const FocusInChatInput = ContextKeyExpr.or(CTX_INLINE_CHAT_FOCUSED, CONTEXT_IN_CHAT_INPUT);
const AnyChatRequestInProgress = ContextKeyExpr.or(CONTEXT_CHAT_REQUEST_IN_PROGRESS, CTX_INLINE_CHAT_HAS_ACTIVE_REQUEST, TerminalChatContextKeys.requestActive);
type VoiceChatSessionContext = 'inline' | 'terminal' | 'quick' | 'view' | 'editor';
// Scoped Context Keys (set on per-chat-context scoped context key service)
const SCOPED_VOICE_CHAT_GETTING_READY = new RawContextKey<boolean>('scopedVoiceChatGettingReady', false, { type: 'boolean', description: localize('scopedVoiceChatGettingReady', "True when getting ready for receiving voice input from the microphone for voice chat. This key is only defined scoped, per chat context.") });
const SCOPED_VOICE_CHAT_IN_PROGRESS = new RawContextKey<VoiceChatSessionContext | undefined>('scopedVoiceChatInProgress', undefined, { type: 'string', description: localize('scopedVoiceChatInProgress', "Defined as a location where voice recording from microphone is in progress for voice chat. This key is only defined scoped, per chat context.") });
const ScopedVoiceChatInProgress = ContextKeyExpr.or(...VoiceChatSessionContexts.map(context => SCOPED_VOICE_CHAT_IN_PROGRESS.isEqualTo(context)));
enum VoiceChatSessionState {
Stopped = 1,
@@ -84,7 +82,7 @@ enum VoiceChatSessionState {
interface IVoiceChatSessionController {
readonly onDidAcceptInput: Event<unknown>;
readonly onDidCancelInput: Event<unknown>;
readonly onDidHideInput: Event<unknown>;
readonly context: VoiceChatSessionContext;
@@ -101,214 +99,135 @@ interface IVoiceChatSessionController {
class VoiceChatSessionControllerFactory {
static create(accessor: ServicesAccessor, context: 'inline'): Promise<IVoiceChatSessionController | undefined>;
static create(accessor: ServicesAccessor, context: 'quick'): Promise<IVoiceChatSessionController | undefined>;
static create(accessor: ServicesAccessor, context: 'view'): Promise<IVoiceChatSessionController | undefined>;
static create(accessor: ServicesAccessor, context: 'terminal'): Promise<IVoiceChatSessionController | undefined>;
static create(accessor: ServicesAccessor, context: 'focused'): Promise<IVoiceChatSessionController | undefined>;
static create(accessor: ServicesAccessor, context: 'inline' | 'quick' | 'view' | 'terminal' | 'focused'): Promise<IVoiceChatSessionController | undefined>;
static async create(accessor: ServicesAccessor, context: 'inline' | 'quick' | 'view' | 'terminal' | 'focused'): Promise<IVoiceChatSessionController | undefined> {
static async create(accessor: ServicesAccessor, context: 'view' | 'inline' | 'quick' | 'focused'): Promise<IVoiceChatSessionController | undefined> {
const chatWidgetService = accessor.get(IChatWidgetService);
const viewsService = accessor.get(IViewsService);
const quickChatService = accessor.get(IQuickChatService);
const layoutService = accessor.get(IWorkbenchLayoutService);
const editorService = accessor.get(IEditorService);
const terminalService = accessor.get(ITerminalService);
// Currently Focused Context
if (context === 'focused') {
// Try with the terminal chat
const activeInstance = terminalService.activeInstance;
if (activeInstance) {
const terminalChat = TerminalChatController.activeChatWidget || TerminalChatController.get(activeInstance);
if (terminalChat?.hasFocus()) {
return VoiceChatSessionControllerFactory.doCreateForTerminalChat(terminalChat);
}
}
// Try with the chat widget service, which currently
// only supports the chat view and quick chat
// https://github.com/microsoft/vscode/issues/191191
const chatInput = chatWidgetService.lastFocusedWidget;
if (chatInput?.hasInputFocus()) {
// Unfortunately there does not seem to be a better way
// to figure out if the chat widget is in a part or picker
if (
layoutService.hasFocus(Parts.SIDEBAR_PART) ||
layoutService.hasFocus(Parts.PANEL_PART) ||
layoutService.hasFocus(Parts.AUXILIARYBAR_PART)
) {
return VoiceChatSessionControllerFactory.doCreateForChatView(chatInput, viewsService);
}
if (layoutService.hasFocus(Parts.EDITOR_PART)) {
return VoiceChatSessionControllerFactory.doCreateForChatEditor(chatInput, viewsService);
}
return VoiceChatSessionControllerFactory.doCreateForQuickChat(chatInput, quickChatService);
}
// Try with the inline chat
const activeCodeEditor = getCodeEditor(editorService.activeTextEditorControl);
if (activeCodeEditor) {
const inlineChat = InlineChatController.get(activeCodeEditor);
if (inlineChat?.hasFocus()) {
return VoiceChatSessionControllerFactory.doCreateForInlineChat(inlineChat);
}
}
}
// View Chat
if (context === 'view' || context === 'focused' /* fallback in case 'focused' was not successful */) {
const chatView = await VoiceChatSessionControllerFactory.revealChatView(accessor);
if (chatView) {
return VoiceChatSessionControllerFactory.doCreateForChatView(chatView, viewsService);
}
}
// Inline Chat
if (context === 'inline') {
const activeCodeEditor = getCodeEditor(editorService.activeTextEditorControl);
if (activeCodeEditor) {
const inlineChat = InlineChatController.get(activeCodeEditor);
if (inlineChat) {
return VoiceChatSessionControllerFactory.doCreateForInlineChat(inlineChat);
}
}
}
// Terminal Chat
if (context === 'terminal') {
const activeInstance = terminalService.activeInstance;
if (activeInstance) {
const terminalChat = TerminalChatController.activeChatWidget || TerminalChatController.get(activeInstance);
if (terminalChat) {
return VoiceChatSessionControllerFactory.doCreateForTerminalChat(terminalChat);
}
}
}
// Quick Chat
if (context === 'quick') {
quickChatService.open();
const quickChat = chatWidgetService.lastFocusedWidget;
if (quickChat) {
return VoiceChatSessionControllerFactory.doCreateForQuickChat(quickChat, quickChatService);
}
}
return undefined;
}
static async revealChatView(accessor: ServicesAccessor): Promise<IChatWidget | undefined> {
const chatService = accessor.get(IChatService);
const viewsService = accessor.get(IViewsService);
if (chatService.isEnabled(ChatAgentLocation.Panel)) {
return showChatView(viewsService);
switch (context) {
case 'focused': {
const controller = VoiceChatSessionControllerFactory.doCreateForFocusedChat(terminalService, chatWidgetService, layoutService);
return controller ?? VoiceChatSessionControllerFactory.create(accessor, 'view'); // fallback to 'view'
}
case 'view': {
const chatWidget = await showChatView(viewsService);
if (chatWidget) {
return VoiceChatSessionControllerFactory.doCreateForChatWidget('view', chatWidget);
}
break;
}
case 'inline': {
const activeCodeEditor = getCodeEditor(editorService.activeTextEditorControl);
if (activeCodeEditor) {
const inlineChat = InlineChatController.get(activeCodeEditor);
if (inlineChat) {
if (!inlineChat.joinCurrentRun()) {
inlineChat.run();
}
return VoiceChatSessionControllerFactory.doCreateForChatWidget('inline', inlineChat.chatWidget);
}
}
break;
}
case 'quick': {
quickChatService.open(); // this will populate focused chat widget in the chat widget service
return VoiceChatSessionControllerFactory.create(accessor, 'focused');
}
}
return undefined;
}
private static doCreateForChatView(chatView: IChatWidget, viewsService: IViewsService): IVoiceChatSessionController {
return VoiceChatSessionControllerFactory.doCreateForChatViewOrEditor('view', chatView, viewsService);
private static doCreateForFocusedChat(terminalService: ITerminalService, chatWidgetService: IChatWidgetService, layoutService: IWorkbenchLayoutService): IVoiceChatSessionController | undefined {
// 1.) probe terminal chat which is not part of chat widget service
const activeInstance = terminalService.activeInstance;
if (activeInstance) {
const terminalChat = TerminalChatController.activeChatWidget || TerminalChatController.get(activeInstance);
if (terminalChat?.hasFocus()) {
return VoiceChatSessionControllerFactory.doCreateForTerminalChat(terminalChat);
}
}
// 2.) otherwise go via chat widget service
const chatWidget = chatWidgetService.lastFocusedWidget;
if (chatWidget?.hasInputFocus()) {
// Figure out the context of the chat widget by asking
// layout service for the part that has focus. Unfortunately
// there is no better way because the widget does not know
// its location.
let context: VoiceChatSessionContext;
if (layoutService.hasFocus(Parts.EDITOR_PART)) {
context = chatWidget.location === ChatAgentLocation.Panel ? 'editor' : 'inline';
} else if (
[Parts.SIDEBAR_PART, Parts.PANEL_PART, Parts.AUXILIARYBAR_PART, Parts.TITLEBAR_PART, Parts.STATUSBAR_PART, Parts.BANNER_PART, Parts.ACTIVITYBAR_PART].some(part => layoutService.hasFocus(part))
) {
context = 'view';
} else {
context = 'quick';
}
return VoiceChatSessionControllerFactory.doCreateForChatWidget(context, chatWidget);
}
return undefined;
}
private static doCreateForChatEditor(chatView: IChatWidget, viewsService: IViewsService): IVoiceChatSessionController {
return VoiceChatSessionControllerFactory.doCreateForChatViewOrEditor('editor', chatView, viewsService);
}
private static createContextKeyController(contextKeyService: IContextKeyService, rawControllerVoiceChatInProgress: RawContextKey<boolean>): (state: VoiceChatSessionState) => void {
const contextVoiceChatGettingReady = CONTEXT_VOICE_CHAT_GETTING_READY.bindTo(contextKeyService);
const contextVoiceChatInProgress = CONTEXT_VOICE_CHAT_IN_PROGRESS.bindTo(contextKeyService);
const controllerVoiceChatInProgress = rawControllerVoiceChatInProgress.bindTo(contextKeyService);
private static createContextKeyController(contextKeyService: IContextKeyService, context: VoiceChatSessionContext): (state: VoiceChatSessionState) => void {
const contextVoiceChatGettingReady = SCOPED_VOICE_CHAT_GETTING_READY.bindTo(contextKeyService);
const contextVoiceChatInProgress = SCOPED_VOICE_CHAT_IN_PROGRESS.bindTo(contextKeyService);
return (state: VoiceChatSessionState) => {
switch (state) {
case VoiceChatSessionState.GettingReady:
contextVoiceChatGettingReady.set(true);
contextVoiceChatInProgress.set(false);
controllerVoiceChatInProgress.set(false);
contextVoiceChatInProgress.set(undefined);
break;
case VoiceChatSessionState.Started:
contextVoiceChatGettingReady.set(false);
contextVoiceChatInProgress.set(true);
controllerVoiceChatInProgress.set(true);
contextVoiceChatInProgress.set(context);
break;
case VoiceChatSessionState.Stopped:
contextVoiceChatGettingReady.set(false);
contextVoiceChatInProgress.set(false);
controllerVoiceChatInProgress.set(false);
contextVoiceChatInProgress.set(undefined);
break;
}
};
}
private static doCreateForChatViewOrEditor(context: 'view' | 'editor', chatView: IChatWidget, viewsService: IViewsService): IVoiceChatSessionController {
private static doCreateForChatWidget(context: VoiceChatSessionContext, chatWidget: IChatWidget): IVoiceChatSessionController {
return {
context,
onDidAcceptInput: chatView.onDidAcceptInput,
// TODO@bpasero cancellation needs to work better for chat editors that are not view bound
onDidCancelInput: Event.filter(viewsService.onDidChangeViewVisibility, e => e.id === CHAT_VIEW_ID),
focusInput: () => chatView.focusInput(),
acceptInput: () => chatView.acceptInput(),
updateInput: text => chatView.setInput(text),
getInput: () => chatView.getInput(),
setInputPlaceholder: text => chatView.setInputPlaceholder(text),
clearInputPlaceholder: () => chatView.resetInputPlaceholder(),
updateState: VoiceChatSessionControllerFactory.createContextKeyController(chatView.scopedContextKeyService, CONTEXT_VOICE_CHAT_IN_VIEW_IN_PROGRESS)
};
}
private static doCreateForQuickChat(quickChat: IChatWidget, quickChatService: IQuickChatService): IVoiceChatSessionController {
return {
context: 'quick',
onDidAcceptInput: quickChat.onDidAcceptInput,
onDidCancelInput: quickChatService.onDidClose,
focusInput: () => quickChat.focusInput(),
acceptInput: () => quickChat.acceptInput(),
updateInput: text => quickChat.setInput(text),
getInput: () => quickChat.getInput(),
setInputPlaceholder: text => quickChat.setInputPlaceholder(text),
clearInputPlaceholder: () => quickChat.resetInputPlaceholder(),
updateState: VoiceChatSessionControllerFactory.createContextKeyController(quickChat.scopedContextKeyService, CONTEXT_QUICK_VOICE_CHAT_IN_PROGRESS)
};
}
private static doCreateForInlineChat(inlineChat: InlineChatController): IVoiceChatSessionController {
const inlineChatSession = inlineChat.joinCurrentRun() ?? inlineChat.run();
return {
context: 'inline',
onDidAcceptInput: inlineChat.onDidAcceptInput,
onDidCancelInput: Event.any(
inlineChat.onDidCancelInput,
Event.fromPromise(inlineChatSession)
),
focusInput: () => inlineChat.focus(),
acceptInput: () => inlineChat.acceptInput(),
updateInput: text => inlineChat.updateInput(text, false),
getInput: () => inlineChat.getInput(),
setInputPlaceholder: text => inlineChat.setPlaceholder(text),
clearInputPlaceholder: () => inlineChat.resetPlaceholder(),
updateState: VoiceChatSessionControllerFactory.createContextKeyController(inlineChat.scopedContextKeyService, CONTEXT_INLINE_VOICE_CHAT_IN_PROGRESS)
onDidAcceptInput: chatWidget.onDidAcceptInput,
onDidHideInput: chatWidget.onDidHideInput,
focusInput: () => chatWidget.focusInput(),
acceptInput: () => chatWidget.acceptInput(),
updateInput: text => chatWidget.setInput(text),
getInput: () => chatWidget.getInput(),
setInputPlaceholder: text => chatWidget.setInputPlaceholder(text),
clearInputPlaceholder: () => chatWidget.resetInputPlaceholder(),
updateState: VoiceChatSessionControllerFactory.createContextKeyController(chatWidget.scopedContextKeyService, context)
};
}
private static doCreateForTerminalChat(terminalChat: TerminalChatController): IVoiceChatSessionController {
const context = 'terminal';
return {
context: 'terminal',
context,
onDidAcceptInput: terminalChat.onDidAcceptInput,
onDidCancelInput: terminalChat.onDidCancelInput,
onDidHideInput: terminalChat.onDidHideInput,
focusInput: () => terminalChat.focus(),
acceptInput: () => terminalChat.acceptInput(),
updateInput: text => terminalChat.updateInput(text, false),
getInput: () => terminalChat.getInput(),
setInputPlaceholder: text => terminalChat.setPlaceholder(text),
clearInputPlaceholder: () => terminalChat.resetPlaceholder(),
updateState: VoiceChatSessionControllerFactory.createContextKeyController(terminalChat.scopedContextKeyService, CONTEXT_VOICE_CHAT_IN_TERMINAL_IN_PROGRESS)
updateState: VoiceChatSessionControllerFactory.createContextKeyController(terminalChat.scopedContextKeyService, context)
};
}
}
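Note: the five per-surface boolean keys are folded into one scoped key whose value is the session context ('view', 'inline', 'terminal', 'quick' or 'editor'), and menu clauses match any value through an or-expression over isEqualTo, as SCOPED_VOICE_CHAT_IN_PROGRESS and ScopedVoiceChatInProgress do above. The same idea in a hedged, self-contained sketch with a hypothetical key name:

import { ContextKeyExpr, RawContextKey } from 'vs/platform/contextkey/common/contextkey';

type Surface = 'view' | 'inline' | 'terminal' | 'quick' | 'editor';
const surfaces: Surface[] = ['view', 'inline', 'terminal', 'quick', 'editor'];

// Hypothetical key mirroring SCOPED_VOICE_CHAT_IN_PROGRESS: undefined means no session.
const exampleVoiceChatSurface = new RawContextKey<Surface | undefined>('exampleVoiceChatSurface', undefined);

// True when the key holds any of the known surface values.
const anySurfaceInProgress = ContextKeyExpr.or(...surfaces.map(s => exampleVoiceChatSurface.isEqualTo(s)));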
@@ -369,7 +288,7 @@ class VoiceChatSessions {
session.disposables.add(toDisposable(() => cts.dispose(true)));
session.disposables.add(controller.onDidAcceptInput(() => this.stop(sessionId, controller.context)));
session.disposables.add(controller.onDidCancelInput(() => this.stop(sessionId, controller.context)));
session.disposables.add(controller.onDidHideInput(() => this.stop(sessionId, controller.context)));
controller.focusInput();
@@ -476,7 +395,7 @@ class VoiceChatSessions {
export const VOICE_KEY_HOLD_THRESHOLD = 500;
async function startVoiceChatWithHoldMode(id: string, accessor: ServicesAccessor, target: 'inline' | 'quick' | 'view' | 'focused', context?: IChatExecuteActionContext): Promise<void> {
async function startVoiceChatWithHoldMode(id: string, accessor: ServicesAccessor, target: 'view' | 'inline' | 'quick' | 'focused', context?: IChatExecuteActionContext): Promise<void> {
const instantiationService = accessor.get(IInstantiationService);
const keybindingService = accessor.get(IKeybindingService);
@@ -504,7 +423,7 @@ async function startVoiceChatWithHoldMode(id: string, accessor: ServicesAccessor
class VoiceChatWithHoldModeAction extends Action2 {
constructor(desc: Readonly<IAction2Options>, private readonly target: 'inline' | 'quick' | 'view') {
constructor(desc: Readonly<IAction2Options>, private readonly target: 'view' | 'inline' | 'quick') {
super(desc);
}
@@ -561,6 +480,7 @@ export class HoldToVoiceChatInChatViewAction extends Action2 {
const instantiationService = accessor.get(IInstantiationService);
const keybindingService = accessor.get(IKeybindingService);
const viewsService = accessor.get(IViewsService);
const holdMode = keybindingService.enableKeybindingHoldMode(HoldToVoiceChatInChatViewAction.ID);
@@ -573,7 +493,7 @@ export class HoldToVoiceChatInChatViewAction extends Action2 {
}
}, VOICE_KEY_HOLD_THRESHOLD);
(await VoiceChatSessionControllerFactory.revealChatView(accessor))?.focusInput();
(await showChatView(viewsService))?.focusInput();
await holdMode;
handle.dispose();
@@ -634,36 +554,34 @@ export class StartVoiceChatAction extends Action2 {
keybinding: {
weight: KeybindingWeight.WorkbenchContrib,
when: ContextKeyExpr.and(
FocusInChatInput, // scope this action to chat input fields only
EditorContextKeys.focus.negate(), // do not steal the editor inline-chat keybinding
NOTEBOOK_EDITOR_FOCUSED.negate() // do not steal the notebook inline-chat keybinding
FocusInChatInput, // scope this action to chat input fields only
EditorContextKeys.focus.negate(), // do not steal the editor inline-chat keybinding
NOTEBOOK_EDITOR_FOCUSED.negate() // do not steal the notebook inline-chat keybinding
),
primary: KeyMod.CtrlCmd | KeyCode.KeyI
},
icon: Codicon.mic,
precondition: ContextKeyExpr.and(
CanVoiceChat,
CONTEXT_VOICE_CHAT_GETTING_READY.negate(), // disable when voice chat is getting ready
AnyChatRequestInProgress?.negate(), // disable when any chat request is in progress
SpeechToTextInProgress.negate() // disable when speech to text is in progress
SCOPED_VOICE_CHAT_GETTING_READY.negate(), // disable when voice chat is getting ready
AnyChatRequestInProgress?.negate(), // disable when any chat request is in progress
SpeechToTextInProgress.negate() // disable when speech to text is in progress
),
menu: [{
id: MenuId.ChatExecute,
when: ContextKeyExpr.and(
HasSpeechProvider,
TextToSpeechInProgress.negate(), // hide when text to speech is in progress
CONTEXT_VOICE_CHAT_IN_VIEW_IN_PROGRESS.negate(), // hide when voice chat is in progress
CONTEXT_QUICK_VOICE_CHAT_IN_PROGRESS.negate(), // ||
CONTEXT_VOICE_CHAT_IN_EDITOR_IN_PROGRESS.negate(), // ||
TextToSpeechInProgress.negate(), // hide when text to speech is in progress
ScopedVoiceChatInProgress?.negate(), // hide when voice chat is in progress
),
group: 'navigation',
order: -1
}, {
id: MenuId.for('terminalChatInput'),
id: TerminalChatExecute,
when: ContextKeyExpr.and(
HasSpeechProvider,
TextToSpeechInProgress.negate(), // hide when text to speech is in progress
CONTEXT_VOICE_CHAT_IN_TERMINAL_IN_PROGRESS.negate(), // hide when voice chat is in progress
TextToSpeechInProgress.negate(), // hide when text to speech is in progress
ScopedVoiceChatInProgress?.negate(), // hide when voice chat is in progress
),
group: 'navigation',
order: -1
@@ -678,9 +596,6 @@ export class StartVoiceChatAction extends Action2 {
// from a toolbar within the chat widget, then make sure
// to move focus into the input field so that the controller
// is properly retrieved
// TODO@bpasero this will actually not work if the button
// is clicked from the inline editor while focus is in a
// chat input field in a view or picker
widget.focusInput();
}
@@ -688,133 +603,41 @@ export class StartVoiceChatAction extends Action2 {
}
}
const InstallingSpeechProvider = new RawContextKey<boolean>('installingSpeechProvider', false, true);
export class StopListeningAction extends Action2 {
abstract class BaseInstallSpeechProviderAction extends Action2 {
private static readonly SPEECH_EXTENSION_ID = 'ms-vscode.vscode-speech';
async run(accessor: ServicesAccessor): Promise<void> {
const contextKeyService = accessor.get(IContextKeyService);
const extensionsWorkbenchService = accessor.get(IExtensionsWorkbenchService);
try {
InstallingSpeechProvider.bindTo(contextKeyService).set(true);
await extensionsWorkbenchService.install(BaseInstallSpeechProviderAction.SPEECH_EXTENSION_ID, {
justification: this.getJustification(),
enable: true
}, ProgressLocation.Notification);
} finally {
InstallingSpeechProvider.bindTo(contextKeyService).set(false);
}
}
protected abstract getJustification(): string;
}
export class InstallSpeechProviderForVoiceChatAction extends BaseInstallSpeechProviderAction {
static readonly ID = 'workbench.action.chat.installProviderForVoiceChat';
static readonly ID = 'workbench.action.chat.stopListening';
constructor() {
super({
id: InstallSpeechProviderForVoiceChatAction.ID,
title: localize2('workbench.action.chat.installProviderForVoiceChat.label', "Start Voice Chat"),
icon: Codicon.mic,
precondition: InstallingSpeechProvider.negate(),
id: StopListeningAction.ID,
title: localize2('workbench.action.chat.stopListening.label', "Stop Listening"),
category: CHAT_CATEGORY,
f1: true,
keybinding: {
weight: KeybindingWeight.WorkbenchContrib + 100,
primary: KeyCode.Escape
},
icon: spinningLoading,
precondition: GlobalVoiceChatInProgress, // need global context here because of `f1: true`
menu: [{
id: MenuId.ChatExecute,
when: HasSpeechProvider.negate(),
when: ScopedVoiceChatInProgress,
group: 'navigation',
order: -1
}, {
id: MenuId.for('terminalChatInput'),
when: HasSpeechProvider.negate(),
id: TerminalChatExecute,
when: ScopedVoiceChatInProgress,
group: 'navigation',
order: -1
}]
});
}
protected getJustification(): string {
return localize('installProviderForVoiceChat.justification', "Microphone support requires this extension.");
}
}
class BaseStopListeningAction extends Action2 {
constructor(
desc: { id: string; icon?: ThemeIcon; f1?: boolean },
context: RawContextKey<boolean>,
menu: MenuId | undefined,
) {
super({
...desc,
title: localize2('workbench.action.chat.stopListening.label', "Stop Listening"),
category: CHAT_CATEGORY,
keybinding: {
weight: KeybindingWeight.WorkbenchContrib + 100,
primary: KeyCode.Escape
},
precondition: ContextKeyExpr.and(CanVoiceChat, context),
menu: menu ? [{
id: menu,
when: ContextKeyExpr.and(CanVoiceChat, context),
group: 'navigation',
order: -1
}] : undefined
});
}
async run(accessor: ServicesAccessor): Promise<void> {
VoiceChatSessions.getInstance(accessor.get(IInstantiationService)).stop();
}
}
export class StopListeningAction extends BaseStopListeningAction {
static readonly ID = 'workbench.action.chat.stopListening';
constructor() {
super({ id: StopListeningAction.ID, f1: true }, CONTEXT_VOICE_CHAT_IN_PROGRESS, undefined);
}
}
export class StopListeningInChatViewAction extends BaseStopListeningAction {
static readonly ID = 'workbench.action.chat.stopListeningInChatView';
constructor() {
super({ id: StopListeningInChatViewAction.ID, icon: spinningLoading }, CONTEXT_VOICE_CHAT_IN_VIEW_IN_PROGRESS, MenuId.ChatExecute);
}
}
export class StopListeningInChatEditorAction extends BaseStopListeningAction {
static readonly ID = 'workbench.action.chat.stopListeningInChatEditor';
constructor() {
super({ id: StopListeningInChatEditorAction.ID, icon: spinningLoading }, CONTEXT_VOICE_CHAT_IN_EDITOR_IN_PROGRESS, MenuId.ChatExecute);
}
}
export class StopListeningInQuickChatAction extends BaseStopListeningAction {
static readonly ID = 'workbench.action.chat.stopListeningInQuickChat';
constructor() {
super({ id: StopListeningInQuickChatAction.ID, icon: spinningLoading }, CONTEXT_QUICK_VOICE_CHAT_IN_PROGRESS, MenuId.ChatExecute);
}
}
export class StopListeningInTerminalChatAction extends BaseStopListeningAction {
static readonly ID = 'workbench.action.chat.stopListeningInTerminalChat';
constructor() {
super({ id: StopListeningInTerminalChatAction.ID, icon: spinningLoading }, CONTEXT_VOICE_CHAT_IN_TERMINAL_IN_PROGRESS, MenuId.for('terminalChatInput'));
}
}
export class StopListeningAndSubmitAction extends Action2 {
static readonly ID = 'workbench.action.chat.stopListeningAndSubmit';
@@ -830,7 +653,7 @@ export class StopListeningAndSubmitAction extends Action2 {
when: FocusInChatInput,
primary: KeyMod.CtrlCmd | KeyCode.KeyI
},
precondition: ContextKeyExpr.and(CanVoiceChat, CONTEXT_VOICE_CHAT_IN_PROGRESS)
precondition: GlobalVoiceChatInProgress // need global context here because of `f1: true`
});
}
@@ -941,35 +764,11 @@ class ChatSynthesizerSessions {
}
}
export class InstallSpeechProviderForSynthesizeChatAction extends BaseInstallSpeechProviderAction {
static readonly ID = 'workbench.action.chat.installProviderForSynthesis';
constructor() {
super({
id: InstallSpeechProviderForSynthesizeChatAction.ID,
title: localize2('workbench.action.chat.installProviderForSynthesis.label', "Read Aloud"),
icon: Codicon.unmute,
precondition: InstallingSpeechProvider.negate(),
menu: [{
id: MenuId.ChatMessageTitle,
when: HasSpeechProvider.negate(),
group: 'navigation'
}]
});
}
protected getJustification(): string {
return localize('installProviderForSynthesis.justification', "Speaker support requires this extension.");
}
}
export class ReadChatResponseAloud extends Action2 {
constructor() {
super({
id: 'workbench.action.chat.readChatResponseAloud',
title: localize2('workbench.action.chat.readChatResponseAloud', "Read Aloud"),
f1: false,
icon: Codicon.unmute,
precondition: CanVoiceChat,
menu: {
@@ -1019,7 +818,7 @@ export class StopReadAloud extends Action2 {
order: -1
},
{
id: MenuId.for('terminalChatInput'),
id: TerminalChatExecute,
when: TextToSpeechInProgress,
group: 'navigation',
order: -1
@@ -1312,6 +1111,85 @@ class KeywordActivationStatusEntry extends Disposable {
//#endregion
//#region Install Provider Actions
const InstallingSpeechProvider = new RawContextKey<boolean>('installingSpeechProvider', false, true);
abstract class BaseInstallSpeechProviderAction extends Action2 {
private static readonly SPEECH_EXTENSION_ID = 'ms-vscode.vscode-speech';
async run(accessor: ServicesAccessor): Promise<void> {
const contextKeyService = accessor.get(IContextKeyService);
const extensionsWorkbenchService = accessor.get(IExtensionsWorkbenchService);
try {
InstallingSpeechProvider.bindTo(contextKeyService).set(true);
await extensionsWorkbenchService.install(BaseInstallSpeechProviderAction.SPEECH_EXTENSION_ID, {
justification: this.getJustification(),
enable: true
}, ProgressLocation.Notification);
} finally {
InstallingSpeechProvider.bindTo(contextKeyService).set(false);
}
}
protected abstract getJustification(): string;
}
export class InstallSpeechProviderForVoiceChatAction extends BaseInstallSpeechProviderAction {
static readonly ID = 'workbench.action.chat.installProviderForVoiceChat';
constructor() {
super({
id: InstallSpeechProviderForVoiceChatAction.ID,
title: localize2('workbench.action.chat.installProviderForVoiceChat.label', "Start Voice Chat"),
icon: Codicon.mic,
precondition: InstallingSpeechProvider.negate(),
menu: [{
id: MenuId.ChatExecute,
when: HasSpeechProvider.negate(),
group: 'navigation',
order: -1
}, {
id: TerminalChatExecute,
when: HasSpeechProvider.negate(),
group: 'navigation',
order: -1
}]
});
}
protected getJustification(): string {
return localize('installProviderForVoiceChat.justification', "Microphone support requires this extension.");
}
}
export class InstallSpeechProviderForSynthesizeChatAction extends BaseInstallSpeechProviderAction {
static readonly ID = 'workbench.action.chat.installProviderForSynthesis';
constructor() {
super({
id: InstallSpeechProviderForSynthesizeChatAction.ID,
title: localize2('workbench.action.chat.installProviderForSynthesis.label', "Read Aloud"),
icon: Codicon.unmute,
precondition: InstallingSpeechProvider.negate(),
menu: [{
id: MenuId.ChatMessageTitle,
when: HasSpeechProvider.negate(),
group: 'navigation'
}]
});
}
protected getJustification(): string {
return localize('installProviderForSynthesis.justification', "Speaker support requires this extension.");
}
}
//#endregion
registerThemingParticipant((theme, collector) => {
let activeRecordingColor: Color | undefined;
let activeRecordingDimmedColor: Color | undefined;
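Note: both install actions above guard the InstallingSpeechProvider context key with try/finally so it cannot stay stuck if the extension install throws. The same guard, reduced to a hedged, generic sketch with a hypothetical helper name:

import { IContextKey } from 'vs/platform/contextkey/common/contextkey';

// Hypothetical helper: mark a context key busy for the duration of an async task.
async function withBusyKey<T>(busy: IContextKey<boolean>, task: () => Promise<T>): Promise<T> {
    busy.set(true);
    try {
        return await task();
    } finally {
        busy.set(false); // always cleared, even if the task rejects
    }
}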

View file

@@ -3,7 +3,7 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { InlineVoiceChatAction, QuickVoiceChatAction, StartVoiceChatAction, StopListeningInQuickChatAction, StopListeningInChatEditorAction, StopListeningInChatViewAction, VoiceChatInChatViewAction, StopListeningAction, StopListeningAndSubmitAction, KeywordActivationContribution, InstallSpeechProviderForSynthesizeChatAction, InstallSpeechProviderForVoiceChatAction, StopListeningInTerminalChatAction, HoldToVoiceChatInChatViewAction, ReadChatResponseAloud, StopReadAloud, StopReadChatItemAloud } from 'vs/workbench/contrib/chat/electron-sandbox/actions/voiceChatActions';
import { InlineVoiceChatAction, QuickVoiceChatAction, StartVoiceChatAction, VoiceChatInChatViewAction, StopListeningAction, StopListeningAndSubmitAction, KeywordActivationContribution, InstallSpeechProviderForSynthesizeChatAction, InstallSpeechProviderForVoiceChatAction, HoldToVoiceChatInChatViewAction, ReadChatResponseAloud, StopReadAloud, StopReadChatItemAloud } from 'vs/workbench/contrib/chat/electron-sandbox/actions/voiceChatActions';
import { registerAction2 } from 'vs/platform/actions/common/actions';
import { WorkbenchPhase, registerWorkbenchContribution2 } from 'vs/workbench/common/contributions';
@@ -18,11 +18,6 @@ registerAction2(InlineVoiceChatAction);
registerAction2(StopListeningAction);
registerAction2(StopListeningAndSubmitAction);
registerAction2(StopListeningInChatViewAction);
registerAction2(StopListeningInChatEditorAction);
registerAction2(StopListeningInQuickChatAction);
registerAction2(StopListeningInTerminalChatAction);
registerAction2(ReadChatResponseAloud);
registerAction2(StopReadChatItemAloud);
registerAction2(StopReadAloud);

View file

@ -11,10 +11,11 @@ import { DisposableStore, IDisposable, toDisposable } from 'vs/base/common/lifec
import { ensureNoDisposablesAreLeakedInTestSuite } from 'vs/base/test/common/utils';
import { ProviderResult } from 'vs/editor/common/languages';
import { ExtensionIdentifier } from 'vs/platform/extensions/common/extensions';
import { MockContextKeyService } from 'vs/platform/keybinding/test/common/mockKeybindingService';
import { ChatAgentLocation, IChatAgent, IChatAgentCommand, IChatAgentData, IChatAgentHistoryEntry, IChatAgentImplementation, IChatAgentMetadata, IChatAgentRequest, IChatAgentResult, IChatAgentService } from 'vs/workbench/contrib/chat/common/chatAgents';
import { IChatModel } from 'vs/workbench/contrib/chat/common/chatModel';
import { IChatProgress, IChatFollowup } from 'vs/workbench/contrib/chat/common/chatService';
import { IVoiceChatSessionOptions, IVoiceChatTextEvent, VoiceChatService } from 'vs/workbench/contrib/chat/common/voiceChat';
import { IVoiceChatSessionOptions, IVoiceChatTextEvent, VoiceChatService } from 'vs/workbench/contrib/chat/common/voiceChatService';
import { ISpeechProvider, ISpeechService, ISpeechToTextEvent, ISpeechToTextSession, ITextToSpeechSession, KeywordRecognitionStatus, SpeechToTextStatus } from 'vs/workbench/contrib/speech/common/speechService';
import { nullExtensionDescription } from 'vs/workbench/services/extensions/common/extensions';
@@ -121,7 +122,7 @@ suite('VoiceChat', () => {
setup(() => {
emitter = disposables.add(new Emitter<ISpeechToTextEvent>());
service = disposables.add(new VoiceChatService(new TestSpeechService(), new TestChatAgentService()));
service = disposables.add(new VoiceChatService(new TestSpeechService(), new TestChatAgentService(), new MockContextKeyService()));
});
teardown(() => {

View file

@@ -122,8 +122,13 @@ export class InlineChatController implements IEditorContribution {
private readonly _onWillStartSession = this._store.add(new Emitter<void>());
readonly onWillStartSession = this._onWillStartSession.event;
readonly onDidAcceptInput = Event.filter(this._messages.event, m => m === Message.ACCEPT_INPUT, this._store);
readonly onDidCancelInput = Event.filter(this._messages.event, m => m === Message.CANCEL_INPUT || m === Message.CANCEL_SESSION, this._store);
get chatWidget() {
if (this._input.value.isVisible) {
return this._input.value.chatWidget;
} else {
return this._zone.value.widget.chatWidget;
}
}
private readonly _sessionStore = this._store.add(new DisposableStore());
private readonly _stashedSession = this._store.add(new MutableDisposable<StashedSession>());
@@ -1019,35 +1024,13 @@ export class InlineChatController implements IEditorContribution {
// ---- controller API
get scopedContextKeyService(): IContextKeyService {
if (this._input.value.isVisible) {
return this._input.value.chatWidget.scopedContextKeyService;
} else {
return this._zone.value.widget.chatWidget.scopedContextKeyService;
}
}
showSaveHint(): void {
const status = localize('savehint', "Accept or discard changes to continue saving");
this._zone.value.widget.updateStatus(status, { classes: ['warn'] });
}
setPlaceholder(text: string): void {
this._forcedPlaceholder = text;
this._updatePlaceholder();
}
resetPlaceholder(): void {
this._forcedPlaceholder = undefined;
this._updatePlaceholder();
}
acceptInput() {
if (this._input.value.isVisible) {
return this._input.value.chatWidget.acceptInput();
} else {
return this._zone.value.widget.chatWidget.acceptInput();
}
return this.chatWidget.acceptInput();
}
updateInput(text: string, selectAll = true): void {
@@ -1061,12 +1044,6 @@ export class InlineChatController implements IEditorContribution {
}
}
getInput(): string {
return this._input.value.isVisible
? this._input.value.value
: this._zone.value.widget.value;
}
cancelCurrentRequest(): void {
this._messages.fire(Message.CANCEL_INPUT | Message.CANCEL_REQUEST);
}
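Note: with the new chatWidget getter selecting between the floating input and the zone widget, callers such as the voice session controller can drive inline chat through the widget directly, which is why the per-method visibility branching above could be removed. A hedged usage sketch (submitToInlineChat is an illustrative helper, not part of this change):

import { InlineChatController } from 'vs/workbench/contrib/inlineChat/browser/inlineChatController';

// Drive inline chat through the consolidated chatWidget getter.
async function submitToInlineChat(controller: InlineChatController, text: string): Promise<void> {
    const widget = controller.chatWidget; // floating input when visible, zone widget otherwise
    widget.setInput(text);
    await widget.acceptInput();
}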

View file

@@ -14,9 +14,9 @@ import { language } from 'vs/base/common/platform';
export const ISpeechService = createDecorator<ISpeechService>('speechService');
export const HasSpeechProvider = new RawContextKey<boolean>('hasSpeechProvider', false, { type: 'string', description: localize('hasSpeechProvider', "A speech provider is registered to the speech service.") });
export const SpeechToTextInProgress = new RawContextKey<boolean>('speechToTextInProgress', false, { type: 'string', description: localize('speechToTextInProgress', "A speech-to-text session is in progress.") });
export const TextToSpeechInProgress = new RawContextKey<boolean>('textToSpeechInProgress', false, { type: 'string', description: localize('textToSpeechInProgress', "A text-to-speech session is in progress.") });
export const HasSpeechProvider = new RawContextKey<boolean>('hasSpeechProvider', false, { type: 'boolean', description: localize('hasSpeechProvider', "A speech provider is registered to the speech service.") });
export const SpeechToTextInProgress = new RawContextKey<boolean>('speechToTextInProgress', false, { type: 'boolean', description: localize('speechToTextInProgress', "A speech-to-text session is in progress.") });
export const TextToSpeechInProgress = new RawContextKey<boolean>('textToSpeechInProgress', false, { type: 'boolean', description: localize('textToSpeechInProgress', "A text-to-speech session is in progress.") });
export interface ISpeechProviderMetadata {
readonly extension: ExtensionIdentifier;

View file

@@ -78,7 +78,7 @@ export class TerminalChatController extends Disposable implements ITerminalContr
}
readonly onDidAcceptInput = Event.filter(this._messages.event, m => m === Message.ACCEPT_INPUT, this._store);
readonly onDidCancelInput = Event.filter(this._messages.event, m => m === Message.CANCEL_INPUT || m === Message.CANCEL_SESSION, this._store);
get onDidHideInput() { return this.chatWidget?.onDidHideInput ?? Event.None; }
private _terminalAgentName = 'terminal';
private _terminalAgentId: string | undefined;
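Note: the terminal controller's onDidHideInput falls back to Event.None while the chat widget has not been created yet, so listeners can subscribe unconditionally. The same optional-event pattern as a hedged, generic sketch (names are illustrative):

import { Event } from 'vs/base/common/event';

// Hypothetical host with a lazily created widget that exposes an onDidHide event.
interface ILazyWidget { readonly onDidHide: Event<void> }

class LazyHost {
    private widget: ILazyWidget | undefined;

    // Always returns something subscribable, even before the widget exists.
    get onDidHide(): Event<void> {
        return this.widget?.onDidHide ?? Event.None;
    }
}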

View file

@@ -5,7 +5,7 @@
import type { Terminal as RawXtermTerminal } from '@xterm/xterm';
import { Dimension, getActiveWindow, IFocusTracker, trackFocus } from 'vs/base/browser/dom';
import { Event } from 'vs/base/common/event';
import { Emitter, Event } from 'vs/base/common/event';
import { Disposable, toDisposable } from 'vs/base/common/lifecycle';
import { MicrotaskDelay } from 'vs/base/common/symbols';
import 'vs/css!./media/terminalChatWidget';
@@ -27,6 +27,9 @@ export class TerminalChatWidget extends Disposable {
private readonly _container: HTMLElement;
private readonly _onDidHideInput = this._register(new Emitter<void>());
readonly onDidHideInput = this._onDidHideInput.event;
private readonly _inlineChatWidget: InlineChatWidget;
public get inlineChatWidget(): InlineChatWidget { return this._inlineChatWidget; }
@@ -171,6 +174,7 @@ export class TerminalChatWidget extends Disposable {
this._inlineChatWidget.value = '';
this._instance.focus();
this._setTerminalOffset(undefined);
this._onDidHideInput.fire();
}
private _setTerminalOffset(offset: number | undefined) {
if (offset === undefined || this._container.classList.contains('hide')) {