Merge pull request #205109 from microsoft/joh/vulnerable-wasp

remove ChatMessage and add LanguageModelMessage types so that they can evolve at their own pace
Johannes Rieken 2024-02-13 16:00:24 +01:00 committed by GitHub
commit ad436497cd
8 changed files with 102 additions and 33 deletions
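
For orientation, here is a minimal sketch of the API shape before and after this change. The type names are taken from the diffs below; the surrounding extension code is hypothetical and assumes the relevant proposed APIs are enabled.

// Before: a single ChatMessage class plus a ChatMessageRole enum
// (after this commit these only remain for the provider-facing chatProvider proposal).
const old = new vscode.ChatMessage(vscode.ChatMessageRole.User, 'Hello');

// After: one class per role, so each message kind can evolve at its own pace
// (for example, only the user message carries an optional name).
const system = new vscode.LanguageModelSystemMessage('You are a concise assistant.');
const user = new vscode.LanguageModelUserMessage('Hello', 'alice');
const assistant = new vscode.LanguageModelAssistantMessage('Hi there.');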

View file

@@ -1665,6 +1665,9 @@ export function createApiFactoryAndRegisterActors(accessor: ServicesAccessor): I
ChatResponseCommandButtonPart: extHostTypes.ChatResponseCommandButtonPart,
ChatAgentRequestTurn: extHostTypes.ChatAgentRequestTurn,
ChatAgentResponseTurn: extHostTypes.ChatAgentResponseTurn,
LanguageModelSystemMessage: extHostTypes.LanguageModelSystemMessage,
LanguageModelUserMessage: extHostTypes.LanguageModelUserMessage,
LanguageModelAssistantMessage: extHostTypes.LanguageModelAssistantMessage,
};
};
}

View file

@@ -151,9 +151,15 @@ export class ExtHostChatProvider implements ExtHostChatProviderShape {
this._proxy.$handleProgressChunk(requestId, { index: fragment.index, part: fragment.part });
});
if (data.provider.provideLanguageModelResponse2) {
return data.provider.provideLanguageModelResponse2(messages.map(typeConvert.LanguageModelMessage.to), options, ExtensionIdentifier.toKey(from), progress, token);
} else {
// TODO@jrieken remove
return data.provider.provideLanguageModelResponse(messages.map(typeConvert.ChatMessage.to), options, ExtensionIdentifier.toKey(from), progress, token);
}
}
//#region --- making request
$updateLanguageModels(data: { added?: string[] | undefined; removed?: string[] | undefined }): void {
@@ -269,7 +275,7 @@ export class ExtHostChatProvider implements ExtHostChatProviderShape {
}
const cts = new CancellationTokenSource(token);
const requestId = (Math.random() * 1e6) | 0;
const requestPromise = that._proxy.$fetchResponse(from, languageModelId, requestId, messages.map(typeConvert.ChatMessage.from), options ?? {}, cts.token);
const requestPromise = that._proxy.$fetchResponse(from, languageModelId, requestId, messages.map(typeConvert.LanguageModelMessage.from), options ?? {}, cts.token);
const res = new LanguageModelRequest(requestPromise, cts);
that._pendingRequest.set(requestId, { languageModelId, res });

View file

@@ -2242,12 +2242,28 @@ export namespace ChatMessage {
	export function to(message: chatProvider.IChatMessage): vscode.ChatMessage {
		return new types.ChatMessage(ChatMessageRole.to(message.role), message.content);
	}

	export function from(message: vscode.ChatMessage): chatProvider.IChatMessage {
		return {
			role: ChatMessageRole.from(message.role),
			content: message.content,
		};
	}
}

export namespace LanguageModelMessage {

	export function to(message: chatProvider.IChatMessage): vscode.LanguageModelMessage {
		switch (message.role) {
			case chatProvider.ChatMessageRole.System: return new types.LanguageModelSystemMessage(message.content);
			case chatProvider.ChatMessageRole.User: return new types.LanguageModelUserMessage(message.content);
			case chatProvider.ChatMessageRole.Assistant: return new types.LanguageModelAssistantMessage(message.content);
		}
	}

	export function from(message: vscode.LanguageModelMessage): chatProvider.IChatMessage {
		if (message instanceof types.LanguageModelSystemMessage) {
			return { role: chatProvider.ChatMessageRole.System, content: message.content };
		} else if (message instanceof types.LanguageModelUserMessage) {
			return { role: chatProvider.ChatMessageRole.User, content: message.content };
		} else if (message instanceof types.LanguageModelAssistantMessage) {
			return { role: chatProvider.ChatMessageRole.Assistant, content: message.content };
		} else {
			throw new Error('Invalid LanguageModelMessage');
		}
	}
}
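
As a rough illustration of the mapping these converters implement (hypothetical extension-host code; the shapes are taken from the hunk above):

// API class -> role-tagged wire message (IChatMessage)
const wire = LanguageModelMessage.from(new types.LanguageModelUserMessage('Hello'));
// wire: { role: chatProvider.ChatMessageRole.User, content: 'Hello' }

// wire message -> API class, chosen by role
const api = LanguageModelMessage.to({ role: chatProvider.ChatMessageRole.Assistant, content: 'Hi' });
// api instanceof types.LanguageModelAssistantMessage === true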

View file

@@ -4274,6 +4274,32 @@ export class ChatAgentResponseTurn implements vscode.ChatAgentResponseTurn {
) { }
}
export class LanguageModelSystemMessage {
content: string;
constructor(content: string) {
this.content = content;
}
}
export class LanguageModelUserMessage {
content: string;
name: string | undefined;
constructor(content: string, name?: string) {
this.content = content;
this.name = name;
}
}
export class LanguageModelAssistantMessage {
content: string;
constructor(content: string) {
this.content = content;
}
}
//#endregion
//#region ai

View file

@@ -11,7 +11,6 @@ export const allApiProposals = Object.freeze({
authGetSessions: 'https://raw.githubusercontent.com/microsoft/vscode/main/src/vscode-dts/vscode.proposed.authGetSessions.d.ts',
authSession: 'https://raw.githubusercontent.com/microsoft/vscode/main/src/vscode-dts/vscode.proposed.authSession.d.ts',
canonicalUriProvider: 'https://raw.githubusercontent.com/microsoft/vscode/main/src/vscode-dts/vscode.proposed.canonicalUriProvider.d.ts',
chat: 'https://raw.githubusercontent.com/microsoft/vscode/main/src/vscode-dts/vscode.proposed.chat.d.ts',
chatAgents2: 'https://raw.githubusercontent.com/microsoft/vscode/main/src/vscode-dts/vscode.proposed.chatAgents2.d.ts',
chatAgents2Additions: 'https://raw.githubusercontent.com/microsoft/vscode/main/src/vscode-dts/vscode.proposed.chatAgents2Additions.d.ts',
chatProvider: 'https://raw.githubusercontent.com/microsoft/vscode/main/src/vscode-dts/vscode.proposed.chatProvider.d.ts',

View file

@@ -1,24 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
declare module 'vscode' {
// ChatML
export enum ChatMessageRole {
System = 0,
User = 1,
// TODO@API name: align with ChatAgent (or whatever we'll rename that to)
Assistant = 2,
}
// ChatML
export class ChatMessage {
role: ChatMessageRole;
content: string;
constructor(role: ChatMessageRole, content: string);
}
}

View file

@@ -5,6 +5,27 @@
declare module 'vscode' {
// TODO@API NAME: ChatMessageKind?
export enum ChatMessageRole {
System = 0,
User = 1,
// TODO@API name: align with ChatAgent (or whatever we'll rename that to)
Assistant = 2,
}
/**
* A chat message that is used to make chat request against a language model.
*/
export class ChatMessage {
readonly role: ChatMessageRole;
readonly content: string;
constructor(role: ChatMessageRole, content: string);
}
export interface ChatResponseFragment {
index: number;
part: string;
@@ -17,6 +38,7 @@ declare module 'vscode' {
*/
export interface ChatResponseProvider {
provideLanguageModelResponse(messages: ChatMessage[], options: { [name: string]: any }, extensionId: string, progress: Progress<ChatResponseFragment>, token: CancellationToken): Thenable<any>;
provideLanguageModelResponse2?(messages: LanguageModelMessage[], options: { [name: string]: any }, extensionId: string, progress: Progress<ChatResponseFragment>, token: CancellationToken): Thenable<any>;
}
export interface ChatResponseProviderMetadata {
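
A hedged sketch of how a provider could adopt the new optional method; only the interface shapes come from this proposal file, the provider body and its echo behaviour are made up for illustration:

const provider: vscode.ChatResponseProvider = {
	// Legacy path: still required by the interface; the extension host falls back
	// to it when provideLanguageModelResponse2 is not implemented.
	async provideLanguageModelResponse(messages, options, extensionId, progress, token) {
		progress.report({ index: 0, part: `echo: ${messages[messages.length - 1]?.content ?? ''}` });
	},
	// New path: messages arrive as LanguageModelSystem/User/AssistantMessage instances.
	async provideLanguageModelResponse2(messages, options, extensionId, progress, token) {
		const lastUser = messages.filter(m => m instanceof vscode.LanguageModelUserMessage).pop();
		progress.report({ index: 0, part: `echo: ${lastUser?.content ?? ''}` });
	}
};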

View file

@@ -17,6 +17,27 @@ declare module 'vscode' {
stream: AsyncIterable<string>;
}
//TODO@API see https://learn.microsoft.com/en-us/dotnet/api/azure.ai.openai.chatrequestmessage?view=azure-dotnet-preview
// this allows to grow message by type, e.g add more content types to User message to support multimodal language models
export class LanguageModelSystemMessage {
content: string;
constructor(content: string);
}
export class LanguageModelUserMessage {
content: string;
name: string | undefined;
constructor(content: string, name?: string);
}
export class LanguageModelAssistantMessage {
content: string;
constructor(content: string);
}
export type LanguageModelMessage = LanguageModelSystemMessage | LanguageModelUserMessage | LanguageModelAssistantMessage;
/**
* Represents access to using a language model. Access can be revoked at any time and extension
* must check if the access is {@link LanguageModelAccess.isRevoked still valid} before using it.
@@ -49,7 +70,7 @@ declare module 'vscode' {
* @param messages
* @param options
*/
makeChatRequest(messages: ChatMessage[], options: { [name: string]: any }, token: CancellationToken): LanguageModelResponse;
makeChatRequest(messages: LanguageModelMessage[], options: { [name: string]: any }, token: CancellationToken): LanguageModelResponse;
}
export interface LanguageModelAccessOptions {
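
A hedged consumer-side sketch to go with the new signature, assuming an access object was already obtained through the proposal's request function (not part of this hunk); makeChatRequest, the message classes, and the streamed response are the pieces shown in the diff:

async function ask(access: vscode.LanguageModelAccess, question: string, token: vscode.CancellationToken): Promise<string> {
	// Per the doc comment above, access can be revoked at any time.
	if (access.isRevoked) {
		throw new Error('language model access has been revoked');
	}
	const messages: vscode.LanguageModelMessage[] = [
		new vscode.LanguageModelSystemMessage('Answer briefly.'),
		new vscode.LanguageModelUserMessage(question)
	];
	const response = access.makeChatRequest(messages, {}, token);
	let result = '';
	for await (const part of response.stream) {
		result += part;
	}
	return result;
}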