From b0cc8df73c3557725c3af5344e12d86b852a751f Mon Sep 17 00:00:00 2001 From: JinmingYang <2214962083@qq.com> Date: Mon, 3 Jul 2023 02:22:37 +0800 Subject: [PATCH] feat(gpt-runner-vscode): add watching current edit file as context --- docs/example-cn.gpt.md | 4 +- docs/example.gpt.md | 4 +- .../src/core/get-common-file-tree.ts | 26 ++- .../src/common/helpers/debug.ts | 3 +- .../src/common/types/config/base.config.ts | 2 + .../src/common/types/server.ts | 4 +- ...gging-fface.zod.ts => hugging-face.zod.ts} | 0 .../src/common/zod/config/index.ts | 2 +- .../src/common/zod/config/user.config.ts | 17 +- .../src/common/zod/server.zod.ts | 4 +- .../node/helpers/tunnel/binary-downloader.ts | 3 +- .../src/register/sync-opening-file-paths.ts | 11 +- .../client/public/locales/de.json | 20 +- .../client/public/locales/en.json | 20 +- .../client/public/locales/ja.json | 20 +- .../client/public/locales/zh_CN.json | 20 +- .../client/public/locales/zh_Hant.json | 20 +- .../hook-form-input/hook-form-input.styles.ts | 2 + .../src/components/icon-button/index.tsx | 8 +- .../src/components/popover-menu/index.tsx | 9 +- .../src/components/select-option/index.tsx | 4 +- .../select-option/select-option.styles.ts | 14 +- .../client/src/hooks/use-debounce-fn.hook.ts | 54 ++++++ .../client/src/hooks/use-emit-bind.hook.ts | 14 +- .../hooks/use-get-common-files-tree.hook.ts | 32 ++++ .../client/src/hooks/use-token-num.hook.ts | 117 ++++++++++++ .../gpt-runner-web/client/src/networks/llm.ts | 14 ++ .../chat-panel/chat-panel.styles.ts | 9 + .../chat/components/chat-panel/index.tsx | 13 +- .../pages/chat/components/file-tree/index.tsx | 177 +++++------------- .../chat/components/init-gpt-files/index.tsx | 2 + .../context-settings.styles.ts | 22 +++ .../components/context-settings/index.tsx | 152 +++++++++++++++ .../model-settings/base-model-settings.tsx | 62 +++--- .../model-settings/base-secrets-settings.tsx | 106 +++++++++++ .../components/model-settings/index.tsx | 2 +- 
.../openai-settings/model-settings.tsx | 32 +--- .../openai-settings/secrets-settings.tsx | 158 ++++++---------- .../components/settings/settings.styles.ts | 2 + .../chat/components/top-toolbar/index.tsx | 92 +++++---- .../top-toolbar/top-toolbar.styles.ts | 15 ++ .../client/src/pages/chat/index.tsx | 10 + .../src/store/zustand/global/chat.slice.ts | 41 +++- .../store/zustand/global/file-tree.slice.ts | 58 ++++-- .../client/src/store/zustand/temp/index.ts | 78 +++++++- .../client/src/store/zustand/utils.ts | 4 +- .../src/controllers/gpt-files.controller.ts | 3 +- .../server/src/controllers/llm.controller.ts | 44 +++-- .../gpt-runner-web/server/src/middleware.ts | 3 +- .../server/src/services/index.ts | 1 - playground/scripts/gpt/i18n-helper.gpt.md | 44 +++++ playground/scripts/gpt/solid.gpt.md | 2 +- 52 files changed, 1158 insertions(+), 422 deletions(-) rename packages/gpt-runner-shared/src/common/zod/config/{hugging-fface.zod.ts => hugging-face.zod.ts} (100%) create mode 100644 packages/gpt-runner-web/client/src/hooks/use-debounce-fn.hook.ts create mode 100644 packages/gpt-runner-web/client/src/hooks/use-get-common-files-tree.hook.ts create mode 100644 packages/gpt-runner-web/client/src/hooks/use-token-num.hook.ts create mode 100644 packages/gpt-runner-web/client/src/pages/chat/components/settings/components/context-settings/context-settings.styles.ts create mode 100644 packages/gpt-runner-web/client/src/pages/chat/components/settings/components/context-settings/index.tsx create mode 100644 packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/base-secrets-settings.tsx delete mode 100644 packages/gpt-runner-web/server/src/services/index.ts create mode 100644 playground/scripts/gpt/i18n-helper.gpt.md diff --git a/docs/example-cn.gpt.md b/docs/example-cn.gpt.md index 3c89ed3..2dee584 100644 --- a/docs/example-cn.gpt.md +++ b/docs/example-cn.gpt.md @@ -2,7 +2,7 @@ { "title": "分类目录/AI角色名字", "model": { - "modalName": 
"gpt-3.5-turbo-16k", + "modelName": "gpt-3.5-turbo-16k", "temperature": 0 } } @@ -20,6 +20,6 @@ 这里可以写你的备注 -`model` / `modalName` / `temperature` / `System Prompt` / `User Prompt` 都是**可选**参数,而且可定制参数还有非常多。 +`model` / `modelName` / `temperature` / `System Prompt` / `User Prompt` 都是**可选**参数,而且可定制参数还有非常多。 你还可以通过项目根目录下的 `gptr.config.json` 覆盖很多参数的默认值 diff --git a/docs/example.gpt.md b/docs/example.gpt.md index 9a48c4f..e948822 100644 --- a/docs/example.gpt.md +++ b/docs/example.gpt.md @@ -2,7 +2,7 @@ { "title": "Categories-Name/AI-Preset-Name", "model": { - "modalName": "gpt-3.5-turbo-16k", + "modelName": "gpt-3.5-turbo-16k", "temperature": 0 } } @@ -21,6 +21,6 @@ when you create a new chat with this preset, user prompt text will auto fill in Here you can write your remarks -`model` / `modalName` / `temperature` / `System Prompt` / `User Prompt` are **optional** parameters, and there are many customizable parameters +`model` / `modelName` / `temperature` / `System Prompt` / `User Prompt` are **optional** parameters, and there are many customizable parameters You can also override the default values of many parameters through `gptr.config.json` in the project root directory diff --git a/packages/gpt-runner-core/src/core/get-common-file-tree.ts b/packages/gpt-runner-core/src/core/get-common-file-tree.ts index a9992ee..caf5ee0 100644 --- a/packages/gpt-runner-core/src/core/get-common-file-tree.ts +++ b/packages/gpt-runner-core/src/core/get-common-file-tree.ts @@ -143,10 +143,16 @@ export async function getCommonFileTree(params: GetCommonFileTreeParams): Promis export interface CreateFileContextParams { rootPath: string filePaths: string[] + editingFilePath?: string } export async function createFileContext(params: CreateFileContextParams) { - const { rootPath, filePaths } = params + const { rootPath, filePaths, editingFilePath } = params + + // exclude editing file path + const contextFilePaths = editingFilePath ? 
filePaths.filter(filePath => filePath !== editingFilePath) : filePaths + const editingFileRelativePath = editingFilePath ? PathUtils.relative(rootPath, editingFilePath) : '' + const editingFileContent = editingFilePath ? await FileUtils.readFile({ filePath: editingFilePath }) : '' const baseTips = `Please answer the user's question based on the user's file path and file content. The file path and file content will be separated by five single quotes. @@ -155,7 +161,7 @@ export async function createFileContext(params: CreateFileContextParams) { let tips = baseTips - for (const filePath of filePaths) { + for (const filePath of contextFilePaths) { const relativePath = PathUtils.relative(rootPath, filePath) const content = await FileUtils.readFile({ filePath }) @@ -172,6 +178,22 @@ ${content} tips += fileTips } + if (editingFileRelativePath) { + tips += `\nAt the same time, +User is editing the content of this file, +maybe User is asking you about this file, +Here is the file: +''''' +[file path] +${editingFileRelativePath} + +[file content] +${editingFileContent} +''''' + +` + } + tips += `\nWhen you want to create/modify/delete a file or talk about a file, you should always return the full path of the file. For example, if user provide you with a file path \`src/component/button.ts\`, you should return \`src/component/button.ts\` instead of \`button.ts\ when you talk about it. 
diff --git a/packages/gpt-runner-shared/src/common/helpers/debug.ts b/packages/gpt-runner-shared/src/common/helpers/debug.ts index ae9ac17..2f9e1d7 100644 --- a/packages/gpt-runner-shared/src/common/helpers/debug.ts +++ b/packages/gpt-runner-shared/src/common/helpers/debug.ts @@ -9,8 +9,9 @@ export class Debug { constructor(label: string) { this.label = `gpt-runner:${label}` - if (process.env.DEBUG === 'enabled') + if (process.env.DEBUG) debug.enable(this.label) + this.debugger = debug(this.label) // @ts-ignore diff --git a/packages/gpt-runner-shared/src/common/types/config/base.config.ts b/packages/gpt-runner-shared/src/common/types/config/base.config.ts index df5b55c..fc156d9 100644 --- a/packages/gpt-runner-shared/src/common/types/config/base.config.ts +++ b/packages/gpt-runner-shared/src/common/types/config/base.config.ts @@ -24,6 +24,8 @@ export interface ChatModelTypeMap { [ChatModelType.HuggingFace]: HuggingFaceModelConfig } +export type PartialChatModelTypeMap = Partial + export type GetModelConfigType = { config: ChatModelTypeMap[T] secrets: ChatModelTypeMap[T]['secrets'] diff --git a/packages/gpt-runner-shared/src/common/types/server.ts b/packages/gpt-runner-shared/src/common/types/server.ts index d14ac65..0529218 100644 --- a/packages/gpt-runner-shared/src/common/types/server.ts +++ b/packages/gpt-runner-shared/src/common/types/server.ts @@ -1,5 +1,5 @@ import type { FileInfoTree } from './common-file' -import type { SingleChatMessage, SingleFileConfig, UserConfig } from './config' +import type { PartialChatModelTypeMap, SingleChatMessage, SingleFileConfig, UserConfig } from './config' import type { ServerStorageName } from './enum' import type { GptFileInfo, GptFileInfoTree } from './gpt-file' @@ -34,7 +34,9 @@ export interface ChatStreamReqParams { * and get the real time singleFileConfig and then provide singleFileConfig to LangchainJs */ singleFileConfig?: SingleFileConfig + overrideModelsConfig?: PartialChatModelTypeMap contextFilePaths?: string[] + 
editingFilePath?: string rootPath?: string } diff --git a/packages/gpt-runner-shared/src/common/zod/config/hugging-fface.zod.ts b/packages/gpt-runner-shared/src/common/zod/config/hugging-face.zod.ts similarity index 100% rename from packages/gpt-runner-shared/src/common/zod/config/hugging-fface.zod.ts rename to packages/gpt-runner-shared/src/common/zod/config/hugging-face.zod.ts diff --git a/packages/gpt-runner-shared/src/common/zod/config/index.ts b/packages/gpt-runner-shared/src/common/zod/config/index.ts index a853852..6b54b2f 100644 --- a/packages/gpt-runner-shared/src/common/zod/config/index.ts +++ b/packages/gpt-runner-shared/src/common/zod/config/index.ts @@ -2,7 +2,7 @@ import type { z } from 'zod' import type { GetModelConfigType } from '../../types' import { ChatModelType } from '../../types' import { OpenaiModelConfigSchema, OpenaiSecretsSchema } from './openai.zod' -import { HuggingFaceModelConfigSchema, HuggingFaceSecretsSchema } from './hugging-fface.zod' +import { HuggingFaceModelConfigSchema, HuggingFaceSecretsSchema } from './hugging-face.zod' export * from './base.zod' export * from './openai.zod' diff --git a/packages/gpt-runner-shared/src/common/zod/config/user.config.ts b/packages/gpt-runner-shared/src/common/zod/config/user.config.ts index 360ecbb..adad8e2 100644 --- a/packages/gpt-runner-shared/src/common/zod/config/user.config.ts +++ b/packages/gpt-runner-shared/src/common/zod/config/user.config.ts @@ -1,7 +1,9 @@ import { z } from 'zod' -import type { FilterPattern, FormCheckboxGroupConfig, FormFieldBaseConfig, FormInputConfig, FormItemConfig, FormOption, FormRadioGroupConfig, FormSelectConfig, FormTextareaConfig, SingleChatMessage, SingleFileConfig, UserConfig, UserConfigForUser } from '../../types' +import { type ChatModel, ChatModelType, type FilterPattern, type FormCheckboxGroupConfig, type FormFieldBaseConfig, type FormInputConfig, type FormItemConfig, type FormOption, type FormRadioGroupConfig, type FormSelectConfig, type 
FormTextareaConfig, type SingleChatMessage, type SingleFileConfig, type UserConfig, type UserConfigForUser } from '../../types' import { ChatRoleSchema } from '../enum.zod' +import type { PartialChatModelTypeMap } from './../../types/config/base.config' import { OpenaiModelConfigSchema } from './openai.zod' +import { HuggingFaceModelConfigSchema } from './hugging-face.zod' export const FilterPatternSchema = z.union([ z.array(z.union([z.string(), z.instanceof(RegExp)])), @@ -12,8 +14,19 @@ export const FilterPatternSchema = z.union([ z.undefined(), ]) satisfies z.ZodType +// OpenaiModelConfigSchema or HuggingFaceModelConfigSchema +export const ChatModelSchema = z.union([ + OpenaiModelConfigSchema, + HuggingFaceModelConfigSchema, +]) satisfies z.ZodType + +export const PartialChatModelTypeMapSchema = z.object({ + [ChatModelType.Openai]: OpenaiModelConfigSchema.optional(), + [ChatModelType.HuggingFace]: HuggingFaceModelConfigSchema.optional(), +}) satisfies z.ZodType + export const UserConfigSchema = z.object({ - model: OpenaiModelConfigSchema.optional().describe('The LLM model configuration'), + model: ChatModelSchema.optional().describe('The LLM model configuration'), rootPath: z.string().optional().describe('The root path of the project'), exts: z.array(z.string()).optional().default(['.gpt.md']).describe('The file extensions to be used'), includes: FilterPatternSchema.optional().default(null).describe('The include patterns for filtering files'), diff --git a/packages/gpt-runner-shared/src/common/zod/server.zod.ts b/packages/gpt-runner-shared/src/common/zod/server.zod.ts index e690f34..b82e99f 100644 --- a/packages/gpt-runner-shared/src/common/zod/server.zod.ts +++ b/packages/gpt-runner-shared/src/common/zod/server.zod.ts @@ -1,6 +1,6 @@ import { z } from 'zod' import type { ChatStreamReqParams, GetCommonFilesReqParams, GetGptFileInfoReqParams, GetGptFilesReqParams, GetUserConfigReqParams, InitGptFilesReqParams, OpenEditorReqParams, StorageClearReqParams, 
StorageGetItemReqParams, StorageRemoveItemReqParams, StorageSetItemReqParams } from '../types' -import { SingleChatMessageSchema, SingleFileConfigSchema } from './config' +import { PartialChatModelTypeMapSchema, SingleChatMessageSchema, SingleFileConfigSchema } from './config' import { ServerStorageNameSchema } from './enum.zod' export const ChatStreamReqParamsSchema = z.object({ @@ -10,7 +10,9 @@ export const ChatStreamReqParamsSchema = z.object({ appendSystemPrompt: z.string().optional(), singleFilePath: z.string().optional(), singleFileConfig: SingleFileConfigSchema.optional(), + overrideModelsConfig: PartialChatModelTypeMapSchema.optional(), contextFilePaths: z.array(z.string()).optional(), + editingFilePath: z.string().optional(), rootPath: z.string().optional(), }) satisfies z.ZodType diff --git a/packages/gpt-runner-shared/src/node/helpers/tunnel/binary-downloader.ts b/packages/gpt-runner-shared/src/node/helpers/tunnel/binary-downloader.ts index 3ad1577..e097e68 100644 --- a/packages/gpt-runner-shared/src/node/helpers/tunnel/binary-downloader.ts +++ b/packages/gpt-runner-shared/src/node/helpers/tunnel/binary-downloader.ts @@ -5,8 +5,6 @@ import { getGlobalCacheDir } from '../get-cache-dir' import { getAxiosInstance } from '../axios' import { Debug } from '../../../common' -const debug = new Debug('tunnel') - // see: https://github.com/gradio-app/gradio/blob/main/gradio/tunneling.py export class BinaryDownloader { private static readonly VERSION = '0.2' @@ -23,6 +21,7 @@ export class BinaryDownloader { } public static async downloadBinary() { + const debug = new Debug('tunnel') const binaryPath = await BinaryDownloader.getBinaryPath() if (!fs.existsSync(binaryPath)) { diff --git a/packages/gpt-runner-vscode/src/register/sync-opening-file-paths.ts b/packages/gpt-runner-vscode/src/register/sync-opening-file-paths.ts index 9366de2..28f8dbf 100644 --- a/packages/gpt-runner-vscode/src/register/sync-opening-file-paths.ts +++ 
b/packages/gpt-runner-vscode/src/register/sync-opening-file-paths.ts @@ -39,7 +39,7 @@ export async function registerSyncOpeningFilePaths( const maybeActiveDocs: (vscode.TextDocument | undefined)[] = [ vscode.window.activeTextEditor?.document, state.activeEditor?.document, - ...vscode.window.visibleTextEditors.map(editor => editor.document), + // ...vscode.window.visibleTextEditors.map(editor => editor.document), ] state.activeFilePath = toUnixPath(maybeActiveDocs.find(doc => docIsFile(doc))?.uri.fsPath ?? '') @@ -63,9 +63,12 @@ export async function registerSyncOpeningFilePaths( debounceUpdateActiveFile() })) - // update files when vscode is activated - debounceUpdateOpenFiles() - debounceUpdateActiveFile() + setTimeout(() => { + // wait for all document to be load + // update files when vscode is activated + debounceUpdateOpenFiles() + debounceUpdateActiveFile() + }, 1000) return vscode.Disposable.from({ dispose, diff --git a/packages/gpt-runner-web/client/public/locales/de.json b/packages/gpt-runner-web/client/public/locales/de.json index b6af38e..e719db7 100644 --- a/packages/gpt-runner-web/client/public/locales/de.json +++ b/packages/gpt-runner-web/client/public/locales/de.json @@ -33,7 +33,6 @@ "search_placeholder": "Suchen...", "file_tree_top_tokens_tips": "{{fileNum}} Dateien {{tokenNum}} Tokens.", "file_tree_top_clear_checked_btn": "Auswahl aufheben", - "file_tree_top_all_file_path_as_prompt": "Alle Dateipfade als Vorschlag {{tokenNum}} Tokens", "search_files_placeholder": "Dateien durchsuchen...", "no_gpt_files_tips": "Es gibt keine xxx.gpt.md Datei im aktuellen Verzeichnis.", "ask_for_create_gpt_file_tips": "Möchten Sie eine {{fileName}} Datei erstellen?", @@ -46,6 +45,20 @@ "settings_tab_settings": "Einstellungen", "settings_tab_config_info": "Konfigurationsinformationen", "settings_tab_about": "Über", + "override_settings": "Einstellungen überschreiben", + "override_all_settings": "Alle Einstellungen überschreiben", + "context_settings": 
"Kontext-Einstellungen", + "context_settings_opening_ide_file_contents_checkbox_tips": "Öffnen von IDE-Dateiinhalten als Aufforderung{{fileNum}}Dateien{{tokenNum}}Token.", + "context_settings_active_ide_file_contents_checkbox_tips": "Aktive IDE-Dateiinhalte als Aufforderung{{tokenNum}}Token.", + "context_settings_selected_files_checkbox_label": "Ausgewählte Dateien als Aufforderung. Aktuell ausgewählte{{fileNum}}Dateien{{tokenNum}}Token.", + "context_settings_all_file_paths_checkbox_label": "Alle Dateipfade als Aufforderung{{tokenNum}}Token.", + "model_settings_btn": "Modell-Einstellungen", + "openai_model_name": "Modellname", + "openai_temperature": "Temperatur", + "openai_max_tokens": "Maximale Antwort-Token", + "openai_top_p": "Top P", + "openai_frequency_penalty": "Frequenzstrafe", + "openai_presence_penalty": "Anwesenheitsstrafe", "version": "Version", "github": "Github", "reward": "Belohnung", @@ -65,6 +78,7 @@ "toast_save_error": "Speichern fehlgeschlagen!", "toast_create_success": "Erstellen erfolgreich!", "toast_create_error": "Erstellen fehlgeschlagen!", - "toast_copy_success": "Kopiert!" + "toast_copy_success": "Kopiert!", + "toast_selected_files_as_prompt_reopened": "Ausgewählte Dateien als Aufforderung wurden wieder geöffnet!" 
} -} \ No newline at end of file +} diff --git a/packages/gpt-runner-web/client/public/locales/en.json b/packages/gpt-runner-web/client/public/locales/en.json index 7825193..6f9e157 100644 --- a/packages/gpt-runner-web/client/public/locales/en.json +++ b/packages/gpt-runner-web/client/public/locales/en.json @@ -33,7 +33,6 @@ "search_placeholder": "Search...", "file_tree_top_tokens_tips": "{{fileNum}} Files {{tokenNum}} Tokens.", "file_tree_top_clear_checked_btn": "Clear Checked", - "file_tree_top_all_file_path_as_prompt": "All file path as prompt {{tokenNum}} tokens", "search_files_placeholder": "Search files...", "no_gpt_files_tips": "There is no xxx.gpt.md file in the current directory.", "ask_for_create_gpt_file_tips": "Do you want to create a {{fileName}} file?", @@ -46,6 +45,20 @@ "settings_tab_settings": "Settings", "settings_tab_config_info": "Config Info", "settings_tab_about": "About", + "override_settings": "Override Settings", + "override_all_settings": "Override All Settings", + "context_settings": "Context Settings", + "context_settings_opening_ide_file_contents_checkbox_tips": "Opening IDE File Contents As Prompt {{fileNum}} Files {{tokenNum}} Tokens.", + "context_settings_active_ide_file_contents_checkbox_tips": "Active IDE File Contents As Prompt {{tokenNum}} Tokens.", + "context_settings_selected_files_checkbox_label": "Selected Files As Prompt. 
Current Selected {{fileNum}} Files {{tokenNum}} Tokens.", + "context_settings_all_file_paths_checkbox_label": "All File Path As Prompt {{tokenNum}} Tokens.", + "model_settings_btn": "Model Settings", + "openai_model_name": "Model Name", + "openai_temperature": "Temperature", + "openai_max_tokens": "Max Reply Tokens", + "openai_top_p": "Top P", + "openai_frequency_penalty": "Frequency Penalty", + "openai_presence_penalty": "Presence Penalty", "version": "Version", "github": "Github", "reward": "Reward", @@ -65,6 +78,7 @@ "toast_save_error": "Save error!", "toast_create_success": "Create success!", "toast_create_error": "Create error!", - "toast_copy_success": "Copied!" + "toast_copy_success": "Copied!", + "toast_selected_files_as_prompt_reopened": "Selected Files As Prompt Has Reopened!" } -} \ No newline at end of file +} diff --git a/packages/gpt-runner-web/client/public/locales/ja.json b/packages/gpt-runner-web/client/public/locales/ja.json index 88c5168..26f3792 100644 --- a/packages/gpt-runner-web/client/public/locales/ja.json +++ b/packages/gpt-runner-web/client/public/locales/ja.json @@ -33,7 +33,6 @@ "search_placeholder": "検索...", "file_tree_top_tokens_tips": "{{fileNum}}個のファイル、合計{{tokenNum}}トークン。", "file_tree_top_clear_checked_btn": "選択をクリア", - "file_tree_top_all_file_path_as_prompt": "すべてのファイルパスをプロンプトとして使用、合計{{tokenNum}}トークン", "search_files_placeholder": "ファイルを検索...", "no_gpt_files_tips": "現在のディレクトリにxxx.gpt.mdファイルはありません。", "ask_for_create_gpt_file_tips": "{{fileName}}ファイルを作成しますか?", @@ -46,6 +45,20 @@ "settings_tab_settings": "設定", "settings_tab_config_info": "設定情報", "settings_tab_about": "情報", + "override_settings": "設定を上書き", + "override_all_settings": "すべての設定を上書きする", + "context_settings": "コンテキスト設定", + "context_settings_opening_ide_file_contents_checkbox_tips": "開いているIDEファイルの内容をプロンプトとして使用{{fileNum}}ファイル{{tokenNum}}トークン。", + "context_settings_active_ide_file_contents_checkbox_tips": "アクティブなIDEファイルの内容をプロンプトとして使用{{tokenNum}}トークン。", + 
"context_settings_selected_files_checkbox_label": "選択したファイルをプロンプトとして使用。現在選択中{{fileNum}}ファイル{{tokenNum}}トークン。", + "context_settings_all_file_paths_checkbox_label": "すべてのファイルパスをプロンプトとして使用{{tokenNum}}トークン。", + "model_settings_btn": "モデル設定", + "openai_model_name": "モデル名", + "openai_temperature": "温度", + "openai_max_tokens": "最大回答トークン数", + "openai_top_p": "トップP", + "openai_frequency_penalty": "頻度ペナルティ", + "openai_presence_penalty": "存在ペナルティ", "version": "バージョン", "github": "GitHub", "reward": "寄付", @@ -65,6 +78,7 @@ "toast_save_error": "保存できませんでした!", "toast_create_success": "作成しました!", "toast_create_error": "作成できませんでした!", - "toast_copy_success": "コピーしました!" + "toast_copy_success": "コピーしました!", + "toast_selected_files_as_prompt_reopened": "選択したファイルをプロンプトとして再度開きました!" } -} \ No newline at end of file +} diff --git a/packages/gpt-runner-web/client/public/locales/zh_CN.json b/packages/gpt-runner-web/client/public/locales/zh_CN.json index ce745c9..0cdbceb 100644 --- a/packages/gpt-runner-web/client/public/locales/zh_CN.json +++ b/packages/gpt-runner-web/client/public/locales/zh_CN.json @@ -33,7 +33,6 @@ "search_placeholder": "搜索...", "file_tree_top_tokens_tips": "{{fileNum}}个文件,共{{tokenNum}}个 tokens。", "file_tree_top_clear_checked_btn": "清除选中", - "file_tree_top_all_file_path_as_prompt": "将所有文件路径作为提示,共 {{tokenNum}} 个 tokens", "search_files_placeholder": "搜索文件...", "no_gpt_files_tips": "当前目录中没有xxx.gpt.md文件。", "ask_for_create_gpt_file_tips": "是否要创建一个{{fileName}}文件?", @@ -46,6 +45,20 @@ "settings_tab_settings": "设置", "settings_tab_config_info": "配置信息", "settings_tab_about": "关于", + "override_settings": "覆盖设置", + "override_all_settings": "覆盖所有设置", + "context_settings": "上下文设置", + "context_settings_opening_ide_file_contents_checkbox_tips": "将 IDE 正在打开的文件内容作为提示,{{fileNum}} 个文件, {{tokenNum}} tokens", + "context_settings_active_ide_file_contents_checkbox_tips": "将 IDE 正在编辑的文件内容作为提示,{{tokenNum}} tokens", + "context_settings_selected_files_checkbox_label": "将选定的文件作为提示,当前选定 {{fileNum}} 
个文件,{{tokenNum}} tokens", + "context_settings_all_file_paths_checkbox_label": "将所有文件路径作为提示,{{tokenNum}} tokens", + "model_settings_btn": "模型设置", + "openai_model_name": "模型名称", + "openai_temperature": "温度", + "openai_max_tokens": "最大回复令牌", + "openai_top_p": "Top P", + "openai_frequency_penalty": "频率惩罚", + "openai_presence_penalty": "存在惩罚", "version": "版本", "github": "GitHub", "reward": "赞赏", @@ -65,6 +78,7 @@ "toast_save_error": "保存失败!", "toast_create_success": "创建成功!", "toast_create_error": "创建失败!", - "toast_copy_success": "复制成功!" + "toast_copy_success": "复制成功!", + "toast_selected_files_as_prompt_reopened": "已重新打开选定的文件作为提示!" } -} \ No newline at end of file +} diff --git a/packages/gpt-runner-web/client/public/locales/zh_Hant.json b/packages/gpt-runner-web/client/public/locales/zh_Hant.json index 55b18e7..9077eef 100644 --- a/packages/gpt-runner-web/client/public/locales/zh_Hant.json +++ b/packages/gpt-runner-web/client/public/locales/zh_Hant.json @@ -33,7 +33,6 @@ "search_placeholder": "搜索...", "file_tree_top_tokens_tips": "{{fileNum}}個文件,共{{tokenNum}}個 tokens。", "file_tree_top_clear_checked_btn": "清除選中", - "file_tree_top_all_file_path_as_prompt": "將所有文件路徑作為提示,共 {{tokenNum}} 個 tokens", "search_files_placeholder": "搜索文件...", "no_gpt_files_tips": "當前目錄中沒有xxx.gpt.md文件。", "ask_for_create_gpt_file_tips": "是否要創建一個{{fileName}}文件?", @@ -46,6 +45,20 @@ "settings_tab_settings": "設定", "settings_tab_config_info": "配置信息", "settings_tab_about": "關於", + "override_settings": "覆寫設置", + "override_all_settings": "覆蓋所有設置", + "context_settings": "上下文設置", + "context_settings_opening_ide_file_contents_checkbox_tips": "將 IDE 正在打開的文件內容作為提示,{{fileNum}} 個文件, {{tokenNum}} tokens", + "context_settings_active_ide_file_contents_checkbox_tips": "將 IDE 正在編輯的文件內容作為提示,{{tokenNum}} tokens", + "context_settings_selected_files_checkbox_label": "將選定的文件作為提示,當前選定 {{fileNum}} 個文件, {{tokenNum}} tokens", + "context_settings_all_file_paths_checkbox_label": "將所有文件路徑作為提示,{{tokenNum}} tokens", + 
"model_settings_btn": "模型設置", + "openai_model_name": "模型名稱", + "openai_temperature": "溫度", + "openai_max_tokens": "最大回覆令牌", + "openai_top_p": "前P", + "openai_frequency_penalty": "頻率處罰", + "openai_presence_penalty": "存在處罰", "version": "版本", "github": "GitHub", "reward": "贊賞", @@ -65,6 +78,7 @@ "toast_save_error": "保存失敗!", "toast_create_success": "创建成功!", "toast_create_error": "创建失败!", - "toast_copy_success": "複製成功!" + "toast_copy_success": "複製成功!", + "toast_selected_files_as_prompt_reopened": "已重新開啟選定文件作為提示!" } -} \ No newline at end of file +} diff --git a/packages/gpt-runner-web/client/src/components/hook-form/hook-form-input/hook-form-input.styles.ts b/packages/gpt-runner-web/client/src/components/hook-form/hook-form-input/hook-form-input.styles.ts index adf04ab..cfbd0d4 100644 --- a/packages/gpt-runner-web/client/src/components/hook-form/hook-form-input/hook-form-input.styles.ts +++ b/packages/gpt-runner-web/client/src/components/hook-form/hook-form-input/hook-form-input.styles.ts @@ -2,6 +2,8 @@ import { VSCodeTextField } from '@vscode/webview-ui-toolkit/react' import styled from 'styled-components' export const StyledVSCodeTextField = styled(VSCodeTextField)` + width: 100%; + &::part(root) { border-radius: 0.25rem; overflow: hidden; diff --git a/packages/gpt-runner-web/client/src/components/icon-button/index.tsx b/packages/gpt-runner-web/client/src/components/icon-button/index.tsx index be13a0e..b567866 100644 --- a/packages/gpt-runner-web/client/src/components/icon-button/index.tsx +++ b/packages/gpt-runner-web/client/src/components/icon-button/index.tsx @@ -1,5 +1,5 @@ import { VSCodeButton } from '@vscode/webview-ui-toolkit/react' -import type { FC } from 'react' +import type { FC, MouseEvent } from 'react' import { memo, useCallback, useEffect, useState } from 'react' import clsx from 'clsx' import type { AnimationProps, Target, Tween } from 'framer-motion' @@ -25,7 +25,7 @@ export interface IconButtonProps extends GetComponentProps MaybePromise + 
onClick?: (e: MouseEvent) => MaybePromise buttonStyle?: React.CSSProperties } @@ -68,11 +68,11 @@ export const IconButton: FC = memo((props) => { isAnimating && setDebouncedIsAnimating(isAnimating) }, [isAnimating]) - const handleClick = useCallback(async () => { + const handleClick = useCallback(async (e: MouseEvent) => { if (animatingWhenClick) setIsAnimating(true) - await onClick?.() + await onClick?.(e) if (animatingWhenClick) setIsAnimating(false) diff --git a/packages/gpt-runner-web/client/src/components/popover-menu/index.tsx b/packages/gpt-runner-web/client/src/components/popover-menu/index.tsx index 9cb09eb..8eaa111 100644 --- a/packages/gpt-runner-web/client/src/components/popover-menu/index.tsx +++ b/packages/gpt-runner-web/client/src/components/popover-menu/index.tsx @@ -255,13 +255,18 @@ export const PopoverMenu: React.FC = memo((props) => { )} > - { + { + e.stopPropagation() + if (!clickMode) - return + return false + getIsPopoverOpen() ? handleClose() : handleOpen() if (!clickOutSideToClose) setIsPin(true) + + return false }}> { - label: string + label: ReactNode value: T } diff --git a/packages/gpt-runner-web/client/src/components/select-option/select-option.styles.ts b/packages/gpt-runner-web/client/src/components/select-option/select-option.styles.ts index 0a2f0a9..65599fa 100644 --- a/packages/gpt-runner-web/client/src/components/select-option/select-option.styles.ts +++ b/packages/gpt-runner-web/client/src/components/select-option/select-option.styles.ts @@ -6,26 +6,32 @@ export const SelectOptionList = styled.div` width: max-content; color: var(--foreground); background: var(--panel-view-background); + max-width: 100%; ` export const SelectOptionItem = styled.div` padding: 0.5rem; display: flex; + flex-wrap: wrap; align-items: center; cursor: pointer; user-select: none; font-size: var(--type-ramp-base-font-size); border-bottom: 1px solid var(--panel-view-border); + &:last-child { + border-bottom: none; + } + &:hover { - color: 
var(--button-primary-foreground); - background: var(--button-primary-hover-background); + color: var(--button-secondary-foreground); + background: var(--button-secondary-hover-background); border-bottom-color: transparent; } &[data-selected=true] { - color: var(--button-primary-foreground); - background: var(--button-primary-background); + color: var(--button-secondary-foreground); + background: var(--button-secondary-background); border-bottom-color: transparent; } ` diff --git a/packages/gpt-runner-web/client/src/hooks/use-debounce-fn.hook.ts b/packages/gpt-runner-web/client/src/hooks/use-debounce-fn.hook.ts new file mode 100644 index 0000000..d2d29e8 --- /dev/null +++ b/packages/gpt-runner-web/client/src/hooks/use-debounce-fn.hook.ts @@ -0,0 +1,54 @@ +import { useCallback, useEffect, useRef } from 'react' + +type Procedure = (...args: any[]) => void + +interface DebounceOptions { + delay?: number + leading?: boolean +} + +export function useDebounceFn( + fn: F, + options: DebounceOptions = {}, +): F { + const { delay = 300, leading = false } = options + const fnRef = useRef(fn) + const timerRef = useRef(null) + + useEffect(() => { + fnRef.current = fn + }, [fn]) + + const cancel = useCallback(() => { + if (timerRef.current !== null) { + clearTimeout(timerRef.current) + timerRef.current = null + } + }, []) + + const debouncedFn = useCallback( + ((...args: any[]) => { + const callNow = leading && timerRef.current === null + cancel() + + timerRef.current = window.setTimeout(() => { + if (!callNow) + fnRef.current(...args) + + timerRef.current = null + }, delay) + + if (callNow) + fnRef.current(...args) + }) as F, + [delay, leading, cancel], + ) + + useEffect(() => { + return () => { + cancel() + } + }, [cancel]) + + return debouncedFn +} diff --git a/packages/gpt-runner-web/client/src/hooks/use-emit-bind.hook.ts b/packages/gpt-runner-web/client/src/hooks/use-emit-bind.hook.ts index cace912..087f0c2 100644 --- 
a/packages/gpt-runner-web/client/src/hooks/use-emit-bind.hook.ts +++ b/packages/gpt-runner-web/client/src/hooks/use-emit-bind.hook.ts @@ -1,5 +1,5 @@ import { useEffect } from 'react' -import { ClientEventName } from '@nicepkg/gpt-runner-shared/common' +import { ClientEventName, toUnixPath } from '@nicepkg/gpt-runner-shared/common' import { useGlobalStore } from '../store/zustand/global' import { emitter } from '../helpers/emitter' import { useOn } from './use-on.hook' @@ -14,8 +14,10 @@ export function useEmitBind(deps: any[] = []) { useOn({ eventName: ClientEventName.UpdateIdeOpeningFiles, listener: ({ filePaths }) => { - console.log('updateIdeOpeningFilePaths', filePaths) - updateIdeOpeningFilePaths(filePaths) + const unixFilePaths = filePaths?.map(toUnixPath) + + console.log('updateIdeOpeningFilePaths', unixFilePaths) + updateIdeOpeningFilePaths(unixFilePaths) }, deps: [...deps, updateIdeOpeningFilePaths], }) @@ -23,8 +25,10 @@ export function useEmitBind(deps: any[] = []) { useOn({ eventName: ClientEventName.UpdateIdeActiveFilePath, listener: ({ filePath }) => { - console.log('updateIdeActiveFilePath', filePath) - updateIdeActiveFilePath(filePath) + const unixFilePath = toUnixPath(filePath) + + console.log('updateIdeActiveFilePath', unixFilePath) + updateIdeActiveFilePath(unixFilePath) }, deps: [...deps, updateIdeActiveFilePath], }) diff --git a/packages/gpt-runner-web/client/src/hooks/use-get-common-files-tree.hook.ts b/packages/gpt-runner-web/client/src/hooks/use-get-common-files-tree.hook.ts new file mode 100644 index 0000000..83e16dd --- /dev/null +++ b/packages/gpt-runner-web/client/src/hooks/use-get-common-files-tree.hook.ts @@ -0,0 +1,32 @@ +import { useQuery } from '@tanstack/react-query' +import { useEffect } from 'react' +import { useGlobalStore } from '../store/zustand/global' +import { fetchCommonFilesTree } from '../networks/common-files' +import { useTempStore } from '../store/zustand/temp' + +export interface UseGetCommonFilesTreeProps { + 
rootPath: string + syncChangeToStore?: boolean +} +export function useGetCommonFilesTree(props: UseGetCommonFilesTreeProps) { + const { rootPath, syncChangeToStore = true } = props + const { excludeFileExts } = useGlobalStore() + const { handleFetchCommonFilesTreeResChange } = useTempStore() + + const useQueryReturns = useQuery({ + queryKey: ['file-tree', rootPath, excludeFileExts.join(',')], + enabled: !!rootPath, + queryFn: () => fetchCommonFilesTree({ + rootPath, + excludeExts: excludeFileExts, + }), + }) + + useEffect(() => { + if (!syncChangeToStore) + return + handleFetchCommonFilesTreeResChange(useQueryReturns.data) + }, [useQueryReturns.data, syncChangeToStore]) + + return useQueryReturns +} diff --git a/packages/gpt-runner-web/client/src/hooks/use-token-num.hook.ts b/packages/gpt-runner-web/client/src/hooks/use-token-num.hook.ts new file mode 100644 index 0000000..6b43b58 --- /dev/null +++ b/packages/gpt-runner-web/client/src/hooks/use-token-num.hook.ts @@ -0,0 +1,117 @@ +import { useCallback, useMemo } from 'react' +import type { SingleChat, SingleFileConfig } from '@nicepkg/gpt-runner-shared/common' +import { countTokenQuick } from '../helpers/utils' +import { useGlobalStore } from '../store/zustand/global' +import { useTempStore } from '../store/zustand/temp' +import { useUserConfig } from './use-user-config.hook' + +export interface UseTokenNumProps { + rootPath?: string + chatIdOrChatInstance?: string | SingleChat + singleFileConfig?: SingleFileConfig +} +export function useTokenNum(props?: UseTokenNumProps) { + const { rootPath, chatIdOrChatInstance, singleFileConfig: singleFileConfigFromParams } = props || {} + + const { + provideFileInfoPromptMap, + provideFileInfoToGptMap, + checkedFilePaths, + ideActiveFilePath, + ideOpeningFilePaths, + getChatInstance, + getContextFilePaths, + } = useGlobalStore() + const { fullPathFileMap } = useTempStore() + const filaPathsPromptTokenNum = countTokenQuick(provideFileInfoPromptMap.allFilePathsPrompt) + + const 
chatInstance: SingleChat | undefined = useMemo(() => { + if (!chatIdOrChatInstance) + return undefined + if (typeof chatIdOrChatInstance === 'string') + return getChatInstance(chatIdOrChatInstance) + + return chatIdOrChatInstance + }, [chatIdOrChatInstance, getChatInstance]) + + const { singleFileConfig: singleFileConfigFromRemote } = useUserConfig({ + rootPath, + singleFilePath: chatInstance?.singleFilePath, + enabled: !singleFileConfigFromParams, + }) + + const singleFileConfig = useMemo(() => { + return singleFileConfigFromParams || singleFileConfigFromRemote + }, [singleFileConfigFromParams, singleFileConfigFromRemote]) + + const countFilePathsTokenNum = useCallback((filePaths: string[]) => { + return filePaths.reduce((pre, cur) => { + const file = fullPathFileMap[cur] + return pre + (file?.otherInfo?.tokenNum ?? 0) + }, 0) + }, [fullPathFileMap]) + + const systemPromptTokenNum = useMemo(() => { + const { systemPrompt } = singleFileConfig || {} + + if (!systemPrompt) + return 0 + + return countTokenQuick(systemPrompt) + }, [singleFileConfig, countTokenQuick]) + + const messageTokenNum = useMemo(() => { + const { messages } = chatInstance || {} + + if (!messages || !messages.length) + return 0 + + return messages.reduce((total, messageItem) => { + const { text } = messageItem + return total + countTokenQuick(text) + }, 0) + }, [chatInstance, countTokenQuick]) + + const checkedFilesContentPromptTokenNum = useMemo(() => countFilePathsTokenNum(checkedFilePaths), [checkedFilePaths, countFilePathsTokenNum]) + + const ideOpeningFileTokenNum = useMemo(() => { + return countFilePathsTokenNum(ideOpeningFilePaths) + }, [ideOpeningFilePaths, countFilePathsTokenNum]) + + const ideActiveFileTokenNum = useMemo(() => { + if (!ideActiveFilePath) + return 0 + return countFilePathsTokenNum([ideActiveFilePath]) + }, [ideActiveFilePath, countFilePathsTokenNum]) + + const contextFilePaths = getContextFilePaths() + const contextFilesTokenNum = useMemo(() => + 
countFilePathsTokenNum(contextFilePaths) + , [contextFilePaths, countFilePathsTokenNum]) + + const totalTokenNum = useMemo(() => { + const { allFilePaths } = provideFileInfoToGptMap + let result = systemPromptTokenNum + messageTokenNum + contextFilesTokenNum + + if (allFilePaths) + result += filaPathsPromptTokenNum + + return result + }, [ + systemPromptTokenNum, + messageTokenNum, + contextFilesTokenNum, + provideFileInfoToGptMap, + filaPathsPromptTokenNum, + ]) + + return { + totalTokenNum, + systemPromptTokenNum, + messageTokenNum, + ideOpeningFileTokenNum, + ideActiveFileTokenNum, + checkedFilesContentPromptTokenNum, + filaPathsPromptTokenNum, + } +} diff --git a/packages/gpt-runner-web/client/src/networks/llm.ts b/packages/gpt-runner-web/client/src/networks/llm.ts index 0385bbd..c4ee620 100644 --- a/packages/gpt-runner-web/client/src/networks/llm.ts +++ b/packages/gpt-runner-web/client/src/networks/llm.ts @@ -23,6 +23,8 @@ export async function fetchLlmStream( singleFilePath, singleFileConfig, contextFilePaths, + editingFilePath, + overrideModelsConfig, rootPath, namespace, onMessage = () => {}, @@ -30,6 +32,16 @@ export async function fetchLlmStream( } = params try { + const finalOverrideModelsConfig = Object.fromEntries( + Object.entries(overrideModelsConfig || {}) + .map(([key, value]) => { + return [key, { + ...value, + type: key, + }] + }), + ) + await fetchEventSource(`${getGlobalConfig().serverBaseUrl}/api/chatgpt/chat-stream`, { method: 'POST', signal, @@ -45,6 +57,8 @@ export async function fetchLlmStream( singleFilePath, singleFileConfig, contextFilePaths, + editingFilePath, + overrideModelsConfig: finalOverrideModelsConfig, rootPath, } satisfies ChatStreamReqParams), openWhenHidden: true, diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/chat-panel/chat-panel.styles.ts b/packages/gpt-runner-web/client/src/pages/chat/components/chat-panel/chat-panel.styles.ts index 4b3a05d..f7bc180 100644 --- 
a/packages/gpt-runner-web/client/src/pages/chat/components/chat-panel/chat-panel.styles.ts +++ b/packages/gpt-runner-web/client/src/pages/chat/components/chat-panel/chat-panel.styles.ts @@ -17,3 +17,12 @@ export const ChatPanelWrapper = styled.div` export const ChatPanelPopoverTreeWrapper = styled.div` height: 100%; ` + +export const ConfigFormTitle = styled.div` + padding-left: 0.5rem; + margin: 1rem; + margin-bottom: 0; + font-size: 1rem; + font-weight: bold; + border-left: 0.25rem solid var(--foreground); +` diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/chat-panel/index.tsx b/packages/gpt-runner-web/client/src/pages/chat/components/chat-panel/index.tsx index 219ffa2..3a1a501 100644 --- a/packages/gpt-runner-web/client/src/pages/chat/components/chat-panel/index.tsx +++ b/packages/gpt-runner-web/client/src/pages/chat/components/chat-panel/index.tsx @@ -24,7 +24,8 @@ import { isDarkTheme } from '../../../../styles/themes' import { emitter } from '../../../../helpers/emitter' import { ModelSettings } from '../settings/components/model-settings' import { ContentWrapper } from '../../chat.styles' -import { ChatPanelPopoverTreeWrapper, ChatPanelWrapper } from './chat-panel.styles' +import { ContextSettings } from '../settings/components/context-settings' +import { ChatPanelPopoverTreeWrapper, ChatPanelWrapper, ConfigFormTitle } from './chat-panel.styles' import { createRemarkOpenEditorPlugin } from './remark-plugin' export interface ChatPanelProps { @@ -358,7 +359,7 @@ export const ChatPanel: FC = memo((props) => { style={{ paddingLeft: '0.5rem', }} - text={'Model Settings'} + text={t('chat_page.model_settings_btn')} iconClassName='codicon-settings' hoverShowText={!isHovering} > @@ -367,7 +368,15 @@ export const ChatPanel: FC = memo((props) => { return + + + {` ${t('chat_page.override_settings')}`} + + + {t('chat_page.context_settings')} + + }} /> diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/file-tree/index.tsx 
b/packages/gpt-runner-web/client/src/pages/chat/components/file-tree/index.tsx index f7865ef..d86e969 100644 --- a/packages/gpt-runner-web/client/src/pages/chat/components/file-tree/index.tsx +++ b/packages/gpt-runner-web/client/src/pages/chat/components/file-tree/index.tsx @@ -1,22 +1,24 @@ -import { type FC, memo, useCallback, useEffect, useRef, useState } from 'react' -import { useQuery } from '@tanstack/react-query' +import { type FC, memo, useCallback, useEffect } from 'react' import { ClientEventName, travelTree, travelTreeDeepFirst } from '@nicepkg/gpt-runner-shared/common' import clsx from 'clsx' import { VSCodeCheckbox, VSCodeLink } from '@vscode/webview-ui-toolkit/react' import { Trans, useTranslation } from 'react-i18next' +import { toast } from 'react-hot-toast' import type { SidebarProps } from '../../../../components/sidebar' import { Sidebar } from '../../../../components/sidebar' import { ErrorView } from '../../../../components/error-view' -import { fetchCommonFilesTree } from '../../../../networks/common-files' import type { TreeItemProps, TreeItemState } from '../../../../components/tree-item' import { Icon } from '../../../../components/icon' import { IconButton } from '../../../../components/icon-button' -import { countTokenQuick, formatNumWithK } from '../../../../helpers/utils' +import { formatNumWithK } from '../../../../helpers/utils' import { useGlobalStore } from '../../../../store/zustand/global' import type { FileInfoSidebarTreeItem, FileSidebarTreeItem } from '../../../../store/zustand/global/file-tree.slice' import { PopoverMenu } from '../../../../components/popover-menu' import { useTempStore } from '../../../../store/zustand/temp' import { useOn } from '../../../../hooks/use-on.hook' +import { useGetCommonFilesTree } from '../../../../hooks/use-get-common-files-tree.hook' +import { useDebounceFn } from '../../../../hooks/use-debounce-fn.hook' +import { useTokenNum } from '../../../../hooks/use-token-num.hook' import { 
FileTreeItemRightWrapper, FileTreeSidebarHighlight, FileTreeSidebarUnderSearchWrapper, FilterWrapper } from './file-tree.styles' export interface FileTreeProps { @@ -28,60 +30,54 @@ export const FileTree: FC = memo((props: FileTreeProps) => { const { rootPath, reverseTreeUi } = props const { t } = useTranslation() - const [filesTree, _setFilesTree] = useState([]) - const fullPathFileMapRef = useRef>({}) const { excludeFileExts, updateExcludeFileExts, - expendedFilePaths, updateExpendedFilePaths, checkedFilePaths, updateCheckedFilePaths, - provideFilePathsTreePromptToGpt, - updateProvideFilePathsTreePromptToGpt, - filePathsTreePrompt, - updateFilePathsTreePrompt, + provideFileInfoToGptMap, + updateProvideFileInfoToGptMap, } = useGlobalStore() - const { updateFilesRelativePaths } = useTempStore() - const updateMap = useCallback((tree: FileSidebarTreeItem[]) => { - const result: Record = {} - travelTree(tree, (item) => { - if (item.otherInfo) - result[item.otherInfo.fullPath] = item - }) - fullPathFileMapRef.current = result - }, []) - - const setFilesTree = useCallback((tree: FileSidebarTreeItem[], isUpdateFullPathFileMap = false) => { - if (isUpdateFullPathFileMap) - updateMap(tree) - - _setFilesTree(tree) - }, [_setFilesTree, updateMap]) + const { + filesTree, + fullPathFileMap, + updateFilesTree, + } = useTempStore() const updateFileItem = useCallback((fileItemOrFullPath: FileSidebarTreeItem | string, updater: (fileItem: FileSidebarTreeItem) => void) => { const fullPath = typeof fileItemOrFullPath === 'string' ? 
fileItemOrFullPath : fileItemOrFullPath.otherInfo?.fullPath if (!fullPath) return - const fileItem = fullPathFileMapRef.current[fullPath] + const fileItem = fullPathFileMap[fullPath] if (!fileItem) return updater(fileItem) - setFilesTree([...filesTree]) - }, [filesTree, setFilesTree]) + updateFilesTree([...filesTree]) + }, [filesTree]) - const { data: fetchCommonFilesTreeRes, isLoading, refetch: refreshFileTree } = useQuery({ - queryKey: ['file-tree', rootPath, excludeFileExts.join(',')], - enabled: !!rootPath, - queryFn: () => fetchCommonFilesTree({ - rootPath, - excludeExts: excludeFileExts, - }), + const { data: fetchCommonFilesTreeRes, isLoading, refetch: refreshFileTree } = useGetCommonFilesTree({ + rootPath, }) + const { checkedFilesContentPromptTokenNum } = useTokenNum() + + const openProvideCheckedFileContentsAsPrompt = useCallback(() => { + if (provideFileInfoToGptMap.checkedFileContents) + return + + updateProvideFileInfoToGptMap({ + checkedFileContents: true, + }) + + toast.success(t('chat_page.toast_selected_files_as_prompt_reopened')) + }, [provideFileInfoToGptMap.checkedFileContents, updateProvideFileInfoToGptMap]) + + const debounceOpenProvideCheckedFileContentsAsPrompt = useDebounceFn(openProvideCheckedFileContentsAsPrompt) + useOn({ eventName: [ClientEventName.RefreshTree, ClientEventName.RefreshFileTree], listener: () => refreshFileTree(), @@ -90,12 +86,12 @@ export const FileTree: FC = memo((props: FileTreeProps) => { // sync checked state useEffect(() => { - if (!Object.values(fullPathFileMapRef.current).length || !filesTree.length) + if (!Object.values(fullPathFileMap).length || !filesTree.length) return // check all path in checkedFilePaths checkedFilePaths.forEach((fullPath) => { - const file = fullPathFileMapRef.current[fullPath] + const file = fullPathFileMap[fullPath] if (!file?.otherInfo) return @@ -119,50 +115,8 @@ export const FileTree: FC = memo((props: FileTreeProps) => { return item }) - // setFilesTree([...filesTree]) - }, 
[checkedFilePaths, filesTree, setFilesTree]) - - useEffect(() => { - const filesInfoTree = fetchCommonFilesTreeRes?.data?.filesInfoTree - if (!filesInfoTree) - return - - const filesRelativePaths: string[] = [] - const finalFilesSidebarTree = travelTree(filesInfoTree, (item) => { - const oldIsExpanded = expendedFilePaths.includes(item.fullPath) - const oldIsChecked = checkedFilePaths.includes(item.fullPath) - - const result: FileSidebarTreeItem = { - id: item.id, - name: item.name, - path: item.fullPath, - isLeaf: item.isFile, - otherInfo: { - ...item, - checked: oldIsChecked, - }, - isExpanded: oldIsExpanded, - } - - item.isFile && filesRelativePaths.push(item.projectRelativePath) - - return result - }) - - setFilesTree(finalFilesSidebarTree, true) - updateFilePathsTreePrompt(finalFilesSidebarTree) - updateFilesRelativePaths(filesRelativePaths) - }, [fetchCommonFilesTreeRes]) - - useEffect(() => { - if (excludeFileExts.length) - return - - const { includeFileExts = [], allFileExts = [] } = fetchCommonFilesTreeRes?.data || {} - const _excludeFileExts = allFileExts.filter(ext => !includeFileExts.includes(ext)) - - updateExcludeFileExts(_excludeFileExts) - }, [fetchCommonFilesTreeRes]) + // updateFileTree([...filesTree]) + }, [checkedFilePaths, filesTree]) const renderTreeItemLeftSlot = useCallback((props: TreeItemState) => { const { isLeaf, isExpanded, otherInfo } = props @@ -186,8 +140,8 @@ export const FileTree: FC = memo((props: FileTreeProps) => { return preState let finalPaths: string[] = [] - const isLeaf = fullPathFileMapRef.current[fullPath].isLeaf - const children = fullPathFileMapRef.current[fullPath]?.children || [] + const isLeaf = fullPathFileMap[fullPath].isLeaf + const children = fullPathFileMap[fullPath]?.children || [] if (!checked) { const shouldRemovePaths: string[] = [] @@ -220,6 +174,8 @@ export const FileTree: FC = memo((props: FileTreeProps) => { return finalPaths }) + + debounceOpenProvideCheckedFileContentsAsPrompt() } return <> @@ -274,7 
+230,7 @@ export const FileTree: FC = memo((props: FileTreeProps) => { if (!fullPath) return - const file = fullPathFileMapRef.current[fullPath] + const file = fullPathFileMap[fullPath] file.isExpanded = isExpanded updateExpendedFilePaths((preState) => { @@ -282,8 +238,8 @@ export const FileTree: FC = memo((props: FileTreeProps) => { return finalPaths }) - setFilesTree([...filesTree]) - }, [filesTree, setFilesTree]) + updateFilesTree([...filesTree]) + }, [filesTree]) const buildSearchRightSlot = useCallback(() => { const { allFileExts = [] } = fetchCommonFilesTreeRes?.data || {} @@ -346,51 +302,33 @@ export const FileTree: FC = memo((props: FileTreeProps) => { showText={false} iconClassName='codicon-refresh' animatingWhenClick - onClick={refreshFileTree} + onClick={() => refreshFileTree()} > }, [fetchCommonFilesTreeRes, excludeFileExts, updateExcludeFileExts]) const buildUnderSearchSlot = useCallback(() => { - if (!Object.keys(fullPathFileMapRef.current).length) - return null - - const filaPathsPromptTokenNum = countTokenQuick(filePathsTreePrompt) - - const checkedFilesContentPromptTokenNum = checkedFilePaths.reduce((pre, cur) => { - const file = fullPathFileMapRef.current[cur] - return pre + (file?.otherInfo?.tokenNum ?? 
0) - }, 0) - - let totalTokenNum = checkedFilesContentPromptTokenNum - - if (provideFilePathsTreePromptToGpt) - totalTokenNum += filaPathsPromptTokenNum - const resetAllChecked = () => { updateCheckedFilePaths((preState) => { preState.forEach((item) => { - const file = fullPathFileMapRef.current[item] + const file = fullPathFileMap[item] file.otherInfo!.checked = false return item }) + updateFilesTree([...filesTree]) + return [] }) - updateProvideFilePathsTreePromptToGpt(false) - } - - const handleProvideFilePathsTreePromptToGptChange = (e: any) => { - const checked = e.target?.checked as boolean - updateProvideFilePathsTreePromptToGpt(checked) } return , @@ -403,21 +341,8 @@ export const FileTree: FC = memo((props: FileTreeProps) => { }} onClick={resetAllChecked}> {t('chat_page.file_tree_top_clear_checked_btn')} - -
- - {t('chat_page.file_tree_top_all_file_path_as_prompt', { - tokenNum: formatNumWithK(filaPathsPromptTokenNum), - })} - -
- }, [filePathsTreePrompt, checkedFilePaths, provideFilePathsTreePromptToGpt]) + }, [checkedFilePaths]) const sortTreeItems = useCallback((items: TreeItemProps[]) => { return items?.sort((a, b) => { diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/init-gpt-files/index.tsx b/packages/gpt-runner-web/client/src/pages/chat/components/init-gpt-files/index.tsx index 9831228..73b08f8 100644 --- a/packages/gpt-runner-web/client/src/pages/chat/components/init-gpt-files/index.tsx +++ b/packages/gpt-runner-web/client/src/pages/chat/components/init-gpt-files/index.tsx @@ -47,6 +47,7 @@ export const InitGptFiles: FC = memo((props) => { return <Trans + t={t} i18nKey='chat_page.no_gpt_files_tips' components={{ Title: <Title />, @@ -57,6 +58,7 @@ export const InitGptFiles: FC<InitGptFilesProps> = memo((props) => { <Title> <Trans + t={t} i18nKey='chat_page.ask_for_create_gpt_file_tips' values={{ fileName: `./${GPT_RUNNER_OFFICIAL_FOLDER}/copilot.gpt.md`, diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/context-settings/context-settings.styles.ts b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/context-settings/context-settings.styles.ts new file mode 100644 index 0000000..dcd5dab --- /dev/null +++ b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/context-settings/context-settings.styles.ts @@ -0,0 +1,22 @@ +import { VSCodeBadge, VSCodeCheckbox } from '@vscode/webview-ui-toolkit/react' +import { styled } from 'styled-components' + +export const StyledBadge = styled(VSCodeBadge)` + white-space: nowrap; + margin: 0 0.25rem; +` + +export const StyledVSCodeCheckbox = styled(VSCodeCheckbox)` + margin-bottom: 0.5rem; + flex: 1; + + &::part(control) { + flex-shrink: 0; + } +` as typeof VSCodeCheckbox + +export const SelectWrapper = styled.div` + display: flex; + align-items: center; + margin-bottom: 0.5rem; +` diff --git 
a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/context-settings/index.tsx b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/context-settings/index.tsx new file mode 100644 index 0000000..f45c30f --- /dev/null +++ b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/context-settings/index.tsx @@ -0,0 +1,152 @@ +import { memo, useState } from 'react' +import { Trans, useTranslation } from 'react-i18next' +import { StyledForm } from '../../settings.styles' +import { useGlobalStore } from '../../../../../../store/zustand/global' +import { formatNumWithK } from '../../../../../../helpers/utils' +import { useGetCommonFilesTree } from '../../../../../../hooks/use-get-common-files-tree.hook' +import { LoadingView } from '../../../../../../components/loading-view' +import type { ISelectOption } from '../../../../../../components/select-option' +import { SelectOption } from '../../../../../../components/select-option' +import { useTokenNum } from '../../../../../../hooks/use-token-num.hook' +import { SelectWrapper, StyledBadge, StyledVSCodeCheckbox } from './context-settings.styles' + +export interface ContextSettingsProps { + rootPath: string +} + +export const ContextSettings = memo((props: ContextSettingsProps) => { + const { rootPath } = props + + const { t } = useTranslation() + const { + provideFileInfoToGptMap, + checkedFilePaths, + ideOpeningFilePaths, + updateProvideFileInfoToGptMap, + } = useGlobalStore() + const { filaPathsPromptTokenNum, ideOpeningFileTokenNum, ideActiveFileTokenNum, checkedFilesContentPromptTokenNum } = useTokenNum() + + const { isLoading } = useGetCommonFilesTree({ + rootPath, + }) + + const handleProvideChange = (e: any, key: keyof typeof provideFileInfoToGptMap) => { + const checked = (e.target as HTMLInputElement).checked + updateProvideFileInfoToGptMap({ + [key]: checked, + }) + } + + const isProvideIdeFiles = 
provideFileInfoToGptMap.openingIdeFileContents || provideFileInfoToGptMap.activeIdeFileContents + + const ideFileAsPromptOptions: ISelectOption<keyof typeof provideFileInfoToGptMap>[] = [{ + label: <Trans + t={t} + i18nKey={'chat_page.context_settings_opening_ide_file_contents_checkbox_tips'} + values={{ + fileNum: ideOpeningFilePaths.length, + tokenNum: formatNumWithK(ideOpeningFileTokenNum), + }} + components={{ + FileNumWrapper: <StyledBadge></StyledBadge>, + TokenNumWrapper: <StyledBadge></StyledBadge>, + }} + />, + value: 'openingIdeFileContents', + }, { + label: <Trans + t={t} + i18nKey={'chat_page.context_settings_active_ide_file_contents_checkbox_tips'} + values={{ + tokenNum: formatNumWithK(ideActiveFileTokenNum), + }} + components={{ + TokenNumWrapper: <StyledBadge></StyledBadge>, + }} + />, + value: 'activeIdeFileContents', + }] + + const [ideOptionActiveValue, setIdeOptionActiveValue] = useState<keyof typeof provideFileInfoToGptMap>(() => { + if (provideFileInfoToGptMap.openingIdeFileContents) + return 'openingIdeFileContents' + + return 'activeIdeFileContents' + }) + + return <StyledForm> + {isLoading && <LoadingView absolute></LoadingView>} + + {/* ide opening files or active file */} + <SelectWrapper> + <StyledVSCodeCheckbox + style={{ + marginBottom: 0, + }} + checked={isProvideIdeFiles} + onChange={(e) => { + const checked = (e.target as HTMLInputElement).checked + updateProvideFileInfoToGptMap({ + [ideOptionActiveValue]: checked, + }) + }} + > + {ideFileAsPromptOptions.find(item => item.value === ideOptionActiveValue)?.label} + </StyledVSCodeCheckbox> + + <SelectOption + options={ideFileAsPromptOptions} + value={ideOptionActiveValue} + onChange={(_value) => { + const value = _value as keyof typeof provideFileInfoToGptMap + setIdeOptionActiveValue(value) + + if (!isProvideIdeFiles) + return + + updateProvideFileInfoToGptMap({ + openingIdeFileContents: value === 'openingIdeFileContents', + activeIdeFileContents: value === 'activeIdeFileContents', + 
}) + }} /> + </SelectWrapper> + + {/* selected files */} + <StyledVSCodeCheckbox + checked={provideFileInfoToGptMap.checkedFileContents} + onChange={e => handleProvideChange(e, 'checkedFileContents')} + > + <Trans + t={t} + i18nKey={'chat_page.context_settings_selected_files_checkbox_label'} + values={{ + fileNum: checkedFilePaths.length, + tokenNum: formatNumWithK(checkedFilesContentPromptTokenNum), + }} + components={{ + FileNumWrapper: <StyledBadge></StyledBadge>, + TokenNumWrapper: <StyledBadge></StyledBadge>, + }} + ></Trans> + </StyledVSCodeCheckbox> + + {/* all file paths */} + <StyledVSCodeCheckbox + checked={provideFileInfoToGptMap.allFilePaths} + onChange={e => handleProvideChange(e, 'allFilePaths')} + > + <Trans + t={t} + i18nKey={'chat_page.context_settings_all_file_paths_checkbox_label'} + values={{ + tokenNum: formatNumWithK(filaPathsPromptTokenNum), + }} + components={{ + TokenNumWrapper: <StyledBadge></StyledBadge>, + }} + ></Trans> + </StyledVSCodeCheckbox> + </StyledForm> +}) + +ContextSettings.displayName = 'ContextSettings' diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/base-model-settings.tsx b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/base-model-settings.tsx index d8a4881..5724ac5 100644 --- a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/base-model-settings.tsx +++ b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/base-model-settings.tsx @@ -1,6 +1,6 @@ import { ChatModelType, getModelConfigTypeSchema } from '@nicepkg/gpt-runner-shared/common' import type { BaseModelConfig, SingleFileConfig } from '@nicepkg/gpt-runner-shared/common' -import { memo, useCallback, useEffect, useMemo, useState } from 'react' +import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react' import type { ReactNode } from 'react' import type { Path, 
UseFormReturn } from 'react-hook-form' import { useForm } from 'react-hook-form' @@ -30,13 +30,13 @@ function BaseModelSettings_<FormData extends BaseModelConfig>(props: BaseModelSe const { singleFileConfig, formConfig } = props const { t } = useTranslation() - const { modelOverrideConfig, updateModelOverrideConfig } = useGlobalStore() + const { overrideModelsConfig, updateOverrideModelsConfig } = useGlobalStore() const currentModel = singleFileConfig?.model as FormData | undefined const currentModelType = currentModel?.type || ChatModelType.Openai const currentModelOverrideConfig = useMemo(() => { - return (modelOverrideConfig[currentModelType] || {}) as FormData - }, [modelOverrideConfig[currentModelType]]) + return (overrideModelsConfig[currentModelType] || {}) as FormData + }, [overrideModelsConfig[currentModelType]]) const currentFormNames = useMemo(() => formConfig.map(item => item.name), [formConfig]) @@ -52,16 +52,17 @@ function BaseModelSettings_<FormData extends BaseModelConfig>(props: BaseModelSe const { setValue, watch } = useFormReturns - const updateModelOverrideConfigFromCheckMap = useCallback((formData: FormData) => { + const updateOverrideModelsConfigFromCheckMap = useCallback((formData: FormData, _checkedMap?: Record<keyof FormData, boolean>) => { const checkedValues = {} as FormData + const finalCheckedMap = _checkedMap || checkedMap - Object.keys(checkedMap).forEach((key) => { + Object.keys(finalCheckedMap).forEach((key) => { const formName = key as keyof FormData - if (checkedMap[formName] === true) + if (finalCheckedMap[formName] === true) checkedValues[formName] = formData?.[formName] as any }) - updateModelOverrideConfig(preState => ({ + updateOverrideModelsConfig(preState => ({ ...preState, [currentModelType]: { ...checkedValues, @@ -71,14 +72,19 @@ function BaseModelSettings_<FormData extends BaseModelConfig>(props: BaseModelSe useEffect(() => { const subscription = watch((formData) => { - updateModelOverrideConfigFromCheckMap(formData as 
FormData) + updateOverrideModelsConfigFromCheckMap(formData as FormData) }) return () => subscription.unsubscribe() - }, [watch, updateModelOverrideConfigFromCheckMap]) + }, [watch, updateOverrideModelsConfigFromCheckMap]) + const isInitCheckMap = useRef(false) useEffect(() => { - // update checked map + if (isInitCheckMap.current || !singleFileConfig?.model || !currentModelOverrideConfig) + return + isInitCheckMap.current = true + + // init checked map const initCheckedMap = Object.keys(checkedMap).reduce((prev, key) => { const formName = key as keyof FormData const isOverride = currentModelOverrideConfig[formName] !== undefined @@ -98,19 +104,21 @@ function BaseModelSettings_<FormData extends BaseModelConfig>(props: BaseModelSe if (!isOverride && currentModel?.[formName] !== undefined) setValue(formName as Path<FormData>, currentModel[formName] as any) }) - }, [singleFileConfig?.model, JSON.stringify(currentModelOverrideConfig)]) + }, [isInitCheckMap.current, singleFileConfig?.model, JSON.stringify(currentModelOverrideConfig)]) const buildLabel = (label: string, formName: keyof FormData) => { return <LabelWrapper> <VSCodeCheckbox checked={checkedMap[formName]} - onChange={(e) => { - const checked = (e.target as HTMLInputElement).checked - setCheckedMap(prev => ({ - ...prev, - [formName]: checked, - })) - updateModelOverrideConfigFromCheckMap(watch()) + onClick={(e) => { + const newCheckedMap = { + ...checkedMap, + [formName]: !checkedMap[formName], + } + + setCheckedMap(newCheckedMap) + updateOverrideModelsConfigFromCheckMap(watch(), newCheckedMap) + e.stopPropagation() return false }} @@ -118,22 +126,22 @@ function BaseModelSettings_<FormData extends BaseModelConfig>(props: BaseModelSe </LabelWrapper> } + const isAllChecked = Object.values(checkedMap).every(Boolean) return <StyledForm> <VSCodeCheckbox style={{ marginBottom: '1rem', }} - checked={Object.values(checkedMap).every(Boolean)} - onChange={(e) => { - const checked = (e.target as 
HTMLInputElement).checked - setCheckedMap((prev) => { - return Object.fromEntries(Object.keys(prev).map(key => [key, checked])) as Record<keyof FormData, boolean> - }) - updateModelOverrideConfigFromCheckMap(watch()) + checked={isAllChecked} + onClick={(e) => { + const newCheckedMap = Object.fromEntries(Object.keys(checkedMap).map(key => [key, !isAllChecked])) as Record<keyof FormData, boolean> + setCheckedMap(newCheckedMap) + updateOverrideModelsConfigFromCheckMap(watch(), newCheckedMap) + e.stopPropagation() }} > - Override All Settings + {t('chat_page.override_all_settings')} </VSCodeCheckbox> {formConfig.map((formItemConfig, index) => { diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/base-secrets-settings.tsx b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/base-secrets-settings.tsx new file mode 100644 index 0000000..2e79497 --- /dev/null +++ b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/base-secrets-settings.tsx @@ -0,0 +1,106 @@ +import { ChatModelType, ServerStorageName, getModelConfigTypeSchema } from '@nicepkg/gpt-runner-shared/common' +import type { SingleFileConfig } from '@nicepkg/gpt-runner-shared/common' +import { memo, useEffect } from 'react' +import type { ReactNode } from 'react' +import { useForm } from 'react-hook-form' +import type { Path, UseFormReturn } from 'react-hook-form' +import { useTranslation } from 'react-i18next' +import { useMutation, useQuery } from '@tanstack/react-query' +import { zodResolver } from '@hookform/resolvers/zod' +import { toast } from 'react-hot-toast' +import { VSCodeButton } from '@vscode/webview-ui-toolkit/react' +import { StyledForm, StyledFormItem } from '../../settings.styles' +import { getServerStorage, saveServerStorage } from '../../../../../../networks/server-storage' +import { IS_SAFE } from '../../../../../../helpers/constant' +import { useLoading } from 
'../../../../../../hooks/use-loading.hook' + +export interface BaseSecretsSettingsFormItemBuildViewState<FormData extends Record<string, any>> { + useFormReturns: UseFormReturn<FormData, any, undefined> +} + +export interface BaseSecretsSettingsFormItemConfig<FormData extends Record<string, any>> { + name: keyof FormData + buildView: (state: BaseSecretsSettingsFormItemBuildViewState<FormData>) => ReactNode +} + +export interface BaseSecretsSettingsProps<FormData extends Record<string, any>> { + singleFileConfig?: SingleFileConfig + formConfig: BaseSecretsSettingsFormItemConfig<FormData>[] +} + +function BaseSecretsSettings_<FormData extends Record<string, any>>(props: BaseSecretsSettingsProps<FormData>) { + const { singleFileConfig, formConfig } = props + + const { t } = useTranslation() + const { setLoading } = useLoading() + const currentModelType = singleFileConfig?.model?.type || ChatModelType.Openai + + const { data: querySecretsRes } = useQuery({ + queryKey: ['secrets', currentModelType], + enabled: !!currentModelType, + queryFn: () => getServerStorage({ + storageName: ServerStorageName.SecretsConfig, + key: currentModelType!, + }), + }) + + const { mutateAsync: saveSecrets } = useMutation({ + mutationFn: (value: FormData) => saveServerStorage({ + storageName: ServerStorageName.SecretsConfig, + key: currentModelType, + value, + }), + }) + + const remoteSecrets = querySecretsRes?.data?.value as FormData | undefined + + const useFormReturns = useForm<FormData>({ + mode: 'onBlur', + resolver: zodResolver(getModelConfigTypeSchema(currentModelType, 'secrets')), + }) + + const { handleSubmit, setValue } = useFormReturns + + useEffect(() => { + if (remoteSecrets) { + Object.keys(remoteSecrets).forEach((key) => { + setValue(key as Path<FormData>, remoteSecrets[key as keyof FormData]) + }) + } + }, [remoteSecrets]) + + const onSubmit = async (data: FormData) => { + setLoading(true) + + try { + await saveSecrets(data) + toast.success(t('chat_page.toast_save_success')) 
+ } + finally { + setLoading(false) + } + } + + return <StyledForm onSubmit={handleSubmit(onSubmit)}> + {formConfig.map((formItemConfig, index) => { + const buildViewState: BaseSecretsSettingsFormItemBuildViewState<FormData> = { + useFormReturns, + } + return <StyledFormItem key={index}> + {formItemConfig.buildView(buildViewState)} + </StyledFormItem> + })} + + <VSCodeButton + disabled={!IS_SAFE} + appearance='primary' + type='submit' + > + {IS_SAFE ? t('chat_page.save_btn') : t('chat_page.disabled_save_secrets_config_btn')} + </VSCodeButton> + </StyledForm> +} + +BaseSecretsSettings_.displayName = 'BaseSecretsSettings' + +export const BaseSecretsSettings = memo(BaseSecretsSettings_) diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/index.tsx b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/index.tsx index 6bb4e0f..690a258 100644 --- a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/index.tsx +++ b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/index.tsx @@ -42,7 +42,7 @@ export const ModelSettings: FC<ModelSettingsProps> = memo((props) => { const modelTypeViewMap: Record<ChatModelType, Record<ModelSettingsViewType, () => ReactNode>> = { [ChatModelType.Openai]: { - secrets: () => <OpenaiSecretsSettings />, + secrets: () => <OpenaiSecretsSettings singleFileConfig={resolvedSingleFileConfig} />, model: () => <OpenaiModelSettings singleFileConfig={resolvedSingleFileConfig} />, title: () => <>OpenAI</>, }, diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/openai-settings/model-settings.tsx b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/openai-settings/model-settings.tsx index 6e4105b..ec81967 100644 --- 
a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/openai-settings/model-settings.tsx +++ b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/openai-settings/model-settings.tsx @@ -6,7 +6,7 @@ import { HookFormInput } from '../../../../../../../components/hook-form/hook-fo import { type ISelectOption, SelectOption } from '../../../../../../../components/select-option' import { BaseModelSettings, type BaseModelSettingsFormItemConfig } from '../base-model-settings' -export interface FormData extends Pick<OpenaiModelConfig, 'modelName' | 'temperature' | 'maxTokens' | 'topP' | 'frequencyPenalty' | 'presencePenalty'> { +interface FormData extends Pick<OpenaiModelConfig, 'modelName' | 'temperature' | 'maxTokens' | 'topP' | 'frequencyPenalty' | 'presencePenalty'> { } @@ -41,14 +41,11 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) => return <> <HookFormInput name="modelName" - label={buildLabel('Model Name')} + label={buildLabel(t('chat_page.openai_model_name'))} labelInLeft placeholder={''} errors={formState.errors} control={control} - style={{ - width: '100%', - }} /> <SelectOption options={modelTipOptions} @@ -65,15 +62,12 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) => return <> <HookFormInput name="temperature" - label={buildLabel('Temperature')} + label={buildLabel(t('chat_page.openai_temperature'))} labelInLeft isNumber placeholder={'0 ~ 1'} errors={formState.errors} control={control} - style={{ - width: '100%', - }} /> </> }, @@ -84,16 +78,13 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) => return <> <HookFormInput name="maxTokens" - label={buildLabel('Max Reply Tokens')} + label={buildLabel(t('chat_page.openai_max_tokens'))} labelInLeft isNumber minNumber={0} placeholder={'0 ~ 2048'} errors={formState.errors} control={control} - style={{ - width: '100%', - }} /> </> }, @@ 
-104,7 +95,7 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) => return <> <HookFormInput name="topP" - label={buildLabel('Top P')} + label={buildLabel(t('chat_page.openai_top_p'))} labelInLeft minNumber={0} maxNumber={1} @@ -112,9 +103,6 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) => isNumber errors={formState.errors} control={control} - style={{ - width: '100%', - }} /> </> }, @@ -125,7 +113,7 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) => return <> <HookFormInput name="frequencyPenalty" - label={buildLabel('Frequency Penalty')} + label={buildLabel(t('chat_page.openai_frequency_penalty'))} labelInLeft isNumber minNumber={-2} @@ -133,9 +121,6 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) => placeholder={'-2 ~ 2'} errors={formState.errors} control={control} - style={{ - width: '100%', - }} /> </> }, @@ -146,7 +131,7 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) => return <> <HookFormInput name="presencePenalty" - label={buildLabel('Presence Penalty')} + label={buildLabel(t('chat_page.openai_presence_penalty'))} labelInLeft isNumber minNumber={-2} @@ -154,9 +139,6 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) => placeholder={'-2 ~ 2'} errors={formState.errors} control={control} - style={{ - width: '100%', - }} /> </> }, diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/openai-settings/secrets-settings.tsx b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/openai-settings/secrets-settings.tsx index ca43687..6e45db3 100644 --- a/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/openai-settings/secrets-settings.tsx +++ 
b/packages/gpt-runner-web/client/src/pages/chat/components/settings/components/model-settings/openai-settings/secrets-settings.tsx @@ -1,109 +1,73 @@ -import { DEFAULT_OPENAI_API_BASE_PATH, type OpenaiSecrets, SecretStorageKey, ServerStorageName } from '@nicepkg/gpt-runner-shared/common' -import { useMutation, useQuery } from '@tanstack/react-query' -import { type FC, memo, useEffect } from 'react' -import { useForm } from 'react-hook-form' -import { VSCodeButton, VSCodeLink } from '@vscode/webview-ui-toolkit/react' +import { DEFAULT_OPENAI_API_BASE_PATH } from '@nicepkg/gpt-runner-shared/common' +import type { OpenaiSecrets, SingleFileConfig } from '@nicepkg/gpt-runner-shared/common' +import { type FC, memo } from 'react' +import { VSCodeLink } from '@vscode/webview-ui-toolkit/react' import { useTranslation } from 'react-i18next' -import toast from 'react-hot-toast' -import { getServerStorage, saveServerStorage } from '../../../../../../../networks/server-storage' -import { useLoading } from '../../../../../../../hooks/use-loading.hook' import { HookFormInput } from '../../../../../../../components/hook-form/hook-form-input' import { HookFormTextarea } from '../../../../../../../components/hook-form/hook-form-textarea' -import { IS_SAFE } from '../../../../../../../helpers/constant' -import { StyledForm, StyledFormItem } from '../../../settings.styles' +import { BaseSecretsSettings, type BaseSecretsSettingsFormItemConfig } from '../base-secrets-settings' -export interface FormData extends Pick<OpenaiSecrets, 'apiKey' | 'accessToken' | 'basePath'> { +interface FormData extends Pick<OpenaiSecrets, 'apiKey' | 'accessToken' | 'basePath'> { } -export const OpenaiSecretsSettings: FC = memo(() => { +export interface OpenaiSecretsSettingsProps { + singleFileConfig: SingleFileConfig +} + +export const OpenaiSecretsSettings: FC<OpenaiSecretsSettingsProps> = memo((props) => { + const { singleFileConfig } = props + const { t } = useTranslation() - const { setLoading } = 
useLoading() - const { data: querySecretsRes } = useQuery({ - queryKey: ['secrets', SecretStorageKey.Openai], - queryFn: () => getServerStorage({ - storageName: ServerStorageName.SecretsConfig, - key: SecretStorageKey.Openai, - }), - }) - const { mutateAsync: saveSecrets } = useMutation({ - mutationKey: ['secrets', SecretStorageKey.Openai], - mutationFn: (value: FormData) => saveServerStorage({ - storageName: ServerStorageName.SecretsConfig, - key: SecretStorageKey.Openai, - value, - }), - }) + const formConfig: BaseSecretsSettingsFormItemConfig<FormData>[] = [ + { + name: 'apiKey', + buildView: ({ useFormReturns: { control, formState } }) => { + return <> + <HookFormInput + label={t('chat_page.openai_api_key')} + placeholder={t('chat_page.openai_api_key_placeholder')} + name="apiKey" + errors={formState.errors} + control={control} + type="password" + /> + </> + }, + }, + { + name: 'basePath', + buildView: ({ useFormReturns: { control, formState } }) => { + return <> + <HookFormInput + label={t('chat_page.openai_api_base_path')} + placeholder={DEFAULT_OPENAI_API_BASE_PATH} + name="basePath" + errors={formState.errors} + control={control} + /> + </> + }, + }, { + name: 'accessToken', + buildView: ({ useFormReturns: { control, formState } }) => { + return <> + <HookFormTextarea + label={t('chat_page.openai_access_token')} + name="accessToken" + placeholder={t('chat_page.openai_access_token_placeholder')} + errors={formState.errors} + control={control} + /> + <div> + {t('chat_page.openai_get_access_token_tips')} <VSCodeLink href="https://chat.openai.com/api/auth/session" target="_blank" rel="noreferrer">https://chat.openai.com/api/auth/session</VSCodeLink> + </div> + </> + }, + }, + ] - const remoteSecrets = querySecretsRes?.data?.value as OpenaiSecrets | undefined - - const { handleSubmit, formState, control, setValue } = useForm<FormData>({ - mode: 'onBlur', - }) - - useEffect(() => { - if (remoteSecrets) { - setValue('apiKey', remoteSecrets.apiKey || '') - 
setValue('accessToken', remoteSecrets.accessToken || '') - setValue('basePath', remoteSecrets.basePath || '') - } - }, [remoteSecrets]) - - const onSubmit = async (data: FormData) => { - setLoading(true) - - try { - await saveSecrets(data) - toast.success(t('chat_page.toast_save_success')) - } - finally { - setLoading(false) - } - } - - return <StyledForm onSubmit={handleSubmit(onSubmit)}> - <StyledFormItem key={0}> - <HookFormInput - label={t('chat_page.openai_api_key')} - placeholder={t('chat_page.openai_api_key_placeholder')} - name="apiKey" - errors={formState.errors} - control={control} - type="password" - /> - </StyledFormItem> - - <StyledFormItem key={1}> - <HookFormInput - label={t('chat_page.openai_api_base_path')} - placeholder={DEFAULT_OPENAI_API_BASE_PATH} - name="basePath" - errors={formState.errors} - control={control} - /> - </StyledFormItem> - - <StyledFormItem key={2}> - <HookFormTextarea - label={t('chat_page.openai_access_token')} - name="accessToken" - placeholder={t('chat_page.openai_access_token_placeholder')} - errors={formState.errors} - control={control} - /> - <div> - {t('chat_page.openai_get_access_token_tips')} <VSCodeLink href="https://chat.openai.com/api/auth/session" target="_blank" rel="noreferrer">https://chat.openai.com/api/auth/session</VSCodeLink> - </div> - </StyledFormItem> - - <VSCodeButton - disabled={!IS_SAFE} - appearance='primary' - type='submit' - > - {IS_SAFE ? 
t('chat_page.save_btn') : t('chat_page.disabled_save_secrets_config_btn')} - </VSCodeButton> - </StyledForm> + return <BaseSecretsSettings singleFileConfig={singleFileConfig} formConfig={formConfig} /> }) OpenaiSecretsSettings.displayName = 'OpenaiSecretsSettings' diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/settings/settings.styles.ts b/packages/gpt-runner-web/client/src/pages/chat/components/settings/settings.styles.ts index 9ee9192..b77e42b 100644 --- a/packages/gpt-runner-web/client/src/pages/chat/components/settings/settings.styles.ts +++ b/packages/gpt-runner-web/client/src/pages/chat/components/settings/settings.styles.ts @@ -38,10 +38,12 @@ export const StyledForm = styled.form` display: flex; flex-direction: column; margin: 1rem; + position: relative; ` export const StyledFormItem = styled.div` display: flex; flex-direction: column; margin-bottom: 1rem; + width: 100%; ` diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/top-toolbar/index.tsx b/packages/gpt-runner-web/client/src/pages/chat/components/top-toolbar/index.tsx index 2519e30..1cc4f1d 100644 --- a/packages/gpt-runner-web/client/src/pages/chat/components/top-toolbar/index.tsx +++ b/packages/gpt-runner-web/client/src/pages/chat/components/top-toolbar/index.tsx @@ -5,19 +5,23 @@ import { PopoverMenu } from '../../../../components/popover-menu' import { IconButton } from '../../../../components/icon-button' import { ChatPanelPopoverTreeWrapper } from '../chat-panel/chat-panel.styles' import { useIsMobile } from '../../../../hooks/use-is-mobile.hook' -import { TopToolbarBlank, TopToolbarWrapper } from './top-toolbar.styles' +import type { UseTokenNumProps } from '../../../../hooks/use-token-num.hook' +import { useTokenNum } from '../../../../hooks/use-token-num.hook' +import { formatNumWithK } from '../../../../helpers/utils' +import { TopToolbarBlank, TopToolbarLeft, TopToolbarRight, TopToolbarWrapper } from './top-toolbar.styles' -export interface 
TopToolbarProps { +export interface TopToolbarProps extends UseTokenNumProps { settingsView?: React.ReactNode configInfoView?: React.ReactNode aboutView?: React.ReactNode } export const TopToolbar = memo(forwardRef<HTMLDivElement, TopToolbarProps>((props, ref) => { - const { settingsView, configInfoView, aboutView } = props + const { settingsView, configInfoView, aboutView, ...useTokenNumProps } = props const { t } = useTranslation() const isMobile = useIsMobile() + const { totalTokenNum } = useTokenNum(useTokenNumProps) const popMenus: { text: string @@ -32,54 +36,62 @@ export const TopToolbar = memo(forwardRef<HTMLDivElement, TopToolbarProps>((prop menuView: settingsView, }, { text: t('chat_page.settings_tab_config_info'), - alwaysShowText: true, + alwaysShowText: !isMobile, iconClassName: 'codicon-gist', menuView: configInfoView, }, { text: t('chat_page.settings_tab_about'), - alwaysShowText: true, + alwaysShowText: !isMobile, iconClassName: 'codicon-info', menuView: aboutView, }] return <> <TopToolbarWrapper ref={ref}> - {popMenus.map((popMenu, index) => { - const { text, alwaysShowText, iconClassName, menuView, menuProps } = popMenu + <TopToolbarLeft> + {popMenus.map((popMenu, index) => { + const { text, alwaysShowText, iconClassName, menuView, menuProps } = popMenu - return <PopoverMenu - key={index} - clickMode - xPosition='center' - yPosition='bottom' - menuMaskStyle={{ - marginLeft: '0', - marginRight: '0', - paddingTop: '0.5rem', - }} - menuStyle={{ - border: isMobile ? 'none' : '', - width: isMobile ? '100vw' : '', - }} - minusHeightSpace={isMobile ? 
10 : 100} - buildChildrenSlot={({ isHovering }) => { - return <IconButton - text={text} - iconClassName={iconClassName} - hoverShowText={!alwaysShowText && !isHovering} - style={{ - paddingLeft: '0.5rem', - }} - ></IconButton> - }} - buildMenuSlot={() => { - return <ChatPanelPopoverTreeWrapper> - {menuView} - </ChatPanelPopoverTreeWrapper> - }} - {...menuProps} - /> - })} + return <PopoverMenu + key={index} + clickMode + xPosition='center' + yPosition='bottom' + menuMaskStyle={{ + marginLeft: '0', + marginRight: '0', + paddingTop: '0.5rem', + }} + menuStyle={{ + border: isMobile ? 'none' : '', + width: isMobile ? '100vw' : '', + }} + minusHeightSpace={isMobile ? 10 : 100} + buildChildrenSlot={({ isHovering }) => { + return <IconButton + text={text} + iconClassName={iconClassName} + hoverShowText={!alwaysShowText && !isHovering} + style={{ + paddingLeft: '0.5rem', + }} + ></IconButton> + }} + buildMenuSlot={() => { + return <ChatPanelPopoverTreeWrapper> + {menuView} + </ChatPanelPopoverTreeWrapper> + }} + {...menuProps} + /> + })} + </TopToolbarLeft> + + <TopToolbarRight> + <div title="Tokens" style={{ marginRight: '0.5rem' }}> + {(isMobile ? 
'' : 'Tokens: ') + formatNumWithK(totalTokenNum)} + </div> + </TopToolbarRight> </TopToolbarWrapper> <TopToolbarBlank /> </> diff --git a/packages/gpt-runner-web/client/src/pages/chat/components/top-toolbar/top-toolbar.styles.ts b/packages/gpt-runner-web/client/src/pages/chat/components/top-toolbar/top-toolbar.styles.ts index 851f0d8..f96bb11 100644 --- a/packages/gpt-runner-web/client/src/pages/chat/components/top-toolbar/top-toolbar.styles.ts +++ b/packages/gpt-runner-web/client/src/pages/chat/components/top-toolbar/top-toolbar.styles.ts @@ -10,10 +10,25 @@ export const TopToolbarWrapper = styled.div` width: 100%; height: ${toolbarHeight}; align-items: center; + justify-content: space-between; + font-size: var(--type-ramp-base-font-size); + color: var(--foreground); background-color: var(--panel-view-background); border-bottom: 1px solid var(--panel-view-border); ` +export const TopToolbarLeft = styled.div` + display: flex; + align-items: center; + flex-shrink: 0; +` + +export const TopToolbarRight = styled.div` + display: flex; + align-items: center; + flex-shrink: 0; +` + export const TopToolbarBlank = styled.div` flex-shrink: 0; width: 100%; diff --git a/packages/gpt-runner-web/client/src/pages/chat/index.tsx b/packages/gpt-runner-web/client/src/pages/chat/index.tsx index b903402..9f83a6f 100644 --- a/packages/gpt-runner-web/client/src/pages/chat/index.tsx +++ b/packages/gpt-runner-web/client/src/pages/chat/index.tsx @@ -16,6 +16,7 @@ import { DragResizeView } from '../../components/drag-resize-view' import { fetchProjectInfo } from '../../networks/config' import { useEmitBind } from '../../hooks/use-emit-bind.hook' import { useSize } from '../../hooks/use-size.hook' +import { useGetCommonFilesTree } from '../../hooks/use-get-common-files-tree.hook' import { ContentWrapper, StyledVSCodePanels } from './chat.styles' import { ChatSidebar } from './components/chat-sidebar' import { ChatPanel } from './components/chat-panel' @@ -48,8 +49,15 @@ const Chat: FC = 
memo(() => { queryKey: ['fetchProjectInfo'], queryFn: () => fetchProjectInfo(), }) + const rootPath = getGlobalConfig().rootPath + // sometime file tree popover menu is hidden at mount + // and the store is not updated, so we need to update it + useGetCommonFilesTree({ + rootPath, + }) + useEmitBind([rootPath]) // when active chat id change, change tab active id @@ -217,6 +225,8 @@ const Chat: FC = memo(() => { <FlexColumn style={{ width: '100%', height: '100%' }}> <TopToolbar ref={toolbarRef} + rootPath={rootPath} + chatIdOrChatInstance={chatInstance} settingsView={renderSettings(true, SettingsTabId.Settings)} configInfoView={renderSettings(true, SettingsTabId.ConfigInfo)} aboutView={renderSettings(true, SettingsTabId.About)} diff --git a/packages/gpt-runner-web/client/src/store/zustand/global/chat.slice.ts b/packages/gpt-runner-web/client/src/store/zustand/global/chat.slice.ts index e8ebd8e..351ecfe 100644 --- a/packages/gpt-runner-web/client/src/store/zustand/global/chat.slice.ts +++ b/packages/gpt-runner-web/client/src/store/zustand/global/chat.slice.ts @@ -1,5 +1,5 @@ import type { StateCreator } from 'zustand' -import type { ChatModelTypeMap, SingleChat } from '@nicepkg/gpt-runner-shared/common' +import type { PartialChatModelTypeMap, SingleChat } from '@nicepkg/gpt-runner-shared/common' import { ChatMessageStatus, ChatRole, STREAM_DONE_FLAG, travelTree } from '@nicepkg/gpt-runner-shared/common' import { v4 as uuidv4 } from 'uuid' import type { GetState } from '../types' @@ -16,7 +16,7 @@ export enum GenerateAnswerType { export interface ChatSlice { activeChatId: string chatInstances: SingleChat[] - modelOverrideConfig: Partial<ChatModelTypeMap> + overrideModelsConfig: PartialChatModelTypeMap updateActiveChatId: (activeChatId: string) => void /** @@ -39,7 +39,8 @@ export interface ChatSlice { generateChatAnswer: (chatId: string, type?: GenerateAnswerType) => Promise<void> regenerateLastChatAnswer: (chatId: string) => Promise<void> stopGeneratingChatAnswer: 
(chatId: string) => void - updateModelOverrideConfig: (modelOverrideConfig: Partial<ChatModelTypeMap> | ((oldModelOverrideConfig: Partial<ChatModelTypeMap>) => Partial<ChatModelTypeMap>)) => void + updateOverrideModelsConfig: (overrideModelsConfig: PartialChatModelTypeMap | ((oldModelOverrideConfig: PartialChatModelTypeMap) => PartialChatModelTypeMap)) => void + getContextFilePaths: () => string[] } export type ChatState = GetState<ChatSlice> @@ -48,7 +49,7 @@ function getInitialState() { return { activeChatId: '', chatInstances: [], - modelOverrideConfig: {}, + overrideModelsConfig: {}, } satisfies ChatState } @@ -267,8 +268,8 @@ export const createChatSlice: StateCreator< const appendSystemPrompt = (() => { let result = '' - if (state.provideFilePathsTreePromptToGpt) - result += `\n${state.filePathsTreePrompt}` + if (state.provideFileInfoToGptMap.allFilePaths) + result += `\n${state.provideFileInfoPromptMap.allFilePathsPrompt}` return result })() @@ -283,13 +284,17 @@ export const createChatSlice: StateCreator< chatIdAbortCtrlMap.set(chatId, abortCtrl) + const contextFilePaths = state.getContextFilePaths() + await fetchLlmStream({ signal: abortCtrl.signal, messages: sendMessages, prompt: sendInputtingPrompt, appendSystemPrompt, singleFilePath, - contextFilePaths: state.checkedFilePaths, + contextFilePaths, + editingFilePath: state.ideActiveFilePath, + overrideModelsConfig: state.overrideModelsConfig, rootPath: getGlobalConfig().rootPath, onError(e) { console.error('fetchLlmStream error:', e) @@ -342,10 +347,26 @@ export const createChatSlice: StateCreator< status: ChatMessageStatus.Success, }, false) }, - updateModelOverrideConfig(modelOverrideConfig) { + updateOverrideModelsConfig(overrideModelsConfig) { const state = get() - const finalModelOverrideConfig = typeof modelOverrideConfig === 'function' ? modelOverrideConfig(state.modelOverrideConfig) : modelOverrideConfig + const finalModelOverrideConfig = typeof overrideModelsConfig === 'function' ? 
overrideModelsConfig(state.overrideModelsConfig) : overrideModelsConfig - set({ modelOverrideConfig: finalModelOverrideConfig }) + set({ overrideModelsConfig: finalModelOverrideConfig }) + }, + getContextFilePaths() { + const state = get() + const contextPaths: string[] = [] + const { checkedFileContents, activeIdeFileContents, openingIdeFileContents } = state.provideFileInfoToGptMap + + if (checkedFileContents) + contextPaths.push(...state.checkedFilePaths) + + if (activeIdeFileContents) + contextPaths.push(state.ideActiveFilePath) + + if (openingIdeFileContents) + contextPaths.push(...state.ideOpeningFilePaths) + + return [...new Set(contextPaths)] }, }) diff --git a/packages/gpt-runner-web/client/src/store/zustand/global/file-tree.slice.ts b/packages/gpt-runner-web/client/src/store/zustand/global/file-tree.slice.ts index 06be75e..6eaecb4 100644 --- a/packages/gpt-runner-web/client/src/store/zustand/global/file-tree.slice.ts +++ b/packages/gpt-runner-web/client/src/store/zustand/global/file-tree.slice.ts @@ -11,36 +11,48 @@ export type FileInfoSidebarTreeItem = FileInfoTreeItem & { export type FileSidebarTreeItem = TreeItemBaseState<FileInfoSidebarTreeItem> export interface FileTreeSlice { - filePathsTreePrompt: string - provideFilePathsTreePromptToGpt: boolean expendedFilePaths: string[] checkedFilePaths: string[] excludeFileExts: string[] ideActiveFilePath: string ideOpeningFilePaths: string[] - provideIdeOpeningFilePathsToGpt: boolean + provideFileInfoToGptMap: { + allFilePaths: boolean + checkedFileContents: boolean + activeIdeFileContents: boolean + openingIdeFileContents: boolean + } + provideFileInfoPromptMap: { + allFilePathsPrompt: string + } updateExcludeFileExts: (excludeFileExts: string[] | ((oldExcludeFileExts: string[]) => string[])) => void - updateProvideFilePathsTreePromptToGpt: (provideFilePathsTreePromptToGpt: boolean) => void - updateFilePathsTreePrompt: (promptOrFileTreeItem: string | FileSidebarTreeItem[]) => void updateExpendedFilePaths: 
(expendedFilePaths: string[] | ((oldExpendedFilePaths: string[]) => string[])) => void updateCheckedFilePaths: (checkedFilePaths: string[] | ((oldCheckedFilePaths: string[]) => string[])) => void updateIdeActiveFilePath: (ideActiveFilePath: string) => void updateIdeOpeningFilePaths: (ideOpeningFilePaths: string[] | ((oldIdeOpeningFilePaths: string[]) => string[])) => void - updateProvideIdeOpeningFilePathsToGpt: (provideIdeOpeningFilePathsToGpt: boolean) => void + updateProvideFileInfoToGptMap: (provideFileInfoToGptMap: Partial<FileTreeSlice['provideFileInfoToGptMap']>) => void + updateProvideFileInfoPromptMap: (provideFileInfoPromptMap: Partial<FileTreeSlice['provideFileInfoPromptMap']>) => void + updateAllFilePathsPrompt: (allFilePathsPromptOrFileTreeItem: string | FileSidebarTreeItem[]) => void } export type FileTreeState = GetState<FileTreeSlice> function getInitialState() { return { - filePathsTreePrompt: '', - provideFilePathsTreePromptToGpt: false, expendedFilePaths: [], checkedFilePaths: [], excludeFileExts: [], ideActiveFilePath: '', ideOpeningFilePaths: [], - provideIdeOpeningFilePathsToGpt: false, + provideFileInfoToGptMap: { + allFilePaths: false, + checkedFileContents: true, + activeIdeFileContents: false, + openingIdeFileContents: true, + }, + provideFileInfoPromptMap: { + allFilePathsPrompt: '', + }, } satisfies FileTreeState } @@ -58,10 +70,25 @@ export const createFileTreeSlice: StateCreator< excludeFileExts: [...new Set(_excludeFileExts)], }) }, - updateProvideFilePathsTreePromptToGpt(provideFilePathsTreePromptToGpt) { - set({ provideFilePathsTreePromptToGpt }) + updateProvideFileInfoToGptMap(provideFileInfoToGptMap) { + set({ + provideFileInfoToGptMap: { + ...get().provideFileInfoToGptMap, + ...provideFileInfoToGptMap, + }, + }) }, - updateFilePathsTreePrompt(promptOrFileTreeItem) { + updateProvideFileInfoPromptMap(provideFileInfoPromptMap) { + set({ + provideFileInfoPromptMap: { + ...get().provideFileInfoPromptMap, + ...provideFileInfoPromptMap, 
+ }, + }) + }, + updateAllFilePathsPrompt(promptOrFileTreeItem) { + const state = get() + let result = '' if (typeof promptOrFileTreeItem === 'string') @@ -75,7 +102,9 @@ export const createFileTreeSlice: StateCreator< }) } - set({ filePathsTreePrompt: result }) + state.updateProvideFileInfoPromptMap({ + allFilePathsPrompt: result, + }) }, updateExpendedFilePaths(expendedFilePaths) { const result = typeof expendedFilePaths === 'function' ? expendedFilePaths(get().expendedFilePaths) : expendedFilePaths @@ -92,7 +121,4 @@ export const createFileTreeSlice: StateCreator< const result = typeof ideOpeningFilePaths === 'function' ? ideOpeningFilePaths(get().ideOpeningFilePaths) : ideOpeningFilePaths set({ ideOpeningFilePaths: result }) }, - updateProvideIdeOpeningFilePathsToGpt(provideIdeOpeningFilePathsToGpt) { - set({ provideIdeOpeningFilePathsToGpt }) - }, }) diff --git a/packages/gpt-runner-web/client/src/store/zustand/temp/index.ts b/packages/gpt-runner-web/client/src/store/zustand/temp/index.ts index a9bd349..4803848 100644 --- a/packages/gpt-runner-web/client/src/store/zustand/temp/index.ts +++ b/packages/gpt-runner-web/client/src/store/zustand/temp/index.ts @@ -1,21 +1,30 @@ import type { StateCreator } from 'zustand' import type { GetState } from '../types' import { createStore } from '../utils' +import { FileSidebarTreeItem } from '../global/file-tree.slice' +import { BaseResponse, GetCommonFilesResData, travelTree } from '@nicepkg/gpt-runner-shared/common' +import { useGlobalStore } from '../global' export interface TempSlice { + filesTree: FileSidebarTreeItem[] + fullPathFileMap: Record<string, FileSidebarTreeItem> filesRelativePaths: string[] + updateFilesTree: (filesTree: FileSidebarTreeItem[], updateFullPathFileMap?: boolean) => void updateFilesRelativePaths: (filesRelativePaths: string[]) => void + updateFullPathFileMapFromFileTree: (filesTree: FileSidebarTreeItem[]) => void + handleFetchCommonFilesTreeResChange: (fetchCommonFilesTreeRes: 
BaseResponse<GetCommonFilesResData> | undefined) => void } export type TempState = GetState<TempSlice> function getInitialState() { return { + filesTree: [], + fullPathFileMap: {}, filesRelativePaths: [], } satisfies TempState } - export const createTempSlice: StateCreator< TempSlice, [], @@ -23,14 +32,77 @@ export const createTempSlice: StateCreator< TempSlice > = (set, get) => ({ ...getInitialState(), - updateFilesRelativePaths(filesRelativePaths: string[]) { + updateFilesTree(filesTree, updateFullPathFileMap = false) { + const state = get() + + if (updateFullPathFileMap) + state.updateFullPathFileMapFromFileTree(filesTree) + + set({ + filesTree, + }) + }, + updateFilesRelativePaths(filesRelativePaths) { set({ filesRelativePaths: [...new Set(filesRelativePaths)], }) + }, + updateFullPathFileMapFromFileTree(filesTree) { + const result: Record<string, FileSidebarTreeItem> = {} + travelTree(filesTree, (item) => { + if (item.otherInfo) + result[item.otherInfo.fullPath] = item + }) + set({ + fullPathFileMap: result, + }) + }, + handleFetchCommonFilesTreeResChange(fetchCommonFilesTreeRes) { + const filesInfoTree = fetchCommonFilesTreeRes?.data?.filesInfoTree + const state = get() + const globalState = useGlobalStore.getState() + + if (globalState.excludeFileExts.length) { + // update excludeFileExts + const { includeFileExts = [], allFileExts = [] } = fetchCommonFilesTreeRes?.data || {} + const excludeFileExts = allFileExts.filter(ext => !includeFileExts.includes(ext)) + globalState.updateExcludeFileExts(excludeFileExts) + } + + + if (!filesInfoTree) + return + + const filesRelativePaths: string[] = [] + const finalFilesSidebarTree = travelTree(filesInfoTree, (item) => { + const oldIsExpanded = globalState.expendedFilePaths.includes(item.fullPath) + const oldIsChecked = globalState.checkedFilePaths.includes(item.fullPath) + + const result: FileSidebarTreeItem = { + id: item.id, + name: item.name, + path: item.fullPath, + isLeaf: item.isFile, + otherInfo: { + ...item, + 
checked: oldIsChecked, + }, + isExpanded: oldIsExpanded, + } + + item.isFile && filesRelativePaths.push(item.projectRelativePath) + + return result + }) + + + state.updateFilesTree(finalFilesSidebarTree, true) + globalState.updateAllFilePathsPrompt(finalFilesSidebarTree) + state.updateFilesRelativePaths(filesRelativePaths) } }) -export const useTempStore = createStore('TempStore')<TempSlice, any>( +export const useTempStore = createStore('TempStore', false)<TempSlice, any>( (...args) => ({ ...createTempSlice(...args), }) diff --git a/packages/gpt-runner-web/client/src/store/zustand/utils.ts b/packages/gpt-runner-web/client/src/store/zustand/utils.ts index 35997f6..e5c4158 100644 --- a/packages/gpt-runner-web/client/src/store/zustand/utils.ts +++ b/packages/gpt-runner-web/client/src/store/zustand/utils.ts @@ -16,12 +16,12 @@ export function resetAllState() { resetStateQueue.forEach(resetState => resetState()) } -export function createStore(devtoolsName: string) { +export function createStore(devtoolsName: string, connectToDevTools = true) { const newCreate = (store: any) => { let result: any // https://github.com/pmndrs/zustand/issues/852#issuecomment-1059783350 - if (EnvConfig.get('NODE_ENV') === 'development') { + if (EnvConfig.get('NODE_ENV') === 'development' && connectToDevTools) { result = create( devtools(store, { name: devtoolsName, diff --git a/packages/gpt-runner-web/server/src/controllers/gpt-files.controller.ts b/packages/gpt-runner-web/server/src/controllers/gpt-files.controller.ts index fd71854..143cf6e 100644 --- a/packages/gpt-runner-web/server/src/controllers/gpt-files.controller.ts +++ b/packages/gpt-runner-web/server/src/controllers/gpt-files.controller.ts @@ -6,8 +6,6 @@ import { Debug, GetGptFileInfoReqParamsSchema, GetGptFilesReqParamsSchema, InitG import type { ControllerConfig } from '../types' import { getValidFinalPath } from '../services/valid-path' -const debug = new Debug('gpt-files.controller') - export const gptFilesControllers: 
ControllerConfig = { namespacePath: '/gpt-files', controllers: [ @@ -15,6 +13,7 @@ export const gptFilesControllers: ControllerConfig = { url: '/', method: 'get', handler: async (req, res) => { + const debug = new Debug('gpt-files.controller') const query = req.query as GetGptFilesReqParams verifyParamsByZod(query, GetGptFilesReqParamsSchema) diff --git a/packages/gpt-runner-web/server/src/controllers/llm.controller.ts b/packages/gpt-runner-web/server/src/controllers/llm.controller.ts index ff47cfb..1d34212 100644 --- a/packages/gpt-runner-web/server/src/controllers/llm.controller.ts +++ b/packages/gpt-runner-web/server/src/controllers/llm.controller.ts @@ -1,9 +1,8 @@ import type { Request, Response } from 'express' import type { ChatModelType, ChatStreamReqParams, FailResponse, SingleFileConfig, SuccessResponse } from '@nicepkg/gpt-runner-shared/common' -import { ChatStreamReqParamsSchema, STREAM_DONE_FLAG, buildFailResponse, buildSuccessResponse } from '@nicepkg/gpt-runner-shared/common' +import { ChatStreamReqParamsSchema, Debug, STREAM_DONE_FLAG, buildFailResponse, buildSuccessResponse, toUnixPath } from '@nicepkg/gpt-runner-shared/common' import { PathUtils, verifyParamsByZod } from '@nicepkg/gpt-runner-shared/node' -import { createFileContext, getSecrets, loadUserConfig, parseGptFile } from '@nicepkg/gpt-runner-core' -import { llmChain } from '../services' +import { createFileContext, getSecrets, llmChain, loadUserConfig, parseGptFile } from '@nicepkg/gpt-runner-core' import { getValidFinalPath } from '../services/valid-path' import type { ControllerConfig } from '../types' @@ -14,6 +13,8 @@ export const llmControllers: ControllerConfig = { url: '/chat-stream', method: 'post', handler: async (req: Request, res: Response) => { + const debug = new Debug('llm.controller') + res.writeHead(200, { 'Content-Type': 'text/event-stream', 'Cache-Control': 'no-cache, no-transform', @@ -32,6 +33,8 @@ export const llmControllers: ControllerConfig = { singleFileConfig: 
singleFileConfigFromParams, appendSystemPrompt = '', contextFilePaths, + editingFilePath, + overrideModelsConfig, rootPath, } = body @@ -53,7 +56,10 @@ export const llmControllers: ControllerConfig = { }) } - const model = singleFileConfig?.model + const model = { + ...singleFileConfig?.model, + ...overrideModelsConfig?.[singleFileConfig?.model?.type as ChatModelType || ''], + } as SingleFileConfig['model'] const secretFromUserConfig = userConfig.model?.type === model?.type ? userConfig.model?.secrets : undefined let secretsFromStorage = await getSecrets(model?.type as ChatModelType || null) @@ -74,21 +80,25 @@ export const llmControllers: ControllerConfig = { return res.write(`data: ${JSON.stringify(buildFailResponse(options))}\n\n`) } - let finalSystemPrompt = systemPromptFromParams || singleFileConfig?.systemPrompt || '' - - // provide file context - if (contextFilePaths && finalPath) { - const fileContext = await createFileContext({ - rootPath: finalPath, - filePaths: contextFilePaths, - }) - - finalSystemPrompt += `\n${fileContext}\n` - } - - finalSystemPrompt += appendSystemPrompt + console.log('debug', process.env.DEBUG) + debug.log('model config', model) try { + let finalSystemPrompt = systemPromptFromParams || singleFileConfig?.systemPrompt || '' + + // provide file context + if (contextFilePaths && finalPath) { + const fileContext = await createFileContext({ + rootPath: finalPath, + filePaths: contextFilePaths?.map(toUnixPath), + editingFilePath: toUnixPath(editingFilePath), + }) + + finalSystemPrompt += `\n${fileContext}\n` + } + + finalSystemPrompt += appendSystemPrompt + const chain = await llmChain({ messages, systemPrompt: finalSystemPrompt, diff --git a/packages/gpt-runner-web/server/src/middleware.ts b/packages/gpt-runner-web/server/src/middleware.ts index f136f0a..e80d947 100644 --- a/packages/gpt-runner-web/server/src/middleware.ts +++ b/packages/gpt-runner-web/server/src/middleware.ts @@ -3,9 +3,8 @@ import { Debug } from 
'@nicepkg/gpt-runner-shared/common'
 import { sendFailResponse } from '@nicepkg/gpt-runner-shared/node'
 import type { NextFunction, Request, Response } from 'express'
 
-const debug = new Debug('middleware.ts')
-
 export function errorHandlerMiddleware(err: Error, req: Request, res: Response, next: NextFunction) {
+  const debug = new Debug('middleware.ts')
   debug.error(String(err))
 
   // console.error(err.stack) // Log the error stack trace
diff --git a/packages/gpt-runner-web/server/src/services/index.ts b/packages/gpt-runner-web/server/src/services/index.ts
deleted file mode 100644
index 639918d..0000000
--- a/packages/gpt-runner-web/server/src/services/index.ts
+++ /dev/null
@@ -1 +0,0 @@
-export * from '@nicepkg/gpt-runner-core'
diff --git a/playground/scripts/gpt/i18n-helper.gpt.md b/playground/scripts/gpt/i18n-helper.gpt.md
new file mode 100644
index 0000000..c0f14c5
--- /dev/null
+++ b/playground/scripts/gpt/i18n-helper.gpt.md
@@ -0,0 +1,44 @@
+```json
+{
+  "title": "common/i18n helper",
+  "model": {
+    "modelName": "gpt-4"
+  }
+}
+```
+
+# System Prompt
+
+The user is writing frontend code and wants to use i18n to support multiple languages. They are using the react-i18next library to do so. You can help them write the code.
+
+The user will provide some JSON key-value pairs for you, like:
+
+[en]
+"copy_btn": "Copy",
+"insert_btn": "Insert",
+
+You should help the user translate these key-value pairs into zh_CN, zh_Hant, ja and de. 
You should reply like this: + +```md +[zh_CN] +"copy_btn": "复制", +"insert_btn": "插入", + +[zh_Hant] +"copy_btn": "複製", +"insert_btn": "插入", + +[ja] +"copy_btn": "コピー", +"insert_btn": "挿入", + +[de] +"copy_btn": "Kopieren", +"insert_btn": "Einfügen", +``` + +# User Prompt + +[en] + + diff --git a/playground/scripts/gpt/solid.gpt.md b/playground/scripts/gpt/solid.gpt.md index 9881d70..56acbf4 100644 --- a/playground/scripts/gpt/solid.gpt.md +++ b/playground/scripts/gpt/solid.gpt.md @@ -2,7 +2,7 @@ { "title": "common/", "model": { - "modalName": "gpt-3.5-turbo-16k", + "modelName": "gpt-3.5-turbo-16k", "temperature": 0 } }