feat(gpt-runner-vscode): add watching the currently edited file as context

This commit is contained in:
JinmingYang
2023-07-03 02:22:37 +08:00
parent 684d8ad7a6
commit b0cc8df73c
52 changed files with 1158 additions and 422 deletions

View File

@@ -2,7 +2,7 @@
{
"title": "分类目录/AI角色名字",
"model": {
"modalName": "gpt-3.5-turbo-16k",
"modelName": "gpt-3.5-turbo-16k",
"temperature": 0
}
}
@@ -20,6 +20,6 @@
这里可以写你的备注
`model` / `modalName` / `temperature` / `System Prompt` / `User Prompt` 都是**可选**参数,而且可定制参数还有非常多。
`model` / `modelName` / `temperature` / `System Prompt` / `User Prompt` 都是**可选**参数,而且可定制参数还有非常多。
你还可以通过项目根目录下的 `gptr.config.json` 覆盖很多参数的默认值

View File

@@ -2,7 +2,7 @@
{
"title": "Categories-Name/AI-Preset-Name",
"model": {
"modalName": "gpt-3.5-turbo-16k",
"modelName": "gpt-3.5-turbo-16k",
"temperature": 0
}
}
@@ -21,6 +21,6 @@ when you create a new chat with this preset, user prompt text will auto fill in
Here you can write your remarks
`model` / `modalName` / `temperature` / `System Prompt` / `User Prompt` are **optional** parameters, and there are many customizable parameters
`model` / `modelName` / `temperature` / `System Prompt` / `User Prompt` are **optional** parameters, and there are many customizable parameters
You can also override the default values of many parameters through `gptr.config.json` in the project root directory

View File

@@ -143,10 +143,16 @@ export async function getCommonFileTree(params: GetCommonFileTreeParams): Promis
export interface CreateFileContextParams {
rootPath: string
filePaths: string[]
editingFilePath?: string
}
export async function createFileContext(params: CreateFileContextParams) {
const { rootPath, filePaths } = params
const { rootPath, filePaths, editingFilePath } = params
// exclude editing file path
const contextFilePaths = editingFilePath ? filePaths.filter(filePath => filePath !== editingFilePath) : filePaths
const editingFileRelativePath = editingFilePath ? PathUtils.relative(rootPath, editingFilePath) : ''
const editingFileContent = editingFilePath ? await FileUtils.readFile({ filePath: editingFilePath }) : ''
const baseTips = `Please answer the user's question based on the user's file path and file content.
The file path and file content will be separated by five single quotes.
@@ -155,7 +161,7 @@ export async function createFileContext(params: CreateFileContextParams) {
let tips = baseTips
for (const filePath of filePaths) {
for (const filePath of contextFilePaths) {
const relativePath = PathUtils.relative(rootPath, filePath)
const content = await FileUtils.readFile({ filePath })
@@ -172,6 +178,22 @@ ${content}
tips += fileTips
}
if (editingFileRelativePath) {
tips += `\nAt the same time,
User is editing the content of this file,
maybe User is asking you about this file,
Here is the file:
'''''
[file path]
${editingFileRelativePath}
[file content]
${editingFileContent}
'''''
`
}
tips += `\nWhen you want to create/modify/delete a file or talk about a file, you should always return the full path of the file.
For example, if user provide you with a file path \`src/component/button.ts\`, you should return \`src/component/button.ts\` instead of \`button.ts\ when you talk about it.

View File

@@ -9,8 +9,9 @@ export class Debug {
constructor(label: string) {
this.label = `gpt-runner:${label}`
if (process.env.DEBUG === 'enabled')
if (process.env.DEBUG)
debug.enable(this.label)
this.debugger = debug(this.label)
// @ts-ignore

View File

@@ -24,6 +24,8 @@ export interface ChatModelTypeMap {
[ChatModelType.HuggingFace]: HuggingFaceModelConfig
}
export type PartialChatModelTypeMap = Partial<ChatModelTypeMap>
export type GetModelConfigType<T extends ChatModelType, P extends 'config' | 'secrets'> = {
config: ChatModelTypeMap[T]
secrets: ChatModelTypeMap[T]['secrets']

View File

@@ -1,5 +1,5 @@
import type { FileInfoTree } from './common-file'
import type { SingleChatMessage, SingleFileConfig, UserConfig } from './config'
import type { PartialChatModelTypeMap, SingleChatMessage, SingleFileConfig, UserConfig } from './config'
import type { ServerStorageName } from './enum'
import type { GptFileInfo, GptFileInfoTree } from './gpt-file'
@@ -34,7 +34,9 @@ export interface ChatStreamReqParams {
* and get the real time singleFileConfig and then provide singleFileConfig to LangchainJs
*/
singleFileConfig?: SingleFileConfig
overrideModelsConfig?: PartialChatModelTypeMap
contextFilePaths?: string[]
editingFilePath?: string
rootPath?: string
}

View File

@@ -2,7 +2,7 @@ import type { z } from 'zod'
import type { GetModelConfigType } from '../../types'
import { ChatModelType } from '../../types'
import { OpenaiModelConfigSchema, OpenaiSecretsSchema } from './openai.zod'
import { HuggingFaceModelConfigSchema, HuggingFaceSecretsSchema } from './hugging-fface.zod'
import { HuggingFaceModelConfigSchema, HuggingFaceSecretsSchema } from './hugging-face.zod'
export * from './base.zod'
export * from './openai.zod'

View File

@@ -1,7 +1,9 @@
import { z } from 'zod'
import type { FilterPattern, FormCheckboxGroupConfig, FormFieldBaseConfig, FormInputConfig, FormItemConfig, FormOption, FormRadioGroupConfig, FormSelectConfig, FormTextareaConfig, SingleChatMessage, SingleFileConfig, UserConfig, UserConfigForUser } from '../../types'
import { type ChatModel, ChatModelType, type FilterPattern, type FormCheckboxGroupConfig, type FormFieldBaseConfig, type FormInputConfig, type FormItemConfig, type FormOption, type FormRadioGroupConfig, type FormSelectConfig, type FormTextareaConfig, type SingleChatMessage, type SingleFileConfig, type UserConfig, type UserConfigForUser } from '../../types'
import { ChatRoleSchema } from '../enum.zod'
import type { PartialChatModelTypeMap } from './../../types/config/base.config'
import { OpenaiModelConfigSchema } from './openai.zod'
import { HuggingFaceModelConfigSchema } from './hugging-face.zod'
export const FilterPatternSchema = z.union([
z.array(z.union([z.string(), z.instanceof(RegExp)])),
@@ -12,8 +14,19 @@ export const FilterPatternSchema = z.union([
z.undefined(),
]) satisfies z.ZodType<FilterPattern>
// OpenaiModelConfigSchema or HuggingFaceModelConfigSchema
export const ChatModelSchema = z.union([
OpenaiModelConfigSchema,
HuggingFaceModelConfigSchema,
]) satisfies z.ZodType<ChatModel>
export const PartialChatModelTypeMapSchema = z.object({
[ChatModelType.Openai]: OpenaiModelConfigSchema.optional(),
[ChatModelType.HuggingFace]: HuggingFaceModelConfigSchema.optional(),
}) satisfies z.ZodType<PartialChatModelTypeMap>
export const UserConfigSchema = z.object({
model: OpenaiModelConfigSchema.optional().describe('The LLM model configuration'),
model: ChatModelSchema.optional().describe('The LLM model configuration'),
rootPath: z.string().optional().describe('The root path of the project'),
exts: z.array(z.string()).optional().default(['.gpt.md']).describe('The file extensions to be used'),
includes: FilterPatternSchema.optional().default(null).describe('The include patterns for filtering files'),

View File

@@ -1,6 +1,6 @@
import { z } from 'zod'
import type { ChatStreamReqParams, GetCommonFilesReqParams, GetGptFileInfoReqParams, GetGptFilesReqParams, GetUserConfigReqParams, InitGptFilesReqParams, OpenEditorReqParams, StorageClearReqParams, StorageGetItemReqParams, StorageRemoveItemReqParams, StorageSetItemReqParams } from '../types'
import { SingleChatMessageSchema, SingleFileConfigSchema } from './config'
import { PartialChatModelTypeMapSchema, SingleChatMessageSchema, SingleFileConfigSchema } from './config'
import { ServerStorageNameSchema } from './enum.zod'
export const ChatStreamReqParamsSchema = z.object({
@@ -10,7 +10,9 @@ export const ChatStreamReqParamsSchema = z.object({
appendSystemPrompt: z.string().optional(),
singleFilePath: z.string().optional(),
singleFileConfig: SingleFileConfigSchema.optional(),
overrideModelsConfig: PartialChatModelTypeMapSchema.optional(),
contextFilePaths: z.array(z.string()).optional(),
editingFilePath: z.string().optional(),
rootPath: z.string().optional(),
}) satisfies z.ZodType<ChatStreamReqParams>

View File

@@ -5,8 +5,6 @@ import { getGlobalCacheDir } from '../get-cache-dir'
import { getAxiosInstance } from '../axios'
import { Debug } from '../../../common'
const debug = new Debug('tunnel')
// see: https://github.com/gradio-app/gradio/blob/main/gradio/tunneling.py
export class BinaryDownloader {
private static readonly VERSION = '0.2'
@@ -23,6 +21,7 @@ export class BinaryDownloader {
}
public static async downloadBinary() {
const debug = new Debug('tunnel')
const binaryPath = await BinaryDownloader.getBinaryPath()
if (!fs.existsSync(binaryPath)) {

View File

@@ -39,7 +39,7 @@ export async function registerSyncOpeningFilePaths(
const maybeActiveDocs: (vscode.TextDocument | undefined)[] = [
vscode.window.activeTextEditor?.document,
state.activeEditor?.document,
...vscode.window.visibleTextEditors.map(editor => editor.document),
// ...vscode.window.visibleTextEditors.map(editor => editor.document),
]
state.activeFilePath = toUnixPath(maybeActiveDocs.find(doc => docIsFile(doc))?.uri.fsPath ?? '')
@@ -63,9 +63,12 @@ export async function registerSyncOpeningFilePaths(
debounceUpdateActiveFile()
}))
// update files when vscode is activated
debounceUpdateOpenFiles()
debounceUpdateActiveFile()
setTimeout(() => {
// wait for all document to be load
// update files when vscode is activated
debounceUpdateOpenFiles()
debounceUpdateActiveFile()
}, 1000)
return vscode.Disposable.from({
dispose,

View File

@@ -33,7 +33,6 @@
"search_placeholder": "Suchen...",
"file_tree_top_tokens_tips": "<FileNumWrapper>{{fileNum}}</FileNumWrapper> Dateien <TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> Tokens.",
"file_tree_top_clear_checked_btn": "Auswahl aufheben",
"file_tree_top_all_file_path_as_prompt": "Alle Dateipfade als Vorschlag {{tokenNum}} Tokens",
"search_files_placeholder": "Dateien durchsuchen...",
"no_gpt_files_tips": "Es gibt keine <Badge>xxx.gpt.md</Badge> Datei im aktuellen Verzeichnis.",
"ask_for_create_gpt_file_tips": "Möchten Sie eine <Badge>{{fileName}}</Badge> Datei erstellen?",
@@ -46,6 +45,20 @@
"settings_tab_settings": "Einstellungen",
"settings_tab_config_info": "Konfigurationsinformationen",
"settings_tab_about": "Über",
"override_settings": "Einstellungen überschreiben",
"override_all_settings": "Alle Einstellungen überschreiben",
"context_settings": "Kontext-Einstellungen",
"context_settings_opening_ide_file_contents_checkbox_tips": "Öffnen von IDE-Dateiinhalten als Aufforderung<FileNumWrapper>{{fileNum}}</FileNumWrapper>Dateien<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>Token.",
"context_settings_active_ide_file_contents_checkbox_tips": "Aktive IDE-Dateiinhalte als Aufforderung<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>Token.",
"context_settings_selected_files_checkbox_label": "Ausgewählte Dateien als Aufforderung. Aktuell ausgewählte<FileNumWrapper>{{fileNum}}</FileNumWrapper>Dateien<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>Token.",
"context_settings_all_file_paths_checkbox_label": "Alle Dateipfade als Aufforderung<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>Token.",
"model_settings_btn": "Modell-Einstellungen",
"openai_model_name": "Modellname",
"openai_temperature": "Temperatur",
"openai_max_tokens": "Maximale Antwort-Token",
"openai_top_p": "Top P",
"openai_frequency_penalty": "Frequenzstrafe",
"openai_presence_penalty": "Anwesenheitsstrafe",
"version": "Version",
"github": "Github",
"reward": "Belohnung",
@@ -65,6 +78,7 @@
"toast_save_error": "Speichern fehlgeschlagen!",
"toast_create_success": "Erstellen erfolgreich!",
"toast_create_error": "Erstellen fehlgeschlagen!",
"toast_copy_success": "Kopiert!"
"toast_copy_success": "Kopiert!",
"toast_selected_files_as_prompt_reopened": "Ausgewählte Dateien als Aufforderung wurden wieder geöffnet!"
}
}
}

View File

@@ -33,7 +33,6 @@
"search_placeholder": "Search...",
"file_tree_top_tokens_tips": "<FileNumWrapper>{{fileNum}}</FileNumWrapper> Files <TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> Tokens.",
"file_tree_top_clear_checked_btn": "Clear Checked",
"file_tree_top_all_file_path_as_prompt": "All file path as prompt {{tokenNum}} tokens",
"search_files_placeholder": "Search files...",
"no_gpt_files_tips": "There is no <Badge>xxx.gpt.md</Badge> file in the current directory.",
"ask_for_create_gpt_file_tips": "Do you want to create a <Badge>{{fileName}}</Badge> file?",
@@ -46,6 +45,20 @@
"settings_tab_settings": "Settings",
"settings_tab_config_info": "Config Info",
"settings_tab_about": "About",
"override_settings": "Override Settings",
"override_all_settings": "Override All Settings",
"context_settings": "Context Settings",
"context_settings_opening_ide_file_contents_checkbox_tips": "Opening IDE File Contents As Prompt <FileNumWrapper>{{fileNum}}</FileNumWrapper> Files <TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> Tokens.",
"context_settings_active_ide_file_contents_checkbox_tips": "Active IDE File Contents As Prompt <TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> Tokens.",
"context_settings_selected_files_checkbox_label": "Selected Files As Prompt. Current Selected <FileNumWrapper>{{fileNum}}</FileNumWrapper> Files <TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> Tokens.",
"context_settings_all_file_paths_checkbox_label": "All File Path As Prompt <TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> Tokens.",
"model_settings_btn": "Model Settings",
"openai_model_name": "Model Name",
"openai_temperature": "Temperature",
"openai_max_tokens": "Max Reply Tokens",
"openai_top_p": "Top P",
"openai_frequency_penalty": "Frequency Penalty",
"openai_presence_penalty": "Presence Penalty",
"version": "Version",
"github": "Github",
"reward": "Reward",
@@ -65,6 +78,7 @@
"toast_save_error": "Save error!",
"toast_create_success": "Create success!",
"toast_create_error": "Create error!",
"toast_copy_success": "Copied!"
"toast_copy_success": "Copied!",
"toast_selected_files_as_prompt_reopened": "Selected Files As Prompt Has Reopened!"
}
}
}

View File

@@ -33,7 +33,6 @@
"search_placeholder": "検索...",
"file_tree_top_tokens_tips": "<FileNumWrapper>{{fileNum}}</FileNumWrapper>個のファイル、合計<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>トークン。",
"file_tree_top_clear_checked_btn": "選択をクリア",
"file_tree_top_all_file_path_as_prompt": "すべてのファイルパスをプロンプトとして使用、合計{{tokenNum}}トークン",
"search_files_placeholder": "ファイルを検索...",
"no_gpt_files_tips": "現在のディレクトリに<Badge>xxx.gpt.md</Badge>ファイルはありません。",
"ask_for_create_gpt_file_tips": "<Badge>{{fileName}}</Badge>ファイルを作成しますか?",
@@ -46,6 +45,20 @@
"settings_tab_settings": "設定",
"settings_tab_config_info": "設定情報",
"settings_tab_about": "情報",
"override_settings": "設定を上書き",
"override_all_settings": "すべての設定を上書きする",
"context_settings": "コンテキスト設定",
"context_settings_opening_ide_file_contents_checkbox_tips": "開いているIDEファイルの内容をプロンプトとして使用<FileNumWrapper>{{fileNum}}</FileNumWrapper>ファイル<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>トークン。",
"context_settings_active_ide_file_contents_checkbox_tips": "アクティブなIDEファイルの内容をプロンプトとして使用<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>トークン。",
"context_settings_selected_files_checkbox_label": "選択したファイルをプロンプトとして使用。現在選択中<FileNumWrapper>{{fileNum}}</FileNumWrapper>ファイル<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>トークン。",
"context_settings_all_file_paths_checkbox_label": "すべてのファイルパスをプロンプトとして使用<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>トークン。",
"model_settings_btn": "モデル設定",
"openai_model_name": "モデル名",
"openai_temperature": "温度",
"openai_max_tokens": "最大回答トークン数",
"openai_top_p": "トップP",
"openai_frequency_penalty": "頻度ペナルティ",
"openai_presence_penalty": "存在ペナルティ",
"version": "バージョン",
"github": "GitHub",
"reward": "寄付",
@@ -65,6 +78,7 @@
"toast_save_error": "保存できませんでした!",
"toast_create_success": "作成しました!",
"toast_create_error": "作成できませんでした!",
"toast_copy_success": "コピーしました!"
"toast_copy_success": "コピーしました!",
"toast_selected_files_as_prompt_reopened": "選択したファイルをプロンプトとして再度開きました!"
}
}
}

View File

@@ -33,7 +33,6 @@
"search_placeholder": "搜索...",
"file_tree_top_tokens_tips": "<FileNumWrapper>{{fileNum}}</FileNumWrapper>个文件,共<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>个 tokens。",
"file_tree_top_clear_checked_btn": "清除选中",
"file_tree_top_all_file_path_as_prompt": "将所有文件路径作为提示,共 {{tokenNum}} 个 tokens",
"search_files_placeholder": "搜索文件...",
"no_gpt_files_tips": "当前目录中没有<Badge>xxx.gpt.md</Badge>文件。",
"ask_for_create_gpt_file_tips": "是否要创建一个<Badge>{{fileName}}</Badge>文件?",
@@ -46,6 +45,20 @@
"settings_tab_settings": "设置",
"settings_tab_config_info": "配置信息",
"settings_tab_about": "关于",
"override_settings": "覆盖设置",
"override_all_settings": "覆盖所有设置",
"context_settings": "上下文设置",
"context_settings_opening_ide_file_contents_checkbox_tips": "将 IDE 正在打开的文件内容作为提示,<FileNumWrapper>{{fileNum}}</FileNumWrapper> 个文件, <TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> tokens",
"context_settings_active_ide_file_contents_checkbox_tips": "将 IDE 正在编辑的文件内容作为提示,<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> tokens",
"context_settings_selected_files_checkbox_label": "将选定的文件作为提示,当前选定 <FileNumWrapper>{{fileNum}}</FileNumWrapper> 个文件,<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> tokens",
"context_settings_all_file_paths_checkbox_label": "将所有文件路径作为提示,<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> tokens",
"model_settings_btn": "模型设置",
"openai_model_name": "模型名称",
"openai_temperature": "温度",
"openai_max_tokens": "最大回复令牌",
"openai_top_p": "Top P",
"openai_frequency_penalty": "频率惩罚",
"openai_presence_penalty": "存在惩罚",
"version": "版本",
"github": "GitHub",
"reward": "赞赏",
@@ -65,6 +78,7 @@
"toast_save_error": "保存失败!",
"toast_create_success": "创建成功!",
"toast_create_error": "创建失败!",
"toast_copy_success": "复制成功!"
"toast_copy_success": "复制成功!",
"toast_selected_files_as_prompt_reopened": "已重新打开选定的文件作为提示!"
}
}
}

View File

@@ -33,7 +33,6 @@
"search_placeholder": "搜索...",
"file_tree_top_tokens_tips": "<FileNumWrapper>{{fileNum}}</FileNumWrapper>個文件,共<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper>個 tokens。",
"file_tree_top_clear_checked_btn": "清除選中",
"file_tree_top_all_file_path_as_prompt": "將所有文件路徑作為提示,共 {{tokenNum}} 個 tokens",
"search_files_placeholder": "搜索文件...",
"no_gpt_files_tips": "當前目錄中沒有<Badge>xxx.gpt.md</Badge>文件。",
"ask_for_create_gpt_file_tips": "是否要創建一個<Badge>{{fileName}}</Badge>文件?",
@@ -46,6 +45,20 @@
"settings_tab_settings": "設定",
"settings_tab_config_info": "配置信息",
"settings_tab_about": "關於",
"override_settings": "覆寫設置",
"override_all_settings": "覆蓋所有設置",
"context_settings": "上下文設置",
"context_settings_opening_ide_file_contents_checkbox_tips": "將 IDE 正在打開的文件內容作為提示,<FileNumWrapper>{{fileNum}}</FileNumWrapper> 個文件, <TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> tokens",
"context_settings_active_ide_file_contents_checkbox_tips": "將 IDE 正在編輯的文件內容作為提示,<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> tokens",
"context_settings_selected_files_checkbox_label": "將選定的文件作為提示,當前選定 <FileNumWrapper>{{fileNum}}</FileNumWrapper> 個文件, <TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> tokens",
"context_settings_all_file_paths_checkbox_label": "將所有文件路徑作為提示,<TokenNumWrapper>{{tokenNum}}</TokenNumWrapper> tokens",
"model_settings_btn": "模型設置",
"openai_model_name": "模型名稱",
"openai_temperature": "溫度",
"openai_max_tokens": "最大回覆令牌",
"openai_top_p": "前P",
"openai_frequency_penalty": "頻率處罰",
"openai_presence_penalty": "存在處罰",
"version": "版本",
"github": "GitHub",
"reward": "贊賞",
@@ -65,6 +78,7 @@
"toast_save_error": "保存失敗!",
"toast_create_success": "创建成功!",
"toast_create_error": "创建失败!",
"toast_copy_success": "複製成功!"
"toast_copy_success": "複製成功!",
"toast_selected_files_as_prompt_reopened": "已重新開啟選定文件作為提示!"
}
}
}

View File

@@ -2,6 +2,8 @@ import { VSCodeTextField } from '@vscode/webview-ui-toolkit/react'
import styled from 'styled-components'
export const StyledVSCodeTextField = styled(VSCodeTextField)`
width: 100%;
&::part(root) {
border-radius: 0.25rem;
overflow: hidden;

View File

@@ -1,5 +1,5 @@
import { VSCodeButton } from '@vscode/webview-ui-toolkit/react'
import type { FC } from 'react'
import type { FC, MouseEvent } from 'react'
import { memo, useCallback, useEffect, useState } from 'react'
import clsx from 'clsx'
import type { AnimationProps, Target, Tween } from 'framer-motion'
@@ -25,7 +25,7 @@ export interface IconButtonProps extends GetComponentProps<InstanceType<typeof V
to: Target
}
animatingWhenClick?: boolean
onClick?: () => MaybePromise<any>
onClick?: (e: MouseEvent<HTMLElement>) => MaybePromise<any>
buttonStyle?: React.CSSProperties
}
@@ -68,11 +68,11 @@ export const IconButton: FC<IconButtonProps> = memo((props) => {
isAnimating && setDebouncedIsAnimating(isAnimating)
}, [isAnimating])
const handleClick = useCallback(async () => {
const handleClick = useCallback(async (e: MouseEvent<HTMLElement>) => {
if (animatingWhenClick)
setIsAnimating(true)
await onClick?.()
await onClick?.(e)
if (animatingWhenClick)
setIsAnimating(false)

View File

@@ -255,13 +255,18 @@ export const PopoverMenu: React.FC<PopoverMenuProps> = memo((props) => {
</div>
)}
>
<ChildrenWrapper onClick={() => {
<ChildrenWrapper onClick={(e: any) => {
e.stopPropagation()
if (!clickMode)
return
return false
getIsPopoverOpen() ? handleClose() : handleOpen()
if (!clickOutSideToClose)
setIsPin(true)
return false
}}>
<Children
ref={childrenHoverRef}

View File

@@ -1,4 +1,4 @@
import type { FC } from 'react'
import type { FC, ReactNode } from 'react'
import { memo, useState } from 'react'
import type { PopoverMenuProps } from '../popover-menu'
import { PopoverMenu } from '../popover-menu'
@@ -6,7 +6,7 @@ import { IconButton } from '../icon-button'
import { SelectOptionItem, SelectOptionList } from './select-option.styles'
export interface ISelectOption<T extends string = string> {
label: string
label: ReactNode
value: T
}

View File

@@ -6,26 +6,32 @@ export const SelectOptionList = styled.div`
width: max-content;
color: var(--foreground);
background: var(--panel-view-background);
max-width: 100%;
`
export const SelectOptionItem = styled.div`
padding: 0.5rem;
display: flex;
flex-wrap: wrap;
align-items: center;
cursor: pointer;
user-select: none;
font-size: var(--type-ramp-base-font-size);
border-bottom: 1px solid var(--panel-view-border);
&:last-child {
border-bottom: none;
}
&:hover {
color: var(--button-primary-foreground);
background: var(--button-primary-hover-background);
color: var(--button-secondary-foreground);
background: var(--button-secondary-hover-background);
border-bottom-color: transparent;
}
&[data-selected=true] {
color: var(--button-primary-foreground);
background: var(--button-primary-background);
color: var(--button-secondary-foreground);
background: var(--button-secondary-background);
border-bottom-color: transparent;
}
`

View File

@@ -0,0 +1,54 @@
import { useCallback, useEffect, useRef } from 'react'

type Procedure = (...args: any[]) => void

interface DebounceOptions {
  delay?: number
  leading?: boolean
}

/**
 * Returns a debounced wrapper around `fn`.
 *
 * The wrapper has a stable identity across renders (it only changes when
 * `delay` or `leading` change) while always invoking the latest `fn` passed
 * in. Any pending trailing invocation is cancelled on unmount.
 *
 * @param fn      function to debounce; the most recent render's version runs
 * @param options `delay` in ms (default 300); `leading` fires on the leading
 *                edge instead of the trailing edge (default false)
 */
export function useDebounceFn<F extends Procedure>(
  fn: F,
  options: DebounceOptions = {},
): F {
  const { delay = 300, leading = false } = options

  // Keep the freshest fn so a stale closure is never invoked.
  const latestFnRef = useRef(fn)
  const timeoutIdRef = useRef<number | null>(null)

  useEffect(() => {
    latestFnRef.current = fn
  }, [fn])

  const clearPending = useCallback(() => {
    if (timeoutIdRef.current === null)
      return
    clearTimeout(timeoutIdRef.current)
    timeoutIdRef.current = null
  }, [])

  const debounced = useCallback<F>(
    ((...args: any[]) => {
      // Leading edge fires only when no timer is currently pending.
      const fireImmediately = leading && timeoutIdRef.current === null
      clearPending()
      timeoutIdRef.current = window.setTimeout(() => {
        // In leading mode the trailing call is suppressed; the timer just
        // marks the end of the quiet period.
        if (!fireImmediately)
          latestFnRef.current(...args)
        timeoutIdRef.current = null
      }, delay)
      if (fireImmediately)
        latestFnRef.current(...args)
    }) as F,
    [delay, leading, clearPending],
  )

  // Drop any pending call when the component unmounts.
  useEffect(() => clearPending, [clearPending])

  return debounced
}

View File

@@ -1,5 +1,5 @@
import { useEffect } from 'react'
import { ClientEventName } from '@nicepkg/gpt-runner-shared/common'
import { ClientEventName, toUnixPath } from '@nicepkg/gpt-runner-shared/common'
import { useGlobalStore } from '../store/zustand/global'
import { emitter } from '../helpers/emitter'
import { useOn } from './use-on.hook'
@@ -14,8 +14,10 @@ export function useEmitBind(deps: any[] = []) {
useOn({
eventName: ClientEventName.UpdateIdeOpeningFiles,
listener: ({ filePaths }) => {
console.log('updateIdeOpeningFilePaths', filePaths)
updateIdeOpeningFilePaths(filePaths)
const unixFilePaths = filePaths?.map(toUnixPath)
console.log('updateIdeOpeningFilePaths', unixFilePaths)
updateIdeOpeningFilePaths(unixFilePaths)
},
deps: [...deps, updateIdeOpeningFilePaths],
})
@@ -23,8 +25,10 @@ export function useEmitBind(deps: any[] = []) {
useOn({
eventName: ClientEventName.UpdateIdeActiveFilePath,
listener: ({ filePath }) => {
console.log('updateIdeActiveFilePath', filePath)
updateIdeActiveFilePath(filePath)
const unixFilePath = toUnixPath(filePath)
console.log('updateIdeActiveFilePath', unixFilePath)
updateIdeActiveFilePath(unixFilePath)
},
deps: [...deps, updateIdeActiveFilePath],
})

View File

@@ -0,0 +1,32 @@
import { useQuery } from '@tanstack/react-query'
import { useEffect } from 'react'
import { useGlobalStore } from '../store/zustand/global'
import { fetchCommonFilesTree } from '../networks/common-files'
import { useTempStore } from '../store/zustand/temp'

export interface UseTokenNumProps {
  // NOTE(review): intentionally unrelated to this file — see UseGetCommonFilesTreeProps below.
}

export interface UseGetCommonFilesTreeProps {
  // project root directory whose file tree is fetched
  rootPath: string
  // when true (default), the fetched tree is pushed into the temp store
  syncChangeToStore?: boolean
}

/**
 * Fetches the common files tree for `rootPath` via react-query and
 * (optionally) mirrors the result into the temp zustand store.
 *
 * The query is disabled until `rootPath` is truthy, and refetches whenever
 * the excluded-extension filter changes (it is part of the query key).
 *
 * @returns the full react-query result object for the file-tree request
 */
export function useGetCommonFilesTree(props: UseGetCommonFilesTreeProps) {
  const { rootPath, syncChangeToStore = true } = props
  const { excludeFileExts } = useGlobalStore()
  const { handleFetchCommonFilesTreeResChange } = useTempStore()

  const useQueryReturns = useQuery({
    // excludeFileExts is part of the key so changing the filter refetches
    queryKey: ['file-tree', rootPath, excludeFileExts.join(',')],
    enabled: !!rootPath,
    queryFn: () => fetchCommonFilesTree({
      rootPath,
      excludeExts: excludeFileExts,
    }),
  })

  useEffect(() => {
    if (!syncChangeToStore)
      return
    handleFetchCommonFilesTreeResChange(useQueryReturns.data)
    // fix: include the store action in the deps for exhaustive-deps
    // correctness; zustand actions are stable, so this adds no extra runs
  }, [useQueryReturns.data, syncChangeToStore, handleFetchCommonFilesTreeResChange])

  return useQueryReturns
}

View File

@@ -0,0 +1,117 @@
// Aggregates token counts for everything that may be sent to the LLM as
// prompt context: the system prompt, the chat message history, and the
// file-based context sources (checked files, IDE opening/active files,
// and the all-file-paths prompt).
import { useCallback, useMemo } from 'react'
import type { SingleChat, SingleFileConfig } from '@nicepkg/gpt-runner-shared/common'
import { countTokenQuick } from '../helpers/utils'
import { useGlobalStore } from '../store/zustand/global'
import { useTempStore } from '../store/zustand/temp'
import { useUserConfig } from './use-user-config.hook'
export interface UseTokenNumProps {
// project root used when resolving the remote single-file config
rootPath?: string
// either a chat id (resolved through the global store) or a chat instance
chatIdOrChatInstance?: string | SingleChat
// when provided, the remote config fetch is skipped and this is used instead
singleFileConfig?: SingleFileConfig
}
export function useTokenNum(props?: UseTokenNumProps) {
const { rootPath, chatIdOrChatInstance, singleFileConfig: singleFileConfigFromParams } = props || {}
const {
provideFileInfoPromptMap,
provideFileInfoToGptMap,
checkedFilePaths,
ideActiveFilePath,
ideOpeningFilePaths,
getChatInstance,
getContextFilePaths,
} = useGlobalStore()
const { fullPathFileMap } = useTempStore()
// NOTE(review): "filaPaths" looks like a typo of "filePaths", but the name
// is part of this hook's returned object, so it is kept for compatibility.
const filaPathsPromptTokenNum = countTokenQuick(provideFileInfoPromptMap.allFilePathsPrompt)
// Resolve a chat instance from either an id or an already-built instance.
const chatInstance: SingleChat | undefined = useMemo(() => {
if (!chatIdOrChatInstance)
return undefined
if (typeof chatIdOrChatInstance === 'string')
return getChatInstance(chatIdOrChatInstance)
return chatIdOrChatInstance
}, [chatIdOrChatInstance, getChatInstance])
// Only fetch the remote config when none was passed in via props.
const { singleFileConfig: singleFileConfigFromRemote } = useUserConfig({
rootPath,
singleFilePath: chatInstance?.singleFilePath,
enabled: !singleFileConfigFromParams,
})
// Props take precedence over the remotely-fetched config.
const singleFileConfig = useMemo(() => {
return singleFileConfigFromParams || singleFileConfigFromRemote
}, [singleFileConfigFromParams, singleFileConfigFromRemote])
// Sum the pre-computed token counts stored on each file node in the temp
// store; paths missing from fullPathFileMap contribute 0.
const countFilePathsTokenNum = useCallback((filePaths: string[]) => {
return filePaths.reduce((pre, cur) => {
const file = fullPathFileMap[cur]
return pre + (file?.otherInfo?.tokenNum ?? 0)
}, 0)
}, [fullPathFileMap])
// Tokens contributed by the system prompt, if any.
const systemPromptTokenNum = useMemo(() => {
const { systemPrompt } = singleFileConfig || {}
if (!systemPrompt)
return 0
return countTokenQuick(systemPrompt)
}, [singleFileConfig, countTokenQuick])
// Tokens contributed by the full chat message history.
const messageTokenNum = useMemo(() => {
const { messages } = chatInstance || {}
if (!messages || !messages.length)
return 0
return messages.reduce((total, messageItem) => {
const { text } = messageItem
return total + countTokenQuick(text)
}, 0)
}, [chatInstance, countTokenQuick])
const checkedFilesContentPromptTokenNum = useMemo(() => countFilePathsTokenNum(checkedFilePaths), [checkedFilePaths, countFilePathsTokenNum])
// Tokens for every file currently open in the IDE.
const ideOpeningFileTokenNum = useMemo(() => {
return countFilePathsTokenNum(ideOpeningFilePaths)
}, [ideOpeningFilePaths, countFilePathsTokenNum])
// Tokens for the single file the IDE is actively editing.
const ideActiveFileTokenNum = useMemo(() => {
if (!ideActiveFilePath)
return 0
return countFilePathsTokenNum([ideActiveFilePath])
}, [ideActiveFilePath, countFilePathsTokenNum])
// The effective set of context file paths chosen by the current settings.
const contextFilePaths = getContextFilePaths()
const contextFilesTokenNum = useMemo(() =>
countFilePathsTokenNum(contextFilePaths)
, [contextFilePaths, countFilePathsTokenNum])
// Grand total: system prompt + history + context files, plus the
// all-file-paths prompt when that option is enabled.
const totalTokenNum = useMemo(() => {
const { allFilePaths } = provideFileInfoToGptMap
let result = systemPromptTokenNum + messageTokenNum + contextFilesTokenNum
if (allFilePaths)
result += filaPathsPromptTokenNum
return result
}, [
systemPromptTokenNum,
messageTokenNum,
contextFilesTokenNum,
provideFileInfoToGptMap,
filaPathsPromptTokenNum,
])
return {
totalTokenNum,
systemPromptTokenNum,
messageTokenNum,
ideOpeningFileTokenNum,
ideActiveFileTokenNum,
checkedFilesContentPromptTokenNum,
filaPathsPromptTokenNum,
}
}

View File

@@ -23,6 +23,8 @@ export async function fetchLlmStream(
singleFilePath,
singleFileConfig,
contextFilePaths,
editingFilePath,
overrideModelsConfig,
rootPath,
namespace,
onMessage = () => {},
@@ -30,6 +32,16 @@ export async function fetchLlmStream(
} = params
try {
const finalOverrideModelsConfig = Object.fromEntries(
Object.entries(overrideModelsConfig || {})
.map(([key, value]) => {
return [key, {
...value,
type: key,
}]
}),
)
await fetchEventSource(`${getGlobalConfig().serverBaseUrl}/api/chatgpt/chat-stream`, {
method: 'POST',
signal,
@@ -45,6 +57,8 @@ export async function fetchLlmStream(
singleFilePath,
singleFileConfig,
contextFilePaths,
editingFilePath,
overrideModelsConfig: finalOverrideModelsConfig,
rootPath,
} satisfies ChatStreamReqParams),
openWhenHidden: true,

View File

@@ -17,3 +17,12 @@ export const ChatPanelWrapper = styled.div`
export const ChatPanelPopoverTreeWrapper = styled.div`
height: 100%;
`
export const ConfigFormTitle = styled.div`
padding-left: 0.5rem;
margin: 1rem;
margin-bottom: 0;
font-size: 1rem;
font-weight: bold;
border-left: 0.25rem solid var(--foreground);
`

View File

@@ -24,7 +24,8 @@ import { isDarkTheme } from '../../../../styles/themes'
import { emitter } from '../../../../helpers/emitter'
import { ModelSettings } from '../settings/components/model-settings'
import { ContentWrapper } from '../../chat.styles'
import { ChatPanelPopoverTreeWrapper, ChatPanelWrapper } from './chat-panel.styles'
import { ContextSettings } from '../settings/components/context-settings'
import { ChatPanelPopoverTreeWrapper, ChatPanelWrapper, ConfigFormTitle } from './chat-panel.styles'
import { createRemarkOpenEditorPlugin } from './remark-plugin'
export interface ChatPanelProps {
@@ -358,7 +359,7 @@ export const ChatPanel: FC<ChatPanelProps> = memo((props) => {
style={{
paddingLeft: '0.5rem',
}}
text={'Model Settings'}
text={t('chat_page.model_settings_btn')}
iconClassName='codicon-settings'
hoverShowText={!isHovering}
></IconButton>
@@ -367,7 +368,15 @@ export const ChatPanel: FC<ChatPanelProps> = memo((props) => {
return <ContentWrapper $isPopoverContent style={{
maxWidth: '400px',
}}>
<ConfigFormTitle>
<ModelSettings rootPath={rootPath} singleFilePath={chatInstance?.singleFilePath} viewType='title'></ModelSettings>
{` ${t('chat_page.override_settings')}`}
</ConfigFormTitle>
<ModelSettings rootPath={rootPath} singleFilePath={chatInstance?.singleFilePath} viewType='model'></ModelSettings>
<ConfigFormTitle>
{t('chat_page.context_settings')}
</ConfigFormTitle>
<ContextSettings rootPath={rootPath}></ContextSettings>
</ContentWrapper>
}}
/>

View File

@@ -1,22 +1,24 @@
import { type FC, memo, useCallback, useEffect, useRef, useState } from 'react'
import { useQuery } from '@tanstack/react-query'
import { type FC, memo, useCallback, useEffect } from 'react'
import { ClientEventName, travelTree, travelTreeDeepFirst } from '@nicepkg/gpt-runner-shared/common'
import clsx from 'clsx'
import { VSCodeCheckbox, VSCodeLink } from '@vscode/webview-ui-toolkit/react'
import { Trans, useTranslation } from 'react-i18next'
import { toast } from 'react-hot-toast'
import type { SidebarProps } from '../../../../components/sidebar'
import { Sidebar } from '../../../../components/sidebar'
import { ErrorView } from '../../../../components/error-view'
import { fetchCommonFilesTree } from '../../../../networks/common-files'
import type { TreeItemProps, TreeItemState } from '../../../../components/tree-item'
import { Icon } from '../../../../components/icon'
import { IconButton } from '../../../../components/icon-button'
import { countTokenQuick, formatNumWithK } from '../../../../helpers/utils'
import { formatNumWithK } from '../../../../helpers/utils'
import { useGlobalStore } from '../../../../store/zustand/global'
import type { FileInfoSidebarTreeItem, FileSidebarTreeItem } from '../../../../store/zustand/global/file-tree.slice'
import { PopoverMenu } from '../../../../components/popover-menu'
import { useTempStore } from '../../../../store/zustand/temp'
import { useOn } from '../../../../hooks/use-on.hook'
import { useGetCommonFilesTree } from '../../../../hooks/use-get-common-files-tree.hook'
import { useDebounceFn } from '../../../../hooks/use-debounce-fn.hook'
import { useTokenNum } from '../../../../hooks/use-token-num.hook'
import { FileTreeItemRightWrapper, FileTreeSidebarHighlight, FileTreeSidebarUnderSearchWrapper, FilterWrapper } from './file-tree.styles'
export interface FileTreeProps {
@@ -28,60 +30,54 @@ export const FileTree: FC<FileTreeProps> = memo((props: FileTreeProps) => {
const { rootPath, reverseTreeUi } = props
const { t } = useTranslation()
const [filesTree, _setFilesTree] = useState<FileSidebarTreeItem[]>([])
const fullPathFileMapRef = useRef<Record<string, FileSidebarTreeItem>>({})
const {
excludeFileExts,
updateExcludeFileExts,
expendedFilePaths,
updateExpendedFilePaths,
checkedFilePaths,
updateCheckedFilePaths,
provideFilePathsTreePromptToGpt,
updateProvideFilePathsTreePromptToGpt,
filePathsTreePrompt,
updateFilePathsTreePrompt,
provideFileInfoToGptMap,
updateProvideFileInfoToGptMap,
} = useGlobalStore()
const { updateFilesRelativePaths } = useTempStore()
const updateMap = useCallback((tree: FileSidebarTreeItem[]) => {
const result: Record<string, FileSidebarTreeItem> = {}
travelTree(tree, (item) => {
if (item.otherInfo)
result[item.otherInfo.fullPath] = item
})
fullPathFileMapRef.current = result
}, [])
const setFilesTree = useCallback((tree: FileSidebarTreeItem[], isUpdateFullPathFileMap = false) => {
if (isUpdateFullPathFileMap)
updateMap(tree)
_setFilesTree(tree)
}, [_setFilesTree, updateMap])
const {
filesTree,
fullPathFileMap,
updateFilesTree,
} = useTempStore()
const updateFileItem = useCallback((fileItemOrFullPath: FileSidebarTreeItem | string, updater: (fileItem: FileSidebarTreeItem) => void) => {
const fullPath = typeof fileItemOrFullPath === 'string' ? fileItemOrFullPath : fileItemOrFullPath.otherInfo?.fullPath
if (!fullPath)
return
const fileItem = fullPathFileMapRef.current[fullPath]
const fileItem = fullPathFileMap[fullPath]
if (!fileItem)
return
updater(fileItem)
setFilesTree([...filesTree])
}, [filesTree, setFilesTree])
updateFilesTree([...filesTree])
}, [filesTree])
const { data: fetchCommonFilesTreeRes, isLoading, refetch: refreshFileTree } = useQuery({
queryKey: ['file-tree', rootPath, excludeFileExts.join(',')],
enabled: !!rootPath,
queryFn: () => fetchCommonFilesTree({
rootPath,
excludeExts: excludeFileExts,
}),
const { data: fetchCommonFilesTreeRes, isLoading, refetch: refreshFileTree } = useGetCommonFilesTree({
rootPath,
})
const { checkedFilesContentPromptTokenNum } = useTokenNum()
const openProvideCheckedFileContentsAsPrompt = useCallback(() => {
if (provideFileInfoToGptMap.checkedFileContents)
return
updateProvideFileInfoToGptMap({
checkedFileContents: true,
})
toast.success(t('chat_page.toast_selected_files_as_prompt_reopened'))
}, [provideFileInfoToGptMap.checkedFileContents, updateProvideFileInfoToGptMap])
const debounceOpenProvideCheckedFileContentsAsPrompt = useDebounceFn(openProvideCheckedFileContentsAsPrompt)
useOn({
eventName: [ClientEventName.RefreshTree, ClientEventName.RefreshFileTree],
listener: () => refreshFileTree(),
@@ -90,12 +86,12 @@ export const FileTree: FC<FileTreeProps> = memo((props: FileTreeProps) => {
// sync checked state
useEffect(() => {
if (!Object.values(fullPathFileMapRef.current).length || !filesTree.length)
if (!Object.values(fullPathFileMap).length || !filesTree.length)
return
// check all path in checkedFilePaths
checkedFilePaths.forEach((fullPath) => {
const file = fullPathFileMapRef.current[fullPath]
const file = fullPathFileMap[fullPath]
if (!file?.otherInfo)
return
@@ -119,50 +115,8 @@ export const FileTree: FC<FileTreeProps> = memo((props: FileTreeProps) => {
return item
})
// setFilesTree([...filesTree])
}, [checkedFilePaths, filesTree, setFilesTree])
useEffect(() => {
const filesInfoTree = fetchCommonFilesTreeRes?.data?.filesInfoTree
if (!filesInfoTree)
return
const filesRelativePaths: string[] = []
const finalFilesSidebarTree = travelTree(filesInfoTree, (item) => {
const oldIsExpanded = expendedFilePaths.includes(item.fullPath)
const oldIsChecked = checkedFilePaths.includes(item.fullPath)
const result: FileSidebarTreeItem = {
id: item.id,
name: item.name,
path: item.fullPath,
isLeaf: item.isFile,
otherInfo: {
...item,
checked: oldIsChecked,
},
isExpanded: oldIsExpanded,
}
item.isFile && filesRelativePaths.push(item.projectRelativePath)
return result
})
setFilesTree(finalFilesSidebarTree, true)
updateFilePathsTreePrompt(finalFilesSidebarTree)
updateFilesRelativePaths(filesRelativePaths)
}, [fetchCommonFilesTreeRes])
useEffect(() => {
if (excludeFileExts.length)
return
const { includeFileExts = [], allFileExts = [] } = fetchCommonFilesTreeRes?.data || {}
const _excludeFileExts = allFileExts.filter(ext => !includeFileExts.includes(ext))
updateExcludeFileExts(_excludeFileExts)
}, [fetchCommonFilesTreeRes])
// updateFileTree([...filesTree])
}, [checkedFilePaths, filesTree])
const renderTreeItemLeftSlot = useCallback((props: TreeItemState<FileInfoSidebarTreeItem>) => {
const { isLeaf, isExpanded, otherInfo } = props
@@ -186,8 +140,8 @@ export const FileTree: FC<FileTreeProps> = memo((props: FileTreeProps) => {
return preState
let finalPaths: string[] = []
const isLeaf = fullPathFileMapRef.current[fullPath].isLeaf
const children = fullPathFileMapRef.current[fullPath]?.children || []
const isLeaf = fullPathFileMap[fullPath].isLeaf
const children = fullPathFileMap[fullPath]?.children || []
if (!checked) {
const shouldRemovePaths: string[] = []
@@ -220,6 +174,8 @@ export const FileTree: FC<FileTreeProps> = memo((props: FileTreeProps) => {
return finalPaths
})
debounceOpenProvideCheckedFileContentsAsPrompt()
}
return <>
@@ -274,7 +230,7 @@ export const FileTree: FC<FileTreeProps> = memo((props: FileTreeProps) => {
if (!fullPath)
return
const file = fullPathFileMapRef.current[fullPath]
const file = fullPathFileMap[fullPath]
file.isExpanded = isExpanded
updateExpendedFilePaths((preState) => {
@@ -282,8 +238,8 @@ export const FileTree: FC<FileTreeProps> = memo((props: FileTreeProps) => {
return finalPaths
})
setFilesTree([...filesTree])
}, [filesTree, setFilesTree])
updateFilesTree([...filesTree])
}, [filesTree])
const buildSearchRightSlot = useCallback(() => {
const { allFileExts = [] } = fetchCommonFilesTreeRes?.data || {}
@@ -346,51 +302,33 @@ export const FileTree: FC<FileTreeProps> = memo((props: FileTreeProps) => {
showText={false}
iconClassName='codicon-refresh'
animatingWhenClick
onClick={refreshFileTree}
onClick={() => refreshFileTree()}
></IconButton>
</>
}, [fetchCommonFilesTreeRes, excludeFileExts, updateExcludeFileExts])
const buildUnderSearchSlot = useCallback(() => {
if (!Object.keys(fullPathFileMapRef.current).length)
return null
const filaPathsPromptTokenNum = countTokenQuick(filePathsTreePrompt)
const checkedFilesContentPromptTokenNum = checkedFilePaths.reduce((pre, cur) => {
const file = fullPathFileMapRef.current[cur]
return pre + (file?.otherInfo?.tokenNum ?? 0)
}, 0)
let totalTokenNum = checkedFilesContentPromptTokenNum
if (provideFilePathsTreePromptToGpt)
totalTokenNum += filaPathsPromptTokenNum
const resetAllChecked = () => {
updateCheckedFilePaths((preState) => {
preState.forEach((item) => {
const file = fullPathFileMapRef.current[item]
const file = fullPathFileMap[item]
file.otherInfo!.checked = false
return item
})
updateFilesTree([...filesTree])
return []
})
updateProvideFilePathsTreePromptToGpt(false)
}
const handleProvideFilePathsTreePromptToGptChange = (e: any) => {
const checked = e.target?.checked as boolean
updateProvideFilePathsTreePromptToGpt(checked)
}
return <FileTreeSidebarUnderSearchWrapper>
<Trans
t={t}
i18nKey={'chat_page.file_tree_top_tokens_tips'}
values={{
fileNum: checkedFilePaths.length,
tokenNum: formatNumWithK(totalTokenNum),
tokenNum: formatNumWithK(checkedFilesContentPromptTokenNum),
}}
components={{
FileNumWrapper: <FileTreeSidebarHighlight style={{ marginLeft: 0 }}></FileTreeSidebarHighlight>,
@@ -403,21 +341,8 @@ export const FileTree: FC<FileTreeProps> = memo((props: FileTreeProps) => {
}} onClick={resetAllChecked}>
{t('chat_page.file_tree_top_clear_checked_btn')}
</VSCodeLink>
<div>
<VSCodeCheckbox
style={{
marginTop: '0.5rem',
}}
checked={provideFilePathsTreePromptToGpt}
onChange={handleProvideFilePathsTreePromptToGptChange}>
{t('chat_page.file_tree_top_all_file_path_as_prompt', {
tokenNum: formatNumWithK(filaPathsPromptTokenNum),
})}
</VSCodeCheckbox>
</div>
</FileTreeSidebarUnderSearchWrapper>
}, [filePathsTreePrompt, checkedFilePaths, provideFilePathsTreePromptToGpt])
}, [checkedFilePaths])
const sortTreeItems = useCallback((items: TreeItemProps<FileInfoSidebarTreeItem>[]) => {
return items?.sort((a, b) => {

View File

@@ -47,6 +47,7 @@ export const InitGptFiles: FC<InitGptFilesProps> = memo((props) => {
return <Wrapper>
<Title>
<Trans
t={t}
i18nKey='chat_page.no_gpt_files_tips'
components={{
Title: <Title />,
@@ -57,6 +58,7 @@ export const InitGptFiles: FC<InitGptFilesProps> = memo((props) => {
<Title>
<Trans
t={t}
i18nKey='chat_page.ask_for_create_gpt_file_tips'
values={{
fileName: `./${GPT_RUNNER_OFFICIAL_FOLDER}/copilot.gpt.md`,

View File

@@ -0,0 +1,22 @@
import { VSCodeBadge, VSCodeCheckbox } from '@vscode/webview-ui-toolkit/react'
import { styled } from 'styled-components'
export const StyledBadge = styled(VSCodeBadge)`
white-space: nowrap;
margin: 0 0.25rem;
`
export const StyledVSCodeCheckbox = styled(VSCodeCheckbox)`
margin-bottom: 0.5rem;
flex: 1;
&::part(control) {
flex-shrink: 0;
}
` as typeof VSCodeCheckbox
export const SelectWrapper = styled.div`
display: flex;
align-items: center;
margin-bottom: 0.5rem;
`

View File

@@ -0,0 +1,152 @@
import { memo, useState } from 'react'
import { Trans, useTranslation } from 'react-i18next'
import { StyledForm } from '../../settings.styles'
import { useGlobalStore } from '../../../../../../store/zustand/global'
import { formatNumWithK } from '../../../../../../helpers/utils'
import { useGetCommonFilesTree } from '../../../../../../hooks/use-get-common-files-tree.hook'
import { LoadingView } from '../../../../../../components/loading-view'
import type { ISelectOption } from '../../../../../../components/select-option'
import { SelectOption } from '../../../../../../components/select-option'
import { useTokenNum } from '../../../../../../hooks/use-token-num.hook'
import { SelectWrapper, StyledBadge, StyledVSCodeCheckbox } from './context-settings.styles'
export interface ContextSettingsProps {
rootPath: string
}
export const ContextSettings = memo((props: ContextSettingsProps) => {
const { rootPath } = props
const { t } = useTranslation()
const {
provideFileInfoToGptMap,
checkedFilePaths,
ideOpeningFilePaths,
updateProvideFileInfoToGptMap,
} = useGlobalStore()
const { filaPathsPromptTokenNum, ideOpeningFileTokenNum, ideActiveFileTokenNum, checkedFilesContentPromptTokenNum } = useTokenNum()
const { isLoading } = useGetCommonFilesTree({
rootPath,
})
const handleProvideChange = (e: any, key: keyof typeof provideFileInfoToGptMap) => {
const checked = (e.target as HTMLInputElement).checked
updateProvideFileInfoToGptMap({
[key]: checked,
})
}
const isProvideIdeFiles = provideFileInfoToGptMap.openingIdeFileContents || provideFileInfoToGptMap.activeIdeFileContents
const ideFileAsPromptOptions: ISelectOption<keyof typeof provideFileInfoToGptMap>[] = [{
label: <Trans
t={t}
i18nKey={'chat_page.context_settings_opening_ide_file_contents_checkbox_tips'}
values={{
fileNum: ideOpeningFilePaths.length,
tokenNum: formatNumWithK(ideOpeningFileTokenNum),
}}
components={{
FileNumWrapper: <StyledBadge></StyledBadge>,
TokenNumWrapper: <StyledBadge></StyledBadge>,
}}
/>,
value: 'openingIdeFileContents',
}, {
label: <Trans
t={t}
i18nKey={'chat_page.context_settings_active_ide_file_contents_checkbox_tips'}
values={{
tokenNum: formatNumWithK(ideActiveFileTokenNum),
}}
components={{
TokenNumWrapper: <StyledBadge></StyledBadge>,
}}
/>,
value: 'activeIdeFileContents',
}]
const [ideOptionActiveValue, setIdeOptionActiveValue] = useState<keyof typeof provideFileInfoToGptMap>(() => {
if (provideFileInfoToGptMap.openingIdeFileContents)
return 'openingIdeFileContents'
return 'activeIdeFileContents'
})
return <StyledForm>
{isLoading && <LoadingView absolute></LoadingView>}
{/* ide opening files or active file */}
<SelectWrapper>
<StyledVSCodeCheckbox
style={{
marginBottom: 0,
}}
checked={isProvideIdeFiles}
onChange={(e) => {
const checked = (e.target as HTMLInputElement).checked
updateProvideFileInfoToGptMap({
[ideOptionActiveValue]: checked,
})
}}
>
{ideFileAsPromptOptions.find(item => item.value === ideOptionActiveValue)?.label}
</StyledVSCodeCheckbox>
<SelectOption
options={ideFileAsPromptOptions}
value={ideOptionActiveValue}
onChange={(_value) => {
const value = _value as keyof typeof provideFileInfoToGptMap
setIdeOptionActiveValue(value)
if (!isProvideIdeFiles)
return
updateProvideFileInfoToGptMap({
openingIdeFileContents: value === 'openingIdeFileContents',
activeIdeFileContents: value === 'activeIdeFileContents',
})
}} />
</SelectWrapper>
{/* selected files */}
<StyledVSCodeCheckbox
checked={provideFileInfoToGptMap.checkedFileContents}
onChange={e => handleProvideChange(e, 'checkedFileContents')}
>
<Trans
t={t}
i18nKey={'chat_page.context_settings_selected_files_checkbox_label'}
values={{
fileNum: checkedFilePaths.length,
tokenNum: formatNumWithK(checkedFilesContentPromptTokenNum),
}}
components={{
FileNumWrapper: <StyledBadge></StyledBadge>,
TokenNumWrapper: <StyledBadge></StyledBadge>,
}}
></Trans>
</StyledVSCodeCheckbox>
{/* all file paths */}
<StyledVSCodeCheckbox
checked={provideFileInfoToGptMap.allFilePaths}
onChange={e => handleProvideChange(e, 'allFilePaths')}
>
<Trans
t={t}
i18nKey={'chat_page.context_settings_all_file_paths_checkbox_label'}
values={{
tokenNum: formatNumWithK(filaPathsPromptTokenNum),
}}
components={{
TokenNumWrapper: <StyledBadge></StyledBadge>,
}}
></Trans>
</StyledVSCodeCheckbox>
</StyledForm>
})
ContextSettings.displayName = 'ContextSettings'

View File

@@ -1,6 +1,6 @@
import { ChatModelType, getModelConfigTypeSchema } from '@nicepkg/gpt-runner-shared/common'
import type { BaseModelConfig, SingleFileConfig } from '@nicepkg/gpt-runner-shared/common'
import { memo, useCallback, useEffect, useMemo, useState } from 'react'
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
import type { ReactNode } from 'react'
import type { Path, UseFormReturn } from 'react-hook-form'
import { useForm } from 'react-hook-form'
@@ -30,13 +30,13 @@ function BaseModelSettings_<FormData extends BaseModelConfig>(props: BaseModelSe
const { singleFileConfig, formConfig } = props
const { t } = useTranslation()
const { modelOverrideConfig, updateModelOverrideConfig } = useGlobalStore()
const { overrideModelsConfig, updateOverrideModelsConfig } = useGlobalStore()
const currentModel = singleFileConfig?.model as FormData | undefined
const currentModelType = currentModel?.type || ChatModelType.Openai
const currentModelOverrideConfig = useMemo(() => {
return (modelOverrideConfig[currentModelType] || {}) as FormData
}, [modelOverrideConfig[currentModelType]])
return (overrideModelsConfig[currentModelType] || {}) as FormData
}, [overrideModelsConfig[currentModelType]])
const currentFormNames = useMemo(() => formConfig.map(item => item.name), [formConfig])
@@ -52,16 +52,17 @@ function BaseModelSettings_<FormData extends BaseModelConfig>(props: BaseModelSe
const { setValue, watch } = useFormReturns
const updateModelOverrideConfigFromCheckMap = useCallback((formData: FormData) => {
const updateOverrideModelsConfigFromCheckMap = useCallback((formData: FormData, _checkedMap?: Record<keyof FormData, boolean>) => {
const checkedValues = {} as FormData
const finalCheckedMap = _checkedMap || checkedMap
Object.keys(checkedMap).forEach((key) => {
Object.keys(finalCheckedMap).forEach((key) => {
const formName = key as keyof FormData
if (checkedMap[formName] === true)
if (finalCheckedMap[formName] === true)
checkedValues[formName] = formData?.[formName] as any
})
updateModelOverrideConfig(preState => ({
updateOverrideModelsConfig(preState => ({
...preState,
[currentModelType]: {
...checkedValues,
@@ -71,14 +72,19 @@ function BaseModelSettings_<FormData extends BaseModelConfig>(props: BaseModelSe
useEffect(() => {
const subscription = watch((formData) => {
updateModelOverrideConfigFromCheckMap(formData as FormData)
updateOverrideModelsConfigFromCheckMap(formData as FormData)
})
return () => subscription.unsubscribe()
}, [watch, updateModelOverrideConfigFromCheckMap])
}, [watch, updateOverrideModelsConfigFromCheckMap])
const isInitCheckMap = useRef(false)
useEffect(() => {
// update checked map
if (isInitCheckMap.current || !singleFileConfig?.model || !currentModelOverrideConfig)
return
isInitCheckMap.current = true
// init checked map
const initCheckedMap = Object.keys(checkedMap).reduce((prev, key) => {
const formName = key as keyof FormData
const isOverride = currentModelOverrideConfig[formName] !== undefined
@@ -98,19 +104,21 @@ function BaseModelSettings_<FormData extends BaseModelConfig>(props: BaseModelSe
if (!isOverride && currentModel?.[formName] !== undefined)
setValue(formName as Path<FormData>, currentModel[formName] as any)
})
}, [singleFileConfig?.model, JSON.stringify(currentModelOverrideConfig)])
}, [isInitCheckMap.current, singleFileConfig?.model, JSON.stringify(currentModelOverrideConfig)])
const buildLabel = (label: string, formName: keyof FormData) => {
return <LabelWrapper>
<VSCodeCheckbox
checked={checkedMap[formName]}
onChange={(e) => {
const checked = (e.target as HTMLInputElement).checked
setCheckedMap(prev => ({
...prev,
[formName]: checked,
}))
updateModelOverrideConfigFromCheckMap(watch())
onClick={(e) => {
const newCheckedMap = {
...checkedMap,
[formName]: !checkedMap[formName],
}
setCheckedMap(newCheckedMap)
updateOverrideModelsConfigFromCheckMap(watch(), newCheckedMap)
e.stopPropagation()
return false
}}
@@ -118,22 +126,22 @@ function BaseModelSettings_<FormData extends BaseModelConfig>(props: BaseModelSe
</LabelWrapper>
}
const isAllChecked = Object.values(checkedMap).every(Boolean)
return <StyledForm>
<VSCodeCheckbox
style={{
marginBottom: '1rem',
}}
checked={Object.values(checkedMap).every(Boolean)}
onChange={(e) => {
const checked = (e.target as HTMLInputElement).checked
setCheckedMap((prev) => {
return Object.fromEntries(Object.keys(prev).map(key => [key, checked])) as Record<keyof FormData, boolean>
})
updateModelOverrideConfigFromCheckMap(watch())
checked={isAllChecked}
onClick={(e) => {
const newCheckedMap = Object.fromEntries(Object.keys(checkedMap).map(key => [key, !isAllChecked])) as Record<keyof FormData, boolean>
setCheckedMap(newCheckedMap)
updateOverrideModelsConfigFromCheckMap(watch(), newCheckedMap)
e.stopPropagation()
}}
>
Override All Settings
{t('chat_page.override_all_settings')}
</VSCodeCheckbox>
{formConfig.map((formItemConfig, index) => {

View File

@@ -0,0 +1,106 @@
import { ChatModelType, ServerStorageName, getModelConfigTypeSchema } from '@nicepkg/gpt-runner-shared/common'
import type { SingleFileConfig } from '@nicepkg/gpt-runner-shared/common'
import { memo, useEffect } from 'react'
import type { ReactNode } from 'react'
import { useForm } from 'react-hook-form'
import type { Path, UseFormReturn } from 'react-hook-form'
import { useTranslation } from 'react-i18next'
import { useMutation, useQuery } from '@tanstack/react-query'
import { zodResolver } from '@hookform/resolvers/zod'
import { toast } from 'react-hot-toast'
import { VSCodeButton } from '@vscode/webview-ui-toolkit/react'
import { StyledForm, StyledFormItem } from '../../settings.styles'
import { getServerStorage, saveServerStorage } from '../../../../../../networks/server-storage'
import { IS_SAFE } from '../../../../../../helpers/constant'
import { useLoading } from '../../../../../../hooks/use-loading.hook'
export interface BaseSecretsSettingsFormItemBuildViewState<FormData extends Record<string, any>> {
useFormReturns: UseFormReturn<FormData, any, undefined>
}
export interface BaseSecretsSettingsFormItemConfig<FormData extends Record<string, any>> {
name: keyof FormData
buildView: (state: BaseSecretsSettingsFormItemBuildViewState<FormData>) => ReactNode
}
export interface BaseSecretsSettingsProps<FormData extends Record<string, any>> {
singleFileConfig?: SingleFileConfig
formConfig: BaseSecretsSettingsFormItemConfig<FormData>[]
}
function BaseSecretsSettings_<FormData extends Record<string, any>>(props: BaseSecretsSettingsProps<FormData>) {
const { singleFileConfig, formConfig } = props
const { t } = useTranslation()
const { setLoading } = useLoading()
const currentModelType = singleFileConfig?.model?.type || ChatModelType.Openai
const { data: querySecretsRes } = useQuery({
queryKey: ['secrets', currentModelType],
enabled: !!currentModelType,
queryFn: () => getServerStorage({
storageName: ServerStorageName.SecretsConfig,
key: currentModelType!,
}),
})
const { mutateAsync: saveSecrets } = useMutation({
mutationFn: (value: FormData) => saveServerStorage({
storageName: ServerStorageName.SecretsConfig,
key: currentModelType,
value,
}),
})
const remoteSecrets = querySecretsRes?.data?.value as FormData | undefined
const useFormReturns = useForm<FormData>({
mode: 'onBlur',
resolver: zodResolver(getModelConfigTypeSchema(currentModelType, 'secrets')),
})
const { handleSubmit, setValue } = useFormReturns
useEffect(() => {
if (remoteSecrets) {
Object.keys(remoteSecrets).forEach((key) => {
setValue(key as Path<FormData>, remoteSecrets[key as keyof FormData])
})
}
}, [remoteSecrets])
const onSubmit = async (data: FormData) => {
setLoading(true)
try {
await saveSecrets(data)
toast.success(t('chat_page.toast_save_success'))
}
finally {
setLoading(false)
}
}
return <StyledForm onSubmit={handleSubmit(onSubmit)}>
{formConfig.map((formItemConfig, index) => {
const buildViewState: BaseSecretsSettingsFormItemBuildViewState<FormData> = {
useFormReturns,
}
return <StyledFormItem key={index}>
{formItemConfig.buildView(buildViewState)}
</StyledFormItem>
})}
<VSCodeButton
disabled={!IS_SAFE}
appearance='primary'
type='submit'
>
{IS_SAFE ? t('chat_page.save_btn') : t('chat_page.disabled_save_secrets_config_btn')}
</VSCodeButton>
</StyledForm>
}
BaseSecretsSettings_.displayName = 'BaseSecretsSettings'
export const BaseSecretsSettings = memo(BaseSecretsSettings_)

View File

@@ -42,7 +42,7 @@ export const ModelSettings: FC<ModelSettingsProps> = memo((props) => {
const modelTypeViewMap: Record<ChatModelType, Record<ModelSettingsViewType, () => ReactNode>> = {
[ChatModelType.Openai]: {
secrets: () => <OpenaiSecretsSettings />,
secrets: () => <OpenaiSecretsSettings singleFileConfig={resolvedSingleFileConfig} />,
model: () => <OpenaiModelSettings singleFileConfig={resolvedSingleFileConfig} />,
title: () => <>OpenAI</>,
},

View File

@@ -6,7 +6,7 @@ import { HookFormInput } from '../../../../../../../components/hook-form/hook-fo
import { type ISelectOption, SelectOption } from '../../../../../../../components/select-option'
import { BaseModelSettings, type BaseModelSettingsFormItemConfig } from '../base-model-settings'
export interface FormData extends Pick<OpenaiModelConfig, 'modelName' | 'temperature' | 'maxTokens' | 'topP' | 'frequencyPenalty' | 'presencePenalty'> {
interface FormData extends Pick<OpenaiModelConfig, 'modelName' | 'temperature' | 'maxTokens' | 'topP' | 'frequencyPenalty' | 'presencePenalty'> {
}
@@ -41,14 +41,11 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) =>
return <>
<HookFormInput
name="modelName"
label={buildLabel('Model Name')}
label={buildLabel(t('chat_page.openai_model_name'))}
labelInLeft
placeholder={''}
errors={formState.errors}
control={control}
style={{
width: '100%',
}}
/>
<SelectOption
options={modelTipOptions}
@@ -65,15 +62,12 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) =>
return <>
<HookFormInput
name="temperature"
label={buildLabel('Temperature')}
label={buildLabel(t('chat_page.openai_temperature'))}
labelInLeft
isNumber
placeholder={'0 ~ 1'}
errors={formState.errors}
control={control}
style={{
width: '100%',
}}
/>
</>
},
@@ -84,16 +78,13 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) =>
return <>
<HookFormInput
name="maxTokens"
label={buildLabel('Max Reply Tokens')}
label={buildLabel(t('chat_page.openai_max_tokens'))}
labelInLeft
isNumber
minNumber={0}
placeholder={'0 ~ 2048'}
errors={formState.errors}
control={control}
style={{
width: '100%',
}}
/>
</>
},
@@ -104,7 +95,7 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) =>
return <>
<HookFormInput
name="topP"
label={buildLabel('Top P')}
label={buildLabel(t('chat_page.openai_top_p'))}
labelInLeft
minNumber={0}
maxNumber={1}
@@ -112,9 +103,6 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) =>
isNumber
errors={formState.errors}
control={control}
style={{
width: '100%',
}}
/>
</>
},
@@ -125,7 +113,7 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) =>
return <>
<HookFormInput
name="frequencyPenalty"
label={buildLabel('Frequency Penalty')}
label={buildLabel(t('chat_page.openai_frequency_penalty'))}
labelInLeft
isNumber
minNumber={-2}
@@ -133,9 +121,6 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) =>
placeholder={'-2 ~ 2'}
errors={formState.errors}
control={control}
style={{
width: '100%',
}}
/>
</>
},
@@ -146,7 +131,7 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) =>
return <>
<HookFormInput
name="presencePenalty"
label={buildLabel('Presence Penalty')}
label={buildLabel(t('chat_page.openai_presence_penalty'))}
labelInLeft
isNumber
minNumber={-2}
@@ -154,9 +139,6 @@ export const OpenaiModelSettings: FC<OpenaiModelSettingsProps> = memo((props) =>
placeholder={'-2 ~ 2'}
errors={formState.errors}
control={control}
style={{
width: '100%',
}}
/>
</>
},

View File

@@ -1,109 +1,73 @@
import { DEFAULT_OPENAI_API_BASE_PATH, type OpenaiSecrets, SecretStorageKey, ServerStorageName } from '@nicepkg/gpt-runner-shared/common'
import { useMutation, useQuery } from '@tanstack/react-query'
import { type FC, memo, useEffect } from 'react'
import { useForm } from 'react-hook-form'
import { VSCodeButton, VSCodeLink } from '@vscode/webview-ui-toolkit/react'
import { DEFAULT_OPENAI_API_BASE_PATH } from '@nicepkg/gpt-runner-shared/common'
import type { OpenaiSecrets, SingleFileConfig } from '@nicepkg/gpt-runner-shared/common'
import { type FC, memo } from 'react'
import { VSCodeLink } from '@vscode/webview-ui-toolkit/react'
import { useTranslation } from 'react-i18next'
import toast from 'react-hot-toast'
import { getServerStorage, saveServerStorage } from '../../../../../../../networks/server-storage'
import { useLoading } from '../../../../../../../hooks/use-loading.hook'
import { HookFormInput } from '../../../../../../../components/hook-form/hook-form-input'
import { HookFormTextarea } from '../../../../../../../components/hook-form/hook-form-textarea'
import { IS_SAFE } from '../../../../../../../helpers/constant'
import { StyledForm, StyledFormItem } from '../../../settings.styles'
import { BaseSecretsSettings, type BaseSecretsSettingsFormItemConfig } from '../base-secrets-settings'
export interface FormData extends Pick<OpenaiSecrets, 'apiKey' | 'accessToken' | 'basePath'> {
interface FormData extends Pick<OpenaiSecrets, 'apiKey' | 'accessToken' | 'basePath'> {
}
export const OpenaiSecretsSettings: FC = memo(() => {
export interface OpenaiSecretsSettingsProps {
singleFileConfig: SingleFileConfig
}
export const OpenaiSecretsSettings: FC<OpenaiSecretsSettingsProps> = memo((props) => {
const { singleFileConfig } = props
const { t } = useTranslation()
const { setLoading } = useLoading()
const { data: querySecretsRes } = useQuery({
queryKey: ['secrets', SecretStorageKey.Openai],
queryFn: () => getServerStorage({
storageName: ServerStorageName.SecretsConfig,
key: SecretStorageKey.Openai,
}),
})
const { mutateAsync: saveSecrets } = useMutation({
mutationKey: ['secrets', SecretStorageKey.Openai],
mutationFn: (value: FormData) => saveServerStorage({
storageName: ServerStorageName.SecretsConfig,
key: SecretStorageKey.Openai,
value,
}),
})
const formConfig: BaseSecretsSettingsFormItemConfig<FormData>[] = [
{
name: 'apiKey',
buildView: ({ useFormReturns: { control, formState } }) => {
return <>
<HookFormInput
label={t('chat_page.openai_api_key')}
placeholder={t('chat_page.openai_api_key_placeholder')}
name="apiKey"
errors={formState.errors}
control={control}
type="password"
/>
</>
},
},
{
name: 'basePath',
buildView: ({ useFormReturns: { control, formState } }) => {
return <>
<HookFormInput
label={t('chat_page.openai_api_base_path')}
placeholder={DEFAULT_OPENAI_API_BASE_PATH}
name="basePath"
errors={formState.errors}
control={control}
/>
</>
},
}, {
name: 'accessToken',
buildView: ({ useFormReturns: { control, formState } }) => {
return <>
<HookFormTextarea
label={t('chat_page.openai_access_token')}
name="accessToken"
placeholder={t('chat_page.openai_access_token_placeholder')}
errors={formState.errors}
control={control}
/>
<div>
{t('chat_page.openai_get_access_token_tips')} <VSCodeLink href="https://chat.openai.com/api/auth/session" target="_blank" rel="noreferrer">https://chat.openai.com/api/auth/session</VSCodeLink>
</div>
</>
},
},
]
const remoteSecrets = querySecretsRes?.data?.value as OpenaiSecrets | undefined
const { handleSubmit, formState, control, setValue } = useForm<FormData>({
mode: 'onBlur',
})
useEffect(() => {
if (remoteSecrets) {
setValue('apiKey', remoteSecrets.apiKey || '')
setValue('accessToken', remoteSecrets.accessToken || '')
setValue('basePath', remoteSecrets.basePath || '')
}
}, [remoteSecrets])
const onSubmit = async (data: FormData) => {
setLoading(true)
try {
await saveSecrets(data)
toast.success(t('chat_page.toast_save_success'))
}
finally {
setLoading(false)
}
}
return <StyledForm onSubmit={handleSubmit(onSubmit)}>
<StyledFormItem key={0}>
<HookFormInput
label={t('chat_page.openai_api_key')}
placeholder={t('chat_page.openai_api_key_placeholder')}
name="apiKey"
errors={formState.errors}
control={control}
type="password"
/>
</StyledFormItem>
<StyledFormItem key={1}>
<HookFormInput
label={t('chat_page.openai_api_base_path')}
placeholder={DEFAULT_OPENAI_API_BASE_PATH}
name="basePath"
errors={formState.errors}
control={control}
/>
</StyledFormItem>
<StyledFormItem key={2}>
<HookFormTextarea
label={t('chat_page.openai_access_token')}
name="accessToken"
placeholder={t('chat_page.openai_access_token_placeholder')}
errors={formState.errors}
control={control}
/>
<div>
{t('chat_page.openai_get_access_token_tips')} <VSCodeLink href="https://chat.openai.com/api/auth/session" target="_blank" rel="noreferrer">https://chat.openai.com/api/auth/session</VSCodeLink>
</div>
</StyledFormItem>
<VSCodeButton
disabled={!IS_SAFE}
appearance='primary'
type='submit'
>
{IS_SAFE ? t('chat_page.save_btn') : t('chat_page.disabled_save_secrets_config_btn')}
</VSCodeButton>
</StyledForm>
return <BaseSecretsSettings singleFileConfig={singleFileConfig} formConfig={formConfig} />
})
OpenaiSecretsSettings.displayName = 'OpenaiSecretsSettings'

View File

@@ -38,10 +38,12 @@ export const StyledForm = styled.form`
display: flex;
flex-direction: column;
margin: 1rem;
position: relative;
`
// One labeled form field laid out vertically; fills the row width and
// leaves spacing before the next field.
export const StyledFormItem = styled.div`
display: flex;
flex-direction: column;
margin-bottom: 1rem;
width: 100%;
`

View File

@@ -5,19 +5,23 @@ import { PopoverMenu } from '../../../../components/popover-menu'
import { IconButton } from '../../../../components/icon-button'
import { ChatPanelPopoverTreeWrapper } from '../chat-panel/chat-panel.styles'
import { useIsMobile } from '../../../../hooks/use-is-mobile.hook'
import { TopToolbarBlank, TopToolbarWrapper } from './top-toolbar.styles'
import type { UseTokenNumProps } from '../../../../hooks/use-token-num.hook'
import { useTokenNum } from '../../../../hooks/use-token-num.hook'
import { formatNumWithK } from '../../../../helpers/utils'
import { TopToolbarBlank, TopToolbarLeft, TopToolbarRight, TopToolbarWrapper } from './top-toolbar.styles'
export interface TopToolbarProps {
export interface TopToolbarProps extends UseTokenNumProps {
settingsView?: React.ReactNode
configInfoView?: React.ReactNode
aboutView?: React.ReactNode
}
export const TopToolbar = memo(forwardRef<HTMLDivElement, TopToolbarProps>((props, ref) => {
const { settingsView, configInfoView, aboutView } = props
const { settingsView, configInfoView, aboutView, ...useTokenNumProps } = props
const { t } = useTranslation()
const isMobile = useIsMobile()
const { totalTokenNum } = useTokenNum(useTokenNumProps)
const popMenus: {
text: string
@@ -32,54 +36,62 @@ export const TopToolbar = memo(forwardRef<HTMLDivElement, TopToolbarProps>((prop
menuView: settingsView,
}, {
text: t('chat_page.settings_tab_config_info'),
alwaysShowText: true,
alwaysShowText: !isMobile,
iconClassName: 'codicon-gist',
menuView: configInfoView,
}, {
text: t('chat_page.settings_tab_about'),
alwaysShowText: true,
alwaysShowText: !isMobile,
iconClassName: 'codicon-info',
menuView: aboutView,
}]
return <>
<TopToolbarWrapper ref={ref}>
{popMenus.map((popMenu, index) => {
const { text, alwaysShowText, iconClassName, menuView, menuProps } = popMenu
<TopToolbarLeft>
{popMenus.map((popMenu, index) => {
const { text, alwaysShowText, iconClassName, menuView, menuProps } = popMenu
return <PopoverMenu
key={index}
clickMode
xPosition='center'
yPosition='bottom'
menuMaskStyle={{
marginLeft: '0',
marginRight: '0',
paddingTop: '0.5rem',
}}
menuStyle={{
border: isMobile ? 'none' : '',
width: isMobile ? '100vw' : '',
}}
minusHeightSpace={isMobile ? 10 : 100}
buildChildrenSlot={({ isHovering }) => {
return <IconButton
text={text}
iconClassName={iconClassName}
hoverShowText={!alwaysShowText && !isHovering}
style={{
paddingLeft: '0.5rem',
}}
></IconButton>
}}
buildMenuSlot={() => {
return <ChatPanelPopoverTreeWrapper>
{menuView}
</ChatPanelPopoverTreeWrapper>
}}
{...menuProps}
/>
})}
return <PopoverMenu
key={index}
clickMode
xPosition='center'
yPosition='bottom'
menuMaskStyle={{
marginLeft: '0',
marginRight: '0',
paddingTop: '0.5rem',
}}
menuStyle={{
border: isMobile ? 'none' : '',
width: isMobile ? '100vw' : '',
}}
minusHeightSpace={isMobile ? 10 : 100}
buildChildrenSlot={({ isHovering }) => {
return <IconButton
text={text}
iconClassName={iconClassName}
hoverShowText={!alwaysShowText && !isHovering}
style={{
paddingLeft: '0.5rem',
}}
></IconButton>
}}
buildMenuSlot={() => {
return <ChatPanelPopoverTreeWrapper>
{menuView}
</ChatPanelPopoverTreeWrapper>
}}
{...menuProps}
/>
})}
</TopToolbarLeft>
<TopToolbarRight>
<div title="Tokens" style={{ marginRight: '0.5rem' }}>
{(isMobile ? '' : 'Tokens: ') + formatNumWithK(totalTokenNum)}
</div>
</TopToolbarRight>
</TopToolbarWrapper>
<TopToolbarBlank />
</>

View File

@@ -10,10 +10,25 @@ export const TopToolbarWrapper = styled.div`
width: 100%;
height: ${toolbarHeight};
align-items: center;
justify-content: space-between;
font-size: var(--type-ramp-base-font-size);
color: var(--foreground);
background-color: var(--panel-view-background);
border-bottom: 1px solid var(--panel-view-border);
`
// Left cluster of the top toolbar (popover menu buttons); does not shrink
// when the toolbar gets crowded.
export const TopToolbarLeft = styled.div`
display: flex;
align-items: center;
flex-shrink: 0;
`
// Right cluster of the top toolbar (e.g. the token counter); does not
// shrink when the toolbar gets crowded.
export const TopToolbarRight = styled.div`
display: flex;
align-items: center;
flex-shrink: 0;
`
export const TopToolbarBlank = styled.div`
flex-shrink: 0;
width: 100%;

View File

@@ -16,6 +16,7 @@ import { DragResizeView } from '../../components/drag-resize-view'
import { fetchProjectInfo } from '../../networks/config'
import { useEmitBind } from '../../hooks/use-emit-bind.hook'
import { useSize } from '../../hooks/use-size.hook'
import { useGetCommonFilesTree } from '../../hooks/use-get-common-files-tree.hook'
import { ContentWrapper, StyledVSCodePanels } from './chat.styles'
import { ChatSidebar } from './components/chat-sidebar'
import { ChatPanel } from './components/chat-panel'
@@ -48,8 +49,15 @@ const Chat: FC = memo(() => {
queryKey: ['fetchProjectInfo'],
queryFn: () => fetchProjectInfo(),
})
const rootPath = getGlobalConfig().rootPath
// sometimes the file tree popover menu is hidden at mount
// and the store is not updated, so we need to update it
useGetCommonFilesTree({
rootPath,
})
useEmitBind([rootPath])
// when active chat id change, change tab active id
@@ -217,6 +225,8 @@ const Chat: FC = memo(() => {
<FlexColumn style={{ width: '100%', height: '100%' }}>
<TopToolbar
ref={toolbarRef}
rootPath={rootPath}
chatIdOrChatInstance={chatInstance}
settingsView={renderSettings(true, SettingsTabId.Settings)}
configInfoView={renderSettings(true, SettingsTabId.ConfigInfo)}
aboutView={renderSettings(true, SettingsTabId.About)}

View File

@@ -1,5 +1,5 @@
import type { StateCreator } from 'zustand'
import type { ChatModelTypeMap, SingleChat } from '@nicepkg/gpt-runner-shared/common'
import type { PartialChatModelTypeMap, SingleChat } from '@nicepkg/gpt-runner-shared/common'
import { ChatMessageStatus, ChatRole, STREAM_DONE_FLAG, travelTree } from '@nicepkg/gpt-runner-shared/common'
import { v4 as uuidv4 } from 'uuid'
import type { GetState } from '../types'
@@ -16,7 +16,7 @@ export enum GenerateAnswerType {
export interface ChatSlice {
activeChatId: string
chatInstances: SingleChat[]
modelOverrideConfig: Partial<ChatModelTypeMap>
overrideModelsConfig: PartialChatModelTypeMap
updateActiveChatId: (activeChatId: string) => void
/**
@@ -39,7 +39,8 @@ export interface ChatSlice {
generateChatAnswer: (chatId: string, type?: GenerateAnswerType) => Promise<void>
regenerateLastChatAnswer: (chatId: string) => Promise<void>
stopGeneratingChatAnswer: (chatId: string) => void
updateModelOverrideConfig: (modelOverrideConfig: Partial<ChatModelTypeMap> | ((oldModelOverrideConfig: Partial<ChatModelTypeMap>) => Partial<ChatModelTypeMap>)) => void
updateOverrideModelsConfig: (overrideModelsConfig: PartialChatModelTypeMap | ((oldModelOverrideConfig: PartialChatModelTypeMap) => PartialChatModelTypeMap)) => void
getContextFilePaths: () => string[]
}
export type ChatState = GetState<ChatSlice>
@@ -48,7 +49,7 @@ function getInitialState() {
return {
activeChatId: '',
chatInstances: [],
modelOverrideConfig: {},
overrideModelsConfig: {},
} satisfies ChatState
}
@@ -267,8 +268,8 @@ export const createChatSlice: StateCreator<
const appendSystemPrompt = (() => {
let result = ''
if (state.provideFilePathsTreePromptToGpt)
result += `\n${state.filePathsTreePrompt}`
if (state.provideFileInfoToGptMap.allFilePaths)
result += `\n${state.provideFileInfoPromptMap.allFilePathsPrompt}`
return result
})()
@@ -283,13 +284,17 @@ export const createChatSlice: StateCreator<
chatIdAbortCtrlMap.set(chatId, abortCtrl)
const contextFilePaths = state.getContextFilePaths()
await fetchLlmStream({
signal: abortCtrl.signal,
messages: sendMessages,
prompt: sendInputtingPrompt,
appendSystemPrompt,
singleFilePath,
contextFilePaths: state.checkedFilePaths,
contextFilePaths,
editingFilePath: state.ideActiveFilePath,
overrideModelsConfig: state.overrideModelsConfig,
rootPath: getGlobalConfig().rootPath,
onError(e) {
console.error('fetchLlmStream error:', e)
@@ -342,10 +347,26 @@ export const createChatSlice: StateCreator<
status: ChatMessageStatus.Success,
}, false)
},
updateModelOverrideConfig(modelOverrideConfig) {
updateOverrideModelsConfig(overrideModelsConfig) {
const state = get()
const finalModelOverrideConfig = typeof modelOverrideConfig === 'function' ? modelOverrideConfig(state.modelOverrideConfig) : modelOverrideConfig
const finalModelOverrideConfig = typeof overrideModelsConfig === 'function' ? overrideModelsConfig(state.overrideModelsConfig) : overrideModelsConfig
set({ modelOverrideConfig: finalModelOverrideConfig })
set({ overrideModelsConfig: finalModelOverrideConfig })
},
// Collect the file paths to send to the LLM as context, driven by the
// provideFileInfoToGptMap toggles. Returns a deduplicated list.
getContextFilePaths() {
const state = get()
const contextPaths: string[] = []
const { checkedFileContents, activeIdeFileContents, openingIdeFileContents } = state.provideFileInfoToGptMap
// files the user checked in the sidebar file tree
if (checkedFileContents)
contextPaths.push(...state.checkedFilePaths)
// the file currently focused in the IDE
// NOTE(review): ideActiveFilePath may be '' — confirm downstream filters out empty paths
if (activeIdeFileContents)
contextPaths.push(state.ideActiveFilePath)
// all files currently open in the IDE
if (openingIdeFileContents)
contextPaths.push(...state.ideOpeningFilePaths)
// dedupe while preserving first-seen order
return [...new Set(contextPaths)]
},
})

View File

@@ -11,36 +11,48 @@ export type FileInfoSidebarTreeItem = FileInfoTreeItem & {
export type FileSidebarTreeItem = TreeItemBaseState<FileInfoSidebarTreeItem>
export interface FileTreeSlice {
filePathsTreePrompt: string
provideFilePathsTreePromptToGpt: boolean
expendedFilePaths: string[]
checkedFilePaths: string[]
excludeFileExts: string[]
ideActiveFilePath: string
ideOpeningFilePaths: string[]
provideIdeOpeningFilePathsToGpt: boolean
provideFileInfoToGptMap: {
allFilePaths: boolean
checkedFileContents: boolean
activeIdeFileContents: boolean
openingIdeFileContents: boolean
}
provideFileInfoPromptMap: {
allFilePathsPrompt: string
}
updateExcludeFileExts: (excludeFileExts: string[] | ((oldExcludeFileExts: string[]) => string[])) => void
updateProvideFilePathsTreePromptToGpt: (provideFilePathsTreePromptToGpt: boolean) => void
updateFilePathsTreePrompt: (promptOrFileTreeItem: string | FileSidebarTreeItem[]) => void
updateExpendedFilePaths: (expendedFilePaths: string[] | ((oldExpendedFilePaths: string[]) => string[])) => void
updateCheckedFilePaths: (checkedFilePaths: string[] | ((oldCheckedFilePaths: string[]) => string[])) => void
updateIdeActiveFilePath: (ideActiveFilePath: string) => void
updateIdeOpeningFilePaths: (ideOpeningFilePaths: string[] | ((oldIdeOpeningFilePaths: string[]) => string[])) => void
updateProvideIdeOpeningFilePathsToGpt: (provideIdeOpeningFilePathsToGpt: boolean) => void
updateProvideFileInfoToGptMap: (provideFileInfoToGptMap: Partial<FileTreeSlice['provideFileInfoToGptMap']>) => void
updateProvideFileInfoPromptMap: (provideFileInfoPromptMap: Partial<FileTreeSlice['provideFileInfoPromptMap']>) => void
updateAllFilePathsPrompt: (allFilePathsPromptOrFileTreeItem: string | FileSidebarTreeItem[]) => void
}
export type FileTreeState = GetState<FileTreeSlice>
function getInitialState() {
return {
filePathsTreePrompt: '',
provideFilePathsTreePromptToGpt: false,
expendedFilePaths: [],
checkedFilePaths: [],
excludeFileExts: [],
ideActiveFilePath: '',
ideOpeningFilePaths: [],
provideIdeOpeningFilePathsToGpt: false,
provideFileInfoToGptMap: {
allFilePaths: false,
checkedFileContents: true,
activeIdeFileContents: false,
openingIdeFileContents: true,
},
provideFileInfoPromptMap: {
allFilePathsPrompt: '',
},
} satisfies FileTreeState
}
@@ -58,10 +70,25 @@ export const createFileTreeSlice: StateCreator<
excludeFileExts: [...new Set(_excludeFileExts)],
})
},
updateProvideFilePathsTreePromptToGpt(provideFilePathsTreePromptToGpt) {
set({ provideFilePathsTreePromptToGpt })
// Shallow-merge a partial toggle map into the existing
// provideFileInfoToGptMap, leaving unspecified flags unchanged.
updateProvideFileInfoToGptMap(provideFileInfoToGptMap) {
set({
provideFileInfoToGptMap: {
...get().provideFileInfoToGptMap,
...provideFileInfoToGptMap,
},
})
},
updateFilePathsTreePrompt(promptOrFileTreeItem) {
// Shallow-merge a partial prompt map into the existing
// provideFileInfoPromptMap, leaving unspecified entries unchanged.
updateProvideFileInfoPromptMap(provideFileInfoPromptMap) {
set({
provideFileInfoPromptMap: {
...get().provideFileInfoPromptMap,
...provideFileInfoPromptMap,
},
})
},
updateAllFilePathsPrompt(promptOrFileTreeItem) {
const state = get()
let result = ''
if (typeof promptOrFileTreeItem === 'string')
@@ -75,7 +102,9 @@ export const createFileTreeSlice: StateCreator<
})
}
set({ filePathsTreePrompt: result })
state.updateProvideFileInfoPromptMap({
allFilePathsPrompt: result,
})
},
updateExpendedFilePaths(expendedFilePaths) {
const result = typeof expendedFilePaths === 'function' ? expendedFilePaths(get().expendedFilePaths) : expendedFilePaths
@@ -92,7 +121,4 @@ export const createFileTreeSlice: StateCreator<
const result = typeof ideOpeningFilePaths === 'function' ? ideOpeningFilePaths(get().ideOpeningFilePaths) : ideOpeningFilePaths
set({ ideOpeningFilePaths: result })
},
updateProvideIdeOpeningFilePathsToGpt(provideIdeOpeningFilePathsToGpt) {
set({ provideIdeOpeningFilePathsToGpt })
},
})

View File

@@ -1,21 +1,30 @@
import type { StateCreator } from 'zustand'
import type { GetState } from '../types'
import { createStore } from '../utils'
import { FileSidebarTreeItem } from '../global/file-tree.slice'
import { BaseResponse, GetCommonFilesResData, travelTree } from '@nicepkg/gpt-runner-shared/common'
import { useGlobalStore } from '../global'
export interface TempSlice {
filesTree: FileSidebarTreeItem[]
fullPathFileMap: Record<string, FileSidebarTreeItem>
filesRelativePaths: string[]
updateFilesTree: (filesTree: FileSidebarTreeItem[], updateFullPathFileMap?: boolean) => void
updateFilesRelativePaths: (filesRelativePaths: string[]) => void
updateFullPathFileMapFromFileTree: (filesTree: FileSidebarTreeItem[]) => void
handleFetchCommonFilesTreeResChange: (fetchCommonFilesTreeRes: BaseResponse<GetCommonFilesResData> | undefined) => void
}
export type TempState = GetState<TempSlice>
function getInitialState() {
return {
filesTree: [],
fullPathFileMap: {},
filesRelativePaths: [],
} satisfies TempState
}
export const createTempSlice: StateCreator<
TempSlice,
[],
@@ -23,14 +32,77 @@ export const createTempSlice: StateCreator<
TempSlice
> = (set, get) => ({
...getInitialState(),
updateFilesRelativePaths(filesRelativePaths: string[]) {
// Replace the stored files tree; optionally rebuild the fullPath -> node
// lookup map from the new tree in the same call.
updateFilesTree(filesTree, updateFullPathFileMap = false) {
const state = get()
if (updateFullPathFileMap)
state.updateFullPathFileMapFromFileTree(filesTree)
set({
filesTree,
})
},
// Store the project-relative file paths, deduplicated via Set.
updateFilesRelativePaths(filesRelativePaths) {
set({
filesRelativePaths: [...new Set(filesRelativePaths)],
})
},
// Rebuild the fullPath -> tree-node lookup map by walking the whole tree;
// nodes without otherInfo are skipped.
updateFullPathFileMapFromFileTree(filesTree) {
const result: Record<string, FileSidebarTreeItem> = {}
travelTree(filesTree, (item) => {
if (item.otherInfo)
result[item.otherInfo.fullPath] = item
})
set({
fullPathFileMap: result,
})
},
// Ingest a fetch-common-files-tree response: derive excludeFileExts,
// convert the server file-info tree into sidebar tree items (restoring the
// user's expanded/checked state), then update the tree, the
// all-file-paths prompt, and the relative-path list.
handleFetchCommonFilesTreeResChange(fetchCommonFilesTreeRes) {
const filesInfoTree = fetchCommonFilesTreeRes?.data?.filesInfoTree
const state = get()
const globalState = useGlobalStore.getState()
// NOTE(review): this guard runs only when excludeFileExts is already
// non-empty — confirm it shouldn't be `!length` (initialize-when-empty)
if (globalState.excludeFileExts.length) {
// update excludeFileExts
const { includeFileExts = [], allFileExts = [] } = fetchCommonFilesTreeRes?.data || {}
const excludeFileExts = allFileExts.filter(ext => !includeFileExts.includes(ext))
globalState.updateExcludeFileExts(excludeFileExts)
}
// nothing to render without a tree payload
if (!filesInfoTree)
return
const filesRelativePaths: string[] = []
const finalFilesSidebarTree = travelTree(filesInfoTree, (item) => {
// preserve the user's previous expand/check state across refetches
const oldIsExpanded = globalState.expendedFilePaths.includes(item.fullPath)
const oldIsChecked = globalState.checkedFilePaths.includes(item.fullPath)
const result: FileSidebarTreeItem = {
id: item.id,
name: item.name,
path: item.fullPath,
isLeaf: item.isFile,
otherInfo: {
...item,
checked: oldIsChecked,
},
isExpanded: oldIsExpanded,
}
// only leaf files contribute to the relative-paths list
item.isFile && filesRelativePaths.push(item.projectRelativePath)
return result
})
// true: also rebuild the fullPath lookup map
state.updateFilesTree(finalFilesSidebarTree, true)
globalState.updateAllFilePathsPrompt(finalFilesSidebarTree)
state.updateFilesRelativePaths(filesRelativePaths)
}
})
export const useTempStore = createStore('TempStore')<TempSlice, any>(
export const useTempStore = createStore('TempStore', false)<TempSlice, any>(
(...args) => ({
...createTempSlice(...args),
})

View File

@@ -16,12 +16,12 @@ export function resetAllState() {
resetStateQueue.forEach(resetState => resetState())
}
export function createStore(devtoolsName: string) {
export function createStore(devtoolsName: string, connectToDevTools = true) {
const newCreate = (store: any) => {
let result: any
// https://github.com/pmndrs/zustand/issues/852#issuecomment-1059783350
if (EnvConfig.get('NODE_ENV') === 'development') {
if (EnvConfig.get('NODE_ENV') === 'development' && connectToDevTools) {
result = create(
devtools(store, {
name: devtoolsName,

View File

@@ -6,8 +6,6 @@ import { Debug, GetGptFileInfoReqParamsSchema, GetGptFilesReqParamsSchema, InitG
import type { ControllerConfig } from '../types'
import { getValidFinalPath } from '../services/valid-path'
const debug = new Debug('gpt-files.controller')
export const gptFilesControllers: ControllerConfig = {
namespacePath: '/gpt-files',
controllers: [
@@ -15,6 +13,7 @@ export const gptFilesControllers: ControllerConfig = {
url: '/',
method: 'get',
handler: async (req, res) => {
const debug = new Debug('gpt-files.controller')
const query = req.query as GetGptFilesReqParams
verifyParamsByZod(query, GetGptFilesReqParamsSchema)

View File

@@ -1,9 +1,8 @@
import type { Request, Response } from 'express'
import type { ChatModelType, ChatStreamReqParams, FailResponse, SingleFileConfig, SuccessResponse } from '@nicepkg/gpt-runner-shared/common'
import { ChatStreamReqParamsSchema, STREAM_DONE_FLAG, buildFailResponse, buildSuccessResponse } from '@nicepkg/gpt-runner-shared/common'
import { ChatStreamReqParamsSchema, Debug, STREAM_DONE_FLAG, buildFailResponse, buildSuccessResponse, toUnixPath } from '@nicepkg/gpt-runner-shared/common'
import { PathUtils, verifyParamsByZod } from '@nicepkg/gpt-runner-shared/node'
import { createFileContext, getSecrets, loadUserConfig, parseGptFile } from '@nicepkg/gpt-runner-core'
import { llmChain } from '../services'
import { createFileContext, getSecrets, llmChain, loadUserConfig, parseGptFile } from '@nicepkg/gpt-runner-core'
import { getValidFinalPath } from '../services/valid-path'
import type { ControllerConfig } from '../types'
@@ -14,6 +13,8 @@ export const llmControllers: ControllerConfig = {
url: '/chat-stream',
method: 'post',
handler: async (req: Request, res: Response) => {
const debug = new Debug('llm.controller')
res.writeHead(200, {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache, no-transform',
@@ -32,6 +33,8 @@ export const llmControllers: ControllerConfig = {
singleFileConfig: singleFileConfigFromParams,
appendSystemPrompt = '',
contextFilePaths,
editingFilePath,
overrideModelsConfig,
rootPath,
} = body
@@ -53,7 +56,10 @@ export const llmControllers: ControllerConfig = {
})
}
const model = singleFileConfig?.model
const model = {
...singleFileConfig?.model,
...overrideModelsConfig?.[singleFileConfig?.model?.type as ChatModelType || ''],
} as SingleFileConfig['model']
const secretFromUserConfig = userConfig.model?.type === model?.type ? userConfig.model?.secrets : undefined
let secretsFromStorage = await getSecrets(model?.type as ChatModelType || null)
@@ -74,21 +80,25 @@ export const llmControllers: ControllerConfig = {
return res.write(`data: ${JSON.stringify(buildFailResponse(options))}\n\n`)
}
let finalSystemPrompt = systemPromptFromParams || singleFileConfig?.systemPrompt || ''
// provide file context
if (contextFilePaths && finalPath) {
const fileContext = await createFileContext({
rootPath: finalPath,
filePaths: contextFilePaths,
})
finalSystemPrompt += `\n${fileContext}\n`
}
finalSystemPrompt += appendSystemPrompt
console.log('debug', process.env.DEBUG)
debug.log('model config', model)
try {
let finalSystemPrompt = systemPromptFromParams || singleFileConfig?.systemPrompt || ''
// provide file context
if (contextFilePaths && finalPath) {
const fileContext = await createFileContext({
rootPath: finalPath,
filePaths: contextFilePaths?.map(toUnixPath),
editingFilePath: toUnixPath(editingFilePath),
})
finalSystemPrompt += `\n${fileContext}\n`
}
finalSystemPrompt += appendSystemPrompt
const chain = await llmChain({
messages,
systemPrompt: finalSystemPrompt,

View File

@@ -3,9 +3,8 @@ import { Debug } from '@nicepkg/gpt-runner-shared/common'
import { sendFailResponse } from '@nicepkg/gpt-runner-shared/node'
import type { NextFunction, Request, Response } from 'express'
const debug = new Debug('middleware.ts')
export function errorHandlerMiddleware(err: Error, req: Request, res: Response, next: NextFunction) {
const debug = new Debug('middleware.ts')
debug.error(String(err))
// console.error(err.stack) // Log the error stack trace

View File

@@ -1 +0,0 @@
export * from '@nicepkg/gpt-runner-core'

View File

@@ -0,0 +1,44 @@
```json
{
"title": "common/i18n helper",
"model": {
"modelName": "gpt-4"
}
}
```
# System Prompt
The user is writing frontend code and wants to use i18n to support multiple languages via the react-i18next library. You can help them write the code.
The user will provide some JSON key-value pairs for you, like:
[en]
"copy_btn": "Copy",
"insert_btn": "Insert",
You should help the user translate these key-value pairs into zh_CN, zh_Hant, ja, and de. You should reply like this:
```md
[zh_CN]
"copy_btn": "复制",
"insert_btn": "插入",
[zh_Hant]
"copy_btn": "複製",
"insert_btn": "插入",
[ja]
"copy_btn": "コピー",
"insert_btn": "挿入",
[de]
"copy_btn": "Kopieren",
"insert_btn": "Einfügen",
```
# User Prompt
[en]

View File

@@ -2,7 +2,7 @@
{
"title": "common/",
"model": {
"modalName": "gpt-3.5-turbo-16k",
"modelName": "gpt-3.5-turbo-16k",
"temperature": 0
}
}