fix(gpt-runner-web): fix bug when removing a chat

This commit is contained in:
JinmingYang
2023-06-27 23:07:10 +08:00
parent e6ceb87f70
commit 32b3a045bf
10 changed files with 131 additions and 35 deletions

View File

@@ -2,6 +2,7 @@ export const MIN_NODE_VERSION = '16.15.0'
export const SECRET_KEY_PLACEHOLDER = '********'
export const STREAM_DONE_FLAG = '[DONE]'
export const GPT_RUNNER_OFFICIAL_FOLDER = '.gpt-runner'
export const DEFAULT_OPENAI_API_BASE_PATH = 'https://api.openai.com/v1'
export const DEFAULT_EXCLUDE_FILES = [
'**/node_modules',

View File

@@ -1,5 +1,6 @@
import { z } from 'zod'
import type { BaseModelConfig, FilterPattern, FormCheckboxGroupConfig, FormFieldBaseConfig, FormInputConfig, FormItemConfig, FormOption, FormRadioGroupConfig, FormSelectConfig, FormTextareaConfig, OpenaiModelConfig, OpenaiSecrets, SingleChatMessage, SingleFileConfig, UserConfig, UserConfigForUser } from '../types'
import { DEFAULT_OPENAI_API_BASE_PATH } from '../helpers'
import { ChatModelTypeSchema, ChatRoleSchema } from './enum.zod'
export const FilterPatternSchema = z.union([
@@ -12,27 +13,28 @@ export const FilterPatternSchema = z.union([
]) satisfies z.ZodType<FilterPattern>
export const BaseModelConfigSchema = z.object({
type: ChatModelTypeSchema,
modelName: z.string().optional(),
type: ChatModelTypeSchema.describe('The type of the model'),
modelName: z.string().optional().describe('The name of the model'),
secrets: z.any().optional().describe('The API secrets config'),
}) satisfies z.ZodType<BaseModelConfig>
export const OpenaiSecretsSchema = z.object({
apiKey: z.string(),
organization: z.string().optional(),
username: z.string().optional(),
password: z.string().optional(),
accessToken: z.string().optional(),
basePath: z.string().optional(),
apiKey: z.string().optional().describe('The OpenAI API key'),
organization: z.string().optional().describe('The OpenAI organization'),
username: z.string().optional().describe('The OpenAI username'),
password: z.string().optional().describe('The OpenAI password'),
accessToken: z.string().optional().describe('The OpenAI access token'),
basePath: z.string().optional().default(DEFAULT_OPENAI_API_BASE_PATH).describe('The Chatgpt base path'),
}) satisfies z.ZodType<OpenaiSecrets>
export const OpenaiModelConfigSchema = BaseModelConfigSchema.extend({
type: z.literal('openai'),
secrets: OpenaiSecretsSchema.optional(),
temperature: z.number().optional(),
maxTokens: z.number().optional(),
topP: z.number().optional(),
frequencyPenalty: z.number().optional(),
presencePenalty: z.number().optional(),
type: z.literal('openai').describe('Use Open AI model'),
secrets: OpenaiSecretsSchema.optional().describe('The OpenAI API secrets config'),
temperature: z.number().optional().describe('The temperature for the OpenAI model'),
maxTokens: z.number().optional().describe('The maximum number of tokens for the OpenAI model'),
topP: z.number().optional().describe('The top P value for the OpenAI model'),
frequencyPenalty: z.number().optional().describe('The frequency penalty for the OpenAI model'),
presencePenalty: z.number().optional().describe('The presence penalty for the OpenAI model'),
}) satisfies z.ZodType<OpenaiModelConfig>
export const OpenaiBaseConfigSchema = OpenaiModelConfigSchema.omit({
@@ -40,12 +42,12 @@ export const OpenaiBaseConfigSchema = OpenaiModelConfigSchema.omit({
})
export const UserConfigSchema = z.object({
model: OpenaiModelConfigSchema.optional(),
rootPath: z.string().optional(),
exts: z.array(z.string()).optional().default(['.gpt.md']),
includes: FilterPatternSchema.optional().default(null),
excludes: FilterPatternSchema.optional().default(null),
respectGitIgnore: z.boolean().optional().default(true),
model: OpenaiModelConfigSchema.optional().describe('The LLM model configuration'),
rootPath: z.string().optional().describe('The root path of the project'),
exts: z.array(z.string()).optional().default(['.gpt.md']).describe('The file extensions to be used'),
includes: FilterPatternSchema.optional().default(null).describe('The include patterns for filtering files'),
excludes: FilterPatternSchema.optional().default(null).describe('The exclude patterns for filtering files'),
respectGitIgnore: z.boolean().optional().default(true).describe('Whether to respect .gitignore rules'),
}) satisfies z.ZodType<UserConfig>
export const UserConfigForUserSchema = UserConfigSchema.omit({

View File

@@ -71,6 +71,15 @@
}
}
},
"configurationDefaults": {
"[markdown]": {
"editor.quickSuggestions": {
"other": true,
"comments": false,
"strings": true
}
}
},
"menus": {
"editor/title": [
{
@@ -79,7 +88,17 @@
"icon": "res/logo.svg"
}
]
}
},
"jsonValidation": [
{
"fileMatch": "gptr.config.json",
"url": "./dist/json-schema/user-config.json"
},
{
"fileMatch": "gpt-runner.config.json",
"url": "./dist/json-schema/user-config.json"
}
]
},
"scripts": {
"build": "pnpm esno ./scripts/build.ts",
@@ -98,4 +117,4 @@
"fs-extra": "^11.1.1",
"uuid": "^9.0.0"
}
}
}

View File

@@ -15,8 +15,8 @@ export interface DragDirectionConfig {
boundary: number[]
}
export interface DragResizeViewProps {
initWidth: number
initHeight: number
initWidth?: number
initHeight?: number
dragConfig?: Omit<UserDragConfig, 'axis' | 'bounds'>
dragDirectionConfigs: DragDirectionConfig[]
style?: React.CSSProperties
@@ -50,10 +50,15 @@ export const DragResizeView: FC<DragResizeViewProps> = memo((props) => {
const finalHeight = useMotionValue(initHeight)
useEffect(() => {
if (initWidth === undefined)
return
finalWidth.set(initWidth)
}, [initWidth])
useEffect(() => {
if (initHeight === undefined)
return
finalHeight.set(initHeight)
}, [initHeight])
@@ -80,6 +85,8 @@ export const DragResizeView: FC<DragResizeViewProps> = memo((props) => {
const leftDragLingBind = useDrag(
({ offset }) => {
if (initWidth === undefined)
return
finalWidth.set(initWidth - offset[0])
},
{
@@ -95,6 +102,8 @@ export const DragResizeView: FC<DragResizeViewProps> = memo((props) => {
const rightDragLingBind = useDrag(
({ offset }) => {
if (initWidth === undefined)
return
finalWidth.set(initWidth + offset[0])
},
{
@@ -110,6 +119,8 @@ export const DragResizeView: FC<DragResizeViewProps> = memo((props) => {
const topDragLingBind = useDrag(
({ offset }) => {
if (initHeight === undefined)
return
finalHeight.set(initHeight - offset[1])
},
{
@@ -125,6 +136,8 @@ export const DragResizeView: FC<DragResizeViewProps> = memo((props) => {
const bottomDragLingBind = useDrag(
({ offset }) => {
if (initHeight === undefined)
return
finalHeight.set(initHeight + offset[1])
},
{
@@ -154,7 +167,7 @@ export const DragResizeView: FC<DragResizeViewProps> = memo((props) => {
className={className}
style={{
...style,
width: finalWidth,
width: initWidth !== undefined ? finalWidth : '',
height: finalHeight,
position: 'relative',
}} ref={ref}>

View File

@@ -464,12 +464,11 @@ export const ChatPanel: FC<ChatPanelProps> = memo((props) => {
return <ChatPanelWrapper ref={chatPanelRef}>
<ChatMessagePanel ref={scrollDownRef} {...messagePanelProps}></ChatMessagePanel>
<DragResizeView
initWidth={chatPanelWidth}
initHeight={250}
dragDirectionConfigs={[
{
direction: 'top',
boundary: [-200, 50],
boundary: [-300, 50],
},
]}>
<ChatMessageInput

View File

@@ -1,4 +1,4 @@
import { ChatModelType, type OpenaiSecrets, ServerStorageName } from '@nicepkg/gpt-runner-shared/common'
import { ChatModelType, DEFAULT_OPENAI_API_BASE_PATH, type OpenaiSecrets, ServerStorageName } from '@nicepkg/gpt-runner-shared/common'
import { useMutation, useQuery } from '@tanstack/react-query'
import { type FC, memo, useEffect } from 'react'
import { useForm } from 'react-hook-form'
@@ -81,7 +81,7 @@ export const OpenaiSettings: FC = memo(() => {
<StyledFormItem key={1}>
<HookFormInput
label={t('chat_page.openai_api_base_path')}
placeholder="https://api.openai.com/v1"
placeholder={DEFAULT_OPENAI_API_BASE_PATH}
name="basePath"
errors={formState.errors}
control={control}

View File

@@ -1,7 +1,8 @@
import type { FC } from 'react'
import { Suspense, useEffect } from 'react'
import { Suspense, useEffect, useMemo, useRef } from 'react'
import { Route, HashRouter as Router, Routes, useLocation, useNavigate } from 'react-router-dom'
import { VSCodeProgressRing } from '@vscode/webview-ui-toolkit/react'
import { getSearchParams } from '@nicepkg/gpt-runner-shared/browser'
import Home from './pages/home/index'
import Error404 from './pages/error/404'
import { useLoading } from './hooks/use-loading.hook'
@@ -12,6 +13,23 @@ const HackRouter: FC = () => {
const navigate = useNavigate()
const location = useLocation()
const rootPathFromUrl = useMemo(() => {
const rootPath = getSearchParams('rootPath', location.search)
return rootPath
}, [location.search])
const rootPathHasChanged = useRef(false)
useEffect(() => {
if (!rootPathHasChanged.current) {
// skip the first time
rootPathHasChanged.current = true
return
}
window.location.reload()
}, [rootPathFromUrl])
useEffect(() => {
if (location.pathname === '/')
navigate(getGlobalConfig().initialRoutePath)

View File

@@ -17,6 +17,11 @@ export interface ChatSlice {
activeChatId: string
chatInstances: SingleChat[]
updateActiveChatId: (activeChatId: string) => void
/**
 * Switch the active chat to the next chat that belongs to the same file as the current active chat
 */
switchToNewActiveChatId: (oldActiveInstance?: SingleChat) => void
getChatInstance: (chatId: string) => SingleChat | undefined
getChatInstancesBySingleFilePath: (singleFilePath: string) => SingleChat[]
addChatInstance: (gptFileId: string, instance: Omit<SingleChat, 'id'>) => {
@@ -57,10 +62,28 @@ export const createChatSlice: StateCreator<
updateActiveChatId(activeChatId) {
  // Persist the newly selected chat id into the store state.
  set(() => ({ activeChatId }))
},
/**
 * Switch the active chat to another chat that lives under the same file
 * as the (typically just-removed) active chat.
 *
 * @param _oldActiveInstance - the previously active chat instance; when
 *   omitted it is resolved from the current `activeChatId`.
 */
switchToNewActiveChatId(_oldActiveInstance) {
  const state = get()
  const oldActiveInstance = _oldActiveInstance ?? state.getChatInstance(state.activeChatId)
  const sameLevelChatInstances = state.getChatInstancesBySingleFilePath(oldActiveInstance?.singleFilePath ?? '')

  // Nothing to switch to: either there was no active chat, or the old chat
  // is gone from the registry and its file has no other chats left.
  // Parentheses make the `&&`-binds-tighter-than-`||` grouping explicit.
  if (!oldActiveInstance || (!chatIdChatInstanceMap.has(oldActiveInstance.id) && sameLevelChatInstances.length === 0)) {
    state.updateActiveChatId('')
    return
  }

  // Copy before sorting: `Array.prototype.sort` mutates in place, and
  // `sameLevelChatInstances` is the array stored in the shared
  // singleFilePath -> instances map, which must not be reordered here.
  const nextChatInstance = [...sameLevelChatInstances]
    .sort((a, b) => b.createAt - a.createAt)
    .find(chatInstance => chatInstance.id !== state.activeChatId)

  if (nextChatInstance && nextChatInstance.id !== state.activeChatId)
    state.updateActiveChatId(nextChatInstance.id)
},
getChatInstance(chatId) {
  // Resolve the instance from the id -> chat registry; undefined when absent.
  const instance = chatIdChatInstanceMap.get(chatId)
  return instance
},
getChatInstancesBySingleFilePath(singleFilePath) {
  // An empty path maps to no chats; otherwise fall back to [] when the
  // path has no registered instances.
  return singleFilePath
    ? (singleFilePathChatInstancesMap.get(singleFilePath) ?? [])
    : []
},
addChatInstance(gptFileId, instance) {
@@ -143,7 +166,22 @@ export const createChatSlice: StateCreator<
removeChatInstance(chatId) {
const state = get()
state.updateChatInstances(chatInstances => chatInstances.filter(chatInstance => chatInstance.id !== chatId))
let targetChatInstance: SingleChat | undefined
state.updateChatInstances((chatInstances) => {
const finalChatInstances = chatInstances.filter((chatInstance) => {
const notTargetChatInstance = chatInstance.id !== chatId
if (!notTargetChatInstance)
targetChatInstance = chatInstance
return notTargetChatInstance
})
return finalChatInstances
})
if (targetChatInstance && targetChatInstance.id === state.activeChatId)
state.switchToNewActiveChatId(targetChatInstance)
const nextSidebarTree = travelTree(state.sidebarTree, (item) => {
if (item.id === chatId)

View File

@@ -1,11 +1,9 @@
import path from 'node:path'
import { defineConfig } from '@nicepkg/gpt-runner'
export default defineConfig({
rootPath: path.resolve(__dirname, '../'),
model: {
type: 'openai',
modelName: 'gpt-3.5-turbo-16k-error',
modelName: 'gpt-3.5-turbo-16k',
secrets: {
apiKey: process.env.OPENAI_KEY!,
},

View File

@@ -0,0 +1,8 @@
{
"model": {
"type": "openai",
"modelName": "gpt-3.5",
"frequencyPenalty": 0,
"temperature": 0
}
}