// use-config.ts
  1. import { useCallback, useEffect, useRef, useState } from 'react'
  2. import produce from 'immer'
  3. import useVarList from '../_base/hooks/use-var-list'
  4. import { VarType } from '../../types'
  5. import type { Memory, ValueSelector, Var } from '../../types'
  6. import { useStore } from '../../store'
  7. import {
  8. useIsChatMode,
  9. useNodesReadOnly,
  10. } from '../../hooks'
  11. import type { LLMNodeType } from './types'
  12. import { Resolution } from '@/types/app'
  13. import { useModelListAndDefaultModelAndCurrentProviderAndModel, useTextGenerationCurrentProviderAndModelAndModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'
  14. import {
  15. ModelFeatureEnum,
  16. ModelTypeEnum,
  17. } from '@/app/components/header/account-setting/model-provider-page/declarations'
  18. import useNodeCrud from '@/app/components/workflow/nodes/_base/hooks/use-node-crud'
  19. import useOneStepRun from '@/app/components/workflow/nodes/_base/hooks/use-one-step-run'
  20. import type { PromptItem } from '@/models/debug'
  21. import { RETRIEVAL_OUTPUT_STRUCT } from '@/app/components/workflow/constants'
  22. import { checkHasContextBlock, checkHasHistoryBlock, checkHasQueryBlock } from '@/app/components/base/prompt-editor/constants'
/**
 * Per-node configuration hook for the LLM workflow node.
 *
 * Bridges node CRUD state (`useNodeCrud`) to the LLM settings panel:
 * model selection, prompt templates, context, memory, vision settings,
 * and the single-step ("one step run") debug facilities.
 *
 * @param id - Workflow node id.
 * @param payload - Current node data for this LLM node.
 * @returns Flags, derived state, and change handlers consumed by the panel.
 */
const useConfig = (id: string, payload: LLMNodeType) => {
  const { nodesReadOnly: readOnly } = useNodesReadOnly()
  const isChatMode = useIsChatMode()
  // Default config for this node type from the store (may be empty until loaded).
  const defaultConfig = useStore(s => s.nodesDefaultConfigs)[payload.type]
  // Role prefixes used when a completion model replays chat history as plain text.
  const [defaultRolePrefix, setDefaultRolePrefix] = useState<{ user: string; assistant: string }>({ user: '', assistant: '' })
  const { inputs, setInputs: doSetInputs } = useNodeCrud<LLMNodeType>(id, payload)
  // Wrap setInputs so that enabling memory without an explicit role_prefix
  // backfills the default user/assistant prefixes before persisting.
  const setInputs = useCallback((newInputs: LLMNodeType) => {
    if (newInputs.memory && !newInputs.memory.role_prefix) {
      const newPayload = produce(newInputs, (draft) => {
        draft.memory!.role_prefix = defaultRolePrefix
      })
      doSetInputs(newPayload)
      return
    }
    doSetInputs(newInputs)
  }, [doSetInputs, defaultRolePrefix])
  // Ref mirror of the latest inputs so callbacks (e.g. handleModelChanged)
  // can read fresh state without listing `inputs` in their dependency arrays.
  const inputRef = useRef(inputs)
  useEffect(() => {
    inputRef.current = inputs
  }, [inputs])

  // model
  const model = inputs.model
  const modelMode = inputs.model?.mode
  const isChatModel = modelMode === 'chat'
  const isCompletionModel = !isChatModel

  // Which special prompt blocks (context / history / query) are already
  // referenced in the prompt template.
  // NOTE: prompt_template is PromptItem[] for chat models, a single PromptItem otherwise.
  const hasSetBlockStatus = (() => {
    const promptTemplate = inputs.prompt_template
    const hasSetContext = isChatModel ? (promptTemplate as PromptItem[]).some(item => checkHasContextBlock(item.text)) : checkHasContextBlock((promptTemplate as PromptItem).text)
    if (!isChatMode) {
      // Non-chat (workflow) apps never use history/query blocks.
      return {
        history: false,
        query: false,
        context: hasSetContext,
      }
    }
    if (isChatModel) {
      return {
        history: false,
        query: (promptTemplate as PromptItem[]).some(item => checkHasQueryBlock(item.text)),
        context: hasSetContext,
      }
    }
    else {
      return {
        history: checkHasHistoryBlock((promptTemplate as PromptItem).text),
        query: checkHasQueryBlock((promptTemplate as PromptItem).text),
        context: hasSetContext,
      }
    }
  })()

  // Show a tip when context is enabled but the prompt never references it.
  const shouldShowContextTip = !hasSetBlockStatus.context && inputs.context.enabled

  // Seed the draft with the default prompt template for the target model mode.
  // `passInIsChatMode` overrides the hook-level `isChatModel` when the mode is
  // changing within the same update (see handleModelChanged).
  const appendDefaultPromptConfig = useCallback((draft: LLMNodeType, defaultConfig: any, passInIsChatMode?: boolean) => {
    const promptTemplates = defaultConfig.prompt_templates
    if (passInIsChatMode === undefined ? isChatModel : passInIsChatMode) {
      draft.prompt_template = promptTemplates.chat_model.prompts
    }
    else {
      draft.prompt_template = promptTemplates.completion_model.prompt
      // Completion models need explicit role prefixes to render history.
      setDefaultRolePrefix({
        user: promptTemplates.completion_model.conversation_histories_role.user_prefix,
        assistant: promptTemplates.completion_model.conversation_histories_role.assistant_prefix,
      })
    }
  }, [isChatModel])

  // Initialize prompt_template once the default config arrives; only fires for
  // nodes that have no template yet. Deps intentionally omit `inputs`/`setInputs`.
  useEffect(() => {
    const isReady = defaultConfig && Object.keys(defaultConfig).length > 0
    if (isReady && !inputs.prompt_template) {
      const newInputs = produce(inputs, (draft) => {
        appendDefaultPromptConfig(draft, defaultConfig)
      })
      setInputs(newInputs)
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [defaultConfig, isChatModel])

  const {
    currentProvider,
    currentModel,
  } = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.textGeneration)

  // Apply a model selection; reads from inputRef so the callback identity
  // stays stable across inputs changes.
  const handleModelChanged = useCallback((model: { provider: string; modelId: string; mode?: string }) => {
    const newInputs = produce(inputRef.current, (draft) => {
      draft.model.provider = model.provider
      draft.model.name = model.modelId
      draft.model.mode = model.mode!
      // Switching chat <-> completion changes the prompt template shape,
      // so reseed the defaults for the new mode.
      const isModeChange = model.mode !== inputRef.current.model.mode
      if (isModeChange && defaultConfig && Object.keys(defaultConfig).length > 0)
        appendDefaultPromptConfig(draft, defaultConfig, model.mode === 'chat')
    })
    setInputs(newInputs)
  }, [setInputs, defaultConfig, appendDefaultPromptConfig])

  // Auto-select the workspace default model when the node has none yet.
  useEffect(() => {
    if (currentProvider?.provider && currentModel?.model && !model.provider) {
      handleModelChanged({
        provider: currentProvider?.provider,
        modelId: currentModel?.model,
        mode: currentModel?.model_properties?.mode as string,
      })
    }
  }, [model.provider, currentProvider, currentModel, handleModelChanged])

  const handleCompletionParamsChange = useCallback((newParams: Record<string, any>) => {
    const newInputs = produce(inputs, (draft) => {
      draft.model.completion_params = newParams
    })
    setInputs(newInputs)
  }, [inputs, setInputs])

  const {
    currentModel: currModel,
  } = useTextGenerationCurrentProviderAndModelAndModelList(
    {
      provider: model.provider,
      model: model.name,
    },
  )
  // Vision settings are only shown when the selected model advertises the feature.
  const isShowVisionConfig = !!currModel?.features?.includes(ModelFeatureEnum.vision)

  // variables
  const { handleVarListChange, handleAddVariable } = useVarList<LLMNodeType>({
    inputs,
    setInputs,
  })

  // context
  const handleContextVarChange = useCallback((newVar: ValueSelector | string) => {
    const newInputs = produce(inputs, (draft) => {
      draft.context.variable_selector = newVar as ValueSelector || []
      // Clearing the selector also disables context.
      draft.context.enabled = !!(newVar && newVar.length > 0)
    })
    setInputs(newInputs)
  }, [inputs, setInputs])

  const handlePromptChange = useCallback((newPrompt: PromptItem[] | PromptItem) => {
    const newInputs = produce(inputs, (draft) => {
      draft.prompt_template = newPrompt
    })
    setInputs(newInputs)
  }, [inputs, setInputs])

  // Passing undefined removes memory entirely; setInputs backfills role_prefix
  // when memory is set without one.
  const handleMemoryChange = useCallback((newMemory?: Memory) => {
    const newInputs = produce(inputs, (draft) => {
      draft.memory = newMemory
    })
    setInputs(newInputs)
  }, [inputs, setInputs])

  const handleVisionResolutionChange = useCallback((newResolution: Resolution) => {
    const newInputs = produce(inputs, (draft) => {
      // Lazily create the configs object; detail defaults to high before
      // being overwritten with the chosen resolution below.
      if (!draft.vision.configs) {
        draft.vision.configs = {
          detail: Resolution.high,
        }
      }
      draft.vision.configs.detail = newResolution
    })
    setInputs(newInputs)
  }, [inputs, setInputs])

  // Variable pickers: prompt input vars accept scalars only...
  const filterInputVar = useCallback((varPayload: Var) => {
    return [VarType.number, VarType.string].includes(varPayload.type)
  }, [])

  // ...while the context selector also accepts arrays (e.g. retrieval output).
  const filterVar = useCallback((varPayload: Var) => {
    return [VarType.arrayObject, VarType.array, VarType.string].includes(varPayload.type)
  }, [])

  // single run
  const {
    isShowSingleRun,
    hideSingleRun,
    getInputVars,
    runningStatus,
    handleRun,
    handleStop,
    runInputData,
    setRunInputData,
    runResult,
  } = useOneStepRun<LLMNodeType>({
    id,
    data: inputs,
    // Reserved keys: '#context#' seeds a sample retrieval result,
    // '#files#' holds vision attachments for the debug run.
    defaultRunInputData: {
      '#context#': [RETRIEVAL_OUTPUT_STRUCT],
      '#files#': [],
    },
  })
  // const handleRun = (submitData: Record<string, any>) => {
  //   console.log(submitData)
  //   const res = produce(submitData, (draft) => {
  //     debugger
  //     if (draft.contexts) {
  //       draft['#context#'] = draft.contexts
  //       delete draft.contexts
  //     }
  //     if (draft.visionFiles) {
  //       draft['#files#'] = draft.visionFiles
  //       delete draft.visionFiles
  //     }
  //   })
  //   doHandleRun(res)
  // }

  // User-defined run inputs, excluding the reserved #context#/#files# keys.
  const inputVarValues = (() => {
    const vars: Record<string, any> = {}
    Object.keys(runInputData)
      .filter(key => !['#context#', '#files#'].includes(key))
      .forEach((key) => {
        vars[key] = runInputData[key]
      })
    return vars
  })()

  // Replace the user vars wholesale while preserving the reserved keys.
  const setInputVarValues = useCallback((newPayload: Record<string, any>) => {
    const newVars = {
      ...newPayload,
      '#context#': runInputData['#context#'],
      '#files#': runInputData['#files#'],
    }
    setRunInputData(newVars)
  }, [runInputData, setRunInputData])

  const contexts = runInputData['#context#']
  const setContexts = useCallback((newContexts: string[]) => {
    setRunInputData({
      ...runInputData,
      '#context#': newContexts,
    })
  }, [runInputData, setRunInputData])

  const visionFiles = runInputData['#files#']
  const setVisionFiles = useCallback((newFiles: any[]) => {
    setRunInputData({
      ...runInputData,
      '#files#': newFiles,
    })
  }, [runInputData, setRunInputData])

  // All prompt strings to scan for variable references; sys.query is appended
  // when chat mode + chat model + memory so it surfaces as a debug-run input.
  const allVarStrArr = (() => {
    const arr = isChatModel ? (inputs.prompt_template as PromptItem[]).map(item => item.text) : [(inputs.prompt_template as PromptItem).text]
    if (isChatMode && isChatModel && !!inputs.memory)
      arr.push('{{#sys.query#}}')
    return arr
  })()
  const varInputs = getInputVars(allVarStrArr)

  return {
    readOnly,
    isChatMode,
    inputs,
    isChatModel,
    isCompletionModel,
    hasSetBlockStatus,
    shouldShowContextTip,
    isShowVisionConfig,
    handleModelChanged,
    handleCompletionParamsChange,
    handleVarListChange,
    handleAddVariable,
    handleContextVarChange,
    filterInputVar,
    filterVar,
    handlePromptChange,
    handleMemoryChange,
    handleVisionResolutionChange,
    isShowSingleRun,
    hideSingleRun,
    inputVarValues,
    setInputVarValues,
    visionFiles,
    setVisionFiles,
    contexts,
    setContexts,
    varInputs,
    runningStatus,
    handleRun,
    handleStop,
    runResult,
  }
}

export default useConfig