index.tsx

import { useCallback, useEffect, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useParams, usePathname } from 'next/navigation'
import {
  RiCloseLine,
  RiLoader2Line,
} from '@remixicon/react'
import Recorder from 'js-audio-recorder'
import { useRafInterval } from 'ahooks'
import { convertToMp3 } from './utils'
import s from './index.module.css'
import cn from '@/utils/classnames'
import { StopCircle } from '@/app/components/base/icons/src/vender/solid/mediaAndDevices'
import { audioToText } from '@/service/share'

type VoiceInputTypes = {
  onConverted: (text: string) => void
  onCancel: () => void
}

const VoiceInput = ({
  onCancel,
  onConverted,
}: VoiceInputTypes) => {
  const { t } = useTranslation()
  // Capture 16 kHz, 16-bit, mono audio
  const recorder = useRef(new Recorder({
    sampleBits: 16,
    sampleRate: 16000,
    numChannels: 1,
    compiling: false,
  }))
  const canvasRef = useRef<HTMLCanvasElement | null>(null)
  const ctxRef = useRef<CanvasRenderingContext2D | null>(null)
  const drawRecordId = useRef<number | null>(null)
  const [originDuration, setOriginDuration] = useState(0)
  const [startRecord, setStartRecord] = useState(false)
  const [startConvert, setStartConvert] = useState(false)
  const pathname = usePathname()
  const params = useParams()
  const clearInterval = useRafInterval(() => {
    setOriginDuration(originDuration + 1)
  }, 1000)
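
  // Waveform visualiser: repeatedly sample the recorder's analyser data and draw it as bars on the canvas.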
  const drawRecord = useCallback(() => {
    drawRecordId.current = requestAnimationFrame(drawRecord)
    const canvas = canvasRef.current!
    const ctx = ctxRef.current!
    const dataUnit8Array = recorder.current.getRecordAnalyseData()
    const dataArray = [].slice.call(dataUnit8Array)
    const lineLength = parseInt(`${canvas.width / 3}`)
    const gap = parseInt(`${1024 / lineLength}`)
    ctx.clearRect(0, 0, canvas.width, canvas.height)
    ctx.beginPath()
    let x = 0
    for (let i = 0; i < lineLength; i++) {
      // Average each group of samples, clamp to [128, 178], then map to a bar height.
      let v = dataArray.slice(i * gap, i * gap + gap).reduce((prev: number, next: number) => {
        return prev + next
      }, 0) / gap
      if (v < 128)
        v = 128
      if (v > 178)
        v = 178
      const y = (v - 128) / 50 * canvas.height
      ctx.moveTo(x, 16)
      if (ctx.roundRect)
        ctx.roundRect(x, 16 - y, 2, y, [1, 1, 0, 0])
      else
        ctx.rect(x, 16 - y, 2, y)
      ctx.fill()
      x += 3
    }
    ctx.closePath()
  }, [])
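
  // Stop capture, convert the recording to MP3, and send it to the audio-to-text endpoint.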
  const handleStopRecorder = useCallback(async () => {
    clearInterval()
    setStartRecord(false)
    setStartConvert(true)
    recorder.current.stop()
    drawRecordId.current && cancelAnimationFrame(drawRecordId.current)
    drawRecordId.current = null
    const canvas = canvasRef.current!
    const ctx = ctxRef.current!
    ctx.clearRect(0, 0, canvas.width, canvas.height)
    const mp3Blob = convertToMp3(recorder.current)
    const mp3File = new File([mp3Blob], 'temp.mp3', { type: 'audio/mp3' })
    const formData = new FormData()
    formData.append('file', mp3File)
    // Choose the endpoint based on how the app is accessed: shared token, installed app, or regular app.
    let url = ''
    let isPublic = false
    if (params.token) {
      url = '/audio-to-text'
      isPublic = true
    }
    else if (params.appId) {
      if (pathname.search('explore/installed') > -1)
        url = `/installed-apps/${params.appId}/audio-to-text`
      else
        url = `/apps/${params.appId}/audio-to-text`
    }
    try {
      const audioResponse = await audioToText(url, isPublic, formData)
      onConverted(audioResponse.text)
      onCancel()
    }
    catch (e) {
      onConverted('')
      onCancel()
    }
  }, [])
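
  // Start recording; if the recorder fails to start (e.g. microphone access is denied), close the voice input.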
  const handleStartRecord = async () => {
    try {
      await recorder.current.start()
      setStartRecord(true)
      setStartConvert(false)
      if (canvasRef.current && ctxRef.current)
        drawRecord()
    }
    catch (e) {
      onCancel()
    }
  }
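
  // Size the canvas for the device pixel ratio so the waveform stays sharp on HiDPI screens.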
  const initCanvas = () => {
    const dpr = window.devicePixelRatio || 1
    const canvas = document.getElementById('voice-input-record') as HTMLCanvasElement
    if (canvas) {
      const { width: cssWidth, height: cssHeight } = canvas.getBoundingClientRect()
      canvas.width = dpr * cssWidth
      canvas.height = dpr * cssHeight
      canvasRef.current = canvas
      const ctx = canvas.getContext('2d')
      if (ctx) {
        ctx.scale(dpr, dpr)
        ctx.fillStyle = 'rgba(209, 224, 255, 1)'
        ctxRef.current = ctx
      }
    }
  }
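
  // Cap recordings at 120 seconds; originDuration ticks once per second, so this check runs on each re-render.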
  if (originDuration >= 120 && startRecord)
    handleStopRecorder()

  useEffect(() => {
    initCanvas()
    handleStartRecord()
    const recorderRef = recorder?.current
    return () => {
      recorderRef?.stop()
    }
  }, [])
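
  // Format the elapsed time as mm:ss for the timer readout.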
  const minutes = Math.floor(originDuration / 60)
  const seconds = originDuration % 60

  return (
    <div className={cn(s.wrapper, 'absolute inset-0 rounded-xl')}>
      <div className='absolute inset-[1.5px] flex items-center pl-[14.5px] pr-[6.5px] py-[14px] bg-primary-25 rounded-[10.5px] overflow-hidden'>
        <canvas id='voice-input-record' className='absolute left-0 bottom-0 w-full h-4' />
        {
          startConvert && <RiLoader2Line className='animate-spin mr-2 w-4 h-4 text-primary-700' />
        }
        <div className='grow'>
          {
            startRecord && (
              <div className='text-sm text-gray-500'>
                {t('common.voiceInput.speaking')}
              </div>
            )
          }
          {
            startConvert && (
              <div className={cn(s.convert, 'text-sm')}>
                {t('common.voiceInput.converting')}
              </div>
            )
          }
        </div>
        {
          startRecord && (
            <div
              className='flex justify-center items-center mr-1 w-8 h-8 hover:bg-primary-100 rounded-lg cursor-pointer'
              onClick={handleStopRecorder}
            >
              <StopCircle className='w-5 h-5 text-primary-600' />
            </div>
          )
        }
        {
          startConvert && (
            <div
              className='flex justify-center items-center mr-1 w-8 h-8 hover:bg-gray-200 rounded-lg cursor-pointer'
              onClick={onCancel}
            >
              <RiCloseLine className='w-4 h-4 text-gray-500' />
            </div>
          )
        }
        <div className={`w-[45px] pl-1 text-xs font-medium ${originDuration > 110 ? 'text-[#F04438]' : 'text-gray-700'}`}>{`0${minutes.toFixed(0)}:${seconds >= 10 ? seconds : `0${seconds}`}`}</div>
      </div>
    </div>
  )
}

export default VoiceInput
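
// Example usage (a minimal sketch; the parent component state and handler names below
// are assumptions, not part of this file):
// <VoiceInput
//   onConverted={text => setQuery(text)}
//   onCancel={() => setShowVoiceInput(false)}
// />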