Merge pull request #215 from HyperChatBot/refactor-migrate-to-tauri2
refactor: migrate to tauri v2
YanceyOfficial authored Oct 9, 2024
2 parents bf32e45 + 604f8f4 commit 8389b1e
Showing 11 changed files with 55 additions and 73 deletions.
25 changes: 11 additions & 14 deletions src/components/ChatBox/Markdown.tsx
@@ -3,7 +3,7 @@ import { FC, memo } from 'react'
import ReactMarkdown from 'react-markdown'
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'
import { oneDark as mdCodeTheme } from 'react-syntax-highlighter/dist/esm/styles/prism'
import rehypeMathjax from 'rehype-mathjax'
// import rehypeMathjax from 'rehype-mathjax'
import remarkGfm from 'remark-gfm'
import remarkMath from 'remark-math'

@@ -15,11 +15,11 @@ const Markdown: FC<Props> = ({ raw }) => {
return (
<ReactMarkdown
remarkPlugins={[remarkGfm, remarkMath]}
rehypePlugins={[rehypeMathjax]}
// rehypePlugins={[rehypeMathjax]}
components={{
code({ inline, className, children, ...props }) {
code({ className, children, ...props }) {
const match = /language-(\w+)/.exec(className || '')
return !inline ? (
return (
<SyntaxHighlighter
style={mdCodeTheme}
// FIXME: Azure OpenAI Service does not return the language of block code.
@@ -30,11 +30,11 @@ const Markdown: FC<Props> = ({ raw }) => {
>
{String(children).replace(/\n$/, '')}
</SyntaxHighlighter>
) : (
<code className={classNames('font-semibold', className)}>
`{children}`
</code>
)

// <code className={classNames('font-semibold', className)}>
// `{children}`
// </code>
},
p({ className, children, ...props }) {
return (
@@ -61,34 +61,31 @@ const Markdown: FC<Props> = ({ raw }) => {
)
},
ol({ className, children, ...props }) {
const _props = { ...props, ordered: props.ordered.toString() }
return (
<ol
className={classNames('mb-3 list-disc pl-3 last:mb-0', className)}
{..._props}
{...props}
>
{children}
</ol>
)
},
ul({ className, children, ...props }) {
const _props = { ...props, ordered: props.ordered.toString() }
return (
<ul
className={classNames(
'mb-3 list-decimal pl-3 last:mb-0',
className
)}
{..._props}
{...props}
>
{children}
</ul>
)
},
li({ className, children, ...props }) {
const _props = { ...props, ordered: props.ordered.toString() }
return (
<li className={classNames('mb-3 last:mb-0', className)} {..._props}>
<li className={classNames('mb-3 last:mb-0', className)} {...props}>
{children}
</li>
)
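Note on the Markdown.tsx changes: newer react-markdown releases (presumably v9 here) no longer pass the non-standard `inline` and `ordered` props to custom renderers, which is why the `!inline` branch and the `ordered.toString()` workarounds are dropped and plain `{...props}` spreads become safe. A minimal sketch of the post-migration `code` renderer under that assumption; the `CodeOnlyMarkdown` name and the undefined-language fallback are illustrative, not part of the commit.

// Sketch only: mirrors the simplified `code` renderer above, assuming a
// react-markdown version that no longer provides the `inline` prop.
import { FC } from 'react'
import ReactMarkdown from 'react-markdown'
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'
import { oneDark } from 'react-syntax-highlighter/dist/esm/styles/prism'

const CodeOnlyMarkdown: FC<{ raw: string }> = ({ raw }) => (
  <ReactMarkdown
    components={{
      code({ className, children }) {
        // Block code arrives with a `language-xxx` class; otherwise let the
        // highlighter fall back to plain text.
        const match = /language-(\w+)/.exec(className || '')
        return (
          <SyntaxHighlighter style={oneDark} language={match?.[1]}>
            {String(children).replace(/\n$/, '')}
          </SyntaxHighlighter>
        )
      }
    }}
  >
    {raw}
  </ReactMarkdown>
)

export default CodeOnlyMarkdown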
8 changes: 2 additions & 6 deletions src/components/Configuration/ImageGeneration.tsx
@@ -105,9 +105,7 @@ const Configuration: FC = () => {
{...formik.getFieldProps('size')}
>
{sizes.map((size) => (
<MenuItem key={size} value={size}>
{size}
</MenuItem>
<MenuItem key={size}>{size}</MenuItem>
))}
</Select>
<FormHelperText>
@@ -126,9 +124,7 @@ const Configuration: FC = () => {
{...formik.getFieldProps('responseFormat')}
>
{responseFormats.map((responseFormat) => (
<MenuItem key={responseFormat} value={responseFormat}>
{responseFormat}
</MenuItem>
<MenuItem key={responseFormat}>{responseFormat}</MenuItem>
))}
</Select>
<FormHelperText>
2 changes: 1 addition & 1 deletion src/components/ImportAndExportDexie/index.tsx
@@ -19,7 +19,7 @@ const ImportAndExportDexie: FC = () => {
const text = await blob.text()
const filename = `dexie-export-${Date.now()}.json`
await writeTextFile(filename, text, {
dir: BaseDirectory.Download
baseDir: BaseDirectory.Download
})

toast.success(
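For context: in Tauri v2 the filesystem API moves from @tauri-apps/api/fs to the @tauri-apps/plugin-fs package, and the write option is renamed from `dir` to `baseDir`, which is all this hunk changes. A minimal sketch of the v2 call, assuming the plugin and its capability permissions are set up; the `exportBackup` helper name is illustrative.

// Sketch of a Tauri v2 text-file write into the Downloads directory.
// Assumes @tauri-apps/plugin-fs is installed and allowed in the app's
// capabilities; the helper name is an assumption.
import { BaseDirectory, writeTextFile } from '@tauri-apps/plugin-fs'

export const exportBackup = async (json: string): Promise<string> => {
  const filename = `dexie-export-${Date.now()}.json`
  // Tauri v1 used `{ dir: BaseDirectory.Download }`; v2 renames the option to `baseDir`.
  await writeTextFile(filename, json, { baseDir: BaseDirectory.Download })
  return filename
}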
9 changes: 3 additions & 6 deletions src/configurations/imageGeneration.ts
@@ -1,21 +1,18 @@
import {
CreateImageRequestResponseFormatEnum,
CreateImageRequestSizeEnum
} from 'openai'
import { ImageGenerateParams } from 'openai/resources'

export interface ImageGenerationConfiguration {
n: number
size: (typeof sizes)[number]
responseFormat: (typeof responseFormats)[number]
}

export const sizes: CreateImageRequestSizeEnum[] = [
export const sizes: ImageGenerateParams['size'][] = [
'256x256',
'512x512',
'1024x1024'
]

export const responseFormats: CreateImageRequestResponseFormatEnum[] = [
export const responseFormats: ImageGenerateParams['response_format'][] = [
'url',
'b64_json'
]
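The openai v4 SDK drops the generated enums (CreateImageRequestSizeEnum and friends) in favour of request-parameter types, so the allowed values are now read off ImageGenerateParams with indexed access. A short sketch of that pattern; the NonNullable narrowing is an assumption added here to strip the optional null/undefined members, not something the commit does.

// Sketch: deriving option lists from openai v4 request-param types.
import { ImageGenerateParams } from 'openai/resources'

type Size = NonNullable<ImageGenerateParams['size']>
type ResponseFormat = NonNullable<ImageGenerateParams['response_format']>

export const sizes: Size[] = ['256x256', '512x512', '1024x1024']
export const responseFormats: ResponseFormat[] = ['url', 'b64_json']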
30 changes: 13 additions & 17 deletions src/hooks/useAudio.ts
@@ -1,3 +1,5 @@
import { AudioResponseFormat } from 'openai/resources'
import { Uploadable } from 'openai/src/uploads'
import { useRecoilValue, useSetRecoilState } from 'recoil'
import { AudioTranscriptionConfiguration } from 'src/configurations/audioTranscription'
import { AudioTranslationConfiguration } from 'src/configurations/audioTranslation'
@@ -26,20 +28,16 @@ const useAudio = (prompt: string, hashFile: HashFile | null) => {
setLoading(true)

// TODO: Uses pure fetch.
const transcription = await openai.createTranscription(
hashFile.file,
const transcription = await openai.audio.transcriptions.create({
file: hashFile.file as Uploadable,
model,
prompt,
responseFormat,
response_format: responseFormat as AudioResponseFormat,
temperature,
language === '' ? undefined : language
)
language: language === '' ? undefined : language
})

saveCommonAssistantMessage(
// If `responseFormat` is `json` or `verbose_json`, the result is `transcription.data.text`.
// If `responseFormat` is `text`, `vtt` `or `srt`, the result is `transcription.data`.
transcription.data.text || (transcription.data as unknown as string)
)
saveCommonAssistantMessage(transcription.text)
} catch (error) {
showApiRequestErrorToast()
rollbackMessage()
@@ -59,18 +57,16 @@ const useAudio = (prompt: string, hashFile: HashFile | null) => {
setLoading(true)

// TODO: Uses pure fetch.
const translation = await openai.createTranslation(
hashFile.file,
const translation = await openai.audio.translations.create({
file: hashFile.file,
model,
prompt,
responseFormat,
response_format: responseFormat as AudioResponseFormat,
temperature
)
})

saveCommonAssistantMessage(
// If `responseFormat` is `json` or `verbose_json`, the result is `translation.data.text`.
// If `responseFormat` is `text`, `vtt` `or `srt`, the result is `translation.data`.
translation.data.text || (translation.data as unknown as string)
translation.text
)
} catch (error) {
showApiRequestErrorToast()
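For reference, the v4 SDK exposes the audio endpoints as openai.audio.transcriptions.create and openai.audio.translations.create, each taking a single params object and resolving to the parsed body directly (no axios-style .data wrapper), which is why the response-format branching above disappears. A minimal sketch, assuming whisper-1 and the default json response format; the helper names are illustrative.

// Sketch of the v4 audio calls, assuming the default `json` response format
// so the transcript is available as `.text`. Helper names are assumptions.
import { OpenAI } from 'openai'

export const transcribe = async (openai: OpenAI, file: File): Promise<string> => {
  const transcription = await openai.audio.transcriptions.create({
    file,
    model: 'whisper-1'
  })
  return transcription.text
}

export const translate = async (openai: OpenAI, file: File): Promise<string> => {
  const translation = await openai.audio.translations.create({
    file,
    model: 'whisper-1'
  })
  return translation.text
}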
4 changes: 2 additions & 2 deletions src/hooks/useChatCompletion.ts
@@ -1,4 +1,4 @@
import { CreateChatCompletionRequest } from 'openai'
import { ChatCompletionCreateParams } from 'openai/resources'
import { useRecoilValue, useSetRecoilState } from 'recoil'
import toast from 'src/components/Snackbar'
import { ChatConfiguration, models } from 'src/configurations/chatCompletion'
@@ -46,7 +46,7 @@ const useChatCompletion = (prompt: string) => {
)
return
}
const context: CreateChatCompletionRequest['messages'] = []
const context: ChatCompletionCreateParams['messages'] = []
currConversation.messages
.slice()
.reverse()
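ChatCompletionCreateParams is the v4 counterpart of CreateChatCompletionRequest, so the context array keeps the familiar { role, content } message shape. A small sketch of the typed context; the message texts are illustrative.

// Sketch: a typed message context using the v4 request type.
import { ChatCompletionCreateParams } from 'openai/resources'

export const context: ChatCompletionCreateParams['messages'] = [
  { role: 'system', content: 'You are a helpful assistant.' },
  { role: 'user', content: 'Hello!' }
]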
2 changes: 1 addition & 1 deletion src/hooks/useImageGeneration.ts
@@ -1,5 +1,5 @@
import { DateTime } from 'luxon'
import { ImagesResponse } from 'openai'
import { ImagesResponse } from 'openai/resources'
import { useRecoilValue, useSetRecoilState } from 'recoil'
import { ImageGenerationConfiguration } from 'src/configurations/imageGeneration'
import { useMessages, useServices } from 'src/hooks'
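Only the import path changes here: ImagesResponse now comes from 'openai/resources', and its shape is unchanged, a `data` array whose entries carry either `url` or `b64_json` depending on the requested response_format. A short sketch of reading it; the `pickImages` name is an assumption.

// Sketch: pulling image payloads out of an ImagesResponse body.
import { ImagesResponse } from 'openai/resources'

export const pickImages = (response: ImagesResponse): (string | undefined)[] =>
  // Each entry has either `url` or `b64_json`, depending on response_format.
  response.data.map((image) => image.b64_json ?? image.url)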
10 changes: 2 additions & 8 deletions src/hooks/useOpenAI.ts
@@ -1,17 +1,11 @@
import { OpenAI } from 'openai'
import { OpenAI } from 'openai'
import { useSettings } from 'src/hooks'

class CustomFormData extends FormData {
getHeaders() {
return {}
}
}

const useOpenAI = () => {
const { settings } = useSettings()

const openai = new OpenAI({
apiKey: settings?.openaiSecretKey,
apiKey: settings?.openaiSecretKey || '',
organization: settings?.openaiOrganizationId,
dangerouslyAllowBrowser: true
})
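The v4 client drops the Configuration wrapper and the FormData polyfill and is constructed directly, which is what this hook now does. A minimal sketch of the constructor in a webview context; the placeholder key and organization are assumptions.

// Sketch of the v4 client constructor; placeholder values are assumptions.
import { OpenAI } from 'openai'

export const openai = new OpenAI({
  apiKey: 'sk-your-key',        // a string value, matching the `|| ''` fallback above
  organization: 'org-your-org', // optional
  dangerouslyAllowBrowser: true // required when running inside a webview/browser
})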
14 changes: 8 additions & 6 deletions src/hooks/useSettings.ts
@@ -67,13 +67,15 @@ const useSettings = () => {
}

if (currSettings.assistantAvatarFilename) {
const src = await transformFilenameToSrc(
currSettings.assistantAvatarFilename
)
try {
const src = await transformFilenameToSrc(
currSettings.assistantAvatarFilename
)

if (src) {
setSettings({ ...currSettings, assistantAvatarFilename: src })
} else {
if (src) {
setSettings({ ...currSettings, assistantAvatarFilename: src })
}
} catch {
// if transform is error
setSettings(currSettings)
}
20 changes: 10 additions & 10 deletions src/hooks/useSevices.ts
@@ -1,8 +1,8 @@
import {
CreateChatCompletionRequest,
CreateCompletionRequest,
CreateImageRequest
} from 'openai'
ChatCompletionCreateParams,
CompletionCreateParams,
ImageGenerateParams,
} from 'openai/resources'
import {
OPENAI_CHAT_COMPLETION_URL,
OPENAI_IMAGE_GENERATION_URL,
@@ -37,28 +37,28 @@ const useServices = () => {

const company = {
[Companies.Azure]: {
[Products.ChatCompletion]: (body: CreateChatCompletionRequest) =>
[Products.ChatCompletion]: (body: ChatCompletionCreateParams) =>
_fetch(
`${settings?.azureEndPoint}/openai/deployments/${settings?.azureDeploymentName}/chat/completions?api-version=2023-03-15-preview`,
body
),
[Products.TextCompletion]: (body: CreateCompletionRequest) =>
[Products.TextCompletion]: (body: CompletionCreateParams) =>
_fetch(
`${settings?.azureEndPoint}/openai/deployments/${settings?.azureDeploymentName}/completions?api-version=2022-12-01`,
body
),
[Products.ImageGeneration]: (body: CreateImageRequest) =>
[Products.ImageGeneration]: (body: ImageGenerateParams) =>
_fetch(
`${settings?.azureEndPoint}/openai/images/generations:submit?api-version=2023-06-01-preview`,
body
)
},
[Companies.OpenAI]: {
[Products.ChatCompletion]: (body: CreateChatCompletionRequest) =>
[Products.ChatCompletion]: (body: ChatCompletionCreateParams) =>
_fetch(OPENAI_CHAT_COMPLETION_URL, body),
[Products.TextCompletion]: (body: CreateCompletionRequest) =>
[Products.TextCompletion]: (body: CompletionCreateParams) =>
_fetch(OPENAI_TEXT_COMPLETION_URL, body),
[Products.ImageGeneration]: (body: CreateImageRequest) =>
[Products.ImageGeneration]: (body: ImageGenerateParams) =>
_fetch(OPENAI_IMAGE_GENERATION_URL, body)
}
}
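Only the request-body type names change in this hook; the bodies stay plain JSON, so the v4 param types can annotate the hand-rolled fetch calls directly. A small sketch of such a typed helper; the header layout is an assumption (the Azure branch would use an api-key header instead of a Bearer token).

// Sketch: posting a typed chat-completion body with plain fetch.
// The Bearer-token header is an assumption; Azure endpoints use `api-key`.
import { ChatCompletionCreateParams } from 'openai/resources'

export const postChatCompletion = (
  url: string,
  body: ChatCompletionCreateParams,
  secretKey: string
): Promise<Response> =>
  fetch(url, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${secretKey}`
    },
    body: JSON.stringify(body)
  })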
4 changes: 2 additions & 2 deletions src/hooks/useTextCompletion.ts
@@ -1,4 +1,4 @@
import { CreateCompletionResponse } from 'openai'
import { Completion } from 'openai/resources'
import { useRecoilValue, useSetRecoilState } from 'recoil'
import { TextCompletionConfiguration } from 'src/configurations/textCompletion'
import { useMessages, useServices } from 'src/hooks'
@@ -41,7 +41,7 @@ const useTextCompletion = (prompt: string) => {
frequency_penalty: frequencyPenalty,
presence_penalty: presencePenalty
})
const completion: CreateCompletionResponse = await response.json()
const completion: Completion = await response.json()

const preResponseText = preResponse.checked ? preResponse.content : ''
const postResponseText = postResponse.checked ? postResponse.content : ''
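Completion is the v4 name for the legacy completions response; its choices[].text field is unchanged, so downstream handling stays the same. A short sketch of reading it; the helper name is an assumption.

// Sketch: extracting the first choice's text from a legacy completions response.
import { Completion } from 'openai/resources'

export const firstChoiceText = (completion: Completion): string =>
  completion.choices[0]?.text ?? ''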
