
Commit

chore: refactor
janrtvld committed Dec 3, 2024
1 parent 0ad3f72 commit 02fccbf
Showing 6 changed files with 41 additions and 29 deletions.
2 changes: 1 addition & 1 deletion apps/easypid/src/app/_layout.tsx
@@ -6,7 +6,7 @@ import { DefaultTheme, ThemeProvider } from '@react-navigation/native'
 import { Slot } from 'expo-router'
 import * as SplashScreen from 'expo-splash-screen'
 
-import { useCheckIncompleteDownload } from '@easypid/llm/useLLM'
+import { useCheckIncompleteDownload } from '@easypid/llm'
 import tamaguiConfig from '../../tamagui.config'
 
 void SplashScreen.preventAutoHideAsync()
@@ -2,7 +2,7 @@ import { HeroIcons } from '@package/ui/src/content/Icon'
 
 import { Switch } from '@package/ui/src/base/Switch'
 
-import { useIsDeviceCapable, useLLM } from '@easypid/llm/useLLM'
+import { useIsDeviceCapable, useLLM } from '@easypid/llm'
 import { ConfirmationSheet } from '@package/app/src/components/ConfirmationSheet'
 import { useHasInternetConnection, useIsConnectedToWifi } from 'packages/app/src/hooks'
 import { useToastController } from 'packages/ui/src'
2 changes: 1 addition & 1 deletion apps/easypid/src/hooks/useOverAskingAi.tsx
@@ -1,6 +1,6 @@
 import { useEffect, useState } from 'react'
 
-import { useLLM } from '@easypid/llm/useLLM'
+import { useLLM } from '@easypid/llm'
 import type { OverAskingInput, OverAskingResponse } from '@easypid/use-cases/OverAskingApi'
 import { checkForOverAskingApi as analyzeVerificationApi } from '@easypid/use-cases/OverAskingApi'
 
3 changes: 3 additions & 0 deletions apps/easypid/src/llm/index.ts
@@ -0,0 +1,3 @@
+export * from './useLLM'
+export * from './state'
+export * from './types'
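
The new index.ts turns @easypid/llm into a barrel, which is why the three import sites above drop the /useLLM suffix. A minimal consumer sketch, assuming the state hooks below keep the useMMKVBoolean tuple shape; the component and its copy are illustrative, not part of this commit:

// Hypothetical consumer: every symbol now resolves through the package root.
import { useIsModelDownloading, useIsModelReady } from '@easypid/llm'
import { Text } from 'react-native'

export function ModelStatusLabel() {
  // useMMKVBoolean returns a [value, setValue] tuple; the value is undefined
  // until the key has been written at least once.
  const [isModelReady] = useIsModelReady()
  const [isModelDownloading] = useIsModelDownloading()

  if (isModelDownloading) return <Text>Downloading model…</Text>
  return <Text>{isModelReady ? 'Model ready' : 'Model not installed'}</Text>
}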
27 changes: 27 additions & 0 deletions apps/easypid/src/llm/state.ts
@@ -0,0 +1,27 @@
+import { useMMKVBoolean } from 'react-native-mmkv'
+
+import { mmkv } from '@easypid/storage/mmkv'
+
+export function useIsModelReady() {
+  return useMMKVBoolean('isModelReady', mmkv)
+}
+
+export function removeIsModelReady() {
+  mmkv.delete('isModelReady')
+}
+
+export function useIsModelActivated() {
+  return useMMKVBoolean('isModelActivated', mmkv)
+}
+
+export function removeIsModelActivated() {
+  mmkv.delete('isModelActivated')
+}
+
+export function useIsModelDownloading() {
+  return useMMKVBoolean('isModelDownloading', mmkv)
+}
+
+export function removeIsModelDownloading() {
+  mmkv.delete('isModelDownloading')
+}
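
Each flag in state.ts is a plain MMKV boolean keyed by name: readers and writers go through the useMMKVBoolean tuple, while the remove* helpers delete the key outright. A small sketch of that read/write/clear cycle, assuming it runs inside app code; the wrapper hook below is hypothetical and not part of this commit:

import { removeIsModelDownloading, useIsModelDownloading } from '@easypid/llm'

// Hypothetical wrapper showing one flag's lifecycle.
export function useDownloadFlag() {
  const [isDownloading, setIsDownloading] = useIsModelDownloading()

  return {
    isDownloading: isDownloading ?? false, // undefined until the key is first written
    markDownloading: () => setIsDownloading(true),
    clearFlag: () => removeIsModelDownloading(), // deletes the MMKV key entirely
  }
}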
34 changes: 8 additions & 26 deletions apps/easypid/src/llm/useLLM.tsx
@@ -1,40 +1,22 @@
-import { mmkv } from '@easypid/storage/mmkv'
 import { useCallback, useEffect, useRef, useState } from 'react'
 import { Platform } from 'react-native'
 import { LLAMA3_2_1B_QLORA_URL, LLAMA3_2_1B_TOKENIZER } from 'react-native-executorch'
-import { useMMKVBoolean } from 'react-native-mmkv'
 import RnExecutorch, { subscribeToDownloadProgress, subscribeToTokenGenerated } from './RnExecutorchModule'
 import { DEFAULT_CONTEXT_WINDOW_LENGTH, EOT_TOKEN } from './constants'
+import {
+  removeIsModelActivated,
+  removeIsModelDownloading,
+  removeIsModelReady,
+  useIsModelActivated,
+  useIsModelDownloading,
+  useIsModelReady,
+} from './state'
 import type { Model, ResourceSource } from './types'
 
 const interrupt = () => {
   RnExecutorch.interrupt()
 }
 
-export function useIsModelReady() {
-  return useMMKVBoolean('isModelReady', mmkv)
-}
-
-export function removeIsModelReady() {
-  mmkv.delete('isModelReady')
-}
-
-export function useIsModelActivated() {
-  return useMMKVBoolean('isModelActivated', mmkv)
-}
-
-export function removeIsModelActivated() {
-  mmkv.delete('isModelActivated')
-}
-
-export function useIsModelDownloading() {
-  return useMMKVBoolean('isModelDownloading', mmkv)
-}
-
-export function removeIsModelDownloading() {
-  mmkv.delete('isModelDownloading')
-}
-
 export const useLLM = ({
   modelSource = LLAMA3_2_1B_QLORA_URL,
   tokenizerSource = LLAMA3_2_1B_TOKENIZER,
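
With the flag hooks and removers defined once in state.ts, useLLM.tsx now imports them instead of redeclaring them, and other modules can share the same MMKV keys. As a hedged illustration of what that sharing enables (the helper below is hypothetical, not part of this commit), clearing all model state is just a matter of calling the three removers:

import { removeIsModelActivated, removeIsModelDownloading, removeIsModelReady } from '@easypid/llm'

// Hypothetical helper: wipe every model flag, e.g. before re-downloading a model.
export function resetModelFlags() {
  removeIsModelReady()
  removeIsModelActivated()
  removeIsModelDownloading()
}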
