@@ -45,7 +45,6 @@ import {
   // loadCustomModels,
 } from '../utils/storage'
 import type { LLMMessage } from '../utils/llmMessages'
-import { initLlama, LlamaContext } from 'llama.rn'
 import type { LLMProvider } from '../services/llm/LLMProvider'
 import { LocalLLMProvider } from '../services/llm/LocalLLMProvider'
 import { RoutstrProvider } from '../services/llm/RoutstrProvider'
@@ -76,7 +75,6 @@ export default function SimpleChatScreen({ navigation }: { navigation: any }) {
   const [, setMessagesVersion] = useState(0) // For UI updates
   const [isInitLoading, setIsInitLoading] = useState(false)
   const [isGenerating, setIsGenerating] = useState(false)
-  const [context, setContext] = useState<LlamaContext | null>(null)
   const [llm, setLlm] = useState<LLMProvider | null>(null)
   const [isModelReady, setIsModelReady] = useState(false)
   const [initProgress, setInitProgress] = useState(0)
@@ -108,12 +106,9 @@ export default function SimpleChatScreen({ navigation }: { navigation: any }) {
       setRoutstrToken(token)
     }
     loadToken()
-    return () => {
-      if (context) {
-        context.release()
-      }
-    }
-  }, [context])
+  }, [])
+
+  useEffect(() => () => { void llm?.release() }, [llm])
 
   // // Load custom models on mount (disabled for minimal app)
   // useEffect(() => {
@@ -269,18 +264,6 @@ export default function SimpleChatScreen({ navigation }: { navigation: any }) {
     setProvider('local')
 
     const params = contextParams || (await loadContextParams())
-    const llamaContext = await initLlama(
-      {
-        model: modelPath,
-        ...params,
-      },
-      (progress) => {
-        // Progress is reported as 1 to 100
-        setInitProgress(progress)
-      },
-    )
-
-    setContext(llamaContext)
     const provider = new LocalLLMProvider(modelPath.split('/').pop() || 'Local Model')
     await provider.initialize({ model: modelPath, params, onProgress: (p) => setInitProgress(p) })
     setLlm(provider)
@@ -464,7 +447,6 @@ export default function SimpleChatScreen({ navigation }: { navigation: any }) {
             setSelectedModelName(modelInfo.name)
             await llm?.release()
             await initializeModel(path)
-            setLlm(new LocalLLMProvider(modelInfo.name))
             closeDrawer()
           }}
         />
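
For reference, a minimal sketch of the provider contract this refactor codes against, assuming only the option names visible in the diff (`model`, `params`, `onProgress`) plus a hypothetical `complete` method for generation that these hunks don't show; the real interface lives in `../services/llm/LLMProvider` and may differ:

```ts
import type { LLMMessage } from '../utils/llmMessages'

// Options passed to LocalLLMProvider.initialize() in the hunk above.
export interface LLMInitOptions {
  model: string                            // path to the local model file
  params?: Record<string, unknown>         // context params from loadContextParams()
  onProgress?: (progress: number) => void  // init progress, reported as 1 to 100
}

export interface LLMProvider {
  readonly name: string
  initialize(options: LLMInitOptions): Promise<void>
  // Hypothetical generation method; not shown in this diff.
  complete(messages: LLMMessage[], onToken?: (token: string) => void): Promise<string>
  release(): Promise<void>
}
```

With `release()` on the provider itself, the screen no longer needs separate `LlamaContext` state: the single cleanup effect `useEffect(() => () => { void llm?.release() }, [llm])` releases the previous provider whenever `llm` changes or the screen unmounts.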