import { defineComponent, ref, onMounted, computed } from 'vue'
import { useI18n } from '@src/boot/i18n'
import { tools } from '../../store/Modules/tools'
import { shared_consts } from '@/common/shared_vuejs'
import { useUserStore } from '@store/UserStore'
import { useGlobalStore } from '@store/globalStore'
import { useQuasar } from 'quasar'
import { costanti } from '@costanti'
import { useRouter } from 'vue-router'
import { serv_constants } from '@store/Modules/serv_constants'
export default defineComponent({
  name: 'CAITools',
  props: {},
  components: {},
  setup(props, { emit }) {
    const $q = useQuasar()
    const { t } = useI18n()
    const userStore = useUserStore()
    const globalStore = useGlobalStore()
    const isfinishLoading = computed(() => globalStore.finishLoading)
    const $router = useRouter()

    const options = ref<any>({})
    const querySel = ref('')

    const contestSysteList = [
      { label: 'Standard', value: '' },
      { label: 'Matematica', value: 'Sei un esperto in Matematica' },
      { label: 'Editoriale', value: 'Sei un esperto in Editoria e scrittura di articoli e blog' },
      { label: 'Programmazione', value: 'Sei un esperto in programmazione' },
    ]
    const modelList = [
      { label: 'DeepSeek', value: 'deepseek-chat' },
    ]
    const outputTypeList = [
      { label: 'Formato Testo', value: 'Ritornami l\'output in formato testo' },
      { label: 'Per Telegram', value: 'Ritornami l\'output formattato per incollarlo sulla chat Telegram, usando delle emoticons in punti chiave e il grassetto (**) nelle parole chiave.' },
      { label: 'Formato JSON', value: 'Ritornami l\'output in formato JSON' },
      { label: 'Formato CSV (campi separati da \'|\')', value: 'Ritornami l\'output in formato CSV, con i campi separati da \'|\'' },
    ]
    const tempList = [
      { label: 'Temperatura a 0.3', value: 0.3 },
      { label: 'Temperatura a 0.5', value: 0.5 },
      { label: 'Temperatura a 1', value: 1 },
      { label: 'Temperatura a 1.2', value: 1.2 },
      { label: 'Temperatura a 1.5', value: 1.5 },
    ]
    const tokenList = [
      { label: '50 Token', value: 50 },
      { label: '100 Token', value: 100 },
      { label: '200 Token', value: 200 },
      { label: '500 Token', value: 500 },
      { label: '1000 Token', value: 1000 },
      { label: '2500 Token', value: 2500 },
      { label: '4000 Token', value: 4000 },
      { label: '5000 Token', value: 5000 },
      { label: '10000 Token', value: 10000 },
    ]
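
    // These lists presumably feed the select controls in the component template
    // (system context, model, output format, temperature, and max token presets).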

    const model = ref('deepseek-chat')
    const max_tokens = ref(50)
    const outputType = ref('')
    const temperatura = ref(0.3)
    const stream = ref(false)
    const contestsystem = ref('')
    const inputPrompt = ref('')
    const result = ref('')
    const outputvisibile = ref('')
    const isLoading = ref(false)
    const errorMessage = ref('')
    const finish_reason = ref('')
    const withexplain = ref(false)
    const querylist = ref<any[]>([])

    const modelLabel = computed(() => {
      const foundModel = modelList.find((item: any) => item.value === model.value)
      return foundModel ? foundModel.label : null
    })

    function mount() {
      // Mount: restore the last used settings from cookies
      querylist.value = globalStore.getQueryAI()
      model.value = tools.getCookie('AI_MOD', 'deepseek-chat')
      max_tokens.value = tools.getCookie('AI_MT', 50, true)
      withexplain.value = tools.getCookie('AI_WE', '0') === '1'
      outputType.value = tools.getCookie('AI_OT', outputTypeList[0].value)
      temperatura.value = tools.convstrToNum(tools.getCookie('AI_TEM', '0.3'))
      stream.value = tools.getCookie('AI_ST', '0') === '1'
      contestsystem.value = tools.getCookie('AI_CON', '')
      inputPrompt.value = tools.getCookie('AI_PRO', '')
    }

    function getInput() {
      return 'Prompt:\n' + inputPrompt.value + '\n\nRisposta:\n'
    }
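
    // Assumed contract for globalStore.getQueryDS (its implementation is not in this file):
    // - non-streaming: resolves to { code, choice: { message: { content }, finish_reason }, error },
    //   where code is compared against serv_constants.RIS_CODE_OK / RIS_CODE_ERR;
    // - streaming: resolves to a response whose .data is a ReadableStream emitting
    //   Server-Sent-Events style chunks (see the note before the try block below).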
    async function handleSubmit() {
      isLoading.value = true
      errorMessage.value = ''

      // Keep any previous output visible, separated by a blank line
      if (outputvisibile.value) {
        outputvisibile.value += '\n\n'
      }
      outputvisibile.value += getInput()

      result.value = ''
      finish_reason.value = ''

      // Persist the current settings so they can be restored in mount()
      tools.setCookie('AI_MOD', model.value)
      tools.setCookie('AI_MT', max_tokens.value.toString())
      tools.setCookie('AI_OT', outputType.value)
      tools.setCookie('AI_TEM', temperatura.value.toString())
      tools.setCookie('AI_ST', stream.value ? '1' : '0')
      tools.setCookie('AI_WE', withexplain.value ? '1' : '0')
      tools.setCookie('AI_CON', contestsystem.value)
      tools.setCookie('AI_PRO', inputPrompt.value)

      options.value = {
        model: model.value,
        max_tokens: max_tokens.value,
        temp: temperatura.value,
        stream: stream.value,
        withexplain: withexplain.value,
        outputType: outputType.value,
      }
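
      // Assumption about the streaming payload: the backend is expected to send
      // Server-Sent-Events style events separated by blank lines, e.g.
      //   data: {"choice":{"delta":{"content":"..."},"finish_reason":null}}
      //   data: [DONE]
      // The parsing below relies on this shape; adjust it if the backend differs.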
      try {
        if (options.value.stream) {
          // Streaming mode
          const response = await globalStore.getQueryDS(inputPrompt.value, options.value)
          console.log('uscita da getQueryDS')
          isLoading.value = false

          // Read the data stream chunk by chunk
          const reader = response.data.getReader()
          const decoder = new TextDecoder('utf-8')
          while (true) {
            const { done, value } = await reader.read()
            if (done) break

            // Decode the chunk and handle the events it contains
            const chunk = decoder.decode(value)
            console.log('Received chunk:', chunk)
            const lines = chunk.split('\n\n').filter((line) => line.trim() !== '')
            for (const line of lines) {
              if (line.startsWith('data: [DONE]')) {
                // End-of-stream sentinel: check it before the generic "data: " prefix,
                // otherwise this branch is never reached. Parse a trailing payload only if present.
                const payload = line.slice('data: [DONE]'.length).trim()
                if (payload) {
                  const data = JSON.parse(payload)
                  if (data.choice && data.choice.finish_reason) {
                    finish_reason.value = data.choice.finish_reason
                  }
                }
                inputPrompt.value = ''
              } else if (line.startsWith('data: ')) {
                const data = JSON.parse(line.slice(6)) // strip the "data: " prefix and parse the JSON
                if (data.choice && data.choice.delta && data.choice.delta.content) {
                  result.value += data.choice.delta.content || ''
                  outputvisibile.value += data.choice.delta.content || ''
                }
                /* Error handling for error events in the stream, kept disabled:
                if (data.error) {
                  errorMessage.value = data.error
                  $q.notify({
                    color: 'negative',
                    icon: 'error',
                    message: 'Errore durante la richiesta',
                    caption: errorMessage.value,
                  })
                  break // stop the loop on error
                }
                */
              }
            }
          }
        } else {
          // Non-streaming mode
          const resdata = await globalStore.getQueryDS(inputPrompt.value, options.value)
          if (resdata.code === serv_constants.RIS_CODE_OK) {
            inputPrompt.value = ''
            if (resdata.choice) {
              finish_reason.value = resdata.choice.finish_reason || ''
              if (resdata.choice.message) {
                result.value = resdata.choice.message.content || ''
                outputvisibile.value += result.value
              }
            }
          } else if (resdata.code === serv_constants.RIS_CODE_ERR) {
            errorMessage.value = resdata.error?.message || resdata.error
            $q.notify({
              color: 'negative',
              icon: 'error',
              message: 'Errore durante la richiesta',
              caption: errorMessage.value,
            })
          }
          isLoading.value = false
        }
      } catch (error: any) {
        errorMessage.value = error.response?.data?.error || error.message
        isLoading.value = false
        $q.notify({
          color: 'negative',
          icon: 'error',
          message: 'Errore durante la richiesta',
          caption: errorMessage.value,
        })
      }
    }

    const copyToClipboard = () => {
      if (!result.value) return
      navigator.clipboard.writeText(result.value).then(() => {
        $q.notify({
          message: 'Copiato negli appunti!',
          color: 'positive',
          icon: 'check',
        })
      }).catch((err) => {
        console.error('Errore nella copia:', err)
        $q.notify({
          message: 'Errore nella copia!',
          color: 'negative',
          icon: 'error',
        })
      })
    }
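
    // Note: navigator.clipboard is only available in secure contexts (HTTPS or localhost);
    // when the write is not permitted the returned promise rejects and the catch branch above reports it.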

    function submitPrompt(event: any) {
      if (inputPrompt.value.trim()) { // make sure the input is not empty
        handleSubmit() // send the request
      }
    }
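
    // Template wiring (assumption, the template is not part of this file):
    // submitPrompt is expected to be bound to the prompt input or send button,
    // e.g. @keyup.ctrl.enter="submitPrompt" or a q-btn with @click="submitPrompt".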
    onMounted(mount)

    return {
      t,
      querySel,
      $q,
      globalStore,
      inputPrompt,
      result,
      outputvisibile,
      isLoading,
      errorMessage,
      handleSubmit,
      querylist,
      copyToClipboard,
      max_tokens,
      tokenList,
      modelList,
      tempList,
      stream,
      model,
      contestSysteList,
      contestsystem,
      finish_reason,
      modelLabel,
      withexplain,
      outputType,
      outputTypeList,
      temperatura,
      submitPrompt,
    }
  },
})