From 3fdf7c9e38fb4ee80f9a5c0d64eb0b2843fc4602 Mon Sep 17 00:00:00 2001
From: Aidan
Date: Tue, 1 Jul 2025 13:33:11 -0400
Subject: [PATCH] bug fixes, temperature and language section improvements

---
 src/commands/main.ts        | 120 ++++++++++++++++++++++++++++++------
 src/locales/english.json    |  15 +++--
 src/locales/portuguese.json |  13 ++--
 3 files changed, 121 insertions(+), 27 deletions(-)

diff --git a/src/commands/main.ts b/src/commands/main.ts
index 9082275..4c894ec 100644
--- a/src/commands/main.ts
+++ b/src/commands/main.ts
@@ -50,7 +50,7 @@ function getSettingsMenu(user: UserRow, Strings: any): SettingsMenu {
   const langLabel = langObj ? langObj.label : user.languageCode;
   const userId = user.telegramId;
   return {
-    text: Strings.settings.selectSetting,
+    text: `*${Strings.settings.selectSetting}*`,
     reply_markup: {
       inline_keyboard: [
         [
@@ -171,9 +171,10 @@ export default (bot: Telegraf, db: NodePgDatabase) => {
       await ctx.editMessageText(
         `${Strings.settings.ai.selectSeries}`,
         {
+          parse_mode: 'Markdown',
           reply_markup: {
-            inline_keyboard: models.map(series => [
-              { text: series.label, callback_data: `selectseries_${series.name}_${user.telegramId}` }
+            inline_keyboard: models.map((series, idx) => [
+              { text: series.label, callback_data: `selectseries_${idx}_${user.telegramId}` }
             ]).concat([[
               { text: `${Strings.varStrings.varBack}`, callback_data: `settings_back_${user.telegramId}` }
             ]])
@@ -193,7 +194,7 @@ export default (bot: Telegraf, db: NodePgDatabase) => {
     }
   });
 
-  bot.action(/^selectseries_.+_\d+$/, async (ctx) => {
+  bot.action(/^selectseries_\d+_\d+$/, async (ctx) => {
     const data = (ctx.callbackQuery as any).data;
     const userId = extractUserIdFromCallback(data);
     const allowed = !!userId && String(ctx.from.id) === userId;
@@ -205,8 +206,10 @@ export default (bot: Telegraf, db: NodePgDatabase) => {
     await ctx.answerCbQuery();
     const { user, Strings } = await getUserAndStrings(ctx, db);
     if (!user) return;
-    const seriesName = data.replace(/^selectseries_/, '').replace(/_\d+$/, '');
-    const series = models.find(s => s.name === seriesName);
+    const match = data.match(/^selectseries_(\d+)_\d+$/);
+    if (!match) return;
+    const seriesIdx = parseInt(match[1], 10);
+    const series = models[seriesIdx];
     if (!series) return;
     const desc = user.languageCode === 'pt' ? series.descriptionPt : series.descriptionEn;
     try {
@@ -214,8 +217,8 @@ export default (bot: Telegraf, db: NodePgDatabase) => {
         `${Strings.settings.ai.seriesDescription.replace('{seriesDescription}', desc)}\n\n${Strings.settings.ai.selectParameterSize.replace('{seriesLabel}', series.label)}\n\n${Strings.settings.ai.parameterSizeExplanation}`,
         {
           reply_markup: {
-            inline_keyboard: series.models.map(m => [
-              { text: `${m.label} (${m.parameterSize})`, callback_data: `setmodel_${series.name}_${m.name}_${user.telegramId}` }
+            inline_keyboard: series.models.map((m, idx) => [
+              { text: `${m.label} (${m.parameterSize})`, callback_data: `setmodel_${seriesIdx}_${idx}_${user.telegramId}` }
             ]).concat([[
               { text: `${Strings.varStrings.varBack}`, callback_data: `settings_aiModel_${user.telegramId}` }
             ]])
@@ -235,7 +238,7 @@ export default (bot: Telegraf, db: NodePgDatabase) => {
     }
   });
 
-  bot.action(/^setmodel_.+_\d+$/, async (ctx) => {
+  bot.action(/^setmodel_\d+_\d+_\d+$/, async (ctx) => {
     const data = (ctx.callbackQuery as any).data;
     const userId = extractUserIdFromCallback(data);
     const allowed = !!userId && String(ctx.from.id) === userId;
@@ -247,11 +250,12 @@ export default (bot: Telegraf, db: NodePgDatabase) => {
     await ctx.answerCbQuery();
     const { user, Strings } = await getUserAndStrings(ctx, db);
     if (!user) return;
-    const parts = data.split('_');
-    const seriesName = parts[1];
-    const modelName = parts.slice(2, -1).join('_');
-    const series = models.find(s => s.name === seriesName);
-    const model = series?.models.find(m => m.name === modelName);
+    const match = data.match(/^setmodel_(\d+)_(\d+)_\d+$/);
+    if (!match) return;
+    const seriesIdx = parseInt(match[1], 10);
+    const modelIdx = parseInt(match[2], 10);
+    const series = models[seriesIdx];
+    const model = series?.models[modelIdx];
     if (!series || !model) return;
     await db.update(schema.usersTable)
       .set({ customAiModel: model.name })
@@ -299,9 +303,55 @@ export default (bot: Telegraf, db: NodePgDatabase) => {
     const { user, Strings } = await getUserAndStrings(ctx, db);
     if (!user) return;
     const temps = [0.2, 0.5, 0.7, 0.9, 1.2];
+    try {
+      await ctx.editMessageText(
+        `${Strings.settings.ai.temperatureExplanation}\n\n${Strings.settings.ai.selectTemperature}`,
+        {
+          parse_mode: 'Markdown',
+          reply_markup: {
+            inline_keyboard: temps.map(t => [{ text: t.toString(), callback_data: `settemp_${t}_${user.telegramId}` }])
+              .concat([
+                [{ text: Strings.varStrings.varMore, callback_data: `show_more_temps_${user.telegramId}` }],
+                [
+                  { text: Strings.varStrings.varBack, callback_data: `settings_back_${user.telegramId}` }
+                ]
+              ])
+          }
+        }
+      );
+    } catch (err) {
+      if (
+        !(
+          err.response.description?.includes('query is too old') ||
+          err.response.description?.includes('query ID is invalid') ||
+          err.response.description?.includes('message is not modified') ||
+          err.response.description?.includes('message to edit not found')
+        )
+      )
+        console.error('Unexpected Telegram error:', err);
+    }
+  });
+
+  bot.action(/^show_more_temps_\d+$/, async (ctx) => {
+    const data = (ctx.callbackQuery as any).data;
+    const userId = extractUserIdFromCallback(data);
+    const allowed = !!userId && String(ctx.from.id) === userId;
+    logSettingsAccess('show_more_temps', ctx, allowed, userId);
+    if (!allowed) {
+      const { Strings } = await getUserAndStrings(ctx, db);
+      return ctx.answerCbQuery(getNotAllowedMessage(Strings), { show_alert: true });
+    }
+    await ctx.answerCbQuery();
+    const { user, Strings } = await getUserAndStrings(ctx, db);
+    if (!user) return;
+    const moreTemps = [1.4, 1.6, 1.8, 2.0];
     try {
       await ctx.editMessageReplyMarkup({
-        inline_keyboard: temps.map(t => [{ text: t.toString(), callback_data: `settemp_${t}_${user.telegramId}` }]).concat([[{ text: `${Strings.varStrings.varBack}`, callback_data: `settings_back_${user.telegramId}` }]])
+        inline_keyboard: moreTemps.map(t => [{ text: `🔥 ${t}`, callback_data: `settemp_${t}_${user.telegramId}` }])
+          .concat([
+            [{ text: Strings.varStrings.varLess, callback_data: `settings_aiTemperature_${user.telegramId}` }],
+            [{ text: Strings.varStrings.varBack, callback_data: `settings_back_${user.telegramId}` }]
+          ])
       });
     } catch (err) {
       if (
@@ -349,9 +399,15 @@ export default (bot: Telegraf, db: NodePgDatabase) => {
     const { user, Strings } = await getUserAndStrings(ctx, db);
     if (!user) return;
     try {
-      await ctx.editMessageReplyMarkup({
-        inline_keyboard: langs.map(l => [{ text: l.label, callback_data: `setlang_${l.code}_${user.telegramId}` }]).concat([[{ text: `${Strings.varStrings.varBack}`, callback_data: `settings_back_${user.telegramId}` }]])
-      });
+      await ctx.editMessageText(
+        Strings.settings.selectLanguage,
+        {
+          parse_mode: 'Markdown',
+          reply_markup: {
+            inline_keyboard: langs.map(l => [{ text: l.label, callback_data: `setlang_${l.code}_${user.telegramId}` }]).concat([[{ text: `${Strings.varStrings.varBack}`, callback_data: `settings_back_${user.telegramId}` }]])
+          }
+        }
+      );
     } catch (err) {
       if (
         !(
@@ -377,7 +433,33 @@ export default (bot: Telegraf, db: NodePgDatabase) => {
     await ctx.answerCbQuery();
     const { user, Strings } = await getUserAndStrings(ctx, db);
     if (!user) return;
-    await updateSettingsKeyboard(ctx, user, Strings);
+    const menu = getSettingsMenu(user, Strings);
+    try {
+      if (ctx.callbackQuery.message) {
+        await ctx.editMessageText(
+          menu.text,
+          {
+            reply_markup: menu.reply_markup,
+            parse_mode: 'Markdown'
+          }
+        );
+      } else {
+        await ctx.reply(menu.text, {
+          reply_markup: menu.reply_markup,
+          parse_mode: 'Markdown'
+        });
+      }
+    } catch (err) {
+      if (
+        !(
+          err.response.description?.includes('query is too old') ||
+          err.response.description?.includes('query ID is invalid') ||
+          err.response.description?.includes('message is not modified') ||
+          err.response.description?.includes('message to edit not found')
+        )
+      )
+        console.error('[Settings] Unexpected Telegram error:', err);
+    }
   });
 
   bot.action(/^setlang_.+_\d+$/, async (ctx) => {
diff --git a/src/locales/english.json b/src/locales/english.json
index b565e37..713aee9 100644
--- a/src/locales/english.json
+++ b/src/locales/english.json
@@ -13,7 +13,9 @@
     "varWas": "was",
     "varNone": "None",
     "varUnknown": "Unknown",
-    "varBack": "⬅️ Back"
+    "varBack": "⬅️ Back",
+    "varMore": "➡️ More",
+    "varLess": "➖ Less"
   },
   "unexpectedErr": "An unexpected error occurred: {error}",
   "errInvalidOption": "Whoops! Invalid option!",
@@ -83,7 +85,9 @@
     "statusComplete": "✅ Complete!",
     "modelHeader": "🤖 *{model}* | 🌡️ *{temperature}* | {status}",
     "noChatFound": "No chat found",
-    "pulled": "✅ Pulled {model} successfully, please retry the command."
+    "pulled": "✅ Pulled {model} successfully, please retry the command.",
+    "selectTemperature": "*Please select a temperature:*",
+    "temperatureExplanation": "Temperature controls the randomness of the AI's responses. Lower values (e.g., 0.2) make the model more focused and deterministic, while higher values (e.g., 1.2 or above) make it more creative and random."
   },
   "maInvalidModule": "Please provide a valid module ID from The Mod Archive.\nExample: `/modarchive 81574`",
   "maDownloadError": "Error downloading the file. Check the module ID and try again.",
@@ -119,12 +123,15 @@
       "aiEnabledSetTo": "AI Enabled set to {aiEnabled}",
       "aiModelSetTo": "AI Model set to {aiModel}",
       "aiTemperatureSetTo": "AI Temperature set to {aiTemperature}",
-      "selectSeries": "Please select a model series.",
+      "selectSeries": "*Please select a model series.*",
       "seriesDescription": "{seriesDescription}",
       "selectParameterSize": "Please select a parameter size for {seriesLabel}.",
       "parameterSizeExplanation": "Parameter size (e.g. 2B, 4B) refers to the number of parameters in the model. Larger models may be more capable but require more resources.",
-      "modelSetTo": "Model set to {aiModel} ({parameterSize})"
+      "modelSetTo": "Model set to {aiModel} ({parameterSize})",
+      "selectTemperature": "*Please select a temperature:*",
+      "temperatureExplanation": "Temperature controls the randomness of the AI's responses. Lower values (e.g., 0.2) make the model more focused and deterministic, while higher values (e.g., 1.2 or above) make it more creative and random."
     },
+    "selectLanguage": "*Please select a language:*",
     "languageCodeSetTo": "Language set to {languageCode}",
     "unknownAction": "Unknown action."
   },
diff --git a/src/locales/portuguese.json b/src/locales/portuguese.json
index 43b90cb..7186051 100644
--- a/src/locales/portuguese.json
+++ b/src/locales/portuguese.json
@@ -12,7 +12,9 @@
     "varWas": "estava",
     "varNone": "Nenhum",
     "varUnknown": "Desconhecido",
-    "varBack": "⬅️ Voltar"
+    "varBack": "⬅️ Voltar",
+    "varMore": "➡️ Mais",
+    "varLess": "➖ Menos"
   },
   "unexpectedErr": "Ocorreu um erro inesperado: {error}",
   "errInvalidOption": "Ops! Opção inválida!",
@@ -78,7 +80,7 @@
     "aiEnabled": "IA",
     "aiModel": "Modelo",
     "aiTemperature": "Temperatura",
-    "selectSeries": "Por favor, selecione uma série de modelos.",
+    "selectSeries": "*Por favor, selecione uma série de modelos.*",
     "seriesDescription": "{seriesDescription}",
     "selectParameterSize": "Por favor, selecione um tamanho de parâmetro para {seriesLabel}.",
     "parameterSizeExplanation": "O tamanho do parâmetro (ex: 2B, 4B) refere-se ao número de parâmetros do modelo. Modelos maiores podem ser mais capazes, mas exigem mais recursos.",
@@ -124,12 +126,15 @@
       "aiEnabledSetTo": "Inteligência Artificial definido para {aiEnabled}",
       "aiModelSetTo": "Modelo personalizado definido para {aiModel}",
       "aiTemperatureSetTo": "Temperatura definida para {aiTemperature}",
-      "selectSeries": "Por favor, selecione uma série de modelos.",
+      "selectSeries": "*Por favor, selecione uma série de modelos.*",
       "seriesDescription": "{seriesDescription}",
       "selectParameterSize": "Por favor, selecione um tamanho de parâmetro para {seriesLabel}.",
       "parameterSizeExplanation": "O tamanho do parâmetro (ex: 2B, 4B) refere-se ao número de parâmetros do modelo. Modelos maiores podem ser mais capazes, mas exigem mais recursos.",
-      "modelSetTo": "Modelo definido para {aiModel} ({parameterSize})"
+      "modelSetTo": "Modelo definido para {aiModel} ({parameterSize})",
+      "selectTemperature": "*Por favor, selecione uma temperatura:*",
+      "temperatureExplanation": "A temperatura controla a aleatoriedade das respostas da IA. Valores mais baixos (ex: 0.2) tornam o modelo mais focado e determinístico, enquanto valores mais altos (ex: 1.2 ou mais) tornam as respostas mais criativas e aleatórias."
     },
+    "selectLanguage": "*Por favor, selecione um idioma:*",
    "languageCodeSetTo": "Idioma definido para {languageCode}",
     "unknownAction": "Ação desconhecida."
   },