From 69d586394768e3e30ab2c37436d437a407255740 Mon Sep 17 00:00:00 2001 From: Aishlia Date: Wed, 30 Aug 2023 11:06:39 -0700 Subject: [PATCH 01/14] Only flag bad words in positive prompt --- src/modules/sd-images/index.ts | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/modules/sd-images/index.ts b/src/modules/sd-images/index.ts index e5389268..d7d7a690 100644 --- a/src/modules/sd-images/index.ts +++ b/src/modules/sd-images/index.ts @@ -49,7 +49,10 @@ export class SDImagesBot extends SDImagesBotBase { return refundCallback("Unsupported command"); } - if (promptHasBadWords(operation.prompt)) { + const prompt = operation.prompt + let parsedPrompt = prompt.substring(prompt.indexOf(" ") + 1, prompt.indexOf("--")).trim(); + + if (promptHasBadWords(parsedPrompt)) { console.log(`### promptHasBadWords ${ctx.message?.text}`); await ctx.reply("Your prompt has been flagged for potentially generating illegal or malicious content. If you believe there has been a mistake, please reach out to support."); return refundCallback("Prompt has bad words"); From 31a74ed00fabc81c4a2ba5871e14ffdffc18aaf0 Mon Sep 17 00:00:00 2001 From: fegloff Date: Mon, 4 Sep 2023 17:14:06 -0500 Subject: [PATCH 02/14] add group permission error handling --- src/bot.ts | 34 +++++++++++++----------- src/modules/open-ai/controller/index.ts | 23 +--------------- src/modules/open-ai/index.ts | 34 +++++++++++++++--------- src/modules/sd-images/SDImagesBotBase.ts | 33 ++++++++++++++++++----- 4 files changed, 66 insertions(+), 58 deletions(-) diff --git a/src/bot.ts b/src/bot.ts index 35d711e9..f8f49389 100644 --- a/src/bot.ts +++ b/src/bot.ts @@ -1,4 +1,4 @@ -import {TranslateBot} from "./modules/translate/TranslateBot"; +import { TranslateBot } from "./modules/translate/TranslateBot"; require("events").EventEmitter.defaultMaxListeners = 30; import express from "express"; @@ -98,8 +98,8 @@ function createInitialSessionData(): BotSessionData { }, translate: { languages: [], - enable: false 
- } + enable: false, + }, }; } @@ -270,13 +270,15 @@ const onMessage = async (ctx: OnMessageContext) => { const price = translateBot.getEstimatedPrice(ctx); const isPaid = await payments.pay(ctx, price); - if(isPaid) { - const response = await translateBot.onEvent(ctx, (reason?: string) => { - payments.refundPayment(reason, ctx, price); - }).catch((e) => { - payments.refundPayment(e.message || "Unknown error", ctx, price); - return {next: false}; - }); + if (isPaid) { + const response = await translateBot + .onEvent(ctx, (reason?: string) => { + payments.refundPayment(reason, ctx, price); + }) + .catch((e) => { + payments.refundPayment(e.message || "Unknown error", ctx, price); + return { next: false }; + }); if (!response.next) { return; @@ -284,7 +286,7 @@ const onMessage = async (ctx: OnMessageContext) => { } } - if (openAiBot.isSupportedEvent(ctx)) { + if (await openAiBot.isSupportedEvent(ctx)) { if (ctx.session.openAi.imageGen.isEnabled) { const price = openAiBot.getEstimatedPrice(ctx); const isPaid = await payments.pay(ctx, price!); @@ -438,15 +440,15 @@ bot.command("love", (ctx) => { }); }); -bot.command('stop', (ctx) => { +bot.command("stop", (ctx) => { logger.info("/stop command"); ctx.session.openAi.chatGpt.chatConversation = []; ctx.session.openAi.chatGpt.usage = 0; - ctx.session.openAi.chatGpt.price = 0; + ctx.session.openAi.chatGpt.price = 0; ctx.session.translate.enable = false; - ctx.session.translate.languages = [] - ctx.session.oneCountry.lastDomain = "" -}) + ctx.session.translate.languages = []; + ctx.session.oneCountry.lastDomain = ""; +}); // bot.command("memo", (ctx) => { // ctx.reply(MEMO.text, { // parse_mode: "Markdown", diff --git a/src/modules/open-ai/controller/index.ts b/src/modules/open-ai/controller/index.ts index aa521f87..3d0bdf43 100644 --- a/src/modules/open-ai/controller/index.ts +++ b/src/modules/open-ai/controller/index.ts @@ -14,6 +14,7 @@ import { getChatModelPrice, } from "../api/openAi"; import config from 
"../../../config"; +import { GrammyError } from "grammy"; interface ImageGenPayload { chatId: number; @@ -40,28 +41,6 @@ const logger = pino({ }, }); -export const imgGen = async ( - data: ImageGenPayload, - ctx: OnMessageContext | OnCallBackQueryData -) => { - const { chatId, prompt, numImages, imgSize } = data; - try { - const imgs = await postGenerateImg(prompt, numImages, imgSize); - imgs.map(async (img: any) => { - await ctx - .replyWithPhoto(img.url, { - caption: `/dalle ${prompt}`, - }) - .catch((e) => { - throw e; - }); - }); - return true; - } catch (e: any) { - throw e; - } -}; - export const imgGenEnhanced = async ( data: ImageGenPayload, ctx: OnMessageContext | OnCallBackQueryData diff --git a/src/modules/open-ai/index.ts b/src/modules/open-ai/index.ts index 91808184..00fe7bed 100644 --- a/src/modules/open-ai/index.ts +++ b/src/modules/open-ai/index.ts @@ -9,8 +9,8 @@ import { OnCallBackQueryData, ChatConversation, } from "../types"; -import { getChatModel, getDalleModel, getDalleModelPrice } from "./api/openAi"; -import { alterImg, imgGen, imgGenEnhanced, promptGen } from "./controller"; +import { getChatModel, getDalleModel, getDalleModelPrice, postGenerateImg } from "./api/openAi"; +import { alterImg, imgGenEnhanced, promptGen } from "./controller"; import { appText } from "./utils/text"; import { chatService } from "../../database/services"; import { ChatGPTModelsEnum } from "./types"; @@ -97,9 +97,9 @@ export class OpenAIBot { return false; } - public isSupportedEvent( + public async isSupportedEvent( ctx: OnMessageContext | OnCallBackQueryData - ): boolean { + ): Promise { const hasCommand = ctx.hasCommand( Object.values(SupportedCommands).map((command) => command.name) ); @@ -350,20 +350,24 @@ export class OpenAIBot { prompt = config.openAi.dalle.defaultPrompt; } ctx.chatAction = "upload_photo"; - const payload = { - chatId: ctx.chat?.id!, - prompt: prompt as string, - numImages: await ctx.session.openAi.imageGen.numImages, // lazy load - 
imgSize: await ctx.session.openAi.imageGen.imgSize, // lazy load - }; - await imgGen(payload, ctx); + const numImages = await ctx.session.openAi.imageGen.numImages + const imgSize = await ctx.session.openAi.imageGen.imgSize + const imgs = await postGenerateImg(prompt, numImages, imgSize); + imgs.map(async (img: any) => { + await ctx + .replyWithPhoto(img.url, { + caption: `/dalle ${prompt}`, + }).catch((e) => { + this.onError(ctx,e,MAX_TRIES) + }); + }); } else { await ctx .reply("Bot disabled") .catch((e) => this.onError(ctx, e, MAX_TRIES, "Bot disabled")); } } catch (e) { - this.onError(ctx, e, 3, "There was an error while generating the image"); + this.onError(ctx, e, MAX_TRIES, "There was an error while generating the image"); } }; @@ -808,7 +812,9 @@ export class OpenAIBot { return; } if (e instanceof GrammyError) { - if (e.error_code === 429) { + if (e.error_code === 400 && e.description.includes('not enough rights')) { + ctx.reply('Error: The bot does not have permission to send photos in chat') + } else if (e.error_code === 429) { this.botSuspended = true; const retryAfter = e.parameters.retry_after ? 
e.parameters.retry_after < 60 @@ -830,6 +836,8 @@ export class OpenAIBot { } await sleep(retryAfter * 1000); // wait retryAfter seconds to enable bot this.botSuspended = false; + } else { + this.logger.error(`On method "${e.method}" | ${e.error_code} - ${e.description}`) } } else if (e instanceof OpenAI.APIError) { // 429 RateLimitError diff --git a/src/modules/sd-images/SDImagesBotBase.ts b/src/modules/sd-images/SDImagesBotBase.ts index c649e5d9..24ed0d43 100644 --- a/src/modules/sd-images/SDImagesBotBase.ts +++ b/src/modules/sd-images/SDImagesBotBase.ts @@ -1,8 +1,9 @@ import { SDNodeApi, IModel } from "./api"; import { OnMessageContext, OnCallBackQueryData } from "../types"; import { getTelegramFileUrl, loadFile, sleep, uuidv4 } from "./utils"; -import { InputFile } from "grammy"; +import { GrammyError, InputFile } from "grammy"; import { COMMAND } from './helpers'; +import { Logger, pino } from "pino"; export interface ISession { id: string; @@ -17,12 +18,22 @@ export interface ISession { export class SDImagesBotBase { sdNodeApi: SDNodeApi; + private logger: Logger; private sessions: ISession[] = []; queue: string[] = []; constructor() { this.sdNodeApi = new SDNodeApi(); + this.logger = pino({ + name: "SDImagesBotBase", + transport: { + target: "pino-pretty", + options: { + colorize: true, + }, + }, + }); } createSession = async ( @@ -111,10 +122,18 @@ export class SDImagesBotBase { if (ctx.chat?.id && queueMessageId) { await ctx.api.deleteMessage(ctx.chat?.id, queueMessageId); } - } catch (e) { - console.error(e); - ctx.reply(`Error: something went wrong... Refunding payments`); - refundCallback(); + } catch (e: any) { + if (e instanceof GrammyError) { + if (e.error_code === 400 && e.description.includes('not enough rights')) { + ctx.reply(`Error: The bot does not have permission to send photos in chat... Refunding payments`); + } else { + ctx.reply(`Error: something went wrong... 
Refunding payments`); + } + } else { + this.logger.error(e.toString()); + ctx.reply(`Error: something went wrong... Refunding payments`); + refundCallback(); + } } this.queue = this.queue.filter((v) => v !== uuid); @@ -190,8 +209,8 @@ export class SDImagesBotBase { if (ctx.chat?.id && queueMessageId) { await ctx.api.deleteMessage(ctx.chat?.id, queueMessageId); } - } catch (e) { - console.error(e); + } catch (e: any) { + this.logger.error(e.toString()); ctx.reply(`Error: something went wrong... Refunding payments`); refundCallback(); } From 1a2fc3c867fa86d2be2a0f3864c150b9889825ce Mon Sep 17 00:00:00 2001 From: fegloff Date: Mon, 4 Sep 2023 17:25:14 -0500 Subject: [PATCH 03/14] refactor by adding helper.ts --- src/modules/open-ai/helpers.ts | 175 ++++++++++++++++++++++ src/modules/open-ai/index.ts | 261 +++++++-------------------------- 2 files changed, 232 insertions(+), 204 deletions(-) create mode 100644 src/modules/open-ai/helpers.ts diff --git a/src/modules/open-ai/helpers.ts b/src/modules/open-ai/helpers.ts new file mode 100644 index 00000000..87227291 --- /dev/null +++ b/src/modules/open-ai/helpers.ts @@ -0,0 +1,175 @@ +import config from "../../config"; +import { isValidUrl } from "./utils/web-crawler"; +import { OnMessageContext, OnCallBackQueryData } from "../types"; + +export const SupportedCommands = { + chat: { + name: "chat", + }, + ask: { + name: "ask", + }, + sum: { + name: "sum", + }, + ask35: { + name: "ask35", + }, + new: { + name: "new", + }, + gpt4: { + name: "gpt4", + }, + gpt: { + name: "gpt", + }, + last: { + name: "last", + }, + dalle: { + name: "DALLE", + }, + dalleLC: { + name: "dalle", + }, + genImgEn: { + name: "genImgEn", + }, +}; + +export const MAX_TRIES = 3; + +export const isMentioned = ( + ctx: OnMessageContext | OnCallBackQueryData +): boolean => { + if (ctx.entities()[0]) { + const { offset, text } = ctx.entities()[0]; + const { username } = ctx.me; + if (username === text.slice(1) && offset === 0) { + const prompt = 
ctx.message?.text!.slice(text.length); + if (prompt && prompt.split(" ").length > 0) { + return true; + } + } + } + return false; +}; + +export const hasChatPrefix = (prompt: string): string => { + const prefixList = config.openAi.chatGpt.prefixes.chatPrefix; + for (let i = 0; i < prefixList.length; i++) { + if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) { + return prefixList[i]; + } + } + return ""; +}; + +export const hasDallePrefix = (prompt: string): string => { + const prefixList = config.openAi.chatGpt.prefixes.dallePrefix; + for (let i = 0; i < prefixList.length; i++) { + if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) { + return prefixList[i]; + } + } + return ""; +}; + +export const hasNewPrefix = (prompt: string): string => { + const prefixList = config.openAi.chatGpt.prefixes.newPrefix; + for (let i = 0; i < prefixList.length; i++) { + if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) { + return prefixList[i]; + } + } + return ""; +}; + +export const hasUrl = (prompt: string) => { + const promptArray = prompt.split(" "); + let url = ""; + for (let i = 0; i < promptArray.length; i++) { + if (isValidUrl(promptArray[i])) { + url = promptArray[i]; + promptArray.splice(i, 1); + break; + } + } + return { + url, + newPrompt: promptArray.join(" "), + }; +}; + +export const hasUsernamePassword = (prompt: string) => { + let user = ""; + let password = ""; + const parts = prompt.split(" "); + + for (let i = 0; i < parts.length; i++) { + const part = parts[i].toLowerCase(); + if (part.includes("=")) { + const [keyword, value] = parts[i].split("="); + if (keyword === "user" || keyword === "username") { + user = value; + } else if (keyword === "password" || keyword === "pwd") { + password = value; + } + if (user !== "" && password !== "") { + break; + } + } else if (part === "user") { + user = parts[i + 1]; + } else if (part === "password") { + password = parts[i + 1]; + } + } + return { user, password }; +} + +// doesn't get all the 
special characters like ! +export const hasUserPasswordRegex = (prompt: string) => { + const pattern = + /\b(user=|password=|user|password)\s*([^\s]+)\b.*\b(user=|password=|user|password)\s*([^\s]+)\b/i; + const matches = pattern.exec(prompt); + + let user = ""; + let password = ""; + + if (matches) { + const [_, keyword, word, __, word2] = matches; + if ( + keyword.toLowerCase() === "user" || + keyword.toLowerCase() === "user=" + ) { + user = word; + password = word2; + } else if ( + keyword.toLowerCase() === "password" || + keyword.toLowerCase() === "password=" + ) { + password = word; + user = word2; + } + } + return { user, password }; +} + +export const preparePrompt = async ( + ctx: OnMessageContext | OnCallBackQueryData, + prompt: string +) => { + const msg = await ctx.message?.reply_to_message?.text; + if (msg) { + return `${prompt} ${msg}`; + } + return prompt; +} + + +export const hasPrefix = (prompt: string): string => { + return ( + hasChatPrefix(prompt) || hasDallePrefix(prompt) || hasNewPrefix(prompt) + ); +}; diff --git a/src/modules/open-ai/index.ts b/src/modules/open-ai/index.ts index 00fe7bed..729b0d5d 100644 --- a/src/modules/open-ai/index.ts +++ b/src/modules/open-ai/index.ts @@ -9,7 +9,12 @@ import { OnCallBackQueryData, ChatConversation, } from "../types"; -import { getChatModel, getDalleModel, getDalleModelPrice, postGenerateImg } from "./api/openAi"; +import { + getChatModel, + getDalleModel, + getDalleModelPrice, + postGenerateImg, +} from "./api/openAi"; import { alterImg, imgGenEnhanced, promptGen } from "./controller"; import { appText } from "./utils/text"; import { chatService } from "../../database/services"; @@ -17,50 +22,18 @@ import { ChatGPTModelsEnum } from "./types"; import config from "../../config"; import { sleep } from "../sd-images/utils"; import { - isValidUrl, - getWebContent, - getCrawlerPrice, -} from "./utils/web-crawler"; - -export const SupportedCommands = { - chat: { - name: "chat", - }, - ask: { - name: "ask", - }, 
- sum: { - name: "sum", - }, - ask35: { - name: "ask35", - }, - new: { - name: "new", - }, - gpt4: { - name: "gpt4", - }, - gpt: { - name: "gpt", - }, - last: { - name: "last", - }, - dalle: { - name: "DALLE", - }, - dalleLC: { - name: "dalle", - }, - genImgEn: { - name: "genImgEn", - } -}; + hasChatPrefix, + hasNewPrefix, + hasPrefix, + hasUrl, + hasUsernamePassword, + isMentioned, + MAX_TRIES, + preparePrompt, + SupportedCommands, +} from "./helpers"; +import { getWebContent, getCrawlerPrice } from "./utils/web-crawler"; -const MAX_TRIES = 3; - -// const payments = new BotPayments(); export class OpenAIBot { private logger: Logger; private payments: BotPayments; @@ -83,87 +56,23 @@ export class OpenAIBot { } } - private isMentioned(ctx: OnMessageContext | OnCallBackQueryData): boolean { - if (ctx.entities()[0]) { - const { offset, text } = ctx.entities()[0]; - const { username } = ctx.me; - if (username === text.slice(1) && offset === 0) { - const prompt = ctx.message?.text!.slice(text.length); - if (prompt && prompt.split(" ").length > 0) { - return true; - } - } - } - return false; - } - public async isSupportedEvent( ctx: OnMessageContext | OnCallBackQueryData ): Promise { const hasCommand = ctx.hasCommand( Object.values(SupportedCommands).map((command) => command.name) ); - if (this.isMentioned(ctx)) { + if (isMentioned(ctx)) { return true; } const hasReply = this.isSupportedImageReply(ctx); - const chatPrefix = this.hasPrefix(ctx.message?.text || ""); + const chatPrefix = hasPrefix(ctx.message?.text || ""); if (chatPrefix !== "") { return true; } return hasCommand || hasReply; } - private hasPrefix(prompt: string): string { - return this.hasChatPrefix(prompt) || this.hasDallePrefix(prompt) || this.hasNewPrefix(prompt) - } - - private hasChatPrefix(prompt: string): string { - const prefixList = config.openAi.chatGpt.prefixes.chatPrefix; - for (let i = 0; i < prefixList.length; i++) { - if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) { - return 
prefixList[i]; - } - } - return ""; - } - - private hasDallePrefix(prompt: string): string { - const prefixList = config.openAi.chatGpt.prefixes.dallePrefix; - for (let i = 0; i < prefixList.length; i++) { - if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) { - return prefixList[i]; - } - } - return ""; - } - - private hasNewPrefix(prompt: string): string { - const prefixList = config.openAi.chatGpt.prefixes.newPrefix; - for (let i = 0; i < prefixList.length; i++) { - if (prompt.toLocaleLowerCase().startsWith(prefixList[i])) { - return prefixList[i]; - } - } - return ""; - } - - private hasUrl(prompt: string) { - const promptArray = prompt.split(" "); - let url = ""; - for (let i = 0; i < promptArray.length; i++) { - if (isValidUrl(promptArray[i])) { - url = promptArray[i]; - promptArray.splice(i, 1); - break; - } - } - return { - url, - newPrompt: promptArray.join(" "), - }; - } - public getEstimatedPrice(ctx: any): number { try { const priceAdjustment = config.openAi.chatGpt.priceAdjustment; @@ -229,7 +138,7 @@ export class OpenAIBot { if ( ctx.hasCommand(SupportedCommands.chat.name) || - (ctx.message?.text?.startsWith("chat ") && ctx.chat?.type === 'private') + (ctx.message?.text?.startsWith("chat ") && ctx.chat?.type === "private") ) { ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4; await this.onChat(ctx); @@ -238,17 +147,17 @@ export class OpenAIBot { if ( ctx.hasCommand(SupportedCommands.new.name) || - (ctx.message?.text?.startsWith("new ") && ctx.chat?.type === 'private') + (ctx.message?.text?.startsWith("new ") && ctx.chat?.type === "private") ) { ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4; - await this.onEnd(ctx) + await this.onEnd(ctx); this.onChat(ctx); return; } if ( ctx.hasCommand(SupportedCommands.ask.name) || - (ctx.message?.text?.startsWith("ask ") && ctx.chat?.type === 'private') + (ctx.message?.text?.startsWith("ask ") && ctx.chat?.type === "private") ) { ctx.session.openAi.chatGpt.model = 
ChatGPTModelsEnum.GPT_4; this.onChat(ctx); @@ -275,7 +184,7 @@ export class OpenAIBot { if ( ctx.hasCommand(SupportedCommands.dalle.name) || ctx.hasCommand(SupportedCommands.dalleLC.name) || - (ctx.message?.text?.startsWith("dalle ") && ctx.chat?.type === 'private') + (ctx.message?.text?.startsWith("dalle ") && ctx.chat?.type === "private") ) { this.onGenImgCmd(ctx); return; @@ -293,7 +202,7 @@ export class OpenAIBot { if ( ctx.hasCommand(SupportedCommands.sum.name) || - (ctx.message?.text?.startsWith("sum ") && ctx.chat?.type === 'private') + (ctx.message?.text?.startsWith("sum ") && ctx.chat?.type === "private") ) { this.onSum(ctx); return; @@ -303,18 +212,18 @@ export class OpenAIBot { return; } - if (this.hasChatPrefix(ctx.message?.text || "") !== "") { + if (hasChatPrefix(ctx.message?.text || "") !== "") { this.onPrefix(ctx); return; } - if (this.hasNewPrefix(ctx.message?.text || "") !== "") { - await this.onEnd(ctx) + if (hasNewPrefix(ctx.message?.text || "") !== "") { + await this.onEnd(ctx); this.onPrefix(ctx); return; } - if (this.isMentioned(ctx)) { + if (isMentioned(ctx)) { this.onMention(ctx); return; } @@ -350,15 +259,16 @@ export class OpenAIBot { prompt = config.openAi.dalle.defaultPrompt; } ctx.chatAction = "upload_photo"; - const numImages = await ctx.session.openAi.imageGen.numImages - const imgSize = await ctx.session.openAi.imageGen.imgSize + const numImages = await ctx.session.openAi.imageGen.numImages; + const imgSize = await ctx.session.openAi.imageGen.imgSize; const imgs = await postGenerateImg(prompt, numImages, imgSize); imgs.map(async (img: any) => { await ctx .replyWithPhoto(img.url, { caption: `/dalle ${prompt}`, - }).catch((e) => { - this.onError(ctx,e,MAX_TRIES) + }) + .catch((e) => { + this.onError(ctx, e, MAX_TRIES); }); }); } else { @@ -367,7 +277,12 @@ export class OpenAIBot { .catch((e) => this.onError(ctx, e, MAX_TRIES, "Bot disabled")); } } catch (e) { - this.onError(ctx, e, MAX_TRIES, "There was an error while generating the 
image"); + this.onError( + ctx, + e, + MAX_TRIES, + "There was an error while generating the image" + ); } }; @@ -431,72 +346,6 @@ export class OpenAIBot { } }; - // doesn't get all the special characters like ! - private hasUserPasswordRegex(prompt: string) { - const pattern = - /\b(user=|password=|user|password)\s*([^\s]+)\b.*\b(user=|password=|user|password)\s*([^\s]+)\b/i; - const matches = pattern.exec(prompt); - - let user = ""; - let password = ""; - - if (matches) { - const [_, keyword, word, __, word2] = matches; - if ( - keyword.toLowerCase() === "user" || - keyword.toLowerCase() === "user=" - ) { - user = word; - password = word2; - } else if ( - keyword.toLowerCase() === "password" || - keyword.toLowerCase() === "password=" - ) { - password = word; - user = word2; - } - } - - return { user, password }; - } - - private hasUsernamePassword(prompt: string) { - let user = ""; - let password = ""; - const parts = prompt.split(" "); - - for (let i = 0; i < parts.length; i++) { - const part = parts[i].toLowerCase(); - if (part.includes("=")) { - const [keyword, value] = parts[i].split("="); - if (keyword === "user" || keyword === "username") { - user = value; - } else if (keyword === "password" || keyword === "pwd") { - password = value; - } - if (user !== "" && password !== "") { - break; - } - } else if (part === "user") { - user = parts[i + 1]; - } else if (part === "password") { - password = parts[i + 1]; - } - } - return { user, password }; - } - - private async preparePrompt( - ctx: OnMessageContext | OnCallBackQueryData, - prompt: string - ) { - const msg = await ctx.message?.reply_to_message?.text; - if (msg) { - return `${prompt} ${msg}`; - } - return prompt; - } - async onSum(ctx: OnMessageContext | OnCallBackQueryData) { if (this.botSuspended) { await ctx @@ -506,12 +355,12 @@ export class OpenAIBot { } try { const { prompt } = getCommandNamePrompt(ctx, SupportedCommands); - const { url, newPrompt } = this.hasUrl(prompt); + const { url, newPrompt } 
= hasUrl(prompt); if (url) { let chat: ChatConversation[] = []; this.onWebCrawler( ctx, - await this.preparePrompt(ctx, newPrompt), + await preparePrompt(ctx, newPrompt), chat, url, "sum" @@ -538,7 +387,7 @@ export class OpenAIBot { const chatModel = getChatModel(model); const webCrawlerMaxTokens = chatModel.maxContextTokens - config.openAi.maxTokens * 2; - const { user, password } = this.hasUsernamePassword(prompt); + const { user, password } = hasUsernamePassword(prompt); if (user && password) { // && ctx.chat?.type !== 'private' const maskedPrompt = @@ -625,7 +474,7 @@ export class OpenAIBot { const { username } = ctx.me; const prompt = ctx.message?.text?.slice(username.length + 1) || ""; //@ ctx.session.openAi.chatGpt.requestQueue.push( - await this.preparePrompt(ctx, prompt) + await preparePrompt(ctx, prompt) ); if (!ctx.session.openAi.chatGpt.isProcessingQueue) { ctx.session.openAi.chatGpt.isProcessingQueue = true; @@ -650,9 +499,9 @@ export class OpenAIBot { ctx, SupportedCommands ); - const prefix = this.hasPrefix(prompt); + const prefix = hasPrefix(prompt); ctx.session.openAi.chatGpt.requestQueue.push( - await this.preparePrompt(ctx, prompt.slice(prefix.length)) + await preparePrompt(ctx, prompt.slice(prefix.length)) ); if (!ctx.session.openAi.chatGpt.isProcessingQueue) { ctx.session.openAi.chatGpt.isProcessingQueue = true; @@ -674,7 +523,7 @@ export class OpenAIBot { return; } ctx.session.openAi.chatGpt.requestQueue.push( - await this.preparePrompt(ctx, ctx.message?.text!) + await preparePrompt(ctx, ctx.message?.text!) ); if (!ctx.session.openAi.chatGpt.isProcessingQueue) { ctx.session.openAi.chatGpt.isProcessingQueue = true; @@ -697,7 +546,7 @@ export class OpenAIBot { } const prompt = ctx.match ? 
ctx.match : ctx.message?.text; ctx.session.openAi.chatGpt.requestQueue.push( - await this.preparePrompt(ctx, prompt as string) + await preparePrompt(ctx, prompt as string) ); if (!ctx.session.openAi.chatGpt.isProcessingQueue) { ctx.session.openAi.chatGpt.isProcessingQueue = true; @@ -728,7 +577,7 @@ export class OpenAIBot { .catch((e) => this.onError(ctx, e)); return; } - const { url, newPrompt } = this.hasUrl(prompt); + const { url, newPrompt } = hasUrl(prompt); if (url) { await this.onWebCrawler( ctx, @@ -784,7 +633,7 @@ export class OpenAIBot { ctx.session.openAi.chatGpt.usage = 0; ctx.session.openAi.chatGpt.price = 0; } - + async onNotBalanceMessage(ctx: OnMessageContext | OnCallBackQueryData) { const accountId = this.payments.getAccountId(ctx as OnMessageContext); const account = await this.payments.getUserAccount(accountId); @@ -812,8 +661,10 @@ export class OpenAIBot { return; } if (e instanceof GrammyError) { - if (e.error_code === 400 && e.description.includes('not enough rights')) { - ctx.reply('Error: The bot does not have permission to send photos in chat') + if (e.error_code === 400 && e.description.includes("not enough rights")) { + ctx.reply( + "Error: The bot does not have permission to send photos in chat" + ); } else if (e.error_code === 429) { this.botSuspended = true; const retryAfter = e.parameters.retry_after @@ -837,7 +688,9 @@ export class OpenAIBot { await sleep(retryAfter * 1000); // wait retryAfter seconds to enable bot this.botSuspended = false; } else { - this.logger.error(`On method "${e.method}" | ${e.error_code} - ${e.description}`) + this.logger.error( + `On method "${e.method}" | ${e.error_code} - ${e.description}` + ); } } else if (e instanceof OpenAI.APIError) { // 429 RateLimitError From 1bd5288dbb8284f49f69d06553748727f67e1bbf Mon Sep 17 00:00:00 2001 From: fegloff Date: Mon, 4 Sep 2023 23:18:26 -0500 Subject: [PATCH 04/14] add topic handler for dalle + SDImageBotBase + minor refactoring --- src/modules/open-ai/helpers.ts | 13 
+++++++ src/modules/open-ai/index.ts | 24 ++++++++---- src/modules/sd-images/SDImagesBotBase.ts | 49 +++++++++++++++++------- src/modules/types.ts | 5 +++ 4 files changed, 70 insertions(+), 21 deletions(-) diff --git a/src/modules/open-ai/helpers.ts b/src/modules/open-ai/helpers.ts index 87227291..714eb62b 100644 --- a/src/modules/open-ai/helpers.ts +++ b/src/modules/open-ai/helpers.ts @@ -167,6 +167,19 @@ export const preparePrompt = async ( return prompt; } +export const messageTopic = async (ctx: OnMessageContext | OnCallBackQueryData) => { + return await ctx.message?.message_thread_id +} + +export const sendMessage = async (ctx: OnMessageContext | OnCallBackQueryData, msg: string) => { + const topic = await messageTopic(ctx) + if (topic) { + ctx.reply(msg, { + message_thread_id: topic, + parse_mode: 'Markdown' + }) + } +} export const hasPrefix = (prompt: string): string => { return ( diff --git a/src/modules/open-ai/index.ts b/src/modules/open-ai/index.ts index 729b0d5d..23147f47 100644 --- a/src/modules/open-ai/index.ts +++ b/src/modules/open-ai/index.ts @@ -8,6 +8,7 @@ import { OnMessageContext, OnCallBackQueryData, ChatConversation, + MessageExtras } from "../types"; import { getChatModel, @@ -29,7 +30,9 @@ import { hasUsernamePassword, isMentioned, MAX_TRIES, + messageTopic, preparePrompt, + sendMessage, SupportedCommands, } from "./helpers"; import { getWebContent, getCrawlerPrice } from "./utils/web-crawler"; @@ -59,6 +62,7 @@ export class OpenAIBot { public async isSupportedEvent( ctx: OnMessageContext | OnCallBackQueryData ): Promise { + const hasCommand = ctx.hasCommand( Object.values(SupportedCommands).map((command) => command.name) ); @@ -262,11 +266,16 @@ export class OpenAIBot { const numImages = await ctx.session.openAi.imageGen.numImages; const imgSize = await ctx.session.openAi.imageGen.imgSize; const imgs = await postGenerateImg(prompt, numImages, imgSize); + const topicId = await messageTopic(ctx) + let msgExtras: MessageExtras = { + caption: 
`/dalle ${prompt}` + } + if (topicId) { + msgExtras['message_thread_id'] = topicId + } imgs.map(async (img: any) => { await ctx - .replyWithPhoto(img.url, { - caption: `/dalle ${prompt}`, - }) + .replyWithPhoto(img.url, msgExtras) .catch((e) => { this.onError(ctx, e, MAX_TRIES); }); @@ -366,7 +375,8 @@ export class OpenAIBot { "sum" ); } else { - ctx.reply(`Error: Missing url`); + await sendMessage(ctx, `Error: Missing url`) + // ctx.reply(); } } catch (e) { this.onError(ctx, e); @@ -395,7 +405,7 @@ export class OpenAIBot { ?.text!.replaceAll(user, "****") .replaceAll(password, "*****") || ""; ctx.api.deleteMessage(ctx.chat?.id!, ctx.message?.message_id!); - ctx.reply(maskedPrompt); + sendMessage(ctx,maskedPrompt) } const webContent = await getWebContent( url, @@ -662,9 +672,7 @@ export class OpenAIBot { } if (e instanceof GrammyError) { if (e.error_code === 400 && e.description.includes("not enough rights")) { - ctx.reply( - "Error: The bot does not have permission to send photos in chat" - ); + await sendMessage(ctx, "Error: The bot does not have permission to send photos in chat") } else if (e.error_code === 429) { this.botSuspended = true; const retryAfter = e.parameters.retry_after diff --git a/src/modules/sd-images/SDImagesBotBase.ts b/src/modules/sd-images/SDImagesBotBase.ts index 68546b77..ed365ba7 100644 --- a/src/modules/sd-images/SDImagesBotBase.ts +++ b/src/modules/sd-images/SDImagesBotBase.ts @@ -1,5 +1,5 @@ import { SDNodeApi, IModel } from "./api"; -import { OnMessageContext, OnCallBackQueryData } from "../types"; +import { OnMessageContext, OnCallBackQueryData, MessageExtras } from "../types"; import { getTelegramFileUrl, loadFile, sleep, uuidv4 } from "./utils"; import { GrammyError, InputFile } from "grammy"; import { COMMAND } from './helpers'; @@ -79,9 +79,13 @@ export class SDImagesBotBase { this.queue.push(uuid); let idx = this.queue.findIndex((v) => v === uuid); - + const topicId = await ctx.message?.message_thread_id + let msgExtras: 
MessageExtras = {} + if (topicId) { + msgExtras['message_thread_id'] = topicId + } const { message_id } = await ctx.reply( - `You are #${idx + 1}, wait about ${(idx + 1) * 15} seconds` + `You are #${idx + 1}, wait about ${(idx + 1) * 15} seconds`, msgExtras ); // waiting queue @@ -120,23 +124,33 @@ export class SDImagesBotBase { : `/${model.aliases[0]} ${prompt}`; - await ctx.replyWithPhoto(new InputFile(imageBuffer), { - caption: reqMessage, - }); + const topicId = await ctx.message?.message_thread_id + let msgExtras: MessageExtras = { + caption: reqMessage + } + if (topicId) { + msgExtras['message_thread_id'] = topicId + } + await ctx.replyWithPhoto(new InputFile(imageBuffer),msgExtras); if (ctx.chat?.id && queueMessageId) { await ctx.api.deleteMessage(ctx.chat?.id, queueMessageId); } } catch (e: any) { - if (e instanceof GrammyError) { + const topicId = await ctx.message?.message_thread_id + let msgExtras: MessageExtras = {} + if (topicId) { + msgExtras['message_thread_id'] = topicId + } + if (e instanceof GrammyError) { if (e.error_code === 400 && e.description.includes('not enough rights')) { - ctx.reply(`Error: The bot does not have permission to send photos in chat... Refunding payments`); + ctx.reply(`Error: The bot does not have permission to send photos in chat... Refunding payments`, msgExtras); } else { - ctx.reply(`Error: something went wrong... Refunding payments`); + ctx.reply(`Error: something went wrong... Refunding payments`, msgExtras) } } else { this.logger.error(e.toString()); - ctx.reply(`Error: something went wrong... Refunding payments`); + ctx.reply(`Error: something went wrong... 
Refunding payments`, msgExtras); refundCallback(); } } @@ -197,7 +211,11 @@ export class SDImagesBotBase { `${session.message} ${prompt}` : `/${model.aliases[0]} ${prompt}`; - + const topicId = await ctx.message?.message_thread_id + let msgExtras: MessageExtras = {} + if (topicId) { + msgExtras['message_thread_id'] = topicId + } await ctx.replyWithMediaGroup([ { type: "photo", @@ -209,14 +227,19 @@ export class SDImagesBotBase { media: new InputFile(imageBuffer), // caption: reqMessage, } - ]); + ],msgExtras); if (ctx.chat?.id && queueMessageId) { await ctx.api.deleteMessage(ctx.chat?.id, queueMessageId); } } catch (e: any) { + const topicId = await ctx.message?.message_thread_id + let msgExtras: MessageExtras = {} + if (topicId) { + msgExtras['message_thread_id'] = topicId + } this.logger.error(e.toString()); - ctx.reply(`Error: something went wrong... Refunding payments`); + ctx.reply(`Error: something went wrong... Refunding payments`, msgExtras); refundCallback(); } diff --git a/src/modules/types.ts b/src/modules/types.ts index 335b5623..a52d7132 100644 --- a/src/modules/types.ts +++ b/src/modules/types.ts @@ -6,12 +6,17 @@ import { type ConversationFlavor, } from "@grammyjs/conversations"; import { AutoChatActionFlavor } from "@grammyjs/auto-chat-action"; + export interface ImageGenSessionData { numImages: number; imgSize: string; isEnabled: boolean; } +export interface MessageExtras { + caption?: string, + message_thread_id?: number +} export interface ChatCompletion { completion: string; usage: number; From a0bf0309b6eba3b49fb8236bedd5d9c17fd9af35 Mon Sep 17 00:00:00 2001 From: Yuriy Date: Tue, 5 Sep 2023 15:30:25 +0300 Subject: [PATCH 05/14] SD Images: added more Loras --- src/modules/sd-images/api/index.ts | 6 +-- src/modules/sd-images/api/loras-config.ts | 56 +++++++++++++++++++---- src/modules/sd-images/helpers.ts | 4 +- 3 files changed, 51 insertions(+), 15 deletions(-) diff --git a/src/modules/sd-images/api/index.ts b/src/modules/sd-images/api/index.ts 
index b900533b..7e15c62f 100644 --- a/src/modules/sd-images/api/index.ts +++ b/src/modules/sd-images/api/index.ts @@ -31,14 +31,10 @@ export class SDNodeApi { selectedLora = options.lora; loraStrength = 1; } else if (params.loraName) { - selectedLora = getLoraByParam(params.loraName); + selectedLora = getLoraByParam(params.loraName, options.model.baseModel); loraStrength = params.loraStrength; } - if (selectedLora && selectedLora.baseModel !== options.model.baseModel) { - selectedLora = undefined; - } - if (selectedLora?.shortName === 'logo') { params.promptWithoutParams = `logo, ${params.promptWithoutParams}, LogoRedAF`; } diff --git a/src/modules/sd-images/api/loras-config.ts b/src/modules/sd-images/api/loras-config.ts index c4d9ee73..00fe09bf 100644 --- a/src/modules/sd-images/api/loras-config.ts +++ b/src/modules/sd-images/api/loras-config.ts @@ -15,10 +15,30 @@ export const LORAS_CONFIGS: ILora[] = [ name: 'Detail Tweaker LoRA', id: '58390', hash: '47AAAF0D29', - shortName: 'add_detail', + shortName: 'detail', link: 'https://civitai.com/models/58390/detail-tweaker-lora-lora', baseModel: 'SD 1.5', - aliases: ['add_detail'], + aliases: ['add_detail', 'detail'], + }, + { + path: 'add-detail-xl.safetensors', + name: 'Detail Tweaker XL', + id: '122359', + hash: '0D9BD1B873', + shortName: 'detail', + link: 'https://civitai.com/models/122359/detail-tweaker-xl', + baseModel: 'SDXL 1.0', + aliases: ['add-detail-xl', 'add_detail', 'detail'], + }, + { + path: 'logo_20230705215526.safetensors', + name: 'logo 设计 Lora', + id: '104072', + hash: '0AAD77BD39', + shortName: 'logo', + link: 'https://civitai.com/models/104072/logo-lora', + baseModel: 'SD 1.5', + aliases: ['logo_20230705215526', 'logo'], }, { path: 'LogoRedmond_LogoRedAF.safetensors', @@ -29,15 +49,35 @@ export const LORAS_CONFIGS: ILora[] = [ link: 'https://civitai.com/models/124609/logoredmond-logo-lora-for-sd-xl-10', baseModel: 'SDXL 1.0', aliases: ['LogoRedmond_LogoRedAF', 'logo'], - } + }, + { + path: 
'pixelartV3.safetensors', + name: 'Pixel art style', + id: '43820', + hash: '8A2E1EA746', + shortName: 'pixel', + link: 'https://civitai.com/models/43820/pixel-art-style', + baseModel: 'SD 1.5', + aliases: ['pixel-art-style', 'pixel'], + }, + { + path: 'pixel-art-xl-v1.1.safetensors', + name: 'Pixel Art XL', + id: '120096', + hash: 'BBF3D8DEFB', + shortName: 'pixel', + link: 'https://civitai.com/models/120096/pixel-art-xl', + baseModel: 'SDXL 1.0', + aliases: ['pixel-art-xl', 'pixel'], + }, ]; -export const getLoraByParam = (param: string) => { +export const getLoraByParam = (param: string, baseModel: 'SD 1.5' | 'SDXL 1.0') => { const model = LORAS_CONFIGS.find(m => - m.id === param || - m.hash === param || - m.shortName === param || - m.aliases.includes(param) + (m.id === param || + m.hash === param || + m.shortName === param || + m.aliases.includes(param)) && m.baseModel === baseModel ); return model; diff --git a/src/modules/sd-images/helpers.ts b/src/modules/sd-images/helpers.ts index 71870ad0..fb406548 100644 --- a/src/modules/sd-images/helpers.ts +++ b/src/modules/sd-images/helpers.ts @@ -105,8 +105,8 @@ export const parseCtx = (ctx: Context): IOperation | false => { hasCommand(ctx, 'logo') || hasCommand(ctx, 'l') ) { command = COMMAND.TEXT_TO_IMAGE; - lora = getLoraByParam('logo'); model = getModelByParam('xl'); + lora = getLoraByParam('logo', model?.baseModel || 'SDXL 1.0'); } if (hasCommand(ctx, 'images')) { @@ -140,8 +140,8 @@ export const parseCtx = (ctx: Context): IOperation | false => { } if (messageText.startsWith('l.')) { - lora = getLoraByParam('logo'); model = getModelByParam('xl'); + lora = getLoraByParam('logo', model?.baseModel || 'SDXL 1.0'); } if (!model) { From ac0053fb8213002d4cabb5d9c20fe8baf04dd851 Mon Sep 17 00:00:00 2001 From: Yuriy Date: Tue, 5 Sep 2023 16:09:18 +0300 Subject: [PATCH 06/14] SD Images: fix --- src/modules/sd-images/api/helpers.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/src/modules/sd-images/api/helpers.ts b/src/modules/sd-images/api/helpers.ts index 128c33ea..d3a3e573 100644 --- a/src/modules/sd-images/api/helpers.ts +++ b/src/modules/sd-images/api/helpers.ts @@ -123,7 +123,7 @@ export const getParamsFromPrompt = (originalPrompt: string, model: IModel): IPar if (loraMatch) { loraName = loraMatch[1]; - loraStrength = Number(loraMatch[1]); + loraStrength = Number(loraMatch[2]); prompt = prompt.replace(//, ''); } From 2c7adeb78771d0c2d61afc99887b2d748f1b5e7a Mon Sep 17 00:00:00 2001 From: ahiipsa Date: Tue, 5 Sep 2023 19:12:38 +0400 Subject: [PATCH 07/14] Added user engagement by command --- src/database/stats.service.ts | 25 +++++++++++++++++++++++++ src/modules/payment/index.ts | 4 +++- src/modules/schedule/index.ts | 19 ++++++++++++++++--- 3 files changed, 44 insertions(+), 4 deletions(-) diff --git a/src/database/stats.service.ts b/src/database/stats.service.ts index f2b05051..01f5a395 100644 --- a/src/database/stats.service.ts +++ b/src/database/stats.service.ts @@ -18,6 +18,12 @@ export interface BotPaymentLog { amountCredits: number } +export interface EngagementByCommand { + command: string, + commandCount: string, + oneAmount: string, +} + export class StatsService { public writeLog(log: BotPaymentLog) { let paymentLog = new BotLog() @@ -87,6 +93,25 @@ export class StatsService { return rows.length ? 
+rows[0].count : 0 } + public async getUserEngagementByCommand(daysPeriod = 7): Promise { + const currentTime = moment(); + const dateStart = moment() + .tz('America/Los_Angeles') + .set({ hour: 0, minute: 0, second: 0 }) + .subtract(daysPeriod,'days') + .unix() + + const dateEnd = currentTime.unix(); + + const rows = await logRepository.createQueryBuilder('logs') + .select('logs.command, count(logs.command) as "commandCount", SUM(logs.amountOne) as "oneAmount"') + .groupBy('logs.command') + .where(`logs.createdAt BETWEEN TO_TIMESTAMP(${dateStart}) and TO_TIMESTAMP(${dateEnd})`) + .orderBy('"commandCount"', 'DESC').limit(10).execute(); + + return rows; + } + public addCommandStat({tgUserId, rawMessage, command}: {tgUserId: number, rawMessage: string, command: string}) { const stat = new StatBotCommand(); diff --git a/src/modules/payment/index.ts b/src/modules/payment/index.ts index 8e94c43c..cdfb56c9 100644 --- a/src/modules/payment/index.ts +++ b/src/modules/payment/index.ts @@ -272,7 +272,9 @@ export class BotPayments { let [command = ''] = text.split(' ') if(!command) { if(audio || voice) { - command = 'voice-memo' + command = '/voice-memo' + } else { + command = '/openai' } } diff --git a/src/modules/schedule/index.ts b/src/modules/schedule/index.ts index a6a919d4..3cc401a5 100644 --- a/src/modules/schedule/index.ts +++ b/src/modules/schedule/index.ts @@ -139,6 +139,16 @@ export class BotSchedule { return report; } + public async generateReportEngagementByCommand(days: number) { + const dbRows = await statsService.getUserEngagementByCommand(days); + + const rows = dbRows.map((row) => { + return `${abbreviateNumber(+row.commandCount).padEnd(4)} ${row.command}` + }) + + return "```\n" + rows.join('\n') + "\n```"; + } + public async generateFullReport() { const [ botFeesReport, @@ -148,7 +158,8 @@ export class BotSchedule { totalCredits, weeklyUsers, totalMessages, - totalSupportedMessages + totalSupportedMessages, + engagementByCommand, ] = await Promise.all([ 
this.getBotFeeReport(this.holderAddress), getBotFee(this.holderAddress, 7), @@ -157,7 +168,8 @@ statsService.getTotalFreeCredits(), statsService.getActiveUsers(7), statsService.getTotalMessages(7), - statsService.getTotalMessages(7, true) + statsService.getTotalMessages(7, true), + this.generateReportEngagementByCommand(7), ]) const report = `\nBot fees: *${botFeesReport}*` + @@ -167,7 +179,8 @@ `\nTotal fees users pay in free credits: *${abbreviateNumber(totalCredits)}*` + `\nWeekly active users: *${abbreviateNumber(weeklyUsers)}*` + `\nWeekly user engagement (any commands): *${abbreviateNumber(totalMessages)}*` + - `\nWeekly user engagement (commands supported by bot): *${abbreviateNumber(totalSupportedMessages)}*` + `\nWeekly user engagement (commands supported by bot): *${abbreviateNumber(totalSupportedMessages)}*` + + `\n\n${engagementByCommand}` return report; } From 318291121da69dfc6bbbe56621bab374106ffabb Mon Sep 17 00:00:00 2001 From: Aishlia Date: Tue, 5 Sep 2023 10:57:15 -0700 Subject: [PATCH 08/14] Add support for double dash or em dash flag --- src/modules/sd-images/api/helpers.ts | 37 ++++++++++++++-------------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/src/modules/sd-images/api/helpers.ts b/src/modules/sd-images/api/helpers.ts index 128c33ea..d26f51ad 100644 --- a/src/modules/sd-images/api/helpers.ts +++ b/src/modules/sd-images/api/helpers.ts @@ -29,7 +29,8 @@ export const getParamsFromPrompt = (originalPrompt: string, model: IModel): IPar let prompt = originalPrompt; // --ar Aspect ratio flag : - const aspectRatioMatch = prompt.match(/--ar\s+(\d+:\d+)/); + const aspectRatioMatch = prompt.match(/(?:--|—)ar\s+(\d+:\d+)/); + let width = model.baseModel === 'SDXL 1.0' ? 1024 : 512; let height = model.baseModel === 'SDXL 1.0' ?
1024 : 768; @@ -42,79 +43,79 @@ export const getParamsFromPrompt = (originalPrompt: string, model: IModel): IPar height = Math.round(aspectHeight * scaleFactor); } - prompt = prompt.replace(/--ar\s+(\d+:\d+)/, ''); + prompt = prompt.replace(/(?:--|—)ar\s+(\d+:\d+)/, ''); } // --d Dimensions flag x - const dimensionsMatch = prompt.match(/--d\s+(\d+x\d+)/); + const dimensionsMatch = prompt.match(/(?:--|—)d\s+(\d+x\d+)/); if (dimensionsMatch) { const dimensions = dimensionsMatch[1]; [width, height] = dimensions.split('x').map(Number); - prompt = prompt.replace(/--d\s+(\d+x\d+)/, ''); + prompt = prompt.replace(/(?:--|—)d\s+(\d+x\d+)/, ''); } // --cfg cfgScale flag - const cfgScaleMatch = prompt.match(/--cfg\s+(\d+(\.\d+)?)/); + const cfgScaleMatch = prompt.match(/(?:--|—)cfg\s+(\d+(\.\d+)?)/); let cfgScale = 7.0; if (cfgScaleMatch) { cfgScale = parseFloat(cfgScaleMatch[1]); - prompt = prompt.replace(/--cfg\s+(\d+(\.\d+)?)/, ''); + prompt = prompt.replace(/(?:--|—)cfg\s+(\d+(\.\d+)?)/, ''); } // --steps Steps flag - const stepsMatch = prompt.match(/--steps\s+(\d+)/); + const stepsMatch = prompt.match(/(?:--|—)steps\s+(\d+)/); let steps = 26; if (stepsMatch) { steps = parseInt(stepsMatch[1]); - prompt = prompt.replace(/--steps\s+(\d+)/, ''); + prompt = prompt.replace(/(?:--|—)steps\s+(\d+)/, ''); } - // --c Controlnet flag - const controlnetVersionMatch = prompt.match(/--c\s+(\d+)/); + // --c Controlnet version flag + const controlnetVersionMatch = prompt.match(/(?:--|—)c\s+(\d+)/); let controlnetVersion = 1; if (controlnetVersionMatch) { controlnetVersion = parseInt(controlnetVersionMatch[1]); - prompt = prompt.replace(/--c\s+(\d+)/, ''); + prompt = prompt.replace(/(?:--|—)c\s+(\d+)/, ''); } let seed; // --seed cfgScale flag - const seedMatch = prompt.match(/--seed\s+(\d+)/); + const seedMatch = prompt.match(/(?:--|—)seed\s+(\d+)/); if (seedMatch) { seed = parseInt(seedMatch[1]); - prompt = prompt.replace(/--seed\s+(\d+)/, ''); + prompt = prompt.replace(/(?:--|—)seed\s+(\d+)/,
''); } let denoise; - // --seed cfgScale flag - const denoiseMatch = prompt.match(/--denoise\s+(\d+\.\d+)/); + // --denoise Denoise scale flag + const denoiseMatch = prompt.match(/(?:--|—)denoise\s+(\d+\.\d+)/); if (denoiseMatch) { denoise = Number(denoiseMatch[1]); - prompt = prompt.replace(/--denoise\s+(\d+\.\d+)/, ''); + prompt = prompt.replace(/(?:--|—)denoise\s+(\d+\.\d+)/, ''); } // --no Negative prompt flag - const noMatch = prompt.match(/--no\s+(.+?)(?=\s+--|$)/); + const noMatch = prompt.match(/(?:--|—)no\s+(.+?)(?=\s+(?:--|—)|$)/); let negativePrompt = NEGATIVE_PROMPT; if (noMatch) { negativePrompt = noMatch[1].trim(); - prompt = prompt.replace(/--no\s+(.+?)(?=\s+--|$)/, ''); + prompt = prompt.replace(/(?:--|—)no\s+(.+?)(?=\s+(?:--|—)|$)/, ''); } const loraMatch = prompt.match(//); From aa45461a294bb6f8c4dc996c95911f92c01f4110 Mon Sep 17 00:00:00 2001 From: Aishlia Date: Tue, 5 Sep 2023 11:38:49 -0700 Subject: [PATCH 09/14] Added more words to banned list --- src/modules/sd-images/words-blacklist.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/modules/sd-images/words-blacklist.ts b/src/modules/sd-images/words-blacklist.ts index 385be568..806ddad9 100644 --- a/src/modules/sd-images/words-blacklist.ts +++ b/src/modules/sd-images/words-blacklist.ts @@ -5,7 +5,7 @@ export const childrenWords = [ export const sexWords = [ // Sexually Explicit Words - 'sex', 'seks', 'ahegao', 'pinup', 'ballgag', 'Playboy', 'Bimbo', 'pleasure', 'bodily fluids', 'pleasures', 'boudoir', 'rule34', 'brothel', 'seducing', 'dominatrix', 'seductive', 'erotic seductive', 'fuck', 'sensual', 'Hardcore', 'sexy', 'Hentai', 'Shag', 'horny', 'shibari', 'incest', 'Smut', 'jav', 'succubus', 'Jerk off king at pic', 'thot', 'kinbaku', 'transparent', 'legs spread', 'twerk', 'making love', 'voluptuous', 'naughty', 'wincest', 'orgy', 'Sultry', 'XXX', 'Bondage', 'Bdsm', 'Dog collar', 'Slavegirl', 'Transparent and Translucent', 'Arse', 'Labia', 'Ass', 'Mammaries', 'Human centipede',
'Badonkers', 'Minge', 'Massive chests', 'Big Ass', 'Mommy Milker', 'Booba', 'Nipple', 'Booty', 'Oppai ', 'Bosom', 'Organs', 'Breasts', 'Ovaries', 'Busty', 'Penis', 'Clunge', 'Phallus', 'Crotch', 'sexy female', 'Dick', 'Skimpy', 'Girth', 'Thick', 'Honkers', 'Vagina', 'Hooters', 'Veiny', 'Knob', 'no clothes', 'Speedo', 'au naturale', 'no shirt', 'bare chest', 'nude', 'barely dressed', 'bra', 'risqué', 'clear', 'scantily', 'clad', 'cleavage', 'stripped', 'full frontal unclothed', 'invisible clothes', 'wearing nothing', 'lingerie with no shirt', 'naked', 'without clothes on', 'negligee', 'zero clothes', 'Pornography', 'Explicit', 'Adult', 'NSFW (Not Safe For Work)', 'XXX', 'Erotic', 'Sexual', 'Sensual', 'Intimate', 'Nudity', 'Obscene', 'Vulgar', 'Graphic', 'Hardcore', 'Fetish', 'Kink', 'Erotic art', 'Erotica', 'Pornographic', 'Nude', 'Provocative', 'Lewd', 'Passionate', 'Seductive', 'X-rated', 'Adult content', 'Sexually explicit', 'Mature content', 'Adult entertainment', 'Erotic imagery', 'Desirable', 'Pleasure', 'Lust', 'Orgasm', 'Indecent', 'Raunchy', 'Steamy', 'Uncensored', 'Naughty', 'Bedroom', 'Censored', 'Bodily', 'Erogenous', 'Adult industry', 'Graphic content', 'Sensuous', 'Taboo', 'Forbidden', 'Private parts', 'Erotic literature', 'Sexual encounter', 'Intimate moment', 'Adult film', 'Provocative pose', 'Erotic scene', 'Naked', 'Swimsuit', 'Lingerie', 'Bikini', 'Underwear', 'Intercourse', 'Erogenous zones', 'Pleasure zones', 'Kinky', 'Dominatrix', 'Swingers', 'Threesome', 'Gangbang', 'BDSM', 'Escort services', 'Camgirl/Camboy', 'Virtual adult entertainment', 'Phone sex', 'Porn star', 'Pornographic materials', 'Erotic chat', 'Online dating', 'Hookup', 'Cybersex', 'Explicit language', 'Sex talk', 'Sexual innuendo', 'Condom', 'Lubricant', 'Vibrator', 'Dildo', 'Prostitution', 'Adult toys', 'Sex act names', 'blowjob', 'anal', 'doggy style', 'Sexual positions', 'Erotic massage', 'Nudist/naturist', 'Exhibitionist', 'Voyeurism', 'Adult chat', 'Online adult 
communities', 'Live streaming adult content', 'Erotic roleplay', 'Adult dating sites', 'Erotic', 'Sexually explicit stories', 'Nipple', 'Genitals', 'Lolicon', 'Shotacon', 'porn', 'Torture', 'Disturbing', 'Farts', 'Fart', 'Poop', 'Errect', 'Big Black', 'Voluptuous', 'Seductive', 'Sperm', 'Hot', 'Sexy', 'Sensored', 'Censored', 'Silenced', 'Deepfake', 'Inappropriate', 'Waifu', 'mp5', 'Succubus', + 'sex', 'seks', 'ahegao', 'pinup', 'ballgag', 'Playboy', 'Bimbo', 'pleasure', 'bodily fluids', 'pleasures', 'boudoir', 'rule34', 'brothel', 'seducing', 'dominatrix', 'seductive', 'erotic seductive', 'fuck', 'sensual', 'Hardcore', 'sexy', 'Hentai', 'Shag', 'horny', 'shibari', 'incest', 'Smut', 'jav', 'succubus', 'Jerk off king at pic', 'thot', 'kinbaku', 'transparent', 'legs spread', 'twerk', 'making love', 'voluptuous', 'naughty', 'wincest', 'orgy', 'Sultry', 'XXX', 'Bondage', 'Bdsm', 'Dog collar', 'Slavegirl', 'Transparent and Translucent', 'Arse', 'Labia', 'Ass', 'Mammaries', 'Human centipede', 'Badonkers', 'Minge', 'Massive chests', 'Big Ass', 'Mommy Milker', 'Booba', 'Nipple', 'Booty', 'Oppai ', 'Bosom', 'Organs', 'Breasts', 'Ovaries', 'Busty', 'Penis', 'Clunge', 'Phallus', 'Crotch', 'sexy female', 'Dick', 'Skimpy', 'Girth', 'Thick', 'Honkers', 'Vagina', 'Hooters', 'Veiny', 'Knob', 'no clothes', 'Speedo', 'au naturale', 'no shirt', 'bare chest', 'nude', 'barely dressed', 'bra', 'risqué', 'clear', 'scantily', 'clad', 'cleavage', 'stripped', 'full frontal unclothed', 'invisible clothes', 'wearing nothing', 'lingerie with no shirt', 'naked', 'without clothes on', 'negligee', 'zero clothes', 'Pornography', 'Explicit', 'Adult', 'NSFW (Not Safe For Work)', 'XXX', 'Erotic', 'Sexual', 'Sensual', 'Intimate', 'Nudity', 'Obscene', 'Vulgar', 'Graphic', 'Hardcore', 'Fetish', 'Kink', 'Erotic art', 'Erotica', 'Pornographic', 'Nude', 'Provocative', 'Lewd', 'Passionate', 'Seductive', 'X-rated', 'Adult content', 'Sexually explicit', 'Mature content', 'Adult entertainment', 'Erotic 
imagery', 'Desirable', 'Pleasure', 'Lust', 'Orgasm', 'Indecent', 'Raunchy', 'Steamy', 'Uncensored', 'Naughty', 'Bedroom', 'Censored', 'Bodily', 'Erogenous', 'Adult industry', 'Graphic content', 'Sensuous', 'Taboo', 'Forbidden', 'Private parts', 'Erotic literature', 'Sexual encounter', 'Intimate moment', 'Adult film', 'Provocative pose', 'Erotic scene', 'Naked', 'Swimsuit', 'Lingerie', 'Bikini', 'Underwear', 'Intercourse', 'Erogenous zones', 'Pleasure zones', 'Kinky', 'Dominatrix', 'Swingers', 'Threesome', 'Gangbang', 'BDSM', 'Escort services', 'Camgirl/Camboy', 'Virtual adult entertainment', 'Phone sex', 'Porn star', 'Pornographic materials', 'Erotic chat', 'Online dating', 'Hookup', 'Cybersex', 'Explicit language', 'Sex talk', 'Sexual innuendo', 'Condom', 'Lubricant', 'Vibrator', 'Dildo', 'Prostitution', 'Adult toys', 'Sex act names', 'blowjob', 'anal', 'doggy style', 'Sexual positions', 'Erotic massage', 'Nudist/naturist', 'Exhibitionist', 'Voyeurism', 'Adult chat', 'Online adult communities', 'Live streaming adult content', 'Erotic roleplay', 'Adult dating sites', 'Erotic', 'Sexually explicit stories', 'Nipple', 'Genitals', 'Lolicon', 'Shotacon', 'porn', 'Torture', 'Disturbing', 'Farts', 'Fart', 'Poop', 'Errect', 'Big Black', 'Voluptuous', 'Seductive', 'Sperm', 'Hot', 'Sexy', 'Sensored', 'Censored', 'Silenced', 'Deepfake', 'Inappropriate', 'Waifu', 'mp5', 'Succubus','testicle','testis' ] export const tabooWords = [ From 226d0055dbb330224e944e8562e62dc8caf51ddc Mon Sep 17 00:00:00 2001 From: fegloff Date: Tue, 5 Sep 2023 14:23:02 -0500 Subject: [PATCH 10/14] refactor OpenAI class + add topic support --- src/modules/open-ai/api/openAi.ts | 7 + src/modules/open-ai/controller/index.ts | 147 ---------- src/modules/open-ai/helpers.ts | 90 ++++-- src/modules/open-ai/index.ts | 357 +++++++++++++++--------- src/modules/open-ai/types.ts | 26 +- src/modules/types.ts | 12 +- 6 files changed, 335 insertions(+), 304 deletions(-) delete mode 100644 
src/modules/open-ai/controller/index.ts diff --git a/src/modules/open-ai/api/openAi.ts b/src/modules/open-ai/api/openAi.ts index 3251f94a..df4fe92f 100644 --- a/src/modules/open-ai/api/openAi.ts +++ b/src/modules/open-ai/api/openAi.ts @@ -17,6 +17,7 @@ import { DalleGPTModel, DalleGPTModels, } from "../types"; +import { getMessageExtras } from "../helpers"; const openai = new OpenAI({ apiKey: config.openAiKey, @@ -134,6 +135,9 @@ export const streamChatCompletion = async ( const wordCountMinimum = config.openAi.chatGpt.wordCountBetween; return new Promise(async (resolve, reject) => { try { + // const extras = getMessageExtras({ + // topicId: ctx.message?.message_thread_id + // }) const stream = await openai.chat.completions.create({ model: model, messages: @@ -157,6 +161,9 @@ export const streamChatCompletion = async ( completion = completion.replaceAll("..", ""); completion += ".."; wordCount = 0; + // const extras = getMessageExtras({ + // topicId: ctx.message?.message_thread_id + // }) await ctx.api .editMessageText(ctx.chat?.id!, msgId, completion) .catch(async (e: any) => { diff --git a/src/modules/open-ai/controller/index.ts b/src/modules/open-ai/controller/index.ts deleted file mode 100644 index 3d0bdf43..00000000 --- a/src/modules/open-ai/controller/index.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { pino } from "pino"; -import { - ChatConversation, - OnCallBackQueryData, - OnMessageContext, -} from "../../types"; -import { - improvePrompt, - postGenerateImg, - alterGeneratedImg, - streamChatCompletion, - getTokenNumber, - getChatModel, - getChatModelPrice, -} from "../api/openAi"; -import config from "../../../config"; -import { GrammyError } from "grammy"; - -interface ImageGenPayload { - chatId: number; - prompt: string; - numImages?: number; - imgSize?: string; - filePath?: string; - model?: string; -} - -interface ChatGptPayload { - conversation: ChatConversation[]; - model: string; - ctx: OnMessageContext | OnCallBackQueryData; -} - -const logger = 
pino({ - name: "openAI-controller", - transport: { - target: "pino-pretty", - options: { - colorize: true, - }, - }, -}); - -export const imgGenEnhanced = async ( - data: ImageGenPayload, - ctx: OnMessageContext | OnCallBackQueryData -) => { - const { chatId, prompt, numImages, imgSize, model } = data; - try { - const upgratedPrompt = await improvePrompt(prompt, model!); - if (upgratedPrompt) { - await ctx - .reply( - `The following description was added to your prompt: ${upgratedPrompt}` - ) - .catch((e) => { - throw e; - }); - } - // bot.api.sendMessage(chatId, "generating the output..."); - const imgs = await postGenerateImg( - upgratedPrompt || prompt, - numImages, - imgSize - ); - imgs.map(async (img: any) => { - await ctx - .replyWithPhoto(img.url, { - caption: `/DALLE ${upgratedPrompt || prompt}`, - }) - .catch((e) => { - throw e; - }); - }); - return true; - } catch (e) { - throw e; - } -}; - -export const alterImg = async ( - data: ImageGenPayload, - ctx: OnMessageContext | OnCallBackQueryData -) => { - const { chatId, prompt, numImages, imgSize, filePath } = data; - try { - ctx.chatAction = "upload_photo"; - const imgs = await alterGeneratedImg(prompt!, filePath!, ctx, imgSize!); - if (imgs) { - imgs!.map(async (img: any) => { - await ctx.replyWithPhoto(img.url).catch((e) => { - throw e; - }); - }); - } - ctx.chatAction = null; - } catch (e) { - throw e; - } -}; - -export const promptGen = async ( - data: ChatGptPayload, - chat: ChatConversation[] -) => { - const { conversation, ctx, model } = data; - try { - let msgId = (await ctx.reply("...")).message_id; - const isTypingEnabled = config.openAi.chatGpt.isTypingEnabled; - if (isTypingEnabled) { - ctx.chatAction = "typing"; - } - const completion = await streamChatCompletion( - conversation!, - ctx, - model, - msgId, - true // telegram messages has a character limit - ); - if (isTypingEnabled) { - ctx.chatAction = null; - } - if (completion) { - const prompt = conversation[conversation.length - 
1].content; - const promptTokens = getTokenNumber(prompt); - const completionTokens = getTokenNumber(completion); - const modelPrice = getChatModel(model); - const price = - getChatModelPrice(modelPrice, true, promptTokens, completionTokens) * - config.openAi.chatGpt.priceAdjustment; - logger.info( - `streamChatCompletion result = tokens: ${ - promptTokens + completionTokens - } | ${modelPrice.name} | price: ${price}¢` - ); - conversation.push({ content: completion, role: "system" }); - ctx.session.openAi.chatGpt.usage += promptTokens + completionTokens; - ctx.session.openAi.chatGpt.price += price; - chat = [...conversation!]; - return price; - } - return 0; - } catch (e: any) { - ctx.chatAction = null; - throw e; - } -}; diff --git a/src/modules/open-ai/helpers.ts b/src/modules/open-ai/helpers.ts index 714eb62b..c377b296 100644 --- a/src/modules/open-ai/helpers.ts +++ b/src/modules/open-ai/helpers.ts @@ -1,6 +1,15 @@ import config from "../../config"; import { isValidUrl } from "./utils/web-crawler"; -import { OnMessageContext, OnCallBackQueryData } from "../types"; +import { + OnMessageContext, + OnCallBackQueryData, + ChatConversation, + ChatCompletion, +} from "../types"; +import { parse } from "path"; +import { ParseMode } from "grammy/types"; +import { getChatModel, getChatModelPrice, getTokenNumber } from "./api/openAi"; +import { ChatGptPayload } from "./types"; export const SupportedCommands = { chat: { @@ -126,7 +135,7 @@ export const hasUsernamePassword = (prompt: string) => { } } return { user, password }; -} +}; // doesn't get all the special characters like ! 
export const hasUserPasswordRegex = (prompt: string) => { @@ -139,10 +148,7 @@ export const hasUserPasswordRegex = (prompt: string) => { if (matches) { const [_, keyword, word, __, word2] = matches; - if ( - keyword.toLowerCase() === "user" || - keyword.toLowerCase() === "user=" - ) { + if (keyword.toLowerCase() === "user" || keyword.toLowerCase() === "user=") { user = word; password = word2; } else if ( @@ -154,7 +160,7 @@ export const hasUserPasswordRegex = (prompt: string) => { } } return { user, password }; -} +}; export const preparePrompt = async ( ctx: OnMessageContext | OnCallBackQueryData, @@ -165,24 +171,74 @@ export const preparePrompt = async ( return `${prompt} ${msg}`; } return prompt; +}; + +export const messageTopic = async ( + ctx: OnMessageContext | OnCallBackQueryData +) => { + return await ctx.message?.message_thread_id; +}; + +export interface MessageExtras { + caption?: string; + message_thread_id?: number; + parse_mode?: ParseMode; } -export const messageTopic = async (ctx: OnMessageContext | OnCallBackQueryData) => { - return await ctx.message?.message_thread_id +interface GetMessagesExtras { + parseMode?: ParseMode | undefined; + topicId?: number | undefined; + caption?: string | undefined; } -export const sendMessage = async (ctx: OnMessageContext | OnCallBackQueryData, msg: string) => { - const topic = await messageTopic(ctx) - if (topic) { - ctx.reply(msg, { - message_thread_id: topic, - parse_mode: 'Markdown' - }) +export const getMessageExtras = (params: GetMessagesExtras) => { + const { parseMode, topicId, caption } = params; + let extras: MessageExtras = {}; + if (parseMode) { + extras["parse_mode"] = parseMode; } -} + if (topicId) { + extras["message_thread_id"] = parseInt( + String(topicId) + ) as unknown as number; + } + if (caption) { + extras["caption"] = caption; + } + return extras; +}; + +export const sendMessage = async ( + ctx: OnMessageContext | OnCallBackQueryData, + msg: string, + msgExtras: GetMessagesExtras +) => { + 
const extras = getMessageExtras(msgExtras); + return await ctx.reply(msg, extras); +}; export const hasPrefix = (prompt: string): string => { return ( hasChatPrefix(prompt) || hasDallePrefix(prompt) || hasNewPrefix(prompt) ); }; + +export const getPromptPrice = (completion: string, data: ChatGptPayload) => { + const { conversation, ctx, model } = data; + + const prompt = conversation[conversation.length - 1].content; + const promptTokens = getTokenNumber(prompt); + const completionTokens = getTokenNumber(completion); + const modelPrice = getChatModel(model); + const price = + getChatModelPrice(modelPrice, true, promptTokens, completionTokens) * + config.openAi.chatGpt.priceAdjustment; + conversation.push({ content: completion, role: "system" }); + ctx.session.openAi.chatGpt.usage += promptTokens + completionTokens; + ctx.session.openAi.chatGpt.price += price; + return { + price, + promptTokens, + completionTokens, + }; +}; diff --git a/src/modules/open-ai/index.ts b/src/modules/open-ai/index.ts index 23147f47..4272b7cf 100644 --- a/src/modules/open-ai/index.ts +++ b/src/modules/open-ai/index.ts @@ -8,21 +8,23 @@ import { OnMessageContext, OnCallBackQueryData, ChatConversation, - MessageExtras } from "../types"; import { + alterGeneratedImg, getChatModel, getDalleModel, getDalleModelPrice, postGenerateImg, + streamChatCompletion, } from "./api/openAi"; -import { alterImg, imgGenEnhanced, promptGen } from "./controller"; import { appText } from "./utils/text"; import { chatService } from "../../database/services"; -import { ChatGPTModelsEnum } from "./types"; +import { ChatGPTModelsEnum, ChatGptPayload } from "./types"; import config from "../../config"; import { sleep } from "../sd-images/utils"; import { + getMessageExtras, + getPromptPrice, hasChatPrefix, hasNewPrefix, hasPrefix, @@ -62,7 +64,6 @@ export class OpenAIBot { public async isSupportedEvent( ctx: OnMessageContext | OnCallBackQueryData ): Promise { - const hasCommand = ctx.hasCommand( 
Object.values(SupportedCommands).map((command) => command.name) ); @@ -194,10 +195,10 @@ export class OpenAIBot { return; } - if (ctx.hasCommand(SupportedCommands.genImgEn.name)) { - this.onGenImgEnCmd(ctx); - return; - } + // if (ctx.hasCommand(SupportedCommands.genImgEn.name)) { + // this.onGenImgEnCmd(ctx); + // return; + // } if (this.isSupportedImageReply(ctx)) { this.onAlterImage(ctx); @@ -238,9 +239,9 @@ export class OpenAIBot { } this.logger.warn(`### unsupported command`); - await ctx - .reply("### unsupported command") - .catch((e) => this.onError(ctx, e, MAX_TRIES, "Bot disabled")); + sendMessage(ctx, "### unsupported command", { + topicId: ctx.message?.message_thread_id, + }).catch((e) => this.onError(ctx, e, MAX_TRIES, "### unsupported command")); } private async hasBalance(ctx: OnMessageContext | OnCallBackQueryData) { @@ -266,24 +267,19 @@ export class OpenAIBot { const numImages = await ctx.session.openAi.imageGen.numImages; const imgSize = await ctx.session.openAi.imageGen.imgSize; const imgs = await postGenerateImg(prompt, numImages, imgSize); - const topicId = await messageTopic(ctx) - let msgExtras: MessageExtras = { - caption: `/dalle ${prompt}` - } - if (topicId) { - msgExtras['message_thread_id'] = topicId - } + const msgExtras = getMessageExtras({ + caption: `/dalle ${prompt}`, + topicId: await messageTopic(ctx), + }); imgs.map(async (img: any) => { - await ctx - .replyWithPhoto(img.url, msgExtras) - .catch((e) => { - this.onError(ctx, e, MAX_TRIES); - }); + await ctx.replyWithPhoto(img.url, msgExtras).catch((e) => { + this.onError(ctx, e, MAX_TRIES); + }); }); } else { - await ctx - .reply("Bot disabled") - .catch((e) => this.onError(ctx, e, MAX_TRIES, "Bot disabled")); + sendMessage(ctx, "Bot disabled", { + topicId: ctx.message?.message_thread_id, + }).catch((e) => this.onError(ctx, e, MAX_TRIES, "Bot disabled")); } } catch (e) { this.onError( @@ -295,38 +291,6 @@ export class OpenAIBot { } }; - onGenImgEnCmd = async (ctx: OnMessageContext 
| OnCallBackQueryData) => { - try { - if (ctx.session.openAi.imageGen.isEnabled) { - const prompt = await ctx.match; - if (!prompt) { - await ctx - .reply("Error: Missing prompt") - .catch((e) => - this.onError(ctx, e, MAX_TRIES, "Error: Missing prompt") - ); - return; - } - const payload = { - chatId: await ctx.chat?.id!, - prompt: prompt as string, - numImages: await ctx.session.openAi.imageGen.numImages, - imgSize: await ctx.session.openAi.imageGen.imgSize, - }; - await ctx - .reply("generating improved prompt...") - .catch((e) => this.onError(ctx, e)); - await imgGenEnhanced(payload, ctx); - } else { - await ctx - .reply("Bot disabled") - .catch((e) => this.onError(ctx, e, MAX_TRIES, "Bot disabled")); - } - } catch (e) { - this.onError(ctx, e); - } - }; - onAlterImage = async (ctx: OnMessageContext | OnCallBackQueryData) => { try { if (ctx.session.openAi.imageGen.isEnabled) { @@ -336,14 +300,17 @@ export class OpenAIBot { const file_id = photo?.pop()?.file_id; // with pop() get full image quality const file = await ctx.api.getFile(file_id!); const filePath = `${config.openAi.dalle.telegramFileUrl}${config.telegramBotAuthToken}/${file.file_path}`; - const payload = { - chatId: ctx.chat?.id!, - prompt: prompt as string, - numImages: await ctx.session.openAi.imageGen.numImages, - imgSize: await ctx.session.openAi.imageGen.imgSize, - filePath: filePath, - }; - await alterImg(payload, ctx); + const imgSize = await ctx.session.openAi.imageGen.imgSize; + ctx.chatAction = "upload_photo"; + const imgs = await alterGeneratedImg(prompt!, filePath!, ctx, imgSize!); + if (imgs) { + imgs!.map(async (img: any) => { + await ctx.replyWithPhoto(img.url).catch((e) => { + throw e; + }); + }); + } + ctx.chatAction = null; } } catch (e: any) { this.onError( @@ -355,11 +322,50 @@ export class OpenAIBot { } }; + private async promptGen(data: ChatGptPayload, chat: ChatConversation[]) { + const { conversation, ctx, model } = data; + try { + const extras = getMessageExtras({ + topicId: 
ctx.message?.message_thread_id, + }); + let msgId = (await ctx.reply("...", extras)).message_id; + const isTypingEnabled = config.openAi.chatGpt.isTypingEnabled; + if (isTypingEnabled) { + ctx.chatAction = "typing"; + } + const completion = await streamChatCompletion( + conversation!, + ctx, + model, + msgId, + true // telegram messages has a character limit + ); + if (isTypingEnabled) { + ctx.chatAction = null; + } + if (completion) { + const price = getPromptPrice(completion, data); + this.logger.info( + `streamChatCompletion result = tokens: ${ + price.promptTokens + price.completionTokens + } | ${model} | price: ${price}¢` + ); + conversation.push({ content: completion, role: "system" }); + chat = [...conversation!]; + return price.price; + } + return 0; + } catch (e: any) { + ctx.chatAction = null; + throw e; + } + } + async onSum(ctx: OnMessageContext | OnCallBackQueryData) { if (this.botSuspended) { - await ctx - .reply("The bot is suspended") - .catch((e) => this.onError(ctx, e)); + sendMessage(ctx, "The bot is suspended", { + topicId: ctx.message?.message_thread_id, + }).catch((e) => this.onError(ctx, e)); return; } try { @@ -375,8 +381,9 @@ export class OpenAIBot { "sum" ); } else { - await sendMessage(ctx, `Error: Missing url`) - // ctx.reply(); + await sendMessage(ctx, `Error: Missing url`, { + topicId: ctx.message?.message_thread_id, + }).catch((e) => this.onError(ctx, e)); } } catch (e) { this.onError(ctx, e); @@ -405,7 +412,9 @@ export class OpenAIBot { ?.text!.replaceAll(user, "****") .replaceAll(password, "*****") || ""; ctx.api.deleteMessage(ctx.chat?.id!, ctx.message?.message_id!); - sendMessage(ctx,maskedPrompt) + sendMessage(ctx, maskedPrompt, { + topicId: ctx.message?.message_thread_id, + }); } const webContent = await getWebContent( url, @@ -414,14 +423,14 @@ export class OpenAIBot { password ); if (webContent.urlText !== "") { - // ctx.reply(`URL downloaded`, - // // `${(webContent.networkTraffic / 1048576).toFixed( + // await 
sendMessage(ctx,`URL downloaded`, + // // `${(webContent.networkTraffic / 1048576).toFixed( // // 2 // // )} MB in ${(webContent.elapsedTime / 1000).toFixed(2)} seconds`, - // { - // parse_mode: "Markdown", - // } - // ); + // { + // topicId: ctx.message?.message_thread_id, + // parseMode: "Markdown", + // }).catch((e) => this.onError(ctx, e)); if ( !(await this.payments.pay(ctx as OnMessageContext, webContent.fees)) ) { @@ -449,16 +458,21 @@ export class OpenAIBot { model: model || config.openAi.chatGpt.model, ctx, }; - const price = await promptGen(payload, chat); + const price = await this.promptGen(payload, chat); if (!(await this.payments.pay(ctx as OnMessageContext, price))) { this.onNotBalanceMessage(ctx); } } } } else { - ctx.reply( - "Url not supported, incorrect web site address or missing user credentials" - ); + await sendMessage( + ctx, + "Url not supported, incorrect web site address or missing user credentials", + { + topicId: ctx.message?.message_thread_id, + parseMode: "Markdown", + } + ).catch((e) => this.onError(ctx, e)); return; } return { @@ -476,9 +490,9 @@ export class OpenAIBot { async onMention(ctx: OnMessageContext | OnCallBackQueryData) { try { if (this.botSuspended) { - await ctx - .reply("The bot is suspended") - .catch((e) => this.onError(ctx, e)); + sendMessage(ctx, "The bot is suspended", { + topicId: ctx.message?.message_thread_id, + }).catch((e) => this.onError(ctx, e)); return; } const { username } = ctx.me; @@ -500,9 +514,9 @@ export class OpenAIBot { async onPrefix(ctx: OnMessageContext | OnCallBackQueryData) { try { if (this.botSuspended) { - await ctx - .reply("The bot is suspended") - .catch((e) => this.onError(ctx, e)); + sendMessage(ctx, "The bot is suspended", { + topicId: ctx.message?.message_thread_id, + }).catch((e) => this.onError(ctx, e)); return; } const { prompt, commandName } = getCommandNamePrompt( @@ -527,9 +541,9 @@ export class OpenAIBot { async onPrivateChat(ctx: OnMessageContext | OnCallBackQueryData) { try { 
if (this.botSuspended) { - await ctx - .reply("The bot is suspended") - .catch((e) => this.onError(ctx, e)); + sendMessage(ctx, "The bot is suspended", { + topicId: ctx.message?.message_thread_id, + }).catch((e) => this.onError(ctx, e)); return; } ctx.session.openAi.chatGpt.requestQueue.push( @@ -549,9 +563,9 @@ export class OpenAIBot { async onChat(ctx: OnMessageContext | OnCallBackQueryData) { try { if (this.botSuspended) { - await ctx - .reply("The bot is suspended") - .catch((e) => this.onError(ctx, e)); + sendMessage(ctx, "The bot is suspended", { + topicId: ctx.message?.message_thread_id, + }).catch((e) => this.onError(ctx, e)); return; } const prompt = ctx.match ? ctx.match : ctx.message?.text; @@ -582,9 +596,10 @@ export class OpenAIBot { chatConversation[chatConversation.length - 1].content }_` : appText.introText; - await ctx - .reply(msg, { parse_mode: "Markdown" }) - .catch((e) => this.onError(ctx, e)); + await sendMessage(ctx, msg, { + topicId: ctx.message?.message_thread_id, + parseMode: "Markdown", + }).catch((e) => this.onError(ctx, e)); return; } const { url, newPrompt } = hasUrl(prompt); @@ -606,7 +621,7 @@ export class OpenAIBot { model: model || config.openAi.chatGpt.model, ctx, }; - const price = await promptGen(payload, chatConversation); + const price = await this.promptGen(payload, chatConversation); if (!(await this.payments.pay(ctx as OnMessageContext, price))) { this.onNotBalanceMessage(ctx); } @@ -624,17 +639,19 @@ export class OpenAIBot { async onLast(ctx: OnMessageContext | OnCallBackQueryData) { if (ctx.session.openAi.chatGpt.chatConversation.length > 0) { const chat = ctx.session.openAi.chatGpt.chatConversation; - await ctx - .reply(`${appText.gptLast}\n_${chat[chat.length - 1].content}_`, { - parse_mode: "Markdown", - }) - .catch((e) => this.onError(ctx, e)); + await sendMessage( + ctx, + `${appText.gptLast}\n_${chat[chat.length - 1].content}_`, + { + topicId: ctx.message?.message_thread_id, + parseMode: "Markdown", + } + 
).catch((e) => this.onError(ctx, e)); } else { - await ctx - .reply(`To start a conversation please write */ask*`, { - parse_mode: "Markdown", - }) - .catch((e) => this.onError(ctx, e)); + await sendMessage(ctx, `To start a conversation please write */ask*`, { + topicId: ctx.message?.message_thread_id, + parseMode: "Markdown", + }).catch((e) => this.onError(ctx, e)); } } @@ -654,9 +671,10 @@ export class OpenAIBot { const balanceMessage = appText.notEnoughBalance .replaceAll("$CREDITS", balanceOne) .replaceAll("$WALLET_ADDRESS", account?.address || ""); - await ctx - .reply(balanceMessage, { parse_mode: "Markdown" }) - .catch((e) => this.onError(ctx, e)); + await sendMessage(ctx, balanceMessage, { + topicId: ctx.message?.message_thread_id, + parseMode: "Markdown", + }).catch((e) => this.onError(ctx, e)); } async onError( @@ -672,7 +690,13 @@ export class OpenAIBot { } if (e instanceof GrammyError) { if (e.error_code === 400 && e.description.includes("not enough rights")) { - await sendMessage(ctx, "Error: The bot does not have permission to send photos in chat") + await sendMessage( + ctx, + "Error: The bot does not have permission to send photos in chat", + { + topicId: ctx.message?.message_thread_id, + } + ); } else if (e.error_code === 429) { this.botSuspended = true; const retryAfter = e.parameters.retry_after @@ -683,13 +707,15 @@ export class OpenAIBot { const method = e.method; const errorMessage = `On method "${method}" | ${e.error_code} - ${e.description}`; this.logger.error(errorMessage); - await ctx - .reply( - `${ - ctx.from.username ? ctx.from.username : "" - } Bot has reached limit, wait ${retryAfter} seconds` - ) - .catch((e) => this.onError(ctx, e, retryCount - 1)); + await sendMessage( + ctx, + `${ + ctx.from.username ? 
ctx.from.username : "" + } Bot has reached limit, wait ${retryAfter} seconds`, + { + topicId: ctx.message?.message_thread_id, + } + ).catch((e) => this.onError(ctx, e, retryCount - 1)); if (method === "editMessageText") { ctx.session.openAi.chatGpt.chatConversation.pop(); //deletes last prompt } @@ -705,20 +731,95 @@ export class OpenAIBot { // e.status = 400 || e.code = BadRequestError this.logger.error(`OPENAI Error ${e.status}(${e.code}) - ${e.message}`); if (e.code === "context_length_exceeded") { - await ctx - .reply(`${e.message}`) - .catch((e) => this.onError(ctx, e, retryCount - 1)); + await sendMessage(ctx, e.message, { + topicId: ctx.message?.message_thread_id, + }).catch((e) => this.onError(ctx, e, retryCount - 1)); this.onEnd(ctx); } else { - await ctx - .reply(`Error accessing OpenAI (ChatGPT). Please try later`) - .catch((e) => this.onError(ctx, e, retryCount - 1)); + await sendMessage( + ctx, + `Error accessing OpenAI (ChatGPT). Please try later`, + { + topicId: ctx.message?.message_thread_id, + } + ).catch((e) => this.onError(ctx, e, retryCount - 1)); } } else { this.logger.error(`${e.toString()}`); - await ctx - .reply(msg ? 
msg : "Error handling your request") - .catch((e) => this.onError(ctx, e, retryCount - 1)); + await sendMessage(ctx, "Error handling your request", { + topicId: ctx.message?.message_thread_id, + }).catch((e) => this.onError(ctx, e, retryCount - 1)); } } } + +// onGenImgEnCmd = async (ctx: OnMessageContext | OnCallBackQueryData) => { +// try { +// if (ctx.session.openAi.imageGen.isEnabled) { +// const prompt = await ctx.match; +// if (!prompt) { +// sendMessage(ctx, "Error: Missing prompt", { +// topicId: ctx.message?.message_thread_id, +// }).catch((e) => +// this.onError(ctx, e, MAX_TRIES, "Error: Missing prompt") +// ); +// return; +// } +// const payload = { +// chatId: await ctx.chat?.id!, +// prompt: prompt as string, +// numImages: await ctx.session.openAi.imageGen.numImages, +// imgSize: await ctx.session.openAi.imageGen.imgSize, +// }; +// sendMessage(ctx, "generating improved prompt...", { +// topicId: ctx.message?.message_thread_id, +// }).catch((e) => +// this.onError(ctx, e, MAX_TRIES, "generating improved prompt...") +// ); +// await imgGenEnhanced(payload, ctx); +// } else { +// sendMessage(ctx, "Bot disabled", { +// topicId: ctx.message?.message_thread_id, +// }).catch((e) => this.onError(ctx, e, MAX_TRIES, "Bot disabled")); +// } +// } catch (e) { +// this.onError(ctx, e); +// } +// }; + +// private async imgGenEnhanced( +// data: ImageGenPayload, +// ctx: OnMessageContext | OnCallBackQueryData +// ) { +// const { chatId, prompt, numImages, imgSize, model } = data; +// try { +// const upgratedPrompt = await improvePrompt(prompt, model!); +// if (upgratedPrompt) { +// await ctx +// .reply( +// `The following description was added to your prompt: ${upgratedPrompt}` +// ) +// .catch((e) => { +// throw e; +// }); +// } +// // bot.api.sendMessage(chatId, "generating the output..."); +// const imgs = await postGenerateImg( +// upgratedPrompt || prompt, +// numImages, +// imgSize +// ); +// imgs.map(async (img: any) => { +// await ctx +// 
.replyWithPhoto(img.url, { +// caption: `/DALLE ${upgratedPrompt || prompt}`, +// }) +// .catch((e) => { +// throw e; +// }); +// }); +// return true; +// } catch (e) { +// throw e; +// } +// }; diff --git a/src/modules/open-ai/types.ts b/src/modules/open-ai/types.ts index 76934741..4fab0b27 100644 --- a/src/modules/open-ai/types.ts +++ b/src/modules/open-ai/types.ts @@ -1,3 +1,9 @@ +import { + ChatConversation, + OnCallBackQueryData, + OnMessageContext, +} from "../types"; + export interface ChatGPTModel { name: string; inputPrice: number; @@ -10,6 +16,12 @@ export interface DalleGPTModel { price: number; } +export interface ChatGptPayload { + conversation: ChatConversation[]; + model: string; + ctx: OnMessageContext | OnCallBackQueryData; +} + export enum ChatGPTModelsEnum { GPT_4 = "gpt-4", GPT_4_32K = "gpt-4-32k", @@ -22,39 +34,39 @@ export const ChatGPTModels: Record = { name: "gpt-4", inputPrice: 0.03, outputPrice: 0.06, - maxContextTokens: 8192 + maxContextTokens: 8192, }, "gpt-4-32k": { name: "gpt-4-32k", inputPrice: 0.06, outputPrice: 0.12, - maxContextTokens: 32000 + maxContextTokens: 32000, }, "gpt-3.5-turbo": { name: "gpt-3.5-turbo", inputPrice: 0.0015, outputPrice: 0.002, - maxContextTokens: 4000 + maxContextTokens: 4000, }, "gpt-3.5-turbo-16k": { name: "gpt-3.5-turbo-16k", inputPrice: 0.003, outputPrice: 0.004, - maxContextTokens: 16000 + maxContextTokens: 16000, }, }; export const DalleGPTModels: Record = { "1024x1024": { size: "1024x1024", - price: 0.020 + price: 0.02, }, "512x512": { size: "512x512", - price: 0.018 + price: 0.018, }, "256x256": { size: "256x256", - price: 0.016 + price: 0.016, }, }; diff --git a/src/modules/types.ts b/src/modules/types.ts index a52d7132..da11cfc8 100644 --- a/src/modules/types.ts +++ b/src/modules/types.ts @@ -6,6 +6,7 @@ import { type ConversationFlavor, } from "@grammyjs/conversations"; import { AutoChatActionFlavor } from "@grammyjs/auto-chat-action"; +import { ParseMode } from "grammy/types"; export interface 
ImageGenSessionData { numImages: number; @@ -14,8 +15,9 @@ export interface ImageGenSessionData { } export interface MessageExtras { - caption?: string, - message_thread_id?: number + caption?: string; + message_thread_id?: number; + parse_mode?: ParseMode; } export interface ChatCompletion { completion: string; @@ -45,14 +47,14 @@ export interface OneCountryData { } export interface TranslateBotData { - languages: string[], - enable: boolean, + languages: string[]; + enable: boolean; } export interface BotSessionData { oneCountry: OneCountryData; openAi: OpenAiSessionData; - translate: TranslateBotData + translate: TranslateBotData; } export type BotContext = Context & From 0b61ad4398c043ebf75efb3e52374b563f0c1c24 Mon Sep 17 00:00:00 2001 From: ahiipsa Date: Tue, 5 Sep 2023 23:24:07 +0400 Subject: [PATCH 11/14] Added /other section --- src/database/stats.service.ts | 4 ++-- src/modules/schedule/index.ts | 13 ++++++++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/src/database/stats.service.ts b/src/database/stats.service.ts index 01f5a395..89fd2699 100644 --- a/src/database/stats.service.ts +++ b/src/database/stats.service.ts @@ -105,9 +105,9 @@ export class StatsService { const rows = await logRepository.createQueryBuilder('logs') .select('logs.command, count(logs.command) as "commandCount", SUM(logs.amountOne) as "oneAmount"') - .groupBy('logs.command') .where(`logs.createdAt BETWEEN TO_TIMESTAMP(${dateStart}) and TO_TIMESTAMP(${dateEnd})`) - .orderBy('"commandCount"', 'DESC').limit(10).execute(); + .groupBy('logs.command') + .orderBy('"commandCount"', 'DESC').execute(); return rows; } diff --git a/src/modules/schedule/index.ts b/src/modules/schedule/index.ts index 3cc401a5..2746f980 100644 --- a/src/modules/schedule/index.ts +++ b/src/modules/schedule/index.ts @@ -142,10 +142,21 @@ export class BotSchedule { public async generateReportEngagementByCommand(days: number) { const dbRows = await statsService.getUserEngagementByCommand(days); - 
const rows = dbRows.map((row) => { + const cropIndex = dbRows.length >= 10 ? 10 : dbRows.length - 1; + + let otherCommandCount = 0; + for (let i = cropIndex; i < dbRows.length; i++) { + otherCommandCount += Number(dbRows[i].commandCount); + } + + const rows = dbRows.slice(0, cropIndex).map((row) => { return `${abbreviateNumber(+row.commandCount).padEnd(4)} ${row.command}` }) + if (otherCommandCount > 0) { + rows.push(`${abbreviateNumber(otherCommandCount).padEnd(4)} /other`); + } + return "```\n" + rows.join('\n') + "\n```"; } From a8fba21a36eaecb9216574e60da6df5621389315 Mon Sep 17 00:00:00 2001 From: fegloff Date: Tue, 5 Sep 2023 18:18:01 -0500 Subject: [PATCH 12/14] add permission error handling for QR and sd-images bot --- src/config.ts | 2 +- src/modules/open-ai/helpers.ts | 14 +- src/modules/open-ai/index.ts | 6 +- src/modules/qrcode/QRCodeBot.ts | 298 +++++++++++++++-------- src/modules/sd-images/SDImagesBotBase.ts | 22 +- 5 files changed, 218 insertions(+), 124 deletions(-) diff --git a/src/config.ts b/src/config.ts index 4a290f6f..ec8546c4 100644 --- a/src/config.ts +++ b/src/config.ts @@ -70,7 +70,7 @@ export default { : ["d."], newPrefix: process.env.NEW_PREFIX ? process.env.NEW_PREFIX.split(",") - : ["n."], + : ["n.",".."], }, minimumBalance: process.env.MIN_BALANCE ? 
parseInt(process.env.MIN_BALANCE) diff --git a/src/modules/open-ai/helpers.ts b/src/modules/open-ai/helpers.ts index c377b296..cbfdf7c8 100644 --- a/src/modules/open-ai/helpers.ts +++ b/src/modules/open-ai/helpers.ts @@ -1,11 +1,6 @@ import config from "../../config"; import { isValidUrl } from "./utils/web-crawler"; -import { - OnMessageContext, - OnCallBackQueryData, - ChatConversation, - ChatCompletion, -} from "../types"; +import { OnMessageContext, OnCallBackQueryData, MessageExtras } from "../types"; import { parse } from "path"; import { ParseMode } from "grammy/types"; import { getChatModel, getChatModelPrice, getTokenNumber } from "./api/openAi"; @@ -178,13 +173,6 @@ export const messageTopic = async ( ) => { return await ctx.message?.message_thread_id; }; - -export interface MessageExtras { - caption?: string; - message_thread_id?: number; - parse_mode?: ParseMode; -} - interface GetMessagesExtras { parseMode?: ParseMode | undefined; topicId?: number | undefined; diff --git a/src/modules/open-ai/index.ts b/src/modules/open-ai/index.ts index 4272b7cf..c274af1a 100644 --- a/src/modules/open-ai/index.ts +++ b/src/modules/open-ai/index.ts @@ -217,13 +217,13 @@ export class OpenAIBot { return; } - if (hasChatPrefix(ctx.message?.text || "") !== "") { + if (hasNewPrefix(ctx.message?.text || "") !== "") { + await this.onEnd(ctx); this.onPrefix(ctx); return; } - if (hasNewPrefix(ctx.message?.text || "") !== "") { - await this.onEnd(ctx); + if (hasChatPrefix(ctx.message?.text || "") !== "") { this.onPrefix(ctx); return; } diff --git a/src/modules/qrcode/QRCodeBot.ts b/src/modules/qrcode/QRCodeBot.ts index 31276870..be46b414 100644 --- a/src/modules/qrcode/QRCodeBot.ts +++ b/src/modules/qrcode/QRCodeBot.ts @@ -1,52 +1,67 @@ -import {Automatic1111Client} from "./Automatic1111Client"; -import {createQRCode, isQRCodeReadable, normalizeUrl, retryAsync} from "./utils"; +import { Automatic1111Client } from "./Automatic1111Client"; +import { + createQRCode, + 
isQRCodeReadable, + normalizeUrl, + retryAsync, +} from "./utils"; import config from "../../config"; -import {InlineKeyboard, InputFile} from "grammy"; -import {OnCallBackQueryData, OnMessageContext, RefundCallback} from "../types"; -import {Automatic1111Config} from "./Automatic1111Configs"; -import {automatic1111DefaultConfig} from "./Automatic1111DefaultConfig"; -import {ComfyClient} from "./comfy/ComfyClient"; +import { GrammyError, InlineKeyboard, InputFile } from "grammy"; +import { + MessageExtras, + OnCallBackQueryData, + OnMessageContext, + RefundCallback, +} from "../types"; +import { Automatic1111Config } from "./Automatic1111Configs"; +import { automatic1111DefaultConfig } from "./Automatic1111DefaultConfig"; +import { ComfyClient } from "./comfy/ComfyClient"; import crypto from "crypto"; import buildQRWorkflow from "./comfy/buildQRWorkflow"; -import pino, {Logger} from "pino"; +import pino, { Logger } from "pino"; enum SupportedCommands { - QR = 'qr', + QR = "qr", } enum Callbacks { - Regenerate = 'qr-regenerate', + Regenerate = "qr-regenerate", } export class QRCodeBot { - - private logger: Logger + private logger: Logger; constructor() { - this.logger = pino({ - name: 'QRBot', + name: "QRBot", transport: { - target: 'pino-pretty', + target: "pino-pretty", options: { - colorize: true - } - } - }) - + colorize: true, + }, + }, + }); } public getEstimatedPrice(ctx: any) { return 1; // 1.5; } - public isSupportedEvent(ctx: OnMessageContext | OnCallBackQueryData): boolean { - return ctx.hasCommand(Object.values(SupportedCommands)) || ctx.hasCallbackQuery(Object.values(Callbacks)); + public isSupportedEvent( + ctx: OnMessageContext | OnCallBackQueryData + ): boolean { + return ( + ctx.hasCommand(Object.values(SupportedCommands)) || + ctx.hasCallbackQuery(Object.values(Callbacks)) + ); } - public async onEvent(ctx: OnMessageContext | OnCallBackQueryData, refundCallback: RefundCallback) { + public async onEvent( + ctx: OnMessageContext | 
OnCallBackQueryData, + refundCallback: RefundCallback + ) { if (!this.isSupportedEvent(ctx)) { - await ctx.reply(`Unsupported command: ${ctx.message?.text}`) - return refundCallback('Unsupported command') + await ctx.reply(`Unsupported command: ${ctx.message?.text}`); + return refundCallback("Unsupported command"); } try { @@ -54,82 +69,89 @@ export class QRCodeBot { try { await ctx.answerCallbackQuery(); } catch (ex) { - console.log('### ex', ex); + console.log("### ex", ex); } - const msg = ctx.callbackQuery.message?.text || ctx.callbackQuery.message?.caption || ''; + const msg = + ctx.callbackQuery.message?.text || + ctx.callbackQuery.message?.caption || + ""; if (!msg) { - await ctx.reply('Error: message is too old'); - return refundCallback('Error: message is too old') + await ctx.reply("Error: message is too old"); + return refundCallback("Error: message is too old"); } const cmd = this.parseQrCommand(msg); if (cmd.error || !cmd.command || !cmd.url || !cmd.prompt) { - await ctx.reply('Message haven\'t contain command: ' + msg); - return refundCallback('Message haven\'t contain command: ') + await ctx.reply("Message haven't contain command: " + msg); + return refundCallback("Message haven't contain command: "); } if (cmd.command === SupportedCommands.QR) { - return this.onQr(ctx, msg, 'img2img'); + return this.onQr(ctx, msg, "img2img"); } } if (ctx.hasCommand(SupportedCommands.QR)) { - return this.onQr(ctx, ctx.message.text, 'img2img'); + return this.onQr(ctx, ctx.message.text, "img2img"); } } catch (ex) { if (ex instanceof Error) { - this.logger.info('Error ' + ex.message); + this.logger.info("Error " + ex.message); return refundCallback(ex.message); } - this.logger.info('Error ' + ex); - return refundCallback('Unknown error'); + this.logger.info("Error " + ex); + return refundCallback("Unknown error"); } - await ctx.reply('Unsupported command'); - this.logger.info('Unsupported command'); - return refundCallback('Unsupported command'); + await 
ctx.reply("Unsupported command"); + this.logger.info("Unsupported command"); + return refundCallback("Unsupported command"); } public parseQrCommand(message: string) { // command: /qr url prompt1, prompt2, prompt3 - if (!message.startsWith('/')) { + if (!message.startsWith("/")) { return { - command: '', - url: '', - prompt: '', + command: "", + url: "", + prompt: "", error: true, - } + }; } - const [command, url, ...rest] = message.split(' '); + const [command, url, ...rest] = message.split(" "); return { - command: command.replace('/', ''), + command: command.replace("/", ""), url, - prompt: rest.join(' '), - } + prompt: rest.join(" "), + }; } - private async onQr(ctx: OnMessageContext | OnCallBackQueryData, message: string, method: 'txt2img' | 'img2img') { - this.logger.info('generate qr'); + private async onQr( + ctx: OnMessageContext | OnCallBackQueryData, + message: string, + method: "txt2img" | "img2img" + ) { + this.logger.info("generate qr"); const command = this.parseQrCommand(message); if (command.error || !command.command || !command.url || !command.prompt) { - command.url = 'https://s.country/ai'; - command.prompt = 'astronaut, exuberant, anime girl, smile, sky, colorful' -// ctx.reply(` -// Please add -// -// /qr h.country/ai Dramatic bonfire on a remote beach, captured at the magic hour with flames dancing against the twilight sky; using a shallow depth of field, a fast lens, and controlled exposure to emphasize the intricate patterns and textures of the fire, complemented by embers in the wind and the gentle glow reflecting on the ocean's edge, moody, intense, and alive.`, { -// disable_web_page_preview: true, -// }); -// return + command.url = "https://s.country/ai"; + command.prompt = "astronaut, exuberant, anime girl, smile, sky, colorful"; + // ctx.reply(` + // Please add + // + // /qr h.country/ai Dramatic bonfire on a remote beach, captured at the magic hour with flames dancing against the twilight sky; using a shallow depth of field, a fast 
lens, and controlled exposure to emphasize the intricate patterns and textures of the fire, complemented by embers in the wind and the gentle glow reflecting on the ocean's edge, moody, intense, and alive.`, { + // disable_web_page_preview: true, + // }); + // return } // ctx.reply(`Generating...`); @@ -145,84 +167,160 @@ export class QRCodeBot { method, prompt: command.prompt, }; - const qrImgBuffer = await this.genQRCodeByComfyUI(props); - if (!qrImgBuffer) { - throw new Error('internal error'); + throw new Error("internal error"); } - - if(config.qrBot.checkReadable && isQRCodeReadable(qrImgBuffer)) { - console.log('### qr unreadable'); + if (config.qrBot.checkReadable && isQRCodeReadable(qrImgBuffer)) { + console.log("### qr unreadable"); return qrImgBuffer; } - return qrImgBuffer; - } + }; let qrImgBuffer; try { ctx.chatAction = "upload_photo"; qrImgBuffer = await retryAsync(operation, 5, 100); - } catch (ex) { ctx.chatAction = null; this.logger.error(`ex ${ex}`); - await ctx.reply("Internal error") - throw new Error('Internal error'); + await ctx.reply("Internal error"); + throw new Error("Internal error"); } - const regenButton = new InlineKeyboard() - .text("Regenerate", Callbacks.Regenerate) - + const regenButton = new InlineKeyboard().text( + "Regenerate", + Callbacks.Regenerate + ); - await ctx.replyWithPhoto(new InputFile(qrImgBuffer, `qr_code_${Date.now()}.png`), { - caption: `/qr ${command.url} ${command.prompt}`, - reply_markup: regenButton, - }) - this.logger.info('sent qr code'); - return true; + try { + await ctx.replyWithPhoto( + new InputFile(qrImgBuffer, `qr_code_${Date.now()}.png`), + { + caption: `/qr ${command.url} ${command.prompt}`, + reply_markup: regenButton, + } + ); + this.logger.info("sent qr code"); + return true; + } catch (e: any) { + const topicId = await ctx.message?.message_thread_id; + let msgExtras: MessageExtras = {}; + if (topicId) { + msgExtras["message_thread_id"] = topicId; + } + if (e instanceof GrammyError) { + if ( + 
e.error_code === 400 && + e.description.includes("not enough rights") + ) { + ctx.reply( + `Error: The bot does not have permission to send photos in chat...`, + msgExtras + ); + } else { + ctx.reply( + `Error: something went wrong...`, + msgExtras + ); + } + } else { + this.logger.error(e.toString()); + ctx.reply( + `Error: something went wrong...`, + msgExtras + ); + } + return false; + } } - private async genQRCode({qrUrl, qrMargin, prompt, method}: {qrUrl: string, qrMargin: number, prompt: string, method: 'img2img' | 'txt2img'}) { - const qrImgBuffer = await createQRCode({url: qrUrl, margin: qrMargin }); + private async genQRCode({ + qrUrl, + qrMargin, + prompt, + method, + }: { + qrUrl: string; + qrMargin: number; + prompt: string; + method: "img2img" | "txt2img"; + }) { + const qrImgBuffer = await createQRCode({ url: qrUrl, margin: qrMargin }); const sdClient = new Automatic1111Client(); - const extendedPrompt = prompt + ', ' + automatic1111DefaultConfig.additionalPrompt; + const extendedPrompt = + prompt + ", " + automatic1111DefaultConfig.additionalPrompt; const negativePrompt = automatic1111DefaultConfig.defaultNegativePrompt; const sdConfig: Automatic1111Config = { - imgBase64: qrImgBuffer.toString('base64'), + imgBase64: qrImgBuffer.toString("base64"), prompt: extendedPrompt, negativePrompt, }; - if (method === 'txt2img') { - return sdClient.text2img({...automatic1111DefaultConfig.text2img, ...sdConfig}); + if (method === "txt2img") { + return sdClient.text2img({ + ...automatic1111DefaultConfig.text2img, + ...sdConfig, + }); } - return sdClient.img2img({...automatic1111DefaultConfig.img2img, ...sdConfig}); + return sdClient.img2img({ + ...automatic1111DefaultConfig.img2img, + ...sdConfig, + }); } - private async genQRCodeByComfyUI({qrUrl, qrMargin, prompt, method}: {qrUrl: string, qrMargin: number, prompt: string, method: 'img2img' | 'txt2img'}) { - const qrImgBuffer = await createQRCode({url: normalizeUrl(qrUrl), width: 680, margin: qrMargin }); - const 
extendedPrompt = prompt + ', ' + automatic1111DefaultConfig.additionalPrompt; + private async genQRCodeByComfyUI({ + qrUrl, + qrMargin, + prompt, + method, + }: { + qrUrl: string; + qrMargin: number; + prompt: string; + method: "img2img" | "txt2img"; + }) { + const qrImgBuffer = await createQRCode({ + url: normalizeUrl(qrUrl), + width: 680, + margin: qrMargin, + }); + const extendedPrompt = + prompt + ", " + automatic1111DefaultConfig.additionalPrompt; const negativePrompt = automatic1111DefaultConfig.defaultNegativePrompt; - const comfyClient = new ComfyClient({host: config.comfyHost2, wsHost: config.comfyWsHost2}); - - const filenameHash = crypto.createHash('sha256').update(qrUrl, 'utf8'); - const filename = filenameHash.digest('hex') + '.png'; - - const uploadResult = await comfyClient.uploadImage({filename, fileBuffer: qrImgBuffer, override: true}); - - const workflow = buildQRWorkflow({qrFilename: uploadResult.name, clientId: comfyClient.clientId, negativePrompt, prompt: extendedPrompt}) + const comfyClient = new ComfyClient({ + host: config.comfyHost2, + wsHost: config.comfyWsHost2, + }); + + const filenameHash = crypto.createHash("sha256").update(qrUrl, "utf8"); + const filename = filenameHash.digest("hex") + ".png"; + const uploadResult = await comfyClient.uploadImage({ + filename, + fileBuffer: qrImgBuffer, + override: true, + }); + + const workflow = buildQRWorkflow({ + qrFilename: uploadResult.name, + clientId: comfyClient.clientId, + negativePrompt, + prompt: extendedPrompt, + }); const response = await comfyClient.queuePrompt(workflow); - const promptResult = await comfyClient.waitingPromptExecution(response.prompt_id); + const promptResult = await comfyClient.waitingPromptExecution( + response.prompt_id + ); comfyClient.abortWebsocket(); - - return comfyClient.downloadResult(promptResult.data.output.images[0].filename); + return comfyClient.downloadResult( + promptResult.data.output.images[0].filename + ); } } diff --git 
a/src/modules/sd-images/SDImagesBotBase.ts b/src/modules/sd-images/SDImagesBotBase.ts index ed365ba7..68b3a97a 100644 --- a/src/modules/sd-images/SDImagesBotBase.ts +++ b/src/modules/sd-images/SDImagesBotBase.ts @@ -123,25 +123,25 @@ export class SDImagesBotBase { `${session.message} ${prompt}` : `/${model.aliases[0]} ${prompt}`; - const topicId = await ctx.message?.message_thread_id let msgExtras: MessageExtras = { caption: reqMessage } if (topicId) { msgExtras['message_thread_id'] = topicId - } + } await ctx.replyWithPhoto(new InputFile(imageBuffer),msgExtras); if (ctx.chat?.id && queueMessageId) { await ctx.api.deleteMessage(ctx.chat?.id, queueMessageId); } } catch (e: any) { + ctx.chatAction = null const topicId = await ctx.message?.message_thread_id let msgExtras: MessageExtras = {} if (topicId) { msgExtras['message_thread_id'] = topicId - } + } if (e instanceof GrammyError) { if (e.error_code === 400 && e.description.includes('not enough rights')) { ctx.reply(`Error: The bot does not have permission to send photos in chat... Refunding payments`, msgExtras); @@ -151,8 +151,8 @@ export class SDImagesBotBase { } else { this.logger.error(e.toString()); ctx.reply(`Error: something went wrong... Refunding payments`, msgExtras); - refundCallback(); } + refundCallback() } this.queue = this.queue.filter((v) => v !== uuid); @@ -237,9 +237,17 @@ export class SDImagesBotBase { let msgExtras: MessageExtras = {} if (topicId) { msgExtras['message_thread_id'] = topicId - } - this.logger.error(e.toString()); - ctx.reply(`Error: something went wrong... Refunding payments`, msgExtras); + } + if (e instanceof GrammyError) { + if (e.error_code === 400 && e.description.includes('not enough rights')) { + ctx.reply(`Error: The bot does not have permission to send photos in chat... Refunding payments`, msgExtras); + } else { + ctx.reply(`Error: something went wrong... 
Refunding payments`, msgExtras) + } + } else { + this.logger.error(e.toString()); + ctx.reply(`Error: something went wrong... Refunding payments`, msgExtras); + } refundCallback(); } From 77c252c335c1e634c7da8d31e94c9eb9f426acc0 Mon Sep 17 00:00:00 2001 From: fegloff Date: Tue, 5 Sep 2023 19:05:29 -0500 Subject: [PATCH 13/14] add chatGPT word limit --- src/config.ts | 13 +++++++------ src/modules/open-ai/api/openAi.ts | 4 ++-- src/modules/open-ai/helpers.ts | 11 +++++++++++ src/modules/open-ai/index.ts | 32 +++++++++++++++---------------- 4 files changed, 36 insertions(+), 24 deletions(-) diff --git a/src/config.ts b/src/config.ts index ec8546c4..c979e1b0 100644 --- a/src/config.ts +++ b/src/config.ts @@ -26,10 +26,6 @@ export default { ? parseInt(process.env.SESSION_TIMEOUT) : 48, // in hours openAi: { - maxTokens: - (process.env.OPENAI_MAX_TOKENS && - parseInt(process.env.OPENAI_MAX_TOKENS)) || - 800, // telegram messages has a char limit dalle: { isEnabled: Boolean(parseInt(process.env.IMAGE_GEN_ENABLED || "1")), telegramFileUrl: "https://api.telegram.org/file/bot", @@ -48,6 +44,11 @@ export default { }, }, chatGpt: { + maxTokens: + (process.env.OPENAI_MAX_TOKENS && + parseInt(process.env.OPENAI_MAX_TOKENS)) || + 800, // telegram messages has a char limit + wordLimit: 50, wordCountBetween: process.env.WORD_COUNT_BETWEEN ? parseInt(process.env.WORD_COUNT_BETWEEN) : 100, @@ -64,13 +65,13 @@ export default { prefixes: { chatPrefix: process.env.ASK_PREFIX ? process.env.ASK_PREFIX.split(",") - : ["a.","?",">","."], + : ["a.", "?", ">", "."], dallePrefix: process.env.DALLE_PREFIX ? process.env.DALLE_PREFIX.split(",") : ["d."], newPrefix: process.env.NEW_PREFIX ? process.env.NEW_PREFIX.split(",") - : ["n.",".."], + : ["n.", ".."], }, minimumBalance: process.env.MIN_BALANCE ? 
parseInt(process.env.MIN_BALANCE) diff --git a/src/modules/open-ai/api/openAi.ts b/src/modules/open-ai/api/openAi.ts index df4fe92f..a910b43a 100644 --- a/src/modules/open-ai/api/openAi.ts +++ b/src/modules/open-ai/api/openAi.ts @@ -99,7 +99,7 @@ export async function chatCompletion( try { const payload = { model: model, - max_tokens: limitTokens ? config.openAi.maxTokens : undefined, + max_tokens: limitTokens ? config.openAi.chatGpt.maxTokens : undefined, temperature: config.openAi.dalle.completions.temperature, messages: conversation, }; @@ -143,7 +143,7 @@ export const streamChatCompletion = async ( messages: conversation as OpenAI.Chat.Completions.CreateChatCompletionRequestMessage[], stream: true, - max_tokens: limitTokens ? config.openAi.maxTokens : undefined, + max_tokens: limitTokens ? config.openAi.chatGpt.maxTokens : undefined, temperature: config.openAi.dalle.completions.temperature, }); let wordCount = 0; diff --git a/src/modules/open-ai/helpers.ts b/src/modules/open-ai/helpers.ts index cbfdf7c8..e01830f7 100644 --- a/src/modules/open-ai/helpers.ts +++ b/src/modules/open-ai/helpers.ts @@ -230,3 +230,14 @@ export const getPromptPrice = (completion: string, data: ChatGptPayload) => { completionTokens, }; }; + +export const limitPrompt = (prompt: string) => { + const wordCountPattern = /(\d+)\s*word/g; + const match = wordCountPattern.exec(prompt); + + if (match) { + return `${prompt}`; + } + + return `${prompt} in around ${config.openAi.chatGpt.wordLimit} words`; +}; diff --git a/src/modules/open-ai/index.ts b/src/modules/open-ai/index.ts index c274af1a..08f8d27f 100644 --- a/src/modules/open-ai/index.ts +++ b/src/modules/open-ai/index.ts @@ -31,6 +31,7 @@ import { hasUrl, hasUsernamePassword, isMentioned, + limitPrompt, MAX_TRIES, messageTopic, preparePrompt, @@ -403,7 +404,7 @@ export class OpenAIBot { // const { model } = ctx.session.openAi.chatGpt; const chatModel = getChatModel(model); const webCrawlerMaxTokens = - chatModel.maxContextTokens - 
config.openAi.maxTokens * 2; + chatModel.maxContextTokens - config.openAi.chatGpt.maxTokens * 2; const { user, password } = hasUsernamePassword(prompt); if (user && password) { // && ctx.chat?.type !== 'private' @@ -436,22 +437,21 @@ export class OpenAIBot { ) { this.onNotBalanceMessage(ctx); } else { + let newPrompt = ""; if (prompt !== "") { - chat.push({ - content: `${ - command === "sum" && "Summarize" - } ${prompt} this text: ${webContent.urlText}`, - role: "user", - }); + newPrompt = `${command === "sum" && "Summarize"} ${limitPrompt( + prompt + )} this text: ${webContent.urlText}`; } else { - chat.push({ - content: `${ - command === "sum" && "Summarize this text in 50 words:" - } "${webContent.urlText}"`, - role: "user", - }); + newPrompt = `${ + command === "sum" && + `Summarize this text in ${config.openAi.chatGpt.wordLimit} words:` + } "${webContent.urlText}"`; } - + chat.push({ + content: newPrompt, + role: "user", + }); if (prompt || command === "sum") { const payload = { conversation: chat, @@ -612,9 +612,9 @@ export class OpenAIBot { "ask" ); } else { - chatConversation.push({ + const newPrompt = chatConversation.push({ role: "user", - content: prompt, + content: limitPrompt(prompt), }); const payload = { conversation: chatConversation!, From 88b0912ece0d84a0854c391f36c0f9b8a4f1e0a4 Mon Sep 17 00:00:00 2001 From: fegloff Date: Tue, 5 Sep 2023 19:13:57 -0500 Subject: [PATCH 14/14] update limitPrompt pattern --- src/modules/open-ai/helpers.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/modules/open-ai/helpers.ts b/src/modules/open-ai/helpers.ts index e01830f7..e9e46866 100644 --- a/src/modules/open-ai/helpers.ts +++ b/src/modules/open-ai/helpers.ts @@ -232,7 +232,7 @@ export const getPromptPrice = (completion: string, data: ChatGptPayload) => { }; export const limitPrompt = (prompt: string) => { - const wordCountPattern = /(\d+)\s*word/g; + const wordCountPattern = /(\d+)\s*word(s)?/g; const match = 
wordCountPattern.exec(prompt); if (match) {