
Merge pull request #240 from harmony-one/bot-chat-permissions
Bot chat permissions
theofandrich authored Sep 6, 2023
2 parents 23fef3c + 88b0912 commit 22d7c0b
Showing 10 changed files with 871 additions and 649 deletions.
34 changes: 18 additions & 16 deletions src/bot.ts
@@ -1,4 +1,4 @@
-import {TranslateBot} from "./modules/translate/TranslateBot";
+import { TranslateBot } from "./modules/translate/TranslateBot";

require("events").EventEmitter.defaultMaxListeners = 30;
import express from "express";
@@ -98,8 +98,8 @@ function createInitialSessionData(): BotSessionData {
},
translate: {
languages: [],
-enable: false
-}
+enable: false,
+},
};
}

@@ -270,21 +270,23 @@ const onMessage = async (ctx: OnMessageContext) => {
const price = translateBot.getEstimatedPrice(ctx);
const isPaid = await payments.pay(ctx, price);

-if(isPaid) {
-const response = await translateBot.onEvent(ctx, (reason?: string) => {
-payments.refundPayment(reason, ctx, price);
-}).catch((e) => {
-payments.refundPayment(e.message || "Unknown error", ctx, price);
-return {next: false};
-});
+if (isPaid) {
+const response = await translateBot
+.onEvent(ctx, (reason?: string) => {
+payments.refundPayment(reason, ctx, price);
+})
+.catch((e) => {
+payments.refundPayment(e.message || "Unknown error", ctx, price);
+return { next: false };
+});

if (!response.next) {
return;
}
}
}

-if (openAiBot.isSupportedEvent(ctx)) {
+if (await openAiBot.isSupportedEvent(ctx)) {
if (ctx.session.openAi.imageGen.isEnabled) {
const price = openAiBot.getEstimatedPrice(ctx);
const isPaid = await payments.pay(ctx, price!);
@@ -438,15 +440,15 @@ bot.command("love", (ctx) => {
});
});

-bot.command('stop', (ctx) => {
+bot.command("stop", (ctx) => {
logger.info("/stop command");
ctx.session.openAi.chatGpt.chatConversation = [];
ctx.session.openAi.chatGpt.usage = 0;
-ctx.session.openAi.chatGpt.price = 0;
+ctx.session.openAi.chatGpt.price = 0;
ctx.session.translate.enable = false;
-ctx.session.translate.languages = []
-ctx.session.oneCountry.lastDomain = ""
-})
+ctx.session.translate.languages = [];
+ctx.session.oneCountry.lastDomain = "";
+});
// bot.command("memo", (ctx) => {
// ctx.reply(MEMO.text, {
// parse_mode: "Markdown",
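Note: the reformatted onMessage hunk above keeps the pay-first, refund-on-failure flow intact. Below is a minimal TypeScript sketch of that pattern, using hypothetical PaymentService and EventBot interfaces in place of the repository's real modules; it illustrates the shape of the flow, not the actual implementation.

// Minimal sketch of the pay-then-refund flow (hypothetical interfaces).
interface PaymentService {
  pay(ctx: unknown, price: number): Promise<boolean>;
  refundPayment(reason: string | undefined, ctx: unknown, price: number): void;
}

interface EventBot {
  getEstimatedPrice(ctx: unknown): number;
  onEvent(ctx: unknown, refund: (reason?: string) => void): Promise<{ next: boolean }>;
}

async function handleWithPayment(ctx: unknown, bot: EventBot, payments: PaymentService) {
  const price = bot.getEstimatedPrice(ctx);
  const isPaid = await payments.pay(ctx, price);
  if (!isPaid) return;

  // Any failure path refunds the estimated charge before giving up.
  const response = await bot
    .onEvent(ctx, (reason?: string) => payments.refundPayment(reason, ctx, price))
    .catch((e: Error) => {
      payments.refundPayment(e.message || "Unknown error", ctx, price);
      return { next: false };
    });

  if (!response.next) return;
  // otherwise fall through to the next handler
}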
13 changes: 7 additions & 6 deletions src/config.ts
@@ -26,10 +26,6 @@ export default {
? parseInt(process.env.SESSION_TIMEOUT)
: 48, // in hours
openAi: {
-maxTokens:
-(process.env.OPENAI_MAX_TOKENS &&
-parseInt(process.env.OPENAI_MAX_TOKENS)) ||
-800, // telegram messages has a char limit
dalle: {
isEnabled: Boolean(parseInt(process.env.IMAGE_GEN_ENABLED || "1")),
telegramFileUrl: "https://api.telegram.org/file/bot",
@@ -48,6 +44,11 @@ export default {
},
},
chatGpt: {
+maxTokens:
+(process.env.OPENAI_MAX_TOKENS &&
+parseInt(process.env.OPENAI_MAX_TOKENS)) ||
+800, // telegram messages has a char limit
+wordLimit: 50,
wordCountBetween: process.env.WORD_COUNT_BETWEEN
? parseInt(process.env.WORD_COUNT_BETWEEN)
: 100,
@@ -64,13 +65,13 @@
prefixes: {
chatPrefix: process.env.ASK_PREFIX
? process.env.ASK_PREFIX.split(",")
: ["a.","?",">","."],
: ["a.", "?", ">", "."],
dallePrefix: process.env.DALLE_PREFIX
? process.env.DALLE_PREFIX.split(",")
: ["d."],
newPrefix: process.env.NEW_PREFIX
? process.env.NEW_PREFIX.split(",")
: ["n."],
: ["n.", ".."],
},
minimumBalance: process.env.MIN_BALANCE
? parseInt(process.env.MIN_BALANCE)
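Note: this hunk moves maxTokens from config.openAi down into config.openAi.chatGpt, so readers of the old path have to be updated (see the openAi.ts hunks below). A rough sketch of the resulting shape and the new access path, showing only the relevant keys and nothing from the rest of the config file:

// Sketch of the relocated setting; structure is illustrative, values come from the diff.
const config = {
  openAi: {
    chatGpt: {
      // formerly config.openAi.maxTokens
      maxTokens:
        (process.env.OPENAI_MAX_TOKENS &&
          parseInt(process.env.OPENAI_MAX_TOKENS)) ||
        800, // telegram messages have a char limit
      wordLimit: 50,
    },
  },
};

// Callers now read the nested path:
const maxTokens = config.openAi.chatGpt.maxTokens;
console.log(`max_tokens limit: ${maxTokens}`);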
11 changes: 9 additions & 2 deletions src/modules/open-ai/api/openAi.ts
@@ -17,6 +17,7 @@ import {
DalleGPTModel,
DalleGPTModels,
} from "../types";
+import { getMessageExtras } from "../helpers";

const openai = new OpenAI({
apiKey: config.openAiKey,
@@ -98,7 +99,7 @@ export async function chatCompletion(
try {
const payload = {
model: model,
-max_tokens: limitTokens ? config.openAi.maxTokens : undefined,
+max_tokens: limitTokens ? config.openAi.chatGpt.maxTokens : undefined,
temperature: config.openAi.dalle.completions.temperature,
messages: conversation,
};
@@ -134,12 +135,15 @@ export const streamChatCompletion = async (
const wordCountMinimum = config.openAi.chatGpt.wordCountBetween;
return new Promise<string>(async (resolve, reject) => {
try {
+// const extras = getMessageExtras({
+// topicId: ctx.message?.message_thread_id
+// })
const stream = await openai.chat.completions.create({
model: model,
messages:
conversation as OpenAI.Chat.Completions.CreateChatCompletionRequestMessage[],
stream: true,
-max_tokens: limitTokens ? config.openAi.maxTokens : undefined,
+max_tokens: limitTokens ? config.openAi.chatGpt.maxTokens : undefined,
temperature: config.openAi.dalle.completions.temperature,
});
let wordCount = 0;
@@ -157,6 +161,9 @@
completion = completion.replaceAll("..", "");
completion += "..";
wordCount = 0;
+// const extras = getMessageExtras({
+// topicId: ctx.message?.message_thread_id
+// })
await ctx.api
.editMessageText(ctx.chat?.id!, msgId, completion)
.catch(async (e: any) => {
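Note: the max_tokens references in chatCompletion and streamChatCompletion now follow the relocated config path, and the commented-out getMessageExtras calls hint at topic/thread support to come. For orientation, here is a minimal sketch of the throttled-edit streaming pattern this function uses, with a generic token stream and an injected edit callback standing in for the real OpenAI and grammY clients; the names and error handling are assumptions, not the repository's API.

// Accumulate streamed chunks and only push an edit every `wordCountBetween`
// words, roughly as streamChatCompletion does with ctx.api.editMessageText.
async function streamWithThrottledEdits(
  chunks: AsyncIterable<string>,                // stands in for the OpenAI stream
  editMessage: (text: string) => Promise<void>, // stands in for the Telegram edit call
  wordCountBetween: number
): Promise<string> {
  let completion = "";
  let wordCount = 0;
  for await (const chunk of chunks) {
    completion += chunk;
    wordCount += chunk.split(/\s+/).filter(Boolean).length;
    if (wordCount >= wordCountBetween) {
      wordCount = 0;
      // Edit failures are ignored in this sketch; the real handler has its own catch.
      await editMessage(completion).catch(() => undefined);
    }
  }
  await editMessage(completion); // final flush with the complete text
  return completion;
}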
168 changes: 0 additions & 168 deletions src/modules/open-ai/controller/index.ts

This file was deleted.

