Integration of LangChain into the Discord Bot Service #19

Status: Open. Wants to merge 13 commits into base: develop.
4 changes: 2 additions & 2 deletions .github/workflows/main.yml
@@ -2,7 +2,7 @@ name: CI
on:
push:
branches:
- '*'
- '**/*'
jobs:
main:
runs-on: ubuntu-latest
@@ -31,7 +31,7 @@ jobs:
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
file: ./coverage/apps/yggdrasil-core-engine/lcov.info,./coverage/apps/yggdrasil-discord-client/lcov.info,./coverage/libs/nest-openai-client/lcov.info,./coverage/libs/nest-winston/lcov.info,./coverage/libs/utils/lcov.info
file: ./coverage/apps/yggdrasil-core-engine/lcov.info,./coverage/apps/yggdrasil-discord-client/lcov.info,./coverage/libs/nest-openai-client/lcov.info,./coverage/libs/nest-winston/lcov.info,./coverage/libs/utils/lcov.info,./coverage/libs/nest-langchain/lcov.info
push_to_gitlab:
# if branch is develop, push to gitlab develop branch
if: github.ref == 'refs/heads/develop'
9 changes: 8 additions & 1 deletion .github/workflows/release.yml
@@ -13,10 +13,17 @@ jobs:
with:
node-version: '18.x'
registry-url: 'https://registry.npmjs.org'
- name: Install dependencies, build and publish 🔧
- name: Install dependencies, build 🔧 and publish 🚀 nest-openai-client
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
npm ci
npx nx run nest-openai-client:build
npx nx run nest-openai-client:publish --ver=${{ github.event.release.tag_name }} --tag=latest
- name: Install dependencies, build 🔧 and publish 🚀 nest-langchain
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
npm ci
npx nx run nest-langchain:build
npx nx run nest-langchain:publish --ver=${{ github.event.release.tag_name }} --tag=latest
28 changes: 28 additions & 0 deletions .verdaccio/config.yml
@@ -0,0 +1,28 @@
# path to a directory with all packages
storage: ../tmp/local-registry/storage

# a list of other known repositories we can talk to
uplinks:
npmjs:
url: https://registry.npmjs.org/
maxage: 60m

packages:
'**':
# give all users (including non-authenticated users) full access
# because it is a local registry
access: $all
publish: $all
unpublish: $all

# if package is not available locally, proxy requests to npm registry
proxy: npmjs

# log settings
logs:
type: stdout
format: pretty
level: warn

publish:
allow_offline: true # set offline to true to allow publish offline
1 change: 1 addition & 0 deletions README.md
@@ -9,6 +9,7 @@ you can use grpc, restful, etc. to communicate with the micro-service. and we ha
## Packages

- [Nest.js OpenAI Client Package](https://www.npmjs.com/package/@sd0x/nest-openai-client/)
- [Nest.js LangChain Package](https://www.npmjs.com/package/@sd0x/nest-langchain/)

## TODO

19 changes: 11 additions & 8 deletions apps/yggdrasil-core-engine/.env.local.example
@@ -1,12 +1,15 @@
PACKAGE_NAME=Yggdrasil Core Engine
SERVICE_NAME=yggdrasil-core-engine
LOGGER_LEVEL=debug
CORE_ENGINE_API_KEY=
CHATGPT_API_KEY=
CHATGPT_ORG=
OPENAI_API_TYPE=
AZURE_API_BASE_PATH=
AZURE_CHATGPT_API_KEY=
AZURE_DEPLOYMENT_NAME=
AZURE_API_VERSION='2023-03-15-preview'
OPENAI_API_KEY=
OPENAI_ORG=
A_AZURE_OPENAI_ENABLE=
A_AZURE_OPENAI_API_KEY=
A_AZURE_OPENAI_ENDPOINT=
A_AZURE_OPENAI_DEPLOYMENT_NAME=
A_AZURE_OPENAI_MODEL_NAME=
A_AZURE_OPENAI_INSTANCE_NAME=
A_AZURE_OPENAI_API_VERSION=2023-07-01-preview
RPC_API_KEY=
MONGO_DB_URI=
MONGO_DB_URI=
6 changes: 5 additions & 1 deletion apps/yggdrasil-core-engine/.eslintrc.json
@@ -14,5 +14,9 @@
"files": ["*.js", "*.jsx"],
"rules": {}
}
]
],
"rules": {
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": ["error"]
}
}
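The override above turns off the base `no-unused-vars` rule in favor of the TypeScript-aware variant, the usual pairing so that type-only constructs are not misreported. A minimal illustration of what the new setting flags (the function below is hypothetical, not from this PR):

```typescript
// With "@typescript-eslint/no-unused-vars": ["error"], an unused binding is
// reported; the base "no-unused-vars" rule stays off to avoid double-reporting
// on TypeScript-only syntax.
export function demo(): number {
  const unused = 42; // flagged: 'unused' is assigned a value but never used
  return 0;
}
```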
2 changes: 1 addition & 1 deletion apps/yggdrasil-core-engine/src/app/app.controller.spec.ts
@@ -41,7 +41,7 @@ describe('AppController', () => {

describe('getData', () => {
it('should return "Hello API"', () => {
return;
expect(app.get<AppController>(AppController)).toBeDefined();
});
});
});
5 changes: 4 additions & 1 deletion apps/yggdrasil-core-engine/src/app/app.module.ts
@@ -16,6 +16,8 @@ import { openAIConfig } from './config/open.ai.config';
import { NestWinstonModule } from '@asgard-hub/nest-winston';
import { ChatGPTModule } from './chatgpt/chatgpt.module';
import { mongoDBConfig } from './config/mongo.db.config';
import { LLMAIController } from './controllers/llm-ai/llm-ai.controller';
import { LLMAIModule } from './llm-ai/llm-ai.module';

@Module({
imports: [
@@ -43,8 +45,9 @@ import { mongoDBConfig } from './config/mongo.db.config';
inject: [ConfigService],
}),
ChatGPTModule,
LLMAIModule,
],
controllers: [AppController],
controllers: [AppController, LLMAIController],
providers: [ConfigService, AppService],
exports: [ConfigService],
})
10 changes: 8 additions & 2 deletions apps/yggdrasil-core-engine/src/app/auth/auth.guard.ts
@@ -1,12 +1,18 @@
import { CanActivate, ExecutionContext, Injectable } from '@nestjs/common';
import {
CanActivate,
ExecutionContext,
Inject,
Injectable,
} from '@nestjs/common';
import { Observable } from 'rxjs';
import { AsgardLogger } from '@asgard-hub/nest-winston';
import { AsgardLogger, AsgardLoggerSupplement } from '@asgard-hub/nest-winston';
import { ConfigService } from '@nestjs/config';
import { ConfigPath, IAppConfig } from '../config/app.config';

@Injectable()
export class AuthGuard implements CanActivate {
constructor(
@Inject(AsgardLoggerSupplement.LOGGER_HELPER_SERVICE)
private readonly asgardLogger: AsgardLogger,
private readonly configService: ConfigService
) {}
@@ -6,7 +6,7 @@ import { TXTChatGptService } from './service/txt-chat-gpt/txt-chat-gpt.service';
import { PDFChatGPTService } from './service/pdf-chat-gpt/pdf-chat-gpt.service';
import { ImageChatGptService } from './service/image-chat-gpt/image-chat-gpt.service';
import { GatewayService } from './service/gateway-service/gateway-service.service';
import { ChatGPTController } from '../controllers/chatgpt.controller';
import { ChatGPTController } from '../controllers/chatgpt/chatgpt.controller';
import { ServiceModule } from '../services/service.module';
import { DataSourceAdapterModule } from '../data-source-adapter/data-source-adapter.module';
import { AudioChatGPTService } from './service/audio-chat-gpt/audio-chat-gpt.service';
@@ -103,7 +103,7 @@ export class AudioChatGPTService extends BaseFeatureChatGPTService<AudioAdapter>
this.asgardLogger.log(`percentage: ${formatted}`);
await delay(Math.random() * 500 + 256);
})
.process(async (path, index, pool) => {
.process(async (path, index) => {
const transcriptionResponse =
await this.getAdapter().getDataFromPath<PartCreateTranscriptionResponse>(
path
@@ -116,7 +116,9 @@
});

promisePool.errors?.length > 0 &&
promisePool.errors.forEach((e) => this.asgardLogger.error(e.raw, e.stack, e));
promisePool.errors.forEach((e) =>
this.asgardLogger.error(e.raw, e.stack, e)
);
const results = promisePool.results.filter(
(r) => r !== undefined && (r as any) !== Symbol.for('failed')
);
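Across these services the change simply drops the unused third `pool` argument from the `.process()` callback, which keeps the new `@typescript-eslint/no-unused-vars` rule quiet; the same trim recurs in the image, PDF, TXT, URL, and YouTube services below. A minimal sketch of the `@supercharge/promise-pool` pattern involved, with `transcribeChunk` and `paths` as hypothetical stand-ins for the real calls:

```typescript
import { PromisePool } from '@supercharge/promise-pool';

// Hypothetical helper standing in for the real transcription call.
declare function transcribeChunk(path: string, index: number): Promise<string>;

async function transcribeAll(paths: string[]): Promise<string[]> {
  const { results, errors } = await PromisePool
    .withConcurrency(2)
    .for(paths)
    // the callback also receives a third `pool` argument, but it is optional,
    // so omitting it (as the diff does) changes nothing at runtime
    .process(async (path, index) => transcribeChunk(path, index));

  // mirror the error handling above: each pool error exposes the raw cause
  errors.forEach((e) => console.error(e.message, e.raw));
  return results;
}
```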
@@ -1,3 +1,4 @@
/* eslint-disable @typescript-eslint/no-unused-vars */
import { DataSourceType } from '../../data-source-adapter/adapter/interface/data-source-type.enum';
import { DataSourceAdapterService } from '../../data-source-adapter/data-source-adapter.service';
import { PartCreateChatCompletionResponse } from '../interface/create.completion.response.usage.for.rpc.interface';
@@ -57,7 +57,7 @@ export class ImageChatGptService extends BaseFeatureChatGPTService<ImageAdapter>
this.asgardLogger.log(`percentage: ${formatted}`);
await delay(Math.random() * 500 + 256);
})
.process(async (content, index, pool) => {
.process(async (content, index) => {
const summary = await this.getTXTSummary(content, {
user,
partNumber: index + 1,
@@ -51,7 +51,7 @@ export class PDFChatGPTService extends BaseFeatureChatGPTService<PDFAdapter> {
this.asgardLogger.log(`percentage: ${formatted}`);
await delay(Math.random() * 500 + 256);
})
.process(async (content, index, pool) => {
.process(async (content, index) => {
const summary = await this.getPDFSummary(content, {
user,
partNumber: index + 1,
@@ -51,7 +51,7 @@ export class TXTChatGptService extends BaseFeatureChatGPTService<TXTAdapter> {
this.asgardLogger.log(`percentage: ${formatted}`);
await delay(Math.random() * 500 + 256);
})
.process(async (content, index, pool) => {
.process(async (content, index) => {
const summary = await this.getTXTSummary(content, {
user,
partNumber: index + 1,
@@ -50,7 +50,7 @@ export class URLChatGPTService extends BaseFeatureChatGPTService<URLAdapter> {
this.asgardLogger.log(`percentage: ${formatted}`);
await delay(Math.random() * 500 + 256);
})
.process(async (content, index, pool) => {
.process(async (content, index) => {
const summary = await this.getURLSummary(content, {
title,
user,
@@ -1,4 +1,4 @@
import { Injectable, Scope } from '@nestjs/common';
import { Injectable } from '@nestjs/common';
import { ChatGPTGateWayService } from '../../../services/chatgpt-gateway-service/chatgpt.service';
import PromisePool from '@supercharge/promise-pool/dist';
import { splitAudioFile } from '@asgard-hub/utils';
@@ -83,7 +83,7 @@ export class YTChatGPTService extends BaseFeatureChatGPTService<YoutubeAdapter>
this.asgardLogger.log(`percentage: ${formatted}`);
await delay(Math.random() * 500 + 256);
})
.process(async (path, index, pool) => {
.process(async (path, index) => {
const { summary, transcription } = await this.getYoutubeSummary(
path,
{
14 changes: 10 additions & 4 deletions apps/yggdrasil-core-engine/src/app/config/azure.openai.config.ts
@@ -6,11 +6,17 @@ export interface IAzureOpenAIConfig {
apiKey: string;
endpoint: string;
deploymentName: string;
instanceName: string;
apiVersion: string;
modelName: string;
}

export const azureOpenAIConfig = registerAs(ConfigPath.AzureOpenAI, () => ({
enable: process.env.AZURE_OPENAI_ENABLE ? true : false,
apiKey: process.env.AZURE_OPENAI_API_KEY,
endpoint: process.env.AZURE_OPENAI_ENDPOINT,
deploymentName: process.env.AZURE_OPENAI_DEPLOYMENT_NAME,
enable: process.env.A_AZURE_OPENAI_ENABLE === 'true' ? true : false,
apiKey: process.env.A_AZURE_OPENAI_API_KEY,
endpoint: process.env.A_AZURE_OPENAI_ENDPOINT,
deploymentName: process.env.A_AZURE_OPENAI_DEPLOYMENT_NAME,
instanceName: process.env.A_AZURE_OPENAI_INSTANCE_NAME,
apiVersion: process.env.A_AZURE_OPENAI_API_VERSION,
modelName: process.env.A_AZURE_OPENAI_MODEL_NAME,
}));
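The new `instanceName`, `apiVersion`, and `modelName` fields line up with the Azure options that LangChain's JS chat model accepts. How `nest-langchain` actually consumes `IAzureOpenAIConfig` is not shown in this diff, so the wiring below is only a sketch under that assumption; the import path also varies across langchain versions.

```typescript
// Sketch only: assumes these config fields are passed straight through to
// LangChain's ChatOpenAI Azure options. Factory name and import paths are
// illustrative, not taken from this PR.
import { ChatOpenAI } from 'langchain/chat_models/openai';
import { IAzureOpenAIConfig } from './azure.openai.config';

export function createAzureChatModel(config: IAzureOpenAIConfig): ChatOpenAI {
  return new ChatOpenAI({
    modelName: config.modelName,
    azureOpenAIApiKey: config.apiKey,
    azureOpenAIApiInstanceName: config.instanceName,
    azureOpenAIApiDeploymentName: config.deploymentName,
    azureOpenAIApiVersion: config.apiVersion,
  });
}
```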
3 changes: 3 additions & 0 deletions apps/yggdrasil-core-engine/src/app/config/open.ai.config.ts
@@ -3,8 +3,11 @@ import { ConfigPath } from './app.config';

export interface IOpenAIConfig {
apiKey: string;
modelName?: string;
tokenLimit?: number;
}

export const openAIConfig = registerAs(ConfigPath.OpenAI, () => ({
apiKey: process.env.OPENAI_API_KEY,
modelName: 'gpt-3.5-turbo-16k-0613',
}));
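The optional `modelName` and `tokenLimit` fields are exposed through Nest's namespaced configuration, so consumers read them via `ConfigService` with the `ConfigPath.OpenAI` token. A minimal sketch of that typed access, assuming the standard `registerAs` pattern already used here (the consumer class is hypothetical):

```typescript
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { ConfigPath } from './app.config';
import { IOpenAIConfig } from './open.ai.config';

@Injectable()
export class OpenAIOptionsReader {
  constructor(private readonly configService: ConfigService) {}

  // Returns the configured model, falling back to the default registered above.
  modelName(): string {
    const config = this.configService.get<IOpenAIConfig>(ConfigPath.OpenAI);
    return config?.modelName ?? 'gpt-3.5-turbo-16k-0613';
  }
}
```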