Skip to content

Commit

Permalink
Create Models
Browse files Browse the repository at this point in the history
  • Loading branch information
sonuku092 committed Feb 29, 2024
1 parent a7f45d9 commit f4ac32c
Show file tree
Hide file tree
Showing 11 changed files with 2,450 additions and 169 deletions.
2,404 changes: 2,269 additions & 135 deletions backend/package-lock.json

Large diffs are not rendered by default.

3 changes: 3 additions & 0 deletions backend/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,10 @@
"@nestjs/platform-express": "^10.0.0",
"@nestjs/platform-socket.io": "^10.3.3",
"@nestjs/websockets": "^10.3.3",
"@tensorflow/tfjs": "^4.17.0",
"@tensorflow/tfjs-vis": "^1.1.0",
"axios": "^1.6.7",
"dotenv": "^16.4.5",
"reflect-metadata": "^0.2.0",
"rxjs": "^7.8.1",
"socket.io": "^4.7.4",
Expand Down
21 changes: 14 additions & 7 deletions backend/src/chats/chats.controller.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,20 @@
// src/chats/chats.controller.ts

import { Controller, Post, Body } from '@nestjs/common';
import { ChatsService } from './chats.service';
import { UserMessageDto } from '../shared/dto/user-message.dto';
import { OpenaiService } from '../shared/services/openai.service';

/**
 * HTTP entry point for chat messages.
 *
 * POST /chats — accepts a UserMessageDto body and returns the model's reply
 * text. This revision routes directly through OpenaiService.getResponse;
 * ChatsService is still injected for its other responsibilities.
 *
 * NOTE(review): the source span for this block was a mangled diff (old
 * '@Controller("api")' handler interleaved with the new one); this is the
 * reconstructed post-commit version.
 */
@Controller('chats')
export class ChatsController {
  constructor(
    private readonly chatsService: ChatsService,
    private readonly openaiService: OpenaiService,
  ) {}

  /**
   * Handles a user chat message.
   * @param userMessageDto validated DTO carrying the raw message string
   * @returns the assistant's reply text
   */
  @Post()
  async handleUserMessage(@Body() userMessageDto: UserMessageDto): Promise<string> {
    return this.openaiService.getResponse(userMessageDto.message);
  }
}
9 changes: 7 additions & 2 deletions backend/src/chats/chats.module.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,12 @@
// src/chats/chats.module.ts

import { Module } from '@nestjs/common';
import { MyWebSocketGateway } from './websocket.gateway'; // Import MyWebSocketGateway
import { ChatsController } from './chats.controller';
import { ChatsService } from './chats.service';
import { OpenaiService } from '../shared/services/openai.service'; // Import OpenaiService

/**
 * Feature module for the chat endpoint.
 *
 * The scraped diff left two `providers:` keys in one object literal (old
 * gateway registration + new services), which is invalid metadata; this is
 * the reconstructed post-commit module.
 *
 * NOTE(review): MyWebSocketGateway is no longer registered here — confirm
 * the gateway was intentionally dropped from this module in this commit.
 */
@Module({
  controllers: [ChatsController],
  providers: [ChatsService, OpenaiService],
})
export class ChatsModule {}
6 changes: 3 additions & 3 deletions backend/src/chats/chats.service.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
import { Injectable } from '@nestjs/common';
import { OpenAIService } from '../shared/services/openai.service';
import { OpenaiService } from '../shared/services/openai.service';

@Injectable()
export class ChatsService {
constructor(private readonly openAIService: OpenAIService) {}
constructor(private readonly openAIService: OpenaiService) {}

async handleUserMessage(userMessage: string): Promise<string> {
async handleUserMessage(userMessage: string): Promise<void | string> {
// Here you can implement your logic to fetch responses for fixed questions
// For example, using a service like OpenAI or any other method you prefer
const response = await this.openAIService.fetchResponseForFixedQuestion(userMessage);
Expand Down
12 changes: 12 additions & 0 deletions backend/src/models/heart-disease/data/heart-disease-dataset.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
age,sex,cp,trestbps,chol,fbs,restecg,thalach,exang,oldpeak,slope,ca,thal,target
63,1,3,145,233,1,0,150,0,2.3,0,0,1,1
37,1,2,130,250,0,1,187,0,3.5,0,0,2,1
41,0,1,130,204,0,0,172,0,1.4,2,0,2,1
56,1,1,120,236,0,1,178,0,0.8,2,0,2,1
57,0,0,120,354,0,1,163,1,0.6,2,0,2,1
57,1,0,140,192,0,1,148,0,0.4,1,0,1,1
56,0,1,140,294,0,0,153,0,1.3,1,0,2,1
44,1,1,120,263,0,1,173,0,0,2,0,3,1
52,1,2,172,199,1,1,162,0,0.5,2,0,3,1
57,1,2,150,168,0,1,174,0,1.6,2,0,2,1
...
36 changes: 36 additions & 0 deletions backend/src/models/heart-disease/heart-disease-model.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import * as tf from '@tensorflow/tfjs';
import axios from 'axios';

async function loadHeartDiseaseDataset() {
try {
// Load the CSV data
const response = await axios.get('https://example.com/heart-disease-dataset.csv');
const csvData = response.data;

// Parse the CSV data
const rows = csvData.split('\n').map(row => row.split(','));

// Extract features and labels
const x = rows.map(row => row.slice(0, -1).map(parseFloat));
const y = rows.map(row => parseFloat(row[row.length - 1]));

// Convert data to tensors
const xs = tf.tensor2d(x);
const ys = tf.tensor2d(y, [y.length, 1]);

return { xs, ys };
} catch (error) {
console.error('Error loading heart disease dataset:', error);
return null;
}
}

// Usage
async function trainHeartDiseaseModel() {
const { xs, ys } = await loadHeartDiseaseDataset();
if (xs && ys) {
// Define and train the model using xs and ys
}
}

trainHeartDiseaseModel();
Empty file.
42 changes: 42 additions & 0 deletions backend/src/models/skin-disease/skin-disease-model.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import * as tf from '@tensorflow/tfjs';
import { loadDataset, preprocessData } from '../utils/utils'; // Assuming you have utility functions to load and preprocess data

/**
 * Trains a small CNN classifier (3 classes) on the skin-disease dataset and
 * saves it to disk.
 *
 * Expects `preprocessData` to return normalized image tensors `xs` and
 * ONE-HOT encoded labels `ys` (see utils.ts).
 */
async function trainSkinDiseaseModel() {
  // Load and preprocess the dataset
  const { images, labels } = await loadDataset('skin-disease-dataset'); // Replace 'skin-disease-dataset' with your dataset name
  const { xs, ys } = preprocessData(images, labels);

  // Define the model architecture: one conv/pool stage, then a dense head.
  const model = tf.sequential();
  model.add(tf.layers.conv2d({
    inputShape: [224, 224, 3],
    kernelSize: 3,
    filters: 16,
    activation: 'relu'
  }));
  model.add(tf.layers.maxPooling2d({ poolSize: 2, strides: 2 }));
  model.add(tf.layers.flatten());
  model.add(tf.layers.dense({ units: 128, activation: 'relu' }));
  model.add(tf.layers.dropout({ rate: 0.5 }));
  model.add(tf.layers.dense({ units: 3, activation: 'softmax' }));

  // Compile the model.
  // BUG FIX: preprocessData one-hot encodes the labels, so the loss must be
  // 'categoricalCrossentropy'. 'sparseCategoricalCrossentropy' expects raw
  // integer class indices and would fail/mistrain on one-hot targets.
  model.compile({
    optimizer: 'adam',
    loss: 'categoricalCrossentropy',
    metrics: ['accuracy']
  });

  // Train with a validation split and early stopping on validation loss.
  await model.fit(xs, ys, {
    epochs: 10,
    validationSplit: 0.2,
    callbacks: tf.callbacks.earlyStopping({ monitor: 'val_loss', patience: 2 })
  });

  // Persist the trained weights next to the process working directory.
  await model.save('file://./skin-disease-model');
}

// Call the function to train the model
trainSkinDiseaseModel();
26 changes: 26 additions & 0 deletions backend/src/models/utils/utils.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import * as tf from '@tensorflow/tfjs';

/**
 * Loads the raw images and labels for the dataset named `datasetName`.
 *
 * Placeholder implementation: the actual loading logic is not written yet,
 * so every call currently resolves to two empty arrays.
 *
 * @param datasetName identifier of the dataset to load
 * @returns the raw `images` and parallel `labels` arrays
 */
export async function loadDataset(datasetName: string): Promise<{ images: any[], labels: any[] }> {
  // TODO: dispatch on datasetName and read the dataset from disk/remote.
  const loaded: { images: any[]; labels: any[] } = { images: [], labels: [] };
  return loaded;
}

/**
 * Converts raw images/labels into model-ready tensors.
 *
 * @param images raw image data (nested arrays); divided by 255 to normalize
 *               pixel values into [0, 1]
 * @param labels flat array of integer class indices, one per image
 * @returns `xs` normalized image tensor, `ys` one-hot encoded label tensor
 * @throws Error when `labels` is empty (class count cannot be derived)
 */
export function preprocessData(images: any[], labels: any[]): { xs: tf.Tensor, ys: tf.Tensor } {
  if (labels.length === 0) {
    throw new Error('preprocessData: labels must be non-empty');
  }

  // Normalize image data into [0, 1].
  const xs = tf.tensor(images);
  const normalizedXs = tf.div(xs, 255);

  // BUG FIX: `labels` is a flat array, so tf.tensor(labels) is rank-1 and
  // the old `ys.shape[1]` was undefined, making tf.oneHot fail. Derive the
  // class count from the label values instead, and build an int32 tensor —
  // tf.oneHot requires int32 indices.
  const numClasses = Math.max(...labels.map(Number)) + 1;
  const labelIndices = tf.tensor1d(labels.map(Number), 'int32');
  const normalizedYs = tf.oneHot(labelIndices, numClasses);

  return { xs: normalizedXs, ys: normalizedYs };
}
60 changes: 38 additions & 22 deletions backend/src/shared/services/openai.service.ts
Original file line number Diff line number Diff line change
@@ -1,28 +1,44 @@
// src/shared/services/openai.service.ts

import { Injectable } from '@nestjs/common';
import { OpenAIConstants } from '../constants/openai.constants';
import axios from 'axios';
import * as dotenv from 'dotenv';

dotenv.config();

@Injectable()
export class OpenAIService {
async fetchResponseForFixedQuestion(question: string): Promise<string> {
// Implement logic to fetch response for fixed questions using OpenAI API or any other method
// Example using OpenAI API:
try {
const response = await axios.post(OpenAIConstants.apiUrl, {
prompt: question,
max_tokens: 50, // Adjust this based on your requirement
}, {
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${OpenAIConstants.apiKey}`,
},
});
return response.data.choices[0].text.trim();
} catch (error) {
console.error('Error fetching response from OpenAI:', error);
return 'Sorry, I encountered an error.';
export class OpenaiService {
fetchResponseForFixedQuestion(userMessage: string) {
throw new Error('Method not implemented.');
}
}
}
private readonly apiKey: string;

constructor() {
this.apiKey = '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';
}

async getResponse(message: string): Promise<string> {
try {
const response = await axios.post(
'https://api.openai.com/v1/engines/text-davinci-002/completions',
{
prompt: message,
max_tokens: 150,
temperature: 0.7,
stop: '\n'
},
{
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${this.apiKey}`
}
}
);

export default OpenAIService;
return response.data.choices[0].text.trim();
} catch (error) {
console.error('Error fetching response from ChatGPT:', error);
return 'Sorry, I am unable to respond at the moment.';
}
}
}

0 comments on commit f4ac32c

Please sign in to comment.