Bug fixed in conversation (#710)
* Bug fixed in conversation

* Comments addressed
javipacheco authored Apr 2, 2024
1 parent dc55080 commit efa2181
Showing 4 changed files with 35 additions and 15 deletions.
File 1 of 4: package com.xebia.functional.xef.llm

@@ -1,9 +1,6 @@
 package com.xebia.functional.xef.llm

-import com.xebia.functional.openai.generated.model.ChatCompletionRequestMessage
-import com.xebia.functional.openai.generated.model.ChatCompletionResponseMessage
-import com.xebia.functional.openai.generated.model.ChatCompletionRole
-import com.xebia.functional.openai.generated.model.CreateChatCompletionResponseChoicesInner
+import com.xebia.functional.openai.generated.model.*
 import com.xebia.functional.xef.conversation.Conversation
 import com.xebia.functional.xef.conversation.MessagesToHistory
 import com.xebia.functional.xef.store.ConversationId
@@ -42,9 +39,7 @@ internal suspend fun List<CreateChatCompletionResponseChoicesInner>.addChoiceWit
 ): List<CreateChatCompletionResponseChoicesInner> = also {
   val cid = scope.conversationId
   if (history != MessagesToHistory.NONE && isNotEmpty() && cid != null) {
-    val aiMemory =
-      this.filter { it.message.content != null }
-        .map { it.message.toMemory(cid, scope.store.incrementIndexAndGet()) }
+    val aiMemory = this.map { it.message.toMemory(cid, scope.store.incrementIndexAndGet()) }
     val newMessages = previousMemories + aiMemory
     scope.store.addMemoriesByHistory(history, newMessages)
   }
@@ -58,7 +53,7 @@ internal suspend fun List<CreateChatCompletionResponseChoicesInner>.addChoiceToM
   val cid = scope.conversationId
   if (history != MessagesToHistory.NONE && isNotEmpty() && cid != null) {
     val aiMemory =
-      this.mapNotNull { it.message }.map { it.toMemory(cid, scope.store.incrementIndexAndGet()) }
+      this.map { it.message }.map { it.toMemory(cid, scope.store.incrementIndexAndGet()) }
     val newMessages = previousMemories + aiMemory
     scope.store.addMemoriesByHistory(history, newMessages)
   }
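Both hunks stop dropping messages on their way to the conversation store: the first no longer filters out choices whose content is null, the second no longer discards them via mapNotNull. Assistant responses that carry tool calls instead of text have exactly that null-content shape, so tool-call turns were silently vanishing from history. A minimal before/after sketch of the behavior, using hypothetical stand-in types rather than the generated OpenAI models:

data class Message(val content: String?, val toolCalls: List<String> = emptyList())
data class Choice(val message: Message)

// Hypothetical stand-in for xef's Message.toMemory(cid, index).
fun toMemory(m: Message): String = m.content ?: "tool-call: ${m.toolCalls.firstOrNull()}"

fun main() {
  val choices = listOf(
    Choice(Message(content = "plain answer")),
    Choice(Message(content = null, toolCalls = listOf("""getWeather({"city":"Cadiz"})""")))
  )

  // Old behavior: the null-content (tool-call) choice never reaches memory.
  val before = choices.filter { it.message.content != null }.map { toMemory(it.message) }
  // New behavior: every choice is mapped, so tool-call turns are stored too.
  val after = choices.map { toMemory(it.message) }

  println(before) // [plain answer]
  println(after)  // [plain answer, tool-call: getWeather({"city":"Cadiz"})]
}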
File 2 of 4: sealed class MemorizedMessage

@@ -18,7 +18,8 @@ sealed class MemorizedMessage {
       ChatCompletionRequestMessage.CaseChatCompletionRequestAssistantMessage(
         ChatCompletionRequestAssistantMessage(
           role = ChatCompletionRequestAssistantMessage.Role.assistant,
-          content = message.content
+          // TODO: Find a new strategy to save the tool calls as content
+          content = message.content ?: message.toolCalls?.firstOrNull()?.toString()
         )
       )
     }
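Since tool-call messages are no longer filtered out upstream, this side handles their null content: the elvis fallback stores a string rendering of the first tool call, and the TODO records that this is a stopgap (only the first tool call survives, and only as text). A small sketch of the fallback chain; ToolCall here is a hypothetical stand-in for the generated model type:

data class ToolCall(val name: String, val arguments: String) {
  override fun toString() = "$name($arguments)"
}

// Mirrors `message.content ?: message.toolCalls?.firstOrNull()?.toString()`.
fun contentOrFirstToolCall(content: String?, toolCalls: List<ToolCall>?): String? =
  content ?: toolCalls?.firstOrNull()?.toString()

fun main() {
  println(contentOrFirstToolCall("a plain answer", null)) // a plain answer
  println(contentOrFirstToolCall(null, listOf(ToolCall("getWeather", """{"city":"Cadiz"}""")))) // getWeather({"city":"Cadiz"})
  println(contentOrFirstToolCall(null, null)) // null
}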
File 3 of 4: package com.xebia.functional.xef.conversation.conversations (Animal example)

@@ -2,13 +2,16 @@ package com.xebia.functional.xef.conversation.conversations

 import com.xebia.functional.openai.generated.model.CreateChatCompletionRequestModel
 import com.xebia.functional.xef.AI
+import com.xebia.functional.xef.OpenAI
 import com.xebia.functional.xef.conversation.Conversation
 import com.xebia.functional.xef.conversation.MessagesFromHistory
 import com.xebia.functional.xef.conversation.MessagesToHistory
+import com.xebia.functional.xef.llm.promptMessage
 import com.xebia.functional.xef.prompt.Prompt
 import com.xebia.functional.xef.prompt.PromptBuilder.Companion.system
 import com.xebia.functional.xef.prompt.PromptBuilder.Companion.user
 import com.xebia.functional.xef.prompt.configuration.PromptConfiguration
+import com.xebia.functional.xef.store.LocalVectorStore
 import kotlinx.serialization.Serializable

 @Serializable data class Animal(val name: String, val habitat: String, val diet: String)
@@ -22,9 +25,11 @@ suspend fun main() {
   // - # cd server/docker/opentelemetry
   // - # docker-compose up

+  val openAI = OpenAI()
   Conversation(
     // metric = com.xebia.functional.xef.opentelemetry.OpenTelemetryMetric(),
-    metric = com.xebia.functional.xef.metrics.LogsMetric()
+    metric = com.xebia.functional.xef.metrics.LogsMetric(),
+    store = LocalVectorStore(openAI.embeddings),
   ) {
     metric.customSpan("Animal Example") {
       val configNoneFromConversation = PromptConfiguration {
@@ -34,13 +39,15 @@
       val animal: Animal =
         AI(
           Prompt(model) { +user("A unique animal species.") }
-            .copy(configuration = configNoneFromConversation)
+            .copy(configuration = configNoneFromConversation),
+          conversation = this@Conversation
         )

       val invention: Invention =
         AI(
           Prompt(model) { +user("A groundbreaking invention from the 20th century.") }
-            .copy(configuration = configNoneFromConversation)
+            .copy(configuration = configNoneFromConversation),
+          conversation = this@Conversation
         )

       println("\nAnimal: $animal")
@@ -58,7 +65,7 @@
         }
       )

-      val story: String = AI(storyPrompt)
+      val story: String = openAI.chat.promptMessage(storyPrompt, scope = this@Conversation)

       println("\nStory 1:\n$story\n")

@@ -67,7 +74,7 @@
       +user("Write a short story of 100 words that involves the animal in a city called Cadiz")
       }

-      val story2: String = AI(storyPrompt2)
+      val story2: String = openAI.chat.promptMessage(storyPrompt2, scope = this@Conversation)

       println("\nStory 2:\n$story2\n")
     }
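The example changes all follow one theme: every call now runs inside an explicit conversation scope backed by a shared store (a LocalVectorStore over the OpenAI embeddings), rather than each AI(...) call starting from a blank history. That shared scope is what lets the later story prompts refer back to the animal and invention produced earlier. A self-contained toy model of that threading; everything here is a hypothetical stand-in, not xef's API:

class ConversationScope {
  private val history = mutableListOf<String>()

  // Stand-in for an LLM call that sees the accumulated history.
  fun ask(prompt: String): String {
    history += "user: $prompt"
    val answer = "answer to \"$prompt\" given ${history.size - 1} prior turns"
    history += "assistant: $answer"
    return answer
  }
}

fun main() {
  val conversation = ConversationScope()
  conversation.ask("A unique animal species.")
  conversation.ask("A groundbreaking invention from the 20th century.")
  // The story call sees both earlier turns because it shares the same scope.
  println(conversation.ask("Write a story combining the animal and the invention."))
}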
File 4 of 4: object TestExample

@@ -48,6 +48,23 @@ object TestExample {
       +OutputResponse(description = fakeOutputs, value = "The movie is Jurassic Park")
       }
     }
-    spec.evaluate<AnswerAccuracy>(success = listOf(AnswerAccuracy.yes))
+    val results = spec.evaluate<AnswerAccuracy>(success = listOf(AnswerAccuracy.yes))
+    results.items.forEach {
+      println("==============")
+      println(" ${it.description}")
+      println("==============")
+      it.items.zip(it.items.indices).forEach { (item, index) ->
+        println()
+        println(">> Output ${index + 1}")
+        println("Description: ${item.description}")
+        println("Success: ${item.success}")
+        println()
+        println("AI Output:")
+        println(item.output)
+        println()
+      }
+      println()
+      println()
+    }
   }
 }
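A side note on the new reporting loop: it.items.zip(it.items.indices) pairs each element with its index, which Kotlin expresses directly as forEachIndexed (or withIndex()). A behavior-equivalent sketch over plain strings:

fun main() {
  val items = listOf("first output", "second output")
  // Same pairing as items.zip(items.indices), without building the pair list.
  items.forEachIndexed { index, item ->
    println(">> Output ${index + 1}: $item")
  }
}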
