Merge branch 'main' into dependabot/gradle/kotlin-1.9.0
nomisRev authored Jul 26, 2023
2 parents a2e7690 + e156190 commit d57ec67
Showing 44 changed files with 1,646 additions and 23 deletions.
3 changes: 2 additions & 1 deletion core/build.gradle.kts
@@ -68,6 +68,7 @@ kotlin {
api(libs.ktor.utils)
api(projects.xefTokenizer)

implementation(libs.klogging)
implementation(libs.uuid)
}
}
@@ -82,7 +83,7 @@ kotlin {

val jvmMain by getting {
dependencies {
api(libs.ktor.client.cio)
implementation(libs.ktor.http)
implementation(libs.logback)
implementation(libs.skrape)
implementation(libs.rss.reader)
@@ -28,7 +28,7 @@ class CoreAIScope
constructor(
val embeddings: Embeddings,
val context: VectorStore = LocalVectorStore(embeddings),
val conversationId: ConversationId = ConversationId(UUID.generateUUID().toString())
val conversationId: ConversationId? = ConversationId(UUID.generateUUID().toString())
) : AutoCloseable, AutoClose by autoClose() {

/**
@@ -108,8 +108,8 @@ constructor(
return prompt(
prompt = Prompt(prompt),
context = context,
functions = functions,
serializer = serializer,
functions = functions,
promptConfiguration = promptConfiguration,
)
}
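The constructor change above makes conversationId nullable, with a freshly generated id as its default. A minimal Kotlin sketch of how the updated scope might be constructed; the OpenAIEmbeddings and OpenAI.DEFAULT_EMBEDDING names are taken from the Java example added later in this commit, and the call site itself is illustrative rather than part of the diff:

import com.xebia.functional.xef.auto.CoreAIScope
import com.xebia.functional.xef.auto.llm.openai.OpenAI
import com.xebia.functional.xef.auto.llm.openai.OpenAIEmbeddings

fun main() {
  val scope = CoreAIScope(OpenAIEmbeddings(OpenAI.DEFAULT_EMBEDDING))
  try {
    // conversationId is generated automatically unless a ConversationId (or null) is supplied.
    println(scope.conversationId)
  } finally {
    scope.close()
  }
}
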
@@ -8,22 +8,68 @@ import com.xebia.functional.xef.auto.PromptConfiguration
import com.xebia.functional.xef.llm.models.chat.ChatCompletionRequestWithFunctions
import com.xebia.functional.xef.llm.models.chat.ChatCompletionResponseWithFunctions
import com.xebia.functional.xef.llm.models.functions.CFunction
import com.xebia.functional.xef.llm.models.functions.encodeJsonSchema
import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.vectorstores.ConversationId
import com.xebia.functional.xef.vectorstores.VectorStore
import io.github.oshai.kotlinlogging.KotlinLogging
import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.KSerializer
import kotlinx.serialization.descriptors.SerialDescriptor
import kotlinx.serialization.json.Json

interface ChatWithFunctions : Chat {

suspend fun createChatCompletionWithFunctions(
request: ChatCompletionRequestWithFunctions
): ChatCompletionResponseWithFunctions

@OptIn(ExperimentalSerializationApi::class)
fun generateCFunction(descriptor: SerialDescriptor): List<CFunction> {
val fnName = descriptor.serialName.substringAfterLast(".")
return generateCFunction(fnName, encodeJsonSchema(descriptor))
}

fun generateCFunction(fnName: String, schema: String): List<CFunction> =
listOf(CFunction(fnName, "Generated function for $fnName", schema))

@AiDsl
suspend fun <A> prompt(
prompt: Prompt,
context: VectorStore,
serializerName: String,
jsonSchema: String,
conversationId: ConversationId? = null,
serializer: (json: String) -> A,
functions: List<CFunction> = generateCFunction(serializerName, jsonSchema),
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): A = prompt(prompt, context, conversationId, functions, serializer, promptConfiguration)

@AiDsl
suspend fun <A> prompt(
prompt: Prompt,
context: VectorStore,
serializer: KSerializer<A>,
conversationId: ConversationId? = null,
functions: List<CFunction> = generateCFunction(serializer.descriptor),
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): A {
return prompt(
prompt,
context,
conversationId,
functions,
{ json -> Json.decodeFromString(serializer, json) },
promptConfiguration
)
}

@AiDsl
suspend fun <A> prompt(
prompt: String,
context: VectorStore,
conversationId: ConversationId? = null,
functions: List<CFunction>,
functions: List<CFunction> = emptyList(),
serializer: (json: String) -> A,
promptConfiguration: PromptConfiguration,
): A {
@@ -43,7 +89,7 @@ interface ChatWithFunctions : Chat {
prompt: Prompt,
context: VectorStore,
conversationId: ConversationId? = null,
functions: List<CFunction>,
functions: List<CFunction> = emptyList(),
serializer: (json: String) -> A,
promptConfiguration: PromptConfiguration,
): A {
@@ -63,11 +109,13 @@ interface ChatWithFunctions : Chat {
maxDeserializationAttempts: Int,
agent: suspend () -> List<String>
): A {
val logger = KotlinLogging.logger {}
(0 until maxDeserializationAttempts).forEach { currentAttempts ->
val result = agent().firstOrNull() ?: throw AIError.NoResponse()
catch({
return@tryDeserialize serializer(result)
}) { e: Throwable ->
logger.warn { "Failed to deserialize result: $result with exception ${e.message}" }
if (currentAttempts == maxDeserializationAttempts)
throw AIError.JsonParsing(result, maxDeserializationAttempts, e.nonFatalOrThrow())
// TODO else log attempt ?
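With the new defaults above, callers can reach the function-calling path with just a Prompt, a VectorStore, and a KSerializer; the function definitions are derived from the serializer's descriptor via generateCFunction. A minimal sketch, assuming a ChatWithFunctions implementation is available (for example an OpenAI chat model) and using a hypothetical Answer type; the import path for ChatWithFunctions is inferred from the package layout in this diff:

import com.xebia.functional.xef.llm.ChatWithFunctions
import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.vectorstores.VectorStore
import kotlinx.serialization.Serializable

@Serializable
data class Answer(val value: String) // hypothetical result type

suspend fun askForAnswer(model: ChatWithFunctions, store: VectorStore): Answer =
  model.prompt(
    prompt = Prompt("Return an answer as JSON"),
    context = store,
    serializer = Answer.serializer(),
    // conversationId, functions, and promptConfiguration now have defaults and can be omitted
  )
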
@@ -16,5 +16,5 @@ data class ChatCompletionRequestWithFunctions(
val frequencyPenalty: Double = 0.0,
val logitBias: Map<String, Int> = emptyMap(),
val user: String?,
val functionCall: Map<String, String>,
val functionCall: Map<String, String>
)
@@ -2,7 +2,7 @@
@file:JvmMultifileClass
@file:OptIn(ExperimentalSerializationApi::class)

package com.xebia.functional.xef.auto.llm.openai
package com.xebia.functional.xef.llm.models.functions

/*
Ported over from https://github.com/Ricky12Awesome/json-schema-serialization
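The JSON-schema serializer port now lives in com.xebia.functional.xef.llm.models.functions, where encodeJsonSchema feeds generateCFunction in the interface above. A small sketch of deriving a schema string from a serializer descriptor, assuming encodeJsonSchema takes a SerialDescriptor and returns the schema as a String (as its use in generateCFunction suggests); the Recipe type is hypothetical:

import com.xebia.functional.xef.llm.models.functions.encodeJsonSchema
import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.Serializable

@Serializable
data class Recipe(val title: String, val steps: List<String>) // hypothetical type

@OptIn(ExperimentalSerializationApi::class)
fun main() {
  // Same derivation generateCFunction(descriptor) performs before naming the function.
  val schema: String = encodeJsonSchema(Recipe.serializer().descriptor)
  println(schema)
}
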
3 changes: 2 additions & 1 deletion examples/java/build.gradle.kts
@@ -7,9 +7,10 @@ plugins {

dependencies {
implementation(projects.xefJava)
implementation(projects.xefReasoning)
implementation(projects.xefGpt4all)
}

tasks.withType<Test>().configureEach {
useJUnit()
}
}
@@ -0,0 +1,42 @@
package com.xebia.functional.xef.java.auto.reasoning;

import com.xebia.functional.xef.auto.CoreAIScope;
import com.xebia.functional.xef.auto.llm.openai.OpenAI;
import com.xebia.functional.xef.auto.llm.openai.OpenAIEmbeddings;
import com.xebia.functional.xef.reasoning.filesystem.Files;
import com.xebia.functional.xef.reasoning.pdf.PDF;
import com.xebia.functional.xef.reasoning.text.Text;
import com.xebia.functional.xef.reasoning.tools.ToolSelection;

import java.util.Collections;
import java.util.List;

public class ToolSelectionExample {

  public static void main(String[] args) {
    try (var scope = new CoreAIScope(new OpenAIEmbeddings(OpenAI.DEFAULT_EMBEDDING))) {
      var model = OpenAI.DEFAULT_CHAT;
      var serialization = OpenAI.DEFAULT_SERIALIZATION;
      var text = Text.create(model, scope);
      var files = Files.create(serialization, scope, Collections.emptyList());
      var pdf = PDF.create(model, serialization, scope);

      var toolSelection = new ToolSelection(
        serialization,
        scope,
        List.of(
          text.summarize,
          pdf.readPDFFromUrl,
          files.readFile,
          files.writeToTextFile
        ),
        Collections.emptyList()
      );

      var inputText = "Extract information from https://arxiv.org/pdf/2305.10601.pdf";
      var result = toolSelection.applyInferredToolsBlocking(inputText);
      System.out.println(result);
    }
  }
}

1 change: 1 addition & 0 deletions examples/kotlin/build.gradle.kts
@@ -23,6 +23,7 @@ dependencies {
implementation(projects.xefTokenizer)
implementation(projects.xefGpt4all)
implementation(projects.xefOpenai)
implementation(projects.xefReasoning)
implementation(libs.kotlinx.serialization.json)
implementation(libs.logback)
implementation(libs.klogging)
@@ -5,7 +5,13 @@ import com.xebia.functional.xef.auto.llm.openai.prompt
import kotlinx.serialization.Serializable

@Serializable
data class Book(val title: String, val author: String, val summary: String)
data class Book(
@Description(["The title of the book"])
val title: String,
@Description(["The author of the book"])
val author: String,
@Description(["An extended summary of the book of at least 100 words"])
val summary: String)

suspend fun main() {
ai {
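Each Book field now carries a @Description, which ends up in the JSON schema the model sees and steers the shape of each value (for example, an extended summary of at least 100 words). A sketch of how the surrounding ai block might request a Book, using the prompt and getOrThrow helpers imported in this file and in the example below; the prompt text is illustrative:

import com.xebia.functional.xef.auto.ai
import com.xebia.functional.xef.auto.llm.openai.getOrThrow
import com.xebia.functional.xef.auto.llm.openai.prompt

suspend fun main() {
  ai {
    // The @Description texts travel with the generated schema for Book.
    val book: Book = prompt("Describe a well-known science fiction novel")
    println(book)
  }.getOrThrow()
}
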
@@ -0,0 +1,75 @@
package com.xebia.functional.xef.auto.reasoning

import com.xebia.functional.xef.auto.ai
import com.xebia.functional.xef.auto.llm.openai.OpenAI
import com.xebia.functional.xef.auto.llm.openai.getOrThrow
import com.xebia.functional.xef.reasoning.code.Code

suspend fun main() {
  ai {
    val code = Code(model = OpenAI.DEFAULT_CHAT, scope = this)

    val sourceCode = """
      import java.util.*
      class ShoppingCart {
        private val items: MutableList<String> = mutableListOf()
        fun addItem(item: String) {
          items.add(item)
        }
        fun removeItem(item: String) {
          items.remove(item)
        }
        fun getTotalItems(): Int {
          return items.size
        }
        fun calculateTotalPrice(): Double {
          var totalPrice = 0.0
          for (item in items) {
            val price = fetchItemPrice(item)
            totalPrice += price
          }
          return totalPrice
        }
        private fun fetchItemPrice(item: String): Double {
          // Logic to fetch item price from database or API
          // For simplicity, return a random price
          return Random().nextDouble() * 100
        }
      }
      fun main() {
        val cart = ShoppingCart()
        cart.addItem("Item 1")
        cart.addItem("Item 2")
        cart.addItem("Item 3")
        println("Total items in cart: ${'$'}{cart.getTotalItems()}")
        println("Total price of items in cart: ${'$'}{cart.calculateTotalPrice()}")
        cart.removeItem("Item 2")
        println("Total items in cart: ${'$'}{cart.getTotalItems()}")
        println("Total price of items in cart: ${'$'}{cart.calculateTotalPrice()}")
      }
    """.trimIndent()

    val antiPatternDetectionResult = code.antiPatternDetection(sourceCode)
    println("Detected Anti-Patterns:")
    println(antiPatternDetectionResult)

    val codeBreakdownResult = code.codeBreakdown(sourceCode)
    println("Code Breakdown:")
    println(codeBreakdownResult)

    val codeDocumentationGenerationResult = code.coreDocumentationGeneration(sourceCode)
    println("Code Documentation Generation:")
    println(codeDocumentationGenerationResult)
  }.getOrThrow()
}