streaming.js
import { BedrockRuntimeClient, InvokeModelWithResponseStreamCommand } from "@aws-sdk/client-bedrock-runtime";
// Amazon Bedrock credentials - replace with your own
const AWS_ACCESS_KEY_ID = "<YOUR_ACCESS_KEY_ID>";
const AWS_SECRET_ACCESS_KEY = "<YOUR_SECRET_ACCESS_KEY>";
const AWS_SESSION_TOKEN = "<YOUR_SESSION_TOKEN>";
const config = {
  region: "us-east-1",
  credentials: {
    accessKeyId: AWS_ACCESS_KEY_ID,
    secretAccessKey: AWS_SECRET_ACCESS_KEY,
    sessionToken: AWS_SESSION_TOKEN,
  },
};
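// Create the Bedrock Runtime client used to send the streaming invoke requests.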
const client = new BedrockRuntimeClient(config);
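// ai_request is the handler the TinyMCE AI Assistant plugin calls for each prompt.
// It builds an Anthropic Claude "messages" payload from the conversation thread and
// streams the model's response back to the editor through respondWith.stream.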
const ai_request = (request, respondWith) => {
  // Adds each previous query and response as individual messages
  const conversation = request.thread.flatMap((event) => {
    if (event.response) {
      return [
        { role: 'user', content: event.request.query },
        { role: 'assistant', content: event.response.data }
      ];
    } else {
      return [];
    }
  });
  // System messages provided by the plugin to format the output as HTML content.
  const pluginSystemMessages = request.system.map((text) => ({
    text
  }));
  const systemMessages = [
    ...pluginSystemMessages,
    // Additional system messages to control the output of the AI
    { text: 'Do not include html\`\`\` at the start or \`\`\` at the end.' },
    { text: 'No explanation or boilerplate, just give the HTML response.' }
  ];
  const system = systemMessages.map((message) => message.text).join('\n');
  // Forms the new query sent to the API
  const text = request.context.length === 0 || conversation.length > 0
    ? request.query
    : `Question: ${request.query} Context: """${request.context}"""`;
  const messages = [
    ...conversation,
    {
      role: "user",
      content: text
    }
  ];
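  // Request payload in the Anthropic Claude Messages API format that Bedrock expects.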
  const payload = {
    anthropic_version: "bedrock-2023-05-31",
    max_tokens: 1000,
    system,
    messages,
  };
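  // Invocation parameters for InvokeModelWithResponseStream, targeting Claude 3 Haiku.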
  const input = {
    body: JSON.stringify(payload),
    contentType: "application/json",
    accept: "application/json",
    modelId: "anthropic.claude-3-haiku-20240307-v1:0"
  };
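  // Log the assembled prompt for debugging.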
  console.log(system);
  console.log(messages);
  // Bedrock doesn't support cancelling a response mid-stream, so there is no use for the signal callback.
  respondWith.stream(async (_signal, streamMessage) => {
    const command = new InvokeModelWithResponseStreamCommand(input);
    const response = await client.send(command);
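    // The response body is an async iterable of event chunks; decode each one and
    // forward the text deltas to the editor as they arrive.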
    for await (const item of response.body) {
      const chunk = JSON.parse(new TextDecoder().decode(item.chunk.bytes));
      const chunk_type = chunk.type;
      switch (chunk_type) {
        case "message_start":
          break;
        case "content_block_start":
          break;
        case "content_block_delta": {
          const message = chunk.delta.text;
          streamMessage(message);
          break;
        }
        case "content_block_stop":
          break;
        case "message_delta":
          break;
        case "message_stop":
          break;
        default:
          return Promise.reject("Stream error");
      }
    }
  });
};
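// Initialize TinyMCE with the AI Assistant plugin and wire it to the Bedrock-backed
// ai_request handler defined above.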
tinymce.init({
  selector: 'textarea#streaming',
  plugins: 'ai code help',
  toolbar: 'aidialog aishortcuts code help',
  ai_request
});