forked from AIObjectives/talk-to-the-city-reports
-
Notifications
You must be signed in to change notification settings - Fork 0
/
llama_v0.ts
71 lines (63 loc) · 1.63 KB
/
llama_v0.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
import nodes from '$lib/node_register';
import categories from '$lib/node_categories';
import type { DGNodeInterface, BaseData } from '$lib/node_data_types';
// Payload carried by a LlamaNode. `text` holds the chat-completions endpoint
// URL shown/edited in the UI; the remaining fields (label, message, dirty,
// compute_type, ...) are inherited from BaseData.
interface LlamaData extends BaseData {
text: string;
}
/**
 * Graph node that health-checks a local llama.cpp-style chat-completions
 * endpoint and records its status ('fetching..' / 'online' / 'offline')
 * in `data.message` for the UI.
 */
export default class LlamaNode {
  id: string;
  data: LlamaData;
  position: { x: number; y: number };
  type: string;

  constructor(node_data: LlamaNodeInterface) {
    const { id, data, position, type } = node_data;
    this.id = id;
    this.data = data;
    this.position = position;
    this.type = type;
  }

  /**
   * Pings the endpoint configured in `this.data.text`.
   *
   * @param inputData - upstream node outputs (unused by this node)
   * @param context - pipeline context string (unused)
   * @param info - progress callback (unused)
   * @param error - called with 'Request failed' on a non-2xx response
   * @param success - success callback (unused)
   * @param slug - report slug (unused)
   * @param Cookies - cookie accessor passed by the framework (unused)
   * @returns the endpoint URL when reachable, otherwise undefined
   */
  async compute(
    inputData: object,
    context: string,
    info: (arg: string) => void,
    error: (arg: string) => void,
    success: (arg: string) => void,
    slug: string,
    Cookies: any
  ) {
    this.data.message = 'fetching..';
    this.data.dirty = false;
    try {
      // Fix: use the user-configurable endpoint stored in node data instead
      // of a hard-coded URL. The data default is the same localhost address,
      // so default behavior is unchanged while custom endpoints now work.
      const response = await fetch(this.data.text);
      if (response.ok) {
        this.data.message = 'online';
        return this.data.text;
      } else {
        error('Request failed');
      }
    } catch (err) {
      // Endpoint unreachable. Deliberately only records status rather than
      // calling error(): an offline local server is an expected state here.
      this.data.message = 'offline';
    }
  }
}
// DGNodeInterface specialized so `data` is statically known to be LlamaData.
type LlamaNodeInterface = DGNodeInterface & {
data: LlamaData;
};
// Default node definition registered with the node system; `text` doubles as
// both the displayed value and the endpoint URL that compute() pings.
export const llama_node_data: LlamaNodeInterface = {
id: 'llama',
data: {
label: 'llama',
// Default endpoint for a locally running OpenAI-compatible llama server.
text: 'http://localhost:10000/v1/chat/completions',
dirty: false,
compute_type: 'llama_v0',
input_ids: {},
category: categories.ml.id,
icon: 'llama_v0',
show_in_ui: true,
message: ''
},
position: { x: 0, y: 0 },
// NOTE(review): `type` is 'text_input_v0' while compute_type is 'llama_v0' —
// presumably reusing the text-input UI component on purpose; confirm.
type: 'text_input_v0'
};
// Module-load side effects: build the default node instance and register the
// class + default data with the global node registry.
export const llama_node = new LlamaNode(llama_node_data);
nodes.register(LlamaNode, llama_node_data);