req-worker.js
// REQ helper worker
// Handles POST requests forwarded by the relay worker
addEventListener("fetch", (event) => {
  const { request } = event;
  if (request.method === 'POST') {
    event.respondWith(handlePostRequest(request));
  } else {
    event.respondWith(new Response("Invalid request", { status: 400 }));
  }
});
// Controls concurrent connections
const MAX_CONCURRENT_CONNECTIONS = 6;
let activeConnections = 0;
// Runs promiseFunction once a connection slot is free, limiting simultaneous fetches
async function withConnectionLimit(promiseFunction) {
  // Wait if too many connections are active
  while (activeConnections >= MAX_CONCURRENT_CONNECTIONS) {
    await new Promise(resolve => setTimeout(resolve, 100));
  }
  activeConnections += 1;
  try {
    return await promiseFunction();
  } finally {
    activeConnections -= 1;
  }
}
// Handles POST requests
async function handlePostRequest(request) {
  try {
    // Checks if the Authorization header is present and matches authToken
    const authHeader = request.headers.get('Authorization');
    console.log(`Authorization header received: ${authHeader !== null}`);
    if (!authHeader || authHeader !== `Bearer ${authToken}`) {
      console.warn("Unauthorized request.");
      return new Response("Unauthorized", { status: 401 });
    }
    const { type, subscriptionId, filters } = await request.json();
    console.log(`Request type: ${type}, Subscription ID: ${subscriptionId}`);
    if (type === 'REQ') {
      console.log(`Processing REQ with filters: ${JSON.stringify(filters)}`);
      const events = await processReq(subscriptionId, filters);
      console.log(`Returning ${events.length} events for subscription ID: ${subscriptionId}`);
      return new Response(JSON.stringify(events), {
        status: 200,
        headers: { 'Content-Type': 'application/json' },
      });
    } else {
      console.warn(`Invalid request type: ${type}`);
      return new Response("Invalid request type", { status: 400 });
    }
  } catch (error) {
    console.error("Error processing POST request:", error);
    return new Response(`Error processing request: ${error.message}`, { status: 500 });
  }
}
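// Example of the JSON body this worker expects (an illustrative sketch only;
// the exact payload is built by the relay worker that calls this endpoint,
// and the id/pubkey values below are placeholders):
//
//   {
//     "type": "REQ",
//     "subscriptionId": "sub1",
//     "filters": {
//       "ids": ["<event id>"],
//       "kinds": [1],
//       "authors": ["<pubkey>"],
//       "tags": [["e", "<event id>"], ["p", "<pubkey>"]],
//       "since": 1700000000,
//       "until": 1700003600
//     }
//   }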
// Handles REQ messages with batching
async function processReq(subscriptionId, filters) {
  console.log(`Processing request for subscription ID: ${subscriptionId}`);
  let events = [];
  const eventPromises = [];
  // Fetch events in batches
  try {
    if (filters.ids) {
      console.log(`Fetching events by IDs: ${filters.ids}`);
      eventPromises.push(...fetchEventsById(filters.ids));
    }
    if (filters.kinds) {
      console.log(`Fetching events by kinds: ${filters.kinds}`);
      eventPromises.push(...await fetchEventsByKind(filters.kinds));
    }
    if (filters.authors) {
      console.log(`Fetching events by authors: ${filters.authors}`);
      eventPromises.push(...await fetchEventsByAuthor(filters.authors));
    }
    if (filters.tags) {
      console.log(`Fetching events by tags: ${JSON.stringify(filters.tags)}`);
      eventPromises.push(...await fetchEventsByTag(filters.tags));
    }
    const fetchedEvents = await Promise.all(eventPromises);
    console.log(`Fetched ${fetchedEvents.length} events, applying filters...`);
    events = filterEvents(fetchedEvents.filter(event => event !== null), filters);
  } catch (error) {
    console.error(`Error retrieving events from R2:`, error);
  }
  console.log(`Returning ${events.length} filtered events.`);
  return events;
}
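// Storage key layout assumed by the fetch helpers below (inferred from this
// file; <n> is a per-index sequence number maintained by whichever worker
// writes events, which is not shown here):
//
//   events/event:<id>                  full event JSON, fetched over HTTPS from r2BucketDomain
//   kinds/kind-<kind>:<n>              per-kind index entry
//   pubkeys/pubkey-<author>:<n>        per-author index entry
//   tags/<tagName>-<tagValue>:<n>      per-tag index entry
//   counts/kind_count_<kind>           latest <n> for a kind (read via relayDb.get)
//   counts/pubkey_count_<author>       latest <n> for an author
//   counts/<tagName>_count_<tagValue>  latest <n> for a tag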
// Fetch events by IDs in batches
function fetchEventsById(ids, batchSize = 10) {
  console.log(`Fetching events by IDs in batches of ${batchSize}`);
  const batches = [];
  for (let i = 0; i < ids.length; i += batchSize) {
    const batch = ids.slice(i, i + batchSize);
    console.log(`Processing batch: ${batch}`);
    batches.push(...batch.map(id => fetchEventById(id)));
  }
  return batches;
}
// Fetch a single event by ID
async function fetchEventById(id) {
  const idKey = `events/event:${id}`;
  const eventUrl = `https://${r2BucketDomain}/${idKey}`;
  console.log(`Fetching event by ID: ${id} from ${eventUrl}`);
  try {
    // Await here so fetch/parse failures are caught below instead of rejecting the returned promise
    return await withConnectionLimit(async () => {
      const response = await fetch(eventUrl);
      if (!response.ok) {
        console.warn(`Event not found for ID: ${id}`);
        return null;
      }
      const data = await response.text();
      console.log(`Event found for ID: ${id}`);
      return JSON.parse(data);
    });
  } catch (error) {
    console.error(`Error fetching event with ID ${id}:`, error);
    return null;
  }
}
// Fetch events by kind in batches
async function fetchEventsByKind(kinds, limit = 25) {
  console.log(`Fetching events by kinds: ${kinds} with limit: ${limit}`);
  const promises = [];
  for (const kind of kinds) {
    const kindCountKey = `counts/kind_count_${kind}`;
    console.log(`Fetching kind count for kind: ${kind}`);
    const kindCountResponse = await withConnectionLimit(() => relayDb.get(kindCountKey));
    const kindCountValue = kindCountResponse ? await kindCountResponse.text() : '0';
    const kindCount = parseInt(kindCountValue, 10);
    console.log(`Found ${kindCount} events for kind: ${kind}`);
    // Walk backwards from the newest index so the most recent events are fetched first
    for (let i = kindCount; i >= Math.max(1, kindCount - limit + 1); i--) {
      const kindKey = `kinds/kind-${kind}:${i}`;
      promises.push(fetchEventByKey(kindKey));
    }
  }
  return promises;
}
// Fetch events by author in batches
async function fetchEventsByAuthor(authors, limit = 25) {
  console.log(`Fetching events by authors: ${authors} with limit: ${limit}`);
  const promises = [];
  for (const author of authors) {
    const pubkeyCountKey = `counts/pubkey_count_${author}`;
    console.log(`Fetching pubkey count for author: ${author}`);
    const pubkeyCountResponse = await withConnectionLimit(() => relayDb.get(pubkeyCountKey));
    const pubkeyCountValue = pubkeyCountResponse ? await pubkeyCountResponse.text() : '0';
    const pubkeyCount = parseInt(pubkeyCountValue, 10);
    console.log(`Found ${pubkeyCount} events for author: ${author}`);
    for (let i = pubkeyCount; i >= Math.max(1, pubkeyCount - limit + 1); i--) {
      const pubkeyKey = `pubkeys/pubkey-${author}:${i}`;
      promises.push(fetchEventByKey(pubkeyKey));
    }
  }
  return promises;
}
// Fetch events by tag in batches
async function fetchEventsByTag(tags, limit = 25) {
  console.log(`Fetching events by tags: ${JSON.stringify(tags)} with limit: ${limit}`);
  const promises = [];
  for (const [tagName, tagValue] of tags) {
    const tagCountKey = `counts/${tagName}_count_${tagValue}`;
    console.log(`Fetching tag count for tag: ${tagName}-${tagValue}`);
    const tagCountResponse = await withConnectionLimit(() => relayDb.get(tagCountKey));
    const tagCountValue = tagCountResponse ? await tagCountResponse.text() : '0';
    const tagCount = parseInt(tagCountValue, 10);
    console.log(`Found ${tagCount} events for tag: ${tagName}-${tagValue}`);
    for (let i = tagCount; i >= Math.max(1, tagCount - limit + 1); i--) {
      const tagKey = `tags/${tagName}-${tagValue}:${i}`;
      promises.push(fetchEventByKey(tagKey));
    }
  }
  return promises;
}
// Fetch event by key (common for kind, author, etc.)
async function fetchEventByKey(eventKey) {
  const eventUrl = `https://${r2BucketDomain}/${eventKey}`;
  console.log(`Fetching event by key: ${eventKey} from ${eventUrl}`);
  try {
    // Await here so fetch/parse failures are caught below instead of rejecting the returned promise
    return await withConnectionLimit(async () => {
      const response = await fetch(eventUrl);
      if (!response.ok) {
        console.warn(`Event not found for key: ${eventKey}`);
        return null;
      }
      const data = await response.text();
      console.log(`Event found for key: ${eventKey}`);
      return JSON.parse(data);
    });
  } catch (error) {
    console.error(`Error fetching event with key ${eventKey}:`, error);
    return null;
  }
}
// Filter events based on additional filters
function filterEvents(events, filters) {
  console.log(`Filtering events based on filters: ${JSON.stringify(filters)}`);
  return events.filter(event => {
    // Check for basic filters: ids, kinds, authors, created_at range
    const includeEvent = (!filters.ids || filters.ids.includes(event.id)) &&
      (!filters.kinds || filters.kinds.includes(event.kind)) &&
      (!filters.authors || filters.authors.includes(event.pubkey)) &&
      (!filters.since || event.created_at >= filters.since) &&
      (!filters.until || event.created_at <= filters.until);
    // Check for tag filters
    if (filters.tags) {
      for (const [tagName, tagValue] of filters.tags) {
        const eventTags = event.tags.filter(([t]) => t === tagName).map(([, v]) => v);
        if (!eventTags.includes(tagValue)) {
          return false;
        }
      }
    }
    return includeEvent;
  });
}
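// Hypothetical caller sketch (the URL and values are placeholders, not part of
// this file) showing the request/response contract handled above:
//
//   const resp = await fetch("https://req-worker.example.com/", {
//     method: "POST",
//     headers: {
//       "Authorization": `Bearer ${authToken}`,
//       "Content-Type": "application/json",
//     },
//     body: JSON.stringify({
//       type: "REQ",
//       subscriptionId: "sub1",
//       filters: { kinds: [1], authors: ["<pubkey>"] },
//     }),
//   });
//   const events = await resp.json(); // array of matching Nostr events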