import { NSchema as n } from "jsr:@nostrify/nostrify";
import type {
  NostrClientREQ,
  NostrEvent,
  NostrFilter,
} from "jsr:@nostrify/types";
import {
  getCCNPrivateKey,
  getCCNPubkey,
  isAddressableEvent,
  isArray,
  isLocalhost,
  isReplaceableEvent,
  isValidJSON,
  parseATagQuery,
  randomTimeUpTo2DaysInThePast,
} from "./utils.ts";
import * as nostrTools from "@nostr/tools";
import { nip44 } from "@nostr/tools";
import { randomBytes } from "@noble/ciphers/webcrypto";
import { encodeBase64 } from "jsr:@std/encoding@0.224/base64";
import { Database } from "jsr:@db/sqlite";
import { mixQuery, sql, sqlPartial } from "./utils/queries.ts";
import { log, setupLogger } from "./utils/logs.ts";
import { getEveFilePath } from "./utils/files.ts";
import { MIN_POW, POW_TO_MINE } from "./consts.ts";

await setupLogger();

if (!Deno.env.has("ENCRYPTION_KEY")) {
  log.error(
    `Missing ENCRYPTION_KEY. Please set it in your env.\nA new one has been generated for you: ENCRYPTION_KEY="${
      encodeBase64(
        randomBytes(32),
      )
    }"`,
  );
  Deno.exit(1);
}

const db = new Database(await getEveFilePath("db"));
const pool = new nostrTools.SimplePool();
const relays = [
  "wss://relay.arx-ccn.com/",
  "wss://relay.dannymorabito.com/",
  "wss://nos.lol/",
  "wss://nostr.einundzwanzig.space/",
  "wss://nostr.massmux.com/",
  "wss://nostr.mom/",
  "wss://nostr.wine/",
  "wss://purplerelay.com/",
  "wss://relay.damus.io/",
  "wss://relay.goodmorningbitcoin.com/",
  "wss://relay.lexingtonbitcoin.org/",
  "wss://relay.nostr.band/",
  "wss://relay.primal.net/",
  "wss://relay.snort.social/",
  "wss://strfry.iris.to/",
  "wss://cache2.primal.net/v1",
];
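
// Applies every migration in ./migrations whose numeric version prefix is newer
// than `latestVersion`, recording each run in migration_history. Assumes each
// migration file name starts with its version number followed by a dash, which
// is what the split("-")[0] below relies on.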
export function runMigrations(db: Database, latestVersion: number) {
  const migrations = Deno.readDirSync(`${import.meta.dirname}/migrations`);
  for (const migrationFile of migrations) {
    const migrationVersion = Number.parseInt(
      migrationFile.name.split("-")[0],
      10,
    );

    if (migrationVersion > latestVersion) {
      log.info(
        `Running migration ${migrationFile.name} (version ${migrationVersion})`,
      );
      const start = Date.now();
      const migrationSql = Deno.readTextFileSync(
        `${import.meta.dirname}/migrations/${migrationFile.name}`,
      );
      db.run("BEGIN TRANSACTION");
      try {
        db.run(migrationSql);
        const end = Date.now();
        const durationMs = end - start;
        sql`
          INSERT INTO migration_history (migration_version, migration_name, executed_at, duration_ms, status)
          VALUES (${migrationVersion}, ${migrationFile.name}, ${new Date().toISOString()}, ${durationMs}, 'success')
        `(db);
        db.run("COMMIT TRANSACTION");
      } catch (e) {
        db.run("ROLLBACK TRANSACTION");
        const error = e instanceof Error
          ? e
          : typeof e === "string"
          ? new Error(e)
          : new Error(JSON.stringify(e));
        const end = Date.now();
        const durationMs = end - start;
        sql`
          INSERT INTO migration_history (migration_version, migration_name, executed_at, duration_ms, status, error_message)
          VALUES (${migrationVersion}, ${migrationFile.name}, ${new Date().toISOString()}, ${durationMs}, 'failed', ${error.message})
        `(db);
        throw e;
      }
    }
  }
}
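
// Double-wraps an already-signed event for the CCN: the event is NIP-44
// encrypted into a kind 13 seal signed with the CCN key, and the seal is then
// wrapped in a kind 1059 gift wrap signed by a one-off key and mined to
// POW_TO_MINE bits of proof-of-work. Both timestamps are randomized up to two
// days into the past (presumably to obscure timing metadata).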
async function createEncryptedEvent(
  event: nostrTools.VerifiedEvent,
): Promise<nostrTools.VerifiedEvent> {
  if (!event.id) throw new Error("Event must have an ID");
  if (!event.sig) throw new Error("Event must be signed");
  const ccnPubKey = await getCCNPubkey();
  const ccnPrivateKey = await getCCNPrivateKey();
  const randomPrivateKey = nostrTools.generateSecretKey();
  const randomPrivateKeyPubKey = nostrTools.getPublicKey(randomPrivateKey);
  const conversationKey = nip44.getConversationKey(randomPrivateKey, ccnPubKey);
  const sealTemplate = {
    kind: 13,
    created_at: randomTimeUpTo2DaysInThePast(),
    content: nip44.encrypt(JSON.stringify(event), conversationKey),
    tags: [],
  };
  const seal = nostrTools.finalizeEvent(sealTemplate, ccnPrivateKey);
  const giftWrapTemplate = {
    kind: 1059,
    created_at: randomTimeUpTo2DaysInThePast(),
    content: nip44.encrypt(JSON.stringify(seal), conversationKey),
    tags: [["p", ccnPubKey]],
    pubkey: randomPrivateKeyPubKey,
  };
  const minedGiftWrap = nostrTools.nip13.minePow(giftWrapTemplate, POW_TO_MINE);
  const giftWrap = nostrTools.finalizeEvent(minedGiftWrap, randomPrivateKey);
  return giftWrap;
}
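
// Reverse of createEncryptedEvent: rejects anything that is not a kind 1059
// gift wrap with at least MIN_POW bits of proof-of-work, then peels off the
// gift wrap and the kind 13 seal to recover the original event.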
async function decryptEvent(
  event: nostrTools.Event,
): Promise<nostrTools.VerifiedEvent> {
  const ccnPrivkey = await getCCNPrivateKey();

  if (event.kind !== 1059) {
    throw new Error("Cannot decrypt event -- not a gift wrap");
  }

  const pow = nostrTools.nip13.getPow(event.id);

  if (pow < MIN_POW) {
    throw new Error("Cannot decrypt event -- PoW too low");
  }

  const conversationKey = nip44.getConversationKey(ccnPrivkey, event.pubkey);
  const seal = JSON.parse(nip44.decrypt(event.content, conversationKey));
  if (!seal) throw new Error("Cannot decrypt event -- no seal");
  if (seal.kind !== 13) {
    throw new Error("Cannot decrypt event subevent -- not a seal");
  }
  const content = JSON.parse(nip44.decrypt(seal.content, conversationKey));
  return content as nostrTools.VerifiedEvent;
}

class EventAlreadyExistsException extends Error {}
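
// Stores a decrypted event together with the id of the gift wrap it arrived in
// (original_id). Older versions of replaceable and addressable events are
// deleted first, and tags are written to event_tags / event_tags_values.
// Throws EventAlreadyExistsException if the event id is already present.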
function addEventToDb(
  decryptedEvent: nostrTools.VerifiedEvent,
  encryptedEvent: nostrTools.VerifiedEvent,
) {
  const existingEvent = sql`
    SELECT * FROM events WHERE id = ${decryptedEvent.id}
  `(db)[0];

  if (existingEvent) throw new EventAlreadyExistsException();
  try {
    db.run("BEGIN TRANSACTION");
    if (isReplaceableEvent(decryptedEvent.kind)) {
      sql`
        DELETE FROM events
        WHERE kind = ${decryptedEvent.kind}
          AND pubkey = ${decryptedEvent.pubkey}
          AND created_at < ${decryptedEvent.created_at}
      `(db);
    }

    if (isAddressableEvent(decryptedEvent.kind)) {
      const dTag = decryptedEvent.tags.find((tag) => tag[0] === "d")?.[1];
      if (dTag) {
        sql`
          DELETE FROM events
          WHERE kind = ${decryptedEvent.kind}
            AND pubkey = ${decryptedEvent.pubkey}
            AND created_at < ${decryptedEvent.created_at}
            AND id IN (
              SELECT event_id FROM event_tags
              WHERE tag_name = 'd'
                AND tag_id IN (
                  SELECT tag_id FROM event_tags_values
                  WHERE value_position = 1
                    AND value = ${dTag}
                )
            )
        `(db);
      }
    }

    sql`
      INSERT INTO events (id, original_id, pubkey, created_at, kind, content, sig, first_seen) VALUES (
        ${decryptedEvent.id},
        ${encryptedEvent.id},
        ${decryptedEvent.pubkey},
        ${decryptedEvent.created_at},
        ${decryptedEvent.kind},
        ${decryptedEvent.content},
        ${decryptedEvent.sig},
        unixepoch()
      )
    `(db);
    if (decryptedEvent.tags) {
      for (let i = 0; i < decryptedEvent.tags.length; i++) {
        const tag = sql`
          INSERT INTO event_tags(event_id, tag_name, tag_index) VALUES (
            ${decryptedEvent.id},
            ${decryptedEvent.tags[i][0]},
            ${i}
          ) RETURNING tag_id
        `(db)[0];
        for (let j = 1; j < decryptedEvent.tags[i].length; j++) {
          sql`
            INSERT INTO event_tags_values(tag_id, value_position, value) VALUES (
              ${tag.tag_id},
              ${j},
              ${decryptedEvent.tags[i][j]}
            )
          `(db);
        }
      }
    }
    db.run("COMMIT TRANSACTION");
  } catch (e) {
    db.run("ROLLBACK TRANSACTION");
    throw e;
  }
}

function encryptedEventIsInDb(event: nostrTools.VerifiedEvent) {
  return sql`
    SELECT * FROM events WHERE original_id = ${event.id}
  `(db)[0];
}
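
// Initializes the database (running migrations on first start) and subscribes
// to kind 1059 gift wraps addressed to the CCN key on all configured relays.
// If no gift wrap arrives within roughly 10 seconds, a kind 0 profile event is
// gift-wrapped and published to bootstrap a new CCN.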
async function setupAndSubscribeToExternalEvents() {
  const ccnPubkey = await getCCNPubkey();

  const isInitialized = sql`
    SELECT name FROM sqlite_master WHERE type='table' AND name='migration_history'
  `(db)[0];

  if (!isInitialized) runMigrations(db, -1);

  const latestVersion = sql`
    SELECT migration_version FROM migration_history WHERE status = 'success' ORDER BY migration_version DESC LIMIT 1
  `(db)[0]?.migration_version ?? -1;

  runMigrations(db, latestVersion);

  const knownOriginalEvents = sql`SELECT original_id FROM events`(db).flatMap(
    (row) => row.original_id,
  );

  let timerCleaned = false;

  const timer = setTimeout(async () => {
    // if nothing is found in 10 seconds, create a new CCN, TODO: change logic
    const ccnCreationEventTemplate = {
      kind: 0,
      content: JSON.stringify({
        display_name: "New CCN",
        name: "New CCN",
        bot: true,
      }),
      created_at: Math.floor(Date.now() / 1000),
      tags: [["p", ccnPubkey]],
    };
    const ccnCreationEvent = nostrTools.finalizeEvent(
      ccnCreationEventTemplate,
      await getCCNPrivateKey(),
    );
    const encryptedCCNCreationEvent = await createEncryptedEvent(
      ccnCreationEvent,
    );
    if (timerCleaned) return; // in case we get an event before the timer is cleaned
    await Promise.any(pool.publish(relays, encryptedCCNCreationEvent));
  }, 10000);

  pool.subscribeMany(
    relays,
    [
      {
        "#p": [ccnPubkey],
        kinds: [1059],
      },
    ],
    {
      async onevent(event: nostrTools.Event) {
        if (timer) {
          timerCleaned = true;
          clearTimeout(timer);
        }
        if (knownOriginalEvents.indexOf(event.id) >= 0) return;
        if (!nostrTools.verifyEvent(event)) {
          log.warn("Invalid event received");
          return;
        }
        if (encryptedEventIsInDb(event)) return;
        try {
          // Decrypt inside the try so a malformed or low-PoW gift wrap only
          // logs a warning instead of surfacing as an unhandled rejection.
          const decryptedEvent = await decryptEvent(event);
          addEventToDb(decryptedEvent, event);
        } catch (e) {
          if (e instanceof EventAlreadyExistsException) return;
          log.warn(`Failed to handle incoming event ${event.id}: ${e}`);
        }
      },
    },
  );
}

await setupAndSubscribeToExternalEvents();

class UserConnection {
  public socket: WebSocket;
  public subscriptions: Map<string, NostrFilter[]>;
  public db: Database;

  constructor(
    socket: WebSocket,
    subscriptions: Map<string, NostrFilter[]>,
    db: Database,
  ) {
    this.socket = socket;
    this.subscriptions = subscriptions;
    this.db = db;
  }
}
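
// Returns the ids of this connection's subscriptions whose filters match the
// given event, so new events can be fanned out to live subscribers.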
function filtersMatchingEvent(
  event: NostrEvent,
  connection: UserConnection,
): string[] {
  const matching: string[] = [];
  for (const subscription of connection.subscriptions.keys()) {
    const filters = connection.subscriptions.get(subscription);
    if (!filters) continue;
    // Per NIP-01, an event matches a subscription if it matches any of its
    // filters; within a single filter, every condition must hold.
    const isMatching = filters.some((filter) =>
      Object.entries(filter).every(([type, value]) => {
        if (type === "ids") return value.includes(event.id);
        if (type === "kinds") return value.includes(event.kind);
        if (type === "authors") return value.includes(event.pubkey);
        if (type === "since") return event.created_at >= value;
        if (type === "until") return event.created_at <= value;
        if (type === "limit") return true; // limit only applies to stored events
        if (type.startsWith("#")) {
          const tagName = type.slice(1);
          return event.tags.some(
            (tag: string[]) => tag[0] === tagName && value.includes(tag[1]),
          );
        }
        return false;
      })
    );
    if (isMatching) matching.push(subscription);
  }
  return matching;
}
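
// Handles a REQ message: builds one SQL query out of the NIP-01 filters
// (filters are combined with OR, the conditions inside a filter with AND),
// sends every stored match followed by EOSE, then registers the subscription
// for live updates.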
function handleRequest(connection: UserConnection, request: NostrClientREQ) {
  const [, subscriptionId, ...filters] = request;
  if (connection.subscriptions.has(subscriptionId)) {
    return log.warn("Duplicate subscription ID");
  }

  log.info(
    `New subscription: ${subscriptionId} with filters: ${
      JSON.stringify(
        filters,
      )
    }`,
  );

  let query = sqlPartial`SELECT * FROM events`;

  const filtersAreNotEmpty = filters.some((filter) => {
    return Object.values(filter).some((value) => {
      return value.length > 0;
    });
  });

  if (filtersAreNotEmpty) {
    query = mixQuery(query, sqlPartial`WHERE`);

    for (let i = 0; i < filters.length; i++) {
      // filters act as OR, filter groups act as AND
      query = mixQuery(query, sqlPartial`(`);

      const filter = Object.entries(filters[i]).filter(([type, value]) => {
        if (type === "ids") return value.length > 0;
        if (type === "authors") return value.length > 0;
        if (type === "kinds") return value.length > 0;
        if (type.startsWith("#")) return value.length > 0;
        if (type === "since") return value > 0;
        if (type === "until") return value > 0;
        return false;
      });

      for (let j = 0; j < filter.length; j++) {
        const [type, value] = filter[j];

        if (type === "ids") {
          const uniqueIds = [...new Set(value)];
          query = mixQuery(query, sqlPartial`id IN (`);
          for (let k = 0; k < uniqueIds.length; k++) {
            const id = uniqueIds[k] as string;

            query = mixQuery(query, sqlPartial`${id}`);

            if (k < uniqueIds.length - 1) {
              query = mixQuery(query, sqlPartial`,`);
            }
          }
          query = mixQuery(query, sqlPartial`)`);
        }

        if (type === "authors") {
          const uniqueAuthors = [...new Set(value)];
          query = mixQuery(query, sqlPartial`pubkey IN (`);
          for (let k = 0; k < uniqueAuthors.length; k++) {
            const author = uniqueAuthors[k] as string;

            query = mixQuery(query, sqlPartial`${author}`);

            if (k < uniqueAuthors.length - 1) {
              query = mixQuery(query, sqlPartial`,`);
            }
          }
          query = mixQuery(query, sqlPartial`)`);
        }

        if (type === "kinds") {
          const uniqueKinds = [...new Set(value)];
          query = mixQuery(query, sqlPartial`kind IN (`);
          for (let k = 0; k < uniqueKinds.length; k++) {
            const kind = uniqueKinds[k] as number;

            query = mixQuery(query, sqlPartial`${kind}`);

            if (k < uniqueKinds.length - 1) {
              query = mixQuery(query, sqlPartial`,`);
            }
          }
          query = mixQuery(query, sqlPartial`)`);
        }

        if (type.startsWith("#")) {
          const tag = type.slice(1);
          const uniqueValues = [...new Set(value)];
          query = mixQuery(query, sqlPartial`(`);
          for (let k = 0; k < uniqueValues.length; k++) {
            const tagValue = uniqueValues[k] as string;
            if (tag === "a") {
              const aTagInfo = parseATagQuery(tagValue);

              if (aTagInfo.dTag && aTagInfo.dTag !== "") {
                // Addressable event reference
                query = mixQuery(
                  query,
                  sqlPartial`id IN (
                    SELECT e.id
                    FROM events e
                    JOIN event_tags t ON e.id = t.event_id
                    JOIN event_tags_values v ON t.tag_id = v.tag_id
                    WHERE e.kind = ${aTagInfo.kind}
                      AND e.pubkey = ${aTagInfo.pubkey}
                      AND t.tag_name = 'd'
                      AND v.value_position = 1
                      AND v.value = ${aTagInfo.dTag}
                  )`,
                );
              } else {
                // Replaceable event reference
                query = mixQuery(
                  query,
                  sqlPartial`id IN (
                    SELECT id
                    FROM events
                    WHERE kind = ${aTagInfo.kind}
                      AND pubkey = ${aTagInfo.pubkey}
                  )`,
                );
              }
            } else {
              // Regular tag handling
              query = mixQuery(
                query,
                sqlPartial`id IN (
                  SELECT t.event_id
                  FROM event_tags t
                  WHERE t.tag_name = ${tag}
                    AND t.tag_id IN (
                      SELECT v.tag_id
                      FROM event_tags_values v
                      WHERE v.value_position = 1
                        AND v.value = ${tagValue}
                    )
                )`,
              );
            }

            if (k < uniqueValues.length - 1) {
              query = mixQuery(query, sqlPartial`OR`);
            }
          }
          query = mixQuery(query, sqlPartial`)`);
        }

        if (type === "since") {
          query = mixQuery(query, sqlPartial`created_at >= ${value}`);
        }

        if (type === "until") {
          query = mixQuery(query, sqlPartial`created_at <= ${value}`);
        }

        if (j < filter.length - 1) query = mixQuery(query, sqlPartial`AND`);
      }

      query = mixQuery(query, sqlPartial`)`);

      if (i < filters.length - 1) query = mixQuery(query, sqlPartial`OR`);
    }
  }

  query = mixQuery(query, sqlPartial`ORDER BY created_at ASC`);

  log.debug(query.query, ...query.values);

  const events = connection.db.prepare(query.query).all(...query.values);

  for (let i = 0; i < events.length; i++) {
    const rawTags = sql`SELECT * FROM event_tags_view WHERE event_id = ${
      events[i].id
    }`(connection.db);
    const tags: { [key: string]: string[] } = {};
    for (const item of rawTags) {
      if (!tags[item.tag_name]) tags[item.tag_name] = [item.tag_name];
      tags[item.tag_name].push(item.tag_value);
    }
    const tagsArray = Object.values(tags);

    const event = {
      id: events[i].id,
      pubkey: events[i].pubkey,
      created_at: events[i].created_at,
      kind: events[i].kind,
      tags: tagsArray,
      content: events[i].content,
      sig: events[i].sig,
    };

    connection.socket.send(JSON.stringify(["EVENT", subscriptionId, event]));
  }
  connection.socket.send(JSON.stringify(["EOSE", subscriptionId]));

  connection.subscriptions.set(subscriptionId, filters);
}
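
// Handles an EVENT message from a local client: verifies and stores the event,
// publishes its gift-wrapped form to the external relays, acknowledges it with
// OK, and forwards it to any matching local subscriptions.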
async function handleEvent(
  connection: UserConnection,
  event: nostrTools.Event,
) {
  const valid = nostrTools.verifyEvent(event);
  if (!valid) {
    connection.socket.send(JSON.stringify(["NOTICE", "Invalid event"]));
    return log.warn("Invalid event");
  }

  const encryptedEvent = await createEncryptedEvent(event);
  try {
    addEventToDb(event, encryptedEvent);
  } catch (e) {
    if (e instanceof EventAlreadyExistsException) {
      log.warn("Event already exists");
      return;
    }
  }
  await Promise.any(pool.publish(relays, encryptedEvent));

  connection.socket.send(JSON.stringify(["OK", event.id, true, "Event added"]));

  const filtersThatMatchEvent = filtersMatchingEvent(event, connection);

  for (let i = 0; i < filtersThatMatchEvent.length; i++) {
    const filter = filtersThatMatchEvent[i];
    connection.socket.send(JSON.stringify(["EVENT", filter, event]));
  }
}

function handleClose(connection: UserConnection, subscriptionId: string) {
  if (!connection.subscriptions.has(subscriptionId)) {
    return log.warn(
      `Closing unknown subscription? That's weird. Subscription ID: ${subscriptionId}`,
    );
  }

  connection.subscriptions.delete(subscriptionId);
}
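
// Local relay endpoint. Only localhost clients may upgrade to a WebSocket; each
// connection gets its own subscription map and shares the relay's database.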
Deno.serve({
  port: 6942,
  handler: (request) => {
    if (request.headers.get("upgrade") === "websocket") {
      if (!isLocalhost(request)) {
        return new Response(
          "Forbidden. Please read the Arx-CCN documentation for more information on how to interact with the relay.",
          { status: 403 },
        );
      }

      const { socket, response } = Deno.upgradeWebSocket(request);

      const connection = new UserConnection(socket, new Map(), db);

      socket.onopen = () => log.info("User connected");
      socket.onmessage = (event) => {
        log.debug(`Received: ${event.data}`);
        if (typeof event.data !== "string" || !isValidJSON(event.data)) {
          return log.warn("Invalid request");
        }
        const data = JSON.parse(event.data);
        if (!isArray(data)) return log.warn("Invalid request");

        const msg = n.clientMsg().parse(data);
        switch (msg[0]) {
          case "REQ":
            return handleRequest(connection, n.clientREQ().parse(data));
          case "EVENT":
            return handleEvent(connection, n.clientEVENT().parse(data)[1]);
          case "CLOSE":
            return handleClose(connection, n.clientCLOSE().parse(data)[1]);
          default:
            return log.warn("Invalid request");
        }
      };
      socket.onclose = () => log.info("User disconnected");

      return response;
    }
    return new Response("Eve Relay");
  },
});