diff --git a/biome.json b/biome.json
new file mode 100644
index 0000000..4101319
--- /dev/null
+++ b/biome.json
@@ -0,0 +1,44 @@
+{
+  "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
+  "files": {
+    "include": ["index.ts", "**/*.ts"]
+  },
+  "vcs": {
+    "enabled": true,
+    "clientKind": "git",
+    "useIgnoreFile": true
+  },
+  "linter": {
+    "enabled": true,
+    "rules": {
+      "style": {
+        "noNonNullAssertion": "off",
+        "useNodejsImportProtocol": "warn"
+      },
+      "complexity": {
+        "useLiteralKeys": "off"
+      }
+    }
+  },
+  "formatter": {
+    "enabled": true,
+    "formatWithErrors": true,
+    "ignore": [],
+    "attributePosition": "auto",
+    "indentStyle": "space",
+    "indentWidth": 2,
+    "lineWidth": 80,
+    "lineEnding": "lf"
+  },
+  "javascript": {
+    "formatter": {
+      "arrowParentheses": "always",
+      "bracketSameLine": true,
+      "bracketSpacing": true,
+      "quoteStyle": "single",
+      "quoteProperties": "asNeeded",
+      "semicolons": "always",
+      "trailingCommas": "all"
+    }
+  }
+}
diff --git a/consts.ts b/consts.ts
index cb2ce39..2b8235f 100644
--- a/consts.ts
+++ b/consts.ts
@@ -18,3 +18,27 @@ export const MIN_POW = 8;
  * - Difficulty 21: ~5-6 seconds
  */
 export const POW_TO_MINE = 10;
+
+/**
+ * Maximum size of an event chunk, in characters of the serialized event JSON.
+ *
+ * Events whose serialized form is at most this size are encrypted and sent in
+ * a single gift wrap; larger events are split into chunks of this size.
+ */
+export const MAX_CHUNK_SIZE = 32768;
+
+/**
+ * Interval, in milliseconds, between cleanups of expired event chunks.
+ *
+ * This value determines how often the relay checks for and removes expired
+ * event chunks from the database.
+ */
+export const CHUNK_CLEANUP_INTERVAL = 1000 * 60 * 60;
+
+/**
+ * Maximum age of an event chunk, in milliseconds.
+ *
+ * This value determines how long a chunk may remain in the database before it
+ * is considered expired and eligible for cleanup, even if its event was never
+ * fully reassembled.
+ */
+export const CHUNK_MAX_AGE = 1000 * 60 * 60 * 24;
diff --git a/deno.json b/deno.json
index d505ec8..2738627 100644
--- a/deno.json
+++ b/deno.json
@@ -1,8 +1,11 @@
 {
   "tasks": {
-    "dev": "deno run --allow-read --allow-write --allow-net --allow-ffi --allow-env --env-file --watch index.ts"
+    "dev": "deno run --allow-read --allow-write --allow-net --allow-ffi --allow-env --env-file --watch index.ts",
+    "lint": "biome check",
+    "lint:fix": "biome check --write --unsafe"
   },
   "imports": {
+    "@biomejs/biome": "npm:@biomejs/biome@^1.9.4",
     "@db/sqlite": "jsr:@db/sqlite@^0.12.0",
     "@noble/ciphers": "jsr:@noble/ciphers@^1.2.1",
     "@nostr/tools": "jsr:@nostr/tools@^2.10.4",
@@ -12,13 +15,5 @@
     "@std/fmt": "jsr:@std/fmt@^1.0.4",
     "@std/log": "jsr:@std/log@^0.224.13",
     "@types/deno": "npm:@types/deno@^2.0.0"
-  },
-  "fmt": {
-    "indentWidth": 2,
-    "useTabs": false,
-    "lineWidth": 80,
-    "proseWrap": "always",
-    "semiColons": true,
-    "singleQuote": false
   }
 }
diff --git a/deno.lock b/deno.lock
index 21a17b3..6957956 100644
--- a/deno.lock
+++ b/deno.lock
@@ -30,6 +30,8 @@
     "jsr:@std/path@0.217": "0.217.0",
     "jsr:@std/path@0.221": "0.221.0",
     "jsr:@std/path@^1.0.8": "1.0.8",
+    "npm:@biomejs/biome@1.9.4": "1.9.4",
+    "npm:@biomejs/biome@^1.9.4": "1.9.4",
     "npm:@noble/ciphers@~0.5.1": "0.5.3",
     "npm:@noble/curves@1.2.0": "1.2.0",
     "npm:@noble/hashes@1.3.1": "1.3.1",
@@ -168,6 +170,43 @@
     }
   },
   "npm": {
+    "@biomejs/biome@1.9.4": {
+      "integrity": "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog==",
+      "dependencies": [
+        "@biomejs/cli-darwin-arm64",
+        "@biomejs/cli-darwin-x64",
+        "@biomejs/cli-linux-arm64",
+        "@biomejs/cli-linux-arm64-musl",
+        "@biomejs/cli-linux-x64",
+        "@biomejs/cli-linux-x64-musl",
+        "@biomejs/cli-win32-arm64",
+        "@biomejs/cli-win32-x64"
+      ]
+    },
+    "@biomejs/cli-darwin-arm64@1.9.4": {
+      "integrity": "sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw=="
+    },
+    "@biomejs/cli-darwin-x64@1.9.4": {
+      "integrity": "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg=="
+    },
+    "@biomejs/cli-linux-arm64-musl@1.9.4": {
+      "integrity": "sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA=="
+    },
+    "@biomejs/cli-linux-arm64@1.9.4": {
+      "integrity": "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g=="
+    },
+    "@biomejs/cli-linux-x64-musl@1.9.4": {
+      "integrity": "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg=="
+    },
+    "@biomejs/cli-linux-x64@1.9.4": {
+      "integrity": "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg=="
+    },
+    "@biomejs/cli-win32-arm64@1.9.4": {
+      "integrity": "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg=="
+    },
+    "@biomejs/cli-win32-x64@1.9.4": {
+      "integrity": "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA=="
+    },
     "@noble/ciphers@0.5.3": {
       "integrity": "sha512-B0+6IIHiqEs3BPMT0hcRmHvEj2QHOLu+uwt+tqDDeVd0oyVzh7BPrDcPjRnV1PV/5LaknXJJQvOuRGR0zQJz+w=="
     },
@@ -272,6 +311,7 @@
       "jsr:@std/encoding@^1.0.6",
       "jsr:@std/fmt@^1.0.4",
       "jsr:@std/log@~0.224.13",
+      "npm:@biomejs/biome@^1.9.4",
       "npm:@types/deno@2"
     ]
   }
diff --git a/eventEncryptionDecryption.ts b/eventEncryptionDecryption.ts
new file mode 100644
index 0000000..d77e2f5
--- /dev/null
+++ b/eventEncryptionDecryption.ts
@@ -0,0 +1,174 @@
+import type { Database } from '@db/sqlite';
+import * as nostrTools from '@nostr/tools';
+import { nip44 } from '@nostr/tools';
+import { MAX_CHUNK_SIZE, MIN_POW, POW_TO_MINE } from './consts.ts';
+import {
+  getCCNPrivateKey,
+  getCCNPubkey,
+  randomTimeUpTo2DaysInThePast,
+} from './utils.ts';
+import { sql } from './utils/queries.ts';
+
+export class EventAlreadyExistsException extends Error {}
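+// Signals that a chunk was stored but the full chunked event is not yet complete.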
+export class ChunkedEventReceived extends Error {}
+
+export async function createEncryptedEvent(
+  event: nostrTools.VerifiedEvent,
+): Promise<nostrTools.VerifiedEvent | nostrTools.VerifiedEvent[]> {
+  if (!event.id) throw new Error('Event must have an ID');
+  if (!event.sig) throw new Error('Event must be signed');
+
+  const ccnPubKey = await getCCNPubkey();
+  const ccnPrivateKey = await getCCNPrivateKey();
+
+  const eventJson = JSON.stringify(event);
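+  // Small events fit in a single seal (kind 13) plus gift wrap (kind 1059)
+  // addressed to the CCN key and mined to POW_TO_MINE.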
+  if (eventJson.length <= MAX_CHUNK_SIZE) {
+    const randomPrivateKey = nostrTools.generateSecretKey();
+    const randomPrivateKeyPubKey = nostrTools.getPublicKey(randomPrivateKey);
+    const conversationKey = nip44.getConversationKey(
+      randomPrivateKey,
+      ccnPubKey,
+    );
+    const sealTemplate = {
+      kind: 13,
+      created_at: randomTimeUpTo2DaysInThePast(),
+      content: nip44.encrypt(eventJson, conversationKey),
+      tags: [],
+    };
+    const seal = nostrTools.finalizeEvent(sealTemplate, ccnPrivateKey);
+    const giftWrapTemplate = {
+      kind: 1059,
+      created_at: randomTimeUpTo2DaysInThePast(),
+      content: nip44.encrypt(JSON.stringify(seal), conversationKey),
+      tags: [['p', ccnPubKey]],
+      pubkey: randomPrivateKeyPubKey,
+    };
+    const minedGiftWrap = nostrTools.nip13.minePow(
+      giftWrapTemplate,
+      POW_TO_MINE,
+    );
+    const giftWrap = nostrTools.finalizeEvent(minedGiftWrap, randomPrivateKey);
+    return giftWrap;
+  }
+
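+  // The event is too large for a single wrap: split the JSON into MAX_CHUNK_SIZE
+  // slices and send each slice in its own seal + gift wrap, tagged with its
+  // position and a shared message id so the receiver can reassemble the event.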
+  const chunks: string[] = [];
+  for (let i = 0; i < eventJson.length; i += MAX_CHUNK_SIZE)
+    chunks.push(eventJson.slice(i, i + MAX_CHUNK_SIZE));
+
+  const messageId = crypto.randomUUID();
+  const totalChunks = chunks.length;
+
+  const encryptedChunks = [];
+  for (let i = 0; i < chunks.length; i++) {
+    const chunk = chunks[i];
+    const randomPrivateKey = nostrTools.generateSecretKey();
+    const randomPrivateKeyPubKey = nostrTools.getPublicKey(randomPrivateKey);
+    const conversationKey = nip44.getConversationKey(
+      randomPrivateKey,
+      ccnPubKey,
+    );
+
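+    // The chunk tag is placed on the seal, so chunk metadata is only visible
+    // after the gift wrap has been decrypted.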
+    const sealTemplate = {
+      kind: 13,
+      created_at: randomTimeUpTo2DaysInThePast(),
+      content: nip44.encrypt(chunk, conversationKey),
+      tags: [['chunk', String(i), String(totalChunks), messageId]],
+    };
+
+    const seal = nostrTools.finalizeEvent(sealTemplate, ccnPrivateKey);
+    const giftWrapTemplate = {
+      kind: 1059,
+      created_at: randomTimeUpTo2DaysInThePast(),
+      content: nip44.encrypt(JSON.stringify(seal), conversationKey),
+      tags: [['p', ccnPubKey]],
+      pubkey: randomPrivateKeyPubKey,
+    };
+
+    const minedGiftWrap = nostrTools.nip13.minePow(
+      giftWrapTemplate,
+      POW_TO_MINE,
+    );
+    encryptedChunks.push(
+      nostrTools.finalizeEvent(minedGiftWrap, randomPrivateKey),
+    );
+  }
+
+  return encryptedChunks;
+}
+
+export async function decryptEvent(
+  db: Database,
+  event: nostrTools.Event,
+): Promise<nostrTools.VerifiedEvent> {
+  const ccnPrivkey = await getCCNPrivateKey();
+
+  if (event.kind !== 1059) {
+    throw new Error('Cannot decrypt event -- not a gift wrap');
+  }
+
+  const pow = nostrTools.nip13.getPow(event.id);
+
+  if (pow < MIN_POW) {
+    throw new Error('Cannot decrypt event -- PoW too low');
+  }
+
+  const conversationKey = nip44.getConversationKey(ccnPrivkey, event.pubkey);
+  const seal = JSON.parse(nip44.decrypt(event.content, conversationKey));
+  if (!seal) throw new Error('Cannot decrypt event -- no seal');
+  if (seal.kind !== 13) {
+    throw new Error('Cannot decrypt event subevent -- not a seal');
+  }
+
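+  // Unchunked events carry no 'chunk' tag on the seal; decrypt and return them directly.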
+  const chunkTag = seal.tags.find((tag: string[]) => tag[0] === 'chunk');
+  if (!chunkTag) {
+    const content = JSON.parse(nip44.decrypt(seal.content, conversationKey));
+    return content as nostrTools.VerifiedEvent;
+  }
+
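+  // Chunk tag layout: ['chunk', chunkIndex, totalChunks, messageId].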
+  const [_, chunkIndex, totalChunks, messageId] = chunkTag;
+  const chunk = nip44.decrypt(seal.content, conversationKey);
+
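+  // Store this chunk; once every chunk for messageId is present, reassemble and
+  // return the original event, otherwise signal the caller via ChunkedEventReceived.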
+  try {
+    sql`
+      INSERT INTO event_chunks (
+        message_id,
+        chunk_index,
+        total_chunks,
+        chunk_data,
+        conversation_key,
+        created_at
+      ) VALUES (
+        ${messageId},
+        ${Number(chunkIndex)},
+        ${Number(totalChunks)},
+        ${chunk},
+        ${conversationKey},
+        ${Math.floor(Date.now() / 1000)}
+      )
+    `(db);
+
+    const chunks = sql`
+      SELECT chunk_data 
+      FROM event_chunks 
+      WHERE message_id = ${messageId}
+      ORDER BY chunk_index ASC
+    `(db);
+
+    if (chunks.length === Number(totalChunks)) {
+      const completeEventJson = chunks.map((c) => c.chunk_data).join('');
+
+      sql`DELETE FROM event_chunks WHERE message_id = ${messageId}`(db);
+
+      return JSON.parse(completeEventJson) as nostrTools.VerifiedEvent;
+    }
+
+    throw new ChunkedEventReceived(
+      `Chunked event received (${chunks.length}/${totalChunks}) - messageId: ${messageId}`,
+    );
+  } catch (e) {
+    if (e instanceof Error && e.message.includes('UNIQUE constraint failed'))
+      throw new Error(
+        `Duplicate chunk received (${Number(chunkIndex) + 1}/${totalChunks}) - messageId: ${messageId}`,
+      );
+    throw e;
+  }
+}
diff --git a/index.ts b/index.ts
index 4186bc0..f9cbd6b 100644
--- a/index.ts
+++ b/index.ts
@@ -1,9 +1,20 @@
-import { NSchema as n } from "jsr:@nostrify/nostrify";
+import { randomBytes } from '@noble/ciphers/webcrypto';
+import * as nostrTools from '@nostr/tools';
+import { Database } from 'jsr:@db/sqlite';
+import { NSchema as n } from 'jsr:@nostrify/nostrify';
 import type {
   NostrClientREQ,
   NostrEvent,
   NostrFilter,
-} from "jsr:@nostrify/types";
+} from 'jsr:@nostrify/types';
+import { encodeBase64 } from 'jsr:@std/encoding@0.224/base64';
+import { CHUNK_CLEANUP_INTERVAL, CHUNK_MAX_AGE } from './consts.ts';
+import {
+  ChunkedEventReceived,
+  EventAlreadyExistsException,
+  createEncryptedEvent,
+  decryptEvent,
+} from './eventEncryptionDecryption.ts';
 import {
   getCCNPrivateKey,
   getCCNPubkey,
@@ -14,57 +25,53 @@ import {
   isReplaceableEvent,
   isValidJSON,
   parseATagQuery,
-  randomTimeUpTo2DaysInThePast,
-} from "./utils.ts";
-import * as nostrTools from "@nostr/tools";
-import { nip44 } from "@nostr/tools";
-import { randomBytes } from "@noble/ciphers/webcrypto";
-import { encodeBase64 } from "jsr:@std/encoding@0.224/base64";
-import { Database } from "jsr:@db/sqlite";
-import { mixQuery, sql, sqlPartial } from "./utils/queries.ts";
-import { log, setupLogger } from "./utils/logs.ts";
-import { getEveFilePath } from "./utils/files.ts";
-import { MIN_POW, POW_TO_MINE } from "./consts.ts";
+} from './utils.ts';
+import { getEveFilePath } from './utils/files.ts';
+import { log, setupLogger } from './utils/logs.ts';
+import { mixQuery, sql, sqlPartial } from './utils/queries.ts';
 
 await setupLogger();
 
-if (!Deno.env.has("ENCRYPTION_KEY")) {
+if (!Deno.env.has('ENCRYPTION_KEY')) {
   log.error(
-    `Missing ENCRYPTION_KEY. Please set it in your env.\nA new one has been generated for you: ENCRYPTION_KEY="${
-      encodeBase64(
-        randomBytes(32),
-      )
-    }"`,
+    `Missing ENCRYPTION_KEY. Please set it in your env.\nA new one has been generated for you: ENCRYPTION_KEY="${encodeBase64(
+      randomBytes(32),
+    )}"`,
   );
   Deno.exit(1);
 }
 
-const db = new Database(await getEveFilePath("db"));
+const db = new Database(await getEveFilePath('db'));
 const pool = new nostrTools.SimplePool();
 const relays = [
-  "wss://relay.arx-ccn.com/",
-  "wss://relay.dannymorabito.com/",
-  "wss://nos.lol/",
-  "wss://nostr.einundzwanzig.space/",
-  "wss://nostr.massmux.com/",
-  "wss://nostr.mom/",
-  "wss://nostr.wine/",
-  "wss://purplerelay.com/",
-  "wss://relay.damus.io/",
-  "wss://relay.goodmorningbitcoin.com/",
-  "wss://relay.lexingtonbitcoin.org/",
-  "wss://relay.nostr.band/",
-  "wss://relay.primal.net/",
-  "wss://relay.snort.social/",
-  "wss://strfry.iris.to/",
-  "wss://cache2.primal.net/v1",
+  'wss://relay.arx-ccn.com/',
+  'wss://relay.dannymorabito.com/',
+  'wss://nos.lol/',
+  'wss://nostr.einundzwanzig.space/',
+  'wss://nostr.massmux.com/',
+  'wss://nostr.mom/',
+  'wss://nostr.wine/',
+  'wss://purplerelay.com/',
+  'wss://relay.damus.io/',
+  'wss://relay.goodmorningbitcoin.com/',
+  'wss://relay.lexingtonbitcoin.org/',
+  'wss://relay.nostr.band/',
+  'wss://relay.primal.net/',
+  'wss://relay.snort.social/',
+  'wss://strfry.iris.to/',
+  'wss://cache2.primal.net/v1',
 ];
 
 export function runMigrations(db: Database, latestVersion: number) {
-  const migrations = Deno.readDirSync(`${import.meta.dirname}/migrations`);
+  const migrations = [...Deno.readDirSync(`${import.meta.dirname}/migrations`)];
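+  // Sort migration files numerically by their version prefix (e.g. '4-createChunksStore.sql')
+  // so the arbitrary ordering of readDirSync cannot apply them out of order.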
+  migrations.sort((a, b) => {
+    const aVersion = Number.parseInt(a.name.split('-')[0], 10);
+    const bVersion = Number.parseInt(b.name.split('-')[0], 10);
+    return aVersion - bVersion;
+  });
   for (const migrationFile of migrations) {
     const migrationVersion = Number.parseInt(
-      migrationFile.name.split("-")[0],
+      migrationFile.name.split('-')[0],
       10,
     );
 
@@ -76,94 +83,35 @@ export function runMigrations(db: Database, latestVersion: number) {
       const migrationSql = Deno.readTextFileSync(
         `${import.meta.dirname}/migrations/${migrationFile.name}`,
       );
-      db.run("BEGIN TRANSACTION");
+      db.run('BEGIN TRANSACTION');
       try {
         db.run(migrationSql);
         const end = Date.now();
         const durationMs = end - start;
         sql`
-          INSERT INTO migration_history (migration_version, migration_name, executed_at, duration_ms, status) VALUES (${migrationVersion}, ${migrationFile.name}, ${
-          new Date().toISOString()
-        }, ${durationMs}, 'success');
+          INSERT INTO migration_history (migration_version, migration_name, executed_at, duration_ms, status) VALUES (${migrationVersion}, ${migrationFile.name}, ${new Date().toISOString()}, ${durationMs}, 'success');
           db.run("COMMIT TRANSACTION");
         `(db);
       } catch (e) {
-        db.run("ROLLBACK TRANSACTION");
-        const error = e instanceof Error
-          ? e
-          : typeof e === "string"
-          ? new Error(e)
-          : new Error(JSON.stringify(e));
+        db.run('ROLLBACK TRANSACTION');
+        const error =
+          e instanceof Error
+            ? e
+            : typeof e === 'string'
+              ? new Error(e)
+              : new Error(JSON.stringify(e));
         const end = Date.now();
         const durationMs = end - start;
         sql`
-          INSERT INTO migration_history (migration_version, migration_name, executed_at, duration_ms, status, error_message) VALUES (${migrationVersion}, ${migrationFile.name}, ${
-          new Date().toISOString()
-        }, ${durationMs}, 'failed', ${error.message});
+          INSERT INTO migration_history (migration_version, migration_name, executed_at, duration_ms, status, error_message) VALUES (${migrationVersion}, ${migrationFile.name}, ${new Date().toISOString()}, ${durationMs}, 'failed', ${error.message});
         `(db);
         throw e;
       }
-      db.run("END TRANSACTION");
+      db.run('END TRANSACTION');
     }
   }
 }
 
-async function createEncryptedEvent(
-  event: nostrTools.VerifiedEvent,
-): Promise<nostrTools.VerifiedEvent> {
-  if (!event.id) throw new Error("Event must have an ID");
-  if (!event.sig) throw new Error("Event must be signed");
-  const ccnPubKey = await getCCNPubkey();
-  const ccnPrivateKey = await getCCNPrivateKey();
-  const randomPrivateKey = nostrTools.generateSecretKey();
-  const randomPrivateKeyPubKey = nostrTools.getPublicKey(randomPrivateKey);
-  const conversationKey = nip44.getConversationKey(randomPrivateKey, ccnPubKey);
-  const sealTemplate = {
-    kind: 13,
-    created_at: randomTimeUpTo2DaysInThePast(),
-    content: nip44.encrypt(JSON.stringify(event), conversationKey),
-    tags: [],
-  };
-  const seal = nostrTools.finalizeEvent(sealTemplate, ccnPrivateKey);
-  const giftWrapTemplate = {
-    kind: 1059,
-    created_at: randomTimeUpTo2DaysInThePast(),
-    content: nip44.encrypt(JSON.stringify(seal), conversationKey),
-    tags: [["p", ccnPubKey]],
-    pubkey: randomPrivateKeyPubKey,
-  };
-  const minedGiftWrap = nostrTools.nip13.minePow(giftWrapTemplate, POW_TO_MINE);
-  const giftWrap = nostrTools.finalizeEvent(minedGiftWrap, randomPrivateKey);
-  return giftWrap;
-}
-
-async function decryptEvent(
-  event: nostrTools.Event,
-): Promise<nostrTools.VerifiedEvent> {
-  const ccnPrivkey = await getCCNPrivateKey();
-
-  if (event.kind !== 1059) {
-    throw new Error("Cannot decrypt event -- not a gift wrap");
-  }
-
-  const pow = nostrTools.nip13.getPow(event.id);
-
-  if (pow < MIN_POW) {
-    throw new Error("Cannot decrypt event -- PoW too low");
-  }
-
-  const conversationKey = nip44.getConversationKey(ccnPrivkey, event.pubkey);
-  const seal = JSON.parse(nip44.decrypt(event.content, conversationKey));
-  if (!seal) throw new Error("Cannot decrypt event -- no seal");
-  if (seal.kind !== 13) {
-    throw new Error("Cannot decrypt event subevent -- not a seal");
-  }
-  const content = JSON.parse(nip44.decrypt(seal.content, conversationKey));
-  return content as nostrTools.VerifiedEvent;
-}
-
-class EventAlreadyExistsException extends Error {}
-
 function addEventToDb(
   decryptedEvent: nostrTools.VerifiedEvent,
   encryptedEvent: nostrTools.VerifiedEvent,
@@ -174,7 +122,7 @@ function addEventToDb(
 
   if (existingEvent) throw new EventAlreadyExistsException();
   try {
-    db.run("BEGIN TRANSACTION");
+    db.run('BEGIN TRANSACTION');
 
     if (isReplaceableEvent(decryptedEvent.kind)) {
       sql`
@@ -187,7 +135,7 @@ function addEventToDb(
     }
 
     if (isAddressableEvent(decryptedEvent.kind)) {
-      const dTag = decryptedEvent.tags.find((tag) => tag[0] === "d")?.[1];
+      const dTag = decryptedEvent.tags.find((tag) => tag[0] === 'd')?.[1];
       if (dTag) {
         sql`
           UPDATE events 
@@ -209,7 +157,7 @@ function addEventToDb(
     }
 
     if (isCCNReplaceableEvent(decryptedEvent.kind)) {
-      const dTag = decryptedEvent.tags.find((tag) => tag[0] === "d")?.[1];
+      const dTag = decryptedEvent.tags.find((tag) => tag[0] === 'd')?.[1];
       sql`
         UPDATE events 
         SET replaced = 1
@@ -259,9 +207,9 @@ function addEventToDb(
         }
       }
     }
-    db.run("COMMIT TRANSACTION");
+    db.run('COMMIT TRANSACTION');
   } catch (e) {
-    db.run("ROLLBACK TRANSACTION");
+    db.run('ROLLBACK TRANSACTION');
     throw e;
   }
 }
@@ -272,6 +220,11 @@ function encryptedEventIsInDb(event: nostrTools.VerifiedEvent) {
 	`(db)[0];
 }
 
+function cleanupOldChunks() {
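+  // Drop chunks older than CHUNK_MAX_AGE so incomplete messages don't linger in the database.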
+  const cutoffTime = Math.floor((Date.now() - CHUNK_MAX_AGE) / 1000);
+  sql`DELETE FROM event_chunks WHERE created_at < ${cutoffTime}`(db);
+}
+
 async function setupAndSubscribeToExternalEvents() {
   const ccnPubkey = await getCCNPubkey();
 
@@ -281,7 +234,8 @@ async function setupAndSubscribeToExternalEvents() {
 
   if (!isInitialized) runMigrations(db, -1);
 
-  const latestVersion = sql`
+  const latestVersion =
+    sql`
     SELECT migration_version FROM migration_history WHERE status = 'success' ORDER BY migration_version DESC LIMIT 1
   `(db)[0]?.migration_version ?? -1;
 
@@ -291,7 +245,7 @@ async function setupAndSubscribeToExternalEvents() {
     relays,
     [
       {
-        "#p": [ccnPubkey],
+        '#p': [ccnPubkey],
         kinds: [1059],
       },
     ],
@@ -303,15 +257,18 @@ async function setupAndSubscribeToExternalEvents() {
         }
         if (knownOriginalEvents.indexOf(event.id) >= 0) return;
         if (!nostrTools.verifyEvent(event)) {
-          log.warn("Invalid event received");
+          log.warn('Invalid event received');
           return;
         }
         if (encryptedEventIsInDb(event)) return;
-        const decryptedEvent = await decryptEvent(event);
         try {
+          const decryptedEvent = await decryptEvent(db, event);
           addEventToDb(decryptedEvent, event);
         } catch (e) {
           if (e instanceof EventAlreadyExistsException) return;
+          if (e instanceof ChunkedEventReceived) {
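+            // A chunk was stored; the complete event will be added once all chunks arrive.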
+            return;
+          }
         }
       },
     },
@@ -328,23 +285,29 @@ async function setupAndSubscribeToExternalEvents() {
     const ccnCreationEventTemplate = {
       kind: 0,
       content: JSON.stringify({
-        display_name: "New CCN",
-        name: "New CCN",
+        display_name: 'New CCN',
+        name: 'New CCN',
         bot: true,
       }),
       created_at: Math.floor(Date.now() / 1000),
-      tags: [["p", ccnPubkey]],
+      tags: [['p', ccnPubkey]],
     };
     const ccnCreationEvent = nostrTools.finalizeEvent(
       ccnCreationEventTemplate,
       await getCCNPrivateKey(),
     );
-    const encryptedCCNCreationEvent = await createEncryptedEvent(
-      ccnCreationEvent,
-    );
+    const encryptedCCNCreationEvent =
+      await createEncryptedEvent(ccnCreationEvent);
     if (timerCleaned) return; // in case we get an event before the timer is cleaned
-    await Promise.any(pool.publish(relays, encryptedCCNCreationEvent));
+    if (Array.isArray(encryptedCCNCreationEvent)) {
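+      // createEncryptedEvent returns an array when the event was chunked; publish every gift wrap.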
+      for (const event of encryptedCCNCreationEvent)
+        await Promise.any(pool.publish(relays, event));
+    } else {
+      await Promise.any(pool.publish(relays, encryptedCCNCreationEvent));
+    }
   }, 10000);
+
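+  // Periodically prune stale chunks left over from messages that never completed.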
+  setInterval(cleanupOldChunks, CHUNK_CLEANUP_INTERVAL);
 }
 
 await setupAndSubscribeToExternalEvents();
@@ -375,20 +338,20 @@ function filtersMatchingEvent(
     if (!filters) continue;
     const isMatching = filters.every((filter) =>
       Object.entries(filter).every(([type, value]) => {
-        if (type === "ids") return value.includes(event.id);
-        if (type === "kinds") return value.includes(event.kind);
-        if (type === "authors") return value.includes(event.pubkey);
-        if (type === "since") return event.created_at >= value;
-        if (type === "until") return event.created_at <= value;
-        if (type === "limit") return event.created_at <= value;
-        if (type.startsWith("#")) {
+        if (type === 'ids') return value.includes(event.id);
+        if (type === 'kinds') return value.includes(event.kind);
+        if (type === 'authors') return value.includes(event.pubkey);
+        if (type === 'since') return event.created_at >= value;
+        if (type === 'until') return event.created_at <= value;
+        if (type === 'limit') return event.created_at <= value;
+        if (type.startsWith('#')) {
           const tagName = type.slice(1);
           return event.tags.some(
             (tag: string[]) => tag[0] === tagName && value.includes(tag[1]),
           );
         }
         return false;
-      })
+      }),
     );
     if (isMatching) matching.push(subscription);
   }
@@ -398,15 +361,13 @@ function filtersMatchingEvent(
 function handleRequest(connection: UserConnection, request: NostrClientREQ) {
   const [, subscriptionId, ...filters] = request;
   if (connection.subscriptions.has(subscriptionId)) {
-    return log.warn("Duplicate subscription ID");
+    return log.warn('Duplicate subscription ID');
   }
 
   log.info(
-    `New subscription: ${subscriptionId} with filters: ${
-      JSON.stringify(
-        filters,
-      )
-    }`,
+    `New subscription: ${subscriptionId} with filters: ${JSON.stringify(
+      filters,
+    )}`,
   );
 
   let query = sqlPartial`SELECT * FROM events WHERE replaced = 0`;
@@ -425,19 +386,19 @@ function handleRequest(connection: UserConnection, request: NostrClientREQ) {
       query = mixQuery(query, sqlPartial`(`);
 
       const filter = Object.entries(filters[i]).filter(([type, value]) => {
-        if (type === "ids") return value.length > 0;
-        if (type === "authors") return value.length > 0;
-        if (type === "kinds") return value.length > 0;
-        if (type.startsWith("#")) return value.length > 0;
-        if (type === "since") return value > 0;
-        if (type === "until") return value > 0;
+        if (type === 'ids') return value.length > 0;
+        if (type === 'authors') return value.length > 0;
+        if (type === 'kinds') return value.length > 0;
+        if (type.startsWith('#')) return value.length > 0;
+        if (type === 'since') return value > 0;
+        if (type === 'until') return value > 0;
         return false;
       });
 
       for (let j = 0; j < filter.length; j++) {
         const [type, value] = filter[j];
 
-        if (type === "ids") {
+        if (type === 'ids') {
           const uniqueIds = [...new Set(value)];
           query = mixQuery(query, sqlPartial`id IN (`);
           for (let k = 0; k < uniqueIds.length; k++) {
@@ -452,7 +413,7 @@ function handleRequest(connection: UserConnection, request: NostrClientREQ) {
           query = mixQuery(query, sqlPartial`)`);
         }
 
-        if (type === "authors") {
+        if (type === 'authors') {
           const uniqueAuthors = [...new Set(value)];
           query = mixQuery(query, sqlPartial`pubkey IN (`);
           for (let k = 0; k < uniqueAuthors.length; k++) {
@@ -467,7 +428,7 @@ function handleRequest(connection: UserConnection, request: NostrClientREQ) {
           query = mixQuery(query, sqlPartial`)`);
         }
 
-        if (type === "kinds") {
+        if (type === 'kinds') {
           const uniqueKinds = [...new Set(value)];
           query = mixQuery(query, sqlPartial`kind IN (`);
           for (let k = 0; k < uniqueKinds.length; k++) {
@@ -482,16 +443,16 @@ function handleRequest(connection: UserConnection, request: NostrClientREQ) {
           query = mixQuery(query, sqlPartial`)`);
         }
 
-        if (type.startsWith("#")) {
+        if (type.startsWith('#')) {
           const tag = type.slice(1);
           const uniqueValues = [...new Set(value)];
           query = mixQuery(query, sqlPartial`(`);
           for (let k = 0; k < uniqueValues.length; k++) {
             const tagValue = uniqueValues[k] as string;
-            if (tag === "a") {
+            if (tag === 'a') {
               const aTagInfo = parseATagQuery(tagValue);
 
-              if (aTagInfo.dTag && aTagInfo.dTag !== "") {
+              if (aTagInfo.dTag && aTagInfo.dTag !== '') {
                 if (isCCNReplaceableEvent(aTagInfo.kind)) {
                   // CCN replaceable event reference
                   query = mixQuery(
@@ -561,11 +522,11 @@ function handleRequest(connection: UserConnection, request: NostrClientREQ) {
           query = mixQuery(query, sqlPartial`)`);
         }
 
-        if (type === "since") {
+        if (type === 'since') {
           query = mixQuery(query, sqlPartial`created_at >= ${value}`);
         }
 
-        if (type === "until") {
+        if (type === 'until') {
           query = mixQuery(query, sqlPartial`created_at <= ${value}`);
         }
 
@@ -588,10 +549,13 @@ function handleRequest(connection: UserConnection, request: NostrClientREQ) {
     const rawTags = sql`SELECT * FROM event_tags_view WHERE event_id = ${
       events[i].id
     }`(connection.db);
-    const tagsByIndex = new Map<number, {
-      name: string;
-      values: Map<number, string>;
-    }>();
+    const tagsByIndex = new Map<
+      number,
+      {
+        name: string;
+        values: Map<number, string>;
+      }
+    >();
 
     for (const tag of rawTags) {
       let tagData = tagsByIndex.get(tag.tag_index);
@@ -629,9 +593,9 @@ function handleRequest(connection: UserConnection, request: NostrClientREQ) {
       sig: events[i].sig,
     };
 
-    connection.socket.send(JSON.stringify(["EVENT", subscriptionId, event]));
+    connection.socket.send(JSON.stringify(['EVENT', subscriptionId, event]));
   }
-  connection.socket.send(JSON.stringify(["EOSE", subscriptionId]));
+  connection.socket.send(JSON.stringify(['EOSE', subscriptionId]));
 
   connection.subscriptions.set(subscriptionId, filters);
 }
@@ -642,28 +606,35 @@ async function handleEvent(
 ) {
   const valid = nostrTools.verifyEvent(event);
   if (!valid) {
-    connection.socket.send(JSON.stringify(["NOTICE", "Invalid event"]));
-    return log.warn("Invalid event");
+    connection.socket.send(JSON.stringify(['NOTICE', 'Invalid event']));
+    return log.warn('Invalid event');
   }
 
   const encryptedEvent = await createEncryptedEvent(event);
   try {
-    addEventToDb(event, encryptedEvent);
+    if (Array.isArray(encryptedEvent)) {
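+      // Chunked event: publish every chunk, and store the first gift wrap as the
+      // encrypted representative of the original event in the local database.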
+      await Promise.all(
+        encryptedEvent.map((chunk) => Promise.any(pool.publish(relays, chunk))),
+      );
+      addEventToDb(event, encryptedEvent[0]);
+    } else {
+      addEventToDb(event, encryptedEvent);
+      await Promise.any(pool.publish(relays, encryptedEvent));
+    }
   } catch (e) {
     if (e instanceof EventAlreadyExistsException) {
-      log.warn("Event already exists");
+      log.warn('Event already exists');
       return;
     }
   }
-  await Promise.any(pool.publish(relays, encryptedEvent));
 
-  connection.socket.send(JSON.stringify(["OK", event.id, true, "Event added"]));
+  connection.socket.send(JSON.stringify(['OK', event.id, true, 'Event added']));
 
   const filtersThatMatchEvent = filtersMatchingEvent(event, connection);
 
   for (let i = 0; i < filtersThatMatchEvent.length; i++) {
     const filter = filtersThatMatchEvent[i];
-    connection.socket.send(JSON.stringify(["EVENT", filter, event]));
+    connection.socket.send(JSON.stringify(['EVENT', filter, event]));
   }
 }
 
@@ -680,10 +651,10 @@ function handleClose(connection: UserConnection, subscriptionId: string) {
 Deno.serve({
   port: 6942,
   handler: (request) => {
-    if (request.headers.get("upgrade") === "websocket") {
+    if (request.headers.get('upgrade') === 'websocket') {
       if (!isLocalhost(request)) {
         return new Response(
-          "Forbidden. Please read the Arx-CCN documentation for more information on how to interact with the relay.",
+          'Forbidden. Please read the Arx-CCN documentation for more information on how to interact with the relay.',
           { status: 403 },
         );
       }
@@ -692,31 +663,31 @@ Deno.serve({
 
       const connection = new UserConnection(socket, new Map(), db);
 
-      socket.onopen = () => log.info("User connected");
+      socket.onopen = () => log.info('User connected');
       socket.onmessage = (event) => {
         log.debug(`Received: ${event.data}`);
-        if (typeof event.data !== "string" || !isValidJSON(event.data)) {
-          return log.warn("Invalid request");
+        if (typeof event.data !== 'string' || !isValidJSON(event.data)) {
+          return log.warn('Invalid request');
         }
         const data = JSON.parse(event.data);
-        if (!isArray(data)) return log.warn("Invalid request");
+        if (!isArray(data)) return log.warn('Invalid request');
 
         const msg = n.clientMsg().parse(data);
         switch (msg[0]) {
-          case "REQ":
+          case 'REQ':
             return handleRequest(connection, n.clientREQ().parse(data));
-          case "EVENT":
+          case 'EVENT':
             return handleEvent(connection, n.clientEVENT().parse(data)[1]);
-          case "CLOSE":
+          case 'CLOSE':
             return handleClose(connection, n.clientCLOSE().parse(data)[1]);
           default:
-            return log.warn("Invalid request");
+            return log.warn('Invalid request');
         }
       };
-      socket.onclose = () => log.info("User disconnected");
+      socket.onclose = () => log.info('User disconnected');
 
       return response;
     }
-    return new Response("Eve Relay");
+    return new Response('Eve Relay');
   },
 });
diff --git a/migrations/4-createChunksStore.sql b/migrations/4-createChunksStore.sql
new file mode 100644
index 0000000..d554cd4
--- /dev/null
+++ b/migrations/4-createChunksStore.sql
@@ -0,0 +1,13 @@
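+-- Staging area for encrypted event chunks until all chunks of a message arrive
+-- and the original event can be reassembled.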
+CREATE TABLE event_chunks (
+    chunk_id INTEGER PRIMARY KEY AUTOINCREMENT,
+    message_id TEXT NOT NULL,
+    chunk_index INTEGER NOT NULL,
+    total_chunks INTEGER NOT NULL,
+    chunk_data TEXT NOT NULL,
+    conversation_key TEXT NOT NULL,
+    created_at INTEGER NOT NULL,
+    UNIQUE(message_id, chunk_index)
+);
+
+CREATE INDEX idx_event_chunks_message_id ON event_chunks(message_id);
+CREATE INDEX idx_event_chunks_created_at ON event_chunks(created_at);
\ No newline at end of file
diff --git a/utils.ts b/utils.ts
index 94e930c..6b6b63f 100644
--- a/utils.ts
+++ b/utils.ts
@@ -1,19 +1,19 @@
-import { exists } from "jsr:@std/fs";
-import * as nostrTools from "@nostr/tools";
-import * as nip06 from "@nostr/tools/nip06";
-import { decodeBase64, encodeBase64 } from "jsr:@std/encoding@0.224/base64";
-import { getEveFilePath } from "./utils/files.ts";
+import { decodeBase64, encodeBase64 } from 'jsr:@std/encoding@0.224/base64';
+import { exists } from 'jsr:@std/fs';
+import * as nostrTools from '@nostr/tools';
+import * as nip06 from '@nostr/tools/nip06';
 import {
   decryptUint8Array,
-  encryptionKey,
   encryptUint8Array,
-} from "./utils/encryption.ts";
+  encryptionKey,
+} from './utils/encryption.ts';
+import { getEveFilePath } from './utils/files.ts';
 
 export function isLocalhost(req: Request): boolean {
   const url = new URL(req.url);
   const hostname = url.hostname;
   return (
-    hostname === "127.0.0.1" || hostname === "::1" || hostname === "localhost"
+    hostname === '127.0.0.1' || hostname === '::1' || hostname === 'localhost'
   );
 }
 
@@ -39,11 +39,12 @@ export function randomTimeUpTo2DaysInThePast() {
 }
 
 export async function getCCNPubkey(): Promise<string> {
-  const ccnPubPath = await getEveFilePath("ccn.pub");
-  const seedPath = await getEveFilePath("ccn.seed");
+  const ccnPubPath = await getEveFilePath('ccn.pub');
+  const seedPath = await getEveFilePath('ccn.seed');
   const doWeHaveKey = await exists(ccnPubPath);
   if (doWeHaveKey) return Deno.readTextFileSync(ccnPubPath);
-  const ccnSeed = Deno.env.get("CCN_SEED") ||
+  const ccnSeed =
+    Deno.env.get('CCN_SEED') ||
     ((await exists(seedPath))
       ? Deno.readTextFileSync(seedPath)
       : nip06.generateSeedWords());
@@ -53,7 +54,7 @@ export async function getCCNPubkey(): Promise<string> {
 
   Deno.writeTextFileSync(ccnPubPath, ccnPublicKey);
   Deno.writeTextFileSync(
-    await getEveFilePath("ccn.priv"),
+    await getEveFilePath('ccn.priv'),
     encodeBase64(encryptedPrivateKey),
   );
   Deno.writeTextFileSync(seedPath, ccnSeed);
@@ -63,7 +64,7 @@ export async function getCCNPubkey(): Promise<string> {
 
 export async function getCCNPrivateKey(): Promise<Uint8Array> {
   const encryptedPrivateKey = Deno.readTextFileSync(
-    await getEveFilePath("ccn.priv"),
+    await getEveFilePath('ccn.priv'),
   );
   return decryptUint8Array(decodeBase64(encryptedPrivateKey), encryptionKey);
 }
@@ -77,21 +78,25 @@ export function isAddressableEvent(kind: number): boolean {
 }
 
 export function isRegularEvent(kind: number): boolean {
-  return (kind >= 1000 && kind < 10000) ||
+  return (
+    (kind >= 1000 && kind < 10000) ||
     (kind >= 4 && kind < 45) ||
     kind === 1 ||
-    kind === 2;
+    kind === 2
+  );
 }
 
 export function isCCNReplaceableEvent(kind: number): boolean {
-  return (kind >= 60000 && kind < 65536);
+  return kind >= 60000 && kind < 65536;
 }
 
-export function parseATagQuery(
-  aTagValue: string,
-): { kind: number; pubkey: string; dTag?: string } {
-  const parts = aTagValue.split(":");
-  if (parts.length < 2) return { kind: 0, pubkey: "" };
+export function parseATagQuery(aTagValue: string): {
+  kind: number;
+  pubkey: string;
+  dTag?: string;
+} {
+  const parts = aTagValue.split(':');
+  if (parts.length < 2) return { kind: 0, pubkey: '' };
 
   return {
     kind: Number.parseInt(parts[0], 10),
diff --git a/utils/encryption.ts b/utils/encryption.ts
index 9973c37..311b624 100644
--- a/utils/encryption.ts
+++ b/utils/encryption.ts
@@ -1,7 +1,7 @@
-import { xchacha20poly1305 } from "@noble/ciphers/chacha";
-import { managedNonce } from "@noble/ciphers/webcrypto";
-import { decodeBase64 } from "jsr:@std/encoding/base64";
-export const encryptionKey = decodeBase64(Deno.env.get("ENCRYPTION_KEY") || "");
+import { decodeBase64 } from 'jsr:@std/encoding/base64';
+import { xchacha20poly1305 } from '@noble/ciphers/chacha';
+import { managedNonce } from '@noble/ciphers/webcrypto';
+export const encryptionKey = decodeBase64(Deno.env.get('ENCRYPTION_KEY') || '');
 
 /**
  * Encrypts a given Uint8Array using the XChaCha20-Poly1305 algorithm.
diff --git a/utils/files.ts b/utils/files.ts
index a51b6f5..f1cb209 100644
--- a/utils/files.ts
+++ b/utils/files.ts
@@ -1,4 +1,4 @@
-import { exists } from "jsr:@std/fs";
+import { exists } from 'jsr:@std/fs';
 
 /**
  * Return the path to Eve's configuration directory and ensures its existence.
@@ -14,13 +14,11 @@ import { exists } from "jsr:@std/fs";
 
 export async function getEveConfigHome(): Promise<string> {
   let storagePath: string;
-  if (Deno.build.os === "darwin") {
-    storagePath = `${
-      Deno.env.get("HOME")
-    }/Library/Application Support/eve/arx/Eve`;
+  if (Deno.build.os === 'darwin') {
+    storagePath = `${Deno.env.get('HOME')}/Library/Application Support/eve/arx/Eve`;
   } else {
-    const xdgConfigHome = Deno.env.get("XDG_CONFIG_HOME") ??
-      `${Deno.env.get("HOME")}/.config`;
+    const xdgConfigHome =
+      Deno.env.get('XDG_CONFIG_HOME') ?? `${Deno.env.get('HOME')}/.config`;
     storagePath = `${xdgConfigHome}/arx/Eve`;
   }
   if (!(await exists(storagePath))) {
diff --git a/utils/logs.ts b/utils/logs.ts
index 0e32f93..3e5a58f 100644
--- a/utils/logs.ts
+++ b/utils/logs.ts
@@ -1,59 +1,59 @@
-import * as colors from "jsr:@std/fmt@^1.0.4/colors";
-import * as log from "jsr:@std/log";
-import { getEveFilePath } from "./files.ts";
-export * as log from "jsr:@std/log";
+import * as colors from 'jsr:@std/fmt@^1.0.4/colors';
+import * as log from 'jsr:@std/log';
+import { getEveFilePath } from './files.ts';
+export * as log from 'jsr:@std/log';
 
 export async function setupLogger() {
   const formatLevel = (level: number): string => {
     return (
       {
-        10: colors.gray("[DEBUG]"),
-        20: colors.green("[INFO] "),
-        30: colors.yellow("[WARN] "),
-        40: colors.red("[ERROR]"),
-        50: colors.bgRed("[FATAL]"),
+        10: colors.gray('[DEBUG]'),
+        20: colors.green('[INFO] '),
+        30: colors.yellow('[WARN] '),
+        40: colors.red('[ERROR]'),
+        50: colors.bgRed('[FATAL]'),
       }[level] || `[LVL${level}]`
     );
   };
 
   const levelName = (level: number): string => {
-    return {
-      10: "DEBUG",
-      20: "INFO",
-      30: "WARN",
-      40: "ERROR",
-      50: "FATAL",
-    }[level] || `LVL${level}`;
+    return (
+      {
+        10: 'DEBUG',
+        20: 'INFO',
+        30: 'WARN',
+        40: 'ERROR',
+        50: 'FATAL',
+      }[level] || `LVL${level}`
+    );
   };
 
   const formatArg = (arg: unknown): string => {
-    if (typeof arg === "object") return JSON.stringify(arg);
+    if (typeof arg === 'object') return JSON.stringify(arg);
     return String(arg);
   };
 
   await log.setup({
     handlers: {
-      console: new log.ConsoleHandler("DEBUG", {
+      console: new log.ConsoleHandler('DEBUG', {
         useColors: true,
         formatter: (record) => {
           const timestamp = new Date().toISOString();
-          let msg = `${colors.dim(`[${timestamp}]`)} ${
-            formatLevel(record.level)
-          } ${record.msg}`;
+          let msg = `${colors.dim(`[${timestamp}]`)} ${formatLevel(record.level)} ${record.msg}`;
 
           if (record.args.length > 0) {
             const args = record.args
               .map((arg, i) => `${colors.dim(`arg${i}:`)} ${formatArg(arg)}`)
-              .join(" ");
-            msg += ` ${colors.dim("|")} ${args}`;
+              .join(' ');
+            msg += ` ${colors.dim('|')} ${args}`;
           }
 
           return msg;
         },
       }),
-      file: new log.FileHandler("DEBUG", {
-        filename: Deno.env.get("LOG_FILE") ||
-          await getEveFilePath("eve-logs.jsonl"),
+      file: new log.FileHandler('DEBUG', {
+        filename:
+          Deno.env.get('LOG_FILE') || (await getEveFilePath('eve-logs.jsonl')),
         formatter: (record) => {
           const timestamp = new Date().toISOString();
           return JSON.stringify({
@@ -67,8 +67,8 @@ export async function setupLogger() {
     },
     loggers: {
       default: {
-        level: "DEBUG",
-        handlers: ["console", "file"],
+        level: 'DEBUG',
+        handlers: ['console', 'file'],
       },
     },
   });
diff --git a/utils/queries.ts b/utils/queries.ts
index ddf7ebb..aedfa5e 100644
--- a/utils/queries.ts
+++ b/utils/queries.ts
@@ -1,4 +1,4 @@
-import type { BindValue, Database } from "@db/sqlite";
+import type { BindValue, Database } from '@db/sqlite';
 
 /**
  * Construct a SQL query with placeholders for values.
@@ -23,8 +23,8 @@ export function sqlPartial(
 ) {
   return {
     query: segments.reduce(
-      (acc, str, i) => acc + str + (i < values.length ? "?" : ""),
-      "",
+      (acc, str, i) => acc + str + (i < values.length ? '?' : ''),
+      '',
     ),
     values: values,
   };
@@ -72,7 +72,7 @@ export function mixQuery(...queries: { query: string; values: BindValue[] }[]) {
       query: `${acc.query} ${query}`,
       values: [...acc.values, ...values],
     }),
-    { query: "", values: [] },
+    { query: '', values: [] },
   );
   return { query, values };
 }