From fa98e902100a5ef41bc5007c295ea86f82a57560 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=A2u=20Cao?= Date: Mon, 21 Oct 2024 15:17:14 +0200 Subject: [PATCH] MVP --- .env.sample | 5 ++ config.ts | 16 +++++ dates.ts | 13 ++++ deno.json | 5 +- deno.lock | 41 +++++++++++-- feeds.ts | 38 ++++++++++++ handlers/user-atom-feed.ts | 41 +++++++++++++ handlers/user-event.ts | 13 ++-- handlers/{username.ts => user-profile.ts} | 10 +++- html.ts | 72 +++++++++-------------- ldap.ts | 23 +++----- main.ts | 21 ++++++- models/article.ts | 34 ++++++++++- models/profile.ts | 54 +++++++++++++++++ nostr.ts | 13 +++- tests/dates_test.ts | 19 ++++++ tests/fixtures/article-1.json | 30 +++++++++- tests/fixtures/profile-1.json | 9 +++ tests/models/article_test.ts | 53 ++++++++++++++--- tests/models/profile_test.ts | 43 ++++++++++++++ 20 files changed, 462 insertions(+), 91 deletions(-) create mode 100644 .env.sample create mode 100644 config.ts create mode 100644 dates.ts create mode 100644 feeds.ts create mode 100644 handlers/user-atom-feed.ts rename handlers/{username.ts => user-profile.ts} (70%) create mode 100644 models/profile.ts create mode 100644 tests/dates_test.ts create mode 100644 tests/fixtures/profile-1.json create mode 100644 tests/models/profile_test.ts diff --git a/.env.sample b/.env.sample new file mode 100644 index 0000000..8c756bf --- /dev/null +++ b/.env.sample @@ -0,0 +1,5 @@ +HOME_RELAY_URL=wss://nostr.kosmos.org +LDAP_URL=ldap://10.1.1.116:389 +LDAP_BIND_DN=uid=service,ou=kosmos.org,cn=applications,dc=kosmos,dc=org +LDAP_PASSWORD=123456abcdef +LDAP_SEARCH_DN=ou=kosmos.org,cn=users,dc=kosmos,dc=org diff --git a/config.ts b/config.ts new file mode 100644 index 0000000..7bcb73f --- /dev/null +++ b/config.ts @@ -0,0 +1,16 @@ +import { load } from "@std/dotenv"; + +const dirname = new URL(".", import.meta.url).pathname; +await load({ envPath: `${dirname}/.env`, export: true }); + +const config = { + home_relay_url: Deno.env.get("HOME_RELAY_URL") || "", + ldap: { + url: Deno.env.get("LDAP_URL"), + bindDN: Deno.env.get("LDAP_BIND_DN"), + password: Deno.env.get("LDAP_PASSWORD"), + searchDN: Deno.env.get("LDAP_SEARCH_DN"), + } +}; + +export default config; diff --git a/dates.ts b/dates.ts new file mode 100644 index 0000000..d74dde4 --- /dev/null +++ b/dates.ts @@ -0,0 +1,13 @@ +export function localizeDate(timestamp: number) { + const date = new Date(timestamp * 1000); + return date.toLocaleDateString("en-US", { + year: "numeric", + month: "long", + day: "numeric", + }); +} + +export function isoDate(timestamp: number) { + const date = new Date(timestamp * 1000); + return date.toISOString(); +} diff --git a/deno.json b/deno.json index f06ec69..ef15fb7 100644 --- a/deno.json +++ b/deno.json @@ -1,14 +1,15 @@ { "tasks": { - "dev": "deno run --allow-net --allow-read --allow-env --deny-env --watch main.ts" + "dev": "deno run --allow-net --allow-read --allow-env --watch main.ts" }, "imports": { "@deno/gfm": "jsr:@deno/gfm@^0.9.0", "@nostr/tools": "jsr:@nostr/tools@^2.3.1", "@nostrify/nostrify": "jsr:@nostrify/nostrify@^0.36.1", "@oak/oak": "jsr:@oak/oak@^17.1.0", - "@std/assert": "jsr:@std/assert@1", "@std/dotenv": "jsr:@std/dotenv@^0.225.2", + "@std/expect": "jsr:@std/expect@^1.0.5", + "@std/testing": "jsr:@std/testing@^1.0.3", "ldapts": "npm:ldapts@^7.2.1" } } diff --git a/deno.lock b/deno.lock index 12a0e5f..0bfb5b2 100644 --- a/deno.lock +++ b/deno.lock @@ -9,18 +9,24 @@ "jsr:@oak/commons@1": "1.0.0", "jsr:@oak/oak@^17.1.0": "17.1.0", "jsr:@std/assert@1": "1.0.6", + "jsr:@std/assert@^1.0.6": "1.0.6", 
"jsr:@std/bytes@1": "1.0.2", "jsr:@std/bytes@^1.0.2": "1.0.2", "jsr:@std/crypto@1": "1.0.3", + "jsr:@std/data-structures@^1.0.4": "1.0.4", "jsr:@std/dotenv@~0.225.2": "0.225.2", "jsr:@std/encoding@1": "1.0.5", "jsr:@std/encoding@^1.0.5": "1.0.5", "jsr:@std/encoding@~0.224.1": "0.224.3", + "jsr:@std/expect@^1.0.5": "1.0.5", + "jsr:@std/fs@^1.0.4": "1.0.4", "jsr:@std/http@1": "1.0.8", "jsr:@std/internal@^1.0.4": "1.0.4", "jsr:@std/io@0.224": "0.224.9", "jsr:@std/media-types@1": "1.0.3", "jsr:@std/path@1": "1.0.6", + "jsr:@std/path@^1.0.6": "1.0.6", + "jsr:@std/testing@^1.0.3": "1.0.3", "npm:@noble/ciphers@~0.5.1": "0.5.3", "npm:@noble/curves@1.2.0": "1.2.0", "npm:@noble/hashes@1.3.1": "1.3.1", @@ -91,7 +97,7 @@ "@oak/commons@1.0.0": { "integrity": "49805b55603c3627a9d6235c0655aa2b6222d3036b3a13ff0380c16368f607ac", "dependencies": [ - "jsr:@std/assert", + "jsr:@std/assert@1", "jsr:@std/bytes@1", "jsr:@std/crypto", "jsr:@std/encoding@1", @@ -103,13 +109,13 @@ "integrity": "14ffb400c3c268bdc7b3a838664fab782b4ed35bb0dfe7669013c95bb12a9503", "dependencies": [ "jsr:@oak/commons", - "jsr:@std/assert", + "jsr:@std/assert@1", "jsr:@std/bytes@1", "jsr:@std/crypto", "jsr:@std/http", "jsr:@std/io", "jsr:@std/media-types", - "jsr:@std/path", + "jsr:@std/path@1", "npm:path-to-regexp@6.2.1" ] }, @@ -125,6 +131,9 @@ "@std/crypto@1.0.3": { "integrity": "a2a32f51ddef632d299e3879cd027c630dcd4d1d9a5285d6e6788072f4e51e7f" }, + "@std/data-structures@1.0.4": { + "integrity": "fa0e20c11eb9ba673417450915c750a0001405a784e2a4e0c3725031681684a0" + }, "@std/dotenv@0.225.2": { "integrity": "e2025dce4de6c7bca21dece8baddd4262b09d5187217e231b033e088e0c4dd23" }, @@ -134,6 +143,19 @@ "@std/encoding@1.0.5": { "integrity": "ecf363d4fc25bd85bd915ff6733a7e79b67e0e7806334af15f4645c569fefc04" }, + "@std/expect@1.0.5": { + "integrity": "8c7ac797e2ffe57becc6399c0f2fd06230cb9ef124d45229c6e592c563824af1", + "dependencies": [ + "jsr:@std/assert@^1.0.6", + "jsr:@std/internal" + ] + }, + "@std/fs@1.0.4": { + "integrity": "2907d32d8d1d9e540588fd5fe0ec21ee638134bd51df327ad4e443aaef07123c", + "dependencies": [ + "jsr:@std/path@^1.0.6" + ] + }, "@std/http@1.0.8": { "integrity": "6ea1b2e8d33929967754a3b6d6c6f399ad6647d7bbb5a466c1eaf9b294a6ebcd", "dependencies": [ @@ -154,6 +176,16 @@ }, "@std/path@1.0.6": { "integrity": "ab2c55f902b380cf28e0eec501b4906e4c1960d13f00e11cfbcd21de15f18fed" + }, + "@std/testing@1.0.3": { + "integrity": "f98c2bee53860a5916727d7e7d3abe920dd6f9edace022e2d059f00d05c2cf42", + "dependencies": [ + "jsr:@std/assert@^1.0.6", + "jsr:@std/data-structures", + "jsr:@std/fs", + "jsr:@std/internal", + "jsr:@std/path@^1.0.6" + ] } }, "npm": { @@ -455,8 +487,9 @@ "jsr:@nostr/tools@^2.3.1", "jsr:@nostrify/nostrify@~0.36.1", "jsr:@oak/oak@^17.1.0", - "jsr:@std/assert@1", "jsr:@std/dotenv@~0.225.2", + "jsr:@std/expect@^1.0.5", + "jsr:@std/testing@^1.0.3", "npm:ldapts@^7.2.1" ] } diff --git a/feeds.ts b/feeds.ts new file mode 100644 index 0000000..63e9759 --- /dev/null +++ b/feeds.ts @@ -0,0 +1,38 @@ +import Article from "./models/article.ts"; +import Profile from "./models/profile.ts"; +import { isoDate } from "./dates.ts"; + +export function profileAtomFeed(profile: Profile, articles: Article[]) { + const feedId = `tag:${profile.nip05},nostr-p-${profile.pubkey}-k-30023`; + const lastUpdate = articles.sort((a, b) => b.updatedAt - a.updatedAt)[0] + ?.updatedAt; + + const articlesXml = articles.map((article) => { + const articleId = + `tag:${profile.nip05},nostr-d-${article.identifier}-k-30023`; + return ` + + ${articleId} + 
${article.title}
+      
+      ${isoDate(article.updatedAt)}
+      ${article.summary}
+      ${article.html}
+      
+    `;
+  }).join("\n");
+
+  return `
+    
+    
+      ${profile.name} on Nostr
+      ${feedId}
+      ${isoDate(lastUpdate)}
+      ${profile.picture}
+      
+      ${profile.name}
+      
+      ${articlesXml}
+    
+  `.trim();
+}
diff --git a/handlers/user-atom-feed.ts b/handlers/user-atom-feed.ts
new file mode 100644
index 0000000..77073b1
--- /dev/null
+++ b/handlers/user-atom-feed.ts
@@ -0,0 +1,41 @@
+import { Context } from "@oak/oak";
+import { log } from "../log.ts";
+import { lookupPubkeyByUsername } from "../ldap.ts";
+import { fetchArticlesByAuthor, fetchProfileEvent } from "../nostr.ts";
+import { profileAtomFeed } from "../feeds.ts";
+import Article from "../models/article.ts";
+import Profile from "../models/profile.ts";
+
+const userAtomFeedHandler = async function (ctx: Context) {
+  const username = ctx.params.user.replace(/^(@|~)/, "");
+  const pubkey = await lookupPubkeyByUsername(username);
+
+  if (!pubkey) {
+    ctx.response.status = 404;
+    ctx.response.body = "Not Found";
+    return;
+  }
+
+  try {
+    const profileEvent = await fetchProfileEvent(pubkey);
+    const profile = new Profile(profileEvent, username);
+
+    if (profileEvent && profile.nip05) {
+      const articleEvents = await fetchArticlesByAuthor(pubkey);
+      const articles = articleEvents.map((a) => new Article(a));
+      const atom = profileAtomFeed(profile, articles);
+
+      ctx.response.headers.set("Content-Type", "application/atom+xml");
+      ctx.response.body = atom;
+    } else {
+      ctx.response.status = 404;
+      ctx.response.body = "Not Found";
+    }
+  } catch (e) {
+    log(e, "yellow");
+    ctx.response.status = 404;
+    ctx.response.body = "Not Found";
+  }
+};
+
+export default userAtomFeedHandler;
diff --git a/handlers/user-event.ts b/handlers/user-event.ts
index b4675b9..d4cb7d1 100644
--- a/handlers/user-event.ts
+++ b/handlers/user-event.ts
@@ -1,10 +1,9 @@
 import { Context } from "@oak/oak";
 import { log } from "../log.ts";
 import { lookupPubkeyByUsername } from "../ldap.ts";
-import {
-  fetchProfileEvent,
-  fetchReplaceableEvent,
-} from "../nostr.ts";
+import { fetchProfileEvent, fetchReplaceableEvent } from "../nostr.ts";
+import Article from "../models/article.ts";
+import Profile from "../models/profile.ts";
 import { articleHtml } from "../html.ts";
 
 const userEventHandler = async function (ctx: Context) {
@@ -24,11 +23,11 @@ const userEventHandler = async function (ctx: Context) {
     identifier,
   );
   const profileEvent = await fetchProfileEvent(pubkey);
-  let profile;
 
   if (articleEvent && profileEvent) {
-    profile = JSON.parse(profileEvent.content);
-    const html = articleHtml(articleEvent, profile);
+    const article = new Article(articleEvent);
+    const profile = new Profile(profileEvent, username);
+    const html = articleHtml(article, profile);
 
     ctx.response.body = html;
   } else {
diff --git a/handlers/username.ts b/handlers/user-profile.ts
similarity index 70%
rename from handlers/username.ts
rename to handlers/user-profile.ts
index 326a419..732d720 100644
--- a/handlers/username.ts
+++ b/handlers/user-profile.ts
@@ -2,9 +2,11 @@ import { Context } from "@oak/oak";
 import { log } from "../log.ts";
 import { lookupPubkeyByUsername } from "../ldap.ts";
 import { fetchArticlesByAuthor, fetchProfileEvent } from "../nostr.ts";
+import Article from "../models/article.ts";
+import Profile from "../models/profile.ts";
 import { profilePageHtml } from "../html.ts";
 
-const usernameHandler = async function (ctx: Context) {
+const userProfileHandler = async function (ctx: Context) {
   const username =
ctx.params.path.replace(/^(@|~)/, ""); const pubkey = await lookupPubkeyByUsername(username); @@ -18,8 +20,10 @@ const usernameHandler = async function (ctx: Context) { const profileEvent = await fetchProfileEvent(pubkey); if (profileEvent) { + const profile = new Profile(profileEvent, username); const articleEvents = await fetchArticlesByAuthor(pubkey); - const html = profilePageHtml(profileEvent, articleEvents); + const articles = articleEvents.map((a) => new Article(a)); + const html = profilePageHtml(profile, articles); ctx.response.body = html; } else { @@ -33,4 +37,4 @@ const usernameHandler = async function (ctx: Context) { } }; -export default usernameHandler; +export default userProfileHandler; diff --git a/html.ts b/html.ts index 082dcc8..048f70f 100644 --- a/html.ts +++ b/html.ts @@ -1,7 +1,8 @@ -import { render as renderMarkdown } from "@deno/gfm"; -import { nip19 } from "@nostr/tools"; +import { localizeDate } from "./dates.ts"; +import Article from "./models/article.ts"; +import Profile from "./models/profile.ts"; -export function htmlLayout(title: string, body: string) { +export function htmlLayout(title: string, body: string, profile: Profile) { return ` @@ -10,6 +11,7 @@ export function htmlLayout(title: string, body: string) { ${title} + @@ -112,6 +114,11 @@ export function htmlLayout(title: string, body: string) { color: #888; } + p.meta .name a { + color: #3b3a38; + text-decoration: none; + } + .article-list .item { margin-bottom: 3rem; } @@ -128,66 +135,45 @@ export function htmlLayout(title: string, body: string) { `; } -export function articleHtml(articleEvent: object, profile: object) { - const titleTag = articleEvent.tags.find((t) => t[0] === "title"); - const title = titleTag ? titleTag[1] : "Untitled"; - const content = renderMarkdown(articleEvent.content); - const date = new Date(articleEvent.created_at * 1000); - const formattedDate = date.toLocaleDateString("en-US", { - year: "numeric", - month: "long", - day: "numeric", - }); +export function articleHtml(article: Article, profile: Profile) { + const publishedAtFormatted = localizeDate(article.publishedAt); const body = `
-

${title}

+

${article.title}

User Avatar - ${profile.name} - ${formattedDate} + ${profile.name} + ${publishedAtFormatted}

- ${content} + ${article.html}
`; - return htmlLayout(title, body); + return htmlLayout(article.title, body, profile); } -function articleListItemHtml(articleEvent: object) { - const identifier = articleEvent.tags.find((t) => t[0] === "d")[1]; - const naddr = nip19.naddrEncode({ - identifier: identifier, - pubkey: articleEvent.pubkey, - kind: articleEvent.kind, - }); - const titleTag = articleEvent.tags.find((t) => t[0] === "title"); - const title = titleTag ? titleTag[1] : "Untitled"; - const date = new Date(articleEvent.created_at * 1000); - const formattedDate = date.toLocaleDateString("en-US", { - year: "numeric", - month: "long", - day: "numeric", - }); +function articleListItemHtml(article: Article) { + const formattedDate = localizeDate(article.publishedAt); return `
-

${title}

+

${article.title}

${formattedDate}

`; } -export function articleListHtml(articleEvents: object[]) { - if (articleEvents.length === 0) return ""; +export function articleListHtml(articles: Article[]) { + if (articles.length === 0) return ""; let html = ""; - for (const articleEvent of articleEvents) { - html += articleListItemHtml(articleEvent); + for (const article of articles) { + html += articleListItemHtml(article); } return ` @@ -198,10 +184,8 @@ export function articleListHtml(articleEvents: object[]) { `; } -export function profilePageHtml(profileEvent: object, articleEvents: object[]) { - const profile = JSON.parse(profileEvent.content); - const name = profile.name || "Anonymous"; - const title = `${name} on Nostr`; +export function profilePageHtml(profile: Profile, articles: Article[]) { + const title = `${profile.name} on Nostr`; const body = `
@@ -214,9 +198,9 @@ export function profilePageHtml(profileEvent: object, articleEvents: object[]) {

- ${articleListHtml(articleEvents)} + ${articleListHtml(articles)}
`;
 
-  return htmlLayout(title, body);
+  return htmlLayout(title, body, profile);
 }
diff --git a/ldap.ts b/ldap.ts
index a9db92e..8f59bd9 100644
--- a/ldap.ts
+++ b/ldap.ts
@@ -1,26 +1,17 @@
-import { load } from "@std/dotenv";
 import { Client } from "ldapts";
 import { log } from "./log.ts";
+import config from "./config.ts";
 
-const dirname = new URL(".", import.meta.url).pathname;
-await load({ envPath: `${dirname}/.env`, export: true });
-
-const config = {
-  url: Deno.env.get("LDAP_URL"),
-  bindDN: Deno.env.get("LDAP_BIND_DN"),
-  password: Deno.env.get("LDAP_PASSWORD"),
-  searchDN: Deno.env.get("LDAP_SEARCH_DN"),
-};
-
-const client = new Client({ url: config.url });
+const { ldap } = config;
+const client = new Client({ url: ldap.url });
 
 export async function lookupPubkeyByUsername(username: string) {
   let pubkey;
 
   try {
-    await client.bind(config.bindDN, config.password);
+    await client.bind(ldap.bindDN, ldap.password);
 
-    const { searchEntries } = await client.search(config.searchDN, {
+    const { searchEntries } = await client.search(ldap.searchDN, {
       filter: `(cn=${username})`,
       attributes: ["nostrKey"],
     });
@@ -39,9 +30,9 @@ export async function lookupUsernameByPubkey(pubkey: string) {
   let username;
 
   try {
-    await client.bind(config.bindDN, config.password);
+    await client.bind(ldap.bindDN, ldap.password);
 
-    const { searchEntries } = await client.search(config.searchDN, {
+    const { searchEntries } = await client.search(ldap.searchDN, {
       filter: `(nostrKey=${pubkey})`,
       attributes: ["cn"],
     });
diff --git a/main.ts b/main.ts
index 140f12d..c069280 100644
--- a/main.ts
+++ b/main.ts
@@ -3,8 +3,9 @@ import { log } from "./log.ts";
 import naddrHandler from "./handlers/naddr.ts";
 import nprofileHandler from "./handlers/nprofile.ts";
 import npubHandler from "./handlers/npub.ts";
-import usernameHandler from "./handlers/username.ts";
+import userProfileHandler from "./handlers/user-profile.ts";
 import userEventHandler from "./handlers/user-event.ts";
+import userAtomFeedHandler from "./handlers/user-atom-feed.ts";
 
 const router = new Router();
 
@@ -18,7 +19,23 @@ router.get("/:path", async (ctx: ctx) => {
   } else if (path.startsWith("npub")) {
     await npubHandler(ctx);
   } else if (path.startsWith("@") || path.startsWith("~")) {
-    await usernameHandler(ctx);
+    await userProfileHandler(ctx);
   } else {
     ctx.response.status = 404;
     ctx.response.body = "Not Found";
+  }
+
+  log(
+    `${ctx.request.method} ${ctx.request.url} - ${ctx.response.status}`,
+    "gray",
+  );
+});
+
+router.get("/:user/:kind.atom", async (ctx: ctx) => {
+  const { user, kind } = ctx.params;
+
+  if ((user.startsWith("@") || user.startsWith("~")) && kind === "articles") {
+    await userAtomFeedHandler(ctx);
+  } else {
+    ctx.response.status = 404;
+    ctx.response.body = "Not Found";
diff --git a/models/article.ts b/models/article.ts
index 03e1b70..d24b539 100644
--- a/models/article.ts
+++ b/models/article.ts
@@ -1,19 +1,47 @@
+import { nip19 } from "@nostr/tools";
 import { NEvent } from "../nostr.ts";
 import { render as renderMarkdown } from "@deno/gfm";
 
 export default class Article {
-  private event: NEvent;
+  event: NEvent;
 
   constructor(event: NEvent) {
     this.event = event;
   }
 
-  get identifier(): string | null {
+  get identifier(): string {
     const tag = this.event.tags.find((t) => t[0] === "d");
-    return tag ? tag[1] : null;
+    return tag ? tag[1] : "";
+  }
+
+  get title(): string {
+    const tag = this.event.tags.find((t) => t[0] === "title");
+    return tag ?
tag[1] : "Untitled";
+  }
+
+  get summary(): string {
+    const tag = this.event.tags.find((t) => t[0] === "summary");
+    return tag ? tag[1] : "";
+  }
+
+  get publishedAt(): number {
+    const tag = this.event.tags.find((t) => t[0] === "published_at");
+    return tag ? parseInt(tag[1]) : this.event.created_at;
+  }
+
+  get updatedAt(): number {
+    return this.event.created_at;
+  }
 
   get html(): string {
     return renderMarkdown(this.event.content);
   }
+
+  get naddr(): string {
+    return nip19.naddrEncode({
+      identifier: this.identifier,
+      pubkey: this.event.pubkey,
+      kind: this.event.kind,
+    });
+  }
 }
diff --git a/models/profile.ts b/models/profile.ts
new file mode 100644
index 0000000..3345b37
--- /dev/null
+++ b/models/profile.ts
@@ -0,0 +1,54 @@
+import { nip19 } from "@nostr/tools";
+import { NEvent } from "../nostr.ts";
+
+export interface ProfileData {
+  name: string;
+  about?: string;
+  picture?: string;
+  nip05?: string;
+  lud16?: string;
+}
+
+export default class Profile {
+  event: NEvent;
+  private data: ProfileData;
+  username?: string;
+
+  constructor(event: NEvent, username?: string) {
+    this.event = event;
+    this.data = JSON.parse(event.content);
+    this.username = username;
+  }
+
+  get updatedAt(): number {
+    return this.event.created_at;
+  }
+
+  get name(): string {
+    return this.data.name || "Anonymous";
+  }
+
+  get about(): string {
+    return this.data.about || "";
+  }
+
+  get picture(): string | undefined {
+    return this.data.picture;
+  }
+
+  get nip05(): string | undefined {
+    return this.data.nip05;
+  }
+
+  get lud16(): string | undefined {
+    return this.data.lud16;
+  }
+
+  get pubkey(): string {
+    return this.event.pubkey;
+  }
+
+  get npub(): string {
+    return nip19.npubEncode(this.pubkey);
+  }
+}
diff --git a/nostr.ts b/nostr.ts
index 5d7c330..fe60f12 100644
--- a/nostr.ts
+++ b/nostr.ts
@@ -1,6 +1,17 @@
 import { NRelay1 } from "@nostrify/nostrify";
+import config from "./config.ts";
 
-export const relay = new NRelay1("wss://nostr.kosmos.org");
+export interface NEvent {
+  content: string;
+  created_at: number;
+  id: string;
+  kind: number;
+  pubkey: string;
+  sig: string;
+  tags: Array<[string, string, string?]>;
+}
+
+export const relay = new NRelay1(config.home_relay_url);
 
 export async function fetchReplaceableEvent(
   pubkey: string,
diff --git a/tests/dates_test.ts b/tests/dates_test.ts
new file mode 100644
index 0000000..2ba85b2
--- /dev/null
+++ b/tests/dates_test.ts
@@ -0,0 +1,19 @@
+import { describe, it } from "@std/testing/bdd";
+import { expect } from "@std/expect";
+import { isoDate, localizeDate } from "../dates.ts";
+
+describe("Dates", () => {
+  describe("#localizeDate", () => {
+    it("returns a human-readable date for timestamp", () => {
+      const date = localizeDate(1726402055);
+      expect(date).toEqual("September 15, 2024");
+    });
+  });
+
+  describe("#isoDate", () => {
+    it("returns an ISO 8601 date for the timestamp", () => {
+      const date = isoDate(1726402055);
+      expect(date).toEqual("2024-09-15T12:07:35.000Z");
+    });
+  });
+});
diff --git a/tests/fixtures/article-1.json b/tests/fixtures/article-1.json
index 208d0d8..412ee0e 100644
--- a/tests/fixtures/article-1.json
+++ b/tests/fixtures/article-1.json
@@ -1 +1,29 @@
-{"content":"This week, it finally happened: I still had a Lightning channel open with a node that hadn't been online for the better part of a year now, so I decided to close the channel unilaterally. 
But force-closing a channel means you have to broadcast the latest commitment transaction, the pre-set fee of which was only ~1 sat/vB for this one.\n\nWith LND, if the channel is created as an [anchor channel](https://lightning.engineering/posts/2021-01-28-lnd-v0.12/) (by default only since version 0.12), then the commitment transaction contains small extra outputs (currently 330 sats), which let either channel partner spend one of them into a child transaction that can be created with higher fees to pay for the parent transaction (CPFP). LND even has a built-in command for that: `lncli wallet bumpclosefee`\n\nHowever, this channel was created in the old-school way, and was thus stuck with its low fee. In fact, even the local bitcoin node refused to accept the transaction into its own mempool, so the bitcoin p2p network didn't even know it existed. So how do we get out of this pickle?\n\n## The solution\n\nEnter the [mempool.space Accelerator](https://mempool.space/accelerator). It is essentially an automated way to create agreements with various mining pools to mine your low-fee transaction in exchange for an out-of-band payment. Mempool.space coordinates these agreements and out-of-band payments with miners and gets a share from the overall fee for that.\n\nNow, if you're in the same situation as I was, you might search for the ID of your closing transaction and find that mempool.space cannot find it. Remember how the local bitcoin node (with mostly default settings) didn't accept it in the first place?\n\n### 1. Get the transaction to be broadcast\n\nIn your `bitcoin.conf`, add the following line:\n\n minrelaytxfee=0\n\nThis sets the minimum fee to 0, meaning it will accept and broadcast your transactions, no matter how low the fee is. Restart `bitcoind` and wait a little bit. LND will retry broadcasting the closing transaction every minute or so until it succeeds. At some point you should be able to find it on mempool.space.\n\n### 2. Use the Accelerator to confirm it\n\nOnce you can see the transaction on [mempool.space](https://mempool.space), you can just click the \"Accelerate\" button next to the ETA. This will bring you to a page that shows you the estimated share of miners that will include your transaction in their blocks, as well as some acceleration fee options for various transaction fee levels, which you can pay for via the Lightning Network, of course.\n\nIf you haven't looked into this service before (which I had), then the fees might be a bit of a surprise to you. This thing is **not** cheap! Bumping my fee from 1 sat/vB to ~9 sats/vB cost a whopping 51,500 sats (~31 USD that day). Bumping it higher only seemed to add the difference in the transaction fee itself, so the service seems to have cost a flat 50K sats at the time.\n\nUnfortunately, this channel wasn't particularly large, so the acceleration fee amounted to ~9% of my remaining channel balance. But 91% of something is better than 100% of nothing, so I actually felt pretty good about it.\n\nNext, you will see something like this:\n\n![Screenshot of an accelerated transaction on mempool.space](https://image.nostr.build/76151cc2ae06a93a8fcd97102bf4fa63541f8f3bd19800b96ff1070c9450945c.png)\n\nTime to lean back and let the miners work for you. In my case, the ETA was eerily precise. It told me that it would take ~56 minutes to confirm the transaction, and almost exactly an hour later it was mined.\n\n### 3. Wait\n\nNow that our transaction is confirmed, our channel is not closed immediately, of course. 
The [time lock of the HTLC](https://docs.lightning.engineering/the-lightning-network/multihop-payments/hash-time-lock-contract-htlc) protects our channel partner from us broadcasting an old channel state in which our balance might be higher than in the latest state.\n\nIn my case, it was set to 144 blocks, i.e. ~24 hours. So I checked back the next day, et voilá: channel closed and balance restored. 🥳","created_at":1729462158,"id":"b45714c3965549c11dde7228071313bfc53a4df1896d939fb767ed0b6fcdab3a","kind":30023,"pubkey":"1f79058c77a224e5be226c8f024cacdad4d741855d75ed9f11473ba8eb86e1cb","sig":"23d929aa05921f5b10cc4ca1d05cd99b9d595888a94c394ffa1850b97c39b1b3d3d7b7472df959f2675b9f936b3334a55073c8931c52cdcfea18c19cdb9cdcb6","tags":[["d","1726396758485"],["title","How to confirm a stuck Lightning channel closing transaction with the mempool.space Accelerator"],["summary",""],["published_at","1726402055"],["published_at","1726402055"],["alt","This is a long form article, you can read it in https://habla.news/a/naddr1qvzqqqr4gupzq8meqkx80g3yuklzymy0qfx2ekk56aqc2ht4ak03z3em4r4cdcwtqqxnzdejxcenjd3hx5urgwp4676hkz"],["published_at","1726402055"],["published_at","1726402055"],["published_at","1726402055"],["t","lightning"],["t","lightning network"],["t","howto"],["published_at","1726402055"]]} +{ + "content": "This week, it finally happened: I still had a Lightning channel open with a node that hadn't been online for the better part of a year now, so I decided to close the channel unilaterally. But force-closing a channel means you have to broadcast the latest commitment transaction, the pre-set fee of which was only ~1 sat/vB for this one.\n\nWith LND, if the channel is created as an [anchor channel](https://lightning.engineering/posts/2021-01-28-lnd-v0.12/) (by default only since version 0.12), then the commitment transaction contains small extra outputs (currently 330 sats), which let either channel partner spend one of them into a child transaction that can be created with higher fees to pay for the parent transaction (CPFP). LND even has a built-in command for that: `lncli wallet bumpclosefee`\n\nHowever, this channel was created in the old-school way, and was thus stuck with its low fee. In fact, even the local bitcoin node refused to accept the transaction into its own mempool, so the bitcoin p2p network didn't even know it existed. So how do we get out of this pickle?\n\n## The solution\n\nEnter the [mempool.space Accelerator](https://mempool.space/accelerator). It is essentially an automated way to create agreements with various mining pools to mine your low-fee transaction in exchange for an out-of-band payment. Mempool.space coordinates these agreements and out-of-band payments with miners and gets a share from the overall fee for that.\n\nNow, if you're in the same situation as I was, you might search for the ID of your closing transaction and find that mempool.space cannot find it. Remember how the local bitcoin node (with mostly default settings) didn't accept it in the first place?\n\n### 1. Get the transaction to be broadcast\n\nIn your `bitcoin.conf`, add the following line:\n\n minrelaytxfee=0\n\nThis sets the minimum fee to 0, meaning it will accept and broadcast your transactions, no matter how low the fee is. Restart `bitcoind` and wait a little bit. LND will retry broadcasting the closing transaction every minute or so until it succeeds. At some point you should be able to find it on mempool.space.\n\n### 2. 
Use the Accelerator to confirm it\n\nOnce you can see the transaction on [mempool.space](https://mempool.space), you can just click the \"Accelerate\" button next to the ETA. This will bring you to a page that shows you the estimated share of miners that will include your transaction in their blocks, as well as some acceleration fee options for various transaction fee levels, which you can pay for via the Lightning Network, of course.\n\nIf you haven't looked into this service before (which I had), then the fees might be a bit of a surprise to you. This thing is **not** cheap! Bumping my fee from 1 sat/vB to ~9 sats/vB cost a whopping 51,500 sats (~31 USD that day). Bumping it higher only seemed to add the difference in the transaction fee itself, so the service seems to have cost a flat 50K sats at the time.\n\nUnfortunately, this channel wasn't particularly large, so the acceleration fee amounted to ~9% of my remaining channel balance. But 91% of something is better than 100% of nothing, so I actually felt pretty good about it.\n\nNext, you will see something like this:\n\n![Screenshot of an accelerated transaction on mempool.space](https://image.nostr.build/76151cc2ae06a93a8fcd97102bf4fa63541f8f3bd19800b96ff1070c9450945c.png)\n\nTime to lean back and let the miners work for you. In my case, the ETA was eerily precise. It told me that it would take ~56 minutes to confirm the transaction, and almost exactly an hour later it was mined.\n\n### 3. Wait\n\nNow that our transaction is confirmed, our channel is not closed immediately, of course. The [time lock of the HTLC](https://docs.lightning.engineering/the-lightning-network/multihop-payments/hash-time-lock-contract-htlc) protects our channel partner from us broadcasting an old channel state in which our balance might be higher than in the latest state.\n\nIn my case, it was set to 144 blocks, i.e. ~24 hours. So I checked back the next day, et voilá: channel closed and balance restored. 🥳", + "created_at": 1729462158, + "id": "b45714c3965549c11dde7228071313bfc53a4df1896d939fb767ed0b6fcdab3a", + "kind": 30023, + "pubkey": "1f79058c77a224e5be226c8f024cacdad4d741855d75ed9f11473ba8eb86e1cb", + "sig": "23d929aa05921f5b10cc4ca1d05cd99b9d595888a94c394ffa1850b97c39b1b3d3d7b7472df959f2675b9f936b3334a55073c8931c52cdcfea18c19cdb9cdcb6", + "tags": [ + ["d", "1726396758485"], + [ + "title", + "How to confirm a stuck Lightning channel closing transaction with the mempool.space Accelerator" + ], + ["summary", ""], + ["published_at", "1726402055"], + ["published_at", "1726402055"], + [ + "alt", + "This is a long form article, you can read it in https://habla.news/a/naddr1qvzqqqr4gupzq8meqkx80g3yuklzymy0qfx2ekk56aqc2ht4ak03z3em4r4cdcwtqqxnzdejxcenjd3hx5urgwp4676hkz" + ], + ["published_at", "1726402055"], + ["published_at", "1726402055"], + ["published_at", "1726402055"], + ["t", "lightning"], + ["t", "lightning network"], + ["t", "howto"], + ["published_at", "1726402055"] + ] +} diff --git a/tests/fixtures/profile-1.json b/tests/fixtures/profile-1.json new file mode 100644 index 0000000..d63a0f0 --- /dev/null +++ b/tests/fixtures/profile-1.json @@ -0,0 +1,9 @@ +{ + "content": "{\"name\":\"Râu Cao ⚡\",\"nip05\":\"raucao@kosmos.org\",\"about\":\"Traveling full-time since 2010. Working on open-source software daily. 
Currently integrating Nostr features into Kosmos accounts.\",\"picture\":\"https://storage.kosmos.org/raucao/public/shares/240604-1441-fuerte-256.png\",\"lud16\":\"raucao@kosmos.org\",\"banner\":\"https://storage.kosmos.org/raucao/public/shares/240604-1517-1500x500.jpg\"}",
+  "created_at": 1728814592,
+  "id": "d437964cdd87f4b5bd595f47c2c64f4ba02c849ca215ed56fcb5fd3335ae2720",
+  "kind": 0,
+  "pubkey": "1f79058c77a224e5be226c8f024cacdad4d741855d75ed9f11473ba8eb86e1cb",
+  "sig": "ee067f88344fa8380a16154b7d988087c41d6c87ae720dd52947a38e63232ab6998de37f28e8c7115a0604fc184035af543ad354ed7b616b8ba29974653042cc",
+  "tags": []
+}
diff --git a/tests/models/article_test.ts b/tests/models/article_test.ts
index 7d11689..554dcb3 100644
--- a/tests/models/article_test.ts
+++ b/tests/models/article_test.ts
@@ -1,19 +1,16 @@
-import {
-  beforeAll,
-  beforeEach,
-  describe,
-  it,
-} from "@std/testing/bdd";
+import { beforeAll, describe, it } from "@std/testing/bdd";
 import { expect } from "@std/expect";
 import { NEvent } from "../../nostr.ts";
 import Article from "../../models/article.ts";
 
-describe("User", () => {
+describe("Article", () => {
   let articleEvent: NEvent;
   let article: Article;
 
   beforeAll(() => {
-    articleEvent = JSON.parse(Deno.readTextFileSync("tests/fixtures/article-1.json"));
+    articleEvent = JSON.parse(
+      Deno.readTextFileSync("tests/fixtures/article-1.json"),
+    );
     article = new Article(articleEvent);
   });
 
@@ -22,4 +19,44 @@ describe("User", () => {
       expect(article.identifier).toEqual("1726396758485");
     });
   });
+
+  describe("#title", () => {
+    it("returns the content of the 'title' tag", () => {
+      expect(article.title).toMatch(
+        /How to confirm a stuck Lightning channel closing transaction/,
+      );
+    });
+  });
+
+  describe("#summary", () => {
+    it("returns the content of the 'summary' tag", () => {
+      expect(article.summary).toEqual("");
+    });
+  });
+
+  describe("#publishedAt", () => {
+    it("returns the value of the first 'published_at' tag", () => {
+      expect(article.publishedAt).toEqual(1726402055);
+    });
+  });
+
+  describe("#updatedAt", () => {
+    it("returns the value of the event's 'created_at'", () => {
+      expect(article.updatedAt).toEqual(1729462158);
+    });
+  });
+
+  describe("#html", () => {
+    it("returns a rendered HTML version of the 'content'", () => {
+      expect(article.html).toMatch(/

/); + }); + }); + + describe("#naddr", () => { + it("returns bech32 addressable event ID", () => { + expect(article.naddr).toEqual( + "naddr1qvzqqqr4gupzq8meqkx80g3yuklzymy0qfx2ekk56aqc2ht4ak03z3em4r4cdcwtqqxnzdejxcenjd3hx5urgwp4676hkz", + ); + }); + }); }); diff --git a/tests/models/profile_test.ts b/tests/models/profile_test.ts new file mode 100644 index 0000000..a4cd09a --- /dev/null +++ b/tests/models/profile_test.ts @@ -0,0 +1,43 @@ +import { beforeAll, describe, it } from "@std/testing/bdd"; +import { expect } from "@std/expect"; +import { NEvent } from "../../nostr.ts"; +import Profile from "../../models/profile.ts"; + +describe("Profile", () => { + let profileEvent: NEvent; + let profile: Profile; + + beforeAll(() => { + profileEvent = JSON.parse( + Deno.readTextFileSync("tests/fixtures/profile-1.json"), + ); + profile = new Profile(profileEvent); + }); + + describe("constructor", () => { + it("instantiates the username when given", () => { + profile = new Profile(profileEvent, "raucao"); + expect(profile.username).toEqual("raucao"); + }); + }); + + describe("#updatedAt", () => { + it("returns the value of the profile event's 'created_at'", () => { + expect(profile.updatedAt).toEqual(1728814592); + }); + }); + + describe("#name", () => { + it("returns the profile's name when present", () => { + expect(profile.name).toEqual("Râu Cao ⚡"); + }); + }); + + describe("#npub", () => { + it("returns the bech32-encoded version of the pubkey", () => { + expect(profile.npub).toEqual( + "npub1raustrrh5gjwt03zdj8syn9vmt2dwsv9t467m8c3gua636uxu89svgdees", + ); + }); + }); +});