Commit version 0.1.0

- Completed the basic bookmark functionality and the plugin
2026-01-21 23:09:33 +08:00
parent 3e2d1456eb
commit 1a3bbac9ff
95 changed files with 12431 additions and 12445 deletions

View File

@@ -1,12 +1,12 @@
export default [
{
files: ["**/*.js"],
languageOptions: {
ecmaVersion: 2024,
sourceType: "module"
},
rules: {
"no-unused-vars": ["error", { "argsIgnorePattern": "^_" }]
}
}
];

View File

@@ -1,44 +1,44 @@
create extension if not exists pgcrypto;
create table if not exists users (
id uuid primary key default gen_random_uuid(),
email text not null unique,
password_hash text not null,
role text not null default 'user',
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create table if not exists bookmark_folders (
id uuid primary key default gen_random_uuid(),
user_id uuid not null references users(id) on delete cascade,
parent_id uuid null references bookmark_folders(id) on delete cascade,
name text not null,
visibility text not null default 'private',
sort_order integer not null default 0,
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create index if not exists idx_bookmark_folders_user_parent on bookmark_folders (user_id, parent_id);
create table if not exists bookmarks (
id uuid primary key default gen_random_uuid(),
user_id uuid not null references users(id) on delete cascade,
folder_id uuid null references bookmark_folders(id) on delete set null,
sort_order integer not null default 0,
title text not null,
url text not null,
url_normalized text not null,
url_hash text not null,
visibility text not null default 'private',
source text not null default 'manual',
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
deleted_at timestamptz null
);
create index if not exists idx_bookmarks_user_updated_at on bookmarks (user_id, updated_at);
create index if not exists idx_bookmarks_user_folder_sort on bookmarks (user_id, folder_id, sort_order);
create index if not exists idx_bookmarks_user_url_hash on bookmarks (user_id, url_hash);
create index if not exists idx_bookmarks_visibility on bookmarks (visibility);

View File

@@ -1,5 +1,5 @@
alter table if exists bookmark_folders
add column if not exists sort_order integer not null default 0;
create index if not exists idx_bookmark_folders_user_parent_sort
on bookmark_folders (user_id, parent_id, sort_order);

View File

@@ -1,5 +1,5 @@
alter table if exists bookmarks
add column if not exists sort_order integer not null default 0;
create index if not exists idx_bookmarks_user_folder_sort
on bookmarks (user_id, folder_id, sort_order);

View File

@@ -1,33 +1,33 @@
{
"name": "@browser-bookmark/server",
"private": true,
"type": "module",
"version": "0.1.0",
"main": "src/index.js",
"scripts": {
"dev": "node --watch src/index.js",
"build": "node -c src/index.js && node -c src/routes/auth.routes.js && node -c src/routes/bookmarks.routes.js && node -c src/routes/folders.routes.js && node -c src/routes/importExport.routes.js && node -c src/routes/sync.routes.js",
"test": "node --test",
"lint": "eslint .",
"db:migrate": "node src/migrate.js",
"db:reset": "node src/resetDb.js"
},
"dependencies": {
- "@browser-bookmark/shared": "0.1.0",
+ "@browser-bookmark/shared": "file:../../packages/shared",
"@fastify/cors": "^11.2.0",
"@fastify/jwt": "^10.0.0",
"@fastify/multipart": "^9.3.0",
"bcryptjs": "^3.0.3",
"cheerio": "^1.1.2",
"dotenv": "^16.4.7",
"fastify": "^5.2.1",
"jsonwebtoken": "^9.0.3",
"pg": "^8.13.1"
},
"devDependencies": {
"eslint": "^9.17.0"
},
"engines": {
"node": ">=22"
}
}

View File

@@ -1,6 +1,6 @@
import test from "node:test";
import assert from "node:assert/strict";
test("placeholder", () => {
assert.equal(1 + 1, 2);
});
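
The placeholder test above only checks arithmetic. As a next step, a sketch of a round-trip test for the password helpers exported by src/lib/auth.js; the relative import path and the chosen plaintext are assumptions, not part of the commit:

import test from "node:test";
import assert from "node:assert/strict";
// Assumed location of this test file relative to src/lib/auth.js.
import { hashPassword, verifyPassword } from "../src/lib/auth.js";

test("hashPassword/verifyPassword round trip", async () => {
  const password = "correct-horse-battery";
  const hash = await hashPassword(password);
  // The bcrypt hash must differ from the plaintext and verify both ways.
  assert.notEqual(hash, password);
  assert.equal(await verifyPassword(password, hash), true);
  assert.equal(await verifyPassword("wrong-password", hash), false);
});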

View File

@@ -1,44 +1,44 @@
import fs from "node:fs";
import path from "node:path";
import dotenv from "dotenv";
function loadEnv() {
// When running via npm workspaces, cwd is often apps/server.
// Support both apps/server/.env and repo-root/.env.
const candidates = [
path.resolve(process.cwd(), ".env"),
path.resolve(process.cwd(), "..", "..", ".env")
];
for (const envPath of candidates) {
if (fs.existsSync(envPath)) {
dotenv.config({ path: envPath });
return;
}
}
}
loadEnv();
export function getConfig() {
const serverPort = Number(process.env.SERVER_PORT || 3001);
const adminEmail = String(process.env.ADMIN_EMAIL || "").trim().toLowerCase();
const corsOriginsRaw = String(process.env.CORS_ORIGINS || "").trim();
const corsOrigins = corsOriginsRaw
? corsOriginsRaw.split(",").map((item) => item.trim()).filter(Boolean)
: true;
return {
serverPort,
adminEmail,
corsOrigins,
database: {
host: process.env.DATABASE_HOST || "127.0.0.1",
port: Number(process.env.DATABASE_PORT || 5432),
database: process.env.DATABASE_NAME || "postgres",
user: process.env.DATABASE_USER || "postgres",
password: process.env.DATABASE_PASSWORD || "",
ssl: String(process.env.DATABASE_SSL || "false").toLowerCase() === "true"
}
};
}

View File

@@ -1,16 +1,16 @@
import pg from "pg";
import { getConfig } from "./config.js";
const { Pool } = pg;
export function createPool() {
const { database } = getConfig();
return new Pool({
host: database.host,
port: database.port,
database: database.database,
user: database.user,
password: database.password,
ssl: database.ssl ? { rejectUnauthorized: false } : false
});
}

View File

@@ -1,87 +1,87 @@
import Fastify from "fastify";
import cors from "@fastify/cors";
import multipart from "@fastify/multipart";
import jwt from "@fastify/jwt";
import { getConfig } from "./config.js";
import { createPool } from "./db.js";
import { authRoutes } from "./routes/auth.routes.js";
import { adminRoutes } from "./routes/admin.routes.js";
import { foldersRoutes } from "./routes/folders.routes.js";
import { bookmarksRoutes } from "./routes/bookmarks.routes.js";
import { importExportRoutes } from "./routes/importExport.routes.js";
import { syncRoutes } from "./routes/sync.routes.js";
const app = Fastify({ logger: true });
// Plugins
const config = getConfig();
await app.register(cors, {
origin: config.corsOrigins,
credentials: true,
methods: ["GET", "POST", "PATCH", "DELETE", "OPTIONS"],
allowedHeaders: ["Content-Type", "Authorization", "Accept"]
});
await app.register(multipart);
const jwtSecret = process.env.AUTH_JWT_SECRET;
if (!jwtSecret) {
throw new Error("AUTH_JWT_SECRET is required");
}
await app.register(jwt, { secret: jwtSecret });
const pool = createPool();
app.decorate("pg", pool);
// Detect optional DB features (for backwards compatibility when migrations haven't run yet).
async function hasColumn(tableName, columnName) {
try {
const r = await app.pg.query(
"select 1 from information_schema.columns where table_schema=current_schema() and table_name=$1 and column_name=$2 limit 1",
[tableName, columnName]
);
return r.rowCount > 0;
} catch {
return false;
}
}
const folderSortOrderSupported = await hasColumn("bookmark_folders", "sort_order");
const bookmarkSortOrderSupported = await hasColumn("bookmarks", "sort_order");
app.decorate("features", {
folderSortOrder: folderSortOrderSupported,
bookmarkSortOrder: bookmarkSortOrderSupported
});
app.decorate("authenticate", async (req, reply) => {
try {
await req.jwtVerify();
} catch (err) {
reply.code(401);
throw err;
}
});
app.setErrorHandler((err, _req, reply) => {
const statusCode = err.statusCode || 500;
reply.code(statusCode).send({ message: err.message || "server error" });
});
app.get("/health", async () => ({ ok: true }));
// Routes
app.decorate("config", config);
await authRoutes(app);
await adminRoutes(app);
await foldersRoutes(app);
await bookmarksRoutes(app);
await importExportRoutes(app);
await syncRoutes(app);
app.addHook("onClose", async (instance) => {
await instance.pg.end();
});
const { serverPort } = config;
await app.listen({ port: serverPort, host: "0.0.0.0" });
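
Since startup throws without AUTH_JWT_SECRET, a quick liveness probe against the /health route is useful after booting; a minimal sketch, assuming the default SERVER_PORT of 3001 from getConfig:

// Assumes the server is running locally on the default port 3001.
const res = await fetch("http://127.0.0.1:3001/health");
console.log(res.status, await res.json()); // expected: 200 { ok: true }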

View File

@@ -1,29 +1,29 @@
import { httpError } from "./httpErrors.js";
function normalizeEmail(email) {
return String(email || "").trim().toLowerCase();
}
export async function requireAdmin(app, req) {
await app.authenticate(req);
const userId = req.user?.sub;
if (!userId) throw httpError(401, "unauthorized");
const res = await app.pg.query(
"select id, email, role, created_at, updated_at from users where id=$1",
[userId]
);
const row = res.rows[0];
if (!row) throw httpError(401, "unauthorized");
const adminEmail = normalizeEmail(app.config?.adminEmail);
const isAdmin = Boolean(adminEmail) && normalizeEmail(row.email) === adminEmail;
if (!isAdmin) throw httpError(403, "admin only");
req.adminUser = row;
}
export function isAdminEmail(app, email) {
const adminEmail = normalizeEmail(app.config?.adminEmail);
return Boolean(adminEmail) && normalizeEmail(email) === adminEmail;
}

View File

@@ -1,10 +1,10 @@
import bcrypt from "bcryptjs";
export async function hashPassword(password) {
const saltRounds = 10;
return bcrypt.hash(password, saltRounds);
}
export async function verifyPassword(password, passwordHash) {
return bcrypt.compare(password, passwordHash);
}

View File

@@ -1,125 +1,125 @@
import * as cheerio from "cheerio";
export function parseNetscapeBookmarkHtmlNode(html) {
const $ = cheerio.load(html, { decodeEntities: false });
const rootDl = $("dl").first();
if (!rootDl.length) return { folders: [], bookmarks: [] };
const folders = [];
const bookmarks = [];
function normText(s) {
return String(s || "").replace(/\s+/g, " ").trim();
}
function collectLevelDt(node) {
const out = [];
const children = $(node).contents().toArray();
for (const child of children) {
if (!child || child.type !== "tag") continue;
const tag = child.tagName?.toLowerCase();
if (tag === "dt") {
out.push(child);
continue;
}
if (tag === "dl") {
// nested list belongs to the previous <DT>
continue;
}
out.push(...collectLevelDt(child));
}
return out;
}
function findNextDlForDt(dtNode, stopDlNode) {
let cur = dtNode;
while (cur && cur !== stopDlNode) {
let next = cur.nextSibling;
while (next && next.type !== "tag") next = next.nextSibling;
if (next && next.type === "tag" && next.tagName?.toLowerCase() === "dl") return $(next);
cur = cur.parent;
}
return null;
}
function walkDl($dl, parentTempId) {
// Netscape format: <DL><p> contains repeating <DT> items and nested <DL>.
// When parsed, <DT> may be wrapped (e.g. inside <p>), so we must be robust.
const dts = collectLevelDt($dl[0]);
for (const node of dts) {
const $dt = $(node);
const $h3 = $dt.children("h3").first().length ? $dt.children("h3").first() : $dt.find("h3").first();
const $a = $dt.children("a").first().length ? $dt.children("a").first() : $dt.find("a").first();
const $nestedDl = $dt.children("dl").first();
const $nextDl = $nestedDl.length ? $nestedDl : findNextDlForDt(node, $dl[0]);
if ($h3.length) {
const tempId = `${folders.length + 1}`;
const name = normText($h3.text() || "");
folders.push({ tempId, parentTempId: parentTempId ?? null, name });
if ($nextDl?.length) walkDl($nextDl, tempId);
} else if ($a.length) {
const title = normText($a.text() || "");
const url = $a.attr("href") || "";
bookmarks.push({ parentTempId: parentTempId ?? null, title, url });
}
}
}
walkDl(rootDl, null);
return { folders, bookmarks };
}
export function buildNetscapeBookmarkHtml({ folders, bookmarks }) {
// folders: [{id, parentId, name}]
// bookmarks: [{folderId, title, url}]
const folderChildren = new Map();
const bookmarkChildren = new Map();
for (const f of folders) {
const key = f.parentId ?? "root";
if (!folderChildren.has(key)) folderChildren.set(key, []);
folderChildren.get(key).push(f);
}
for (const b of bookmarks) {
const key = b.folderId ?? "root";
if (!bookmarkChildren.has(key)) bookmarkChildren.set(key, []);
bookmarkChildren.get(key).push(b);
}
function esc(s) {
return String(s)
.replaceAll("&", "&amp;")
.replaceAll("<", "&lt;")
.replaceAll(">", "&gt;")
.replaceAll('"', "&quot;");
}
function renderFolder(parentId) {
const key = parentId ?? "root";
const subFolders = (folderChildren.get(key) || []).slice().sort((a, b) => a.name.localeCompare(b.name));
const subBookmarks = (bookmarkChildren.get(key) || []).slice().sort((a, b) => a.title.localeCompare(b.title));
let out = "<DL><p>\n";
for (const f of subFolders) {
out += ` <DT><H3>${esc(f.name)}</H3>\n`;
out += renderFolder(f.id)
.split("\n")
.map((line) => (line ? ` ${line}` : line))
.join("\n");
out += "\n";
}
for (const b of subBookmarks) {
out += ` <DT><A HREF=\"${esc(b.url)}\">${esc(b.title)}</A>\n`;
}
out += "</DL><p>";
return out;
}
const header = `<!DOCTYPE NETSCAPE-Bookmark-file-1>\n<!-- This is an automatically generated file. -->\n<META HTTP-EQUIV=\"Content-Type\" CONTENT=\"text/html; charset=UTF-8\">\n<TITLE>Bookmarks</TITLE>\n<H1>Bookmarks</H1>\n`;
const body = renderFolder(null);
return header + body + "\n";
}
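
A small usage sketch of the two helpers above; the module path, sample HTML, and ids are illustrative assumptions only:

// The actual module path is not shown in the diff; "./bookmarkHtml.js" is assumed.
import { parseNetscapeBookmarkHtmlNode, buildNetscapeBookmarkHtml } from "./bookmarkHtml.js";

const sampleHtml = `
<DL><p>
  <DT><H3>Dev</H3>
  <DL><p>
    <DT><A HREF="https://nodejs.org/">Node.js</A>
  </DL><p>
</DL><p>`;

const parsed = parseNetscapeBookmarkHtmlNode(sampleHtml);
// parsed.folders   -> [{ tempId: "1", parentTempId: null, name: "Dev" }]
// parsed.bookmarks -> [{ parentTempId: "1", title: "Node.js", url: "https://nodejs.org/" }]

const html = buildNetscapeBookmarkHtml({
  folders: [{ id: "f1", parentId: null, name: "Dev" }],
  bookmarks: [{ folderId: "f1", title: "Node.js", url: "https://nodejs.org/" }]
});
console.log(html.startsWith("<!DOCTYPE NETSCAPE-Bookmark-file-1>")); // true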

View File

@@ -1,5 +1,5 @@
export function httpError(statusCode, message) {
const err = new Error(message);
err.statusCode = statusCode;
return err;
}

View File

@@ -1,39 +1,39 @@
export function userRowToDto(row) {
return {
id: row.id,
email: row.email,
role: row.role,
createdAt: row.created_at,
updatedAt: row.updated_at
};
}
export function folderRowToDto(row) {
return {
id: row.id,
userId: row.user_id,
parentId: row.parent_id,
name: row.name,
visibility: row.visibility,
sortOrder: row.sort_order ?? 0,
createdAt: row.created_at,
updatedAt: row.updated_at
};
}
export function bookmarkRowToDto(row) {
return {
id: row.id,
userId: row.user_id,
folderId: row.folder_id,
sortOrder: row.sort_order ?? 0,
title: row.title,
url: row.url,
urlNormalized: row.url_normalized,
urlHash: row.url_hash,
visibility: row.visibility,
source: row.source,
updatedAt: row.updated_at,
deletedAt: row.deleted_at
};
}

View File

@@ -1,61 +1,61 @@
import { readFile, readdir } from "node:fs/promises";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { createPool } from "./db.js";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
async function ensureMigrationsTable(pool) {
await pool.query(`
create table if not exists schema_migrations (
id text primary key,
applied_at timestamptz not null default now()
);
`);
}
async function getApplied(pool) {
const res = await pool.query("select id from schema_migrations order by id");
return new Set(res.rows.map((r) => r.id));
}
async function applyMigration(pool, id, sql) {
await pool.query("begin");
try {
await pool.query(sql);
await pool.query("insert into schema_migrations (id) values ($1)", [id]);
await pool.query("commit");
// eslint-disable-next-line no-console
console.log(`[migrate] applied ${id}`);
} catch (err) {
await pool.query("rollback");
throw err;
}
}
async function main() {
const pool = createPool();
try {
await ensureMigrationsTable(pool);
const applied = await getApplied(pool);
const migrationsDir = path.resolve(__dirname, "..", "migrations");
const files = (await readdir(migrationsDir))
.filter((f) => f.endsWith(".sql"))
.sort();
for (const file of files) {
if (applied.has(file)) continue;
const sql = await readFile(path.join(migrationsDir, file), "utf8");
await applyMigration(pool, file, sql);
}
// eslint-disable-next-line no-console
console.log("[migrate] done");
} finally {
await pool.end();
}
}
main();
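
For reference, these migrations are applied through the db:migrate script declared in package.json above (node src/migrate.js), and resetDb.js below re-runs this module after dropping all tables.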

View File

@@ -1,26 +1,26 @@
import { createPool } from "./db.js";
async function main() {
const pool = createPool();
try {
// Destructive: development convenience only.
await pool.query("begin");
try {
await pool.query("drop table if exists bookmarks cascade");
await pool.query("drop table if exists bookmark_folders cascade");
await pool.query("drop table if exists users cascade");
await pool.query("drop table if exists schema_migrations cascade");
await pool.query("commit");
} catch (e) {
await pool.query("rollback");
throw e;
}
} finally {
await pool.end();
}
// Re-apply migrations.
await import("./migrate.js");
}
main();

View File

@@ -1,147 +1,147 @@
import { computeUrlHash, normalizeUrl } from "@browser-bookmark/shared";
import { httpError } from "../lib/httpErrors.js";
import { requireAdmin, isAdminEmail } from "../lib/admin.js";
import { bookmarkRowToDto, folderRowToDto, userRowToDto } from "../lib/rows.js";
function toUserDtoWithAdminOverride(app, row) {
const dto = userRowToDto(row);
if (isAdminEmail(app, dto.email)) dto.role = "admin";
return dto;
}
export async function adminRoutes(app) {
app.get(
"/admin/users",
{ preHandler: [async (req) => requireAdmin(app, req)] },
async () => {
const res = await app.pg.query(
"select id, email, role, created_at, updated_at from users order by created_at desc limit 500"
);
return res.rows.map((r) => toUserDtoWithAdminOverride(app, r));
}
);
app.get(
"/admin/users/:id/folders",
{ preHandler: [async (req) => requireAdmin(app, req)] },
async (req) => {
const userId = req.params?.id;
if (!userId) throw httpError(400, "user id required");
const orderBy = app.features?.folderSortOrder
? "parent_id nulls first, sort_order asc, name asc"
: "parent_id nulls first, name asc";
const res = await app.pg.query(
`select * from bookmark_folders where user_id=$1 order by ${orderBy} limit 1000`,
[userId]
);
return res.rows.map(folderRowToDto);
}
);
app.get(
"/admin/users/:id/bookmarks",
{ preHandler: [async (req) => requireAdmin(app, req)] },
async (req) => {
const userId = req.params?.id;
if (!userId) throw httpError(400, "user id required");
const q = (req.query?.q || "").trim();
const params = [userId];
let where = "where user_id=$1 and deleted_at is null";
if (q) {
params.push(`%${q}%`);
where += ` and (title ilike $${params.length} or url ilike $${params.length})`;
}
const orderBy = app.features?.bookmarkSortOrder
? "folder_id nulls first, sort_order asc, updated_at desc"
: "updated_at desc";
const res = await app.pg.query(
`select * from bookmarks ${where} order by ${orderBy} limit 500`,
params
);
return res.rows.map(bookmarkRowToDto);
}
);
app.delete(
"/admin/users/:userId/bookmarks/:bookmarkId",
{ preHandler: [async (req) => requireAdmin(app, req)] },
async (req) => {
const userId = req.params?.userId;
const bookmarkId = req.params?.bookmarkId;
if (!userId || !bookmarkId) throw httpError(400, "userId and bookmarkId required");
const res = await app.pg.query(
"update bookmarks set deleted_at=now(), updated_at=now() where id=$1 and user_id=$2 and deleted_at is null returning *",
[bookmarkId, userId]
);
if (!res.rows[0]) throw httpError(404, "bookmark not found");
return bookmarkRowToDto(res.rows[0]);
}
);
app.delete(
"/admin/users/:userId/folders/:folderId",
{ preHandler: [async (req) => requireAdmin(app, req)] },
async (req) => {
const userId = req.params?.userId;
const folderId = req.params?.folderId;
if (!userId || !folderId) throw httpError(400, "userId and folderId required");
const res = await app.pg.query(
"delete from bookmark_folders where id=$1 and user_id=$2 returning *",
[folderId, userId]
);
if (!res.rows[0]) throw httpError(404, "folder not found");
return folderRowToDto(res.rows[0]);
}
);
app.post(
"/admin/users/:userId/bookmarks/:bookmarkId/copy-to-me",
{ preHandler: [async (req) => requireAdmin(app, req)] },
async (req) => {
const sourceUserId = req.params?.userId;
const bookmarkId = req.params?.bookmarkId;
const adminUserId = req.adminUser?.id;
if (!sourceUserId || !bookmarkId) throw httpError(400, "userId and bookmarkId required");
if (!adminUserId) throw httpError(401, "unauthorized");
const srcRes = await app.pg.query(
"select * from bookmarks where id=$1 and user_id=$2 and deleted_at is null",
[bookmarkId, sourceUserId]
);
const src = srcRes.rows[0];
if (!src) throw httpError(404, "bookmark not found");
const urlNormalized = normalizeUrl(src.url);
const urlHash = computeUrlHash(urlNormalized);
const existing = await app.pg.query(
"select * from bookmarks where user_id=$1 and url_hash=$2 and deleted_at is null limit 1",
[adminUserId, urlHash]
);
if (existing.rows[0]) {
const merged = await app.pg.query(
`update bookmarks
set title=$1, url=$2, url_normalized=$3, visibility='private', folder_id=null, source='manual', updated_at=now()
where id=$4
returning *`,
[src.title, src.url, urlNormalized, existing.rows[0].id]
);
return bookmarkRowToDto(merged.rows[0]);
}
const res = await app.pg.query(
`insert into bookmarks (user_id, folder_id, title, url, url_normalized, url_hash, visibility, source)
values ($1, null, $2, $3, $4, $5, 'private', 'manual')
returning *`,
[adminUserId, src.title, src.url, urlNormalized, urlHash]
);
return bookmarkRowToDto(res.rows[0]);
}
);
}

View File

@@ -1,74 +1,74 @@
import { hashPassword, verifyPassword } from "../lib/auth.js";
import { httpError } from "../lib/httpErrors.js";
import { userRowToDto } from "../lib/rows.js";
function normalizeEmail(email) {
return String(email || "").trim().toLowerCase();
}
function toUserDtoWithAdminOverride(app, row) {
const dto = userRowToDto(row);
const adminEmail = normalizeEmail(app.config?.adminEmail);
if (adminEmail && normalizeEmail(dto.email) === adminEmail) {
dto.role = "admin";
}
return dto;
}
export async function authRoutes(app) {
app.post("/auth/register", async (req) => {
const { email, password } = req.body || {};
if (!email || !password) throw httpError(400, "email and password required");
if (String(password).length < 8) throw httpError(400, "password too short");
const passwordHash = await hashPassword(password);
try {
const res = await app.pg.query(
"insert into users (email, password_hash) values ($1, $2) returning id, email, role, created_at, updated_at",
[email, passwordHash]
);
const user = toUserDtoWithAdminOverride(app, res.rows[0]);
const token = await app.jwt.sign({ sub: user.id, role: user.role });
return { token, user };
} catch (err) {
if (String(err?.code) === "23505") throw httpError(409, "email already exists");
throw err;
}
});
app.post("/auth/login", async (req) => {
const { email, password } = req.body || {};
if (!email || !password) throw httpError(400, "email and password required");
const res = await app.pg.query(
"select id, email, role, password_hash, created_at, updated_at from users where email=$1",
[email]
);
const row = res.rows[0];
if (!row) throw httpError(401, "invalid credentials");
const ok = await verifyPassword(password, row.password_hash);
if (!ok) throw httpError(401, "invalid credentials");
const userWithRole = toUserDtoWithAdminOverride(app, row);
const token = await app.jwt.sign({ sub: userWithRole.id, role: userWithRole.role });
return { token, user: userWithRole };
});
app.get(
"/auth/me",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const res = await app.pg.query(
"select id, email, role, created_at, updated_at from users where id=$1",
[userId]
);
const row = res.rows[0];
if (!row) throw httpError(404, "user not found");
return toUserDtoWithAdminOverride(app, row);
}
);
}
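
For manual testing, a sketch of exercising these endpoints with fetch; the base URL matches the default port, and the e-mail/password values are placeholders, not values from the commit:

const base = "http://127.0.0.1:3001"; // assumed default SERVER_PORT

// Register (expect 409 if the e-mail already exists), keeping the issued token.
const register = await fetch(`${base}/auth/register`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ email: "demo@example.com", password: "longenough-pw" })
});
const { token } = await register.json(); // shape: { token, user }

// Call the authenticated route with the Bearer token expected by @fastify/jwt.
const me = await fetch(`${base}/auth/me`, {
  headers: { Authorization: `Bearer ${token}` }
});
console.log(me.status, await me.json()); // expected: 200 and the user DTO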

View File

@@ -1,305 +1,305 @@
import { computeUrlHash, normalizeUrl } from "@browser-bookmark/shared";
import { httpError } from "../lib/httpErrors.js";
import { bookmarkRowToDto } from "../lib/rows.js";
export async function bookmarksRoutes(app) {
app.get("/bookmarks/public", async (req) => {
const q = (req.query?.q || "").trim();
const params = [];
let where = "where visibility='public' and deleted_at is null";
if (q) {
params.push(`%${q}%`);
where += ` and (title ilike $${params.length} or url ilike $${params.length})`;
}
const res = await app.pg.query(
`select * from bookmarks ${where} order by updated_at desc limit 200`,
params
);
return res.rows.map(bookmarkRowToDto);
});
app.get(
"/bookmarks",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const q = (req.query?.q || "").trim();
const params = [userId];
let where = "where user_id=$1 and deleted_at is null";
if (q) {
params.push(`%${q}%`);
where += ` and (title ilike $${params.length} or url ilike $${params.length})`;
}
const orderBy = app.features?.bookmarkSortOrder
? "folder_id nulls first, sort_order asc, updated_at desc"
: "updated_at desc";
const res = await app.pg.query(
`select * from bookmarks ${where} order by ${orderBy} limit 500`,
params
);
return res.rows.map(bookmarkRowToDto);
}
);
app.post(
"/bookmarks",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const { folderId, title, url, visibility } = req.body || {};
if (!title) throw httpError(400, "title required");
if (!url) throw httpError(400, "url required");
if (!visibility) throw httpError(400, "visibility required");
const urlNormalized = normalizeUrl(url);
const urlHash = computeUrlHash(urlNormalized);
const existing = await app.pg.query(
"select * from bookmarks where user_id=$1 and url_hash=$2 and deleted_at is null limit 1",
[userId, urlHash]
);
if (existing.rows[0]) {
// auto-merge
const targetFolderId = folderId ?? null;
const merged = app.features?.bookmarkSortOrder
? await app.pg.query(
`update bookmarks
set title=$1,
url=$2,
url_normalized=$3,
visibility=$4,
folder_id=$5,
sort_order = case
when folder_id is distinct from $5 then (
select coalesce(max(sort_order), -1) + 1
from bookmarks
where user_id=$7 and folder_id is not distinct from $5 and deleted_at is null
)
else sort_order
end,
source='manual',
updated_at=now()
where id=$6
returning *`,
[title, url, urlNormalized, visibility, targetFolderId, existing.rows[0].id, userId]
)
: await app.pg.query(
`update bookmarks
set title=$1, url=$2, url_normalized=$3, visibility=$4, folder_id=$5, source='manual', updated_at=now()
where id=$6
returning *`,
[title, url, urlNormalized, visibility, targetFolderId, existing.rows[0].id]
);
return bookmarkRowToDto(merged.rows[0]);
}
const targetFolderId = folderId ?? null;
const res = app.features?.bookmarkSortOrder
? await app.pg.query(
`insert into bookmarks (user_id, folder_id, sort_order, title, url, url_normalized, url_hash, visibility, source)
values (
$1,
$2,
(select coalesce(max(sort_order), -1) + 1 from bookmarks where user_id=$1 and folder_id is not distinct from $2 and deleted_at is null),
$3,
$4,
$5,
$6,
$7,
'manual'
)
returning *`,
[userId, targetFolderId, title, url, urlNormalized, urlHash, visibility]
)
: await app.pg.query(
`insert into bookmarks (user_id, folder_id, title, url, url_normalized, url_hash, visibility, source)
values ($1, $2, $3, $4, $5, $6, $7, 'manual')
returning *`,
[userId, targetFolderId, title, url, urlNormalized, urlHash, visibility]
);
return bookmarkRowToDto(res.rows[0]);
}
);
app.post(
"/bookmarks/reorder",
{ preHandler: [app.authenticate] },
async (req) => {
if (!app.features?.bookmarkSortOrder) {
throw httpError(
409,
"bookmark sort order is not supported by current database schema. Please run server migrations (db:migrate)."
);
}
const userId = req.user.sub;
const { folderId, orderedIds } = req.body || {};
const folder = folderId ?? null;
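      // Illustrative payload (shape inferred from the destructuring above):
      //   POST /bookmarks/reorder
      //   { "folderId": null, "orderedIds": ["<bookmark-id>", "<bookmark-id>"] }
      // Items end up with sort_order 0..n-1 in the order given.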
if (!Array.isArray(orderedIds) || orderedIds.length === 0) {
throw httpError(400, "orderedIds required");
}
const siblings = await app.pg.query(
"select id from bookmarks where user_id=$1 and folder_id is not distinct from $2 and deleted_at is null",
[userId, folder]
);
const siblingIds = siblings.rows.map((r) => r.id);
const want = new Set(orderedIds);
if (want.size !== orderedIds.length) throw httpError(400, "orderedIds must be unique");
if (siblingIds.length !== orderedIds.length) throw httpError(400, "orderedIds must include all bookmarks in the folder");
for (const id of siblingIds) {
if (!want.has(id)) throw httpError(400, "orderedIds must include all bookmarks in the folder");
}
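      // Assumption: app.pg.query sends every statement over the same connection,
      // so the begin/commit pair below forms a real transaction. If app.pg is a
      // connection pool, the statements could run on different connections and
      // the transaction would be ineffective; a dedicated client or a
      // transaction helper would be the safer choice.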
await app.pg.query("begin");
try {
for (let i = 0; i < orderedIds.length; i++) {
await app.pg.query(
"update bookmarks set sort_order=$1, updated_at=now() where id=$2 and user_id=$3 and deleted_at is null",
[i, orderedIds[i], userId]
);
}
await app.pg.query("commit");
} catch (e) {
await app.pg.query("rollback");
throw e;
}
return { ok: true };
}
);
app.patch(
"/bookmarks/:id",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const id = req.params?.id;
const body = req.body || {};
const existingRes = await app.pg.query(
"select * from bookmarks where id=$1 and user_id=$2 and deleted_at is null",
[id, userId]
);
const existing = existingRes.rows[0];
if (!existing) throw httpError(404, "bookmark not found");
const sets = [];
const params = [];
let i = 1;
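      // The SET clause is built only from fields actually present in the body.
      // hasOwnProperty checks (rather than truthiness) let explicit nulls through,
      // e.g. folderId: null moves the bookmark back to the root.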
// url update implies url_normalized + url_hash update
let nextUrl = existing.url;
if (Object.prototype.hasOwnProperty.call(body, "url")) {
nextUrl = String(body.url || "").trim();
if (!nextUrl) throw httpError(400, "url required");
}
let urlNormalized = existing.url_normalized;
let urlHash = existing.url_hash;
const urlChanged = nextUrl !== existing.url;
if (urlChanged) {
urlNormalized = normalizeUrl(nextUrl);
urlHash = computeUrlHash(urlNormalized);
}
if (Object.prototype.hasOwnProperty.call(body, "title")) {
const title = String(body.title || "").trim();
if (!title) throw httpError(400, "title required");
sets.push(`title=$${i++}`);
params.push(title);
}
if (Object.prototype.hasOwnProperty.call(body, "folderId")) {
sets.push(`folder_id=$${i++}`);
params.push(body.folderId ?? null);
}
if (Object.prototype.hasOwnProperty.call(body, "visibility")) {
if (!body.visibility) throw httpError(400, "visibility required");
sets.push(`visibility=$${i++}`);
params.push(body.visibility);
}
if (Object.prototype.hasOwnProperty.call(body, "sortOrder")) {
if (!app.features?.bookmarkSortOrder) {
throw httpError(
409,
"sortOrder is not supported by current database schema. Please run server migrations (db:migrate)."
);
}
const n = Number(body.sortOrder);
if (!Number.isFinite(n)) throw httpError(400, "sortOrder must be a number");
sets.push(`sort_order=$${i++}`);
params.push(Math.trunc(n));
}
if (Object.prototype.hasOwnProperty.call(body, "url")) {
sets.push(`url=$${i++}`);
params.push(nextUrl);
sets.push(`url_normalized=$${i++}`);
params.push(urlNormalized);
sets.push(`url_hash=$${i++}`);
params.push(urlHash);
}
if (sets.length === 0) throw httpError(400, "no fields to update");
// If URL changed and collides with another bookmark, auto-merge by keeping the existing row.
if (urlChanged) {
const dup = await app.pg.query(
"select * from bookmarks where user_id=$1 and url_hash=$2 and deleted_at is null and id<>$3 limit 1",
[userId, urlHash, id]
);
if (dup.rows[0]) {
const targetId = dup.rows[0].id;
const merged = await app.pg.query(
`update bookmarks
set ${sets.join(", ")}, source='manual', updated_at=now()
where id=$${i++} and user_id=$${i}
returning *`,
[...params, targetId, userId]
);
await app.pg.query(
"update bookmarks set deleted_at=now(), updated_at=now() where id=$1 and user_id=$2",
[id, userId]
);
return bookmarkRowToDto(merged.rows[0]);
}
}
params.push(id, userId);
const res = await app.pg.query(
`update bookmarks
set ${sets.join(", ")}, source='manual', updated_at=now()
where id=$${i++} and user_id=$${i}
returning *`,
params
);
return bookmarkRowToDto(res.rows[0]);
}
);
app.delete(
"/bookmarks/:id",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const id = req.params?.id;
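      // Soft delete: the row keeps its id and only gets deleted_at stamped, so
      // /sync/pull can later hand the tombstone to other clients.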
const res = await app.pg.query(
"update bookmarks set deleted_at=now(), updated_at=now() where id=$1 and user_id=$2 and deleted_at is null returning *",
[id, userId]
);
if (!res.rows[0]) throw httpError(404, "bookmark not found");
      return bookmarkRowToDto(res.rows[0]);
}
);
}

View File

@@ -1,199 +1,199 @@
import { httpError } from "../lib/httpErrors.js";
import { folderRowToDto } from "../lib/rows.js";
export async function foldersRoutes(app) {
app.get(
"/folders",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const orderBy = app.features?.folderSortOrder
? "parent_id nulls first, sort_order asc, name asc"
: "parent_id nulls first, name asc";
const res = await app.pg.query(
`select * from bookmark_folders where user_id=$1 order by ${orderBy}`,
[userId]
);
return res.rows.map(folderRowToDto);
}
);
app.post(
"/folders",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const { parentId, name, visibility } = req.body || {};
await app.pg.query("begin");
try {
// Move bookmarks in this folder back to root (so they remain visible).
await app.pg.query(
"update bookmarks set folder_id=null, updated_at=now() where user_id=$1 and folder_id=$2 and deleted_at is null",
[userId, id]
);
// Lift child folders to root.
await app.pg.query(
"update bookmark_folders set parent_id=null, updated_at=now() where user_id=$1 and parent_id=$2",
[userId, id]
);
const res = await app.pg.query(
"delete from bookmark_folders where id=$1 and user_id=$2 returning id",
[id, userId]
);
if (!res.rows[0]) throw httpError(404, "folder not found");
await app.pg.query("commit");
} catch (e) {
await app.pg.query("rollback");
throw e;
}
return { ok: true };
const res = app.features?.folderSortOrder
? await app.pg.query(
`insert into bookmark_folders (user_id, parent_id, name, visibility, sort_order)
values (
$1,
$2,
$3,
$4,
(select coalesce(max(sort_order), -1) + 1 from bookmark_folders where user_id=$1 and parent_id is not distinct from $2)
)
returning *`,
[userId, parent, name, visibility]
)
: await app.pg.query(
`insert into bookmark_folders (user_id, parent_id, name, visibility)
values ($1, $2, $3, $4)
returning *`,
[userId, parent, name, visibility]
);
return folderRowToDto(res.rows[0]);
}
);
app.post(
"/folders/reorder",
{ preHandler: [app.authenticate] },
async (req) => {
if (!app.features?.folderSortOrder) {
throw httpError(
409,
"folder sort order is not supported by current database schema. Please run server migrations (db:migrate)."
);
}
const userId = req.user.sub;
const { parentId, orderedIds } = req.body || {};
const parent = parentId ?? null;
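      // Illustrative payload (shape inferred from the destructuring above):
      //   POST /folders/reorder
      //   { "parentId": null, "orderedIds": ["<folder-id>", "<folder-id>"] }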
if (!Array.isArray(orderedIds) || orderedIds.length === 0) {
throw httpError(400, "orderedIds required");
}
const siblings = await app.pg.query(
"select id from bookmark_folders where user_id=$1 and parent_id is not distinct from $2",
[userId, parent]
);
const siblingIds = siblings.rows.map((r) => r.id);
// ensure same set
const want = new Set(orderedIds);
if (want.size !== orderedIds.length) throw httpError(400, "orderedIds must be unique");
if (siblingIds.length !== orderedIds.length) throw httpError(400, "orderedIds must include all sibling folders");
for (const id of siblingIds) {
if (!want.has(id)) throw httpError(400, "orderedIds must include all sibling folders");
}
await app.pg.query("begin");
try {
for (let i = 0; i < orderedIds.length; i++) {
await app.pg.query(
"update bookmark_folders set sort_order=$1, updated_at=now() where id=$2 and user_id=$3",
[i, orderedIds[i], userId]
);
}
await app.pg.query("commit");
} catch (e) {
await app.pg.query("rollback");
throw e;
}
return { ok: true };
}
);
app.patch(
"/folders/:id",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const id = req.params?.id;
const body = req.body || {};
const existing = await app.pg.query(
"select * from bookmark_folders where id=$1 and user_id=$2",
[id, userId]
);
if (!existing.rows[0]) throw httpError(404, "folder not found");
const sets = [];
const params = [];
let i = 1;
if (Object.prototype.hasOwnProperty.call(body, "parentId")) {
sets.push(`parent_id=$${i++}`);
params.push(body.parentId ?? null);
}
if (Object.prototype.hasOwnProperty.call(body, "name")) {
const name = String(body.name || "").trim();
if (!name) throw httpError(400, "name required");
sets.push(`name=$${i++}`);
params.push(name);
}
if (Object.prototype.hasOwnProperty.call(body, "visibility")) {
if (!body.visibility) throw httpError(400, "visibility required");
sets.push(`visibility=$${i++}`);
params.push(body.visibility);
}
if (Object.prototype.hasOwnProperty.call(body, "sortOrder")) {
if (!app.features?.folderSortOrder) {
throw httpError(
409,
"sortOrder is not supported by current database schema. Please run server migrations (db:migrate)."
);
}
const n = Number(body.sortOrder);
if (!Number.isFinite(n)) throw httpError(400, "sortOrder must be a number");
sets.push(`sort_order=$${i++}`);
params.push(Math.trunc(n));
}
if (sets.length === 0) throw httpError(400, "no fields to update");
params.push(id, userId);
const res = await app.pg.query(
`update bookmark_folders set ${sets.join(", ")}, updated_at=now() where id=$${i++} and user_id=$${i} returning *`,
params
);
return folderRowToDto(res.rows[0]);
}
);
app.delete(
"/folders/:id",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const id = req.params?.id;
      await app.pg.query("begin");
      try {
        // Move bookmarks in this folder back to root (so they remain visible).
        await app.pg.query(
          "update bookmarks set folder_id=null, updated_at=now() where user_id=$1 and folder_id=$2 and deleted_at is null",
          [userId, id]
        );
        // Lift child folders to root.
        await app.pg.query(
          "update bookmark_folders set parent_id=null, updated_at=now() where user_id=$1 and parent_id=$2",
          [userId, id]
        );
        const res = await app.pg.query(
          "delete from bookmark_folders where id=$1 and user_id=$2 returning id",
          [id, userId]
        );
        if (!res.rows[0]) throw httpError(404, "folder not found");
        await app.pg.query("commit");
      } catch (e) {
        await app.pg.query("rollback");
        throw e;
      }
      return { ok: true };
}
);
}

View File

@@ -1,131 +1,131 @@
import { computeUrlHash, normalizeUrl } from "@browser-bookmark/shared";
import { parseNetscapeBookmarkHtmlNode, buildNetscapeBookmarkHtml } from "../lib/bookmarkHtmlNode.js";
export async function importExportRoutes(app) {
app.post(
"/bookmarks/import/html",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const file = await req.file();
if (!file) return { imported: 0, merged: 0 };
const chunks = [];
for await (const c of file.file) chunks.push(c);
const html = Buffer.concat(chunks).toString("utf8");
const parsed = parseNetscapeBookmarkHtmlNode(html);
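      // Expected parser output (shape inferred from the usage below, not from the
      // parser itself): { folders: [{ tempId, name }], bookmarks: [{ parentTempId, title, url }] }.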
// Flatten folders (no nesting): dedupe/merge by folder name for this user.
const normName = (s) => String(s || "").replace(/\s+/g, " ").trim().toLowerCase();
const existingFolders = await app.pg.query(
"select id, name from bookmark_folders where user_id=$1",
[userId]
);
const folderIdByName = new Map(
existingFolders.rows.map((r) => [normName(r.name), r.id])
);
const tempIdToFolderName = new Map(
(parsed.folders || []).map((f) => [f.tempId, f.name])
);
const tempToDbId = new Map();
for (const f of parsed.folders || []) {
const key = normName(f.name);
if (!key) continue;
let id = folderIdByName.get(key);
if (!id) {
const res = app.features?.folderSortOrder
? await app.pg.query(
`insert into bookmark_folders (user_id, parent_id, name, visibility, sort_order)
values (
$1,
null,
$2,
'private',
(select coalesce(max(sort_order), -1) + 1 from bookmark_folders where user_id=$1 and parent_id is null)
)
returning id`,
[userId, f.name]
)
: await app.pg.query(
`insert into bookmark_folders (user_id, parent_id, name, visibility)
values ($1, null, $2, 'private')
returning id`,
[userId, f.name]
);
id = res.rows[0].id;
folderIdByName.set(key, id);
}
tempToDbId.set(f.tempId, id);
}
let imported = 0;
let merged = 0;
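      // Rows matched by url_hash are updated in place and counted as "merged";
      // everything else is inserted and counted as "imported".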
for (const b of parsed.bookmarks) {
// Map bookmark's folder via folder name (flattened).
let folderId = null;
if (b.parentTempId) {
const fname = tempIdToFolderName.get(b.parentTempId);
const key = normName(fname);
folderId = key ? (folderIdByName.get(key) || tempToDbId.get(b.parentTempId) || null) : null;
}
const urlNormalized = normalizeUrl(b.url);
const urlHash = computeUrlHash(urlNormalized);
const existing = await app.pg.query(
"select id from bookmarks where user_id=$1 and url_hash=$2 and deleted_at is null limit 1",
[userId, urlHash]
);
if (existing.rows[0]) {
await app.pg.query(
`update bookmarks
set title=$1, url=$2, url_normalized=$3, folder_id=$4, source='import', updated_at=now()
where id=$5`,
[b.title || "", b.url || "", urlNormalized, folderId, existing.rows[0].id]
);
merged++;
} else {
await app.pg.query(
`insert into bookmarks (user_id, folder_id, title, url, url_normalized, url_hash, visibility, source)
values ($1, $2, $3, $4, $5, $6, 'private', 'import')`,
[userId, folderId, b.title || "", b.url || "", urlNormalized, urlHash]
);
imported++;
}
}
return { imported, merged };
}
);
app.get(
"/bookmarks/export/html",
{ preHandler: [app.authenticate] },
async (req, reply) => {
const userId = req.user.sub;
const folders = await app.pg.query(
"select id, parent_id, name from bookmark_folders where user_id=$1 order by name",
[userId]
);
const bookmarks = await app.pg.query(
"select folder_id, title, url from bookmarks where user_id=$1 and deleted_at is null order by title",
[userId]
);
const html = buildNetscapeBookmarkHtml({
folders: folders.rows.map((r) => ({ id: r.id, parentId: r.parent_id, name: r.name })),
bookmarks: bookmarks.rows.map((r) => ({ folderId: r.folder_id, title: r.title, url: r.url }))
});
reply.type("text/html; charset=utf-8");
return html;
}
);
}

View File

@@ -1,162 +1,162 @@
import { computeUrlHash, normalizeUrl } from "@browser-bookmark/shared";
function toDate(v) {
if (!v) return null;
const d = new Date(v);
return Number.isNaN(d.getTime()) ? null : d;
}
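// Sync model implemented below: clients push full rows carrying an updatedAt
// timestamp; the server keeps whichever side is newer (last-write-wins) and
// records deletions as deletedAt tombstones. Pull is incremental via ?since=.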
export async function syncRoutes(app) {
app.post(
"/sync/push",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const { bookmarks = [], folders = [] } = req.body || {};
// folders: upsert by id with LWW
for (const f of folders) {
const incomingUpdatedAt = toDate(f.updatedAt) || new Date();
const existing = await app.pg.query(
"select id, updated_at from bookmark_folders where id=$1 and user_id=$2",
[f.id, userId]
);
if (!existing.rows[0]) {
await app.pg.query(
`insert into bookmark_folders (id, user_id, parent_id, name, visibility, updated_at)
values ($1, $2, $3, $4, $5, $6)`,
[f.id, userId, f.parentId ?? null, f.name || "", f.visibility || "private", incomingUpdatedAt]
);
} else {
const serverUpdatedAt = new Date(existing.rows[0].updated_at);
if (incomingUpdatedAt > serverUpdatedAt) {
await app.pg.query(
`update bookmark_folders
set parent_id=$1, name=$2, visibility=$3, updated_at=$4
where id=$5 and user_id=$6`,
[f.parentId ?? null, f.name || "", f.visibility || "private", incomingUpdatedAt, f.id, userId]
);
}
}
}
// bookmarks: upsert by id with LWW; keep urlHash normalized
for (const b of bookmarks) {
const incomingUpdatedAt = toDate(b.updatedAt) || new Date();
const incomingDeletedAt = toDate(b.deletedAt);
const urlNormalized = normalizeUrl(b.url || "");
const urlHash = computeUrlHash(urlNormalized);
const existing = await app.pg.query(
"select id, updated_at from bookmarks where id=$1 and user_id=$2",
[b.id, userId]
);
if (!existing.rows[0]) {
await app.pg.query(
`insert into bookmarks (
id, user_id, folder_id, title, url, url_normalized, url_hash, visibility, source, updated_at, deleted_at
) values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11)`,
[
b.id,
userId,
b.folderId ?? null,
b.title || "",
b.url || "",
urlNormalized,
urlHash,
b.visibility || "private",
b.source || "manual",
incomingUpdatedAt,
incomingDeletedAt
]
);
} else {
const serverUpdatedAt = new Date(existing.rows[0].updated_at);
if (incomingUpdatedAt > serverUpdatedAt) {
await app.pg.query(
`update bookmarks
set folder_id=$1, title=$2, url=$3, url_normalized=$4, url_hash=$5, visibility=$6, source=$7, updated_at=$8, deleted_at=$9
where id=$10 and user_id=$11`,
[
b.folderId ?? null,
b.title || "",
b.url || "",
urlNormalized,
urlHash,
b.visibility || "private",
b.source || "manual",
incomingUpdatedAt,
incomingDeletedAt,
b.id,
userId
]
);
}
}
}
return { ok: true };
}
);
app.get(
"/sync/pull",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const since = toDate(req.query?.since);
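      // Incremental pull: with ?since= only rows whose updated_at is strictly newer
      // are returned. The serverTime in the response is presumably what a client
      // stores and sends back as the next "since" value.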
const paramsFolders = [userId];
let whereFolders = "where user_id=$1";
if (since) {
paramsFolders.push(since);
whereFolders += ` and updated_at > $${paramsFolders.length}`;
}
const paramsBookmarks = [userId];
let whereBookmarks = "where user_id=$1";
if (since) {
paramsBookmarks.push(since);
whereBookmarks += ` and updated_at > $${paramsBookmarks.length}`;
}
const foldersRes = await app.pg.query(
`select id, user_id, parent_id, name, visibility, created_at, updated_at from bookmark_folders ${whereFolders}`,
paramsFolders
);
const bookmarksRes = await app.pg.query(
`select id, user_id, folder_id, title, url, url_normalized, url_hash, visibility, source, updated_at, deleted_at from bookmarks ${whereBookmarks}`,
paramsBookmarks
);
return {
folders: foldersRes.rows.map((r) => ({
id: r.id,
userId: r.user_id,
parentId: r.parent_id,
name: r.name,
visibility: r.visibility,
createdAt: r.created_at,
updatedAt: r.updated_at
})),
bookmarks: bookmarksRes.rows.map((r) => ({
id: r.id,
userId: r.user_id,
folderId: r.folder_id,
title: r.title,
url: r.url,
urlNormalized: r.url_normalized,
urlHash: r.url_hash,
visibility: r.visibility,
source: r.source,
updatedAt: r.updated_at,
deletedAt: r.deleted_at
})),
serverTime: new Date().toISOString()
};
}
);
}