Initialize project

2026-01-18 10:35:27 +08:00
parent 85042841ae
commit 00ca4c1b0d
116 changed files with 11569 additions and 2 deletions


@@ -0,0 +1,12 @@
export default [
{
files: ["**/*.js"],
languageOptions: {
ecmaVersion: 2024,
sourceType: "module"
},
rules: {
"no-unused-vars": ["error", { "argsIgnorePattern": "^_" }]
}
}
];


@@ -0,0 +1,41 @@
create extension if not exists pgcrypto;
create table if not exists users (
id uuid primary key default gen_random_uuid(),
email text not null unique,
password_hash text not null,
role text not null default 'user',
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create table if not exists bookmark_folders (
id uuid primary key default gen_random_uuid(),
user_id uuid not null references users(id) on delete cascade,
parent_id uuid null references bookmark_folders(id) on delete cascade,
name text not null,
visibility text not null default 'private',
created_at timestamptz not null default now(),
updated_at timestamptz not null default now()
);
create index if not exists idx_bookmark_folders_user_parent on bookmark_folders (user_id, parent_id);
create table if not exists bookmarks (
id uuid primary key default gen_random_uuid(),
user_id uuid not null references users(id) on delete cascade,
folder_id uuid null references bookmark_folders(id) on delete set null,
title text not null,
url text not null,
url_normalized text not null,
url_hash text not null,
visibility text not null default 'private',
source text not null default 'manual',
created_at timestamptz not null default now(),
updated_at timestamptz not null default now(),
deleted_at timestamptz null
);
create index if not exists idx_bookmarks_user_updated_at on bookmarks (user_id, updated_at);
create index if not exists idx_bookmarks_user_url_hash on bookmarks (user_id, url_hash);
create index if not exists idx_bookmarks_visibility on bookmarks (visibility);

apps/server/package.json

@@ -0,0 +1,32 @@
{
"name": "@browser-bookmark/server",
"private": true,
"type": "module",
"version": "0.1.0",
"main": "src/index.js",
"scripts": {
"dev": "node --watch src/index.js",
"build": "node -c src/index.js && node -c src/routes/auth.routes.js && node -c src/routes/bookmarks.routes.js && node -c src/routes/folders.routes.js && node -c src/routes/importExport.routes.js && node -c src/routes/sync.routes.js",
"test": "node --test",
"lint": "eslint .",
"db:migrate": "node src/migrate.js"
},
"dependencies": {
"@browser-bookmark/shared": "0.1.0",
"@fastify/cors": "^11.2.0",
"@fastify/jwt": "^10.0.0",
"@fastify/multipart": "^9.3.0",
"bcryptjs": "^3.0.3",
"cheerio": "^1.1.2",
"dotenv": "^16.4.7",
"fastify": "^5.2.1",
"jsonwebtoken": "^9.0.3",
"pg": "^8.13.1"
},
"devDependencies": {
"eslint": "^9.17.0"
},
"engines": {
"node": ">=22"
}
}


@@ -0,0 +1,6 @@
import test from "node:test";
import assert from "node:assert/strict";
test("placeholder", () => {
assert.equal(1 + 1, 2);
});

apps/server/src/config.js

@@ -0,0 +1,37 @@
import fs from "node:fs";
import path from "node:path";
import dotenv from "dotenv";
function loadEnv() {
// When running via npm workspaces, cwd is often apps/server.
// Support both apps/server/.env and repo-root/.env.
const candidates = [
path.resolve(process.cwd(), ".env"),
path.resolve(process.cwd(), "..", "..", ".env")
];
for (const envPath of candidates) {
if (fs.existsSync(envPath)) {
dotenv.config({ path: envPath });
return;
}
}
}
loadEnv();
export function getConfig() {
const serverPort = Number(process.env.SERVER_PORT || 3001);
return {
serverPort,
database: {
host: process.env.DATABASE_HOST || "127.0.0.1",
port: Number(process.env.DATABASE_PORT || 5432),
database: process.env.DATABASE_NAME || "postgres",
user: process.env.DATABASE_USER || "postgres",
password: process.env.DATABASE_PASSWORD || "",
ssl: String(process.env.DATABASE_SSL || "false").toLowerCase() === "true"
}
};
}
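
Note (illustrative, not part of the committed files): a minimal sketch of the environment getConfig() and index.js expect, placed in apps/server/.env or the repo-root .env per loadEnv(). Values mirror the defaults above; AUTH_JWT_SECRET has no default and the server refuses to start without it.

const exampleEnv = {
  SERVER_PORT: "3001",
  DATABASE_HOST: "127.0.0.1",
  DATABASE_PORT: "5432",
  DATABASE_NAME: "postgres",
  DATABASE_USER: "postgres",
  DATABASE_PASSWORD: "",
  DATABASE_SSL: "false", // "true" enables ssl with rejectUnauthorized: false
  AUTH_JWT_SECRET: "replace-with-a-long-random-string" // required, no fallback
};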

apps/server/src/db.js

@@ -0,0 +1,16 @@
import pg from "pg";
import { getConfig } from "./config.js";
const { Pool } = pg;
export function createPool() {
const { database } = getConfig();
return new Pool({
host: database.host,
port: database.port,
database: database.database,
user: database.user,
password: database.password,
ssl: database.ssl ? { rejectUnauthorized: false } : false
});
}

apps/server/src/index.js

@@ -0,0 +1,58 @@
import Fastify from "fastify";
import cors from "@fastify/cors";
import multipart from "@fastify/multipart";
import jwt from "@fastify/jwt";
import { getConfig } from "./config.js";
import { createPool } from "./db.js";
import { authRoutes } from "./routes/auth.routes.js";
import { foldersRoutes } from "./routes/folders.routes.js";
import { bookmarksRoutes } from "./routes/bookmarks.routes.js";
import { importExportRoutes } from "./routes/importExport.routes.js";
import { syncRoutes } from "./routes/sync.routes.js";
const app = Fastify({ logger: true });
// Plugins
await app.register(cors, {
origin: true,
credentials: true
});
await app.register(multipart);
const jwtSecret = process.env.AUTH_JWT_SECRET;
if (!jwtSecret) {
throw new Error("AUTH_JWT_SECRET is required");
}
await app.register(jwt, { secret: jwtSecret });
app.decorate("pg", createPool());
app.decorate("authenticate", async (req, reply) => {
try {
await req.jwtVerify();
} catch (err) {
reply.code(401);
throw err;
}
});
app.setErrorHandler((err, _req, reply) => {
const statusCode = err.statusCode || 500;
reply.code(statusCode).send({ message: err.message || "server error" });
});
app.get("/health", async () => ({ ok: true }));
// Routes
await authRoutes(app);
await foldersRoutes(app);
await bookmarksRoutes(app);
await importExportRoutes(app);
await syncRoutes(app);
app.addHook("onClose", async (instance) => {
await instance.pg.end();
});
const { serverPort } = getConfig();
await app.listen({ port: serverPort, host: "0.0.0.0" });

apps/server/src/lib/auth.js

@@ -0,0 +1,10 @@
import bcrypt from "bcryptjs";
export async function hashPassword(password) {
const saltRounds = 10;
return bcrypt.hash(password, saltRounds);
}
export async function verifyPassword(password, passwordHash) {
return bcrypt.compare(password, passwordHash);
}
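
Note (illustrative, not part of the committed files): a quick round trip of the two helpers above; the import path assumes the sketch sits next to this file.

import { hashPassword, verifyPassword } from "./auth.js";

const hash = await hashPassword("correct horse battery staple");
console.log(await verifyPassword("correct horse battery staple", hash)); // true
console.log(await verifyPassword("wrong-password", hash)); // false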

apps/server/src/lib/bookmarkHtmlNode.js

@@ -0,0 +1,93 @@
import * as cheerio from "cheerio";
export function parseNetscapeBookmarkHtmlNode(html) {
const $ = cheerio.load(html, { decodeEntities: false });
const rootDl = $("dl").first();
if (!rootDl.length) return { folders: [], bookmarks: [] };
const folders = [];
const bookmarks = [];
function walkDl($dl, parentTempId) {
// Netscape format: <DL><p> contains repeating <DT> items and nested <DL>
const children = $dl.children().toArray();
for (let i = 0; i < children.length; i++) {
const node = children[i];
if (!node || node.tagName?.toLowerCase() !== "dt") continue;
const $dt = $(node);
const $h3 = $dt.children("h3").first();
const $a = $dt.children("a").first();
// Depending on how the parser recovers the unclosed <DT> tags, the nested <DL>
// can end up either as a child of the <DT> or as its next sibling; handle both.
const $childDl = $dt.children("dl").first();
const $next = $(children[i + 1] || null);
const nextIsDl = $next && $next[0]?.tagName?.toLowerCase() === "dl";
if ($h3.length) {
const tempId = `${folders.length + 1}`;
const name = ($h3.text() || "").trim();
folders.push({ tempId, parentTempId: parentTempId ?? null, name });
if ($childDl.length) walkDl($childDl, tempId);
else if (nextIsDl) walkDl($next, tempId);
} else if ($a.length) {
const title = ($a.text() || "").trim();
const url = $a.attr("href") || "";
bookmarks.push({ parentTempId: parentTempId ?? null, title, url });
}
}
}
walkDl(rootDl, null);
return { folders, bookmarks };
}
export function buildNetscapeBookmarkHtml({ folders, bookmarks }) {
// folders: [{id, parentId, name}]
// bookmarks: [{folderId, title, url}]
const folderChildren = new Map();
const bookmarkChildren = new Map();
for (const f of folders) {
const key = f.parentId ?? "root";
if (!folderChildren.has(key)) folderChildren.set(key, []);
folderChildren.get(key).push(f);
}
for (const b of bookmarks) {
const key = b.folderId ?? "root";
if (!bookmarkChildren.has(key)) bookmarkChildren.set(key, []);
bookmarkChildren.get(key).push(b);
}
function esc(s) {
return String(s)
.replaceAll("&", "&amp;")
.replaceAll("<", "&lt;")
.replaceAll(">", "&gt;")
.replaceAll('"', "&quot;");
}
function renderFolder(parentId) {
const key = parentId ?? "root";
const subFolders = (folderChildren.get(key) || []).slice().sort((a, b) => a.name.localeCompare(b.name));
const subBookmarks = (bookmarkChildren.get(key) || []).slice().sort((a, b) => a.title.localeCompare(b.title));
let out = "<DL><p>\n";
for (const f of subFolders) {
out += ` <DT><H3>${esc(f.name)}</H3>\n`;
out += renderFolder(f.id)
.split("\n")
.map((line) => (line ? ` ${line}` : line))
.join("\n");
out += "\n";
}
for (const b of subBookmarks) {
out += ` <DT><A HREF="${esc(b.url)}">${esc(b.title)}</A>\n`;
}
out += "</DL><p>";
return out;
}
const header = `<!DOCTYPE NETSCAPE-Bookmark-file-1>\n<!-- This is an automatically generated file. -->\n<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=UTF-8">\n<TITLE>Bookmarks</TITLE>\n<H1>Bookmarks</H1>\n`;
const body = renderFolder(null);
return header + body + "\n";
}
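
Note (illustrative, not part of the committed files): a sketch of the parse/build round trip on a minimal Netscape fragment. Real browser exports also carry ADD_DATE attributes and <DD> descriptions, which the parser above simply skips; ids in the rebuild step are arbitrary placeholders, and the import path assumes the sketch sits next to this file.

import { parseNetscapeBookmarkHtmlNode, buildNetscapeBookmarkHtml } from "./bookmarkHtmlNode.js";

const sample = `
<DL><p>
  <DT><H3>Dev</H3>
  <DL><p>
    <DT><A HREF="https://nodejs.org">Node.js</A>
  </DL><p>
</DL><p>`;

const parsed = parseNetscapeBookmarkHtmlNode(sample);
console.log(parsed.folders);   // expected: one folder named "Dev"
console.log(parsed.bookmarks); // expected: one bookmark pointing at https://nodejs.org

const html = buildNetscapeBookmarkHtml({
  folders: [{ id: "f1", parentId: null, name: "Dev" }],
  bookmarks: [{ folderId: "f1", title: "Node.js", url: "https://nodejs.org" }]
});
console.log(html.startsWith("<!DOCTYPE NETSCAPE-Bookmark-file-1>")); // true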

apps/server/src/lib/httpErrors.js

@@ -0,0 +1,5 @@
export function httpError(statusCode, message) {
const err = new Error(message);
err.statusCode = statusCode;
return err;
}

apps/server/src/lib/rows.js

@@ -0,0 +1,37 @@
export function userRowToDto(row) {
return {
id: row.id,
email: row.email,
role: row.role,
createdAt: row.created_at,
updatedAt: row.updated_at
};
}
export function folderRowToDto(row) {
return {
id: row.id,
userId: row.user_id,
parentId: row.parent_id,
name: row.name,
visibility: row.visibility,
createdAt: row.created_at,
updatedAt: row.updated_at
};
}
export function bookmarkRowToDto(row) {
return {
id: row.id,
userId: row.user_id,
folderId: row.folder_id,
title: row.title,
url: row.url,
urlNormalized: row.url_normalized,
urlHash: row.url_hash,
visibility: row.visibility,
source: row.source,
updatedAt: row.updated_at,
deletedAt: row.deleted_at
};
}

apps/server/src/migrate.js

@@ -0,0 +1,61 @@
import { readFile, readdir } from "node:fs/promises";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { createPool } from "./db.js";
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
async function ensureMigrationsTable(pool) {
await pool.query(`
create table if not exists schema_migrations (
id text primary key,
applied_at timestamptz not null default now()
);
`);
}
async function getApplied(pool) {
const res = await pool.query("select id from schema_migrations order by id");
return new Set(res.rows.map((r) => r.id));
}
async function applyMigration(pool, id, sql) {
// Run the migration and its bookkeeping row on one dedicated client; with
// pool.query() the begin/commit could land on different connections and the
// transaction would silently not wrap the SQL.
const client = await pool.connect();
try {
await client.query("begin");
await client.query(sql);
await client.query("insert into schema_migrations (id) values ($1)", [id]);
await client.query("commit");
// eslint-disable-next-line no-console
console.log(`[migrate] applied ${id}`);
} catch (err) {
await client.query("rollback");
throw err;
} finally {
client.release();
}
}
async function main() {
const pool = createPool();
try {
await ensureMigrationsTable(pool);
const applied = await getApplied(pool);
const migrationsDir = path.resolve(__dirname, "..", "migrations");
const files = (await readdir(migrationsDir))
.filter((f) => f.endsWith(".sql"))
.sort();
for (const file of files) {
if (applied.has(file)) continue;
const sql = await readFile(path.join(migrationsDir, file), "utf8");
await applyMigration(pool, file, sql);
}
// eslint-disable-next-line no-console
console.log("[migrate] done");
} finally {
await pool.end();
}
}
main().catch((err) => {
// eslint-disable-next-line no-console
console.error(err);
process.exitCode = 1;
});

apps/server/src/routes/auth.routes.js

@@ -0,0 +1,60 @@
import { hashPassword, verifyPassword } from "../lib/auth.js";
import { httpError } from "../lib/httpErrors.js";
import { userRowToDto } from "../lib/rows.js";
export async function authRoutes(app) {
app.post("/auth/register", async (req) => {
const { email, password } = req.body || {};
if (!email || !password) throw httpError(400, "email and password required");
if (String(password).length < 8) throw httpError(400, "password too short");
const passwordHash = await hashPassword(password);
try {
const res = await app.pg.query(
"insert into users (email, password_hash) values ($1, $2) returning id, email, role, created_at, updated_at",
[email, passwordHash]
);
const user = userRowToDto(res.rows[0]);
const token = await app.jwt.sign({ sub: user.id, role: user.role });
return { token, user };
} catch (err) {
if (String(err?.code) === "23505") throw httpError(409, "email already exists");
throw err;
}
});
app.post("/auth/login", async (req) => {
const { email, password } = req.body || {};
if (!email || !password) throw httpError(400, "email and password required");
const res = await app.pg.query(
"select id, email, role, password_hash, created_at, updated_at from users where email=$1",
[email]
);
const row = res.rows[0];
if (!row) throw httpError(401, "invalid credentials");
const ok = await verifyPassword(password, row.password_hash);
if (!ok) throw httpError(401, "invalid credentials");
const user = userRowToDto(row);
const token = await app.jwt.sign({ sub: user.id, role: user.role });
return { token, user };
});
app.get(
"/auth/me",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const res = await app.pg.query(
"select id, email, role, created_at, updated_at from users where id=$1",
[userId]
);
const row = res.rows[0];
if (!row) throw httpError(404, "user not found");
return userRowToDto(row);
}
);
}
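
Note (illustrative, not part of the committed files): driving the auth endpoints above with Node's global fetch. The base URL matches the SERVER_PORT default of 3001; the email and password are illustrative.

const base = "http://127.0.0.1:3001";

const reg = await fetch(`${base}/auth/register`, {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({ email: "demo@example.com", password: "longenough1" })
});
const { token } = await reg.json(); // response also carries { user }

// @fastify/jwt's jwtVerify() reads the standard Authorization: Bearer header.
const me = await fetch(`${base}/auth/me`, {
  headers: { authorization: `Bearer ${token}` }
});
console.log(await me.json()); // { id, email, role, createdAt, updatedAt }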

apps/server/src/routes/bookmarks.routes.js

@@ -0,0 +1,197 @@
import { computeUrlHash, normalizeUrl } from "@browser-bookmark/shared";
import { httpError } from "../lib/httpErrors.js";
import { bookmarkRowToDto } from "../lib/rows.js";
export async function bookmarksRoutes(app) {
app.get("/bookmarks/public", async (req) => {
const q = (req.query?.q || "").trim();
const params = [];
let where = "where visibility='public' and deleted_at is null";
if (q) {
params.push(`%${q}%`);
where += ` and (title ilike $${params.length} or url ilike $${params.length})`;
}
const res = await app.pg.query(
`select * from bookmarks ${where} order by updated_at desc limit 200`,
params
);
return res.rows.map(bookmarkRowToDto);
});
app.get(
"/bookmarks",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const q = (req.query?.q || "").trim();
const params = [userId];
let where = "where user_id=$1 and deleted_at is null";
if (q) {
params.push(`%${q}%`);
where += ` and (title ilike $${params.length} or url ilike $${params.length})`;
}
const res = await app.pg.query(
`select * from bookmarks ${where} order by updated_at desc limit 500`,
params
);
return res.rows.map(bookmarkRowToDto);
}
);
app.post(
"/bookmarks",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const { folderId, title, url, visibility } = req.body || {};
if (!title) throw httpError(400, "title required");
if (!url) throw httpError(400, "url required");
if (!visibility) throw httpError(400, "visibility required");
const urlNormalized = normalizeUrl(url);
const urlHash = computeUrlHash(urlNormalized);
const existing = await app.pg.query(
"select * from bookmarks where user_id=$1 and url_hash=$2 and deleted_at is null limit 1",
[userId, urlHash]
);
if (existing.rows[0]) {
// auto-merge
const merged = await app.pg.query(
`update bookmarks
set title=$1, url=$2, url_normalized=$3, visibility=$4, folder_id=$5, source='manual', updated_at=now()
where id=$6
returning *`,
[title, url, urlNormalized, visibility, folderId ?? null, existing.rows[0].id]
);
return bookmarkRowToDto(merged.rows[0]);
}
const res = await app.pg.query(
`insert into bookmarks (user_id, folder_id, title, url, url_normalized, url_hash, visibility, source)
values ($1, $2, $3, $4, $5, $6, $7, 'manual')
returning *`,
[userId, folderId ?? null, title, url, urlNormalized, urlHash, visibility]
);
return bookmarkRowToDto(res.rows[0]);
}
);
app.patch(
"/bookmarks/:id",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const id = req.params?.id;
const body = req.body || {};
const existingRes = await app.pg.query(
"select * from bookmarks where id=$1 and user_id=$2 and deleted_at is null",
[id, userId]
);
const existing = existingRes.rows[0];
if (!existing) throw httpError(404, "bookmark not found");
const sets = [];
const params = [];
let i = 1;
// url update implies url_normalized + url_hash update
let nextUrl = existing.url;
if (Object.prototype.hasOwnProperty.call(body, "url")) {
nextUrl = String(body.url || "").trim();
if (!nextUrl) throw httpError(400, "url required");
}
let urlNormalized = existing.url_normalized;
let urlHash = existing.url_hash;
const urlChanged = nextUrl !== existing.url;
if (urlChanged) {
urlNormalized = normalizeUrl(nextUrl);
urlHash = computeUrlHash(urlNormalized);
}
if (Object.prototype.hasOwnProperty.call(body, "title")) {
const title = String(body.title || "").trim();
if (!title) throw httpError(400, "title required");
sets.push(`title=$${i++}`);
params.push(title);
}
if (Object.prototype.hasOwnProperty.call(body, "folderId")) {
sets.push(`folder_id=$${i++}`);
params.push(body.folderId ?? null);
}
if (Object.prototype.hasOwnProperty.call(body, "visibility")) {
if (!body.visibility) throw httpError(400, "visibility required");
sets.push(`visibility=$${i++}`);
params.push(body.visibility);
}
if (Object.prototype.hasOwnProperty.call(body, "url")) {
sets.push(`url=$${i++}`);
params.push(nextUrl);
sets.push(`url_normalized=$${i++}`);
params.push(urlNormalized);
sets.push(`url_hash=$${i++}`);
params.push(urlHash);
}
if (sets.length === 0) throw httpError(400, "no fields to update");
// If URL changed and collides with another bookmark, auto-merge by keeping the existing row.
if (urlChanged) {
const dup = await app.pg.query(
"select * from bookmarks where user_id=$1 and url_hash=$2 and deleted_at is null and id<>$3 limit 1",
[userId, urlHash, id]
);
if (dup.rows[0]) {
const targetId = dup.rows[0].id;
const merged = await app.pg.query(
`update bookmarks
set ${sets.join(", ")}, source='manual', updated_at=now()
where id=$${i++} and user_id=$${i}
returning *`,
[...params, targetId, userId]
);
await app.pg.query(
"update bookmarks set deleted_at=now(), updated_at=now() where id=$1 and user_id=$2",
[id, userId]
);
return bookmarkRowToDto(merged.rows[0]);
}
}
params.push(id, userId);
const res = await app.pg.query(
`update bookmarks
set ${sets.join(", ")}, source='manual', updated_at=now()
where id=$${i++} and user_id=$${i}
returning *`,
params
);
return bookmarkRowToDto(res.rows[0]);
}
);
app.delete(
"/bookmarks/:id",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const id = req.params?.id;
const res = await app.pg.query(
"update bookmarks set deleted_at=now(), updated_at=now() where id=$1 and user_id=$2 and deleted_at is null returning *",
[id, userId]
);
if (!res.rows[0]) throw httpError(404, "bookmark not found");
return bookmarkRowToDto(res.rows[0]);
}
);
}
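
Note (illustrative, not part of the committed files): a sketch of the url_hash de-duplication above; creating the "same" URL twice merges into one row instead of erroring. It assumes the account from the auth sketch already exists, and the base URL uses the default port.

const base = "http://127.0.0.1:3001";

const login = await fetch(`${base}/auth/login`, {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({ email: "demo@example.com", password: "longenough1" })
});
const { token } = await login.json();
const headers = { "content-type": "application/json", authorization: `Bearer ${token}` };

const first = await fetch(`${base}/bookmarks`, {
  method: "POST",
  headers,
  body: JSON.stringify({ title: "Node.js", url: "https://nodejs.org/", visibility: "private" })
});
const a = await first.json();

// Same URL again: the route finds the matching url_hash and updates the row in place.
const second = await fetch(`${base}/bookmarks`, {
  method: "POST",
  headers,
  body: JSON.stringify({ title: "Node.js docs", url: "https://nodejs.org/", visibility: "private" })
});
const b = await second.json();
console.log(a.id === b.id); // true: merged, not duplicated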

apps/server/src/routes/folders.routes.js

@@ -0,0 +1,96 @@
import { httpError } from "../lib/httpErrors.js";
import { folderRowToDto } from "../lib/rows.js";
export async function foldersRoutes(app) {
app.get(
"/folders",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const res = await app.pg.query(
"select * from bookmark_folders where user_id=$1 order by name",
[userId]
);
return res.rows.map(folderRowToDto);
}
);
app.post(
"/folders",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const { parentId, name, visibility } = req.body || {};
if (!name) throw httpError(400, "name required");
if (!visibility) throw httpError(400, "visibility required");
const res = await app.pg.query(
`insert into bookmark_folders (user_id, parent_id, name, visibility)
values ($1, $2, $3, $4)
returning *`,
[userId, parentId ?? null, name, visibility]
);
return folderRowToDto(res.rows[0]);
}
);
app.patch(
"/folders/:id",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const id = req.params?.id;
const body = req.body || {};
const existing = await app.pg.query(
"select * from bookmark_folders where id=$1 and user_id=$2",
[id, userId]
);
if (!existing.rows[0]) throw httpError(404, "folder not found");
const sets = [];
const params = [];
let i = 1;
if (Object.prototype.hasOwnProperty.call(body, "parentId")) {
sets.push(`parent_id=$${i++}`);
params.push(body.parentId ?? null);
}
if (Object.prototype.hasOwnProperty.call(body, "name")) {
const name = String(body.name || "").trim();
if (!name) throw httpError(400, "name required");
sets.push(`name=$${i++}`);
params.push(name);
}
if (Object.prototype.hasOwnProperty.call(body, "visibility")) {
if (!body.visibility) throw httpError(400, "visibility required");
sets.push(`visibility=$${i++}`);
params.push(body.visibility);
}
if (sets.length === 0) throw httpError(400, "no fields to update");
params.push(id, userId);
const res = await app.pg.query(
`update bookmark_folders set ${sets.join(", ")}, updated_at=now() where id=$${i++} and user_id=$${i} returning *`,
params
);
return folderRowToDto(res.rows[0]);
}
);
app.delete(
"/folders/:id",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const id = req.params?.id;
const res = await app.pg.query(
"delete from bookmark_folders where id=$1 and user_id=$2 returning id",
[id, userId]
);
if (!res.rows[0]) throw httpError(404, "folder not found");
return { ok: true };
}
);
}

apps/server/src/routes/importExport.routes.js

@@ -0,0 +1,90 @@
import { computeUrlHash, normalizeUrl } from "@browser-bookmark/shared";
import { parseNetscapeBookmarkHtmlNode, buildNetscapeBookmarkHtml } from "../lib/bookmarkHtmlNode.js";
export async function importExportRoutes(app) {
app.post(
"/bookmarks/import/html",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const file = await req.file();
if (!file) return { imported: 0, merged: 0 };
const chunks = [];
for await (const c of file.file) chunks.push(c);
const html = Buffer.concat(chunks).toString("utf8");
const parsed = parseNetscapeBookmarkHtmlNode(html);
// Create folders preserving structure
const tempToDbId = new Map();
for (const f of parsed.folders) {
const parentId = f.parentTempId ? tempToDbId.get(f.parentTempId) : null;
const res = await app.pg.query(
`insert into bookmark_folders (user_id, parent_id, name, visibility)
values ($1, $2, $3, 'private')
returning id`,
[userId, parentId, f.name]
);
tempToDbId.set(f.tempId, res.rows[0].id);
}
let imported = 0;
let merged = 0;
for (const b of parsed.bookmarks) {
const folderId = b.parentTempId ? tempToDbId.get(b.parentTempId) : null;
const urlNormalized = normalizeUrl(b.url);
const urlHash = computeUrlHash(urlNormalized);
const existing = await app.pg.query(
"select id from bookmarks where user_id=$1 and url_hash=$2 and deleted_at is null limit 1",
[userId, urlHash]
);
if (existing.rows[0]) {
await app.pg.query(
`update bookmarks
set title=$1, url=$2, url_normalized=$3, folder_id=$4, source='import', updated_at=now()
where id=$5`,
[b.title || "", b.url || "", urlNormalized, folderId, existing.rows[0].id]
);
merged++;
} else {
await app.pg.query(
`insert into bookmarks (user_id, folder_id, title, url, url_normalized, url_hash, visibility, source)
values ($1, $2, $3, $4, $5, $6, 'private', 'import')`,
[userId, folderId, b.title || "", b.url || "", urlNormalized, urlHash]
);
imported++;
}
}
return { imported, merged };
}
);
app.get(
"/bookmarks/export/html",
{ preHandler: [app.authenticate] },
async (req, reply) => {
const userId = req.user.sub;
const folders = await app.pg.query(
"select id, parent_id, name from bookmark_folders where user_id=$1 order by name",
[userId]
);
const bookmarks = await app.pg.query(
"select folder_id, title, url from bookmarks where user_id=$1 and deleted_at is null order by title",
[userId]
);
const html = buildNetscapeBookmarkHtml({
folders: folders.rows.map((r) => ({ id: r.id, parentId: r.parent_id, name: r.name })),
bookmarks: bookmarks.rows.map((r) => ({ folderId: r.folder_id, title: r.title, url: r.url }))
});
reply.type("text/html; charset=utf-8");
return html;
}
);
}
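
Note (illustrative, not part of the committed files): a sketch of the Netscape HTML import/export round trip using Node's global fetch, FormData and Blob. The TOKEN env var name is only for this sketch and stands in for a JWT obtained from /auth/login; the HTML payload and base URL are illustrative.

const base = "http://127.0.0.1:3001";
const token = process.env.TOKEN; // JWT from /auth/login

const sampleHtml = `<DL><p>
  <DT><A HREF="https://developer.mozilla.org">MDN</A>
</DL><p>
`;
const form = new FormData();
form.append("file", new Blob([sampleHtml], { type: "text/html" }), "bookmarks.html");

const imp = await fetch(`${base}/bookmarks/import/html`, {
  method: "POST",
  headers: { authorization: `Bearer ${token}` },
  body: form
});
console.log(await imp.json()); // e.g. { imported: 1, merged: 0 } on a first run

const exp = await fetch(`${base}/bookmarks/export/html`, {
  headers: { authorization: `Bearer ${token}` }
});
console.log(await exp.text()); // NETSCAPE-Bookmark-file-1 document containing the MDN link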

apps/server/src/routes/sync.routes.js

@@ -0,0 +1,162 @@
import { computeUrlHash, normalizeUrl } from "@browser-bookmark/shared";
function toDate(v) {
if (!v) return null;
const d = new Date(v);
return Number.isNaN(d.getTime()) ? null : d;
}
export async function syncRoutes(app) {
app.post(
"/sync/push",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const { bookmarks = [], folders = [] } = req.body || {};
// folders: upsert by id with LWW
for (const f of folders) {
const incomingUpdatedAt = toDate(f.updatedAt) || new Date();
const existing = await app.pg.query(
"select id, updated_at from bookmark_folders where id=$1 and user_id=$2",
[f.id, userId]
);
if (!existing.rows[0]) {
await app.pg.query(
`insert into bookmark_folders (id, user_id, parent_id, name, visibility, updated_at)
values ($1, $2, $3, $4, $5, $6)`,
[f.id, userId, f.parentId ?? null, f.name || "", f.visibility || "private", incomingUpdatedAt]
);
} else {
const serverUpdatedAt = new Date(existing.rows[0].updated_at);
if (incomingUpdatedAt > serverUpdatedAt) {
await app.pg.query(
`update bookmark_folders
set parent_id=$1, name=$2, visibility=$3, updated_at=$4
where id=$5 and user_id=$6`,
[f.parentId ?? null, f.name || "", f.visibility || "private", incomingUpdatedAt, f.id, userId]
);
}
}
}
// bookmarks: upsert by id with LWW; keep urlHash normalized
for (const b of bookmarks) {
const incomingUpdatedAt = toDate(b.updatedAt) || new Date();
const incomingDeletedAt = toDate(b.deletedAt);
const urlNormalized = normalizeUrl(b.url || "");
const urlHash = computeUrlHash(urlNormalized);
const existing = await app.pg.query(
"select id, updated_at from bookmarks where id=$1 and user_id=$2",
[b.id, userId]
);
if (!existing.rows[0]) {
await app.pg.query(
`insert into bookmarks (
id, user_id, folder_id, title, url, url_normalized, url_hash, visibility, source, updated_at, deleted_at
) values ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11)`,
[
b.id,
userId,
b.folderId ?? null,
b.title || "",
b.url || "",
urlNormalized,
urlHash,
b.visibility || "private",
b.source || "manual",
incomingUpdatedAt,
incomingDeletedAt
]
);
} else {
const serverUpdatedAt = new Date(existing.rows[0].updated_at);
if (incomingUpdatedAt > serverUpdatedAt) {
await app.pg.query(
`update bookmarks
set folder_id=$1, title=$2, url=$3, url_normalized=$4, url_hash=$5, visibility=$6, source=$7, updated_at=$8, deleted_at=$9
where id=$10 and user_id=$11`,
[
b.folderId ?? null,
b.title || "",
b.url || "",
urlNormalized,
urlHash,
b.visibility || "private",
b.source || "manual",
incomingUpdatedAt,
incomingDeletedAt,
b.id,
userId
]
);
}
}
}
return { ok: true };
}
);
app.get(
"/sync/pull",
{ preHandler: [app.authenticate] },
async (req) => {
const userId = req.user.sub;
const since = toDate(req.query?.since);
const paramsFolders = [userId];
let whereFolders = "where user_id=$1";
if (since) {
paramsFolders.push(since);
whereFolders += ` and updated_at > $${paramsFolders.length}`;
}
const paramsBookmarks = [userId];
let whereBookmarks = "where user_id=$1";
if (since) {
paramsBookmarks.push(since);
whereBookmarks += ` and updated_at > $${paramsBookmarks.length}`;
}
const foldersRes = await app.pg.query(
`select id, user_id, parent_id, name, visibility, created_at, updated_at from bookmark_folders ${whereFolders}`,
paramsFolders
);
const bookmarksRes = await app.pg.query(
`select id, user_id, folder_id, title, url, url_normalized, url_hash, visibility, source, updated_at, deleted_at from bookmarks ${whereBookmarks}`,
paramsBookmarks
);
return {
folders: foldersRes.rows.map((r) => ({
id: r.id,
userId: r.user_id,
parentId: r.parent_id,
name: r.name,
visibility: r.visibility,
createdAt: r.created_at,
updatedAt: r.updated_at
})),
bookmarks: bookmarksRes.rows.map((r) => ({
id: r.id,
userId: r.user_id,
folderId: r.folder_id,
title: r.title,
url: r.url,
urlNormalized: r.url_normalized,
urlHash: r.url_hash,
visibility: r.visibility,
source: r.source,
updatedAt: r.updated_at,
deletedAt: r.deleted_at
})),
serverTime: new Date().toISOString()
};
}
);
}
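
Note (illustrative, not part of the committed files): a sketch of the payload shapes the last-write-wins sync endpoints above accept and return. IDs are client-generated UUIDs, a pushed record only wins if its updatedAt is newer than the server's copy, and the TOKEN env var name is only for this sketch.

import { randomUUID } from "node:crypto";

const base = "http://127.0.0.1:3001";
const token = process.env.TOKEN; // JWT from /auth/login
const auth = { authorization: `Bearer ${token}` };

const folderId = randomUUID();
const push = await fetch(`${base}/sync/push`, {
  method: "POST",
  headers: { ...auth, "content-type": "application/json" },
  body: JSON.stringify({
    folders: [
      { id: folderId, parentId: null, name: "Reading list", visibility: "private", updatedAt: new Date().toISOString() }
    ],
    bookmarks: [
      {
        id: randomUUID(),
        folderId,
        title: "Fastify",
        url: "https://fastify.dev",
        visibility: "private",
        source: "manual",
        updatedAt: new Date().toISOString(),
        deletedAt: null
      }
    ]
  })
});
console.log(await push.json()); // { ok: true }

// Incremental pull: only rows whose updated_at is after `since`; serverTime feeds the next cycle.
const since = new Date(Date.now() - 60_000).toISOString();
const pull = await fetch(`${base}/sync/pull?since=${encodeURIComponent(since)}`, { headers: auth });
console.log(await pull.json()); // { folders: [...], bookmarks: [...], serverTime }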