Compare commits

2 Commits

Author SHA1 Message Date
0e582595f7 Merge pull request 'feat: database' (#2) from feature/db into main
Reviewed-on: #2
2025-11-16 10:35:55 +01:00
1417023395 feat: database 2025-11-16 10:33:59 +01:00
14 changed files with 1436 additions and 192 deletions

View File

@@ -1,39 +1,48 @@
# LDAP Configuration
LDAP_ADDRESS=ldaps://ldap-server:636
LDAP_IMPLEMENTATION=custom
LDAP_TIMEOUT=5000
LDAP_START_TLS=false
LDAP_TLS_SERVER_NAME=ldap-server
LDAP_TLS_SKIP_VERIFY=true
LDAP_TLS_MINIMUM_VERSION=TLS1.2
LDAP_BASE_DN=dc=dcentral,dc=systems
LDAP_ADDITIONAL_USERS_DN=cn=users
LDAP_USERS_FILTER=(&({username_attribute}={input}))
LDAP_ADDITIONAL_GROUPS_DN=cn=groups
LDAP_GROUPS_FILTER=(cn=users)
LDAP_USER=uid=root,cn=users,dc=dcentral,dc=systems
LDAP_PASSWORD=super-secret
LDAP_ATTRIBUTE_DISTINGUISHED_NAME=distinguishedName
LDAP_ATTRIBUTE_USERNAME=uid
LDAP_ATTRIBUTE_MAIL=mail
LDAP_ATTRIBUTE_MEMBER_OF=memberOf
LDAP_ATTRIBUTE_GROUP_NAME=cn
# Session Configuration
SESSION_SECRET=your-secret-key-change-this-in-production
# Server Configuration
# Application Configuration
NODE_ENV=production
PORT=3000
# Database Configuration
# Option 1: Use DATABASE_URL (recommended for production)
# DATABASE_URL=postgresql://user:password@host:port/database
# DATABASE_SSL=true
# Option 2: Use individual connection parameters (recommended for development)
DB_HOST=localhost
DB_PORT=5432
DB_NAME=linkding
DB_USER=postgres
DB_PASSWORD=postgres
# Session Configuration
SESSION_SECRET=your-secret-key-change-this-in-production
SESSION_NAME=connect.sid
TRUST_PROXY=true
COOKIE_SECURE=true
COOKIE_SAMESITE=none
COOKIE_DOMAIN=
COOKIE_PATH=/
# Server Configuration
PORT=3000
NODE_ENV=production
# Proxy Configuration (for reverse proxy like Traefik)
TRUST_PROXY=true
# LDAP Authentication Configuration
LDAP_ADDRESS=ldap://ldap.example.com:389
LDAP_BASE_DN=dc=example,dc=com
LDAP_ADDITIONAL_USERS_DN=
LDAP_USER=cn=admin,dc=example,dc=com
LDAP_PASSWORD=admin_password
LDAP_USERS_FILTER=(&(objectClass=person)(uid={{username}}))
LDAP_TIMEOUT=5000
# LDAP Attribute Mapping
LDAP_ATTRIBUTE_USERNAME=uid
LDAP_ATTRIBUTE_MAIL=mail
LDAP_ATTRIBUTE_DISTINGUISHED_NAME=distinguishedName
LDAP_ATTRIBUTE_MEMBER_OF=memberOf
# LDAP TLS Configuration
LDAP_TLS_SKIP_VERIFY=false
LDAP_TLS_SERVER_NAME=
# Chrome/Chromium Configuration (for Puppeteer)
CHROME_EXECUTABLE_PATH=/usr/bin/chromium

23
Makefile Normal file

@@ -0,0 +1,23 @@
.PHONY: dev up down clean

# Start development environment: PostgreSQL and the app
dev:
	@echo "Starting PostgreSQL database..."
	@docker compose -f docker-compose.dev.yaml up -d
	@echo "Waiting for database to be ready..."
	@sleep 3
	@echo "Starting application..."
	@npm start

# Start only the database
up:
	@docker compose -f docker-compose.dev.yaml up -d

# Stop the database
down:
	@docker compose -f docker-compose.dev.yaml down

# Stop and remove volumes (clean slate)
clean:
	@docker compose -f docker-compose.dev.yaml down -v

167
README.md

@@ -7,22 +7,29 @@ LinkDing is a minimal bookmarking application where you can paste links and get
- Paste links and get a list of links with title, description, and image
- Automatic metadata extraction
- Search functionality by title, description, and URL
- Organize links into custom lists
- Archive/unarchive links
- Public and private lists
- Modern, responsive web interface
- Support for JavaScript-heavy sites using Puppeteer
- Automatic fallback from HTTP scraping to browser rendering
- LDAP authentication support
- PostgreSQL database with automatic migrations
## Tech Stack
- **Backend**: Express.js (Node.js)
- **Frontend**: Vanilla JavaScript, HTML5, CSS3
- **Web Scraping**: Cheerio + Puppeteer (for JavaScript-heavy sites)
- **Data Storage**: JSON file
- **Database**: PostgreSQL with Sequelize ORM
- **Authentication**: LDAP (optional)
## Installation
### Prerequisites
- Node.js 18+ (or Docker)
- PostgreSQL 12+ (or Docker)
- Chromium/Chrome (for Puppeteer support, optional)
### Local Installation
@@ -37,49 +44,86 @@ LinkDing is a minimal bookmarking application where you can paste links and get
npm install
```
3. Start the server:
3. Set up environment variables:
```bash
cp .env.example .env
# Edit .env with your database configuration
```
4. Start PostgreSQL database and the application:
```bash
make dev
```
Or manually:
```bash
# Start PostgreSQL (using docker-compose)
docker compose -f docker-compose.dev.yaml up -d
# Start the application
npm start
```
4. Open your browser to `http://localhost:3000`
5. Open your browser to `http://localhost:3000`
**Note**: On first startup, the application will:
- Create database tables automatically
- Migrate any existing JSON files (`data/links.json` and `data/lists.json`) to the database
- Rename migrated JSON files to `*.json.bak`
### Docker Installation
1. Build the Docker image:
1. Set up environment variables:
```bash
docker build -t linkding .
cp .env.example .env
# Edit .env with your database configuration (or use defaults)
```
2. Run the container:
```bash
docker run -d \
--name linkding \
-p 3000:3000 \
-v $(pwd)/data:/app/data \
linkding
```
Or use Docker Compose:
2. Use Docker Compose (recommended):
```bash
docker-compose up -d
```
This will start both PostgreSQL and the LinkDing application.
3. Access the application at `http://localhost:3000`
**Note**: The Docker Compose setup includes:
- PostgreSQL database with persistent volume
- LinkDing application container
- Automatic database initialization and migrations
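Once the stack is up, you can check both services with standard Docker Compose commands (the service names come from `docker-compose.yaml`):
```bash
# Show the postgres and linkding containers and their health status
docker compose ps

# Follow the application logs (migrations run on first start)
docker compose logs -f linkding
```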
## Usage
1. **Add a Link**: Paste a URL into the input field and click "Add Link"
2. **Search**: Use the search bar to filter links by title, description, or URL
3. **View Links**: Browse your saved links with images, titles, and descriptions
4. **Delete Links**: Click the "Delete" button on any link card to remove it
4. **Organize Links**: Create lists and assign links to them
5. **Archive Links**: Archive links to hide them from the main view
6. **Public Lists**: Make lists public to share them with unauthenticated users
7. **Delete Links**: Click the "Delete" button on any link card to remove it
## API Endpoints
- `GET /api/links` - Get all saved links
### Links
- `GET /api/links` - Get all saved links (authenticated users see all links, unauthenticated users see only links in public lists)
- `GET /api/links/search?q=query` - Search links
- `POST /api/links` - Add a new link (body: `{ "url": "https://example.com" }`)
- `DELETE /api/links/:id` - Delete a link by ID
- `POST /api/links` - Add a new link (body: `{ "url": "https://example.com" }`) - Requires authentication
- `PATCH /api/links/:id/archive` - Archive/unarchive a link (body: `{ "archived": true }`) - Requires authentication
- `PATCH /api/links/:id/lists` - Update link's lists (body: `{ "listIds": ["uuid1", "uuid2"] }`) - Requires authentication
- `DELETE /api/links/:id` - Delete a link by ID - Requires authentication
### Lists
- `GET /api/lists` - Get all lists (authenticated users see all lists, unauthenticated users see only public lists)
- `POST /api/lists` - Create a new list (body: `{ "name": "List Name" }`) - Requires authentication
- `PUT /api/lists/:id` - Update a list (body: `{ "name": "New Name" }`) - Requires authentication
- `PATCH /api/lists/:id/public` - Toggle list public status (body: `{ "public": true }`) - Requires authentication
- `DELETE /api/lists/:id` - Delete a list by ID - Requires authentication
### Authentication
- `GET /api/auth/status` - Check authentication status
- `POST /api/auth/login` - Login with LDAP credentials (body: `{ "username": "user", "password": "pass" }`)
- `POST /api/auth/logout` - Logout
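The endpoints above can be exercised with `curl`, for example (hypothetical credentials; the session cookie is kept in `cookies.txt`):
```bash
# Log in with LDAP credentials and save the session cookie
curl -c cookies.txt -H 'Content-Type: application/json' \
  -d '{"username": "alice", "password": "secret"}' \
  http://localhost:3000/api/auth/login

# Add a link (metadata is extracted automatically)
curl -b cookies.txt -H 'Content-Type: application/json' \
  -d '{"url": "https://example.com"}' \
  http://localhost:3000/api/links

# Search links by title, description, or URL (works unauthenticated for public lists)
curl 'http://localhost:3000/api/links/search?q=example'
```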
## Metadata Extraction
@@ -100,13 +144,63 @@ The application automatically extracts:
## Environment Variables
See `.env.example` for a complete list of environment variables. Key variables include:
### Application
- `PORT` - Server port (default: 3000)
- `CHROME_EXECUTABLE_PATH` - Path to Chrome/Chromium executable (for Puppeteer)
- `NODE_ENV` - Environment mode (production/development)
## Data Storage
### Database
- `DATABASE_URL` - Full PostgreSQL connection string (e.g., `postgresql://user:password@host:port/database`)
- `DATABASE_SSL` - Enable SSL for database connection (true/false)
- `DB_HOST` - Database host (default: localhost)
- `DB_PORT` - Database port (default: 5432)
- `DB_NAME` - Database name (default: linkding)
- `DB_USER` - Database user (default: postgres)
- `DB_PASSWORD` - Database password (default: postgres)
Links are stored in `data/links.json`. Make sure this directory exists and is writable. When using Docker, mount the `data` directory as a volume for persistence.
### Session & Cookies
- `SESSION_SECRET` - Secret key for session encryption (change in production!)
- `SESSION_NAME` - Session cookie name (default: connect.sid)
- `COOKIE_SECURE` - Use secure cookies (default: true in production)
- `COOKIE_SAMESITE` - Cookie SameSite attribute (default: `none` when secure cookies are enabled, `lax` otherwise)
- `COOKIE_DOMAIN` - Cookie domain (optional)
- `COOKIE_PATH` - Cookie path (default: /)
- `TRUST_PROXY` - Trust proxy headers (default: true)
### LDAP Authentication
- `LDAP_ADDRESS` - LDAP server address
- `LDAP_BASE_DN` - LDAP base distinguished name
- `LDAP_USER` - LDAP bind user
- `LDAP_PASSWORD` - LDAP bind password
- `LDAP_USERS_FILTER` - LDAP user search filter
- And more... (see `.env.example`)
### Puppeteer
- `CHROME_EXECUTABLE_PATH` - Path to Chrome/Chromium executable (for Puppeteer)
## Database
LinkDing uses PostgreSQL for data storage. The application automatically:
- **Creates tables** on first startup
- **Runs migrations** to keep the schema up to date
- **Migrates JSON files** if `data/links.json` or `data/lists.json` exist, then renames them to `*.json.bak`
### Migration System
The application includes a migration system for database schema changes:
- Migrations are stored in `migrations/` directory
- Migrations are automatically run on startup
- Each migration is tracked in the `SequelizeMeta` table
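A migration is a plain Node module that exports `up` and `down` functions receiving the Sequelize query interface, in the same shape as the bundled initial schema migration. A minimal sketch (hypothetical file name and column):
```js
// migrations/002-add-notes-to-links.js (hypothetical example)
module.exports = {
  up: async (queryInterface, Sequelize) => {
    // Add an optional free-text column to the links table
    await queryInterface.addColumn('links', 'notes', {
      type: Sequelize.TEXT,
      allowNull: true
    });
  },
  down: async (queryInterface) => {
    await queryInterface.removeColumn('links', 'notes');
  }
};
```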
### Data Migration
If you have existing JSON files:
1. Place `links.json` and `lists.json` in the `data/` directory
2. Start the application
3. The files will be automatically migrated to PostgreSQL
4. Original files will be renamed to `links.json.bak` and `lists.json.bak`
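For reference, a `links.json` file the importer understands looks roughly like this (field names match what the migration code reads; values are placeholders):
```json
[
  {
    "id": "1700000000000",
    "url": "https://example.com",
    "title": "Example Domain",
    "description": "An example page",
    "image": "https://example.com/preview.png",
    "createdAt": "2025-11-16T10:00:00.000Z",
    "archived": false,
    "listIds": ["1700000000001"]
  }
]
```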
## Troubleshooting
@@ -127,10 +221,31 @@ Some sites block automated requests. The app automatically:
## Development
### Using Make (Recommended)
```bash
# Start PostgreSQL and the application
make dev
# Start only PostgreSQL
make up
# Stop PostgreSQL
make down
# Stop and remove volumes (clean slate)
make clean
```
### Manual Development Setup
```bash
# Install dependencies
npm install
# Start PostgreSQL (using docker-compose)
docker compose -f docker-compose.dev.yaml up -d
# Run in development mode with auto-reload
npm run dev
@@ -138,6 +253,14 @@ npm run dev
npm start
```
### Database Management
The application uses Sequelize ORM with PostgreSQL. Database migrations are automatically run on startup. To manually manage the database:
- Connect to PostgreSQL: `psql -h localhost -U postgres -d linkding`
- Check migrations: Query the `SequelizeMeta` table
- View tables: `\dt` in psql
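For example, to see which migrations have been applied (the table name matches `migrations/runner.js`):
```bash
psql -h localhost -U postgres -d linkding \
  -c 'SELECT name FROM "SequelizeMeta" ORDER BY name;'
```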
## License
ISC

23
docker-compose.dev.yaml Normal file

@@ -0,0 +1,23 @@
version: '3.8'

services:
  postgres:
    image: postgres:16-alpine
    container_name: linkding-postgres-dev
    ports:
      - "5432:5432"
    environment:
      - POSTGRES_DB=linkding
      - POSTGRES_USER=postgres
      - POSTGRES_PASSWORD=postgres
    volumes:
      - postgres-dev-data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 5s
      timeout: 3s
      retries: 5

volumes:
  postgres-dev-data:

View File

@@ -1,6 +1,22 @@
version: '3.8'

services:
  postgres:
    image: postgres:16-alpine
    container_name: linkding-postgres
    environment:
      - POSTGRES_DB=${DB_NAME:-linkding}
      - POSTGRES_USER=${DB_USER:-postgres}
      - POSTGRES_PASSWORD=${DB_PASSWORD:-postgres}
    volumes:
      - postgres-data:/var/lib/postgresql/data
    restart: unless-stopped
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-postgres}"]
      interval: 10s
      timeout: 3s
      retries: 5

  linkding:
    build: .
    container_name: linkding
@@ -13,6 +29,14 @@ services:
      - NODE_ENV=production
      - PORT=3000
      - CHROME_EXECUTABLE_PATH=/usr/bin/chromium
      - DB_HOST=postgres
      - DB_PORT=5432
      - DB_NAME=${DB_NAME:-linkding}
      - DB_USER=${DB_USER:-postgres}
      - DB_PASSWORD=${DB_PASSWORD:-postgres}
    depends_on:
      postgres:
        condition: service_healthy
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "node", "-e", "require('http').get('http://localhost:3000/api/links', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"]
@@ -21,3 +45,6 @@ services:
      retries: 3
      start_period: 5s

volumes:
  postgres-data:

View File

@@ -0,0 +1,136 @@
/**
* Initial database schema migration
* Creates links, lists, and link_lists tables
*/
module.exports = {
up: async (queryInterface, Sequelize) => {
// Create links table
await queryInterface.createTable('links', {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true
},
url: {
type: Sequelize.TEXT,
allowNull: false,
unique: true
},
title: {
type: Sequelize.TEXT,
allowNull: true
},
description: {
type: Sequelize.TEXT,
allowNull: true
},
image: {
type: Sequelize.TEXT,
allowNull: true
},
created_at: {
type: Sequelize.DATE,
allowNull: false,
defaultValue: Sequelize.literal('CURRENT_TIMESTAMP')
},
created_by: {
type: Sequelize.TEXT,
allowNull: true
},
modified_at: {
type: Sequelize.DATE,
allowNull: true
},
modified_by: {
type: Sequelize.TEXT,
allowNull: true
},
archived: {
type: Sequelize.BOOLEAN,
defaultValue: false,
allowNull: false
}
});
// Create lists table
await queryInterface.createTable('lists', {
id: {
type: Sequelize.UUID,
defaultValue: Sequelize.UUIDV4,
primaryKey: true
},
name: {
type: Sequelize.TEXT,
allowNull: false
},
created_at: {
type: Sequelize.DATE,
allowNull: false,
defaultValue: Sequelize.literal('CURRENT_TIMESTAMP')
},
created_by: {
type: Sequelize.TEXT,
allowNull: true
},
modified_at: {
type: Sequelize.DATE,
allowNull: true
},
modified_by: {
type: Sequelize.TEXT,
allowNull: true
},
public: {
type: Sequelize.BOOLEAN,
defaultValue: false,
allowNull: false
}
});
// Create link_lists junction table
await queryInterface.createTable('link_lists', {
link_id: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: 'links',
key: 'id'
},
onUpdate: 'CASCADE',
onDelete: 'CASCADE'
},
list_id: {
type: Sequelize.UUID,
allowNull: false,
references: {
model: 'lists',
key: 'id'
},
onUpdate: 'CASCADE',
onDelete: 'CASCADE'
}
});
// Add composite primary key
await queryInterface.addConstraint('link_lists', {
fields: ['link_id', 'list_id'],
type: 'primary key',
name: 'link_lists_pkey'
});
// Create indexes for better performance
await queryInterface.addIndex('links', ['url'], { unique: true });
await queryInterface.addIndex('links', ['created_at']);
await queryInterface.addIndex('links', ['archived']);
await queryInterface.addIndex('lists', ['name']);
await queryInterface.addIndex('link_lists', ['link_id']);
await queryInterface.addIndex('link_lists', ['list_id']);
},
down: async (queryInterface, Sequelize) => {
await queryInterface.dropTable('link_lists');
await queryInterface.dropTable('lists');
await queryInterface.dropTable('links');
}
};

104
migrations/runner.js Normal file

@@ -0,0 +1,104 @@
const { QueryInterface } = require('sequelize');
const fs = require('fs').promises;
const path = require('path');
/**
* Simple migration runner for Sequelize
* Checks which migrations have been run and executes pending ones
*/
class MigrationRunner {
constructor(sequelize) {
this.sequelize = sequelize;
this.migrationsPath = path.join(__dirname);
}
async ensureMigrationsTable() {
const queryInterface = this.sequelize.getQueryInterface();
// Check if migrations table exists
const [results] = await this.sequelize.query(`
SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = 'public'
AND table_name = 'SequelizeMeta'
);
`);
if (!results[0].exists) {
// Create migrations table
await queryInterface.createTable('SequelizeMeta', {
name: {
type: require('sequelize').DataTypes.STRING,
allowNull: false,
primaryKey: true
}
});
}
}
async getExecutedMigrations() {
await this.ensureMigrationsTable();
const [results] = await this.sequelize.query(
'SELECT name FROM "SequelizeMeta" ORDER BY name'
);
return results.map(row => row.name);
}
async getAllMigrations() {
const files = await fs.readdir(this.migrationsPath);
return files
.filter(file => file.endsWith('.js') && file !== 'runner.js')
.sort();
}
async runMigrations() {
const executed = await this.getExecutedMigrations();
const allMigrations = await this.getAllMigrations();
const pending = allMigrations.filter(m => !executed.includes(m));
if (pending.length === 0) {
console.log('No pending migrations');
return;
}
console.log(`Running ${pending.length} pending migration(s)...`);
for (const migrationFile of pending) {
const migration = require(path.join(this.migrationsPath, migrationFile));
if (!migration.up || typeof migration.up !== 'function') {
throw new Error(`Migration ${migrationFile} does not export an 'up' function`);
}
const queryInterface = this.sequelize.getQueryInterface();
const transaction = await this.sequelize.transaction();
try {
console.log(`Running migration: ${migrationFile}`);
await migration.up(queryInterface, this.sequelize.constructor);
// Record migration as executed
await this.sequelize.query(
`INSERT INTO "SequelizeMeta" (name) VALUES (:name)`,
{
replacements: { name: migrationFile },
transaction
}
);
await transaction.commit();
console.log(`Completed migration: ${migrationFile}`);
} catch (error) {
await transaction.rollback();
throw new Error(`Migration ${migrationFile} failed: ${error.message}`);
}
}
console.log('All migrations completed successfully');
}
}
module.exports = MigrationRunner;

66
models/Link.js Normal file

@@ -0,0 +1,66 @@
const { DataTypes } = require('sequelize');
const { v4: uuidv4 } = require('uuid');
module.exports = (sequelize) => {
const Link = sequelize.define('Link', {
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
url: {
type: DataTypes.TEXT,
allowNull: false,
unique: true,
validate: {
isUrl: true
}
},
title: {
type: DataTypes.TEXT,
allowNull: true
},
description: {
type: DataTypes.TEXT,
allowNull: true
},
image: {
type: DataTypes.TEXT,
allowNull: true
},
created_at: {
type: DataTypes.DATE,
defaultValue: DataTypes.NOW,
allowNull: false
},
created_by: {
type: DataTypes.TEXT,
allowNull: true
},
modified_at: {
type: DataTypes.DATE,
allowNull: true
},
modified_by: {
type: DataTypes.TEXT,
allowNull: true
},
archived: {
type: DataTypes.BOOLEAN,
defaultValue: false,
allowNull: false
}
}, {
tableName: 'links',
timestamps: false, // We're using created_at and modified_at manually
underscored: true
});
// Hook to set modified_at before update
Link.beforeUpdate((link) => {
link.modified_at = new Date();
});
return Link;
};

33
models/LinkList.js Normal file

@@ -0,0 +1,33 @@
const { DataTypes } = require('sequelize');
module.exports = (sequelize) => {
const LinkList = sequelize.define('LinkList', {
link_id: {
type: DataTypes.UUID,
primaryKey: true,
references: {
model: 'links',
key: 'id'
},
onDelete: 'CASCADE',
onUpdate: 'CASCADE'
},
list_id: {
type: DataTypes.UUID,
primaryKey: true,
references: {
model: 'lists',
key: 'id'
},
onDelete: 'CASCADE',
onUpdate: 'CASCADE'
}
}, {
tableName: 'link_lists',
timestamps: false, // No timestamps on junction table
underscored: true
});
return LinkList;
};

53
models/List.js Normal file

@@ -0,0 +1,53 @@
const { DataTypes } = require('sequelize');
const { v4: uuidv4 } = require('uuid');
module.exports = (sequelize) => {
const List = sequelize.define('List', {
id: {
type: DataTypes.UUID,
defaultValue: DataTypes.UUIDV4,
primaryKey: true
},
name: {
type: DataTypes.TEXT,
allowNull: false,
validate: {
notEmpty: true
}
},
created_at: {
type: DataTypes.DATE,
defaultValue: DataTypes.NOW,
allowNull: false
},
created_by: {
type: DataTypes.TEXT,
allowNull: true
},
modified_at: {
type: DataTypes.DATE,
allowNull: true
},
modified_by: {
type: DataTypes.TEXT,
allowNull: true
},
public: {
type: DataTypes.BOOLEAN,
defaultValue: false,
allowNull: false
}
}, {
tableName: 'lists',
timestamps: false, // We're using created_at and modified_at manually
underscored: true
});
// Hook to set modified_at before update
List.beforeUpdate((list) => {
list.modified_at = new Date();
});
return List;
};

73
models/index.js Normal file

@@ -0,0 +1,73 @@
const { Sequelize } = require('sequelize');
const Link = require('./Link');
const List = require('./List');
const LinkList = require('./LinkList');
// Database connection configuration
const getDatabaseConfig = () => {
// Support DATABASE_URL or individual connection parameters
if (process.env.DATABASE_URL) {
return {
url: process.env.DATABASE_URL,
dialect: 'postgres',
dialectOptions: {
ssl: process.env.DATABASE_SSL === 'true' ? {
require: true,
rejectUnauthorized: false
} : false
}
};
}
return {
host: process.env.DB_HOST || 'localhost',
port: process.env.DB_PORT || 5432,
database: process.env.DB_NAME || 'linkding',
username: process.env.DB_USER || 'postgres',
password: process.env.DB_PASSWORD || 'postgres',
dialect: 'postgres',
logging: process.env.NODE_ENV === 'development' ? console.log : false
};
};
// Initialize Sequelize
const config = getDatabaseConfig();
const sequelize = config.url
? new Sequelize(config.url, {
dialect: 'postgres',
dialectOptions: config.dialectOptions,
logging: config.logging
})
: new Sequelize(config.database, config.username, config.password, {
host: config.host,
port: config.port,
dialect: config.dialect,
logging: config.logging
});
// Initialize models
const db = {
sequelize,
Sequelize,
Link: Link(sequelize),
List: List(sequelize),
LinkList: LinkList(sequelize)
};
// Set up associations
db.Link.belongsToMany(db.List, {
through: db.LinkList,
foreignKey: 'link_id',
otherKey: 'list_id',
as: 'lists'
});
db.List.belongsToMany(db.Link, {
through: db.LinkList,
foreignKey: 'list_id',
otherKey: 'link_id',
as: 'links'
});
module.exports = db;

379
package-lock.json generated

@@ -17,7 +17,11 @@
"express-session": "^1.18.2",
"passport": "^0.7.0",
"passport-ldapauth": "^3.0.1",
"puppeteer-core": "^22.15.0"
"pg": "^8.11.3",
"pg-hstore": "^2.3.4",
"puppeteer-core": "^22.15.0",
"sequelize": "^6.35.2",
"uuid": "^9.0.1"
},
"devDependencies": {
"nodemon": "^3.0.1"
@@ -29,6 +33,15 @@
"integrity": "sha512-C5Mc6rdnsaJDjO3UpGW/CQTHtCKaYlScZTly4JIu97Jxo/odCiH0ITnDXSJPTOrEKk/ycSZ0AOgTmkDtkOsvIA==",
"license": "MIT"
},
"node_modules/@types/debug": {
"version": "4.1.12",
"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
"integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
"license": "MIT",
"dependencies": {
"@types/ms": "*"
}
},
"node_modules/@types/ldapjs": {
"version": "2.2.5",
"resolved": "https://registry.npmjs.org/@types/ldapjs/-/ldapjs-2.2.5.tgz",
@@ -38,6 +51,12 @@
"@types/node": "*"
}
},
"node_modules/@types/ms": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
"integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
"license": "MIT"
},
"node_modules/@types/node": {
"version": "24.10.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.0.tgz",
@@ -47,6 +66,12 @@
"undici-types": "~7.16.0"
}
},
"node_modules/@types/validator": {
"version": "13.15.9",
"resolved": "https://registry.npmjs.org/@types/validator/-/validator-13.15.9.tgz",
"integrity": "sha512-9ENIuq9PUX45M1QRtfJDprgfErED4fBiMPmjlPci4W9WiBelVtHYCjF3xkQNcSnmUeuruLS1kH6hSl5M1vz4Sw==",
"license": "MIT"
},
"node_modules/@types/yauzl": {
"version": "2.10.3",
"resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz",
@@ -833,6 +858,12 @@
"url": "https://dotenvx.com"
}
},
"node_modules/dottie": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/dottie/-/dottie-2.0.6.tgz",
"integrity": "sha512-iGCHkfUc5kFekGiqhe8B/mdaurD+lakO9txNnTvKtA6PISrw86LgqHvRzWYPyoE2Ph5aMIrCw9/uko6XHTKCwA==",
"license": "MIT"
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
@@ -1618,6 +1649,15 @@
"dev": true,
"license": "ISC"
},
"node_modules/inflection": {
"version": "1.13.4",
"resolved": "https://registry.npmjs.org/inflection/-/inflection-1.13.4.tgz",
"integrity": "sha512-6I/HUDeYFfuNCVS3td055BaXBwKYuzw7K3ExVMStBowKo9oOAMJIXIHvdyR3iboTCp1b+1i5DSkIZTcwIktuDw==",
"engines": [
"node >= 0.4.0"
],
"license": "MIT"
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
@@ -1744,6 +1784,12 @@
"node": ">=10.13.0"
}
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
"license": "MIT"
},
"node_modules/lru-cache": {
"version": "7.18.3",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz",
@@ -1841,6 +1887,27 @@
"integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==",
"license": "MIT"
},
"node_modules/moment": {
"version": "2.30.1",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz",
"integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==",
"license": "MIT",
"engines": {
"node": "*"
}
},
"node_modules/moment-timezone": {
"version": "0.5.48",
"resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.48.tgz",
"integrity": "sha512-f22b8LV1gbTO2ms2j2z13MuPogNoh5UzxL3nzNAYKGraILnbGc9NEE6dyiiiLv46DGRb8A4kg8UKWLjPthxBHw==",
"license": "MIT",
"dependencies": {
"moment": "^2.29.4"
},
"engines": {
"node": "*"
}
},
"node_modules/ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
@@ -2161,6 +2228,107 @@
"integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
"license": "MIT"
},
"node_modules/pg": {
"version": "8.16.3",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz",
"integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==",
"license": "MIT",
"dependencies": {
"pg-connection-string": "^2.9.1",
"pg-pool": "^3.10.1",
"pg-protocol": "^1.10.3",
"pg-types": "2.2.0",
"pgpass": "1.0.5"
},
"engines": {
"node": ">= 16.0.0"
},
"optionalDependencies": {
"pg-cloudflare": "^1.2.7"
},
"peerDependencies": {
"pg-native": ">=3.0.1"
},
"peerDependenciesMeta": {
"pg-native": {
"optional": true
}
}
},
"node_modules/pg-cloudflare": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.2.7.tgz",
"integrity": "sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg==",
"license": "MIT",
"optional": true
},
"node_modules/pg-connection-string": {
"version": "2.9.1",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.9.1.tgz",
"integrity": "sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==",
"license": "MIT"
},
"node_modules/pg-hstore": {
"version": "2.3.4",
"resolved": "https://registry.npmjs.org/pg-hstore/-/pg-hstore-2.3.4.tgz",
"integrity": "sha512-N3SGs/Rf+xA1M2/n0JBiXFDVMzdekwLZLAO0g7mpDY9ouX+fDI7jS6kTq3JujmYbtNSJ53TJ0q4G98KVZSM4EA==",
"license": "MIT",
"dependencies": {
"underscore": "^1.13.1"
},
"engines": {
"node": ">= 0.8.x"
}
},
"node_modules/pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"license": "ISC",
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/pg-pool": {
"version": "3.10.1",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.10.1.tgz",
"integrity": "sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg==",
"license": "MIT",
"peerDependencies": {
"pg": ">=8.0"
}
},
"node_modules/pg-protocol": {
"version": "1.10.3",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.3.tgz",
"integrity": "sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ==",
"license": "MIT"
},
"node_modules/pg-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
"license": "MIT",
"dependencies": {
"pg-int8": "1.0.1",
"postgres-array": "~2.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.4",
"postgres-interval": "^1.1.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/pgpass": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
"license": "MIT",
"dependencies": {
"split2": "^4.1.0"
}
},
"node_modules/picomatch": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
@@ -2174,6 +2342,45 @@
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/postgres-array": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-date": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-interval": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
"license": "MIT",
"dependencies": {
"xtend": "^4.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/precond": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/precond/-/precond-0.2.3.tgz",
@@ -2403,6 +2610,12 @@
"node": ">=0.10.0"
}
},
"node_modules/retry-as-promised": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/retry-as-promised/-/retry-as-promised-7.1.1.tgz",
"integrity": "sha512-hMD7odLOt3LkTjcif8aRZqi/hybjpLNgSk5oF5FCowfCjok6LukpN2bDX7R5wDmbgBQFn7YoBxSagmtXHaJYJw==",
"license": "MIT"
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@@ -2480,6 +2693,109 @@
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"license": "MIT"
},
"node_modules/sequelize": {
"version": "6.37.7",
"resolved": "https://registry.npmjs.org/sequelize/-/sequelize-6.37.7.tgz",
"integrity": "sha512-mCnh83zuz7kQxxJirtFD7q6Huy6liPanI67BSlbzSYgVNl5eXVdE2CN1FuAeZwG1SNpGsNRCV+bJAVVnykZAFA==",
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/sequelize"
}
],
"license": "MIT",
"dependencies": {
"@types/debug": "^4.1.8",
"@types/validator": "^13.7.17",
"debug": "^4.3.4",
"dottie": "^2.0.6",
"inflection": "^1.13.4",
"lodash": "^4.17.21",
"moment": "^2.29.4",
"moment-timezone": "^0.5.43",
"pg-connection-string": "^2.6.1",
"retry-as-promised": "^7.0.4",
"semver": "^7.5.4",
"sequelize-pool": "^7.1.0",
"toposort-class": "^1.0.1",
"uuid": "^8.3.2",
"validator": "^13.9.0",
"wkx": "^0.5.0"
},
"engines": {
"node": ">=10.0.0"
},
"peerDependenciesMeta": {
"ibm_db": {
"optional": true
},
"mariadb": {
"optional": true
},
"mysql2": {
"optional": true
},
"oracledb": {
"optional": true
},
"pg": {
"optional": true
},
"pg-hstore": {
"optional": true
},
"snowflake-sdk": {
"optional": true
},
"sqlite3": {
"optional": true
},
"tedious": {
"optional": true
}
}
},
"node_modules/sequelize-pool": {
"version": "7.1.0",
"resolved": "https://registry.npmjs.org/sequelize-pool/-/sequelize-pool-7.1.0.tgz",
"integrity": "sha512-G9c0qlIWQSK29pR/5U2JF5dDQeqqHRragoyahj/Nx4KOOQ3CPPfzxnfqFPCSB7x5UgjOgnZ61nSxz+fjDpRlJg==",
"license": "MIT",
"engines": {
"node": ">= 10.0.0"
}
},
"node_modules/sequelize/node_modules/debug": {
"version": "4.4.3",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/sequelize/node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"license": "MIT"
},
"node_modules/sequelize/node_modules/uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
"license": "MIT",
"bin": {
"uuid": "dist/bin/uuid"
}
},
"node_modules/serve-static": {
"version": "1.16.2",
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz",
@@ -2657,6 +2973,15 @@
"node": ">=0.10.0"
}
},
"node_modules/split2": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
"integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
"license": "ISC",
"engines": {
"node": ">= 10.x"
}
},
"node_modules/statuses": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
@@ -2764,6 +3089,12 @@
"node": ">=0.6"
}
},
"node_modules/toposort-class": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toposort-class/-/toposort-class-1.0.1.tgz",
"integrity": "sha512-OsLcGGbYF3rMjPUf8oKktyvCiUxSbqMMS39m33MAjLTC1DVIH6x3WSt63/M77ihI09+Sdfk1AXvfhCEeUmC7mg==",
"license": "MIT"
},
"node_modules/touch": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/touch/-/touch-3.1.1.tgz",
@@ -2822,6 +3153,12 @@
"dev": true,
"license": "MIT"
},
"node_modules/underscore": {
"version": "1.13.7",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.7.tgz",
"integrity": "sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g==",
"license": "MIT"
},
"node_modules/undici": {
"version": "7.16.0",
"resolved": "https://registry.npmjs.org/undici/-/undici-7.16.0.tgz",
@@ -2861,6 +3198,28 @@
"node": ">= 0.4.0"
}
},
"node_modules/uuid": {
"version": "9.0.1",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz",
"integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==",
"funding": [
"https://github.com/sponsors/broofa",
"https://github.com/sponsors/ctavan"
],
"license": "MIT",
"bin": {
"uuid": "dist/bin/uuid"
}
},
"node_modules/validator": {
"version": "13.15.23",
"resolved": "https://registry.npmjs.org/validator/-/validator-13.15.23.tgz",
"integrity": "sha512-4yoz1kEWqUjzi5zsPbAS/903QXSYp0UOtHsPpp7p9rHAw/W+dkInskAE386Fat3oKRROwO98d9ZB0G4cObgUyw==",
"license": "MIT",
"engines": {
"node": ">= 0.10"
}
},
"node_modules/vary": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
@@ -2931,6 +3290,15 @@
"node": ">=18"
}
},
"node_modules/wkx": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/wkx/-/wkx-0.5.0.tgz",
"integrity": "sha512-Xng/d4Ichh8uN4l0FToV/258EjMGU9MGcA0HV2d9B/ZpZB3lqQm7nkOdZdm5GhKtLLhAE7PiVQwN4eN+2YJJUg==",
"license": "MIT",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
@@ -2975,6 +3343,15 @@
}
}
},
"node_modules/xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
"license": "MIT",
"engines": {
"node": ">=0.4"
}
},
"node_modules/y18n": {
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",

View File

@@ -23,7 +23,11 @@
"express-session": "^1.18.2",
"passport": "^0.7.0",
"passport-ldapauth": "^3.0.1",
"puppeteer-core": "^22.15.0"
"pg": "^8.11.3",
"pg-hstore": "^2.3.4",
"puppeteer-core": "^22.15.0",
"sequelize": "^6.35.2",
"uuid": "^9.0.1"
},
"devDependencies": {
"nodemon": "^3.0.1"

463
server.js

@@ -8,6 +8,10 @@ const fs = require('fs').promises;
const path = require('path');
const axios = require('axios');
const cheerio = require('cheerio');
const { Op } = require('sequelize');
const db = require('./models');
const MigrationRunner = require('./migrations/runner');
const { v4: uuidv4 } = require('uuid');
// Lazy load puppeteer (only if needed)
let puppeteer = null;
@@ -190,54 +194,168 @@ function isAuthenticated(req, res, next) {
res.status(401).json({ error: 'Authentication required' });
}
// Ensure data directory exists
async function ensureDataDir() {
const dataDir = path.dirname(DATA_FILE);
// Database initialization and migration
async function initializeDatabase() {
try {
await fs.access(dataDir);
} catch {
await fs.mkdir(dataDir, { recursive: true });
}
try {
await fs.access(DATA_FILE);
} catch {
await fs.writeFile(DATA_FILE, JSON.stringify([]));
}
try {
await fs.access(LISTS_FILE);
} catch {
await fs.writeFile(LISTS_FILE, JSON.stringify([]));
}
}
// Test database connection
await db.sequelize.authenticate();
console.log('Database connection established successfully.');
// Read links from file
async function readLinks() {
try {
const data = await fs.readFile(DATA_FILE, 'utf8');
return JSON.parse(data);
// Run migrations
const migrationRunner = new MigrationRunner(db.sequelize);
await migrationRunner.runMigrations();
// Migrate JSON files if they exist
await migrateJsonFiles();
console.log('Database initialization completed.');
} catch (error) {
return [];
console.error('Database initialization failed:', error);
throw error;
}
}
// Write links to file
async function writeLinks(links) {
await fs.writeFile(DATA_FILE, JSON.stringify(links, null, 2));
}
// Migrate JSON files to database
async function migrateJsonFiles() {
const linksFile = DATA_FILE;
const listsFile = LISTS_FILE;
const linksBackup = linksFile + '.bak';
const listsBackup = listsFile + '.bak';
// Check if files have already been migrated
let linksAlreadyMigrated = false;
let listsAlreadyMigrated = false;
// Read lists from file
async function readLists() {
try {
const data = await fs.readFile(LISTS_FILE, 'utf8');
return JSON.parse(data);
await fs.access(linksBackup);
linksAlreadyMigrated = true;
} catch {
// Not migrated yet
}
try {
await fs.access(listsBackup);
listsAlreadyMigrated = true;
} catch {
// Not migrated yet
}
// Step 1: Migrate lists first (so we can create relationships)
const listIdMap = new Map(); // Map old ID -> new UUID
if (!listsAlreadyMigrated) {
try {
await fs.access(listsFile);
const listsData = JSON.parse(await fs.readFile(listsFile, 'utf8'));
if (Array.isArray(listsData) && listsData.length > 0) {
console.log(`Migrating ${listsData.length} lists from JSON file...`);
for (const list of listsData) {
const newId = uuidv4();
listIdMap.set(list.id, newId);
await db.List.create({
id: newId,
name: list.name,
created_at: list.createdAt ? new Date(list.createdAt) : new Date(),
created_by: null, // No user info in JSON
public: list.public || false
});
}
// Rename file to backup
await fs.rename(listsFile, listsBackup);
console.log('Lists migration completed.');
}
} catch (error) {
return [];
if (error.code !== 'ENOENT') {
console.error('Error migrating lists:', error);
}
}
}
// Step 2: Migrate links and set up relationships
if (!linksAlreadyMigrated) {
try {
await fs.access(linksFile);
const linksData = JSON.parse(await fs.readFile(linksFile, 'utf8'));
if (Array.isArray(linksData) && linksData.length > 0) {
console.log(`Migrating ${linksData.length} links from JSON file...`);
for (const link of linksData) {
// Create link
const linkRecord = await db.Link.create({
id: uuidv4(),
url: link.url,
title: link.title || null,
description: link.description || null,
image: link.image || null,
created_at: link.createdAt ? new Date(link.createdAt) : new Date(),
created_by: null, // No user info in JSON
archived: link.archived || false
});
// Create relationships if listIds exist
if (link.listIds && Array.isArray(link.listIds) && link.listIds.length > 0) {
const listRecords = [];
for (const oldListId of link.listIds) {
const newListId = listIdMap.get(oldListId);
if (newListId) {
const listRecord = await db.List.findByPk(newListId);
if (listRecord) {
listRecords.push(listRecord);
}
}
}
if (listRecords.length > 0) {
await linkRecord.setLists(listRecords);
}
}
}
// Rename file to backup
await fs.rename(linksFile, linksBackup);
console.log('Links migration completed.');
}
} catch (error) {
if (error.code !== 'ENOENT') {
console.error('Error migrating links:', error);
}
}
}
}
// Write lists to file
async function writeLists(lists) {
await fs.writeFile(LISTS_FILE, JSON.stringify(lists, null, 2));
// Helper function to format link for API response
function formatLink(link) {
const formatted = {
id: link.id,
url: link.url,
title: link.title,
description: link.description,
image: link.image,
createdAt: link.created_at,
createdBy: link.created_by,
modifiedAt: link.modified_at,
modifiedBy: link.modified_by,
archived: link.archived || false,
listIds: link.lists ? link.lists.map(list => list.id) : []
};
return formatted;
}
// Helper function to format list for API response
function formatList(list) {
return {
id: list.id,
name: list.name,
createdAt: list.created_at,
createdBy: list.created_by,
modifiedAt: list.modified_at,
modifiedBy: list.modified_by,
public: list.public || false
};
}
// Extract metadata using Puppeteer (for JavaScript-heavy sites)
@@ -755,25 +873,42 @@ app.post('/api/auth/logout', (req, res) => {
// Get all links
app.get('/api/links', async (req, res) => {
try {
const links = await readLinks();
let links;
// If user is not authenticated, only show links in public lists
if (!req.isAuthenticated()) {
const lists = await readLists();
const publicListIds = lists.filter(list => list.public === true).map(list => list.id);
// Filter links to only those that are in at least one public list
const filteredLinks = links.filter(link => {
const linkListIds = link.listIds || [];
return linkListIds.some(listId => publicListIds.includes(listId));
// Get all public lists
const publicLists = await db.List.findAll({
where: { public: true }
});
const publicListIds = publicLists.map(list => list.id);
return res.json(filteredLinks);
// Get links that are in at least one public list
links = await db.Link.findAll({
include: [{
model: db.List,
as: 'lists',
where: { id: { [Op.in]: publicListIds } },
required: true,
attributes: ['id']
}],
order: [['created_at', 'DESC']]
});
} else {
// Authenticated users see all links
links = await db.Link.findAll({
include: [{
model: db.List,
as: 'lists',
attributes: ['id']
}],
order: [['created_at', 'DESC']]
});
}
// Authenticated users see all links
res.json(links);
res.json(links.map(formatLink));
} catch (error) {
console.error('Error fetching links:', error);
res.status(500).json({ error: 'Failed to read links' });
}
});
@@ -782,33 +917,51 @@ app.get('/api/links', async (req, res) => {
app.get('/api/links/search', async (req, res) => {
try {
const query = req.query.q?.toLowerCase() || '';
let links = await readLinks();
const whereClause = {};
if (query) {
whereClause[Op.or] = [
{ title: { [Op.iLike]: `%${query}%` } },
{ description: { [Op.iLike]: `%${query}%` } },
{ url: { [Op.iLike]: `%${query}%` } }
];
}
let links;
// If user is not authenticated, only show links in public lists
if (!req.isAuthenticated()) {
const lists = await readLists();
const publicListIds = lists.filter(list => list.public === true).map(list => list.id);
const publicLists = await db.List.findAll({
where: { public: true }
});
const publicListIds = publicLists.map(list => list.id);
// Filter links to only those that are in at least one public list
links = links.filter(link => {
const linkListIds = link.listIds || [];
return linkListIds.some(listId => publicListIds.includes(listId));
links = await db.Link.findAll({
where: whereClause,
include: [{
model: db.List,
as: 'lists',
where: { id: { [Op.in]: publicListIds } },
required: true,
attributes: ['id']
}],
order: [['created_at', 'DESC']]
});
} else {
links = await db.Link.findAll({
where: whereClause,
include: [{
model: db.List,
as: 'lists',
attributes: ['id']
}],
order: [['created_at', 'DESC']]
});
}
if (!query) {
return res.json(links);
}
const filtered = links.filter(link => {
const titleMatch = link.title?.toLowerCase().includes(query);
const descMatch = link.description?.toLowerCase().includes(query);
const urlMatch = link.url?.toLowerCase().includes(query);
return titleMatch || descMatch || urlMatch;
});
res.json(filtered);
res.json(links.map(formatLink));
} catch (error) {
console.error('Error searching links:', error);
res.status(500).json({ error: 'Failed to search links' });
}
});
@@ -823,8 +976,7 @@ app.post('/api/links', isAuthenticated, async (req, res) => {
}
// Check if link already exists
const links = await readLinks();
const existingLink = links.find(link => link.url === url);
const existingLink = await db.Link.findOne({ where: { url } });
if (existingLink) {
return res.status(409).json({ error: 'Link already exists' });
}
@@ -833,19 +985,19 @@ app.post('/api/links', isAuthenticated, async (req, res) => {
const metadata = await extractMetadata(url);
// Create new link
const newLink = {
id: Date.now().toString(),
const newLink = await db.Link.create({
url: url,
title: metadata.title,
description: metadata.description,
image: metadata.image,
createdAt: new Date().toISOString()
};
created_by: req.user?.username || null,
archived: false
});
links.unshift(newLink); // Add to beginning
await writeLinks(links);
// Reload with associations to get listIds
await newLink.reload({ include: [{ model: db.List, as: 'lists', attributes: ['id'] }] });
res.status(201).json(newLink);
res.status(201).json(formatLink(newLink));
} catch (error) {
console.error('Error adding link:', error);
res.status(500).json({ error: 'Failed to add link' });
@@ -862,18 +1014,24 @@ app.patch('/api/links/:id/archive', isAuthenticated, async (req, res) => {
return res.status(400).json({ error: 'archived must be a boolean' });
}
const links = await readLinks();
const linkIndex = links.findIndex(link => link.id === id);
const link = await db.Link.findByPk(id, {
include: [{ model: db.List, as: 'lists', attributes: ['id'] }]
});
if (linkIndex === -1) {
if (!link) {
return res.status(404).json({ error: 'Link not found' });
}
links[linkIndex].archived = archived;
await writeLinks(links);
await link.update({
archived: archived,
modified_by: req.user?.username || null
});
res.json(links[linkIndex]);
await link.reload({ include: [{ model: db.List, as: 'lists', attributes: ['id'] }] });
res.json(formatLink(link));
} catch (error) {
console.error('Error updating link:', error);
res.status(500).json({ error: 'Failed to update link' });
}
});
@@ -882,16 +1040,16 @@ app.patch('/api/links/:id/archive', isAuthenticated, async (req, res) => {
app.delete('/api/links/:id', isAuthenticated, async (req, res) => {
try {
const { id } = req.params;
const links = await readLinks();
const filtered = links.filter(link => link.id !== id);
const link = await db.Link.findByPk(id);
if (filtered.length === links.length) {
if (!link) {
return res.status(404).json({ error: 'Link not found' });
}
await writeLinks(filtered);
await link.destroy();
res.json({ message: 'Link deleted successfully' });
} catch (error) {
console.error('Error deleting link:', error);
res.status(500).json({ error: 'Failed to delete link' });
}
});
@@ -906,18 +1064,31 @@ app.patch('/api/links/:id/lists', isAuthenticated, async (req, res) => {
return res.status(400).json({ error: 'listIds must be an array' });
}
const links = await readLinks();
const linkIndex = links.findIndex(link => link.id === id);
const link = await db.Link.findByPk(id);
if (linkIndex === -1) {
if (!link) {
return res.status(404).json({ error: 'Link not found' });
}
links[linkIndex].listIds = listIds;
await writeLinks(links);
// Find all lists by IDs
const lists = await db.List.findAll({
where: { id: { [Op.in]: listIds } }
});
res.json(links[linkIndex]);
// Update relationships
await link.setLists(lists);
// Update modified fields
await link.update({
modified_by: req.user?.username || null
});
// Reload with associations
await link.reload({ include: [{ model: db.List, as: 'lists', attributes: ['id'] }] });
res.json(formatLink(link));
} catch (error) {
console.error('Error updating link lists:', error);
res.status(500).json({ error: 'Failed to update link lists' });
}
});
@@ -927,17 +1098,24 @@ app.patch('/api/links/:id/lists', isAuthenticated, async (req, res) => {
// Get all lists
app.get('/api/lists', async (req, res) => {
try {
const lists = await readLists();
let lists;
// If user is not authenticated, only return public lists
if (!req.isAuthenticated()) {
const publicLists = lists.filter(list => list.public === true);
return res.json(publicLists);
lists = await db.List.findAll({
where: { public: true },
order: [['created_at', 'DESC']]
});
} else {
// Authenticated users see all lists
lists = await db.List.findAll({
order: [['created_at', 'DESC']]
});
}
// Authenticated users see all lists
res.json(lists);
res.json(lists.map(formatList));
} catch (error) {
console.error('Error fetching lists:', error);
res.status(500).json({ error: 'Failed to read lists' });
}
});
@@ -951,26 +1129,28 @@ app.post('/api/lists', isAuthenticated, async (req, res) => {
return res.status(400).json({ error: 'List name is required' });
}
const lists = await readLists();
const trimmedName = name.trim();
// Check if list with same name already exists (case-insensitive)
const existingList = await db.List.findOne({
where: {
name: { [Op.iLike]: trimmedName }
}
});
// Check if list with same name already exists
const existingList = lists.find(list => list.name.toLowerCase() === name.trim().toLowerCase());
if (existingList) {
return res.status(409).json({ error: 'List with this name already exists' });
}
const newList = {
id: Date.now().toString(),
name: name.trim(),
createdAt: new Date().toISOString(),
const newList = await db.List.create({
name: trimmedName,
created_by: req.user?.username || null,
public: false
};
});
lists.push(newList);
await writeLists(lists);
res.status(201).json(newList);
res.status(201).json(formatList(newList));
} catch (error) {
console.error('Error creating list:', error);
res.status(500).json({ error: 'Failed to create list' });
}
});
@@ -985,24 +1165,34 @@ app.put('/api/lists/:id', isAuthenticated, async (req, res) => {
return res.status(400).json({ error: 'List name is required' });
}
const lists = await readLists();
const listIndex = lists.findIndex(list => list.id === id);
const list = await db.List.findByPk(id);
if (listIndex === -1) {
if (!list) {
return res.status(404).json({ error: 'List not found' });
}
// Check if another list with same name exists
const existingList = lists.find(list => list.id !== id && list.name.toLowerCase() === name.trim().toLowerCase());
const trimmedName = name.trim();
// Check if another list with same name exists (case-insensitive)
const existingList = await db.List.findOne({
where: {
id: { [Op.ne]: id },
name: { [Op.iLike]: trimmedName }
}
});
if (existingList) {
return res.status(409).json({ error: 'List with this name already exists' });
}
lists[listIndex].name = name.trim();
await writeLists(lists);
await list.update({
name: trimmedName,
modified_by: req.user?.username || null
});
res.json(lists[listIndex]);
res.json(formatList(list));
} catch (error) {
console.error('Error updating list:', error);
res.status(500).json({ error: 'Failed to update list' });
}
});
@@ -1017,18 +1207,20 @@ app.patch('/api/lists/:id/public', isAuthenticated, async (req, res) => {
return res.status(400).json({ error: 'public must be a boolean' });
}
const lists = await readLists();
const listIndex = lists.findIndex(list => list.id === id);
const list = await db.List.findByPk(id);
if (listIndex === -1) {
if (!list) {
return res.status(404).json({ error: 'List not found' });
}
lists[listIndex].public = isPublic;
await writeLists(lists);
await list.update({
public: isPublic,
modified_by: req.user?.username || null
});
res.json(lists[listIndex]);
res.json(formatList(list));
} catch (error) {
console.error('Error updating list public status:', error);
res.status(500).json({ error: 'Failed to update list public status' });
}
});
@@ -1037,25 +1229,18 @@ app.patch('/api/lists/:id/public', isAuthenticated, async (req, res) => {
app.delete('/api/lists/:id', isAuthenticated, async (req, res) => {
try {
const { id } = req.params;
const lists = await readLists();
const filtered = lists.filter(list => list.id !== id);
const list = await db.List.findByPk(id);
if (filtered.length === lists.length) {
if (!list) {
return res.status(404).json({ error: 'List not found' });
}
// Remove this list from all links
const links = await readLinks();
links.forEach(link => {
if (link.listIds && Array.isArray(link.listIds)) {
link.listIds = link.listIds.filter(listId => listId !== id);
}
});
await writeLinks(links);
// CASCADE delete will automatically remove from link_lists junction table
await list.destroy();
await writeLists(filtered);
res.json({ message: 'List deleted successfully' });
} catch (error) {
console.error('Error deleting list:', error);
res.status(500).json({ error: 'Failed to delete list' });
}
});
@@ -1072,10 +1257,18 @@ function isValidUrl(string) {
// Initialize server
async function startServer() {
await ensureDataDir();
try {
// Initialize database (connect, run migrations, migrate JSON files)
await initializeDatabase();
// Start server
app.listen(PORT, () => {
console.log(`LinkDing server running on http://localhost:${PORT}`);
});
} catch (error) {
console.error('Failed to start server:', error);
process.exit(1);
}
}
startServer();