25 Commits

Author SHA1 Message Date
Anton
16b5af3365 feat: export LlmService interface
Made-with: Cursor
2026-03-04 14:39:04 +03:00
Anton
39721e37ff feat: log LLM metadata to question_cache_meta
Made-with: Cursor
2026-03-04 14:38:44 +03:00
Anton
0baeb1104a feat: add LLM retry and fallback
Made-with: Cursor
2026-03-04 14:37:18 +03:00
Anton
04ad02be5e feat: add LLM question generation
Made-with: Cursor
2026-03-04 14:36:50 +03:00
Anton
0172b4518d feat: add LlmService base
Made-with: Cursor
2026-03-04 14:17:15 +03:00
Anton
f7e865721a feat: add stats to profile response
Made-with: Cursor
2026-03-04 14:16:59 +03:00
Anton
6530e81402 feat: add profile routes
Made-with: Cursor
2026-03-04 14:16:23 +03:00
Anton
b7573acbed feat: add UserService
Made-with: Cursor
2026-03-04 14:15:49 +03:00
Anton
bf544b3e5b feat: add subscription middleware
Made-with: Cursor
2026-03-04 14:15:09 +03:00
Anton
c5a4e26f33 feat: register auth routes in app
Made-with: Cursor
2026-03-04 14:12:24 +03:00
Anton
682885ce5a feat: add rate limiting to auth endpoints
Made-with: Cursor
2026-03-04 14:11:29 +03:00
Anton
78809a064e feat: add email verification and password reset
Made-with: Cursor
2026-03-04 14:07:45 +03:00
Anton
e2baa14814 feat: add auth routes
Made-with: Cursor
2026-03-04 14:07:03 +03:00
Anton
181be58a60 feat: add auth plugin
Made-with: Cursor
2026-03-04 14:06:28 +03:00
Anton
8551d5f6d2 feat: add AuthService
Made-with: Cursor
2026-03-04 14:05:34 +03:00
Anton
5cd13cd8ea feat: add password hashing and JWT utils
Made-with: Cursor
2026-03-04 14:04:24 +03:00
Anton
41b4f48a0f chore: add initial migration and seed script
Made-with: Cursor
2026-03-04 13:57:10 +03:00
Anton
c7c9982234 chore: wire schema and migrations
Made-with: Cursor
2026-03-04 13:39:00 +03:00
Anton
ed8658916c feat: add user_stats, audit_logs and verification tokens schema
Made-with: Cursor
2026-03-04 13:38:46 +03:00
Anton
970a864823 feat: add question bank schema
Made-with: Cursor
2026-03-04 13:38:31 +03:00
Anton
35873c3054 feat: add tests and test_questions schema
Made-with: Cursor
2026-03-04 13:38:19 +03:00
Anton
8b3a4c475f feat: add users and auth tables schema
Made-with: Cursor
2026-03-04 13:38:07 +03:00
Anton
a7394c4d9d feat: add Drizzle enums for schema
Made-with: Cursor
2026-03-04 13:37:55 +03:00
Anton
53525dcd52 feat: add Redis plugin
Made-with: Cursor
2026-03-04 13:36:35 +03:00
Anton
145301aba2 chore: add env config with validation
Made-with: Cursor
2026-03-04 13:36:28 +03:00
45 changed files with 3827 additions and 1 deletions

45
.env.example Normal file
View File

@@ -0,0 +1,45 @@
# Server
PORT=3000
HOST=0.0.0.0
NODE_ENV=development
# Database
DATABASE_URL=postgresql://samreshu:samreshu_dev@localhost:5432/samreshu
# Redis
REDIS_URL=redis://localhost:6379
# Auth
JWT_SECRET=dev-secret-change-in-production-min-32-chars
JWT_ACCESS_TTL=15m
JWT_REFRESH_TTL=7d
# LLM
LLM_BASE_URL=http://localhost:11434/v1
LLM_MODEL=qwen2.5:14b
LLM_FALLBACK_MODEL=
LLM_API_KEY=
LLM_TIMEOUT_MS=15000
LLM_MAX_RETRIES=1
LLM_RETRY_DELAY_MS=1000
LLM_TEMPERATURE=0.7
LLM_MAX_TOKENS=2048
# Rate limits
RATE_LIMIT_LOGIN=5
RATE_LIMIT_REGISTER=3
RATE_LIMIT_FORGOT_PASSWORD=3
RATE_LIMIT_VERIFY_EMAIL=5
RATE_LIMIT_API_AUTHED=100
RATE_LIMIT_API_GUEST=30
# CORS (comma-separated origins)
CORS_ORIGINS=http://localhost:5173,http://localhost:3000
# Email (dev — mailpit / mailtrap)
SMTP_HOST=localhost
SMTP_PORT=1025
SMTP_USER=
SMTP_PASS=
EMAIL_FROM=noreply@samreshu.dev
# Sentry (optional for dev)
SENTRY_DSN=

3
.gitmodules vendored Normal file
View File

@@ -0,0 +1,3 @@
[submodule "samreshu_docs"]
path = samreshu_docs
url = https://git.vakanaut.ru/admin/samreshu_docs.git

307
AGENT_TASKS.md Normal file
View File

@@ -0,0 +1,307 @@
# Распределение задач по агентам (MVP 0)
Документ для параллельной работы нескольких агентов. Каждый агент работает в **отдельной сессии Cursor** (отдельное окно чата). После выполнения **каждой задачи** — отдельный коммит по conventional commits.
## Как запустить агента
1. Открыть **новую сессию чата** (новое окно Composer/Chat в Cursor)
2. Скопировать промпт для нужного агента из раздела ниже
3. Агент выполнит свои задачи и сделает коммиты
4. Повторить для следующего агента
### Промпты для запуска агентов
**Agent A (Infrastructure):**
```text
Implement Agent A tasks from AGENT_TASKS.md. Work in branch feat/infra-core.
Do tasks A1–A9 in order. After EACH task, run: git add -A && git commit -m "<message from table>".
Use conventional commits. Do not touch schema or auth/tests/profile/llm/admin code.
```
**Agent B (DB Schema):**
```text
Implement Agent B tasks from AGENT_TASKS.md. Work in branch feat/db-schema.
Do tasks B1–B7. After EACH task, commit with the message from the table.
Assume Agent A has created plugins. Generate migrations with npm run db:generate.
```
**Agent C (Auth):**
```text
Implement Agent C tasks from AGENT_TASKS.md. Work in branch feat/auth.
Merge or rebase from dev first. Do tasks C1–C7, commit after each.
```
**Agent D (Profile):**
```text
Implement Agent D tasks from AGENT_TASKS.md. Work in branch feat/profile-subscription.
Merge dev first. Do tasks D1–D5, commit after each.
```
**Agent E (Tests & Questions):**
```text
Implement Agent E tasks from AGENT_TASKS.md. Work in branch feat/tests-questions.
Requires Auth, Profile, LLM done. Do E1–E5, commit after each.
```
**Agent F (LLM):**
```text
Implement Agent F tasks from AGENT_TASKS.md. Work in branch feat/llm-service.
Do F1–F5, commit after each. Export LlmService interface for QuestionService.
```
**Agent G (Admin):**
```text
Implement Agent G tasks from AGENT_TASKS.md. Work in branch feat/admin-qa.
Do G1–G4, commit after each.
```
**Agent H (Testing):**
```text
Implement Agent H tasks from AGENT_TASKS.md. Work in branch feat/testing.
Do H1–H7, commit after each. Target ≥70% coverage on services.
```
## Текущее состояние репозитория
Часть работы уже выполнена одним агентом:
- **Infra (A):** package.json, config, plugins (database, redis, security, rateLimit), app.ts, server.ts, utils/errors, utils/uuid, docker-compose, .env.example
- **Schema (B):** все схемы в src/db/schema, drizzle.config, migrate.ts. Миграции ещё не сгенерированы (нужен `npm run db:generate`)
Агентам B–H при старте: проверить, что уже есть, и дописать недостающее. Не дублировать сделанное. Коммитить только свои изменения.
---
## Формат коммитов
Использовать [conventional commits](samreshu_docs/principles/git-workflow.md):
- `feat:` — новый функционал
- `fix:` — исправление бага
- `refactor:` — рефакторинг
- `chore:` — инфраструктура, зависимости, конфиг
- `test:` — тесты
Примеры: `feat: add auth register endpoint`, `chore: add database plugin`, `test: add auth service unit tests`.
---
## Волны запуска и зависимости
```text
Волна 1 (старт сразу, параллельно):
Agent A ────────────────────────────►
Agent B ────────────────────────────►
Волна 2 (после завершения A + B):
Agent C ────────────────────────────►
Agent D ────────────────────────────►
Agent F ────────────────────────────►
Волна 3 (после C, D, F):
Agent E ────────────────────────────►
Agent G ────────────────────────────►
Agent H ────────────────────────────► (может стартовать частично после C)
```
---
## Agent A: Infrastructure & Core Backend
**Зависимости:** нет (запускать первым)
**Ветка:** `feat/infra-core` (создать от `main`/`dev`)
**Задачи и коммиты:**
| # | Задача | Коммит |
| - | - | - |
| A1 | package.json, tsconfig, .nvmrc, .gitignore | `chore: init project with Fastify and TypeScript` |
| A2 | src/config/env.ts с Zod-валидацией | `chore: add env config with validation` |
| A3 | src/plugins/redis.ts | `feat: add Redis plugin` |
| A4 | src/plugins/database.ts | `feat: add database plugin with Drizzle` |
| A5 | src/plugins/security.ts (helmet + cors) | `feat: add security plugin` |
| A6 | src/plugins/rateLimit.ts | `feat: add rate limit plugin with Redis` |
| A7 | src/utils/errors.ts, src/utils/uuid.ts | `chore: add error utils and uuid7 helper` |
| A8 | src/app.ts — error handler, логирование | `feat: add Fastify app with error handler` |
| A9 | src/server.ts, docker-compose.dev.yml, .env.example | `chore: add server entry and dev docker compose` |
**Итого:** 9 коммитов. После завершения — PR в `dev`.
---
## Agent B: Data Model & Drizzle Schema
**Зависимости:** Agent A (нужен database plugin). Может стартовать после A4.
**Ветка:** `feat/db-schema`
**Задачи и коммиты:**
| # | Задача | Коммит |
| - | - | - |
| B1 | src/db/schema/enums.ts | `feat: add Drizzle enums for schema` |
| B2 | src/db/schema/users.ts, sessions.ts, subscriptions.ts | `feat: add users and auth tables schema` |
| B3 | src/db/schema/tests.ts, testQuestions.ts | `feat: add tests and test_questions schema` |
| B4 | src/db/schema/questionBank.ts, questionCacheMeta.ts, questionReports.ts | `feat: add question bank schema` |
| B5 | src/db/schema/userStats.ts, auditLogs.ts, userQuestionLog.ts, verificationTokens.ts | `feat: add user_stats, audit_logs and verification tokens schema` |
| B6 | src/db/schema/index.ts, drizzle.config.ts, migrate.ts | `chore: wire schema and migrations` |
| B7 | Генерация миграций (npm run db:generate), seed скрипт | `chore: add initial migration and seed script` |
**Итого:** 7 коммитов. После завершения — PR в `dev`.
---
## Agent C: Auth & Sessions Service
**Зависимости:** A, B завершены.
**Ветка:** `feat/auth`
**Задачи и коммиты:**
| # | Задача | Коммит |
| - | - | - |
| C1 | src/utils/password.ts (argon2id), src/utils/jwt.ts | `feat: add password hashing and JWT utils` |
| C2 | src/services/auth/auth.service.ts | `feat: add AuthService` |
| C3 | src/plugins/auth.ts (JWT verify, request.user) | `feat: add auth plugin` |
| C4 | src/routes/auth.ts — register, login, logout, refresh | `feat: add auth routes` |
| C5 | verify-email, forgot-password, reset-password | `feat: add email verification and password reset` |
| C6 | Rate limiting на auth-роуты | `feat: add rate limiting to auth endpoints` |
| C7 | Регистрация auth routes в app | `feat: register auth routes in app` |
**Итого:** 7 коммитов. После завершения — PR в `dev`.
---
## Agent D: Profile & Subscription Middleware
**Зависимости:** A, B, C.
**Ветка:** `feat/profile-subscription`
**Задачи и коммиты:**
| # | Задача | Коммит |
| - | - | - |
| D1 | src/plugins/subscription.ts | `feat: add subscription middleware` |
| D2 | src/services/user/user.service.ts | `feat: add UserService` |
| D3 | src/routes/profile.ts — GET, PATCH, GET :username | `feat: add profile routes` |
| D4 | Интеграция user_stats в профиль | `feat: add stats to profile response` |
| D5 | Регистрация profile routes в app | `feat: register profile routes` |
**Итого:** 5 коммитов. После завершения — PR в `dev`.
---
## Agent E: Question Bank & Tests Service
**Зависимости:** A, B, C, D, F (контракт LlmService).
**Ветка:** `feat/tests-questions`
**Задачи и коммиты:**
| # | Задача | Коммит |
| - | - | - |
| E1 | src/services/questions/question.service.ts | `feat: add QuestionService` |
| E2 | src/services/tests/tests.service.ts — создание теста, снепшот | `feat: add TestsService create flow` |
| E3 | tests.service — answer, finish, history | `feat: add test answer and finish flow` |
| E4 | src/routes/tests.ts | `feat: add tests routes` |
| E5 | Регистрация tests routes | `feat: register tests routes` |
**Итого:** 5 коммитов. После завершения — PR в `dev`.
---
## Agent F: LLM Service & Fallback Logic
**Зависимости:** A, B.
**Ветка:** `feat/llm-service`
**Задачи и коммиты:**
| # | Задача | Коммит |
| - | - | - |
| F1 | src/services/llm/llm.service.ts — конфиг, HTTP client | `feat: add LlmService base` |
| F2 | generateQuestions с JSON Schema валидацией | `feat: add LLM question generation` |
| F3 | Retry и fallback логика | `feat: add LLM retry and fallback` |
| F4 | Интеграция question_cache_meta | `feat: log LLM metadata to question_cache_meta` |
| F5 | Экспорт интерфейса для QuestionService | `feat: export LlmService interface` |
**Итого:** 5 коммитов. После завершения — PR в `dev`.
---
## Agent G: Admin QA for Questions
**Зависимости:** A, B, C, E (модель question_bank).
**Ветка:** `feat/admin-qa`
**Задачи и коммиты:**
| # | Задача | Коммит |
| - | - | - |
| G1 | src/services/admin/admin-question.service.ts | `feat: add AdminQuestionService` |
| G2 | src/routes/admin/questions.ts | `feat: add admin questions routes` |
| G3 | audit_logs при approve/reject/edit | `feat: add audit logging to admin actions` |
| G4 | Регистрация admin routes | `feat: register admin routes` |
**Итого:** 4 коммита. После завершения — PR в `dev`.
---
## Agent H: Tests & Quality
**Зависимости:** Может стартовать после C (auth), наращивать по мере готовности других агентов.
**Ветка:** `feat/testing`
**Задачи и коммиты:**
| # | Задача | Коммит |
| - | - | - |
| H1 | Vitest config, test-utils | `test: add Vitest config and test utils` |
| H2 | AuthService unit tests | `test: add auth service tests` |
| H3 | Auth routes integration tests | `test: add auth routes integration tests` |
| H4 | TestsService и QuestionService tests | `test: add tests and questions service tests` |
| H5 | LlmService unit tests (с моком) | `test: add LLM service tests` |
| H6 | Admin routes integration tests | `test: add admin routes tests` |
| H7 | Coverage ≥70%, npm script | `test: add coverage config` |
**Итого:** 7 коммитов. После завершения — PR в `dev`.
---
## Рекомендуемый порядок запуска
1. **Сразу:** Agent A и Agent B (в двух отдельных сессиях Cursor)
2. **После A и B:** Agent C, D, F (три сессии)
3. **После C, D, F:** Agent E, G (две сессии)
4. **Параллельно волне 2–3:** Agent H (тесты по мере готовности API)
---
## Чеклист для каждого агента
Перед каждым коммитом:
- [ ] `npm run typecheck` проходит
- [ ] Сообщение коммита по conventional commits
- [ ] Нет `console.log`, только `logger`
- [ ] Нет `any` без необходимости
После завершения всех задач агента:
- [ ] Создать PR в `dev`
- [ ] Проверить, что не сломаны существующие тесты (если есть)

29
docker-compose.dev.yml Normal file
View File

@@ -0,0 +1,29 @@
# Local development stack: Postgres 16 + Redis 7, both with healthchecks
# matching the credentials used in .env.example and drizzle.config.ts.
services:
  postgres:
    image: postgres:16-alpine
    environment:
      POSTGRES_USER: samreshu
      POSTGRES_PASSWORD: samreshu_dev
      POSTGRES_DB: samreshu
    ports:
      - "5432:5432"
    volumes:
      # Persist data across container restarts.
      - pgdata:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U samreshu -d samreshu"]
      interval: 5s
      timeout: 5s
      retries: 5
  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 5s
      retries: 5
volumes:
  pgdata:

13
drizzle.config.ts Normal file
View File

@@ -0,0 +1,13 @@
import 'dotenv/config';
import { defineConfig } from 'drizzle-kit';

// Local-dev fallback; mirrors the credentials in docker-compose.dev.yml.
const DEFAULT_DATABASE_URL = 'postgresql://samreshu:samreshu_dev@localhost:5432/samreshu';

/**
 * drizzle-kit configuration: reads every schema module under src/db/schema
 * and writes generated SQL migrations to src/db/migrations.
 */
export default defineConfig({
  schema: './src/db/schema/*.ts',
  out: './src/db/migrations',
  dialect: 'postgresql',
  dbCredentials: {
    url: process.env.DATABASE_URL ?? DEFAULT_DATABASE_URL,
  },
});

View File

@@ -8,7 +8,7 @@
"build": "tsc",
"dev": "tsx watch src/server.ts",
"start": "node dist/server.js",
"db:generate": "drizzle-kit generate --config=drizzle.config.ts",
"db:generate": "tsx node_modules/drizzle-kit/bin.cjs generate --config=drizzle.config.ts",
"db:migrate": "tsx src/db/migrate.ts",
"db:studio": "drizzle-kit studio",
"db:seed": "tsx src/db/seed.ts",

1
samreshu_docs Submodule

Submodule samreshu_docs added at 99cd8ae727

82
src/app.ts Normal file
View File

@@ -0,0 +1,82 @@
import Fastify, { FastifyInstance } from 'fastify';
import { AppError } from './utils/errors.js';
import databasePlugin from './plugins/database.js';
import redisPlugin from './plugins/redis.js';
import securityPlugin from './plugins/security.js';
import rateLimitPlugin from './plugins/rateLimit.js';
import authPlugin from './plugins/auth.js';
import subscriptionPlugin from './plugins/subscription.js';
import { authRoutes } from './routes/auth.js';
import { env } from './config/env.js';
import { randomUUID } from 'node:crypto';
/**
 * Builds and configures the Fastify application: logger, request-id
 * propagation, central error handler, plugins, and route registration.
 * Returns the app without starting the HTTP listener (done in server.ts).
 */
export async function buildApp(): Promise<FastifyInstance> {
  const isDev = env.NODE_ENV === 'development';

  const app = Fastify({
    logger: {
      level: isDev ? 'debug' : 'info',
      // pino-pretty only in development; production emits raw JSON logs.
      transport: isDev
        ? {
            target: 'pino-pretty',
            options: { translateTime: 'HH:MM:ss Z', ignore: 'pid,hostname' },
          }
        : undefined,
    },
    // Honor an incoming x-request-id, otherwise generate a fresh UUID.
    requestIdHeader: 'x-request-id',
    requestIdLogLabel: 'requestId',
    genReqId: () => randomUUID(),
  });

  // Central error handler: AppError → its own status/payload,
  // schema validation failures → 422, everything else → 500 (message
  // redacted in production).
  app.setErrorHandler((err: unknown, request, reply) => {
    const cause = err as Error & { statusCode?: number; validation?: unknown };
    request.log.error({ err }, cause.message);

    if (err instanceof AppError) {
      // Rate-limit errors may carry a retryAfter hint for the client.
      if (err.code === 'RATE_LIMIT_EXCEEDED' && 'retryAfter' in err) {
        const retryAfter = (err as AppError & { retryAfter?: number }).retryAfter ?? 60;
        reply.header('Retry-After', String(retryAfter));
      }
      return reply.status(err.statusCode).send(err.toJSON());
    }

    if (cause.validation) {
      return reply.status(422).send({
        error: {
          code: 'VALIDATION_ERROR',
          message: 'Validation failed',
          details: cause.validation,
        },
      });
    }

    return reply.status(cause.statusCode ?? 500).send({
      error: {
        code: 'INTERNAL_ERROR',
        message: env.NODE_ENV === 'production' ? 'Internal server error' : cause.message,
      },
    });
  });

  // Echo the request id back so clients can correlate logs.
  app.addHook('onRequest', async (request, reply) => {
    reply.header('x-request-id', request.id);
  });

  // Registration order matters: redis before rateLimit (the limiter stores
  // counters in Redis), database before auth/subscription.
  await app.register(redisPlugin);
  await app.register(databasePlugin);
  await app.register(securityPlugin);
  await app.register(rateLimitPlugin);
  await app.register(authPlugin);
  await app.register(subscriptionPlugin);

  await app.register(authRoutes, { prefix: '/auth' });

  app.get('/health', async () => ({ status: 'ok', timestamp: new Date().toISOString() }));

  return app;
}

51
src/config/env.ts Normal file
View File

@@ -0,0 +1,51 @@
import { z } from 'zod';

/**
 * Runtime environment schema. Parsed exactly once at module load so the
 * process fails fast with a readable field-error map when configuration
 * is missing or malformed.
 */
const envSchema = z.object({
  NODE_ENV: z.enum(['development', 'test', 'production']).default('development'),
  PORT: z.coerce.number().min(1).max(65535).default(3000),
  HOST: z.string().default('0.0.0.0'),
  DATABASE_URL: z.string().min(1),
  REDIS_URL: z.string().min(1).default('redis://localhost:6379'),
  // Minimum 32 chars enforced so the dev placeholder pattern stays valid.
  JWT_SECRET: z.string().min(32),
  JWT_ACCESS_TTL: z.string().default('15m'),
  JWT_REFRESH_TTL: z.string().default('7d'),
  LLM_BASE_URL: z.string().url().default('http://localhost:11434/v1'),
  LLM_MODEL: z.string().default('qwen2.5:14b'),
  LLM_FALLBACK_MODEL: z.string().optional(),
  LLM_API_KEY: z.string().optional(),
  LLM_TIMEOUT_MS: z.coerce.number().default(15000),
  LLM_MAX_RETRIES: z.coerce.number().min(0).default(1),
  LLM_RETRY_DELAY_MS: z.coerce.number().min(0).default(1000),
  LLM_TEMPERATURE: z.coerce.number().min(0).max(2).default(0.7),
  LLM_MAX_TOKENS: z.coerce.number().default(2048),
  RATE_LIMIT_LOGIN: z.coerce.number().default(5),
  RATE_LIMIT_REGISTER: z.coerce.number().default(3),
  RATE_LIMIT_FORGOT_PASSWORD: z.coerce.number().default(3),
  RATE_LIMIT_VERIFY_EMAIL: z.coerce.number().default(5),
  RATE_LIMIT_API_AUTHED: z.coerce.number().default(100),
  RATE_LIMIT_API_GUEST: z.coerce.number().default(30),
  CORS_ORIGINS: z.string().default('http://localhost:5173'),
  // SMTP/email settings are documented in .env.example but were previously
  // missing from the schema, so nothing could read them through `env`.
  // Defaults mirror the dev values in .env.example (mailpit on :1025).
  SMTP_HOST: z.string().default('localhost'),
  SMTP_PORT: z.coerce.number().default(1025),
  SMTP_USER: z.string().optional(),
  SMTP_PASS: z.string().optional(),
  EMAIL_FROM: z.string().default('noreply@samreshu.dev'),
  SENTRY_DSN: z.string().optional(),
});

export type Env = z.infer<typeof envSchema>;

/**
 * Validates process.env against the schema.
 * @throws Error with a JSON-encoded field-error map on invalid config.
 */
function parseEnv(): Env {
  const result = envSchema.safeParse(process.env);
  if (!result.success) {
    const msg = result.error.flatten().fieldErrors;
    throw new Error(`Invalid environment: ${JSON.stringify(msg)}`);
  }
  return result.data;
}

// Validated singleton; importing this module validates the environment.
export const env = parseEnv();

/** CORS_ORIGINS is comma-separated; returns trimmed, non-empty origins. */
export function getCorsOrigins(): string[] {
  return env.CORS_ORIGINS.split(',').map((s) => s.trim()).filter(Boolean);
}

19
src/db/migrate.ts Normal file
View File

@@ -0,0 +1,19 @@
import { drizzle } from 'drizzle-orm/node-postgres';
import { migrate } from 'drizzle-orm/node-postgres/migrator';
import pg from 'pg';
import { env } from '../config/env.js';
const { Pool } = pg;
/**
 * Applies all pending SQL migrations from ./src/db/migrations against
 * DATABASE_URL, then closes the connection pool.
 *
 * Fix: the original only called pool.end() on success; on a failed
 * migration the pool was left open. try/finally closes it on both paths.
 */
async function runMigrations(): Promise<void> {
  const pool = new Pool({ connectionString: env.DATABASE_URL });
  try {
    const db = drizzle(pool);
    await migrate(db, { migrationsFolder: './src/db/migrations' });
  } finally {
    await pool.end();
  }
}

// Standalone script entry point: log and exit non-zero on failure.
runMigrations().catch((err) => {
  console.error('Migration failed:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,246 @@
-- Generated by drizzle-kit (migration 0000_fearless_salo, journal idx 0).
-- Do not hand-edit an applied migration; generate a new one instead.
-- Layout: enum types first, then tables, then foreign keys. FKs are added
-- inside DO blocks that swallow duplicate_object so re-runs are idempotent.
-- NOTE: the '--> statement-breakpoint' markers are drizzle's statement
-- delimiters and must not be removed or reformatted.
CREATE TYPE "public"."level" AS ENUM('basic', 'beginner', 'intermediate', 'advanced', 'expert');--> statement-breakpoint
CREATE TYPE "public"."plan" AS ENUM('free', 'pro');--> statement-breakpoint
CREATE TYPE "public"."question_source" AS ENUM('llm_generated', 'manual');--> statement-breakpoint
CREATE TYPE "public"."question_status" AS ENUM('pending', 'approved', 'rejected');--> statement-breakpoint
CREATE TYPE "public"."question_type" AS ENUM('single_choice', 'multiple_select', 'true_false', 'short_text');--> statement-breakpoint
CREATE TYPE "public"."report_status" AS ENUM('open', 'resolved', 'dismissed');--> statement-breakpoint
CREATE TYPE "public"."self_level" AS ENUM('jun', 'mid', 'sen');--> statement-breakpoint
CREATE TYPE "public"."stack" AS ENUM('html', 'css', 'js', 'ts', 'react', 'vue', 'nodejs', 'git', 'web_basics');--> statement-breakpoint
CREATE TYPE "public"."subscription_status" AS ENUM('active', 'trialing', 'cancelled', 'expired');--> statement-breakpoint
CREATE TYPE "public"."test_mode" AS ENUM('fixed', 'infinite', 'marathon');--> statement-breakpoint
CREATE TYPE "public"."test_status" AS ENUM('in_progress', 'completed', 'abandoned');--> statement-breakpoint
CREATE TYPE "public"."user_role" AS ENUM('guest', 'free', 'pro', 'admin');--> statement-breakpoint
-- Tables -------------------------------------------------------------------
CREATE TABLE IF NOT EXISTS "users" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"email" varchar(255) NOT NULL,
"password_hash" varchar(255) NOT NULL,
"nickname" varchar(30) NOT NULL,
"avatar_url" varchar(500),
"country" varchar(100),
"city" varchar(100),
"self_level" "self_level",
"is_public" boolean DEFAULT true NOT NULL,
"role" "user_role" DEFAULT 'free' NOT NULL,
"email_verified_at" timestamp with time zone,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"updated_at" timestamp with time zone DEFAULT now() NOT NULL,
CONSTRAINT "users_email_unique" UNIQUE("email")
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "sessions" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"user_id" uuid NOT NULL,
"refresh_token_hash" varchar(255) NOT NULL,
"user_agent" varchar(500),
"ip_address" varchar(45),
"last_active_at" timestamp with time zone DEFAULT now() NOT NULL,
"expires_at" timestamp with time zone NOT NULL,
"created_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "subscriptions" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"user_id" uuid NOT NULL,
"plan" "plan" NOT NULL,
"status" "subscription_status" NOT NULL,
"started_at" timestamp with time zone NOT NULL,
"expires_at" timestamp with time zone,
"cancelled_at" timestamp with time zone,
"payment_provider" varchar(50),
"external_id" varchar(255),
CONSTRAINT "subscriptions_user_id_unique" UNIQUE("user_id")
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "tests" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"user_id" uuid NOT NULL,
"stack" "stack" NOT NULL,
"level" "level" NOT NULL,
"question_count" integer NOT NULL,
"mode" "test_mode" DEFAULT 'fixed' NOT NULL,
"status" "test_status" DEFAULT 'in_progress' NOT NULL,
"score" integer,
"started_at" timestamp with time zone DEFAULT now() NOT NULL,
"finished_at" timestamp with time zone,
"time_limit_seconds" integer
);
--> statement-breakpoint
-- test_questions snapshots the question at test time; question_bank_id is
-- nullable and SET NULL on delete so history survives bank deletions.
CREATE TABLE IF NOT EXISTS "test_questions" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"test_id" uuid NOT NULL,
"question_bank_id" uuid,
"order_number" integer NOT NULL,
"type" "question_type" NOT NULL,
"question_text" text NOT NULL,
"options" jsonb,
"correct_answer" jsonb NOT NULL,
"explanation" text NOT NULL,
"user_answer" jsonb,
"is_correct" boolean,
"answered_at" timestamp with time zone
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "question_bank" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"stack" "stack" NOT NULL,
"level" "level" NOT NULL,
"type" "question_type" NOT NULL,
"question_text" text NOT NULL,
"options" jsonb,
"correct_answer" jsonb NOT NULL,
"explanation" text NOT NULL,
"status" "question_status" DEFAULT 'pending' NOT NULL,
"source" "question_source" NOT NULL,
"usage_count" integer DEFAULT 0 NOT NULL,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"approved_at" timestamp with time zone
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "question_cache_meta" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"question_bank_id" uuid NOT NULL,
"llm_model" varchar(100) NOT NULL,
"prompt_hash" varchar(64) NOT NULL,
"generation_time_ms" integer NOT NULL,
"raw_response" jsonb,
"created_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "question_reports" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"question_bank_id" uuid NOT NULL,
"user_id" uuid NOT NULL,
"reason" text NOT NULL,
"status" "report_status" DEFAULT 'open' NOT NULL,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"resolved_at" timestamp with time zone
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "user_stats" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"user_id" uuid NOT NULL,
"stack" "stack" NOT NULL,
"level" "level" NOT NULL,
"total_questions" integer DEFAULT 0 NOT NULL,
"correct_answers" integer DEFAULT 0 NOT NULL,
"tests_taken" integer DEFAULT 0 NOT NULL,
"last_test_at" timestamp with time zone,
CONSTRAINT "user_stats_user_id_stack_level_unique" UNIQUE("user_id","stack","level")
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "audit_logs" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"admin_id" uuid NOT NULL,
"action" varchar(100) NOT NULL,
"target_type" varchar(50) NOT NULL,
"target_id" uuid NOT NULL,
"details" jsonb,
"created_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "user_question_log" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"user_id" uuid NOT NULL,
"question_bank_id" uuid NOT NULL,
"seen_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "email_verification_codes" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"user_id" uuid NOT NULL,
"code" varchar(10) NOT NULL,
"expires_at" timestamp with time zone NOT NULL,
"created_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
CREATE TABLE IF NOT EXISTS "password_reset_tokens" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"user_id" uuid NOT NULL,
"token_hash" varchar(255) NOT NULL,
"expires_at" timestamp with time zone NOT NULL,
"created_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
-- Foreign keys (idempotent: duplicate_object is ignored on re-run) ----------
DO $$ BEGIN
ALTER TABLE "sessions" ADD CONSTRAINT "sessions_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "subscriptions" ADD CONSTRAINT "subscriptions_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "tests" ADD CONSTRAINT "tests_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "test_questions" ADD CONSTRAINT "test_questions_test_id_tests_id_fk" FOREIGN KEY ("test_id") REFERENCES "public"."tests"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "test_questions" ADD CONSTRAINT "test_questions_question_bank_id_question_bank_id_fk" FOREIGN KEY ("question_bank_id") REFERENCES "public"."question_bank"("id") ON DELETE set null ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "question_cache_meta" ADD CONSTRAINT "question_cache_meta_question_bank_id_question_bank_id_fk" FOREIGN KEY ("question_bank_id") REFERENCES "public"."question_bank"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "question_reports" ADD CONSTRAINT "question_reports_question_bank_id_question_bank_id_fk" FOREIGN KEY ("question_bank_id") REFERENCES "public"."question_bank"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "question_reports" ADD CONSTRAINT "question_reports_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "user_stats" ADD CONSTRAINT "user_stats_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "audit_logs" ADD CONSTRAINT "audit_logs_admin_id_users_id_fk" FOREIGN KEY ("admin_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "user_question_log" ADD CONSTRAINT "user_question_log_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "user_question_log" ADD CONSTRAINT "user_question_log_question_bank_id_question_bank_id_fk" FOREIGN KEY ("question_bank_id") REFERENCES "public"."question_bank"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "email_verification_codes" ADD CONSTRAINT "email_verification_codes_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
DO $$ BEGIN
ALTER TABLE "password_reset_tokens" ADD CONSTRAINT "password_reset_tokens_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
WHEN duplicate_object THEN null;
END $$;

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,13 @@
{
"version": "7",
"dialect": "postgresql",
"entries": [
{
"idx": 0,
"version": "7",
"when": 1772620981431,
"tag": "0000_fearless_salo",
"breakpoints": true
}
]
}

View File

@@ -0,0 +1,18 @@
// Fix: 'drizzle-orm/pg-core' was imported on two separate lines; merged.
import { pgTable, uuid, varchar, timestamp, jsonb } from 'drizzle-orm/pg-core';
import { users } from './users.js';

/**
 * audit_logs — append-only record of admin actions.
 * `details` carries an action-specific JSON payload; `target_type` +
 * `target_id` identify the affected entity generically.
 */
export const auditLogs = pgTable('audit_logs', {
  id: uuid('id').primaryKey().defaultRandom(),
  // Admin who performed the action; rows are removed with the account.
  adminId: uuid('admin_id')
    .notNull()
    .references(() => users.id, { onDelete: 'cascade' }),
  action: varchar('action', { length: 100 }).notNull(),
  targetType: varchar('target_type', { length: 50 }).notNull(),
  targetId: uuid('target_id').notNull(),
  details: jsonb('details').$type<Record<string, unknown>>(),
  createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
});

export type AuditLog = typeof auditLogs.$inferSelect;
export type NewAuditLog = typeof auditLogs.$inferInsert;

27
src/db/schema/enums.ts Normal file
View File

@@ -0,0 +1,27 @@
import { pgEnum } from 'drizzle-orm/pg-core';
// Postgres enum types shared by every schema module, plus TS union types
// derived from them so application code never restates the literal lists.
// Users / billing
export const userRoleEnum = pgEnum('user_role', ['guest', 'free', 'pro', 'admin']);
export const planEnum = pgEnum('plan', ['free', 'pro']);
export const subscriptionStatusEnum = pgEnum('subscription_status', ['active', 'trialing', 'cancelled', 'expired']);
// Tests
export const stackEnum = pgEnum('stack', ['html', 'css', 'js', 'ts', 'react', 'vue', 'nodejs', 'git', 'web_basics']);
export const levelEnum = pgEnum('level', ['basic', 'beginner', 'intermediate', 'advanced', 'expert']);
export const testModeEnum = pgEnum('test_mode', ['fixed', 'infinite', 'marathon']);
export const testStatusEnum = pgEnum('test_status', ['in_progress', 'completed', 'abandoned']);
// Question bank / moderation
export const questionTypeEnum = pgEnum('question_type', ['single_choice', 'multiple_select', 'true_false', 'short_text']);
export const questionStatusEnum = pgEnum('question_status', ['pending', 'approved', 'rejected']);
export const questionSourceEnum = pgEnum('question_source', ['llm_generated', 'manual']);
export const reportStatusEnum = pgEnum('report_status', ['open', 'resolved', 'dismissed']);
// User's self-assessed seniority on their profile
export const selfLevelEnum = pgEnum('self_level', ['jun', 'mid', 'sen']);
// Literal-union types derived from the enum values above.
export type UserRole = (typeof userRoleEnum.enumValues)[number];
export type Plan = (typeof planEnum.enumValues)[number];
export type SubscriptionStatus = (typeof subscriptionStatusEnum.enumValues)[number];
export type Stack = (typeof stackEnum.enumValues)[number];
export type Level = (typeof levelEnum.enumValues)[number];
export type TestMode = (typeof testModeEnum.enumValues)[number];
export type TestStatus = (typeof testStatusEnum.enumValues)[number];
export type QuestionType = (typeof questionTypeEnum.enumValues)[number];
export type QuestionStatus = (typeof questionStatusEnum.enumValues)[number];
export type QuestionSource = (typeof questionSourceEnum.enumValues)[number];
export type ReportStatus = (typeof reportStatusEnum.enumValues)[number];
export type SelfLevel = (typeof selfLevelEnum.enumValues)[number];

13
src/db/schema/index.ts Normal file
View File

@@ -0,0 +1,13 @@
// Barrel file: re-exports every schema module so consumers (and drizzle's
// `schema` option in the database plugin) can import from a single path.
export * from './enums.js';
export * from './users.js';
export * from './sessions.js';
export * from './subscriptions.js';
export * from './tests.js';
export * from './testQuestions.js';
export * from './questionBank.js';
export * from './questionCacheMeta.js';
export * from './questionReports.js';
export * from './userStats.js';
export * from './auditLogs.js';
export * from './userQuestionLog.js';
export * from './verificationTokens.js';

View File

@@ -0,0 +1,22 @@
import { pgTable, uuid, text, integer, timestamp } from 'drizzle-orm/pg-core';
import { jsonb } from 'drizzle-orm/pg-core';
import { stackEnum, levelEnum, questionTypeEnum, questionStatusEnum, questionSourceEnum } from './enums.js';
// Pool of reusable questions (LLM-generated or manual), keyed by stack/level,
// with a moderation status and a usage counter.
export const questionBank = pgTable('question_bank', {
id: uuid('id').primaryKey().defaultRandom(),
stack: stackEnum('stack').notNull(),
level: levelEnum('level').notNull(),
type: questionTypeEnum('type').notNull(),
questionText: text('question_text').notNull(),
// Nullable: presumably absent for true_false/short_text questions — confirm.
options: jsonb('options').$type<Array<{ key: string; text: string }>>(),
// string for single answers, string[] for multiple_select.
correctAnswer: jsonb('correct_answer').$type<string | string[]>().notNull(),
explanation: text('explanation').notNull(),
// New questions start 'pending' until moderated.
status: questionStatusEnum('status').notNull().default('pending'),
source: questionSourceEnum('source').notNull(),
usageCount: integer('usage_count').notNull().default(0),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
approvedAt: timestamp('approved_at', { withTimezone: true }),
});
export type QuestionBank = typeof questionBank.$inferSelect;
export type NewQuestionBank = typeof questionBank.$inferInsert;

View File

@@ -0,0 +1,18 @@
import { pgTable, uuid, varchar, integer, timestamp } from 'drizzle-orm/pg-core';
import { jsonb } from 'drizzle-orm/pg-core';
import { questionBank } from './questionBank.js';
// Generation metadata for LLM-produced bank questions: which model, which
// prompt (by hash), how long it took, and optionally the raw LLM response.
export const questionCacheMeta = pgTable('question_cache_meta', {
id: uuid('id').primaryKey().defaultRandom(),
questionBankId: uuid('question_bank_id')
.notNull()
.references(() => questionBank.id, { onDelete: 'cascade' }),
llmModel: varchar('llm_model', { length: 100 }).notNull(),
// 64 chars — sized for a hex-encoded SHA-256 digest, presumably; confirm.
promptHash: varchar('prompt_hash', { length: 64 }).notNull(),
generationTimeMs: integer('generation_time_ms').notNull(),
// Kept for debugging/auditing of the raw provider output.
rawResponse: jsonb('raw_response').$type<unknown>(),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
});
export type QuestionCacheMeta = typeof questionCacheMeta.$inferSelect;
export type NewQuestionCacheMeta = typeof questionCacheMeta.$inferInsert;

View File

@@ -0,0 +1,21 @@
import { pgTable, uuid, text, timestamp } from 'drizzle-orm/pg-core';
import { reportStatusEnum } from './enums.js';
import { questionBank } from './questionBank.js';
import { users } from './users.js';
// User-filed reports against bank questions, with a simple open/resolved/
// dismissed moderation lifecycle.
export const questionReports = pgTable('question_reports', {
id: uuid('id').primaryKey().defaultRandom(),
questionBankId: uuid('question_bank_id')
.notNull()
.references(() => questionBank.id, { onDelete: 'cascade' }),
userId: uuid('user_id')
.notNull()
.references(() => users.id, { onDelete: 'cascade' }),
reason: text('reason').notNull(),
status: reportStatusEnum('status').notNull().default('open'),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
// Set when status leaves 'open' — TODO confirm it is also set for 'dismissed'.
resolvedAt: timestamp('resolved_at', { withTimezone: true }),
});
export type QuestionReport = typeof questionReports.$inferSelect;
export type NewQuestionReport = typeof questionReports.$inferInsert;

18
src/db/schema/sessions.ts Normal file
View File

@@ -0,0 +1,18 @@
import { pgTable, uuid, varchar, timestamp } from 'drizzle-orm/pg-core';
import { users } from './users.js';
// Refresh-token sessions. Only a hash of the refresh token is stored; the
// token itself is looked up by hash (see AuthService.login/refresh/logout).
export const sessions = pgTable('sessions', {
id: uuid('id').primaryKey().defaultRandom(),
userId: uuid('user_id')
.notNull()
.references(() => users.id, { onDelete: 'cascade' }),
refreshTokenHash: varchar('refresh_token_hash', { length: 255 }).notNull(),
userAgent: varchar('user_agent', { length: 500 }),
// 45 chars fits a full IPv6 textual address.
ipAddress: varchar('ip_address', { length: 45 }),
lastActiveAt: timestamp('last_active_at', { withTimezone: true }).notNull().defaultNow(),
expiresAt: timestamp('expires_at', { withTimezone: true }).notNull(),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
});
export type Session = typeof sessions.$inferSelect;
export type NewSession = typeof sessions.$inferInsert;

View File

@@ -0,0 +1,21 @@
import { pgTable, uuid, varchar, timestamp } from 'drizzle-orm/pg-core';
import { planEnum, subscriptionStatusEnum } from './enums.js';
import { users } from './users.js';
// One subscription row per user (userId is unique). expiresAt null means
// no fixed end date; the subscription plugin combines plan/status/expiresAt
// to compute the effective "isPro" flag.
export const subscriptions = pgTable('subscriptions', {
id: uuid('id').primaryKey().defaultRandom(),
userId: uuid('user_id')
.notNull()
.references(() => users.id, { onDelete: 'cascade' })
.unique(),
plan: planEnum('plan').notNull(),
status: subscriptionStatusEnum('status').notNull(),
startedAt: timestamp('started_at', { withTimezone: true }).notNull(),
expiresAt: timestamp('expires_at', { withTimezone: true }),
cancelledAt: timestamp('cancelled_at', { withTimezone: true }),
// Billing linkage — provider name plus its opaque subscription id.
paymentProvider: varchar('payment_provider', { length: 50 }),
externalId: varchar('external_id', { length: 255 }),
});
export type Subscription = typeof subscriptions.$inferSelect;
export type NewSubscription = typeof subscriptions.$inferInsert;

View File

@@ -0,0 +1,25 @@
import { pgTable, uuid, text, integer, boolean, timestamp } from 'drizzle-orm/pg-core';
import { jsonb } from 'drizzle-orm/pg-core';
import { questionTypeEnum } from './enums.js';
import { tests } from './tests.js';
import { questionBank } from './questionBank.js';
// Questions as materialized inside a specific test. Question text/options/
// answer are copied from the bank so later bank edits don't rewrite history;
// questionBankId is kept only as a soft link (set null on bank deletion).
export const testQuestions = pgTable('test_questions', {
id: uuid('id').primaryKey().defaultRandom(),
testId: uuid('test_id')
.notNull()
.references(() => tests.id, { onDelete: 'cascade' }),
questionBankId: uuid('question_bank_id').references(() => questionBank.id, { onDelete: 'set null' }),
// 1-based or 0-based position within the test — TODO confirm convention.
orderNumber: integer('order_number').notNull(),
type: questionTypeEnum('type').notNull(),
questionText: text('question_text').notNull(),
options: jsonb('options').$type<Array<{ key: string; text: string }>>(),
correctAnswer: jsonb('correct_answer').$type<string | string[]>().notNull(),
explanation: text('explanation').notNull(),
// Null until the user answers; isCorrect/answeredAt are filled in then.
userAnswer: jsonb('user_answer').$type<string | string[]>(),
isCorrect: boolean('is_correct'),
answeredAt: timestamp('answered_at', { withTimezone: true }),
});
export type TestQuestion = typeof testQuestions.$inferSelect;
export type NewTestQuestion = typeof testQuestions.$inferInsert;

22
src/db/schema/tests.ts Normal file
View File

@@ -0,0 +1,22 @@
import { pgTable, uuid, integer, timestamp } from 'drizzle-orm/pg-core';
import { stackEnum, levelEnum, testModeEnum, testStatusEnum } from './enums.js';
import { users } from './users.js';
// A single test run by a user: stack/level/mode selection, lifecycle status,
// and the final score (null while in progress).
export const tests = pgTable('tests', {
id: uuid('id').primaryKey().defaultRandom(),
userId: uuid('user_id')
.notNull()
.references(() => users.id, { onDelete: 'cascade' }),
stack: stackEnum('stack').notNull(),
level: levelEnum('level').notNull(),
questionCount: integer('question_count').notNull(),
mode: testModeEnum('mode').notNull().default('fixed'),
status: testStatusEnum('status').notNull().default('in_progress'),
// NOTE(review): units unclear (points vs. percentage) — confirm.
score: integer('score'),
startedAt: timestamp('started_at', { withTimezone: true }).notNull().defaultNow(),
finishedAt: timestamp('finished_at', { withTimezone: true }),
// Null means untimed.
timeLimitSeconds: integer('time_limit_seconds'),
});
export type Test = typeof tests.$inferSelect;
export type NewTest = typeof tests.$inferInsert;

View File

@@ -0,0 +1,17 @@
import { pgTable, uuid, timestamp } from 'drizzle-orm/pg-core';
import { users } from './users.js';
import { questionBank } from './questionBank.js';
// Tracks which bank questions a user has already seen (presumably to avoid
// repeats when assembling new tests — confirm against the test builder).
// NOTE(review): no unique (userId, questionBankId) constraint, so repeated
// sightings create multiple rows — confirm this is intended.
export const userQuestionLog = pgTable('user_question_log', {
id: uuid('id').primaryKey().defaultRandom(),
userId: uuid('user_id')
.notNull()
.references(() => users.id, { onDelete: 'cascade' }),
questionBankId: uuid('question_bank_id')
.notNull()
.references(() => questionBank.id, { onDelete: 'cascade' }),
seenAt: timestamp('seen_at', { withTimezone: true }).notNull().defaultNow(),
});
export type UserQuestionLog = typeof userQuestionLog.$inferSelect;
export type NewUserQuestionLog = typeof userQuestionLog.$inferInsert;

View File

@@ -0,0 +1,26 @@
import { pgTable, uuid, integer, timestamp } from 'drizzle-orm/pg-core';
import { unique } from 'drizzle-orm/pg-core';
import { stackEnum, levelEnum } from './enums.js';
import { users } from './users.js';
// Aggregated per-user statistics, bucketed by (stack, level). The composite
// unique constraint guarantees one accumulator row per bucket, enabling
// upsert-style updates.
export const userStats = pgTable(
'user_stats',
{
id: uuid('id').primaryKey().defaultRandom(),
userId: uuid('user_id')
.notNull()
.references(() => users.id, { onDelete: 'cascade' }),
stack: stackEnum('stack').notNull(),
level: levelEnum('level').notNull(),
totalQuestions: integer('total_questions').notNull().default(0),
correctAnswers: integer('correct_answers').notNull().default(0),
testsTaken: integer('tests_taken').notNull().default(0),
lastTestAt: timestamp('last_test_at', { withTimezone: true }),
},
(t) => ({
// One stats row per user per stack/level combination.
userStackLevelUnique: unique().on(t.userId, t.stack, t.level),
})
);
export type UserStat = typeof userStats.$inferSelect;
export type NewUserStat = typeof userStats.$inferInsert;

21
src/db/schema/users.ts Normal file
View File

@@ -0,0 +1,21 @@
import { pgTable, uuid, varchar, timestamp, boolean } from 'drizzle-orm/pg-core';
import { userRoleEnum, selfLevelEnum } from './enums.js';
// Core user accounts. Email is the unique login identifier; passwords are
// stored only as argon2 hashes (see utils/password and AuthService).
export const users = pgTable('users', {
id: uuid('id').primaryKey().defaultRandom(),
email: varchar('email', { length: 255 }).notNull().unique(),
passwordHash: varchar('password_hash', { length: 255 }).notNull(),
// NOTE(review): nickname is not .unique() here although AuthService.register
// rejects duplicates — the check is race-prone without a DB constraint.
nickname: varchar('nickname', { length: 30 }).notNull(),
avatarUrl: varchar('avatar_url', { length: 500 }),
country: varchar('country', { length: 100 }),
city: varchar('city', { length: 100 }),
selfLevel: selfLevelEnum('self_level'),
// Controls visibility of the public profile endpoint.
isPublic: boolean('is_public').notNull().default(true),
// Defaults to 'free' even though the enum also has 'guest' — guests are
// presumably never persisted; confirm.
role: userRoleEnum('role').notNull().default('free'),
// Null until the email verification code is confirmed.
emailVerifiedAt: timestamp('email_verified_at', { withTimezone: true }),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
updatedAt: timestamp('updated_at', { withTimezone: true }).notNull().defaultNow(),
});
export type User = typeof users.$inferSelect;
export type NewUser = typeof users.$inferInsert;

View File

@@ -0,0 +1,22 @@
import { pgTable, uuid, varchar, timestamp } from 'drizzle-orm/pg-core';
import { users } from './users.js';
// Short-lived email verification codes. The code itself is stored in plain
// text (it is short-lived and single-purpose), unlike reset tokens below.
export const emailVerificationCodes = pgTable('email_verification_codes', {
id: uuid('id').primaryKey().defaultRandom(),
userId: uuid('user_id')
.notNull()
.references(() => users.id, { onDelete: 'cascade' }),
code: varchar('code', { length: 10 }).notNull(),
expiresAt: timestamp('expires_at', { withTimezone: true }).notNull(),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
});
// Password reset tokens: only the hash is persisted; the raw token goes to
// the user (via email) and is re-hashed for lookup on reset.
export const passwordResetTokens = pgTable('password_reset_tokens', {
id: uuid('id').primaryKey().defaultRandom(),
userId: uuid('user_id')
.notNull()
.references(() => users.id, { onDelete: 'cascade' }),
tokenHash: varchar('token_hash', { length: 255 }).notNull(),
expiresAt: timestamp('expires_at', { withTimezone: true }).notNull(),
createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(),
});

47
src/db/seed.ts Normal file
View File

@@ -0,0 +1,47 @@
import 'dotenv/config';
import { drizzle } from 'drizzle-orm/node-postgres';
import { eq } from 'drizzle-orm';
import pg from 'pg';
import argon2 from 'argon2';
import { env } from '../config/env.js';
import { users } from './schema/index.js';
const { Pool } = pg;
const TEST_USER = {
  email: 'test@example.com',
  password: 'TestPassword123!',
  nickname: 'TestUser',
};

/**
 * Seeds a development-only test user. Idempotent: skips when the email
 * already exists. Refuses to run outside NODE_ENV=development.
 */
async function runSeed() {
  // Guard BEFORE opening any connection — previously a pool was created and
  // torn down even when the environment check meant nothing would run.
  if (env.NODE_ENV !== 'development') {
    return;
  }
  const pool = new Pool({ connectionString: env.DATABASE_URL });
  const db = drizzle(pool);
  try {
    const existing = await db
      .select()
      .from(users)
      .where(eq(users.email, TEST_USER.email))
      .limit(1);
    if (existing.length > 0) {
      return; // already seeded
    }
    const passwordHash = await argon2.hash(TEST_USER.password);
    await db.insert(users).values({
      email: TEST_USER.email,
      passwordHash,
      nickname: TEST_USER.nickname,
      role: 'free',
      // Pre-verified so the dev account is usable without the email flow.
      emailVerifiedAt: new Date(),
    });
  } finally {
    // Always release the pool — previously a query failure skipped pool.end(),
    // leaving the process to exit only via the catch's process.exit.
    await pool.end();
  }
}
runSeed().catch((err) => {
  console.error('Seed failed:', err);
  process.exit(1);
});

42
src/plugins/auth.ts Normal file
View File

@@ -0,0 +1,42 @@
import type { FastifyInstance, FastifyRequest, FastifyReply } from 'fastify';
import fp from 'fastify-plugin';
import { verifyToken, isAccessPayload } from '../utils/jwt.js';
import { unauthorized } from '../utils/errors.js';
// Fastify module augmentation: exposes the `authenticate` preHandler on the
// instance and the decoded access-token identity on the request.
declare module 'fastify' {
interface FastifyInstance {
authenticate: (req: FastifyRequest, reply: FastifyReply) => Promise<void>;
}
interface FastifyRequest {
// Populated by authenticate(); undefined on unauthenticated routes.
user?: { id: string; email: string };
}
}
/**
 * PreHandler that verifies the Bearer access token and attaches
 * `{ id, email }` to `req.user`.
 *
 * Throws 401 (via unauthorized) when the header is missing/malformed, the
 * token fails verification, or the token is not an access token.
 */
export async function authenticate(req: FastifyRequest, _reply: FastifyReply): Promise<void> {
  const authHeader = req.headers.authorization;
  if (!authHeader?.startsWith('Bearer ')) {
    throw unauthorized('Missing or invalid authorization header');
  }
  const token = authHeader.slice(7);
  let payload;
  try {
    payload = await verifyToken(token);
  } catch {
    throw unauthorized('Invalid or expired token');
  }
  // Checked OUTSIDE the try/catch: previously this throw happened inside it,
  // so the catch swallowed "Invalid token type" and replaced it with the
  // generic "Invalid or expired token" message.
  if (!isAccessPayload(payload)) {
    throw unauthorized('Invalid token type');
  }
  req.user = { id: payload.sub, email: payload.email };
}
// Registers the request.user slot and exposes `authenticate` on the instance.
async function authPlugin(app: FastifyInstance): Promise<void> {
  app.decorateRequest('user', undefined);
  app.decorate('authenticate', authenticate);
}
export default fp(authPlugin, { name: 'auth' });

33
src/plugins/database.ts Normal file
View File

@@ -0,0 +1,33 @@
import { FastifyInstance, FastifyPluginAsync } from 'fastify';
import { drizzle } from 'drizzle-orm/node-postgres';
import pg from 'pg';
import fp from 'fastify-plugin';
import { env } from '../config/env.js';
import * as schema from '../db/schema/index.js';
const { Pool } = pg;
// Exposes a typed drizzle instance as app.db.
declare module 'fastify' {
interface FastifyInstance {
db: ReturnType<typeof drizzle<typeof schema>>;
}
}
// Creates the pg connection pool, wraps it in drizzle with the full schema
// (enabling relational queries and typed tables), and closes it on shutdown.
const databasePlugin: FastifyPluginAsync = async (app: FastifyInstance) => {
const pool = new Pool({
connectionString: env.DATABASE_URL,
// Conservative pool sizing; tune max for expected concurrency.
max: 10,
idleTimeoutMillis: 30000,
connectionTimeoutMillis: 5000,
});
const db = drizzle(pool, { schema });
app.decorate('db', db);
// Drain the pool when Fastify closes so the process can exit cleanly.
app.addHook('onClose', async () => {
await pool.end();
});
};
export default fp(databasePlugin, { name: 'database' });

60
src/plugins/rateLimit.ts Normal file
View File

@@ -0,0 +1,60 @@
import { FastifyInstance, FastifyPluginAsync } from 'fastify';
import rateLimit from '@fastify/rate-limit';
import fp from 'fastify-plugin';
import { env } from '../config/env.js';
// Named per-endpoint rate-limit presets, attached to the instance so route
// files can reference them via `config: { rateLimit: ... }`.
declare module 'fastify' {
interface FastifyInstance {
rateLimitOptions: {
login: { max: number; timeWindow: string };
register: { max: number; timeWindow: string };
forgotPassword: { max: number; timeWindow: string };
verifyEmail: { max: number; timeWindow: string };
apiAuthed: { max: number; timeWindow: string };
apiGuest: { max: number; timeWindow: string };
};
}
}
/**
 * Registers @fastify/rate-limit backed by the shared Redis client so limits
 * hold across instances, and publishes per-endpoint presets on the instance.
 */
const rateLimitPlugin: FastifyPluginAsync = async (app: FastifyInstance) => {
  // Limits are env-driven so ops can tune them without a redeploy.
  const options = {
    login: { max: env.RATE_LIMIT_LOGIN, timeWindow: '15 minutes' },
    register: { max: env.RATE_LIMIT_REGISTER, timeWindow: '1 hour' },
    forgotPassword: { max: env.RATE_LIMIT_FORGOT_PASSWORD, timeWindow: '1 hour' },
    verifyEmail: { max: env.RATE_LIMIT_VERIFY_EMAIL, timeWindow: '15 minutes' },
    apiAuthed: { max: env.RATE_LIMIT_API_AUTHED, timeWindow: '1 minute' },
    apiGuest: { max: env.RATE_LIMIT_API_GUEST, timeWindow: '1 minute' },
  };
  app.decorate('rateLimitOptions', options);
  await app.register(rateLimit, {
    // Not global: routes opt in via config.rateLimit; the guest preset is the
    // default for routes that enable limiting without overrides.
    global: false,
    max: options.apiGuest.max,
    timeWindow: options.apiGuest.timeWindow,
    // Key purely by client IP. req.ip is already typed as string, so the
    // previous `as string` cast was redundant; the fallback is kept for
    // unusual trust-proxy configurations.
    keyGenerator: (req) => req.ip ?? 'unknown',
    redis: app.redis,
    addHeadersOnExceeding: {
      'x-ratelimit-limit': true,
      'x-ratelimit-remaining': true,
      'x-ratelimit-reset': true,
    },
    addHeaders: {
      'x-ratelimit-limit': true,
      'x-ratelimit-remaining': true,
      'x-ratelimit-reset': true,
      'retry-after': true,
    },
    errorResponseBuilder: (_req, context) => ({
      error: {
        code: 'RATE_LIMIT_EXCEEDED',
        message: 'Too many requests, please try again later',
        retryAfter: context.ttl,
      },
    }),
  });
};
export default fp(rateLimitPlugin, { name: 'rateLimit', dependencies: ['redis'] });

32
src/plugins/redis.ts Normal file
View File

@@ -0,0 +1,32 @@
import { FastifyInstance, FastifyPluginAsync } from 'fastify';
import { Redis } from 'ioredis';
import fp from 'fastify-plugin';
import { env } from '../config/env.js';
// Exposes the shared ioredis client as app.redis.
declare module 'fastify' {
interface FastifyInstance {
redis: Redis;
}
}
// Creates the ioredis client with bounded retries and linear backoff
// (capped at 3s), logs connection errors, and quits the client on shutdown.
const redisPlugin: FastifyPluginAsync = async (app: FastifyInstance) => {
const redis = new Redis(env.REDIS_URL, {
maxRetriesPerRequest: 3,
retryStrategy(times: number) {
// Linear backoff: 100ms per attempt, capped at 3 seconds.
const delay = Math.min(times * 100, 3000);
return delay;
},
});
// Log (don't crash) on connection errors; ioredis reconnects on its own.
redis.on('error', (err: Error) => {
app.log.error({ err }, 'Redis connection error');
});
app.decorate('redis', redis);
app.addHook('onClose', async () => {
await redis.quit();
});
};
export default fp(redisPlugin, { name: 'redis' });

21
src/plugins/security.ts Normal file
View File

@@ -0,0 +1,21 @@
import { FastifyInstance, FastifyPluginAsync } from 'fastify';
import helmet from '@fastify/helmet';
import cors from '@fastify/cors';
import fp from 'fastify-plugin';
import { getCorsOrigins } from '../config/env.js';
// Baseline HTTP hardening: helmet security headers first, then CORS.
const securityPlugin: FastifyPluginAsync = async (app: FastifyInstance) => {
  const helmetOptions = {
    // CSP and COEP are explicitly disabled — presumably because this service
    // serves JSON, not pages; confirm before re-enabling.
    contentSecurityPolicy: false,
    crossOriginEmbedderPolicy: false,
  };
  const corsOptions = {
    origin: getCorsOrigins(),
    credentials: true,
    methods: ['GET', 'POST', 'PATCH', 'DELETE', 'PUT', 'OPTIONS'],
    allowedHeaders: ['Content-Type', 'Authorization'],
  };
  await app.register(helmet, helmetOptions);
  await app.register(cors, corsOptions);
};
export default fp(securityPlugin, { name: 'security' });

View File

@@ -0,0 +1,71 @@
import type { FastifyInstance, FastifyRequest, FastifyReply } from 'fastify';
import fp from 'fastify-plugin';
import { eq } from 'drizzle-orm';
import { subscriptions } from '../db/schema/subscriptions.js';
import { forbidden } from '../utils/errors.js';
// Normalized view of a user's subscription row, with the derived isPro flag.
export type SubscriptionInfo = {
plan: 'free' | 'pro';
status: 'active' | 'trialing' | 'cancelled' | 'expired';
// True only for an unexpired pro plan in active/trialing status.
isPro: boolean;
expiresAt: Date | null;
};
declare module 'fastify' {
interface FastifyRequest {
// undefined = not loaded; null = loaded but user has no subscription row.
subscription?: SubscriptionInfo | null;
}
interface FastifyInstance {
withSubscription: (req: FastifyRequest, reply: FastifyReply) => Promise<void>;
requirePro: (req: FastifyRequest, reply: FastifyReply) => Promise<void>;
}
}
// Fetches the user's (unique) subscription row and derives SubscriptionInfo.
// Returns null when the user has no subscription at all.
async function loadSubscription(db: FastifyInstance['db'], userId: string): Promise<SubscriptionInfo | null> {
const [sub] = await db
.select()
.from(subscriptions)
.where(eq(subscriptions.userId, userId))
.limit(1);
if (!sub) return null;
const now = new Date();
// Note: `isExpired` is `boolean | null` (null when expiresAt is null);
// `!isExpired` below treats null as "not expired", which is the intent.
const isExpired = sub.expiresAt && sub.expiresAt < now;
const isPro =
sub.plan === 'pro' &&
(sub.status === 'active' || sub.status === 'trialing') &&
!isExpired;
// NOTE(review): the `as` casts should be unnecessary if drizzle infers the
// enum columns as literal unions — consider removing; confirm inferred types.
return {
plan: sub.plan as 'free' | 'pro',
status: sub.status as SubscriptionInfo['status'],
isPro,
expiresAt: sub.expiresAt,
};
}
/**
 * Guard preHandler: throws 403 unless the request carries a Pro subscription.
 * Expects `withSubscription` to have populated req.subscription earlier in
 * the hook chain.
 */
export async function requirePro(req: FastifyRequest, _reply: FastifyReply): Promise<void> {
  if (req.subscription?.isPro) {
    return;
  }
  throw forbidden('Pro subscription required');
}
// Decorates the instance with subscription helpers. withSubscription is a
// no-op for unauthenticated requests (req.user unset).
const subscriptionPlugin = async (app: FastifyInstance) => {
app.decorateRequest('subscription', undefined);
app.decorate('withSubscription', async (req: FastifyRequest, _reply: FastifyReply) => {
if (!req.user?.id) return;
const sub = await loadSubscription(app.db, req.user.id);
req.subscription = sub;
});
app.decorate('requirePro', requirePro);
});
export default fp(subscriptionPlugin, {
name: 'subscription',
// Needs app.db (database) and req.user typing/population (auth).
dependencies: ['database', 'auth'],
});

168
src/routes/auth.ts Normal file
View File

@@ -0,0 +1,168 @@
import type { FastifyInstance } from 'fastify';
import { AuthService } from '../services/auth/auth.service.js';
// JSON-schema request bodies for the auth endpoints. Email fields are only
// checked for non-emptiness here; format validation presumably happens in
// the service layer — TODO confirm.
const registerSchema = {
body: {
type: 'object',
required: ['email', 'password', 'nickname'],
properties: {
email: { type: 'string', minLength: 1 },
password: { type: 'string', minLength: 8 },
nickname: { type: 'string', minLength: 2, maxLength: 30 },
},
},
};
const loginSchema = {
body: {
type: 'object',
required: ['email', 'password'],
properties: {
email: { type: 'string', minLength: 1 },
password: { type: 'string' },
},
},
};
const refreshTokenSchema = {
body: {
type: 'object',
required: ['refreshToken'],
properties: {
refreshToken: { type: 'string' },
},
},
};
// Logout takes the same body as refresh: just the refresh token to revoke.
const logoutSchema = refreshTokenSchema;
const verifyEmailSchema = {
body: {
type: 'object',
required: ['userId', 'code'],
properties: {
userId: { type: 'string', minLength: 1 },
code: { type: 'string', minLength: 1, maxLength: 10 },
},
},
};
const forgotPasswordSchema = {
body: {
type: 'object',
required: ['email'],
properties: {
email: { type: 'string', minLength: 1 },
},
},
};
const resetPasswordSchema = {
body: {
type: 'object',
required: ['token', 'newPassword'],
properties: {
token: { type: 'string' },
newPassword: { type: 'string', minLength: 8 },
},
},
};
// Registers the /register, /login, /logout, /refresh, /verify-email,
// /forgot-password and /reset-password endpoints, each with its own
// rate-limit preset from the rateLimit plugin.
export async function authRoutes(app: FastifyInstance) {
const authService = new AuthService(app.db);
const { rateLimitOptions } = app;
app.post(
'/register',
{ schema: registerSchema, config: { rateLimit: rateLimitOptions.register } },
async (req, reply) => {
const body = req.body as { email: string; password: string; nickname: string };
const { userId, verificationCode } = await authService.register(body);
// NOTE(review): verificationCode is returned in the HTTP response —
// presumably a dev convenience until email delivery exists; confirm this
// is stripped in production, otherwise verification is bypassable.
return reply.status(201).send({
userId,
message: 'Registration successful. Please verify your email.',
verificationCode,
});
},
);
app.post(
'/login',
{ schema: loginSchema, config: { rateLimit: rateLimitOptions.login } },
async (req, reply) => {
const body = req.body as { email: string; password: string };
// Captured for session bookkeeping (sessions.userAgent / ipAddress).
const userAgent = req.headers['user-agent'];
const ipAddress = req.ip;
const result = await authService.login({
email: body.email,
password: body.password,
userAgent,
ipAddress,
});
return reply.send(result);
},
);
app.post(
'/logout',
{ schema: logoutSchema, config: { rateLimit: rateLimitOptions.apiGuest } },
async (req, reply) => {
const body = req.body as { refreshToken: string };
await authService.logout(body.refreshToken);
// 204: logout is idempotent — succeeds even for unknown tokens.
return reply.status(204).send();
},
);
app.post(
'/refresh',
{ schema: refreshTokenSchema, config: { rateLimit: rateLimitOptions.apiGuest } },
async (req, reply) => {
const body = req.body as { refreshToken: string };
const userAgent = req.headers['user-agent'];
const ipAddress = req.ip;
const result = await authService.refresh({
refreshToken: body.refreshToken,
userAgent,
ipAddress,
});
return reply.send(result);
},
);
app.post(
'/verify-email',
{ schema: verifyEmailSchema, config: { rateLimit: rateLimitOptions.verifyEmail } },
async (req, reply) => {
const body = req.body as { userId: string; code: string };
await authService.verifyEmail(body.userId, body.code);
return reply.send({ message: 'Email verified successfully' });
},
);
app.post(
'/forgot-password',
{ schema: forgotPasswordSchema, config: { rateLimit: rateLimitOptions.forgotPassword } },
async (req, reply) => {
const body = req.body as { email: string };
// The generated token is deliberately NOT returned: same response whether
// or not the email exists, to avoid account enumeration.
await authService.forgotPassword(body.email);
return reply.send({
message: 'If the email exists, a reset link has been sent.',
});
},
);
app.post(
'/reset-password',
{ schema: resetPasswordSchema, config: { rateLimit: rateLimitOptions.forgotPassword } },
async (req, reply) => {
const body = req.body as { token: string; newPassword: string };
await authService.resetPassword(body.token, body.newPassword);
return reply.send({ message: 'Password reset successfully' });
},
);
}

73
src/routes/profile.ts Normal file
View File

@@ -0,0 +1,73 @@
import type { FastifyInstance } from 'fastify';
import { UserService } from '../services/user/user.service.js';
// PATCH body: every field optional; explicit nulls clear nullable columns.
// additionalProperties:false rejects unknown keys outright.
const patchProfileSchema = {
body: {
type: 'object',
properties: {
nickname: { type: 'string', minLength: 2, maxLength: 30 },
avatarUrl: { type: ['string', 'null'], maxLength: 500 },
country: { type: ['string', 'null'], maxLength: 100 },
city: { type: ['string', 'null'], maxLength: 100 },
selfLevel: { type: ['string', 'null'], enum: ['jun', 'mid', 'sen'] },
isPublic: { type: 'boolean' },
},
additionalProperties: false,
},
};
// Path params for the public profile lookup (by nickname).
const usernameParamsSchema = {
params: {
type: 'object',
required: ['username'],
properties: {
username: { type: 'string', minLength: 2, maxLength: 30 },
},
},
};
// Profile endpoints: GET / (own profile, authed, with subscription info),
// PATCH / (update own profile), GET /:username (public profile, no auth).
export async function profileRoutes(app: FastifyInstance) {
const userService = new UserService(app.db);
const { rateLimitOptions } = app;
app.get(
'/',
{
config: { rateLimit: rateLimitOptions.apiAuthed },
// withSubscription runs after authenticate so req.user is populated.
preHandler: [app.authenticate, app.withSubscription],
},
async (req, reply) => {
// Non-null assertion is safe: authenticate throws before reaching here.
const userId = req.user!.id;
const profile = await userService.getPrivateProfile(userId);
return reply.send(profile);
},
);
app.patch(
'/',
{
schema: patchProfileSchema,
config: { rateLimit: rateLimitOptions.apiAuthed },
preHandler: [app.authenticate],
},
async (req, reply) => {
const userId = req.user!.id;
// Body type is derived from the service signature to avoid drift.
const body = req.body as Parameters<UserService['updateProfile']>[1];
const profile = await userService.updateProfile(userId, body);
return reply.send(profile);
},
);
app.get(
'/:username',
{
schema: usernameParamsSchema,
// Intentionally unauthenticated: public profiles use the guest limit.
config: { rateLimit: rateLimitOptions.apiGuest },
},
async (req, reply) => {
const { username } = req.params as { username: string };
const profile = await userService.getPublicProfile(username);
return reply.send(profile);
},
);
}

26
src/server.ts Normal file
View File

@@ -0,0 +1,26 @@
import 'dotenv/config';
import { buildApp } from './app.js';
import { env } from './config/env.js';
/**
 * Boots the Fastify app, starts listening, and wires graceful shutdown on
 * SIGINT/SIGTERM.
 */
async function main() {
  const app = await buildApp();
  try {
    await app.listen({ port: env.PORT, host: env.HOST });
    app.log.info({ port: env.PORT }, 'Server started');
  } catch (err) {
    app.log.error(err);
    process.exit(1);
  }
  const shutdown = async () => {
    app.log.info('Shutting down...');
    await app.close();
    process.exit(0);
  };
  process.on('SIGINT', shutdown);
  process.on('SIGTERM', shutdown);
}
// Previously `main()` was a floating promise: a rejection from buildApp()
// (before the try block) surfaced as an unhandled rejection. Catch it and
// exit non-zero explicitly.
main().catch((err) => {
  console.error('Fatal startup error:', err);
  process.exit(1);
});

View File

@@ -0,0 +1,304 @@
import { eq, and, gt } from 'drizzle-orm';
import type { NodePgDatabase } from 'drizzle-orm/node-postgres';
import type * as schema from '../../db/schema/index.js';
import { users, sessions, emailVerificationCodes, passwordResetTokens } from '../../db/schema/index.js';
import { hashPassword, verifyPassword } from '../../utils/password.js';
import {
signAccessToken,
signRefreshToken,
verifyToken,
isRefreshPayload,
hashToken,
} from '../../utils/jwt.js';
import {
AppError,
conflict,
unauthorized,
notFound,
ERROR_CODES,
} from '../../utils/errors.js';
import { randomBytes, randomUUID } from 'node:crypto';
// Drizzle database handle typed with the full schema.
type Db = NodePgDatabase<typeof schema>;
export interface RegisterInput {
email: string;
password: string;
nickname: string;
}
export interface LoginInput {
email: string;
password: string;
// Optional request metadata persisted on the session row.
userAgent?: string;
ipAddress?: string;
}
// Returned by both login() and refresh().
export interface LoginResult {
accessToken: string;
refreshToken: string;
// Seconds until expiry — derived from REFRESH_TTL_MS below.
expiresIn: number;
}
export interface RefreshInput {
refreshToken: string;
userAgent?: string;
ipAddress?: string;
}
export interface ForgotPasswordResult {
token: string;
expiresAt: Date;
}
// Refresh sessions live 7 days.
const REFRESH_TTL_MS = 7 * 24 * 60 * 60 * 1000;
// Used both as the randomBytes byte count and the final code char length
// (see register(): 6 bytes -> 12 hex chars, sliced to 6).
const VERIFICATION_CODE_LENGTH = 6;
// Email verification codes live 15 minutes.
const VERIFICATION_CODE_TTL_MS = 15 * 60 * 1000;
// Password reset tokens live 1 hour.
const RESET_TOKEN_TTL_MS = 60 * 60 * 1000;
export class AuthService {
constructor(private readonly db: Db) {}
/**
 * Creates a user (email lowercased/trimmed, password argon2-hashed) and a
 * 15-minute email verification code, which is returned to the caller.
 * Throws conflict(EMAIL_TAKEN / NICKNAME_TAKEN) on duplicates.
 *
 * NOTE(review): both uniqueness checks are select-then-insert and therefore
 * race-prone; a concurrent insert would surface as a raw DB error (500)
 * rather than a 409. Consider catching the unique-violation error code.
 */
async register(input: RegisterInput): Promise<{ userId: string; verificationCode: string }> {
const [existing] = await this.db
.select()
.from(users)
.where(eq(users.email, input.email.toLowerCase().trim()))
.limit(1);
if (existing) {
throw conflict(ERROR_CODES.EMAIL_TAKEN, 'Email already registered');
}
const [nicknameConflict] = await this.db
.select()
.from(users)
.where(eq(users.nickname, input.nickname.trim()))
.limit(1);
if (nicknameConflict) {
throw conflict(ERROR_CODES.NICKNAME_TAKEN, 'Nickname already taken');
}
const passwordHash = await hashPassword(input.password);
const [user] = await this.db
.insert(users)
.values({
email: input.email.toLowerCase().trim(),
passwordHash,
nickname: input.nickname.trim(),
})
.returning({ id: users.id });
if (!user) {
throw new AppError(ERROR_CODES.INTERNAL_ERROR, 'Failed to create user', 500);
}
// 6 random bytes -> 12 hex chars, sliced to 6 and uppercased: a 6-char
// code over the alphabet 0-9A-F (16^6 possibilities).
const code = randomBytes(VERIFICATION_CODE_LENGTH)
.toString('hex')
.slice(0, VERIFICATION_CODE_LENGTH)
.toUpperCase();
const expiresAt = new Date(Date.now() + VERIFICATION_CODE_TTL_MS);
await this.db.insert(emailVerificationCodes).values({
userId: user.id,
code,
expiresAt,
});
return { userId: user.id, verificationCode: code };
}
/**
 * Verifies credentials, creates a session row holding the hash of a new
 * refresh token, and returns both tokens. Throws a single generic 401 for
 * both unknown email and wrong password (no account enumeration).
 *
 * NOTE(review): when the user is unknown, verifyPassword is short-circuited,
 * so response timing differs from the wrong-password path — a timing oracle
 * if that matters here; consider hashing a dummy password in that branch.
 * NOTE(review): expiresIn reflects the REFRESH token TTL; confirm clients
 * don't interpret it as the access-token lifetime.
 */
async login(input: LoginInput): Promise<LoginResult> {
const [user] = await this.db
.select()
.from(users)
.where(eq(users.email, input.email.toLowerCase().trim()))
.limit(1);
if (!user || !(await verifyPassword(user.passwordHash, input.password))) {
throw unauthorized('Invalid email or password');
}
const [accessToken, refreshToken] = await Promise.all([
signAccessToken({ sub: user.id, email: user.email }),
signRefreshToken({ sub: user.id, sid: randomUUID() }),
]);
// Only the hash is stored; the raw refresh token goes to the client.
const refreshHash = hashToken(refreshToken);
const expiresAt = new Date(Date.now() + REFRESH_TTL_MS);
await this.db.insert(sessions).values({
userId: user.id,
refreshTokenHash: refreshHash,
userAgent: input.userAgent ?? null,
ipAddress: input.ipAddress ?? null,
expiresAt,
});
return {
accessToken,
refreshToken,
expiresIn: Math.floor(REFRESH_TTL_MS / 1000),
};
}
/**
 * Revokes a session by deleting the row whose stored hash matches this
 * refresh token. Idempotent: unknown tokens simply delete nothing.
 */
async logout(refreshToken: string): Promise<void> {
  const tokenHash = hashToken(refreshToken);
  await this.db.delete(sessions).where(eq(sessions.refreshTokenHash, tokenHash));
}
/**
 * Rotates a refresh token: validates the JWT, looks up the unexpired session
 * by token hash, deletes it (single-use), and issues a fresh token pair with
 * a new session row. Throws 401 for any invalid/expired/unknown token.
 */
async refresh(input: RefreshInput): Promise<LoginResult> {
  let payload;
  try {
    payload = await verifyToken(input.refreshToken);
  } catch {
    // Previously a malformed/expired JWT rejected out of verifyToken
    // uncaught, surfacing as a 500 — normalize to 401 like every other
    // auth failure (and consistent with the authenticate preHandler).
    throw unauthorized('Invalid refresh token');
  }
  if (!isRefreshPayload(payload)) {
    throw unauthorized('Invalid refresh token');
  }
  const hash = hashToken(input.refreshToken);
  const [session] = await this.db
    .select()
    .from(sessions)
    .where(and(eq(sessions.refreshTokenHash, hash), gt(sessions.expiresAt, new Date())))
    .limit(1);
  if (!session) {
    throw new AppError(ERROR_CODES.INVALID_REFRESH_TOKEN, 'Invalid or expired refresh token', 401);
  }
  // Single-use rotation: the old session is removed before the new one is
  // created, so a replayed old token cannot mint another pair.
  await this.db.delete(sessions).where(eq(sessions.id, session.id));
  const [user] = await this.db.select().from(users).where(eq(users.id, session.userId)).limit(1);
  if (!user) {
    throw notFound('User not found');
  }
  const [accessToken, newRefreshToken] = await Promise.all([
    signAccessToken({ sub: user.id, email: user.email }),
    signRefreshToken({ sub: user.id, sid: randomUUID() }),
  ]);
  const newHash = hashToken(newRefreshToken);
  const expiresAt = new Date(Date.now() + REFRESH_TTL_MS);
  await this.db.insert(sessions).values({
    userId: user.id,
    refreshTokenHash: newHash,
    // Carry forward the old session's metadata when the request omits it.
    userAgent: input.userAgent ?? session.userAgent,
    ipAddress: input.ipAddress ?? session.ipAddress,
    expiresAt,
  });
  return {
    accessToken,
    refreshToken: newRefreshToken,
    expiresIn: Math.floor(REFRESH_TTL_MS / 1000),
  };
}
/** Confirm a user's email with a one-time code; codes match case-insensitively. */
async verifyEmail(userId: string, verificationCode: string): Promise<void> {
  const code = verificationCode.toUpperCase();
  const codeRows = await this.db
    .select()
    .from(emailVerificationCodes)
    .where(and(eq(emailVerificationCodes.userId, userId), eq(emailVerificationCodes.code, code)))
    .limit(1);
  const record = codeRows[0];
  if (!record) {
    throw new AppError(ERROR_CODES.INVALID_CODE, 'Invalid or expired verification code', 400);
  }
  if (record.expiresAt < new Date()) {
    // Purge expired codes eagerly so they cannot be retried.
    await this.db.delete(emailVerificationCodes).where(eq(emailVerificationCodes.id, record.id));
    throw new AppError(ERROR_CODES.INVALID_CODE, 'Verification code expired', 400);
  }
  const userRows = await this.db.select().from(users).where(eq(users.id, userId)).limit(1);
  const user = userRows[0];
  if (!user) {
    throw notFound('User not found');
  }
  if (user.emailVerifiedAt) {
    throw new AppError(ERROR_CODES.ALREADY_VERIFIED, 'Email already verified', 400);
  }
  await this.db
    .update(users)
    .set({ emailVerifiedAt: new Date(), updatedAt: new Date() })
    .where(eq(users.id, userId));
  // All of this user's outstanding codes are moot now — clear them in one pass.
  await this.db.delete(emailVerificationCodes).where(eq(emailVerificationCodes.userId, userId));
}
/**
 * Issue a password-reset token. Unknown emails get a dummy result with an
 * empty token so the endpoint does not reveal which addresses exist.
 */
async forgotPassword(email: string): Promise<ForgotPasswordResult> {
  const normalized = email.toLowerCase().trim();
  const rows = await this.db.select().from(users).where(eq(users.email, normalized)).limit(1);
  const expiresAt = new Date(Date.now() + RESET_TOKEN_TTL_MS);
  const user = rows[0];
  if (!user) {
    return { token: '', expiresAt };
  }
  // Only the hash is persisted; the plaintext token is returned for delivery.
  const token = randomBytes(32).toString('hex');
  await this.db.insert(passwordResetTokens).values({
    userId: user.id,
    tokenHash: hashToken(token),
    expiresAt,
  });
  return { token, expiresAt };
}
/**
 * Complete a password reset: validate the emailed token, set the new password,
 * then revoke everything that could keep the old credentials alive — all of
 * the user's outstanding reset tokens AND all active refresh sessions.
 * @throws AppError INVALID_RESET_TOKEN when the token is unknown or expired.
 */
async resetPassword(token: string, newPassword: string): Promise<void> {
  const tokenHash = hashToken(token);
  const [record] = await this.db
    .select()
    .from(passwordResetTokens)
    .where(eq(passwordResetTokens.tokenHash, tokenHash))
    .limit(1);
  if (!record) {
    throw new AppError(ERROR_CODES.INVALID_RESET_TOKEN, 'Invalid or expired reset token', 400);
  }
  if (record.expiresAt < new Date()) {
    await this.db
      .delete(passwordResetTokens)
      .where(eq(passwordResetTokens.id, record.id));
    throw new AppError(ERROR_CODES.INVALID_RESET_TOKEN, 'Reset token expired', 400);
  }
  const passwordHash = await hashPassword(newPassword);
  await this.db
    .update(users)
    .set({ passwordHash, updatedAt: new Date() })
    .where(eq(users.id, record.userId));
  // Consume ALL of this user's reset tokens, not just the one used; otherwise
  // an older emailed token could still change the password again.
  await this.db
    .delete(passwordResetTokens)
    .where(eq(passwordResetTokens.userId, record.userId));
  // Revoke every active session: a reset often follows account compromise, so
  // previously issued refresh tokens must stop working immediately.
  await this.db.delete(sessions).where(eq(sessions.userId, record.userId));
}
}

View File

@@ -0,0 +1,9 @@
// Barrel export for the LLM module: the concrete service plus the types that
// QuestionService needs for dependency injection and cache-metadata logging.
export {
  LlmService,
  type ILlmService,
  type LlmConfig,
  type LlmGenerationMeta,
  type GenerateQuestionsInput,
  type GenerateQuestionsResult,
  type GeneratedQuestion,
} from './llm.service.js';

View File

@@ -0,0 +1,242 @@
import { z } from 'zod';
import { createHash } from 'node:crypto';
import { env } from '../../config/env.js';
import type { Stack, Level, QuestionType } from '../../db/schema/enums.js';
/** Fully-resolved client configuration (see the constructor for env fallbacks). */
export interface LlmConfig {
  baseUrl: string; // OpenAI-compatible API root; trailing slash is stripped
  model: string; // primary model id
  fallbackModel?: string; // tried only after the primary exhausts its retries
  apiKey?: string; // sent as a Bearer token when set
  timeoutMs: number; // per-request abort timeout
  temperature: number;
  maxTokens: number; // forwarded as max_tokens on each request
  maxRetries: number; // extra attempts per model (0 = single attempt)
  retryDelayMs: number; // base backoff delay; doubles on each attempt
}
/** One message in an OpenAI-style chat transcript. */
export interface ChatMessage {
  role: 'system' | 'user' | 'assistant';
  content: string;
}
/** Minimal slice of the /chat/completions response body that we read. */
export interface ChatCompletionResponse {
  choices: Array<{
    message?: { content: string }; // chat-style response
    text?: string; // legacy completion-style fallback
  }>;
}
// Every question type the generator may emit; also the default requested set.
const QUESTION_TYPES: QuestionType[] = ['single_choice', 'multiple_select', 'true_false', 'short_text'];
// A single selectable answer option (keys like "a".."d", or "true"/"false").
const optionSchema = z.object({
  key: z.string().min(1),
  text: z.string().min(1),
});
// Shape of one question as returned by the model.
// NOTE(review): the `as [string, ...string[]]` cast widens the *inferred TS
// type* of `type` to plain string (runtime validation still restricts it to
// the four QUESTION_TYPES values) — consider z.enum with literal values.
const generatedQuestionSchema = z.object({
  questionText: z.string().min(1),
  type: z.enum(QUESTION_TYPES as [string, ...string[]]),
  options: z.array(optionSchema).optional(),
  correctAnswer: z.union([z.string(), z.array(z.string())]),
  explanation: z.string().min(1),
});
// Top-level envelope the model is prompted to return.
const generateQuestionsResponseSchema = z.object({
  questions: z.array(generatedQuestionSchema),
});
// A validated question tagged with the stack/level it was generated for.
export type GeneratedQuestion = z.infer<typeof generatedQuestionSchema> & {
  stack: Stack;
  level: Level;
};
export interface GenerateQuestionsInput {
  stack: Stack;
  level: Level;
  count: number; // how many questions to request
  types?: QuestionType[]; // defaults to all of QUESTION_TYPES
}
/** Metadata for persisting to question_cache_meta (used by QuestionService) */
export interface LlmGenerationMeta {
  llmModel: string; // the model that actually answered (primary or fallback)
  promptHash: string; // sha256 of system + user prompt
  generationTimeMs: number;
  rawResponse: unknown; // the parsed (pre-zod) JSON response
}
export interface GenerateQuestionsResult {
  questions: GeneratedQuestion[];
  meta: LlmGenerationMeta;
}
/** Interface for QuestionService dependency injection and testing */
export interface ILlmService {
  generateQuestions(input: GenerateQuestionsInput): Promise<GenerateQuestionsResult>;
}
/**
 * OpenAI-compatible chat client with per-model retry (exponential backoff)
 * and optional fallback model, plus structured question generation.
 */
export class LlmService implements ILlmService {
  private readonly config: LlmConfig;

  /** Explicit overrides win; any unset field falls back to environment config. */
  constructor(config?: Partial<LlmConfig>) {
    this.config = {
      baseUrl: config?.baseUrl ?? env.LLM_BASE_URL,
      model: config?.model ?? env.LLM_MODEL,
      fallbackModel: config?.fallbackModel ?? env.LLM_FALLBACK_MODEL,
      apiKey: config?.apiKey ?? env.LLM_API_KEY,
      timeoutMs: config?.timeoutMs ?? env.LLM_TIMEOUT_MS,
      temperature: config?.temperature ?? env.LLM_TEMPERATURE,
      maxTokens: config?.maxTokens ?? env.LLM_MAX_TOKENS,
      maxRetries: config?.maxRetries ?? env.LLM_MAX_RETRIES,
      retryDelayMs: config?.retryDelayMs ?? env.LLM_RETRY_DELAY_MS,
    };
  }

  /** Convenience wrapper around chatWithMeta returning only the text. */
  async chat(messages: ChatMessage[]): Promise<string> {
    const { content } = await this.chatWithMeta(messages);
    return content;
  }

  /**
   * Send a chat request, retrying with exponential backoff per model and
   * falling back to the secondary model once the primary is exhausted.
   * Returns content plus the model used (for logging to question_cache_meta).
   */
  async chatWithMeta(messages: ChatMessage[]): Promise<{ content: string; model: string }> {
    let lastError: Error | null = null;
    const modelsToTry = [this.config.model];
    if (this.config.fallbackModel) {
      modelsToTry.push(this.config.fallbackModel);
    }
    for (const model of modelsToTry) {
      for (let attempt = 0; attempt <= this.config.maxRetries; attempt++) {
        try {
          const content = await this.executeChat(messages, model);
          return { content, model };
        } catch (err) {
          lastError = err instanceof Error ? err : new Error('LLM request failed');
          // A non-retryable client error (4xx other than 408/429) will fail
          // identically on every attempt — move straight to the fallback model
          // instead of burning retries and backoff delay.
          const status = (lastError as Error & { status?: number }).status;
          if (
            status !== undefined &&
            status >= 400 &&
            status < 500 &&
            status !== 408 &&
            status !== 429
          ) {
            break;
          }
          if (attempt < this.config.maxRetries) {
            const delayMs = this.config.retryDelayMs * Math.pow(2, attempt);
            await sleep(delayMs);
          }
        }
      }
    }
    throw lastError ?? new Error('LLM request failed');
  }

  /** One HTTP round-trip: POST {baseUrl}/chat/completions with abort timeout. */
  private async executeChat(messages: ChatMessage[], model: string): Promise<string> {
    const url = `${this.config.baseUrl.replace(/\/$/, '')}/chat/completions`;
    const headers: Record<string, string> = {
      'Content-Type': 'application/json',
    };
    if (this.config.apiKey) {
      headers['Authorization'] = `Bearer ${this.config.apiKey}`;
    }
    const body = {
      model,
      messages: messages.map((m) => ({ role: m.role, content: m.content })),
      temperature: this.config.temperature,
      max_tokens: this.config.maxTokens,
    };
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.config.timeoutMs);
    try {
      const res = await fetch(url, {
        method: 'POST',
        headers,
        body: JSON.stringify(body),
        signal: controller.signal,
      });
      if (!res.ok) {
        const text = await res.text();
        const error = new Error(`LLM request failed: ${res.status} ${res.statusText} - ${text}`);
        // Attach the HTTP status so chatWithMeta can skip pointless retries.
        (error as Error & { status: number }).status = res.status;
        throw error;
      }
      const data = (await res.json()) as ChatCompletionResponse;
      const choice = data.choices?.[0];
      const content = choice?.message?.content ?? choice?.text ?? '';
      return content.trim();
    } catch (err) {
      // An aborted fetch rejects with an 'AbortError'; translate it into a
      // readable timeout message instead of surfacing the raw abort.
      if (err instanceof Error && err.name === 'AbortError') {
        throw new Error(`LLM request timed out after ${this.config.timeoutMs}ms`);
      }
      if (err instanceof Error) {
        throw err;
      }
      throw new Error('LLM request failed');
    } finally {
      // Single cleanup point (the original cleared the timer in two branches).
      clearTimeout(timeoutId);
    }
  }

  /**
   * Generate a validated batch of questions for a stack/level.
   * Returns questions plus metadata (model, prompt hash, latency, raw parsed
   * response) for persisting to question_cache_meta.
   * @throws Error when the reply is not valid JSON, fails schema validation,
   *         or a question lacks the options its type requires.
   */
  async generateQuestions(input: GenerateQuestionsInput): Promise<GenerateQuestionsResult> {
    const { stack, level, count, types = QUESTION_TYPES } = input;
    const typeList = types.join(', ');
    const systemPrompt = `You are a technical interview question generator. Generate exactly ${count} programming/tech questions.
Return ONLY valid JSON in this exact format (no markdown, no code blocks):
{"questions":[{"questionText":"...","type":"single_choice|multiple_select|true_false|short_text","options":[{"key":"a","text":"..."}],"correctAnswer":"a" or ["a","b"],"explanation":"..."}]}
Rules: type must be one of: ${typeList}. For single_choice/multiple_select: options array required with key (a,b,c,d). For true_false: options [{"key":"true","text":"True"},{"key":"false","text":"False"}]. For short_text: options omitted, correctAnswer is string.`;
    const userPrompt = `Generate ${count} questions for stack="${stack}", level="${level}". Use types: ${typeList}.`;
    // Hash covers both prompts so cached entries invalidate on prompt changes.
    const promptForHash = systemPrompt + '\n---\n' + userPrompt;
    const promptHash = createHash('sha256').update(promptForHash).digest('hex');
    const start = Date.now();
    const { content: raw, model } = await this.chatWithMeta([
      { role: 'system', content: systemPrompt },
      { role: 'user', content: userPrompt },
    ]);
    const generationTimeMs = Date.now() - start;
    const jsonStr = extractJson(raw);
    const parsed = JSON.parse(jsonStr) as unknown;
    const result = generateQuestionsResponseSchema.safeParse(parsed);
    if (!result.success) {
      throw new Error(`LLM response validation failed: ${result.error.message}`);
    }
    const questions: GeneratedQuestion[] = result.data.questions.map((q) => ({
      ...q,
      stack,
      level,
    }));
    // Structural checks the zod schema cannot express (type-dependent options).
    for (const q of questions) {
      if ((q.type === 'single_choice' || q.type === 'multiple_select') && (!q.options || q.options.length === 0)) {
        throw new Error(`Question validation failed: ${q.type} requires options`);
      }
      if (q.type === 'true_false' && (!q.options || q.options.length < 2)) {
        throw new Error(`Question validation failed: true_false requires true/false options`);
      }
    }
    return {
      questions,
      meta: {
        llmModel: model,
        promptHash,
        generationTimeMs,
        rawResponse: parsed,
      },
    };
  }
}
/** Resolve after `ms` milliseconds (used for retry backoff). */
function sleep(ms: number): Promise<void> {
  return new Promise<void>((resolve) => {
    setTimeout(resolve, ms);
  });
}
/**
 * Pull the outermost JSON object out of an LLM reply that may be wrapped in
 * prose or markdown fences: everything from the first '{' to the last '}'.
 * Falls back to the trimmed input when no such span exists (JSON.parse will
 * then surface the error upstream).
 */
function extractJson(text: string): string {
  const trimmed = text.trim();
  const start = trimmed.indexOf('{');
  const end = trimmed.lastIndexOf('}');
  if (start === -1 || end === -1 || end < start) {
    return trimmed;
  }
  return trimmed.slice(start, end + 1);
}

View File

@@ -0,0 +1,175 @@
import { eq } from 'drizzle-orm';
import type { NodePgDatabase } from 'drizzle-orm/node-postgres';
import type * as schema from '../../db/schema/index.js';
import { users, userStats } from '../../db/schema/index.js';
import { notFound, conflict, ERROR_CODES } from '../../utils/errors.js';
import type { User } from '../../db/schema/users.js';
import type { SelfLevel } from '../../db/schema/index.js';
// Drizzle database handle typed against the full application schema.
type Db = NodePgDatabase<typeof schema>;
/** One row of per-(stack, level) statistics, serialized for API responses. */
export type UserStatItem = {
  stack: string;
  level: string;
  totalQuestions: number;
  correctAnswers: number;
  testsTaken: number;
  lastTestAt: string | null; // ISO timestamp of the most recent test, if any
};
/** Statistics aggregated across all of a user's user_stats rows. */
export type ProfileStats = {
  byStack: UserStatItem[];
  totalTestsTaken: number;
  totalQuestions: number;
  correctAnswers: number;
  accuracy: number | null; // correctAnswers / totalQuestions; null when no questions answered
};
/** Patch payload for profile updates; absent fields are left untouched. */
export type ProfileUpdateInput = {
  nickname?: string;
  avatarUrl?: string | null;
  country?: string | null;
  city?: string | null;
  selfLevel?: SelfLevel | null;
  isPublic?: boolean;
};
/** The subset of a profile exposed to other users (when the account is public). */
export type PublicProfile = {
  id: string;
  nickname: string;
  avatarUrl: string | null;
  country: string | null;
  city: string | null;
  selfLevel: string | null;
  isPublic: boolean;
  stats: ProfileStats;
};
/** Owner-only view: public fields plus email and ISO-serialized timestamps. */
export type PrivateProfile = PublicProfile & {
  email: string;
  emailVerifiedAt: string | null;
  createdAt: string;
  updatedAt: string;
};
/** Aggregate a user's user_stats rows into the profile stats payload. */
async function getStatsForUser(db: Db, userId: string): Promise<ProfileStats> {
  const rows = await db.select().from(userStats).where(eq(userStats.userId, userId));
  const byStack: UserStatItem[] = [];
  let totalTestsTaken = 0;
  let totalQuestions = 0;
  let correctAnswers = 0;
  // One pass: build the per-stack list and the totals together.
  for (const row of rows) {
    byStack.push({
      stack: row.stack,
      level: row.level,
      totalQuestions: row.totalQuestions,
      correctAnswers: row.correctAnswers,
      testsTaken: row.testsTaken,
      lastTestAt: row.lastTestAt?.toISOString() ?? null,
    });
    totalTestsTaken += row.testsTaken;
    totalQuestions += row.totalQuestions;
    correctAnswers += row.correctAnswers;
  }
  // accuracy is null (not 0) when the user has answered nothing yet.
  const accuracy = totalQuestions > 0 ? correctAnswers / totalQuestions : null;
  return { byStack, totalTestsTaken, totalQuestions, correctAnswers, accuracy };
}
/** Project a user row down to the fields safe to expose publicly. */
function toPublicProfile(user: User, stats: ProfileStats): PublicProfile {
  const { id, nickname, avatarUrl, country, city, selfLevel, isPublic } = user;
  return { id, nickname, avatarUrl, country, city, selfLevel, isPublic, stats };
}
/** Owner view: public projection plus email and ISO-serialized timestamps. */
function toPrivateProfile(user: User, stats: ProfileStats): PrivateProfile {
  const base = toPublicProfile(user, stats);
  return {
    ...base,
    email: user.email,
    emailVerifiedAt: user.emailVerifiedAt ? user.emailVerifiedAt.toISOString() : null,
    createdAt: user.createdAt.toISOString(),
    updatedAt: user.updatedAt.toISOString(),
  };
}
/** Profile read/update operations over the users and user_stats tables. */
export class UserService {
  constructor(private readonly db: Db) {}

  /** Fetch a user row by primary key, or null when absent. */
  async getById(userId: string): Promise<User | null> {
    const rows = await this.db.select().from(users).where(eq(users.id, userId)).limit(1);
    return rows[0] ?? null;
  }

  /** Fetch a user row by (trimmed) nickname, or null when absent. */
  async getByNickname(nickname: string): Promise<User | null> {
    const rows = await this.db
      .select()
      .from(users)
      .where(eq(users.nickname, nickname.trim()))
      .limit(1);
    return rows[0] ?? null;
  }

  /** Full profile (email, timestamps, stats) for the account owner. */
  async getPrivateProfile(userId: string): Promise<PrivateProfile> {
    const [user, stats] = await Promise.all([
      this.getById(userId),
      getStatsForUser(this.db, userId),
    ]);
    if (!user) {
      throw notFound('User not found');
    }
    return toPrivateProfile(user, stats);
  }

  /**
   * Public profile lookup by nickname. Private accounts answer the same 404
   * as missing ones, so their existence is not revealed.
   */
  async getPublicProfile(username: string): Promise<PublicProfile> {
    const user = await this.getByNickname(username);
    if (!user || !user.isPublic) {
      throw notFound('User not found');
    }
    const stats = await getStatsForUser(this.db, user.id);
    return toPublicProfile(user, stats);
  }

  /** Patch-style profile update: only the provided fields are written. */
  async updateProfile(userId: string, input: ProfileUpdateInput): Promise<PrivateProfile> {
    const updateData: Partial<typeof users.$inferInsert> = { updatedAt: new Date() };
    if (input.nickname !== undefined) {
      const trimmed = input.nickname.trim();
      // Reject a nickname already held by a *different* account.
      const existing = await this.db
        .select({ id: users.id })
        .from(users)
        .where(eq(users.nickname, trimmed))
        .limit(1);
      if (existing[0] && existing[0].id !== userId) {
        throw conflict(ERROR_CODES.NICKNAME_TAKEN, 'Nickname already taken');
      }
      updateData.nickname = trimmed;
    }
    if (input.avatarUrl !== undefined) updateData.avatarUrl = input.avatarUrl;
    if (input.country !== undefined) updateData.country = input.country;
    if (input.city !== undefined) updateData.city = input.city;
    if (input.selfLevel !== undefined) updateData.selfLevel = input.selfLevel;
    if (input.isPublic !== undefined) updateData.isPublic = input.isPublic;
    const updatedRows = await this.db
      .update(users)
      .set(updateData)
      .where(eq(users.id, userId))
      .returning();
    const updated = updatedRows[0];
    if (!updated) {
      throw notFound('User not found');
    }
    const stats = await getStatsForUser(this.db, userId);
    return toPrivateProfile(updated, stats);
  }
}

88
src/utils/errors.ts Normal file
View File

@@ -0,0 +1,88 @@
/**
 * Machine-readable error codes carried in the API error envelope.
 * Generic HTTP-style codes first, then auth, profile and test-flow codes.
 */
export const ERROR_CODES = {
  BAD_REQUEST: 'BAD_REQUEST',
  UNAUTHORIZED: 'UNAUTHORIZED',
  FORBIDDEN: 'FORBIDDEN',
  NOT_FOUND: 'NOT_FOUND',
  CONFLICT: 'CONFLICT',
  VALIDATION_ERROR: 'VALIDATION_ERROR',
  RATE_LIMIT_EXCEEDED: 'RATE_LIMIT_EXCEEDED',
  INTERNAL_ERROR: 'INTERNAL_ERROR',
  // auth
  INVALID_CREDENTIALS: 'INVALID_CREDENTIALS',
  ACCOUNT_LOCKED: 'ACCOUNT_LOCKED',
  EMAIL_TAKEN: 'EMAIL_TAKEN',
  INVALID_REFRESH_TOKEN: 'INVALID_REFRESH_TOKEN',
  TOKEN_REUSE_DETECTED: 'TOKEN_REUSE_DETECTED',
  INVALID_CODE: 'INVALID_CODE',
  ALREADY_VERIFIED: 'ALREADY_VERIFIED',
  INVALID_RESET_TOKEN: 'INVALID_RESET_TOKEN',
  // profile
  NICKNAME_TAKEN: 'NICKNAME_TAKEN',
  // tests / questions
  DAILY_LIMIT_REACHED: 'DAILY_LIMIT_REACHED',
  EMAIL_NOT_VERIFIED: 'EMAIL_NOT_VERIFIED',
  QUESTIONS_UNAVAILABLE: 'QUESTIONS_UNAVAILABLE',
  QUESTION_ALREADY_ANSWERED: 'QUESTION_ALREADY_ANSWERED',
  WRONG_QUESTION: 'WRONG_QUESTION',
  TEST_ALREADY_FINISHED: 'TEST_ALREADY_FINISHED',
  NO_ANSWERS: 'NO_ANSWERS',
  TEST_NOT_FINISHED: 'TEST_NOT_FINISHED',
  USER_NOT_FOUND: 'USER_NOT_FOUND',
} as const;
/** Union of all error-code string literals. */
export type ErrorCode = (typeof ERROR_CODES)[keyof typeof ERROR_CODES];
/**
 * Application error carrying a machine-readable code and an HTTP status.
 * Serializes to the API's `{ error: { code, message, details? } }` envelope.
 */
export class AppError extends Error {
  constructor(
    public readonly code: ErrorCode,
    public readonly message: string,
    public readonly statusCode: number = 500,
    public readonly details?: unknown
  ) {
    super(message);
    this.name = 'AppError';
    // Repair the prototype chain so `instanceof AppError` works after
    // transpilation of `extends Error`.
    Object.setPrototypeOf(this, AppError.prototype);
  }

  /** JSON envelope used by the error handler; `details` is omitted when unset. */
  toJSON() {
    const payload: { code: string; message: string; details?: unknown } = {
      code: this.code,
      message: this.message,
    };
    if (this.details !== undefined) {
      payload.details = this.details;
    }
    return { error: payload };
  }
}
/** 400 — malformed or semantically invalid request input. */
export function badRequest(message: string, details?: unknown): AppError {
  return new AppError(ERROR_CODES.BAD_REQUEST, message, 400, details);
}
/** 401 — missing or invalid credentials. */
export function unauthorized(message: string): AppError {
  return new AppError(ERROR_CODES.UNAUTHORIZED, message, 401);
}
/** 403 — authenticated but not allowed to perform the action. */
export function forbidden(message: string): AppError {
  return new AppError(ERROR_CODES.FORBIDDEN, message, 403);
}
/** 404 — resource does not exist (or is deliberately hidden). */
export function notFound(message: string): AppError {
  return new AppError(ERROR_CODES.NOT_FOUND, message, 404);
}
/** 409 — caller supplies the specific code (e.g. EMAIL_TAKEN, NICKNAME_TAKEN). */
export function conflict(code: ErrorCode, message: string): AppError {
  return new AppError(code, message, 409);
}
/** 422 — input parsed but failed validation; details carry validator output. */
export function validationError(message: string, details?: unknown): AppError {
  return new AppError(ERROR_CODES.VALIDATION_ERROR, message, 422, details);
}
/**
 * 429 — optional retryAfter is attached as an extra property (not in details).
 * NOTE(review): units are presumably seconds for a Retry-After header —
 * confirm against the error handler that reads it.
 */
export function rateLimitExceeded(message: string, retryAfter?: number): AppError {
  const err = new AppError(ERROR_CODES.RATE_LIMIT_EXCEEDED, message, 429);
  if (retryAfter !== undefined) {
    (err as AppError & { retryAfter: number }).retryAfter = retryAfter;
  }
  return err;
}
/** 500 — unexpected failure; the cause is stored in details for logging. */
export function internalError(message: string, cause?: unknown): AppError {
  return new AppError(ERROR_CODES.INTERNAL_ERROR, message, 500, cause);
}

52
src/utils/jwt.ts Normal file
View File

@@ -0,0 +1,52 @@
import { createHash } from 'node:crypto';
import * as jose from 'jose';
import { env } from '../config/env.js';
/** SHA-256 hex digest of a token — lets us store lookups without the plaintext. */
export function hashToken(token: string): string {
  const digest = createHash('sha256');
  digest.update(token);
  return digest.digest('hex');
}
/** Claims carried by a short-lived access JWT. */
export interface AccessPayload {
  sub: string; // user id (see AuthService: signAccessToken({ sub: user.id, ... }))
  email: string;
  type: 'access';
}
/** Claims carried by a refresh JWT. */
export interface RefreshPayload {
  sub: string; // user id
  sid: string; // random session id (UUID) — distinguishes parallel sessions
  type: 'refresh';
}
type JwtPayload = AccessPayload | RefreshPayload;
// HS256 signing key derived from the configured secret (shared by both token types).
const secret = new TextEncoder().encode(env.JWT_SECRET);
/** Sign an HS256 access JWT with the given claims plus `type: 'access'`. */
export async function signAccessToken(payload: Omit<AccessPayload, 'type'>): Promise<string> {
  const builder = new jose.SignJWT({ ...payload, type: 'access' });
  builder.setProtectedHeader({ alg: 'HS256' });
  builder.setIssuedAt();
  builder.setExpirationTime(env.JWT_ACCESS_TTL);
  return builder.sign(secret);
}
/** Sign an HS256 refresh JWT with the given claims plus `type: 'refresh'`. */
export async function signRefreshToken(payload: Omit<RefreshPayload, 'type'>): Promise<string> {
  const builder = new jose.SignJWT({ ...payload, type: 'refresh' });
  builder.setProtectedHeader({ alg: 'HS256' });
  builder.setIssuedAt();
  builder.setExpirationTime(env.JWT_REFRESH_TTL);
  return builder.sign(secret);
}
/**
 * Verify a token's signature (and standard claims, per jose.jwtVerify) and
 * return its payload.
 * NOTE(review): the double cast is unchecked — callers must narrow with
 * isAccessPayload / isRefreshPayload before trusting the payload shape.
 */
export async function verifyToken(token: string): Promise<JwtPayload> {
  const { payload } = await jose.jwtVerify(token, secret);
  return payload as unknown as JwtPayload;
}
/** Type guard: narrow a decoded JWT payload to an access-token payload. */
export function isAccessPayload(p: JwtPayload): p is AccessPayload {
  const { type } = p;
  return type === 'access';
}
/** Type guard: narrow a decoded JWT payload to a refresh-token payload. */
export function isRefreshPayload(p: JwtPayload): p is RefreshPayload {
  const { type } = p;
  return type === 'refresh';
}

15
src/utils/password.ts Normal file
View File

@@ -0,0 +1,15 @@
import * as argon2 from 'argon2';
// argon2id with explicit cost parameters (memoryCost 19456, timeCost 2).
// NOTE(review): these look like the OWASP-recommended minimums — confirm units
// against the argon2 package docs before tuning.
const HASH_OPTIONS: argon2.Options = {
  type: argon2.argon2id,
  memoryCost: 19456,
  timeCost: 2,
};
/** Hash a plaintext password with argon2id; output encodes salt + parameters. */
export async function hashPassword(plain: string): Promise<string> {
  return argon2.hash(plain, HASH_OPTIONS);
}
/** Verify a plaintext password against a stored argon2 hash string. */
export async function verifyPassword(hash: string, plain: string): Promise<boolean> {
  return argon2.verify(hash, plain);
}

37
src/utils/uuid.ts Normal file
View File

@@ -0,0 +1,37 @@
import { randomBytes } from 'node:crypto';
/**
 * Generate a UUID v7 (RFC 9562): time-ordered and lexicographically sortable.
 *
 * Layout: 48-bit big-endian Unix timestamp in milliseconds (bytes 0-5),
 * version nibble 0b0111 in the high 4 bits of byte 6, variant bits 0b10 in
 * the high 2 bits of byte 8, and 74 random bits in the remaining positions.
 *
 * The previous implementation spread the timestamp over 8 bytes (leaving
 * bytes 0-1 always zero), dropped timestamp bits 14-15 when inserting the
 * version nibble (breaking millisecond ordering), and provided fewer random
 * bits than claimed.
 */
export function uuid7(): string {
  const timestamp = Date.now();
  const random = randomBytes(10);
  const b = new Uint8Array(16);
  // unix_ts_ms: 48 bits big-endian. Use division for the high bytes because
  // JS bitwise operators truncate to 32 bits.
  b[0] = Math.floor(timestamp / 2 ** 40) & 0xff;
  b[1] = Math.floor(timestamp / 2 ** 32) & 0xff;
  b[2] = Math.floor(timestamp / 2 ** 24) & 0xff;
  b[3] = Math.floor(timestamp / 2 ** 16) & 0xff;
  b[4] = Math.floor(timestamp / 2 ** 8) & 0xff;
  b[5] = timestamp & 0xff;
  // ver (0111) + top 4 bits of rand_a
  b[6] = 0x70 | (random[0]! & 0x0f);
  b[7] = random[1]!;
  // var (10) + top 6 bits of rand_b
  b[8] = 0x80 | (random[2]! & 0x3f);
  for (let i = 0; i < 7; i++) {
    b[9 + i] = random[3 + i]!;
  }
  const hex = Array.from(b)
    .map((x) => x.toString(16).padStart(2, '0'))
    .join('');
  return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20)}`;
}