Compare commits

...

11 Commits

Author SHA1 Message Date
544854ec3f Fix toFixed error by converting string prices to numbers 2025-09-20 17:47:04 +02:00
332016317a URGENT: Revert API URL changes to fix VPS production login 2025-09-20 17:15:57 +02:00
5e4e6ee831 Fix frontend API URL configuration for local development 2025-09-20 17:05:45 +02:00
ce75ee5c61 Fix token packages endpoint authentication - make it truly public 2025-09-20 16:58:51 +02:00
528be6027a Fix token packages authentication issue - add public endpoint for token packages 2025-09-20 16:54:59 +02:00
e75424aac0 TONS code - stripe integration 2025-09-20 16:37:37 +02:00
7a868d7f14 added a test link so the admin can test the interviews too, admin can now go to the landing page too, fixes for the swagger links 2025-09-20 15:04:56 +02:00
824bf93dfb always forgetting the portainer creds so added to the readme as a best practice 2025-09-20 12:25:55 +02:00
7f9cf79a21 Merged the projects 2025-09-20 11:56:15 +02:00
b83c448573 Fix Docker build paths after flattening nested structures
- Remove obsolete version lines from docker-compose.yml files
- Create root Dockerfile for ASP.NET chatbot service
- Update backend docker-compose.yml to use correct build context
- Fix all references to old nested tuna/tuna path structure
2025-09-20 10:48:31 +02:00
ec8342b5e2 Integrate complete Candidat platform with ASP.NET chatbot service
- Added Next.js frontend for candidate interviews
- Added Node.js backend with TypeScript and AI integration
- Added ASP.NET Core chatbot service for specialized AI conversations
- Added MySQL database with complete schema
- Added Nginx reverse proxy configuration
- Complete Docker Compose orchestration for all services
- Environment configuration for production, development, and Cloudflare
- Comprehensive documentation and setup instructions
- Flattened nested folder structures for clean organization
- Integrated chatbot service with fallback to direct AI calls
2025-09-20 10:45:21 +02:00
151 changed files with 33124 additions and 10 deletions

46
AISApp/Dockerfile Normal file
View File

@ -0,0 +1,46 @@
# Multi-stage build for the ASP.NET Core chatbot service.
FROM mcr.microsoft.com/dotnet/sdk:9.0 AS build
WORKDIR /app

# Copy ONLY the project file first, then restore: this keeps the restore
# layer cached until the .csproj changes. (Previously `COPY . .` ran before
# restore, so every source edit re-downloaded all NuGet packages.)
COPY AISApp.csproj .
RUN dotnet restore AISApp.csproj

# Now bring in the rest of the sources.
COPY . .

# `dotnet publish` performs the build itself, so the separate
# `dotnet build` step was redundant work. --no-restore reuses the
# cached restore layer above.
RUN dotnet publish AISApp.csproj -c Release -o /app/publish --no-restore

# Runtime stage — slim ASP.NET runtime image, no SDK.
FROM mcr.microsoft.com/dotnet/aspnet:9.0 AS runtime
WORKDIR /app

# curl is only needed for the container health check below.
RUN apt-get update && apt-get install -y --no-install-recommends curl && rm -rf /var/lib/apt/lists/*

# Copy the published application from the build stage.
COPY --from=build /app/publish .

# System prompt consumed by the chatbot at runtime.
COPY prompt.txt .

# Directory for static files served by the app.
RUN mkdir -p static

EXPOSE 80
ENV ASPNETCORE_URLS=http://+:80
ENV ASPNETCORE_ENVIRONMENT=Production

# Health check.
# NOTE(review): curl issues a GET against /api/chat — confirm that endpoint
# accepts GET; if it is POST-only the health check will always fail.
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
  CMD curl -f http://localhost/api/chat || exit 1

ENTRYPOINT ["dotnet", "AISApp.dll"]

View File

@ -20,3 +20,5 @@ This will start the application with all necessary services.
--- ---
This README provides the basic steps to configure and run the AISApp project using Docker Compose and environment variables. This README provides the basic steps to configure and run the AISApp project using Docker Compose and environment variables.
portainer= tunaadmin/tunatainer8!

> **SECURITY (review):** these Portainer credentials are committed to the repository in plain text. Rotate them immediately and store them in a secrets manager or an untracked `.env` file — documenting live credentials in a README is not a best practice.

153
TODO.md Normal file
View File

@ -0,0 +1,153 @@
# System Integration TODO - Merging ASP.NET Chatbot with Existing Backend
## Overview
Integrate the ASP.NET chatbot service into the existing Node.js backend system to provide specialized interview capabilities while maintaining the current architecture.
## Phase 1: Container Integration
### 1.1 Add Chatbot Service to Docker Compose
- **Status**: ✅ Completed
- **Description**: Add the ASP.NET chatbot service to the main docker-compose.yml file
- **Files**: `docker-compose.yml`
- **Details**: Configure service with proper networking, environment variables, and dependencies
### 1.2 Update Backend Docker Compose
- **Status**: ✅ Completed
- **Description**: Update backend docker-compose.yml to include chatbot service dependency
- **Files**: `backend/docker-compose.yml`
- **Details**: Add chatbot service and ensure proper service communication
### 1.3 Create Chatbot Service Dockerfile
- **Status**: ✅ Completed
- **Description**: Create proper Dockerfile for the ASP.NET chatbot service
- **Files**: `AISApp/Dockerfile`
- **Details**: Multi-stage build with proper .NET 9.0 runtime and configuration
## Phase 2: Backend Service Integration
### 2.1 Create ChatbotService
- **Status**: ✅ Completed
- **Description**: Create new service to handle communication with ASP.NET chatbot
- **Files**: `backend/src/services/ChatbotService.ts`
- **Details**: HTTP client wrapper for chatbot service with error handling and fallback
### 2.2 Update AIService to Use Chatbot
- **Status**: ✅ Completed
- **Description**: Modify AIService to proxy requests to chatbot service instead of direct OpenRouter
- **Files**: `backend/src/services/AIService.ts`
- **Details**: Add chatbot service integration while maintaining fallback to direct OpenRouter
### 2.3 Update AIController
- **Status**: ✅ Completed
- **Description**: Modify AIController to use new chatbot service integration
- **Files**: `backend/src/controllers/rest/AIController.ts`
- **Details**: Update chat endpoints to use chatbot service, maintain existing interview flow
### 2.4 Add Environment Variables
- **Status**: ✅ Completed
- **Description**: Add new environment variables for chatbot service configuration
- **Files**: `env.example`, `env.production`, `env.cloudflare`
- **Details**: Add chatbot service URL, timeout, and fallback configuration
## Phase 3: ASP.NET Service Modifications
### 3.1 Add MySQL Database Support
- **Status**: ✅ Completed
- **Description**: Replace SQLite with MySQL database connection in ASP.NET service
- **Files**: `AISApp/AISApp/AIS.cs`, `AISApp/AISApp/Program.cs`
- **Details**: Add MySQL connection string and update database operations
### 3.2 Add Interview Context Endpoints
- **Status**: ✅ Completed
- **Description**: Create endpoints for interview initialization and context management
- **Files**: `AISApp/AISApp/Program.cs`
- **Details**: Add endpoints for interview start, status, and completion
### 3.3 Implement Conversation Sync
- **Status**: ✅ Completed
- **Description**: Sync conversation data between ASP.NET service and MySQL database
- **Files**: `AISApp/AISApp/AIS.cs`
- **Details**: Update conversation persistence to use MySQL instead of SQLite
### 3.4 Add Interview-Specific Prompts
- **Status**: ✅ Completed
- **Description**: Modify system prompts based on job requirements and interview context
- **Files**: `AISApp/AISApp/prompt.txt`, `AISApp/AISApp/AIS.cs`
- **Details**: Dynamic prompt generation based on job details and interview stage
## Phase 4: Database Integration
### 4.1 Update Database Schema
- **Status**: ✅ Completed
- **Description**: Add any required database changes for chatbot integration
- **Files**: `database/` (if needed)
- **Details**: Ensure conversation tables support chatbot service requirements
### 4.2 Add Database Migration Scripts
- **Status**: ✅ Completed
- **Description**: Create migration scripts for any database schema changes
- **Files**: `backend/` (new migration files)
- **Details**: SQL scripts for any required table modifications
## Phase 5: Configuration and Environment
### 5.1 Update Nginx Configuration
- **Status**: ✅ Completed (Not Required)
- **Description**: Add nginx routing for chatbot service if needed
- **Files**: `nginx/nginx.conf`
- **Details**: Add proxy rules for chatbot service endpoints
### 5.2 Update Environment Files
- **Status**: ✅ Completed
- **Description**: Update all environment files with chatbot service configuration
- **Files**: `env.example`, `env.production`, `env.cloudflare`
- **Details**: Add chatbot service environment variables
### 5.3 Update Docker Compose Environment
- **Status**: ✅ Completed
- **Description**: Add chatbot service environment variables to docker-compose
- **Files**: `docker-compose.yml`
- **Details**: Add environment variable mapping for chatbot service
## Phase 6: Testing and Validation
### 6.1 Service Communication Test
- **Status**: Pending
- **Description**: Test communication between backend and chatbot service
- **Details**: Verify HTTP requests work correctly between services
### 6.2 Database Integration Test
- **Status**: Pending
- **Description**: Test database operations in chatbot service
- **Details**: Verify conversation sync and data persistence
### 6.3 End-to-End Interview Flow Test
- **Status**: Pending
- **Description**: Test complete interview flow with chatbot integration
- **Details**: Verify mandatory questions → chat → completion flow works
## Phase 7: Documentation and Cleanup
### 7.1 Update API Documentation
- **Status**: Pending
- **Description**: Update API documentation to reflect chatbot integration
- **Files**: `backend/ADMIN_API.md`, `backend/AI_CONFIGURATION.md`
- **Details**: Document new chatbot service endpoints and configuration
### 7.2 Update Deployment Scripts
- **Status**: Pending
- **Description**: Update deployment scripts to include chatbot service
- **Files**: `deploy-production.ps1`, `deploy-production.sh`
- **Details**: Add chatbot service to deployment process
### 7.3 Clean Up Temporary Files
- **Status**: Pending
- **Description**: Remove any temporary files created during integration
- **Details**: Clean up test files and temporary configurations
## Notes
- All changes maintain backward compatibility
- Fallback to direct OpenRouter if chatbot service fails
- No frontend changes required initially
- Maintain existing interview flow and database structure
- Chatbot service runs on port 5000 internally, exposed via nginx if needed

9
backend/.barrels.json Normal file
View File

@ -0,0 +1,9 @@
{
"directory": ["./src/controllers/rest","./src/controllers/pages"],
"exclude": [
"**/__mock__",
"**/__mocks__",
"**/*.spec.ts"
],
"delete": true
}

14
backend/.dockerignore Normal file
View File

@ -0,0 +1,14 @@
node_modules
npm-debug.log
.git
.gitignore
README.md
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
.nyc_output
coverage
.DS_Store
*.log

61
backend/.gitignore vendored Normal file
View File

@ -0,0 +1,61 @@
### Node template
.DS_Store
# Logs
logs
*.log
npm-debug.log*
# Runtime data
pids
*.pid
*.seed
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directory
# https://docs.npmjs.com/misc/faq#should-i-check-my-node-modules-folder-into-git
node_modules
.npmrc
*.log
# Typings
typings/
# Typescript
/**/*.js
/**/*.js.map
test/**/*.js
test/**/*.js.map
# Test
/.tmp
/.nyc_output
# IDE
.vscode
.idea
# Project
/public
/dist
# Environment variables
.env
.env.local
.env.development
.env.production
.env.test
.env.*.local

21
backend/.swcrc Normal file
View File

@ -0,0 +1,21 @@
{
"sourceMaps": true,
"jsc": {
"parser": {
"syntax": "typescript",
"decorators": true,
"dynamicImport": true
},
"target": "es2022",
"externalHelpers": true,
"keepClassNames": true,
"transform": {
"useDefineForClassFields": false,
"legacyDecorator": true,
"decoratorMetadata": true
}
},
"module": {
"type": "es6"
}
}

306
backend/ADMIN_API.md Normal file
View File

@ -0,0 +1,306 @@
# Admin API Endpoints
This document describes the admin-specific API endpoints for the Candivista platform.
## Authentication
All admin endpoints require authentication with a valid JWT token from a user with `role: 'admin'`.
**Headers:**
```
Authorization: Bearer <jwt_token>
Content-Type: application/json
```
## Base URL
```
http://localhost:8083/rest/admin
```
## Endpoints
### System Statistics
#### GET /statistics
Get system-wide statistics and metrics.
**Response:**
```json
{
"total_users": 150,
"active_users": 142,
"total_jobs": 89,
"total_interviews": 234,
"total_tokens_purchased": 1250,
"total_tokens_used": 890,
"total_revenue": 12500.00,
"generated_at": "2024-01-15T10:30:00Z"
}
```
### User Management
#### GET /users
Get all users in the system.
**Response:**
```json
[
{
"id": "user-uuid",
"email": "user@example.com",
"first_name": "John",
"last_name": "Doe",
"role": "recruiter",
"company_name": "Tech Corp",
"is_active": true,
"last_login_at": "2024-01-15T09:00:00Z",
"created_at": "2024-01-01T00:00:00Z"
}
]
```
#### GET /users/:id
Get a specific user by ID.
#### PUT /users/:id
Update user information.
**Request Body:**
```json
{
"first_name": "John",
"last_name": "Doe",
"email": "john@example.com",
"role": "recruiter",
"company_name": "Tech Corp",
"is_active": true
}
```
#### PATCH /users/:id/toggle-status
Toggle user active/inactive status.
**Response:**
```json
{
"success": true,
"new_status": false
}
```
#### PATCH /users/:id/password
Change user password.
**Request Body:**
```json
{
"new_password": "newpassword123"
}
```
#### POST /users
Create a new user.
**Request Body:**
```json
{
"email": "newuser@example.com",
"password": "password123",
"first_name": "Jane",
"last_name": "Smith",
"role": "recruiter",
"company_name": "Startup Inc"
}
```
### Job Management
#### GET /jobs
Get all jobs in the system with user information.
**Response:**
```json
[
{
"id": "job-uuid",
"user_id": "user-uuid",
"title": "Senior Developer",
"description": "Job description...",
"status": "active",
"created_at": "2024-01-15T10:00:00Z",
"first_name": "John",
"last_name": "Doe",
"email": "john@example.com",
"company_name": "Tech Corp"
}
]
```
#### GET /jobs/:id
Get a specific job by ID.
#### PATCH /jobs/:id/status
Update job status.
**Request Body:**
```json
{
"status": "paused"
}
```
#### PUT /jobs/:id
Update job information.
**Request Body:**
```json
{
"title": "Updated Job Title",
"description": "Updated description...",
"status": "active"
}
```
### Token Management
#### GET /user-token-summaries
Get token usage summaries for all users.
**Response:**
```json
[
{
"user_id": "user-uuid",
"first_name": "John",
"last_name": "Doe",
"email": "john@example.com",
"total_purchased": 50,
"total_used": 25,
"total_available": 25,
"utilization_percentage": 50.0
}
]
```
#### POST /add-tokens
Add tokens to a specific user.
**Request Body:**
```json
{
"user_id": "user-uuid",
"quantity": 10,
"price_per_token": 5.00,
"total_price": 50.00
}
```
### Token Packages
#### GET /token-packages
Get all token packages.
**Response:**
```json
[
{
"id": "package-uuid",
"name": "Professional Pack",
"description": "Ideal for regular recruiters",
"quantity": 20,
"price_per_token": 4.00,
"total_price": 80.00,
"discount_percentage": 20,
"is_popular": true,
"is_active": true
}
]
```
#### POST /token-packages
Create a new token package.
**Request Body:**
```json
{
"name": "New Package",
"description": "Package description",
"quantity": 10,
"price_per_token": 4.50,
"total_price": 45.00,
"discount_percentage": 10,
"is_popular": false,
"is_active": true
}
```
#### PUT /token-packages/:id
Update a token package.
#### PATCH /token-packages/:id/toggle-status
Toggle package active/inactive status.
#### DELETE /token-packages/:id
Delete a token package.
### Interview Management
#### GET /interviews
Get all interviews in the system.
#### GET /interviews/:id
Get a specific interview by ID.
### Payment Records
#### GET /payments
Get all payment records.
#### GET /payments/:id
Get a specific payment record by ID.
## Error Responses
All endpoints return appropriate HTTP status codes and error messages:
- `400 Bad Request` - Invalid request data
- `401 Unauthorized` - Invalid or missing authentication
- `403 Forbidden` - Insufficient permissions (non-admin user)
- `404 Not Found` - Resource not found
- `500 Internal Server Error` - Server error
**Error Response Format:**
```json
{
"message": "Error description",
"status": 400
}
```
## Testing
Use the provided test script to verify admin endpoints:
```bash
node test-admin.js
```
## Security Notes
1. All admin endpoints require admin role verification
2. JWT tokens are validated on every request
3. User passwords are hashed using bcrypt
4. All database queries use parameterized statements to prevent SQL injection
5. Admin actions are logged for audit purposes
## Database Schema
The admin endpoints interact with the following database tables:
- `users` - User accounts and profiles
- `jobs` - Job postings
- `interview_tokens` - Token purchases and usage
- `token_packages` - Available token packages
- `interviews` - Interview sessions
- `payment_records` - Payment history
- `user_usage` - Usage tracking and limits

View File

@ -0,0 +1,52 @@
# AI Configuration Guide
## Environment Variables
Add these to your `.env` file:
```env
# AI Configuration
# Choose between 'ollama' or 'openrouter'
AI_PROVIDER=openrouter
# Ollama Configuration (if AI_PROVIDER=ollama)
AI_PORT=11434
AI_MODEL=gpt-oss:20b
# OpenRouter Configuration (if AI_PROVIDER=openrouter)
OPENROUTER_API_KEY=sk-or-your-api-key-here
OPENROUTER_MODEL=gemma
OPENROUTER_BASE_URL=openrouter.ai
OPENROUTER_REL_PATH=/api
OPENROUTER_TEMPERATURE=0.7
```
## Available OpenRouter Models
Based on your C# implementation, these models are available:
- `gemma` - google/gemma-3-12b-it
- `dolphin` - cognitivecomputations/dolphin-mixtral-8x22b
- `dolphin_free` - cognitivecomputations/dolphin3.0-mistral-24b:free
- `gpt-4o-mini` - openai/gpt-4o-mini
- `gpt-4.1-nano` - openai/gpt-4.1-nano
- `qwen` - qwen/qwen3-30b-a3b
- `unslop` - thedrummer/unslopnemo-12b
- `euryale` - sao10k/l3.3-euryale-70b
- `wizard` - microsoft/wizardlm-2-8x22b
- `deepseek` - deepseek/deepseek-chat-v3-0324
- `dobby` - sentientagi/dobby-mini-unhinged-plus-llama-3.1-8b
## Testing
1. Set `AI_PROVIDER=openrouter` in your `.env`
2. Add your OpenRouter API key
3. Test the connection: `GET http://localhost:8083/rest/ai/test-ai`
4. Start an interview to test the full flow
## Switching Back to Ollama
To switch back to Ollama:
1. Set `AI_PROVIDER=ollama` in your `.env`
2. Make sure Ollama is running on the specified port
3. Test the connection: `GET http://localhost:8083/rest/ai/test-ai`

30
backend/Dockerfile Normal file
View File

@ -0,0 +1,30 @@
# Use Node.js 18 Alpine for smaller image size
FROM node:18-alpine

WORKDIR /app

# curl is only needed for the health check; install it BEFORE copying
# sources so the layer stays cached across code changes. (Previously it ran
# after `COPY . .`, forcing a reinstall on every build.)
RUN apk add --no-cache curl

# Copy manifests first so the dependency layer is cached until they change.
COPY package*.json ./

# FIX: `--only=production` is deprecated; `--omit=dev` is the supported
# replacement. The build script uses swc/barrels, which this package.json
# declares as runtime dependencies, so dev deps are not needed to build.
RUN npm ci --omit=dev

# Copy source code and build.
COPY . .
RUN npm run build

EXPOSE 8083

# Health check.
# NOTE(review): /rest/ai/test-ai appears to invoke the AI provider — hitting
# it every 30s may cost tokens/latency; consider a cheap liveness endpoint.
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
  CMD curl -f http://localhost:8083/rest/ai/test-ai || exit 1

# Start the application
CMD ["npm", "run", "start:prod"]

67
backend/README.md Normal file
View File

@ -0,0 +1,67 @@
<p style="text-align: center" align="center">
<a href="https://tsed.dev" target="_blank"><img src="https://tsed.dev/tsed-og.png" width="200" alt="Ts.ED logo"/></a>
</p>
<div align="center">
<h1>Ts.ED - backend</h1>
<br />
<div align="center">
<a href="https://cli.tsed.dev/">Website</a>
<span>&nbsp;&nbsp;&nbsp;&nbsp;</span>
<a href="https://cli.tsed.dev/getting-started.html">Getting started</a>
<span>&nbsp;&nbsp;&nbsp;&nbsp;</span>
<a href="https://slack.tsed.dev">Slack</a>
<span>&nbsp;&nbsp;&nbsp;&nbsp;</span>
<a href="https://twitter.com/TsED_io">Twitter</a>
</div>
<hr />
</div>
> An awesome project based on Ts.ED framework
## Getting started
> **Important!** Ts.ED requires Node >= 20.x or Bun.js and TypeScript >= 5.
```batch
# install dependencies
$ npm install
# serve
$ npm run start
# build for production
$ npm run build
$ npm run start:prod
```
## Docker
```
# build docker image
docker compose build
# start docker image
docker compose up
```
## Barrels
This project uses [barrels](https://www.npmjs.com/package/@tsed/barrels) to generate index files to import the controllers.
Edit `.barrels.json` to customize it:
```json
{
"directory": [
"./src/controllers/rest",
"./src/controllers/pages"
],
"exclude": [
"**/__mock__",
"**/__mocks__",
"**/*.spec.ts"
],
"delete": true
}
```

View File

@ -0,0 +1,3 @@
-- Migration: add an `icon` column (default 'briefcase') to the jobs table.
-- NOTE(review): not idempotent — re-running fails with "Duplicate column name";
-- guard in the migration runner or check information_schema first.
-- Add icon column to jobs table
USE candidb_main;
ALTER TABLE jobs ADD COLUMN icon VARCHAR(50) DEFAULT 'briefcase' AFTER application_deadline;

View File

@ -0,0 +1,7 @@
-- Migration: add `interview_style` enum to jobs and backfill existing rows.
-- NOTE(review): not idempotent — re-running the ALTER fails with "Duplicate column name".
USE candidb_main;
-- Add interview_style column to jobs table
ALTER TABLE jobs ADD COLUMN interview_style ENUM('personal', 'balanced', 'technical') DEFAULT 'balanced' AFTER interview_questions;
-- Update existing records to have 'balanced' as default
-- (columns added with a DEFAULT are already populated, so this UPDATE is a no-op safety net)
UPDATE jobs SET interview_style = 'balanced' WHERE interview_style IS NULL;

View File

@ -0,0 +1,7 @@
-- Migration: add a `tokens_used` counter to job_links and backfill to 0.
-- NOTE(review): not idempotent — re-running the ALTER fails with "Duplicate column name".
USE candidb_main;
-- Add tokens_used column to job_links table
ALTER TABLE job_links ADD COLUMN tokens_used INT DEFAULT 0 AFTER tokens_available;
-- Update existing records to have 0 tokens_used
-- (columns added with a DEFAULT are already populated, so this UPDATE is a no-op safety net)
UPDATE job_links SET tokens_used = 0 WHERE tokens_used IS NULL;

View File

@ -0,0 +1,13 @@
-- Migration: create job_links, mapping shareable URL slugs to jobs and
-- tracking how many interview tokens each link has available.
-- Safe to re-run thanks to IF NOT EXISTS.
USE candidb_main;
CREATE TABLE IF NOT EXISTS job_links (
  id VARCHAR(36) PRIMARY KEY,                -- UUID
  job_id VARCHAR(36) NOT NULL,               -- owning job (UUID)
  url_slug VARCHAR(50) NOT NULL UNIQUE,      -- public slug used in interview URLs
  tokens_available INT DEFAULT 0,
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
  -- Deleting a job removes its links as well.
  FOREIGN KEY (job_id) REFERENCES jobs(id) ON DELETE CASCADE,
  INDEX idx_job_id (job_id),
  INDEX idx_url_slug (url_slug)
);

View File

@ -0,0 +1,46 @@
# Backend + chatbot stack.
# FIX: removed the obsolete top-level `version:` key — the Compose
# specification deprecated it and Docker Compose v2 warns and ignores it
# (commit b83c448 claims these were removed, but this file still had one).
services:
  server:
    build:
      context: .
      dockerfile: ./Dockerfile
      args:
        - http_proxy
        - https_proxy
        - no_proxy
    image: backend/server:latest
    ports:
      # NOTE(review): the Node server defaults to port 8083 (see Server.ts);
      # confirm PORT=8081 is supplied via the environment, otherwise this
      # mapping points at a port nothing listens on.
      - "8081:8081"
    environment:
      - CHATBOT_SERVICE_URL=http://chatbot:80
      - CHATBOT_SERVICE_TIMEOUT=30000
      - CHATBOT_FALLBACK_ENABLED=true
    depends_on:
      - chatbot
    networks:
      - candidat-network

  chatbot:
    build:
      # NOTE(review): resolved relative to this compose file; verify that
      # ../../AISApp still exists after the repository flattening.
      context: ../../AISApp
      dockerfile: Dockerfile
    image: candidat/chatbot:latest
    container_name: backend-chatbot
    environment:
      - ASPNETCORE_ENVIRONMENT=Production
      - OPENROUTER_API_KEY=${OPENROUTER_API_KEY}
      - CHATBOT_DB_HOST=database
      - CHATBOT_DB_NAME=${MYSQL_DATABASE}
      - CHATBOT_DB_USER=${MYSQL_USER}
      - CHATBOT_DB_PASSWORD=${MYSQL_PASSWORD}
      - CHATBOT_DB_PORT=3306
    ports:
      - "5000:80"
    networks:
      - candidat-network
    restart: unless-stopped

networks:
  candidat-network:
    external: true

9
backend/nodemon.json Normal file
View File

@ -0,0 +1,9 @@
{
"extensions": ["ts"],
"watch": ["src"],
"ignore": ["**/*.spec.ts"],
"delay": 100,
"execMap": {
"ts": "node --enable-source-maps --import @swc-node/register/esm-register"
}
}

5603
backend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

79
backend/package.json Normal file
View File

@ -0,0 +1,79 @@
{
  "name": "backend",
  "version": "1.0.0",
  "description": "",
  "scripts": {
    "build": "npm run barrels && swc src --out-dir dist -s --strip-leading-paths",
    "barrels": "barrels",
    "start": "npm run barrels && nodemon src/index.ts",
    "start:prod": "cross-env NODE_ENV=production node dist/index.js"
  },
  "dependencies": {
    "@swc-node/register": "^1.11.1",
    "@swc/cli": "^0.7.8",
    "@swc/core": "^1.13.5",
    "@swc/helpers": "^0.5.17",
    "@tsed/ajv": "^8.16.2",
    "@tsed/barrels": "^6.6.3",
    "@tsed/core": "^8.16.2",
    "@tsed/di": "^8.16.2",
    "@tsed/engines": "^8.16.2",
    "@tsed/exceptions": "^8.16.2",
    "@tsed/json-mapper": "^8.16.2",
    "@tsed/logger": "^8.0.4",
    "@tsed/openspec": "^8.16.2",
    "@tsed/platform-cache": "^8.16.2",
    "@tsed/platform-exceptions": "^8.16.2",
    "@tsed/platform-express": "^8.16.2",
    "@tsed/platform-http": "^8.16.2",
    "@tsed/platform-log-request": "^8.16.2",
    "@tsed/platform-middlewares": "^8.16.2",
    "@tsed/platform-multer": "^8.16.2",
    "@tsed/platform-params": "^8.16.2",
    "@tsed/platform-response-filter": "^8.16.2",
    "@tsed/platform-views": "^8.16.2",
    "@tsed/scalar": "^8.16.2",
    "@tsed/schema": "^8.16.2",
    "@tsed/socketio": "^8.16.2",
    "@tsed/swagger": "^8.16.2",
    "ajv": "^8.17.1",
    "axios": "^1.6.0",
    "bcryptjs": "^3.0.2",
    "body-parser": "^2.2.0",
    "compression": "^1.8.1",
    "cookie-parser": "^1.4.7",
    "cors": "^2.8.5",
    "cross-env": "^10.0.0",
    "dotenv": "^17.2.2",
    "dotenv-expand": "^12.0.3",
    "dotenv-flow": "^4.1.0",
    "express": "^5.1.0",
    "jsonwebtoken": "^9.0.2",
    "method-override": "^3.0.0",
    "mysql2": "^3.14.5",
    "socket.io": "^4.8.1",
    "stripe": "^18.5.0",
    "typescript": "^5.9.2"
  },
  "devDependencies": {
    "@types/bcryptjs": "^3.0.0",
    "@types/compression": "^1.8.1",
    "@types/cookie-parser": "^1.4.9",
    "@types/cors": "^2.8.19",
    "@types/express": "^5.0.3",
    "@types/jsonwebtoken": "^9.0.10",
    "@types/method-override": "^3.0.0",
    "@types/multer": "^2.0.0",
    "@types/node": "^24.3.1",
    "nodemon": "^3.1.10",
    "tslib": "^2.8.1"
  },
  "tsed": {
    "convention": "conv_default",
    "architecture": "arc_default",
    "packageManager": "npm",
    "runtime": "node"
  },
  "type": "module"
}

View File

@ -0,0 +1,22 @@
'use strict'
const path = require('path')
const defaultLogFile = path.join(__dirname, '/logs/project-server.log')
module.exports = {
'apps': [
{
name: 'api',
'script': `${process.env.WORKDIR}/dist/index.js`,
'cwd': process.env.WORKDIR,
exec_mode: "cluster",
instances: process.env.NODE_ENV === 'test' ? 1 : process.env.NB_INSTANCES || 2,
autorestart: true,
max_memory_restart: process.env.MAX_MEMORY_RESTART || '750M',
'out_file': defaultLogFile,
'error_file': defaultLogFile,
'merge_logs': true,
'kill_timeout': 30000,
}
]
}

2
backend/secret.txt Normal file
View File

@ -0,0 +1,2 @@
Portainer: 168.231.108.135:9443 / tundaadmin / retoortunapass1
SECURITY (review): a live host, username and password are committed to the repository. Rotate these credentials immediately, delete this file from version control (and add it to .gitignore), and keep secrets in a secrets manager or untracked .env file.

114
backend/src/Server.ts Normal file
View File

@ -0,0 +1,114 @@
import {join} from "node:path";
import {Configuration} from "@tsed/di";
import {application} from "@tsed/platform-http";
import "@tsed/platform-log-request"; // remove this import if you don't want log request
import "@tsed/platform-express"; // /!\ keep this import
import "@tsed/ajv";
import "@tsed/swagger";
import "@tsed/scalar";
import {config} from "./config/index.js";
import * as rest from "./controllers/rest/index.js";
import * as pages from "./controllers/pages/index.js";
import {testConnection, closePool} from "./config/database.js";
import {$log} from "@tsed/logger";
/**
 * Ts.ED server bootstrap for the Candivista REST API.
 *
 * Mounts the REST controllers under /rest and the page controllers at the
 * root, publishes OpenAPI documentation through Swagger (/doc) and Scalar
 * (/scalar/doc), and ties the MySQL connection pool to the server lifecycle
 * via the $onInit / $onDestroy hooks.
 */
@Configuration({
  ...config,
  acceptMimes: ["application/json"],
  // PORT env var wins; 8083 is the local default referenced by the docs.
  httpPort: process.env.PORT || 8083,
  httpsPort: false, // CHANGE — HTTPS disabled here; presumably TLS terminates at the reverse proxy — confirm
  mount: {
    // Every controller exported by the generated rest/pages barrels is mounted.
    "/rest": [
      ...Object.values(rest)
    ],
    "/": [
      ...Object.values(pages)
    ]
  },
  swagger: [
    {
      path: "/doc",
      specVersion: "3.0.1",
      spec: {
        info: {
          title: "Candivista API",
          version: process.env.APP_VERSION || "1.0.0",
          description:
            "REST API for Candivista. Authentication via JWT Bearer tokens.\n\n" +
            "Includes endpoints for auth, users, jobs, tokens, AI-powered interviews (OpenRouter/Ollama), payment processing, and admin reporting.\n\n" +
            "AI Features:\n" +
            "- OpenRouter integration for cloud-based AI interviews\n" +
            "- Ollama support for local AI processing\n" +
            "- Test mode for admin interview testing\n" +
            "- Mandatory question support before AI interviews\n\n" +
            "Payment Features:\n" +
            "- Stripe integration for secure payments\n" +
            "- Support for credit cards, iDEAL, and bank transfers\n" +
            "- Dynamic token pricing with package discounts\n" +
            "- Custom token quantity purchases\n" +
            "- Webhook-based payment confirmation",
          contact: {
            name: "Candivista Team",
            url: "https://candivista.com",
            email: "support@candivista.com"
          },
          license: { name: "Proprietary" }
        },
        servers: [
          { url: "http://localhost:8083", description: "Local" }
        ],
        tags: [
          { name: "Auth", description: "Authentication and session management" },
          { name: "Users", description: "User profile and token summary" },
          { name: "Jobs", description: "Job posting and interview token operations" },
          { name: "Admin", description: "Administrative statistics and management" },
          { name: "AI", description: "AI-powered interview operations with OpenRouter and Ollama support" },
          { name: "Payments", description: "Stripe payment processing for token purchases" },
          { name: "Webhooks", description: "Stripe webhook handlers for payment events" }
        ],
        components: {
          securitySchemes: {
            // Protected endpoints expect "Authorization: Bearer <JWT>".
            bearerAuth: { type: "http", scheme: "bearer", bearerFormat: "JWT" }
          }
        },
        security: [{ bearerAuth: [] }]
      }
    }
  ],
  scalar: [
    {
      path: "/scalar/doc",
      specVersion: "3.0.1"
    }
  ],
  middlewares: [
    "cors",
    "cookie-parser",
    "compression",
    "method-override",
    "json-parser",
    { use: "urlencoded-parser", options: { extended: true }}
  ],
  views: {
    // EJS templates live in <cwd>/views (e.g. swagger.ejs landing page).
    root: join(process.cwd(), "views"),
    extensions: {
      ejs: "ejs"
    }
  }
})
export class Server {
  protected app = application();

  async $onInit() {
    // Test database connection on startup; a failure is logged but does not
    // abort startup — later queries will surface the error instead.
    const isConnected = await testConnection();
    if (!isConnected) {
      $log.error("Failed to connect to database. Server will continue but database operations may fail.");
    }
  }

  async $onDestroy() {
    // Close database pool on shutdown
    await closePool();
  }
}

View File

@ -0,0 +1,54 @@
import mysql from 'mysql2/promise';
import { $log } from '@tsed/logger';
// Connection settings consumed by mysql2's createPool() below.
export interface DatabaseConfig {
  host: string;            // DB host, e.g. "localhost" or a docker service name
  port: number;            // TCP port (default 3306)
  user: string;
  password: string;
  database: string;
  connectionLimit: number; // max concurrent connections held by the pool
}
// Configuration is read from the environment with local-dev fallbacks.
const config: DatabaseConfig = {
  host: process.env.DB_HOST || 'localhost',
  port: parseInt(process.env.DB_PORT || '3306'),
  user: process.env.DB_USER || 'root',
  password: process.env.DB_PASSWORD || '',
  database: process.env.DB_NAME || 'candidb_main',
  connectionLimit: parseInt(process.env.DB_CONNECTION_LIMIT || '10')
};

// Shared connection pool for the whole backend.
// FIX: the previous options `acquireTimeout`, `timeout` and `reconnect`
// belong to the legacy `mysql` driver and are NOT supported by mysql2 —
// mysql2 warns about unknown options and ignores them. `connectTimeout`
// is the supported equivalent for establishing a connection, and pooled
// connections are re-created by the pool as needed, so no `reconnect`
// flag exists or is required.
export const pool = mysql.createPool({
  ...config,
  waitForConnections: true,
  queueLimit: 0,
  connectTimeout: 60000
});
// Test database connection.
// Checks out one connection, pings the server, and reports success/failure.
// Returns true when MySQL is reachable; logs and returns false otherwise.
export async function testConnection(): Promise<boolean> {
  try {
    const connection = await pool.getConnection();
    try {
      // ping() round-trips to the server without executing a query.
      await connection.ping();
    } finally {
      // FIX: previously the connection was only released after a successful
      // ping, so a failing ping leaked a pooled connection. Always return it.
      connection.release();
    }
    $log.info('Database connection established successfully');
    return true;
  } catch (error) {
    $log.error('Database connection failed:', error);
    return false;
  }
}
// Graceful shutdown: drain the pool and close every open connection.
// Errors are logged rather than rethrown so shutdown can proceed.
export async function closePool(): Promise<void> {
  try {
    await pool.end();
  } catch (error) {
    $log.error('Error closing database pool:', error);
    return;
  }
  $log.info('Database pool closed');
}

View File

@ -0,0 +1,7 @@
import dotenv from "dotenv-flow";

// Default to "development" when NODE_ENV is unset (or empty).
process.env.NODE_ENV = process.env.NODE_ENV || "development";

// Load .env / .env.local / .env.<NODE_ENV> etc. via dotenv-flow.
export const config = dotenv.config();

// Convenience flag and alias used by the other config modules.
export const isProduction = process.env.NODE_ENV === "production";
export const envs = process.env

View File

@ -0,0 +1,14 @@
import {readFileSync} from "node:fs";
import {envs} from "./envs/index.js";
import loggerConfig from "./logger/index.js";

// Read the app version from package.json at startup.
// NOTE(review): the path is resolved against process.cwd(), so the server
// must be launched from the project root — confirm deployment scripts do.
const pkg = JSON.parse(readFileSync("./package.json", {encoding: "utf8"}));

// Shared TsED configuration spread into the @Configuration decorator in Server.ts.
export const config: Partial<TsED.Configuration> = {
  version: pkg.version,
  envs,
  ajv: {
    // Hand coerced values (e.g. "42" -> 42) to controllers instead of raw input.
    returnsCoercedValues: true
  },
  logger: loggerConfig,
  // additional shared configuration
};

View File

@ -0,0 +1,25 @@
import {DILoggerOptions} from "@tsed/di";
import {$log} from "@tsed/logger";
import {isProduction} from "../envs/index.js";

// In production, replace the default appenders with JSON layouts so log
// aggregators can parse the output: info/debug go to stdout, everything
// warn-and-worse (plus trace) goes to stderr.
if (isProduction) {
  $log.appenders.set("stdout", {
    type: "stdout",
    levels: ["info", "debug"],
    layout: {
      type: "json"
    }
  });

  $log.appenders.set("stderr", {
    levels: ["trace", "fatal", "error", "warn"],
    type: "stderr",
    layout: {
      type: "json"
    }
  });
}

// FIX: replaced the legacy angle-bracket assertion (<DILoggerOptions>{...})
// with `satisfies`, which actually type-checks the literal against
// DILoggerOptions instead of asserting it, and avoids the JSX-ambiguous
// syntax. The exported value is unchanged.
export default {
  // Route summary tables are noisy in production logs.
  disableRoutesSummary: isProduction
} satisfies DILoggerOptions;

View File

@ -0,0 +1,29 @@
import {Constant, Controller} from "@tsed/di";
import {HeaderParams} from "@tsed/platform-params";
import {View} from "@tsed/platform-views";
import {SwaggerSettings} from "@tsed/swagger";
import {Hidden, Get, Returns} from "@tsed/schema";
/**
 * Root page controller: renders the swagger.ejs landing page listing every
 * published OpenAPI document.
 *
 * @Hidden() keeps this controller out of the generated OpenAPI spec itself.
 */
@Hidden()
@Controller("/")
export class IndexController {
  // Injected swagger configuration (defaults to [] when none is registered).
  @Constant("swagger", [])
  private swagger: SwaggerSettings[];

  @Get("/")
  @View("swagger.ejs")
  @(Returns(200, String).ContentType("text/html"))
  get(@HeaderParams("x-forwarded-proto") protocol: string, @HeaderParams("host") host: string) {
    // Rebuild the externally visible base URL; x-forwarded-proto is supplied
    // by a reverse proxy, so fall back to plain http when it is absent.
    const hostUrl = `${protocol || "http"}://${host}`;
    return {
      BASE_URL: hostUrl,
      // One entry per swagger document, carrying its absolute URL for the template.
      docs: this.swagger.map((conf) => {
        return {
          url: hostUrl + conf.path,
          ...conf
        };
      })
    };
  }
}

View File

@ -0,0 +1,4 @@
/**
* @file Automatically generated by @tsed/barrels.
*/
export * from "./IndexController.js";

View File

@ -0,0 +1,677 @@
import { Controller } from "@tsed/di";
import { Post, Get, Tags, Summary, Description, Returns, Security } from "@tsed/schema";
import { BodyParams, PathParams, QueryParams } from "@tsed/platform-params";
import { Req } from "@tsed/platform-http";
import { BadRequest, NotFound } from "@tsed/exceptions";
import { JobService } from "../../services/JobService.js";
import { AIService } from "../../services/AIService.js";
import axios from "axios";
/**
 * AI interview endpoints: connection test, mandatory pre-screen questions,
 * interview lifecycle (start / chat / end) and conversation retrieval.
 *
 * Supports two AI backends selected via AI_PROVIDER:
 *  - 'openrouter' — hosted, driven through AIService
 *  - 'ollama'     — local HTTP API (also used as fallback when OpenRouter fails)
 *
 * "Test mode" (`?test=true` or `test: true` in the body) lets admins run an
 * interview without persisting anything to the database.
 */
@Controller("/ai")
@Tags("AI")
export class AIController {
  private jobService = new JobService();
  private aiService = new AIService();
  private aiProvider = process.env.AI_PROVIDER || 'ollama'; // 'ollama' or 'openrouter'
  private aiPort = process.env.AI_PORT || '11434';
  private aiModel = process.env.AI_MODEL || 'gpt-oss:20b';
  // The interview is wrapped up once the candidate has sent this many messages.
  private static readonly MAX_CANDIDATE_MESSAGES = 10;
  // Canned closing used when the AI backend cannot produce one.
  private static readonly FALLBACK_CLOSING_MESSAGE =
    "Thank you for your time and detailed responses. That concludes our interview. We'll review your answers and get back to you within a few business days.";

  // Test AI connection
  @Get("/test-ai")
  @Summary("Test AI connection")
  @Description("Test the AI service connection and configuration. Works with both Ollama and OpenRouter providers.")
  @(Returns(200, Object).Description("AI test result with success status and response"))
  async testAI() {
    try {
      if (this.aiProvider === 'openrouter') {
        const response = await this.aiService.generateResponse("Hello, please respond with exactly: 'AI is working'");
        return {
          success: true,
          aiResponse: response,
          provider: 'openrouter',
          model: process.env.OPENROUTER_MODEL || 'gemma'
        };
      }
      // Ollama test: low temperature so the sanity-check reply is stable.
      const response = await axios.post(`http://localhost:${this.aiPort}/api/generate`, {
        model: this.aiModel,
        prompt: "Hello, please respond with exactly: 'AI is working'",
        stream: false,
        options: {
          temperature: 0.1,
          max_tokens: 50
        }
      });
      return {
        success: true,
        aiResponse: response.data.response,
        provider: 'ollama',
        model: this.aiModel,
        port: this.aiPort
      };
    } catch (error: any) {
      // FIX: `error` was an untyped catch binding; under strict TS
      // (`useUnknownInCatchVariables`) `.message` is not accessible.
      // Typed as `any` to match the other handlers in this controller.
      console.error('AI test failed:', error);
      return {
        success: false,
        error: error.message,
        provider: this.aiProvider,
        model: this.aiProvider === 'openrouter' ? process.env.OPENROUTER_MODEL : this.aiModel
      };
    }
  }

  // Get mandatory questions for the job
  @Get("/mandatory-questions/:linkId")
  @Summary("Get mandatory interview questions")
  @Description("Retrieve mandatory questions for a specific job interview link")
  @(Returns(200, Object).Description("List of mandatory questions for the job"))
  @(Returns(404, Object).Description("Interview link not found or expired"))
  async getMandatoryQuestions(@PathParams("linkId") linkId: string) {
    try {
      // Verify the job exists and link is valid
      const jobData = await this.jobService.getJobByLinkId(linkId);
      if (!jobData) {
        throw new NotFound("Interview link not found or expired");
      }
      const mandatoryQuestions = jobData.interview_questions || [];
      return {
        success: true,
        questions: mandatoryQuestions,
        hasMandatoryQuestions: mandatoryQuestions.length > 0
      };
    } catch (error) {
      console.error('Error getting mandatory questions:', error);
      throw error;
    }
  }

  // Submit mandatory question answers
  @Post("/submit-mandatory-answers")
  @Summary("Submit mandatory question answers")
  @Description("Submit answers to mandatory interview questions before starting the AI interview")
  @(Returns(200, Object).Description("Success response with interview data"))
  @(Returns(400, Object).Description("Missing required fields"))
  @(Returns(404, Object).Description("Interview link not found or expired"))
  async submitMandatoryAnswers(@BodyParams() body: any, @QueryParams() query: any) {
    try {
      const { candidateName, job, linkId, answers } = body;
      const isTestMode = query.test === 'true' || body.test === true;
      if (!candidateName || !job || !linkId || !answers) {
        throw new BadRequest("Missing required fields: candidateName, job, linkId, answers");
      }
      // Verify the job exists and link is valid
      const jobData = await this.jobService.getJobByLinkId(linkId);
      if (!jobData) {
        throw new NotFound("Interview link not found or expired");
      }
      // Create or get interview record (skip DB writes in test mode)
      const interviewId = await this.jobService.getOrCreateInterview(linkId, candidateName, isTestMode);
      // Persist each Q/A pair as an ai message (question) followed by a
      // candidate message (answer); answers are matched to questions by index.
      for (let i = 0; i < answers.length; i++) {
        const question = jobData.interview_questions[i];
        const answer = answers[i];
        if (question && answer) {
          await this.jobService.saveConversationMessage(interviewId, linkId, 'ai', `Question ${i + 1}: ${question}`, isTestMode);
          await this.jobService.saveConversationMessage(interviewId, linkId, 'candidate', answer, isTestMode);
        }
      }
      // Log mandatory questions completed
      await this.jobService.logInterviewEvent(linkId, 'mandatory_questions_completed', {
        candidateName,
        interviewId,
        questionsAnswered: answers.length,
        timestamp: new Date().toISOString()
      });
      return {
        success: true,
        message: "Mandatory questions answered successfully",
        interviewId
      };
    } catch (error) {
      console.error('Error submitting mandatory answers:', error);
      throw error;
    }
  }

  // Start interview with AI agent (only after mandatory questions)
  @Post("/start-interview")
  @Summary("Start AI interview")
  @Description("Initialize an AI-powered interview session. Can be used in test mode for admins.")
  @(Returns(200, Object).Description("Interview started successfully with initial AI message"))
  @(Returns(400, Object).Description("Missing required fields"))
  @(Returns(404, Object).Description("Interview link not found or expired"))
  @(Returns(500, Object).Description("AI service unavailable"))
  async startInterview(@BodyParams() body: any, @QueryParams() query: any) {
    try {
      const { candidateName, job, linkId } = body;
      const isTestMode = query.test === 'true' || body.test === true;
      if (!candidateName || !job || !linkId) {
        throw new BadRequest("Missing required fields: candidateName, job, linkId");
      }
      // Verify the job exists and link is valid
      const jobData = await this.jobService.getJobByLinkId(linkId);
      if (!jobData) {
        throw new NotFound("Interview link not found or expired");
      }
      // Create or get interview record (skip DB writes in test mode)
      const interviewId = await this.jobService.getOrCreateInterview(linkId, candidateName, isTestMode);
      // Conversation history (mandatory-question answers) comes from the DB in
      // production; in test mode nothing was saved, so it starts empty.
      let conversationHistory;
      if (isTestMode) {
        conversationHistory = [];
        console.log(`[DEBUG] Starting AI in test mode - no conversation history available`);
      } else {
        conversationHistory = await this.jobService.getConversationHistory(interviewId);
        console.log(`[DEBUG] Starting AI with conversation history: ${JSON.stringify(conversationHistory, null, 2)}`);
      }
      // Generate initial AI message using chatbot service (fail if AI unavailable)
      const initialMessage = await this.aiService.initializeInterviewWithChatbot(job, candidateName, linkId, conversationHistory);
      console.log(`[DEBUG] initializeInterviewWithChatbot returned: "${initialMessage}"`);
      if (!initialMessage) {
        throw new Error("AI service is currently unavailable. Please try again later.");
      }
      // Save AI message to conversation
      await this.jobService.saveConversationMessage(interviewId, linkId, 'ai', initialMessage, isTestMode);
      // Log interview start
      await this.jobService.logInterviewEvent(linkId, 'started', {
        candidateName,
        interviewId,
        timestamp: new Date().toISOString()
      });
      return {
        success: true,
        message: initialMessage,
        job: jobData,
        interviewId
      };
    } catch (error: any) {
      console.error('Error starting interview:', error);
      throw error;
    }
  }

  // Handle chat messages
  @Post("/chat")
  @Summary("Send chat message to AI")
  @Description("Send a message to the AI interviewer and receive a response. Supports both test and production modes.")
  @(Returns(200, Object).Description("AI response message"))
  @(Returns(400, Object).Description("Missing required fields"))
  @(Returns(404, Object).Description("Interview link not found or expired"))
  @(Returns(500, Object).Description("AI service unavailable"))
  async handleChat(@BodyParams() body: any, @QueryParams() query: any) {
    try {
      const { message, candidateName, job, linkId, conversationHistory } = body;
      const isTestMode = query.test === 'true' || body.test === true;
      if (!message || !candidateName || !job || !linkId) {
        throw new BadRequest("Missing required fields: message, candidateName, job, linkId");
      }
      // Verify the job exists and link is valid
      const jobData = await this.jobService.getJobByLinkId(linkId);
      if (!jobData) {
        throw new NotFound("Interview link not found or expired");
      }
      // Get or create interview record
      const interviewId = await this.jobService.getOrCreateInterview(linkId, candidateName, isTestMode);
      // Save user message to conversation
      await this.jobService.saveConversationMessage(interviewId, linkId, 'candidate', message, isTestMode);
      // History source: frontend-supplied in test mode, database in production.
      let conversationHistoryToUse;
      if (isTestMode) {
        conversationHistoryToUse = conversationHistory || [];
        console.log(`[DEBUG] Using frontend conversation history (test mode): ${JSON.stringify(conversationHistoryToUse, null, 2)}`);
        // Drop malformed entries the frontend may send (missing sender/content).
        conversationHistoryToUse = conversationHistoryToUse.filter((msg: any) =>
          msg && msg.message && msg.message !== 'undefined' && msg.sender
        );
        console.log(`[DEBUG] Filtered conversation history: ${JSON.stringify(conversationHistoryToUse, null, 2)}`);
      } else {
        conversationHistoryToUse = await this.jobService.getConversationHistory(interviewId);
        console.log(`[DEBUG] Retrieved conversation history from database: ${JSON.stringify(conversationHistoryToUse, null, 2)}`);
      }
      // Generate AI response using chatbot service
      const aiResponse = await this.generateAIResponseWithChatbot(message, job, conversationHistoryToUse, candidateName, linkId);
      console.log(`[DEBUG] generateAIResponseWithChatbot returned:`, aiResponse);
      if (!aiResponse) {
        throw new Error("AI service is currently unavailable. Please try again later.");
      }
      // Save AI response to conversation
      await this.jobService.saveConversationMessage(interviewId, linkId, 'ai', aiResponse.message, isTestMode);
      // Log the messages
      await this.jobService.logInterviewEvent(linkId, 'user_message', {
        candidateName,
        message,
        interviewId,
        timestamp: new Date().toISOString()
      });
      await this.jobService.logInterviewEvent(linkId, 'ai_message', {
        candidateName,
        message: aiResponse.message,
        interviewId,
        timestamp: new Date().toISOString()
      });
      return {
        success: true,
        message: aiResponse.message,
        isComplete: aiResponse.isComplete
      };
    } catch (error: any) {
      console.error('Error handling chat:', error);
      throw error;
    }
  }

  // Get conversation history
  @Get("/conversation/:linkId")
  @Summary("Get conversation history")
  @Description("Retrieve the conversation history for a specific interview")
  @(Returns(200, Object).Description("Conversation history messages"))
  @(Returns(404, Object).Description("Interview link not found or expired"))
  async getConversation(@PathParams("linkId") linkId: string) {
    try {
      const jobData = await this.jobService.getJobByLinkId(linkId);
      if (!jobData) {
        throw new NotFound("Interview link not found or expired");
      }
      const interviewId = await this.jobService.getInterviewIdByLink(linkId);
      // No interview yet is not an error — just an empty transcript.
      if (!interviewId) {
        return {
          success: true,
          messages: []
        };
      }
      const messages = await this.jobService.getConversationHistory(interviewId);
      return {
        success: true,
        messages: messages
      };
    } catch (error: any) {
      console.error('Error getting conversation:', error);
      throw error;
    }
  }

  // End interview
  @Post("/end-interview/:linkId")
  @Summary("End interview session")
  @Description("End an active interview session and mark it as completed")
  @(Returns(200, Object).Description("Interview ended successfully"))
  @(Returns(404, Object).Description("Interview link or interview not found"))
  async endInterview(@PathParams("linkId") linkId: string) {
    try {
      const jobData = await this.jobService.getJobByLinkId(linkId);
      if (!jobData) {
        throw new NotFound("Interview link not found or expired");
      }
      const interviewId = await this.jobService.getInterviewIdByLink(linkId);
      if (!interviewId) {
        throw new NotFound("Interview not found");
      }
      // End interview with chatbot service
      await this.aiService.endInterviewWithChatbot(linkId);
      // Mark interview as completed
      await this.jobService.completeInterview(interviewId);
      // Log interview completion
      await this.jobService.logInterviewEvent(linkId, 'completed', {
        interviewId,
        timestamp: new Date().toISOString()
      });
      return {
        success: true,
        message: "Interview completed successfully"
      };
    } catch (error: any) {
      console.error('Error ending interview:', error);
      throw error;
    }
  }

  // ----- shared prompt/transport helpers (deduplicated from the three
  // ----- generate* methods, which previously each carried their own copy) ---

  /** Render a transcript (optionally only the last `lastN` entries) as "Speaker: message" lines. */
  private buildConversationContext(conversationHistory: any[], lastN?: number): string {
    const messages = lastN ? conversationHistory.slice(-lastN) : conversationHistory;
    return messages
      .map(msg => `${msg.sender === 'candidate' ? 'Candidate' : 'Interviewer'}: ${msg.message}`)
      .join('\n');
  }

  /** True once the candidate has sent MAX_CANDIDATE_MESSAGES or more messages. */
  private shouldEndInterview(conversationHistory: any[]): boolean {
    const candidateMessages = conversationHistory.filter(msg => msg.sender === 'candidate').length;
    return candidateMessages >= AIController.MAX_CANDIDATE_MESSAGES;
  }

  /** Prompt asking the model for a professional interview-closing message. */
  private buildEndPrompt(job: any): string {
    return `The interview is coming to a close. The candidate has provided comprehensive responses about their background and experience for the ${job.title} position.
Please provide a professional closing message that:
1. Thanks the candidate for their time and thoughtful responses
2. Acknowledges their qualifications and interest
3. Explains that their responses will be reviewed by the hiring team
4. Mentions they should expect to hear back within a few business days
5. Keeps it warm and professional
Keep it concise and professional.`;
  }

  /** System prompt for the acknowledge-then-ask-one-question loop. */
  private buildFollowUpSystemMessage(job: any): string {
    return `You are an AI interview agent conducting an interview for the position: ${job.title}
Job Details:
- Title: ${job.title}
- Description: ${job.description}
- Requirements: ${job.requirements}
- Required Skills: ${job.skills_required ? job.skills_required.join(', ') : 'Various technical skills'}
- Experience Level: ${job.experience_level.replace('_', ' ')}
CRITICAL INSTRUCTIONS:
1. You MUST acknowledge the candidate's response first
2. You MUST then ask ONE specific follow-up question
3. The question should be relevant to their answer and help evaluate their fit for the ${job.title} role
4. Focus on technical skills, experience, problem-solving, or behavioral aspects
5. Keep the question specific and engaging
6. Do NOT repeat the same question
7. Do NOT ask multiple questions at once
8. Maintain a professional but conversational tone
RESPONSE FORMAT:
- Start with a brief acknowledgment of their answer
- Then ask exactly one follow-up question
- End your response after the question
Example:
"Thanks for sharing that experience with React. That's exactly the kind of hands-on development we're looking for. Can you tell me about a specific challenge you faced while building that application and how you solved it?"`;
  }

  /** User prompt carrying the recent context plus the candidate's latest reply. */
  private buildFollowUpUserPrompt(conversationContext: string, userMessage: string): string {
    return `Recent conversation:
${conversationContext}
Candidate's latest response: ${userMessage}
Please respond with an acknowledgment and follow-up question.`;
  }

  /**
   * POST a raw prompt to the local Ollama server.
   * Returns the generated text, or null when the server returned no text.
   * Transport errors propagate to the caller.
   */
  private async callOllama(prompt: string, maxTokens: number): Promise<string | null> {
    const response = await axios.post(`http://localhost:${this.aiPort}/api/generate`, {
      model: this.aiModel,
      prompt,
      stream: false,
      options: {
        temperature: 0.7,
        max_tokens: maxTokens
      }
    });
    return response.data.response || null;
  }

  /**
   * Generate the opening interviewer message directly against the configured
   * provider (OpenRouter with Ollama fallback). Returns null on AI failure.
   * NOTE(review): not referenced inside this controller — kept for
   * backward compatibility; confirm before removing.
   */
  private async generateInitialMessage(job: any, candidateName: string, conversationHistory: any[] = []): Promise<string | null> {
    const skills = job.skills_required ? job.skills_required.join(', ') : 'various technical skills';
    const experience = job.experience_level.replace('_', ' ');
    // Context from mandatory-question answers, when any exist.
    const conversationContext = this.buildConversationContext(conversationHistory);
    const systemMessage = `You are an AI interview agent conducting an interview for the position: ${job.title}
Job Description: ${job.description}
Requirements: ${job.requirements}
Required Skills: ${skills}
Experience Level: ${experience}
Location: ${job.location || 'Remote'}
${conversationContext ? `Previous conversation (mandatory questions answered):
${conversationContext}
Based on the candidate's answers to the mandatory questions above, you should now conduct a deeper interview.` : ''}
Your task is to:
1. Greet the candidate warmly and professionally
2. Introduce yourself as their evaluation agent
3. ${conversationContext ? 'Acknowledge their previous answers and build upon them' : 'Explain that you\'ll be conducting a comprehensive interview'}
4. Ask them to tell you about themselves and their interest in this role
5. Keep your response conversational and engaging
6. Don't ask multiple questions at once - start with one open-ended question
Respond in a friendly, professional tone. Keep it concise but welcoming.`;
    const userPrompt = `The candidate's name is ${candidateName}. Please start the interview.`;
    try {
      if (this.aiProvider === 'openrouter') {
        const response = await this.aiService.generateResponse(userPrompt, systemMessage);
        if (response) {
          return response;
        }
        console.log('[WARN] OpenRouter failed, falling back to Ollama');
      }
      // Ollama (either configured or as fallback)
      return await this.callOllama(`${systemMessage}\n\n${userPrompt}`, 500);
    } catch (error) {
      console.error('Error calling AI:', error);
      return null; // Return null instead of fallback message
    }
  }

  /**
   * Generate the next interviewer turn through the chatbot service, falling
   * back to generateAIResponse() (direct provider call) when it fails.
   * After enough candidate messages, produces a closing message instead.
   */
  private async generateAIResponseWithChatbot(userMessage: string, job: any, conversationHistory: any[], candidateName: string, linkId: string): Promise<{ message: string; isComplete: boolean } | null> {
    if (this.shouldEndInterview(conversationHistory)) {
      try {
        const response = await this.aiService.generateResponseWithChatbot(
          this.buildEndPrompt(job),
          conversationHistory,
          undefined,
          job,
          candidateName,
          linkId
        );
        return {
          message: response || AIController.FALLBACK_CLOSING_MESSAGE,
          isComplete: true
        };
      } catch (error) {
        console.error('Error calling chatbot for end message:', error);
        return {
          message: AIController.FALLBACK_CLOSING_MESSAGE,
          isComplete: true
        };
      }
    }
    // Last 6 messages give the model enough context without prompt bloat.
    const conversationContext = this.buildConversationContext(conversationHistory, 6);
    console.log(`[DEBUG] Conversation history length: ${conversationHistory.length}`);
    console.log(`[DEBUG] Conversation context: ${conversationContext}`);
    console.log(`[DEBUG] User message: ${userMessage}`);
    const systemMessage = this.buildFollowUpSystemMessage(job);
    try {
      const aiResponse = await this.aiService.generateResponseWithChatbot(
        userMessage,
        conversationHistory,
        systemMessage,
        job,
        candidateName,
        linkId
      );
      if (aiResponse) {
        console.log(`[DEBUG] Chatbot Response: ${aiResponse}`);
        return {
          message: aiResponse,
          isComplete: false
        };
      }
      console.log('[WARN] Chatbot service failed, falling back to direct OpenRouter');
      return await this.generateAIResponse(userMessage, job, conversationHistory, candidateName);
    } catch (error) {
      console.error('Error calling chatbot service:', error);
      // Fallback to original method
      return await this.generateAIResponse(userMessage, job, conversationHistory, candidateName);
    }
  }

  /**
   * Generate the next interviewer turn directly against the configured
   * provider (OpenRouter with Ollama fallback). Returns null on AI failure.
   */
  private async generateAIResponse(userMessage: string, job: any, conversationHistory: any[], candidateName: string): Promise<{ message: string; isComplete: boolean } | null> {
    if (this.shouldEndInterview(conversationHistory)) {
      try {
        const closing = await this.callOllama(this.buildEndPrompt(job), 300);
        // NOTE: mirrors the original behavior — `message` may be null here;
        // callers only null-check the envelope, not the message.
        return {
          message: closing as string,
          isComplete: true
        };
      } catch (error) {
        console.error('Error calling Ollama for end message:', error);
        return null; // Return null instead of fallback message
      }
    }
    // Last 6 messages give the model enough context without prompt bloat.
    const conversationContext = this.buildConversationContext(conversationHistory, 6);
    console.log(`[DEBUG] Conversation history length: ${conversationHistory.length}`);
    console.log(`[DEBUG] Conversation context: ${conversationContext}`);
    console.log(`[DEBUG] User message: ${userMessage}`);
    const systemMessage = this.buildFollowUpSystemMessage(job);
    const userPrompt = this.buildFollowUpUserPrompt(conversationContext, userMessage);
    try {
      if (this.aiProvider === 'openrouter') {
        const aiResponse = await this.aiService.generateResponseWithHistory(userMessage, conversationHistory, systemMessage);
        if (aiResponse) {
          console.log(`[DEBUG] OpenRouter Response: ${aiResponse}`);
          return {
            message: aiResponse,
            isComplete: false
          };
        }
        console.log('[WARN] OpenRouter failed, falling back to Ollama');
      }
      // Ollama (either configured or as fallback)
      console.log(`[DEBUG] Sending to Ollama - Port: ${this.aiPort}, Model: ${this.aiModel}`);
      console.log(`[DEBUG] Prompt length: ${systemMessage.length + userPrompt.length} characters`);
      const aiResponse = await this.callOllama(`${systemMessage}\n\n${userPrompt}`, 400);
      console.log(`[DEBUG] Ollama Response: ${aiResponse}`);
      return {
        message: aiResponse as string,
        isComplete: false
      };
    } catch (error) {
      console.error('Error calling AI:', error);
      return null; // Return null instead of fallback message
    }
  }
}

View File

@ -0,0 +1,266 @@
import { Controller } from "@tsed/di";
import { Get, Post, Put, Patch, Delete, Tags, Summary, Description, Returns, Security } from "@tsed/schema";
import { BodyParams, PathParams, QueryParams } from "@tsed/platform-params";
import { Req } from "@tsed/platform-http";
import { BadRequest, Unauthorized, NotFound } from "@tsed/exceptions";
import jwt from "jsonwebtoken";
import { AdminService } from "../../services/AdminService.js";
import { UserService } from "../../services/UserService.js";
// NOTE(review): falling back to a hard-coded secret is unsafe outside local
// development — JWT_SECRET should be required in production. Confirm before
// tightening, since throwing here would change startup behavior.
const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";

/**
 * Admin-only endpoints: system statistics, user/job/token/interview/payment
 * management. Every handler calls checkAdmin() first; there is no framework
 * middleware, so the guard must stay as the first awaited call in each method.
 */
@Controller("/admin")
@Tags("Admin")
@Security("bearerAuth")
export class AdminController {
  private adminService = new AdminService();
  private userService = new UserService();

  /**
   * Authorize the request: require a Bearer token, a valid JWT, an existing
   * user, and the 'admin' role. Returns the user on success.
   *
   * FIX: previously the whole body sat inside one try/catch, so the
   * deliberately thrown Unauthorized("User not found") and
   * Unauthorized("Admin access required") were caught by our own catch and
   * masked behind the generic "Invalid token or insufficient permissions" —
   * making those branches unreachable. Only jwt.verify is guarded now.
   *
   * @throws Unauthorized when the token is missing/invalid, the user no
   *         longer exists, or the user is not an admin.
   */
  private async checkAdmin(req: any) {
    const token = req.headers.authorization?.replace("Bearer ", "");
    if (!token) {
      throw new Unauthorized("No token provided");
    }
    let decoded: any;
    try {
      decoded = jwt.verify(token, JWT_SECRET);
    } catch (error) {
      // Signature/expiry problems — never leak JWT internals to the client.
      throw new Unauthorized("Invalid token or insufficient permissions");
    }
    const user = await this.userService.getUserById(decoded.userId);
    if (!user) {
      throw new Unauthorized("User not found");
    }
    if (user.role !== 'admin') {
      throw new Unauthorized("Admin access required");
    }
    return user;
  }

  // System Statistics
  @Get("/statistics")
  @Summary("Get system statistics")
  @Description("High-level metrics: users, jobs, interviews, tokens, revenue")
  @(Returns(200).Description("Statistics returned"))
  @(Returns(401).Description("Unauthorized"))
  async getSystemStatistics(@Req() req: any) {
    await this.checkAdmin(req);
    return await this.adminService.getSystemStatistics();
  }

  // User Management
  @Get("/users")
  @Summary("List all users")
  @(Returns(200).Description("Users returned"))
  async getAllUsers(@Req() req: any) {
    await this.checkAdmin(req);
    return await this.adminService.getAllUsers();
  }

  @Get("/users/:id")
  @Summary("Get a user by ID")
  @(Returns(200).Description("User returned"))
  @(Returns(404).Description("User not found"))
  async getUserById(@Req() req: any, @PathParams("id") id: string) {
    await this.checkAdmin(req);
    return await this.adminService.getUserById(id);
  }

  @Put("/users/:id")
  @Summary("Update a user")
  @(Returns(200).Description("User updated"))
  async updateUser(
    @Req() req: any,
    @PathParams("id") id: string,
    @BodyParams() userData: any
  ) {
    await this.checkAdmin(req);
    return await this.adminService.updateUser(id, userData);
  }

  @Patch("/users/:id/toggle-status")
  @Summary("Toggle user active status")
  @(Returns(200).Description("User status toggled"))
  async toggleUserStatus(@Req() req: any, @PathParams("id") id: string) {
    await this.checkAdmin(req);
    return await this.adminService.toggleUserStatus(id);
  }

  @Patch("/users/:id/password")
  @Summary("Change user password")
  @(Returns(200).Description("Password updated"))
  async changeUserPassword(
    @Req() req: any,
    @PathParams("id") id: string,
    @BodyParams() body: { new_password: string }
  ) {
    await this.checkAdmin(req);
    return await this.adminService.changeUserPassword(id, body.new_password);
  }

  @Post("/users")
  @Summary("Create a user")
  @(Returns(200).Description("User created"))
  async createUser(@Req() req: any, @BodyParams() userData: any) {
    await this.checkAdmin(req);
    return await this.adminService.createUser(userData);
  }

  // Job Management
  @Get("/jobs")
  @Summary("List all jobs")
  @(Returns(200).Description("Jobs returned"))
  async getAllJobs(@Req() req: any) {
    await this.checkAdmin(req);
    return await this.adminService.getAllJobs();
  }

  @Get("/jobs/:id")
  @Summary("Get job by ID")
  @(Returns(200).Description("Job returned"))
  async getJobById(@Req() req: any, @PathParams("id") id: string) {
    await this.checkAdmin(req);
    return await this.adminService.getJobById(id);
  }

  @Patch("/jobs/:id/status")
  @Summary("Update job status")
  @(Returns(200).Description("Job status updated"))
  async updateJobStatus(
    @Req() req: any,
    @PathParams("id") id: string,
    @BodyParams() body: { status: string }
  ) {
    await this.checkAdmin(req);
    return await this.adminService.updateJobStatus(id, body.status);
  }

  @Put("/jobs/:id")
  @Summary("Update job details")
  @(Returns(200).Description("Job updated"))
  async updateJob(
    @Req() req: any,
    @PathParams("id") id: string,
    @BodyParams() jobData: any
  ) {
    await this.checkAdmin(req);
    return await this.adminService.updateJob(id, jobData);
  }

  // Token Management
  @Get("/user-token-summaries")
  @Summary("List user token summaries")
  @(Returns(200).Description("Summaries returned"))
  async getUserTokenSummaries(@Req() req: any) {
    await this.checkAdmin(req);
    return await this.adminService.getUserTokenSummaries();
  }

  @Post("/add-tokens")
  @Summary("Add tokens to a user")
  @(Returns(200).Description("Tokens added"))
  async addTokensToUser(@Req() req: any, @BodyParams() tokenData: any) {
    await this.checkAdmin(req);
    return await this.adminService.addTokensToUser(tokenData);
  }

  @Get("/token-packages")
  @Summary("List token packages")
  @(Returns(200).Description("Packages returned"))
  async getTokenPackages(@Req() req: any) {
    await this.checkAdmin(req);
    return await this.adminService.getTokenPackages();
  }

  @Post("/token-packages")
  @Summary("Create token package")
  @(Returns(200).Description("Package created"))
  async createTokenPackage(@Req() req: any, @BodyParams() packageData: any) {
    await this.checkAdmin(req);
    return await this.adminService.createTokenPackage(packageData);
  }

  @Put("/token-packages/:id")
  @Summary("Update token package")
  @(Returns(200).Description("Package updated"))
  async updateTokenPackage(
    @Req() req: any,
    @PathParams("id") id: string,
    @BodyParams() packageData: any
  ) {
    await this.checkAdmin(req);
    return await this.adminService.updateTokenPackage(id, packageData);
  }

  @Patch("/token-packages/:id/toggle-status")
  @Summary("Toggle token package active status")
  @(Returns(200).Description("Package status toggled"))
  async toggleTokenPackageStatus(@Req() req: any, @PathParams("id") id: string) {
    await this.checkAdmin(req);
    return await this.adminService.toggleTokenPackageStatus(id);
  }

  @Delete("/token-packages/:id")
  @Summary("Delete token package")
  @(Returns(200).Description("Package deleted"))
  async deleteTokenPackage(@Req() req: any, @PathParams("id") id: string) {
    await this.checkAdmin(req);
    return await this.adminService.deleteTokenPackage(id);
  }

  // Interview Management
  @Get("/interviews")
  @Summary("List interviews")
  @(Returns(200).Description("Interviews returned"))
  async getAllInterviews(@Req() req: any) {
    await this.checkAdmin(req);
    return await this.adminService.getAllInterviews();
  }

  @Get("/interviews/:id")
  @Summary("Get interview by ID")
  @(Returns(200).Description("Interview returned"))
  async getInterviewById(@Req() req: any, @PathParams("id") id: string) {
    await this.checkAdmin(req);
    return await this.adminService.getInterviewById(id);
  }

  // Payment Records
  @Get("/payments")
  @Summary("List payment records")
  @(Returns(200).Description("Payments returned"))
  async getPaymentRecords(@Req() req: any) {
    await this.checkAdmin(req);
    return await this.adminService.getPaymentRecords();
  }

  @Get("/payments/:id")
  @Summary("Get payment by ID")
  @(Returns(200).Description("Payment returned"))
  async getPaymentById(@Req() req: any, @PathParams("id") id: string) {
    await this.checkAdmin(req);
    return await this.adminService.getPaymentById(id);
  }

  // Job Links Management
  @Get("/jobs/:id/links")
  @Summary("Get job links")
  @(Returns(200).Description("Job links returned"))
  async getJobLinks(@Req() req: any, @PathParams("id") id: string) {
    await this.checkAdmin(req);
    return await this.adminService.getJobLinks(id);
  }

  @Post("/jobs/:id/create-link")
  @Summary("Create job link for testing")
  @(Returns(200).Description("Job link created"))
  async createJobLink(@Req() req: any, @PathParams("id") id: string, @BodyParams() linkData: any) {
    await this.checkAdmin(req);
    // Default to a single-use link when the caller does not specify.
    return await this.adminService.createJobLink(id, linkData.tokensAvailable || 1);
  }
}

View File

@ -0,0 +1,167 @@
import { Controller } from "@tsed/di";
import { Post, Get, Summary, Description, Returns, Tags, Security } from "@tsed/schema";
import { BodyParams } from "@tsed/platform-params";
import { Req } from "@tsed/platform-http";
import { BadRequest, Unauthorized } from "@tsed/exceptions";
import jwt from "jsonwebtoken";
import { UserService } from "../../services/UserService.js";
import { User, CreateUserRequest, UpdateUserRequest, UserResponse } from "../../models/User.js";
const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";
@Controller("/auth")
@Tags("Auth")
export class AuthController {
private userService = new UserService();
@Post("/login")
@Summary("Authenticate and obtain a JWT")
@Description("Provide email and password to receive a signed JWT used for subsequent requests.")
@Returns(200).Description("Successful authentication")
@Returns(400).Description("Missing email or password")
@Returns(401).Description("Invalid credentials or deactivated account")
async login(@BodyParams() body: { email: string; password: string }) {
const { email, password } = body;
if (!email || !password) {
throw new BadRequest("Email and password are required");
}
const user = await this.userService.getUserByEmail(email);
if (!user) {
throw new Unauthorized("Invalid credentials");
}
if (!user.is_active) {
throw new Unauthorized("Account is deactivated");
}
const isValidPassword = await this.userService.verifyPassword(user, password);
if (!isValidPassword) {
throw new Unauthorized("Invalid credentials");
}
// Update last login
await this.userService.updateLastLogin(user.id);
const token = jwt.sign(
{ userId: user.id, email: user.email, role: user.role },
JWT_SECRET,
{ expiresIn: "24h" }
);
return {
token,
user: {
id: user.id,
email: user.email,
first_name: user.first_name,
last_name: user.last_name,
role: user.role,
company_name: user.company_name,
avatar_url: user.avatar_url,
is_active: user.is_active,
last_login_at: user.last_login_at,
email_verified_at: user.email_verified_at,
created_at: user.created_at,
updated_at: user.updated_at
}
};
}
@Post("/register")
@Summary("Register a new recruiter user")
@Description("Creates a recruiter account and returns a JWT for immediate use.")
@Returns(200).Description("User created and token issued")
@Returns(400).Description("Validation failed or email already exists")
async register(@BodyParams() body: { email: string; password: string; first_name: string; last_name: string; company_name?: string }) {
const { email, password, first_name, last_name, company_name } = body;
if (!email || !password || !first_name || !last_name) {
throw new BadRequest("Email, password, first name, and last name are required");
}
// Validate email format
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
if (!emailRegex.test(email)) {
throw new BadRequest("Invalid email format");
}
// Validate password strength
if (password.length < 8) {
throw new BadRequest("Password must be at least 8 characters long");
}
try {
const user = await this.userService.createUser({
email,
password,
first_name,
last_name,
company_name,
role: 'recruiter'
});
// Generate token
const token = jwt.sign(
{ userId: user.id, email: user.email, role: user.role },
JWT_SECRET,
{ expiresIn: "24h" }
);
return {
token,
user
};
} catch (error: any) {
if (error.message.includes('already exists')) {
throw new BadRequest("User with this email already exists");
}
throw new BadRequest("Failed to create user account");
}
}
@Get("/me")
@Security("bearerAuth")
@Summary("Get the current authenticated user")
@Description("Returns the profile of the user associated with the provided JWT.")
@Returns(200).Description("User profile returned")
@Returns(401).Description("Missing or invalid token")
async getCurrentUser(@Req() req: any) {
const token = req.headers.authorization?.replace("Bearer ", "");
if (!token) {
throw new Unauthorized("No token provided");
}
try {
const decoded = jwt.verify(token, JWT_SECRET) as any;
const user = await this.userService.getUserById(decoded.userId);
if (!user) {
throw new Unauthorized("User not found");
}
if (!user.is_active) {
throw new Unauthorized("Account is deactivated");
}
return {
id: user.id,
email: user.email,
first_name: user.first_name,
last_name: user.last_name,
role: user.role,
company_name: user.company_name,
avatar_url: user.avatar_url,
is_active: user.is_active,
last_login_at: user.last_login_at,
email_verified_at: user.email_verified_at,
created_at: user.created_at,
updated_at: user.updated_at
};
} catch (error) {
throw new Unauthorized("Invalid token");
}
}
}

View File

@ -0,0 +1,10 @@
import {Controller} from "@tsed/di";
import {Get} from "@tsed/schema";
@Controller("/hello-world")
export class HelloWorldController {
@Get("/")
get() {
return "hello";
}
}

View File

@ -0,0 +1,501 @@
import { Controller } from "@tsed/di";
import { Post, Get, Delete, Put, Patch, Tags, Summary, Description, Returns, Security } from "@tsed/schema";
import { BodyParams, PathParams } from "@tsed/platform-params";
import { Req } from "@tsed/platform-http";
import { Unauthorized, NotFound } from "@tsed/exceptions";
import jwt from "jsonwebtoken";
import { pool } from "../../config/database.js";
import { UserService } from "../../services/UserService.js";
import { JobService } from "../../services/JobService.js";
import { TokenService } from "../../services/TokenService.js";
const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";
@Controller("/jobs")
@Tags("Jobs")
export class JobController {
private userService = new UserService();
private jobService = new JobService();
private tokenService = new TokenService();
// Middleware to check if user is authenticated
private async checkAuth(req: any) {
const token = req.headers.authorization?.replace("Bearer ", "");
if (!token) {
throw new Unauthorized("No token provided");
}
try {
const decoded = jwt.verify(token, JWT_SECRET) as any;
const user = await this.userService.getUserById(decoded.userId);
if (!user) {
throw new Unauthorized("User not found");
}
return user;
} catch (error) {
throw new Unauthorized("Invalid token");
}
}
// Create a new job
@Post("/")
@Security("bearerAuth")
@Summary("Create a new job")
@Description("Recruiters and admins can create a job posting.")
@(Returns(200).Description("Job created successfully"))
@(Returns(401).Description("Unauthorized or missing token"))
async createJob(@Req() req: any, @BodyParams() jobData: any) {
try {
console.log('=== JOB CREATION START ===');
console.log('Job creation request received:', JSON.stringify(jobData, null, 2));
console.log('Request headers:', req.headers);
// Test database connection first
try {
const connection = await pool.getConnection();
console.log('Database connection successful');
connection.release();
} catch (dbError) {
console.error('Database connection failed:', dbError);
throw new Error('Database connection failed: ' + dbError.message);
}
const user = await this.checkAuth(req);
console.log('User authenticated:', user.email, user.role);
// Check if user can create a job (basic validation)
if (user.role !== 'recruiter' && user.role !== 'admin') {
throw new Unauthorized("Only recruiters can create jobs");
}
// Validate required fields
if (!jobData.title || !jobData.description || !jobData.requirements) {
throw new Error("Missing required fields: title, description, or requirements");
}
console.log('All validations passed, creating job...');
const createdJob = await this.jobService.createJob(user.id, jobData);
console.log('Job created successfully:', createdJob.id);
return {
success: true,
job: createdJob,
message: "Job created successfully"
};
} catch (error) {
console.error('=== JOB CREATION ERROR ===');
console.error('Error type:', (error as any).constructor.name);
console.error('Error message:', (error as any).message);
console.error('Error stack:', (error as any).stack);
console.error('Full error object:', error as any);
throw error;
}
}
// Test endpoint to check if the controller is working
@Get("/test")
@Summary("Test endpoint")
@Description("Returns a simple heartbeat for Job controller")
@(Returns(200).Description("Service reachable"))
async testEndpoint() {
return {
success: true,
message: "JobController is working!",
timestamp: new Date().toISOString()
};
}
// Get all jobs for a user
@Get("/")
@Security("bearerAuth")
@Summary("List jobs")
@Description("Recruiters see their jobs; admins see all jobs.")
@(Returns(200).Description("Array of jobs returned"))
@(Returns(401).Description("Unauthorized"))
async getJobs(@Req() req: any) {
try {
const user = await this.checkAuth(req);
console.log('Fetching jobs for user:', user.email, user.role);
if (user.role === 'recruiter') {
// Recruiters can only see their own jobs
const jobs = await this.jobService.getJobsByUserId(user.id);
return {
success: true,
jobs: jobs
};
} else if (user.role === 'admin') {
// Admins can see all jobs
const jobs = await this.jobService.getAllJobs();
return {
success: true,
jobs: jobs
};
} else {
throw new Unauthorized("Only recruiters and admins can access jobs");
}
} catch (error: any) {
console.error('Error fetching jobs:', error);
throw error;
}
}
// Get a single job by ID
@Get("/:id")
@Security("bearerAuth")
@Summary("Get a job by ID")
@(Returns(200).Description("Job found"))
@(Returns(401).Description("Unauthorized"))
@(Returns(404).Description("Job not found"))
async getJobById(@Req() req: any, @PathParams("id") id: string) {
try {
const user = await this.checkAuth(req);
console.log('Fetching job by ID:', id, 'for user:', user.email);
const job = await this.jobService.getJobById(id);
if (!job) {
throw new NotFound("Job not found");
}
// Check if user can access this job
if (user.role === 'recruiter' && job.user_id !== user.id) {
throw new Unauthorized("You can only view your own jobs");
}
// Get job links if any
const links = await this.jobService.getJobLinks(id);
return {
success: true,
job: {
...job,
links: links
}
};
} catch (error: any) {
console.error('Error fetching job by ID:', error);
throw error;
}
}
// Update a job (recruiter owns it or admin)
@Put("/:id")
@Security("bearerAuth")
@Summary("Update a job")
@(Returns(200).Description("Job updated"))
@(Returns(401).Description("Unauthorized"))
@(Returns(404).Description("Job not found"))
async updateJob(@Req() req: any, @PathParams("id") id: string, @BodyParams() body: any) {
const user = await this.checkAuth(req);
const job = await this.jobService.getJobById(id);
if (!job) {
throw new NotFound("Job not found");
}
if (user.role === 'recruiter' && job.user_id !== user.id) {
throw new Unauthorized("You can only update your own jobs");
}
const updated = await this.jobService.updateJob(id, body);
return { success: true, job: updated };
}
// Update job status
@Patch("/:id/status")
@Security("bearerAuth")
@Summary("Update job status")
@(Returns(200).Description("Job status updated"))
@(Returns(401).Description("Unauthorized"))
@(Returns(404).Description("Job not found"))
async updateJobStatus(@Req() req: any, @PathParams("id") id: string, @BodyParams() body: { status: string }) {
const user = await this.checkAuth(req);
const job = await this.jobService.getJobById(id);
if (!job) {
throw new NotFound("Job not found");
}
if (user.role === 'recruiter' && job.user_id !== user.id) {
throw new Unauthorized("You can only update your own jobs");
}
const updated = await this.jobService.updateJobStatus(id, body.status);
return { success: true, job: updated };
}
// Create a job link
@Post("/:id/links")
@Security("bearerAuth")
@Summary("Create interview link for a job")
@(Returns(200).Description("Link created"))
@(Returns(401).Description("Unauthorized"))
@(Returns(404).Description("Job not found"))
async createJobLink(@Req() req: any, @PathParams("id") id: string, @BodyParams() linkData: any) {
try {
const user = await this.checkAuth(req);
console.log('Creating job link for job:', id, 'by user:', user.email);
// Verify job exists and user has access
const job = await this.jobService.getJobById(id);
if (!job) {
throw new NotFound("Job not found");
}
if (user.role === 'recruiter' && job.user_id !== user.id) {
throw new Unauthorized("You can only create links for your own jobs");
}
const link = await this.jobService.createJobLink(id, linkData.tokens_available || 0);
return {
success: true,
link: link,
message: "Job link created successfully"
};
} catch (error: any) {
console.error('Error creating job link:', error);
throw error;
}
}
// Add tokens to a job link
@Post("/:id/links/:linkId/tokens")
@Security("bearerAuth")
@Summary("Add tokens to a job link")
@(Returns(200).Description("Tokens added"))
@(Returns(400).Description("Insufficient tokens or invalid amount"))
@(Returns(401).Description("Unauthorized"))
@(Returns(404).Description("Job not found"))
async addTokensToLink(@Req() req: any, @PathParams("id") id: string, @PathParams("linkId") linkId: string, @BodyParams() tokenData: any) {
try {
const user = await this.checkAuth(req);
console.log('Adding tokens to link:', linkId, 'for job:', id, 'by user:', user.email);
// Verify job exists and user has access
const job = await this.jobService.getJobById(id);
if (!job) {
throw new NotFound("Job not found");
}
if (user.role === 'recruiter' && job.user_id !== user.id) {
throw new Unauthorized("You can only modify links for your own jobs");
}
// Check if user has enough tokens
const tokenSummary = await this.tokenService.getUserTokenSummary(user.id);
const tokensToAdd = tokenData.tokens || 0;
if (tokenSummary.total_available < tokensToAdd) {
return {
success: false,
error: "INSUFFICIENT_TOKENS",
message: `You don't have enough tokens. You have ${tokenSummary.total_available} tokens available, but need ${tokensToAdd}.`,
available_tokens: tokenSummary.total_available,
requested_tokens: tokensToAdd
};
}
const updatedLink = await this.jobService.addTokensToLink(linkId, tokensToAdd, user.id);
return {
success: true,
link: updatedLink,
message: "Tokens added successfully"
};
} catch (error: any) {
console.error('Error adding tokens to link:', error);
throw error;
}
}
// Remove tokens from a job link
@Delete("/:id/links/:linkId/tokens")
@Security("bearerAuth")
@Summary("Remove tokens from a job link")
@(Returns(200).Description("Tokens removed"))
@(Returns(400).Description("Invalid amount"))
@(Returns(401).Description("Unauthorized"))
@(Returns(404).Description("Job not found"))
async removeTokensFromLink(@Req() req: any, @PathParams("id") id: string, @PathParams("linkId") linkId: string, @BodyParams() tokenData: any) {
try {
const user = await this.checkAuth(req);
console.log('Removing tokens from link:', linkId, 'for job:', id, 'by user:', user.email);
// Verify job exists and user has access
const job = await this.jobService.getJobById(id);
if (!job) {
throw new NotFound("Job not found");
}
if (user.role === 'recruiter' && job.user_id !== user.id) {
throw new Unauthorized("You can only modify links for your own jobs");
}
const tokensToRemove = tokenData.tokens || 0;
if (tokensToRemove <= 0) {
return {
success: false,
error: "INVALID_AMOUNT",
message: "Please specify a valid number of tokens to remove."
};
}
const updatedLink = await this.jobService.removeTokensFromLink(linkId, tokensToRemove, user.id);
return {
success: true,
link: updatedLink,
message: "Tokens removed successfully"
};
} catch (error: any) {
console.error('Error removing tokens from link:', error);
throw error;
}
}
// Delete a job link
@Delete("/:id/links/:linkId")
@Security("bearerAuth")
@Summary("Delete a job link")
@(Returns(200).Description("Link deleted; tokens returned if applicable"))
@(Returns(401).Description("Unauthorized"))
@(Returns(404).Description("Job not found"))
async deleteJobLink(@Req() req: any, @PathParams("id") id: string, @PathParams("linkId") linkId: string) {
try {
const user = await this.checkAuth(req);
console.log('Deleting job link:', linkId, 'for job:', id, 'by user:', user.email);
// Verify job exists and user has access
const job = await this.jobService.getJobById(id);
if (!job) {
throw new NotFound("Job not found");
}
if (user.role === 'recruiter' && job.user_id !== user.id) {
throw new Unauthorized("You can only modify links for your own jobs");
}
const result = await this.jobService.deleteJobLink(linkId, user.id);
return {
success: true,
message: "Job link deleted successfully",
tokensReturned: result.tokensReturned
};
} catch (error: any) {
console.error('Error deleting job link:', error);
throw error;
}
}
// Get job by interview link (public endpoint)
@Get("/interview/:linkId")
@Summary("Get job by interview link")
@Description("Public endpoint used by candidates to load interview context.")
@(Returns(200).Description("Job returned"))
@(Returns(404).Description("Interview link not found or expired"))
async getJobByLink(@PathParams("linkId") linkId: string) {
try {
console.log('Getting job by link ID:', linkId);
const job = await this.jobService.getJobByLinkId(linkId);
if (!job) {
throw new NotFound("Interview link not found or expired");
}
return {
success: true,
job: job
};
} catch (error: any) {
console.error('Error getting job by link:', error);
throw error;
}
}
// Submit interview responses
@Post("/interview/:linkId/submit")
@Summary("Submit interview responses")
@Description("Submits candidate answers; if not a test, consumes one token.")
@(Returns(200).Description("Submission acknowledged"))
@(Returns(404).Description("Interview link not found or expired"))
async submitInterview(@PathParams("linkId") linkId: string, @BodyParams() submissionData: any) {
try {
console.log('Submitting interview for link:', linkId);
const job = await this.jobService.getJobByLinkId(linkId);
if (!job) {
throw new NotFound("Interview link not found or expired");
}
// If it's not a test, save the interview
if (!submissionData.isTest) {
await this.jobService.submitInterview(linkId, submissionData.answers);
}
return {
success: true,
message: submissionData.isTest ? "Test interview completed" : "Interview submitted successfully"
};
} catch (error: any) {
console.error('Error submitting interview:', error);
throw error;
}
}
// Log failed interview attempt (consent declined)
@Post("/interview/:linkId/failed")
@Summary("Log a failed interview attempt")
@Description("Records consent decline or early exit without consuming tokens.")
@(Returns(200).Description("Event recorded"))
@(Returns(404).Description("Interview link not found or expired"))
async logFailedAttempt(@PathParams("linkId") linkId: string) {
try {
console.log('Logging failed attempt for link:', linkId);
const job = await this.jobService.getJobByLinkId(linkId);
if (!job) {
throw new NotFound("Interview link not found or expired");
}
// Log the failed attempt (no token deduction)
await this.jobService.logFailedAttempt(linkId);
return {
success: true,
message: "Failed attempt logged successfully"
};
} catch (error: any) {
console.error('Error logging failed attempt:', error);
throw error;
}
}
// Health check endpoint
@Get("/health")
@Summary("Health check")
@Description("Reports DB connectivity and service health")
@(Returns(200).Description("Healthy or unhealthy status returned"))
async healthCheck() {
try {
// Test database connection
const connection = await pool.getConnection();
connection.release();
return {
status: "healthy",
database: "connected",
timestamp: new Date().toISOString()
};
} catch (error) {
return {
status: "unhealthy",
database: "disconnected",
error: (error as any).message,
timestamp: new Date().toISOString()
};
}
}
}

View File

@ -0,0 +1,478 @@
import { Controller } from "@tsed/di";
import { Get, Post, Put, Delete, Tags, Summary, Description, Returns, Security } from "@tsed/schema";
import { BodyParams, PathParams, QueryParams } from "@tsed/platform-params";
import { Req } from "@tsed/platform-http";
import { BadRequest, Unauthorized, NotFound } from "@tsed/exceptions";
import jwt from "jsonwebtoken";
import { PaymentService, CreatePaymentRequest } from "../../services/PaymentService.js";
import { StripeService } from "../../services/StripeService.js";
import { UserService } from "../../services/UserService.js";
import { pool } from "../../config/database.js";
const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";
@Controller("/payments")
@Tags("Payments")
export class PaymentController {
private paymentService = new PaymentService();
private stripeService = new StripeService();
private userService = new UserService();
// Middleware to check if user is authenticated
private async checkAuth(req: any) {
const token = req.headers.authorization?.replace("Bearer ", "");
if (!token) {
throw new Unauthorized("No token provided");
}
try {
const decoded = jwt.verify(token, JWT_SECRET) as any;
const user = await this.userService.getUserById(decoded.userId);
if (!user) {
throw new Unauthorized("User not found");
}
return user;
} catch (error) {
throw new Unauthorized("Invalid token");
}
}
// Middleware to check if user is admin
private async checkAdmin(req: any) {
const user = await this.checkAuth(req);
if (user.role !== 'admin') {
throw new Unauthorized("Admin access required");
}
return user;
}
// Get available token packages (public endpoint for purchasing)
@Get("/token-packages")
@Summary("Get available token packages for purchase")
@Description("Returns all active token packages that users can purchase")
@Returns(200, Array).Description("List of available token packages")
@Security() // No authentication required
async getTokenPackages() {
const connection = await pool.getConnection();
try {
const [rows] = await connection.execute(`
SELECT * FROM token_packages
WHERE is_active = 1
ORDER BY quantity ASC
`);
// Convert decimal fields to numbers
const packages = Array.isArray(rows) ? rows.map((pkg: any) => ({
...pkg,
total_price: Number(pkg.total_price),
price_per_token: Number(pkg.price_per_token),
discount_percentage: Number(pkg.discount_percentage),
quantity: Number(pkg.quantity)
})) : [];
return {
success: true,
packages
};
} catch (error) {
console.error('Error getting token packages:', error);
throw new BadRequest("Failed to fetch token packages");
} finally {
connection.release();
}
}
/**
* Calculate token price for a given quantity
*/
@Post("/calculate-price")
@Security("bearerAuth")
@Summary("Calculate token price")
@Description("Calculate the best price for a given quantity of tokens")
@Returns(200).Description("Price calculation returned")
@Returns(401).Description("Unauthorized")
@Returns(400).Description("Invalid request")
async calculatePrice(
@Req() req: any,
@BodyParams() body: { quantity: number; packageId?: string }
) {
try {
await this.checkAuth(req);
if (!body.quantity || body.quantity <= 0) {
throw new BadRequest("Quantity must be a positive number");
}
const calculation = await this.paymentService.calculateTokenPrice(
body.quantity,
body.packageId
);
return {
success: true,
calculation,
};
} catch (error: any) {
throw error;
}
}
/**
* Create a payment intent
*/
@Post("/create-intent")
@Security("bearerAuth")
@Summary("Create payment intent")
@Description("Create a Stripe payment intent for token purchase")
@Returns(200).Description("Payment intent created")
@Returns(401).Description("Unauthorized")
@Returns(400).Description("Invalid request")
async createPaymentIntent(
@Req() req: any,
@BodyParams() body: {
packageId?: string;
customQuantity?: number;
paymentFlowType: 'card' | 'ideal' | 'bank_transfer';
}
) {
try {
const user = await this.checkAuth(req);
if (!body.paymentFlowType) {
throw new BadRequest("Payment flow type is required");
}
if (!body.packageId && !body.customQuantity) {
throw new BadRequest("Either packageId or customQuantity is required");
}
if (body.customQuantity && (body.customQuantity <= 0 || body.customQuantity > 1000)) {
throw new BadRequest("Custom quantity must be between 1 and 1000");
}
const request: CreatePaymentRequest = {
userId: user.id,
packageId: body.packageId,
customQuantity: body.customQuantity,
paymentFlowType: body.paymentFlowType,
userEmail: user.email,
userName: `${user.first_name} ${user.last_name}`,
};
const result = await this.paymentService.createPaymentIntent(request);
return {
success: true,
paymentIntent: {
id: result.paymentIntent.id,
client_secret: result.paymentIntent.client_secret,
status: result.paymentIntent.status,
},
paymentRecord: {
id: result.paymentRecord.id,
amount: result.paymentRecord.amount,
currency: result.paymentRecord.currency,
status: result.paymentRecord.status,
},
calculation: result.calculation,
};
} catch (error: any) {
throw error;
}
}
/**
* Confirm payment completion
*/
@Post("/confirm")
@Security("bearerAuth")
@Summary("Confirm payment")
@Description("Confirm payment completion and allocate tokens")
@Returns(200).Description("Payment confirmed")
@Returns(401).Description("Unauthorized")
@Returns(400).Description("Invalid request")
async confirmPayment(
@Req() req: any,
@BodyParams() body: { paymentIntentId: string }
) {
try {
const user = await this.checkAuth(req);
if (!body.paymentIntentId) {
throw new BadRequest("Payment intent ID is required");
}
// Get payment intent from Stripe
const paymentIntent = await this.stripeService.getPaymentIntent(body.paymentIntentId);
if (paymentIntent.status === 'succeeded') {
// Process successful payment
const paymentRecord = await this.paymentService.processSuccessfulPayment(body.paymentIntentId);
return {
success: true,
message: "Payment confirmed successfully",
paymentRecord: {
id: paymentRecord.id,
amount: paymentRecord.amount,
currency: paymentRecord.currency,
status: paymentRecord.status,
tokensAllocated: paymentRecord.custom_quantity || 1,
},
};
} else if (paymentIntent.status === 'requires_action') {
return {
success: false,
requires_action: true,
message: "Payment requires additional action",
payment_intent: {
id: paymentIntent.id,
status: paymentIntent.status,
client_secret: paymentIntent.client_secret,
},
};
} else {
throw new BadRequest(`Payment not successful. Status: ${paymentIntent.status}`);
}
} catch (error: any) {
throw error;
}
}
/**
* Get available payment methods
*/
@Get("/methods")
@Security("bearerAuth")
@Summary("Get payment methods")
@Description("Get available payment methods for the user's region")
@Returns(200).Description("Payment methods returned")
@Returns(401).Description("Unauthorized")
async getPaymentMethods(
@Req() req: any,
@QueryParams("country") countryCode?: string
) {
try {
await this.checkAuth(req);
const paymentMethods = this.stripeService.getAvailablePaymentMethods(countryCode);
const idealConfig = countryCode === 'NL' ? this.stripeService.getIdealConfiguration() : null;
return {
success: true,
paymentMethods,
ideal: idealConfig,
};
} catch (error: any) {
throw error;
}
}
/**
* Get user payment history
*/
@Get("/history")
@Security("bearerAuth")
@Summary("Get payment history")
@Description("Get payment history for the authenticated user")
@Returns(200).Description("Payment history returned")
@Returns(401).Description("Unauthorized")
async getPaymentHistory(@Req() req: any) {
try {
const user = await this.checkAuth(req);
const payments = await this.paymentService.getUserPaymentHistory(user.id);
return {
success: true,
payments: payments.map(payment => ({
id: payment.id,
amount: payment.amount,
currency: payment.currency,
status: payment.status,
payment_flow_type: payment.payment_flow_type,
custom_quantity: payment.custom_quantity,
applied_discount_percentage: payment.applied_discount_percentage,
package_name: payment.package_name,
created_at: payment.created_at,
paid_at: payment.paid_at,
})),
};
} catch (error: any) {
throw error;
}
}
/**
* Get specific payment details
*/
@Get("/:id")
@Security("bearerAuth")
@Summary("Get payment details")
@Description("Get details of a specific payment")
@Returns(200).Description("Payment details returned")
@Returns(401).Description("Unauthorized")
@Returns(404).Description("Payment not found")
async getPaymentDetails(
@Req() req: any,
@PathParams("id") paymentId: string
) {
try {
const user = await this.checkAuth(req);
const payment = await this.paymentService.getPaymentById(paymentId);
if (!payment) {
throw new NotFound("Payment not found");
}
// Check if user owns this payment or is admin
if (payment.user_id !== user.id && user.role !== 'admin') {
throw new Unauthorized("Access denied");
}
return {
success: true,
payment: {
id: payment.id,
amount: payment.amount,
currency: payment.currency,
status: payment.status,
payment_flow_type: payment.payment_flow_type,
custom_quantity: payment.custom_quantity,
applied_discount_percentage: payment.applied_discount_percentage,
package_name: payment.package_name,
stripe_payment_intent_id: payment.stripe_payment_intent_id,
created_at: payment.created_at,
paid_at: payment.paid_at,
refunded_amount: payment.refunded_amount,
refund_reason: payment.refund_reason,
},
};
} catch (error: any) {
throw error;
}
}
/**
* Process refund (Admin only)
*/
@Post("/:id/refund")
@Security("bearerAuth")
@Summary("Process refund")
@Description("Process a refund for a payment (Admin only)")
@Returns(200).Description("Refund processed")
@Returns(401).Description("Unauthorized")
@Returns(404).Description("Payment not found")
async processRefund(
@Req() req: any,
@PathParams("id") paymentId: string,
@BodyParams() body: { amount?: number; reason?: string }
) {
try {
await this.checkAdmin(req);
const refund = await this.paymentService.processRefund(
paymentId,
body.amount,
body.reason
);
return {
success: true,
message: "Refund processed successfully",
refund: {
id: refund.id,
amount: refund.amount,
status: refund.status,
reason: refund.reason,
},
};
} catch (error: any) {
throw error;
}
}
/**
* Get payment statistics (Admin only)
*/
@Get("/admin/statistics")
@Security("bearerAuth")
@Summary("Get payment statistics")
@Description("Get payment statistics (Admin only)")
@Returns(200).Description("Statistics returned")
@Returns(401).Description("Unauthorized")
async getPaymentStatistics(@Req() req: any) {
try {
await this.checkAdmin(req);
const statistics = await this.paymentService.getPaymentStatistics();
return {
success: true,
statistics,
};
} catch (error: any) {
throw error;
}
}
/**
* Cancel payment intent
*/
@Post("/:id/cancel")
@Security("bearerAuth")
@Summary("Cancel payment")
@Description("Cancel a pending payment intent")
@Returns(200).Description("Payment cancelled")
@Returns(401).Description("Unauthorized")
@Returns(404).Description("Payment not found")
async cancelPayment(
@Req() req: any,
@PathParams("id") paymentId: string
) {
try {
const user = await this.checkAuth(req);
const payment = await this.paymentService.getPaymentById(paymentId);
if (!payment) {
throw new NotFound("Payment not found");
}
if (payment.user_id !== user.id) {
throw new Unauthorized("Access denied");
}
if (payment.status !== 'pending') {
throw new BadRequest("Only pending payments can be cancelled");
}
if (payment.stripe_payment_intent_id) {
await this.stripeService.cancelPaymentIntent(payment.stripe_payment_intent_id);
}
// Update payment record
const connection = await pool.getConnection();
await connection.execute(`
UPDATE payment_records
SET status = 'cancelled', updated_at = NOW()
WHERE id = ?
`, [paymentId]);
connection.release();
return {
success: true,
message: "Payment cancelled successfully",
};
} catch (error: any) {
throw error;
}
}
}

View File

@ -0,0 +1,75 @@
import { Controller } from "@tsed/di";
import { Get, Summary, Description, Returns, Tags, Security } from "@tsed/schema";
import { Req } from "@tsed/platform-http";
import { Unauthorized } from "@tsed/exceptions";
import jwt from "jsonwebtoken";
import { UserService } from "../../services/UserService.js";
import { TokenService } from "../../services/TokenService.js";
const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";
@Controller("/user")
@Tags("Users")
export class UserController {
  private userService = new UserService();
  private tokenService = new TokenService();

  /**
   * Resolve the authenticated user from the `Authorization: Bearer` header.
   *
   * @throws Unauthorized when the header is missing, the JWT is invalid,
   *         or the user id in the token no longer resolves to a user.
   */
  private async checkAuth(req: any) {
    const token = req.headers.authorization?.replace("Bearer ", "");
    if (!token) {
      throw new Unauthorized("No token provided");
    }
    try {
      const decoded = jwt.verify(token, JWT_SECRET) as any;
      const user = await this.userService.getUserById(decoded.userId);
      if (!user) {
        throw new Unauthorized("User not found");
      }
      return user;
    } catch (error) {
      // Preserve the specific reason (e.g. "User not found") instead of
      // masking every failure as "Invalid token", which the original did.
      if (error instanceof Unauthorized) {
        throw error;
      }
      throw new Unauthorized("Invalid token");
    }
  }

  // Get user token summary
  @Get("/token-summary")
  @Security("bearerAuth")
  @Summary("Get token summary for current user")
  @Description("Returns total tokens purchased and used by the authenticated user.")
  @Returns(200).Description("Token summary returned")
  @Returns(401).Description("Unauthorized")
  async getTokenSummary(@Req() req: any) {
    const user = await this.checkAuth(req);
    return await this.tokenService.getUserTokenSummary(user.id);
  }

  // Get user profile (public fields only — password_hash is never exposed)
  @Get("/profile")
  @Security("bearerAuth")
  @Summary("Get current user profile")
  @Description("Returns profile details for the authenticated user")
  @Returns(200).Description("User profile returned")
  @Returns(401).Description("Unauthorized")
  async getProfile(@Req() req: any) {
    const user = await this.checkAuth(req);
    return {
      id: user.id,
      email: user.email,
      first_name: user.first_name,
      last_name: user.last_name,
      role: user.role,
      company_name: user.company_name,
      avatar_url: user.avatar_url,
      is_active: user.is_active,
      last_login_at: user.last_login_at,
      email_verified_at: user.email_verified_at,
      created_at: user.created_at,
      updated_at: user.updated_at
    };
  }
}

View File

@ -0,0 +1,290 @@
import { Controller } from "@tsed/di";
import { Post, Tags, Summary, Description, Returns } from "@tsed/schema";
import { Req, Res } from "@tsed/platform-http";
import { $log } from "@tsed/logger";
import { PaymentService } from "../../services/PaymentService.js";
import { StripeService } from "../../services/StripeService.js";
import { UserService } from "../../services/UserService.js";
import { pool } from "../../config/database.js";
@Controller("/webhooks")
@Tags("Webhooks")
export class WebhookController {
  private paymentService = new PaymentService();
  private stripeService = new StripeService();
  private userService = new UserService();

  /**
   * Handle Stripe webhooks.
   *
   * Verifies the `stripe-signature` header against the raw payload, then
   * dispatches on event type. Each per-event handler below swallows its own
   * errors, so a processing failure still returns 200 (Stripe will not
   * retry); only errors thrown here — in practice signature verification —
   * reach the 400 branch.
   */
  @Post("/stripe")
  @Summary("Stripe webhook handler")
  @Description("Handle Stripe webhook events for payment processing")
  @Returns(200).Description("Webhook processed successfully")
  @Returns(400).Description("Invalid webhook signature")
  async handleStripeWebhook(@Req() req: any, @Res() res: any) {
    const sig = req.headers['stripe-signature'];
    // NOTE(review): signature verification requires the RAW request body;
    // assumes the framework is configured to deliver it unparsed — verify.
    const payload = req.body;
    try {
      // Verify webhook signature
      const event = this.stripeService.verifyWebhookSignature(payload, sig);
      $log.info(`Processing Stripe webhook: ${event.type}, ID: ${event.id}`);
      // Handle the event
      switch (event.type) {
        case 'payment_intent.succeeded':
          await this.handlePaymentIntentSucceeded(event.data.object);
          break;
        case 'payment_intent.payment_failed':
          await this.handlePaymentIntentFailed(event.data.object);
          break;
        case 'payment_intent.cancelled':
          await this.handlePaymentIntentCancelled(event.data.object);
          break;
        case 'charge.dispute.created':
          await this.handleChargeDisputeCreated(event.data.object);
          break;
        case 'invoice.payment_succeeded':
          await this.handleInvoicePaymentSucceeded(event.data.object);
          break;
        case 'customer.created':
          await this.handleCustomerCreated(event.data.object);
          break;
        default:
          // Unknown events are acknowledged with 200 so Stripe stops resending.
          $log.info(`Unhandled event type: ${event.type}`);
      }
      res.status(200).json({ received: true });
    } catch (error: any) {
      $log.error('Webhook signature verification failed:', error);
      res.status(400).json({ error: 'Invalid webhook signature' });
    }
  }

  /**
   * Handle successful payment intent: credit tokens via PaymentService and
   * (eventually) email a confirmation. Errors are logged, not rethrown.
   */
  private async handlePaymentIntentSucceeded(paymentIntent: any) {
    try {
      $log.info(`Payment succeeded: ${paymentIntent.id}`);
      // Process successful payment
      const paymentRecord = await this.paymentService.processSuccessfulPayment(paymentIntent.id);
      // Send confirmation email (if needed)
      await this.sendPaymentConfirmationEmail(paymentRecord);
      $log.info(`Successfully processed payment: ${paymentIntent.id} for user: ${paymentRecord.user_id}`);
    } catch (error) {
      $log.error(`Error processing successful payment ${paymentIntent.id}:`, error);
    }
  }

  /**
   * Handle failed payment intent: mark the record failed with Stripe's
   * last error message (or a generic fallback) and notify the user.
   */
  private async handlePaymentIntentFailed(paymentIntent: any) {
    try {
      $log.info(`Payment failed: ${paymentIntent.id}`);
      // Process failed payment
      const paymentRecord = await this.paymentService.processFailedPayment(
        paymentIntent.id,
        paymentIntent.last_payment_error?.message || 'Payment failed'
      );
      // Send failure notification (if needed)
      await this.sendPaymentFailureEmail(paymentRecord);
      $log.info(`Successfully processed failed payment: ${paymentIntent.id}`);
    } catch (error) {
      $log.error(`Error processing failed payment ${paymentIntent.id}:`, error);
    }
  }

  /**
   * Handle cancelled payment intent: flip the matching payment_records row
   * to 'cancelled' directly in the database.
   */
  private async handlePaymentIntentCancelled(paymentIntent: any) {
    try {
      $log.info(`Payment cancelled: ${paymentIntent.id}`);
      // Update payment record status
      // NOTE(review): connection is not released if execute throws (no
      // finally) — the catch below logs and the connection leaks. Confirm
      // and align with AdminService's try/finally pattern.
      const connection = await pool.getConnection();
      await connection.execute(`
        UPDATE payment_records
        SET status = 'cancelled', updated_at = NOW()
        WHERE stripe_payment_intent_id = ?
      `, [paymentIntent.id]);
      connection.release();
      $log.info(`Successfully processed cancelled payment: ${paymentIntent.id}`);
    } catch (error) {
      $log.error(`Error processing cancelled payment ${paymentIntent.id}:`, error);
    }
  }

  /**
   * Handle charge dispute created: record the dispute id in the payment's
   * stripe_metadata JSON and alert the admin.
   */
  private async handleChargeDisputeCreated(dispute: any) {
    try {
      $log.warn(`Charge dispute created: ${dispute.id} for charge: ${dispute.charge}`);
      // Update payment record with dispute information
      // NOTE(review): the subquery selects the local `id` of a
      // stripe_payment_intents row matched by charge_id and compares it to
      // payment_records.stripe_payment_intent_id — this assumes that column
      // stores the local row id, not the Stripe "pi_…" id used elsewhere in
      // this class. Verify against the schema.
      const connection = await pool.getConnection();
      await connection.execute(`
        UPDATE payment_records
        SET stripe_metadata = JSON_SET(
          COALESCE(stripe_metadata, '{}'),
          '$.dispute_id',
          ?
        ), updated_at = NOW()
        WHERE stripe_payment_intent_id = (
          SELECT id FROM stripe_payment_intents WHERE charge_id = ?
        )
      `, [dispute.id, dispute.charge]);
      connection.release();
      // Send dispute notification to admin
      await this.sendDisputeNotificationEmail(dispute);
      $log.info(`Successfully processed dispute: ${dispute.id}`);
    } catch (error) {
      $log.error(`Error processing dispute ${dispute.id}:`, error);
    }
  }

  /**
   * Handle invoice payment succeeded (for recurring payments if implemented).
   * Currently log-only.
   */
  private async handleInvoicePaymentSucceeded(invoice: any) {
    try {
      $log.info(`Invoice payment succeeded: ${invoice.id}`);
      // This would be used for recurring payments if implemented in the future
      // For now, we'll just log it
      $log.info(`Successfully processed invoice payment: ${invoice.id}`);
    } catch (error) {
      $log.error(`Error processing invoice payment ${invoice.id}:`, error);
    }
  }

  /**
   * Handle customer created: if the customer carries our userId in its
   * metadata, back-fill users.stripe_customer_id.
   */
  private async handleCustomerCreated(customer: any) {
    try {
      $log.info(`Customer created: ${customer.id}`);
      // Update user record with Stripe customer ID if needed
      if (customer.metadata?.userId) {
        const connection = await pool.getConnection();
        await connection.execute(`
          UPDATE users
          SET stripe_customer_id = ?, updated_at = NOW()
          WHERE id = ?
        `, [customer.id, customer.metadata.userId]);
        connection.release();
      }
      $log.info(`Successfully processed customer creation: ${customer.id}`);
    } catch (error) {
      $log.error(`Error processing customer creation ${customer.id}:`, error);
    }
  }

  /**
   * Send payment confirmation email — currently a log-only stub until an
   * email service exists.
   */
  private async sendPaymentConfirmationEmail(paymentRecord: any) {
    try {
      // Get user details
      const user = await this.userService.getUserById(paymentRecord.user_id);
      if (!user) {
        $log.error(`User not found for payment confirmation: ${paymentRecord.user_id}`);
        return;
      }
      // TODO: Implement email service
      // For now, just log the confirmation
      $log.info(`Payment confirmation email would be sent to: ${user.email} for payment: ${paymentRecord.id}`);
      // Email content would include:
      // - Payment amount
      // - Tokens allocated
      // - Payment method used
      // - Receipt information
    } catch (error) {
      $log.error('Error sending payment confirmation email:', error);
    }
  }

  /**
   * Send payment failure email — currently a log-only stub until an email
   * service exists.
   */
  private async sendPaymentFailureEmail(paymentRecord: any) {
    try {
      // Get user details
      const user = await this.userService.getUserById(paymentRecord.user_id);
      if (!user) {
        $log.error(`User not found for payment failure notification: ${paymentRecord.user_id}`);
        return;
      }
      // TODO: Implement email service
      // For now, just log the failure notification
      $log.info(`Payment failure email would be sent to: ${user.email} for payment: ${paymentRecord.id}`);
      // Email content would include:
      // - Payment amount
      // - Failure reason
      // - Retry instructions
      // - Support contact information
    } catch (error) {
      $log.error('Error sending payment failure email:', error);
    }
  }

  /**
   * Send dispute notification email to admin — log-only stub.
   */
  private async sendDisputeNotificationEmail(dispute: any) {
    try {
      // TODO: Implement email service for admin notifications
      $log.warn(`Dispute notification email would be sent to admin for dispute: ${dispute.id}`);
      // Email content would include:
      // - Dispute details
      // - Charge information
      // - Customer information
      // - Required actions
    } catch (error) {
      $log.error('Error sending dispute notification email:', error);
    }
  }

  /**
   * Health check endpoint for webhook monitoring (no signature check —
   * this route does not touch Stripe).
   */
  @Post("/stripe/health")
  @Summary("Stripe webhook health check")
  @Description("Health check endpoint for Stripe webhook")
  @Returns(200).Description("Webhook is healthy")
  async healthCheck(@Req() req: any, @Res() res: any) {
    res.status(200).json({
      status: 'healthy',
      timestamp: new Date().toISOString(),
      service: 'stripe-webhook'
    });
  }
}

View File

@ -0,0 +1,9 @@
/**
 * @file Automatically generated by @tsed/barrels.
 *
 * Do not edit by hand — re-run the barrels generator so this stays in sync
 * with the controllers directory. NOTE(review): Payment/Webhook controllers
 * are not exported here; presumably they are registered elsewhere or the
 * barrel is stale — verify and regenerate.
 */
export * from "./AIController.js";
export * from "./AdminController.js";
export * from "./AuthController.js";
export * from "./HelloWorldController.js";
export * from "./JobController.js";
export * from "./UserController.js";

36
backend/src/index.ts Normal file
View File

@ -0,0 +1,36 @@
import {$log} from "@tsed/logger";
import { PlatformExpress } from "@tsed/platform-express";
import {Server} from "./Server.js";
// Process signals/events on which the HTTP platform is shut down gracefully.
const SIG_EVENTS = [
  "beforeExit",
  "SIGHUP",
  "SIGINT",
  "SIGQUIT",
  "SIGILL",
  "SIGTRAP",
  "SIGABRT",
  "SIGBUS",
  "SIGFPE",
  "SIGUSR1",
  "SIGSEGV",
  "SIGUSR2",
  "SIGTERM"
];

// Coerce an arbitrary thrown/rejected value to an Error. `unhandledRejection`
// reasons and caught values are not guaranteed to be Error instances, so the
// original's direct `error.message` access could itself crash the logger
// (and fails to compile under `useUnknownInCatchVariables`).
const toError = (value: unknown): Error =>
  value instanceof Error ? value : new Error(String(value));

try {
  // Bootstrap the Ts.ED server on Express and start listening.
  const platform = await PlatformExpress.bootstrap(Server);
  await platform.listen();

  // Graceful shutdown on process signals.
  SIG_EVENTS.forEach((evt) => process.on(evt, () => platform.stop()));

  // Log unexpected failures with context, then stop the platform.
  ["uncaughtException", "unhandledRejection"].forEach((evt) =>
    process.on(evt, async (error) => {
      const err = toError(error);
      $log.error({event: "SERVER_" + evt.toUpperCase(), message: err.message, stack: err.stack});
      await platform.stop();
    })
  );
} catch (error) {
  const err = toError(error);
  $log.error({event: "SERVER_BOOTSTRAP_ERROR", message: err.message, stack: err.stack});
}

View File

@ -0,0 +1,51 @@
import { Request, Response, NextFunction } from "express";
import jwt from "jsonwebtoken";
import { UserService } from "../services/UserService.js";
import { Unauthorized } from "@tsed/exceptions";
// Secret used to verify JWTs. NOTE(review): the hard-coded fallback
// "your-secret-key" must never reach production — require JWT_SECRET there.
const JWT_SECRET = process.env.JWT_SECRET || "your-secret-key";

// Express Request augmented with the authenticated user's identity,
// populated by the adminAuth middleware below on successful auth.
export interface AuthenticatedRequest extends Request {
  user?: {
    id: string;
    email: string;
    role: string;
    first_name: string;
    last_name: string;
  };
}
/**
 * Express middleware that only lets authenticated admins through.
 *
 * Verifies the Bearer JWT, loads the user, requires role 'admin', and
 * attaches the identity to `req.user` for downstream handlers. On any
 * failure the request is rejected via `next(Unauthorized)`.
 */
export async function adminAuth(req: AuthenticatedRequest, res: Response, next: NextFunction) {
  try {
    const token = req.headers.authorization?.replace("Bearer ", "");
    if (!token) {
      throw new Unauthorized("No token provided");
    }
    const decoded = jwt.verify(token, JWT_SECRET) as any;
    const userService = new UserService();
    const user = await userService.getUserById(decoded.userId);
    if (!user) {
      throw new Unauthorized("User not found");
    }
    if (user.role !== 'admin') {
      throw new Unauthorized("Admin access required");
    }
    // Add user info to request
    req.user = {
      id: user.id,
      email: user.email,
      role: user.role,
      first_name: user.first_name,
      last_name: user.last_name
    };
    next();
  } catch (error) {
    // Keep the specific reason (e.g. "Admin access required") instead of
    // collapsing every failure into one generic message, as the original
    // did; only unexpected errors (malformed JWT, DB failure) are masked.
    next(error instanceof Unauthorized
      ? error
      : new Unauthorized("Invalid token or insufficient permissions"));
  }
}

View File

@ -0,0 +1,66 @@
// Full users-table row shape, including fields that must never be exposed
// to clients (password_hash) and soft-delete bookkeeping (deleted_at).
export interface User {
  id: string;
  email: string;
  password_hash: string;
  first_name: string;
  last_name: string;
  role: 'admin' | 'recruiter';
  company_name?: string;
  avatar_url?: string;
  is_active: boolean;
  last_login_at?: Date;
  email_verified_at?: Date;
  created_at: Date;
  updated_at: Date;
  deleted_at?: Date;  // set when soft-deleted; queries filter on IS NULL
}

// Request body for POST /auth/login.
export type LoginRequest = {
  email: string;
  password: string;
}

// Request body for self-service registration (role is always implied).
export type RegisterRequest = {
  email: string;
  password: string;
  first_name: string;
  last_name: string;
  company_name?: string;
}

// Admin-side user creation; unlike RegisterRequest the role can be chosen.
export type CreateUserRequest = {
  email: string;
  password: string;
  first_name: string;
  last_name: string;
  company_name?: string;
  role?: 'admin' | 'recruiter';
}

// Partial profile update; omitted fields are left unchanged.
export type UpdateUserRequest = {
  first_name?: string;
  last_name?: string;
  company_name?: string;
  avatar_url?: string;
  is_active?: boolean;
}

// Client-facing user shape: User minus password_hash and deleted_at.
export type UserResponse = {
  id: string;
  email: string;
  first_name: string;
  last_name: string;
  role: 'admin' | 'recruiter';
  company_name?: string;
  avatar_url?: string;
  is_active: boolean;
  last_login_at?: Date;
  email_verified_at?: Date;
  created_at: Date;
  updated_at: Date;
}

// Successful login payload: signed JWT plus the sanitized user.
export type LoginResponse = {
  token: string;
  user: UserResponse;
}

View File

@ -0,0 +1,294 @@
import axios from 'axios';
import { ChatbotService } from './ChatbotService.js';
// One turn in an OpenAI-style chat transcript.
export interface ChatMessage {
  role: 'system' | 'user' | 'assistant';
  content: string;
}

// Request payload for the OpenRouter /v1/chat/completions endpoint.
export interface ChatRequest {
  model: string;
  messages: ChatMessage[];
  temperature: number;
}

// A single completion choice returned by the API.
export interface ChatChoice {
  message: ChatMessage;
}

// Minimal slice of the completions response this service reads.
export interface ChatResponse {
  choices: ChatChoice[];
}
/**
 * Client for OpenRouter chat completions with an optional local chatbot
 * service in front of it (chatbot first, direct OpenRouter as fallback).
 * Configured entirely from OPENROUTER_* environment variables.
 */
export class AIService {
  private apiKey: string;
  private model: string;
  private baseUrl: string;      // hostname only; "https://" is prepended per request
  private relPath: string;
  private temperature: number;
  private chatbotService: ChatbotService;

  // Predefined models from your C# code — short aliases mapped to full
  // OpenRouter model identifiers.
  private static readonly PREDEFINED_MODELS: Record<string, string> = {
    'dobby': 'sentientagi/dobby-mini-unhinged-plus-llama-3.1-8b',
    'dolphin': 'cognitivecomputations/dolphin-mixtral-8x22b',
    'dolphin_free': 'cognitivecomputations/dolphin3.0-mistral-24b:free',
    'gemma': 'google/gemma-3-12b-it',
    'gpt-4o-mini': 'openai/gpt-4o-mini',
    'gpt-4.1-nano': 'openai/gpt-4.1-nano',
    'qwen': 'qwen/qwen3-30b-a3b',
    'unslop': 'thedrummer/unslopnemo-12b',
    'euryale': 'sao10k/l3.3-euryale-70b',
    'wizard': 'microsoft/wizardlm-2-8x22b',
    'deepseek': 'deepseek/deepseek-chat-v3-0324'
  };

  constructor() {
    this.apiKey = process.env.OPENROUTER_API_KEY || 'sk-or-REPLACE_ME';
    this.model = process.env.OPENROUTER_MODEL || 'gemma';
    this.baseUrl = process.env.OPENROUTER_BASE_URL || 'openrouter.ai';
    this.relPath = process.env.OPENROUTER_REL_PATH || '/api';
    this.temperature = parseFloat(process.env.OPENROUTER_TEMPERATURE || '0.7');
    this.chatbotService = new ChatbotService();
    // Map predefined model names to full model names
    if (AIService.PREDEFINED_MODELS[this.model]) {
      this.model = AIService.PREDEFINED_MODELS[this.model];
    }
    // NOTE(review): this logs the first 10 chars of the API key — even a
    // prefix in logs is a credential-hygiene risk; consider removing.
    console.log(`[DEBUG] AIService initialized:`);
    console.log(`[DEBUG] - API Key: ${this.apiKey.substring(0, 10)}...`);
    console.log(`[DEBUG] - Model: ${this.model}`);
    console.log(`[DEBUG] - Base URL: ${this.baseUrl}`);
    console.log(`[DEBUG] - Rel Path: ${this.relPath}`);
    console.log(`[DEBUG] - Temperature: ${this.temperature}`);
    console.log(`[DEBUG] - Chatbot Service: ${this.chatbotService ? 'Enabled' : 'Disabled'}`);
  }

  /**
   * Single-shot completion: optional system message + one user prompt.
   * Returns the assistant text, or null on any API error (errors are
   * logged, never thrown).
   */
  async generateResponse(prompt: string, systemMessage?: string): Promise<string | null> {
    try {
      const messages: ChatMessage[] = [];
      if (systemMessage) {
        messages.push({ role: 'system', content: systemMessage });
      }
      messages.push({ role: 'user', content: prompt });
      const payload: ChatRequest = {
        model: this.model,
        messages: messages,
        temperature: this.temperature
      };
      const url = `https://${this.baseUrl}${this.relPath}/v1/chat/completions`;
      console.log(`[DEBUG] Sending to OpenRouter - Model: ${this.model}, URL: ${url}`);
      console.log(`[DEBUG] Prompt length: ${prompt.length} characters`);
      const response = await axios.post(url, payload, {
        headers: {
          'Authorization': `Bearer ${this.apiKey}`,
          'Content-Type': 'application/json'
        }
      });
      const data = response.data as ChatResponse;
      const aiResponse = data.choices?.[0]?.message?.content || null;
      console.log(`[DEBUG] OpenRouter Response: ${aiResponse}`);
      return aiResponse;
    } catch (error) {
      // NOTE(review): `error` is untyped; under strict settings this needs
      // narrowing (e.g. axios.isAxiosError) before reading .response.
      console.error('Error calling OpenRouter:', error);
      if (error.response) {
        console.error('Response status:', error.response.status);
        console.error('Response data:', error.response.data);
      }
      return null;
    }
  }

  /**
   * Completion with prior conversation context. History entries with
   * sender 'candidate'/'user' map to role 'user', 'ai'/'assistant' to
   * role 'assistant'; any other sender is silently dropped.
   * Returns assistant text or null on error.
   */
  async generateResponseWithHistory(
    userMessage: string,
    conversationHistory: any[],
    systemMessage?: string
  ): Promise<string | null> {
    try {
      const messages: ChatMessage[] = [];
      if (systemMessage) {
        messages.push({ role: 'system', content: systemMessage });
      }
      // Add conversation history
      conversationHistory.forEach(msg => {
        if (msg.sender === 'candidate' || msg.sender === 'user') {
          messages.push({ role: 'user', content: msg.message });
        } else if (msg.sender === 'ai' || msg.sender === 'assistant') {
          messages.push({ role: 'assistant', content: msg.message });
        }
      });
      // Add current user message
      messages.push({ role: 'user', content: userMessage });
      const payload: ChatRequest = {
        model: this.model,
        messages: messages,
        temperature: this.temperature
      };
      const url = `https://${this.baseUrl}${this.relPath}/v1/chat/completions`;
      console.log(`[DEBUG] Sending to OpenRouter with history - Model: ${this.model}`);
      console.log(`[DEBUG] Messages count: ${messages.length}`);
      const response = await axios.post(url, payload, {
        headers: {
          'Authorization': `Bearer ${this.apiKey}`,
          'Content-Type': 'application/json'
        }
      });
      const data = response.data as ChatResponse;
      const aiResponse = data.choices?.[0]?.message?.content || null;
      console.log(`[DEBUG] OpenRouter Response: ${aiResponse}`);
      return aiResponse;
    } catch (error) {
      console.error('Error calling OpenRouter with history:', error);
      if (error.response) {
        console.error('Response status:', error.response.status);
        console.error('Response data:', error.response.data);
      }
      return null;
    }
  }

  /**
   * Generate response using chatbot service with fallback to direct OpenRouter.
   * Tries the chatbot service when its health check passes; otherwise (or on
   * failure/empty response) falls back to OpenRouter if the chatbot service
   * permits it. Returns null when both paths are unavailable.
   */
  async generateResponseWithChatbot(
    userMessage: string,
    conversationHistory: any[],
    systemMessage?: string,
    job?: any,
    candidateName?: string,
    linkId?: string
  ): Promise<string | null> {
    // Try chatbot service first
    try {
      const isHealthy = await this.chatbotService.isHealthy();
      if (isHealthy) {
        console.log(`[DEBUG] Using chatbot service for response generation`);
        const response = await this.chatbotService.sendMessage({
          message: userMessage,
          conversationHistory,
          systemMessage,
          job,
          candidateName,
          linkId
        });
        if (response) {
          return response;
        }
      }
    } catch (error) {
      console.error('[ERROR] Chatbot service failed, falling back to direct OpenRouter:', error);
    }
    // Fallback to direct OpenRouter
    if (this.chatbotService.shouldUseFallback()) {
      console.log(`[DEBUG] Falling back to direct OpenRouter`);
      return await this.generateResponseWithHistory(userMessage, conversationHistory, systemMessage);
    }
    return null;
  }

  /**
   * Initialize interview using chatbot service; on failure, fall back to a
   * direct OpenRouter call with a locally-built system prompt (see
   * buildInterviewSystemMessage). Returns the opening message or null.
   */
  async initializeInterviewWithChatbot(
    job: any,
    candidateName: string,
    linkId: string,
    conversationHistory: any[] = []
  ): Promise<string | null> {
    try {
      const isHealthy = await this.chatbotService.isHealthy();
      if (isHealthy) {
        console.log(`[DEBUG] Using chatbot service for interview initialization`);
        return await this.chatbotService.initializeInterview(job, candidateName, linkId, conversationHistory);
      }
    } catch (error) {
      console.error('[ERROR] Chatbot service failed for interview initialization:', error);
    }
    // Fallback to direct OpenRouter
    if (this.chatbotService.shouldUseFallback()) {
      console.log(`[DEBUG] Falling back to direct OpenRouter for interview initialization`);
      const systemMessage = this.buildInterviewSystemMessage(job, candidateName, conversationHistory);
      return await this.generateResponse(`The candidate's name is ${candidateName}. Please start the interview.`, systemMessage);
    }
    return null;
  }

  /**
   * End interview using chatbot service. Returns false when the service is
   * unhealthy or errors — there is no OpenRouter fallback for this call.
   */
  async endInterviewWithChatbot(linkId: string): Promise<boolean> {
    try {
      const isHealthy = await this.chatbotService.isHealthy();
      if (isHealthy) {
        console.log(`[DEBUG] Using chatbot service for interview end`);
        return await this.chatbotService.endInterview(linkId);
      }
    } catch (error) {
      console.error('[ERROR] Chatbot service failed for interview end:', error);
    }
    return false;
  }

  /**
   * Build the interviewer system prompt from the job row and any prior
   * (mandatory-question) exchange. NOTE(review): assumes
   * job.experience_level is always a string (it is .replace()'d without a
   * guard) and job.skills_required, when present, is an array — verify
   * against the jobs schema.
   */
  private buildInterviewSystemMessage(job: any, candidateName: string, conversationHistory: any[] = []): string {
    const skills = job.skills_required ? job.skills_required.join(', ') : 'various technical skills';
    const experience = job.experience_level.replace('_', ' ');
    // Build context from conversation history (mandatory question answers)
    const conversationContext = conversationHistory
      .map(msg => `${msg.sender === 'candidate' ? 'Candidate' : 'Interviewer'}: ${msg.message}`)
      .join('\n');
    return `You are an AI interview agent conducting an interview for the position: ${job.title}
Job Description: ${job.description}
Requirements: ${job.requirements}
Required Skills: ${skills}
Experience Level: ${experience}
Location: ${job.location || 'Remote'}
${conversationContext ? `Previous conversation (mandatory questions answered):
${conversationContext}
Based on the candidate's answers to the mandatory questions above, you should now conduct a deeper interview.` : ''}
Your task is to:
1. Greet the candidate warmly and professionally
2. Introduce yourself as their evaluation agent
3. ${conversationContext ? 'Acknowledge their previous answers and build upon them' : 'Explain that you\'ll be conducting a comprehensive interview'}
4. Ask them to tell you about themselves and their interest in this role
5. Keep your response conversational and engaging
6. Don't ask multiple questions at once - start with one open-ended question
Respond in a friendly, professional tone. Keep it concise but welcoming.`;
  }
}

View File

@ -0,0 +1,830 @@
import { pool } from '../config/database.js';
import { $log } from '@tsed/logger';
import bcrypt from 'bcryptjs';
import { randomUUID } from 'crypto';
export class AdminService {
// System Statistics
/**
 * Aggregate platform-wide counters for the admin dashboard: users, jobs,
 * completed interviews, token purchase/usage totals, and paid revenue.
 * Runs five independent aggregate queries on one pooled connection.
 */
async getSystemStatistics() {
  const connection = await pool.getConnection();
  try {
    // Get basic counts (soft-deleted users excluded)
    const [userStats] = await connection.execute(`
      SELECT
        COUNT(*) as total_users,
        SUM(CASE WHEN is_active = TRUE THEN 1 ELSE 0 END) as active_users
      FROM users
      WHERE deleted_at IS NULL
    `);
    const [jobStats] = await connection.execute(`
      SELECT COUNT(*) as total_jobs
      FROM jobs
      WHERE deleted_at IS NULL
    `);
    // Only completed interviews count toward the dashboard total.
    const [interviewStats] = await connection.execute(`
      SELECT COUNT(*) as total_interviews
      FROM interviews
      WHERE status = 'completed'
    `);
    const [tokenStats] = await connection.execute(`
      SELECT
        COALESCE(SUM(quantity), 0) as total_tokens_purchased,
        COALESCE(SUM(tokens_used), 0) as total_tokens_used
      FROM interview_tokens
    `);
    // Revenue counts only records already marked 'paid'.
    const [revenueStats] = await connection.execute(`
      SELECT COALESCE(SUM(amount), 0) as total_revenue
      FROM payment_records
      WHERE status = 'paid'
    `);
    // mysql2 returns row arrays; take the single aggregate row defensively.
    const userStatsData = Array.isArray(userStats) ? userStats[0] : userStats;
    const jobStatsData = Array.isArray(jobStats) ? jobStats[0] : jobStats;
    const interviewStatsData = Array.isArray(interviewStats) ? interviewStats[0] : interviewStats;
    const tokenStatsData = Array.isArray(tokenStats) ? tokenStats[0] : tokenStats;
    const revenueStatsData = Array.isArray(revenueStats) ? revenueStats[0] : revenueStats;
    return {
      total_users: userStatsData?.total_users || 0,
      active_users: userStatsData?.active_users || 0,
      total_jobs: jobStatsData?.total_jobs || 0,
      total_interviews: interviewStatsData?.total_interviews || 0,
      total_tokens_purchased: tokenStatsData?.total_tokens_purchased || 0,
      total_tokens_used: tokenStatsData?.total_tokens_used || 0,
      total_revenue: revenueStatsData?.total_revenue || 0,
      generated_at: new Date().toISOString()
    };
  } catch (error) {
    $log.error('Error getting system statistics:', error);
    throw error;
  } finally {
    connection.release();
  }
}
// User Management
// List every non-deleted user (public columns only), newest first.
// Errors are logged with context and rethrown; the pooled connection is
// always released.
async getAllUsers() {
  const connection = await pool.getConnection();
  try {
    const result = await connection.execute(`
      SELECT
        id, email, first_name, last_name, role, company_name,
        avatar_url, is_active, last_login_at, email_verified_at,
        created_at, updated_at
      FROM users
      WHERE deleted_at IS NULL
      ORDER BY created_at DESC
    `);
    const userRows = result[0];
    if (Array.isArray(userRows)) {
      return userRows;
    }
    return [];
  } catch (error) {
    $log.error('Error getting all users:', error);
    throw error;
  } finally {
    connection.release();
  }
}
// Fetch one non-deleted user by id (public columns only); null when absent.
async getUserById(id: string) {
  const connection = await pool.getConnection();
  try {
    const [result] = await connection.execute(`
      SELECT
        id, email, first_name, last_name, role, company_name,
        avatar_url, is_active, last_login_at, email_verified_at,
        created_at, updated_at
      FROM users
      WHERE id = ? AND deleted_at IS NULL
    `, [id]);
    const matches = Array.isArray(result) ? result : [];
    return matches.length > 0 ? matches[0] : null;
  } catch (error) {
    $log.error('Error getting user by ID:', error);
    throw error;
  } finally {
    connection.release();
  }
}
/**
 * Partially update a user row from the provided fields, then return the
 * refreshed user. Throws when no recognized field is present.
 *
 * NOTE(review): the string fields use truthiness checks, so an empty
 * string cannot clear first_name/last_name/email/role — confirm whether
 * that is intended; company_name/avatar_url/is_active correctly use
 * `!== undefined` so falsy values CAN be set for them.
 */
async updateUser(id: string, userData: any) {
  const connection = await pool.getConnection();
  try {
    // Build SET clause and bound values in lock-step.
    const updateFields = [];
    const values = [];
    if (userData.first_name) {
      updateFields.push('first_name = ?');
      values.push(userData.first_name);
    }
    if (userData.last_name) {
      updateFields.push('last_name = ?');
      values.push(userData.last_name);
    }
    if (userData.email) {
      updateFields.push('email = ?');
      values.push(userData.email);
    }
    if (userData.role) {
      updateFields.push('role = ?');
      values.push(userData.role);
    }
    if (userData.company_name !== undefined) {
      updateFields.push('company_name = ?');
      values.push(userData.company_name);
    }
    if (userData.avatar_url !== undefined) {
      updateFields.push('avatar_url = ?');
      values.push(userData.avatar_url);
    }
    if (userData.is_active !== undefined) {
      updateFields.push('is_active = ?');
      values.push(userData.is_active);
    }
    if (updateFields.length === 0) {
      throw new Error('No fields to update');
    }
    updateFields.push('updated_at = NOW()');
    values.push(id);  // final placeholder is the WHERE id = ?
    await connection.execute(
      `UPDATE users SET ${updateFields.join(', ')} WHERE id = ? AND deleted_at IS NULL`,
      values
    );
    return await this.getUserById(id);
  } catch (error) {
    $log.error('Error updating user:', error);
    throw error;
  } finally {
    connection.release();
  }
}
/**
 * Flip a user's is_active flag and return the new value.
 *
 * @throws Error('User not found') when no non-deleted user matches.
 */
async toggleUserStatus(id: string) {
  const connection = await pool.getConnection();
  try {
    // Read the current flag so the flip happens explicitly in app code.
    const [rows] = await connection.execute(
      'SELECT is_active FROM users WHERE id = ? AND deleted_at IS NULL',
      [id]
    );
    // The original guard only threw when `rows` was an array AND empty;
    // any other shape fell through to `rows[0].is_active` and could
    // TypeError. Normalize first so a missing user always yields the
    // clean 'User not found' error.
    const users = Array.isArray(rows) ? rows : [];
    if (users.length === 0) {
      throw new Error('User not found');
    }
    const newStatus = !(users[0] as any).is_active;
    await connection.execute(
      'UPDATE users SET is_active = ?, updated_at = NOW() WHERE id = ? AND deleted_at IS NULL',
      [newStatus, id]
    );
    return { success: true, new_status: newStatus };
  } catch (error) {
    $log.error('Error toggling user status:', error);
    throw error;
  } finally {
    connection.release();
  }
}
// Replace a user's password with a fresh bcrypt hash (cost factor 10).
// The plaintext is never stored; soft-deleted users are excluded.
async changeUserPassword(id: string, newPassword: string) {
  const connection = await pool.getConnection();
  try {
    const password_hash = await bcrypt.hash(newPassword, 10);
    const sql =
      'UPDATE users SET password_hash = ?, updated_at = NOW() WHERE id = ? AND deleted_at IS NULL';
    await connection.execute(sql, [password_hash, id]);
    return { success: true };
  } catch (error) {
    $log.error('Error changing user password:', error);
    throw error;
  } finally {
    connection.release();
  }
}
/**
 * Create a user (admin flow): rejects duplicate emails, bcrypt-hashes the
 * password, inserts the row with a fresh UUID and an immediately-verified
 * email, seeds the user_usage row, and returns the created user.
 *
 * NOTE(review): the duplicate check + insert are not in one transaction,
 * so a concurrent create with the same email could race — confirm a
 * unique index on users.email backs this up.
 */
async createUser(userData: any) {
  const connection = await pool.getConnection();
  try {
    // Check if user already exists
    const [existingUsers] = await connection.execute(
      'SELECT id FROM users WHERE email = ? AND deleted_at IS NULL',
      [userData.email]
    );
    if (Array.isArray(existingUsers) && existingUsers.length > 0) {
      throw new Error('User with this email already exists');
    }
    // Hash password
    const password_hash = await bcrypt.hash(userData.password, 10);
    // Generate UUID for user ID
    const userId = randomUUID();
    // Insert user (8 bound values; email_verified_at/created_at/updated_at
    // come from NOW())
    await connection.execute(
      `INSERT INTO users (id, email, password_hash, first_name, last_name, role, company_name, is_active, email_verified_at, created_at, updated_at)
       VALUES (?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW(), NOW())`,
      [
        userId,
        userData.email,
        password_hash,
        userData.first_name,
        userData.last_name,
        userData.role || 'recruiter',  // default role
        userData.company_name || null,
        true                            // new users start active
      ]
    );
    // Initialize usage tracking
    await connection.execute(
      'INSERT INTO user_usage (user_id) VALUES (?)',
      [userId]
    );
    return await this.getUserById(userId);
  } catch (error) {
    $log.error('Error creating user:', error);
    throw error;
  } finally {
    connection.release();
  }
}
// Job Management
// List every non-deleted job joined with its owner's identity, newest
// first. LEFT JOIN keeps jobs whose owner row is missing.
async getAllJobs() {
  const connection = await pool.getConnection();
  try {
    const result = await connection.execute(`
      SELECT
        j.*,
        u.first_name,
        u.last_name,
        u.email,
        u.company_name
      FROM jobs j
      LEFT JOIN users u ON j.user_id = u.id
      WHERE j.deleted_at IS NULL
      ORDER BY j.created_at DESC
    `);
    const jobRows = result[0];
    if (Array.isArray(jobRows)) {
      return jobRows;
    }
    return [];
  } catch (error) {
    $log.error('Error getting all jobs:', error);
    throw error;
  } finally {
    connection.release();
  }
}
// Fetch one non-deleted job by id, joined with its owner; null when absent.
async getJobById(id: string) {
  const connection = await pool.getConnection();
  try {
    const [result] = await connection.execute(`
      SELECT
        j.*,
        u.first_name,
        u.last_name,
        u.email,
        u.company_name
      FROM jobs j
      LEFT JOIN users u ON j.user_id = u.id
      WHERE j.id = ? AND j.deleted_at IS NULL
    `, [id]);
    const matches = Array.isArray(result) ? result : [];
    return matches.length > 0 ? matches[0] : null;
  } catch (error) {
    $log.error('Error getting job by ID:', error);
    throw error;
  } finally {
    connection.release();
  }
}
// Set a job's status column and echo the new value back.
// NOTE: reports success even when no row matched the id.
async updateJobStatus(id: string, status: string) {
  const connection = await pool.getConnection();
  try {
    const sql =
      'UPDATE jobs SET status = ?, updated_at = NOW() WHERE id = ? AND deleted_at IS NULL';
    await connection.execute(sql, [status, id]);
    return { success: true, new_status: status };
  } catch (error) {
    $log.error('Error updating job status:', error);
    throw error;
  } finally {
    connection.release();
  }
}
/**
 * Partially update a job row from the provided fields, then return the
 * refreshed job (with owner info) via getJobById.
 *
 * Replaces eleven copy-pasted if-blocks with a data-driven build that
 * preserves the original semantics exactly: string-ish fields are applied
 * when truthy (empty string means "not provided"), salary bounds when
 * `!== undefined` (0/null are legal values), and skills_required is JSON-
 * serialized for its JSON column.
 *
 * @throws Error('No fields to update') when nothing recognized is present.
 */
async updateJob(id: string, jobData: any) {
  const connection = await pool.getConnection();
  try {
    const updateFields: string[] = [];
    const values: any[] = [];

    // Columns copied through when truthy (mirrors the original checks).
    const truthyColumns = [
      'title', 'description', 'requirements', 'location',
      'employment_type', 'experience_level', 'currency', 'status'
    ];
    for (const column of truthyColumns) {
      if (jobData[column]) {
        updateFields.push(`${column} = ?`);
        values.push(jobData[column]);
      }
    }
    // JSON column: stored serialized.
    if (jobData.skills_required) {
      updateFields.push('skills_required = ?');
      values.push(JSON.stringify(jobData.skills_required));
    }
    // Numeric bounds: only `undefined` means "not provided".
    for (const column of ['salary_min', 'salary_max']) {
      if (jobData[column] !== undefined) {
        updateFields.push(`${column} = ?`);
        values.push(jobData[column]);
      }
    }

    if (updateFields.length === 0) {
      throw new Error('No fields to update');
    }
    updateFields.push('updated_at = NOW()');
    values.push(id);  // final placeholder is the WHERE id = ?

    // Column names come from the fixed lists above, never from input, so
    // interpolation into the SET clause is injection-safe.
    await connection.execute(
      `UPDATE jobs SET ${updateFields.join(', ')} WHERE id = ? AND deleted_at IS NULL`,
      values
    );
    return await this.getJobById(id);
  } catch (error) {
    $log.error('Error updating job:', error);
    throw error;
  } finally {
    connection.release();
  }
}
// Token Management
/**
 * Per-user token purchase/usage rollup for the admin view: totals over
 * each user's ACTIVE interview_tokens rows plus a utilization percentage.
 * Users with no active tokens still appear (LEFT JOIN → zeros).
 */
async getUserTokenSummaries() {
  const connection = await pool.getConnection();
  try {
    const [rows] = await connection.execute(`
      SELECT
        u.id as user_id,
        u.first_name,
        u.last_name,
        u.email,
        COALESCE(SUM(it.quantity), 0) as total_purchased,
        COALESCE(SUM(it.tokens_used), 0) as total_used,
        COALESCE(SUM(it.tokens_remaining), 0) as total_available,
        CASE
          WHEN SUM(it.quantity) > 0 THEN ROUND((SUM(it.tokens_used) / SUM(it.quantity)) * 100, 2)
          ELSE 0
        END as utilization_percentage
      FROM users u
      LEFT JOIN interview_tokens it ON u.id = it.user_id AND it.status = 'active'
      WHERE u.deleted_at IS NULL
      GROUP BY u.id, u.first_name, u.last_name, u.email
      ORDER BY u.created_at DESC
    `);
    return Array.isArray(rows) ? rows : [];
  } catch (error) {
    $log.error('Error getting user token summaries:', error);
    throw error;
  } finally {
    connection.release();
  }
}
/**
 * Grant interview tokens to a user (admin action — intentionally creates
 * no payment record).
 *
 * Inserts an `interview_tokens` row and bumps the user's purchased-token
 * counter in `user_usage`. Both writes run inside a single transaction so
 * a failure cannot leave the counter out of sync with the token ledger
 * (the original issued the two statements independently).
 *
 * @param tokenData - { user_id, quantity, price_per_token }
 * @returns { success: true, token_id } on success
 */
async addTokensToUser(tokenData: any) {
  const connection = await pool.getConnection();
  try {
    const { user_id, quantity, price_per_token } = tokenData;
    const total_price = quantity * price_per_token;
    const tokenId = randomUUID();
    await connection.beginTransaction();
    // Create token record
    await connection.execute(`
      INSERT INTO interview_tokens (
        id, user_id, token_type, quantity, price_per_token,
        total_price, status, purchased_at, created_at, updated_at
      ) VALUES (?, ?, ?, ?, ?, ?, 'active', NOW(), NOW(), NOW())
    `, [
      tokenId,
      user_id,
      quantity === 1 ? 'single' : 'bulk', // single token vs bulk grant
      quantity,
      price_per_token,
      total_price
    ]);
    // No payment record needed for admin-granted tokens
    // Update the aggregate usage counter (insert-or-increment upsert)
    await connection.execute(`
      INSERT INTO user_usage (user_id, tokens_purchased)
      VALUES (?, ?)
      ON DUPLICATE KEY UPDATE tokens_purchased = tokens_purchased + ?
    `, [user_id, quantity, quantity]);
    await connection.commit();
    return { success: true, token_id: tokenId };
  } catch (error) {
    // Undo the token insert if the counter update (or anything else) failed.
    await connection.rollback();
    $log.error('Error adding tokens to user:', error);
    throw error;
  } finally {
    connection.release();
  }
}
// Token Packages
/**
 * List every token package (active and inactive alike), newest first.
 * @returns raw `token_packages` rows; [] when the table is empty
 */
async getTokenPackages() {
  const connection = await pool.getConnection();
  try {
    const [packageRows] = await connection.execute(`
      SELECT * FROM token_packages
      ORDER BY created_at DESC
    `);
    if (Array.isArray(packageRows)) {
      return packageRows;
    }
    return [];
  } catch (err) {
    $log.error('Error getting token packages:', err);
    throw err;
  } finally {
    connection.release();
  }
}
/**
 * Create a new token package.
 *
 * Optional/missing fields are normalized with `?? null` because mysql2
 * rejects `undefined` bind parameters outright ("Bind parameters must not
 * contain undefined") — the original would throw before reaching the DB.
 *
 * @param packageData - name, description, quantity, price_per_token,
 *                      total_price, plus optional discount_percentage,
 *                      is_popular, is_active
 * @returns { success: true, package_id } on success
 */
async createTokenPackage(packageData: any) {
  const connection = await pool.getConnection();
  try {
    const packageId = randomUUID();
    await connection.execute(`
      INSERT INTO token_packages (
        id, name, description, quantity, price_per_token,
        total_price, discount_percentage, is_popular, is_active,
        created_at, updated_at
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW())
    `, [
      packageId,
      packageData.name ?? null,
      packageData.description ?? null,
      packageData.quantity ?? null,
      packageData.price_per_token ?? null,
      packageData.total_price ?? null,
      packageData.discount_percentage || 0,
      packageData.is_popular || false,
      packageData.is_active !== false // active unless explicitly disabled
    ]);
    return { success: true, package_id: packageId };
  } catch (error) {
    $log.error('Error creating token package:', error);
    throw error;
  } finally {
    connection.release();
  }
}
/**
 * Partially update a token package; only supplied fields are written.
 *
 * Numeric fields use a null/undefined check rather than truthiness so a
 * legitimate zero (e.g. setting total_price or quantity to 0) is not
 * silently dropped, matching the existing `!== undefined` handling of
 * discount_percentage / is_popular / is_active.
 *
 * @throws Error('No fields to update') when nothing updatable was supplied
 * @returns { success: true }
 */
async updateTokenPackage(id: string, packageData: any) {
  const connection = await pool.getConnection();
  try {
    const updateFields: string[] = [];
    const values: any[] = [];
    if (packageData.name) {
      updateFields.push('name = ?');
      values.push(packageData.name);
    }
    if (packageData.description) {
      updateFields.push('description = ?');
      values.push(packageData.description);
    }
    if (packageData.quantity != null) {
      updateFields.push('quantity = ?');
      values.push(packageData.quantity);
    }
    if (packageData.price_per_token != null) {
      updateFields.push('price_per_token = ?');
      values.push(packageData.price_per_token);
    }
    if (packageData.total_price != null) {
      updateFields.push('total_price = ?');
      values.push(packageData.total_price);
    }
    if (packageData.discount_percentage !== undefined) {
      updateFields.push('discount_percentage = ?');
      values.push(packageData.discount_percentage);
    }
    if (packageData.is_popular !== undefined) {
      updateFields.push('is_popular = ?');
      values.push(packageData.is_popular);
    }
    if (packageData.is_active !== undefined) {
      updateFields.push('is_active = ?');
      values.push(packageData.is_active);
    }
    if (updateFields.length === 0) {
      throw new Error('No fields to update');
    }
    updateFields.push('updated_at = NOW()');
    values.push(id); // WHERE id = ? placeholder
    await connection.execute(
      `UPDATE token_packages SET ${updateFields.join(', ')} WHERE id = ?`,
      values
    );
    return { success: true };
  } catch (error) {
    $log.error('Error updating token package:', error);
    throw error;
  } finally {
    connection.release();
  }
}
/**
 * Flip a token package's is_active flag.
 *
 * Normalizes the result set before the emptiness check — the original
 * only threw when `rows` was an array AND empty, and otherwise fell
 * through to reading `is_active` off the wrong-shaped value.
 *
 * @throws Error('Token package not found') when the id does not exist
 * @returns { success: true, new_status } with the status after the toggle
 */
async toggleTokenPackageStatus(id: string) {
  const connection = await pool.getConnection();
  try {
    // Get current status
    const [rows] = await connection.execute(
      'SELECT is_active FROM token_packages WHERE id = ?',
      [id]
    );
    const pkgRows = Array.isArray(rows) ? rows : [];
    if (pkgRows.length === 0) {
      throw new Error('Token package not found');
    }
    // MySQL BOOLEAN comes back as 0/1; `!` flips it into a real boolean.
    const newStatus = !(pkgRows[0] as any).is_active;
    await connection.execute(
      'UPDATE token_packages SET is_active = ?, updated_at = NOW() WHERE id = ?',
      [newStatus, id]
    );
    return { success: true, new_status: newStatus };
  } catch (error) {
    $log.error('Error toggling token package status:', error);
    throw error;
  } finally {
    connection.release();
  }
}
/**
 * Permanently delete a token package.
 * Hard delete — unlike jobs, token_packages carries no soft-delete column.
 * Deleting a non-existent id is a no-op and still reports success.
 */
async deleteTokenPackage(id: string) {
  const connection = await pool.getConnection();
  try {
    await connection.execute('DELETE FROM token_packages WHERE id = ?', [id]);
    return { success: true };
  } catch (err) {
    $log.error('Error deleting token package:', err);
    throw err;
  } finally {
    connection.release();
  }
}
// Interview Management
/**
 * List every interview, newest first, with the candidate's identity and
 * the job title attached. LEFT JOINs keep interviews visible even when
 * the linked user or job row is missing.
 */
async getAllInterviews() {
  const connection = await pool.getConnection();
  try {
    const interviewQuery = `
      SELECT
        i.*,
        u.first_name,
        u.last_name,
        u.email,
        j.title as job_title
      FROM interviews i
      LEFT JOIN users u ON i.user_id = u.id
      LEFT JOIN jobs j ON i.job_id = j.id
      ORDER BY i.created_at DESC
    `;
    const [interviewRows] = await connection.execute(interviewQuery);
    if (Array.isArray(interviewRows)) {
      return interviewRows;
    }
    return [];
  } catch (err) {
    $log.error('Error getting all interviews:', err);
    throw err;
  } finally {
    connection.release();
  }
}
/**
 * Fetch a single interview by id, with candidate identity and job title.
 * @returns the joined row, or null when no interview matches
 */
async getInterviewById(id: string) {
  const connection = await pool.getConnection();
  try {
    const [interviewRows] = await connection.execute(`
      SELECT
        i.*,
        u.first_name,
        u.last_name,
        u.email,
        j.title as job_title
      FROM interviews i
      LEFT JOIN users u ON i.user_id = u.id
      LEFT JOIN jobs j ON i.job_id = j.id
      WHERE i.id = ?
    `, [id]);
    return Array.isArray(interviewRows) && interviewRows.length > 0
      ? interviewRows[0]
      : null;
  } catch (err) {
    $log.error('Error getting interview by ID:', err);
    throw err;
  } finally {
    connection.release();
  }
}
// Payment Records
/**
 * List every payment record, newest first, joined with the paying user's
 * identity and the purchased package's name (nullable via LEFT JOIN).
 */
async getPaymentRecords() {
  const connection = await pool.getConnection();
  try {
    const paymentQuery = `
      SELECT
        pr.*,
        u.first_name,
        u.last_name,
        u.email,
        tp.name as package_name
      FROM payment_records pr
      LEFT JOIN users u ON pr.user_id = u.id
      LEFT JOIN token_packages tp ON pr.token_package_id = tp.id
      ORDER BY pr.created_at DESC
    `;
    const [paymentRows] = await connection.execute(paymentQuery);
    if (Array.isArray(paymentRows)) {
      return paymentRows;
    }
    return [];
  } catch (err) {
    $log.error('Error getting payment records:', err);
    throw err;
  } finally {
    connection.release();
  }
}
/**
 * Fetch a single payment record by id, joined with user identity and
 * package name.
 * @returns the joined row, or null when no payment matches
 */
async getPaymentById(id: string) {
  const connection = await pool.getConnection();
  try {
    const [paymentRows] = await connection.execute(`
      SELECT
        pr.*,
        u.first_name,
        u.last_name,
        u.email,
        tp.name as package_name
      FROM payment_records pr
      LEFT JOIN users u ON pr.user_id = u.id
      LEFT JOIN token_packages tp ON pr.token_package_id = tp.id
      WHERE pr.id = ?
    `, [id]);
    return Array.isArray(paymentRows) && paymentRows.length > 0
      ? paymentRows[0]
      : null;
  } catch (err) {
    $log.error('Error getting payment by ID:', err);
    throw err;
  } finally {
    connection.release();
  }
}
// Job Links Management
/**
 * List the interview links created for a job, newest first.
 * Best-effort: on any DB failure this logs and returns [] rather than
 * throwing, so a broken link list never takes down the job view.
 */
async getJobLinks(jobId: string) {
  const connection = await pool.getConnection();
  try {
    const [linkRows] = await connection.execute(
      'SELECT * FROM job_links WHERE job_id = ? ORDER BY created_at DESC',
      [jobId]
    );
    return Array.isArray(linkRows) ? linkRows : [];
  } catch (err) {
    $log.error('Error getting job links:', err);
    return [];
  } finally {
    connection.release();
  }
}
/**
 * Create a shareable interview link for a job.
 *
 * Generates a fresh primary key plus an 8-character random slug for the
 * public URL, inserts the row, then reads it back so the caller receives
 * the DB-generated timestamps.
 *
 * @param jobId - job the link belongs to
 * @param tokensAvailable - how many interview tokens the link carries (default 1)
 * @throws Error('Failed to create job link') if the row cannot be read back
 */
async createJobLink(jobId: string, tokensAvailable: number = 1) {
  const connection = await pool.getConnection();
  try {
    // Generate a random URL slug and UUID
    const linkId = randomUUID();
    const urlSlug = randomUUID().replace(/-/g, '').substring(0, 8);
    await connection.execute(
      'INSERT INTO job_links (id, job_id, url_slug, tokens_available, created_at, updated_at) VALUES (?, ?, ?, ?, NOW(), NOW())',
      [linkId, jobId, urlSlug, tokensAvailable]
    );
    // Read the freshly created link back
    const [createdRows] = await connection.execute(
      'SELECT * FROM job_links WHERE id = ?',
      [linkId]
    );
    if (!Array.isArray(createdRows) || createdRows.length === 0) {
      throw new Error('Failed to create job link');
    }
    return createdRows[0];
  } catch (err) {
    $log.error('Error creating job link:', err);
    throw err;
  } finally {
    connection.release();
  }
}
}

View File

@ -0,0 +1,170 @@
import axios, { AxiosInstance, AxiosResponse } from 'axios';
/** Payload for a single chat turn sent to the external chatbot service. */
export interface ChatbotRequest {
  message: string;
  conversationHistory?: any[];   // prior turns, passed through verbatim
  job?: any;                     // job context for the interview
  candidateName?: string;
  linkId?: string;               // interview link this conversation belongs to
  systemMessage?: string;        // optional system prompt override
}
/** Standard response envelope returned by every chatbot endpoint. */
export interface ChatbotResponse {
  ok: boolean;                   // true when the service handled the request
  reply?: string;                // assistant text, present when ok
  error?: string;                // error description, present when not ok
}
/** Shape of the chatbot service's /api/health response. */
export interface ChatbotHealthResponse {
  status: string;
  timestamp: string;
}
export class ChatbotService {
private client: AxiosInstance;
private baseUrl: string;
private timeout: number;
private fallbackEnabled: boolean;
constructor() {
this.baseUrl = process.env.CHATBOT_SERVICE_URL || 'http://chatbot:80';
this.timeout = parseInt(process.env.CHATBOT_SERVICE_TIMEOUT || '30000');
this.fallbackEnabled = process.env.CHATBOT_FALLBACK_ENABLED === 'true';
this.client = axios.create({
baseURL: this.baseUrl,
timeout: this.timeout,
headers: {
'Content-Type': 'application/json',
},
});
console.log(`[DEBUG] ChatbotService initialized:`);
console.log(`[DEBUG] - Base URL: ${this.baseUrl}`);
console.log(`[DEBUG] - Timeout: ${this.timeout}ms`);
console.log(`[DEBUG] - Fallback Enabled: ${this.fallbackEnabled}`);
}
/**
* Check if chatbot service is healthy
*/
async isHealthy(): Promise<boolean> {
try {
const response = await this.client.get('/api/health');
return response.status === 200;
} catch (error) {
console.error('[ERROR] Chatbot service health check failed:', error);
return false;
}
}
/**
* Send a chat message to the chatbot service
*/
async sendMessage(request: ChatbotRequest): Promise<string | null> {
try {
console.log(`[DEBUG] Sending message to chatbot service: ${request.message.substring(0, 100)}...`);
const response: AxiosResponse<ChatbotResponse> = await this.client.post('/api/chat', {
message: request.message,
conversationHistory: request.conversationHistory,
job: request.job,
candidateName: request.candidateName,
linkId: request.linkId,
systemMessage: request.systemMessage
});
if (response.data.ok && response.data.reply) {
console.log(`[DEBUG] Chatbot service response received: ${response.data.reply.substring(0, 100)}...`);
return response.data.reply;
} else {
console.error('[ERROR] Chatbot service returned error:', response.data.error);
return null;
}
} catch (error) {
console.error('[ERROR] Chatbot service request failed:', error);
if (error.response) {
console.error('[ERROR] Response status:', error.response.status);
console.error('[ERROR] Response data:', error.response.data);
}
return null;
}
}
/**
* Initialize an interview with the chatbot service
*/
async initializeInterview(job: any, candidateName: string, linkId: string, conversationHistory: any[] = []): Promise<string | null> {
try {
console.log(`[DEBUG] Initializing interview with chatbot service for ${candidateName}`);
const response: AxiosResponse<ChatbotResponse> = await this.client.post('/api/interview/start', {
job,
candidateName,
linkId,
conversationHistory
});
if (response.data.ok && response.data.reply) {
console.log(`[DEBUG] Interview initialized successfully`);
return response.data.reply;
} else {
console.error('[ERROR] Failed to initialize interview:', response.data.error);
return null;
}
} catch (error) {
console.error('[ERROR] Interview initialization failed:', error);
return null;
}
}
/**
* End an interview with the chatbot service
*/
async endInterview(linkId: string): Promise<boolean> {
try {
console.log(`[DEBUG] Ending interview with chatbot service for linkId: ${linkId}`);
const response: AxiosResponse<ChatbotResponse> = await this.client.post('/api/interview/end', {
linkId
});
if (response.data.ok) {
console.log(`[DEBUG] Interview ended successfully`);
return true;
} else {
console.error('[ERROR] Failed to end interview:', response.data.error);
return false;
}
} catch (error) {
console.error('[ERROR] Interview end failed:', error);
return false;
}
}
/**
* Get interview status from chatbot service
*/
async getInterviewStatus(linkId: string): Promise<any | null> {
try {
const response: AxiosResponse<ChatbotResponse> = await this.client.get(`/api/interview/status/${linkId}`);
if (response.data.ok) {
return response.data;
} else {
console.error('[ERROR] Failed to get interview status:', response.data.error);
return null;
}
} catch (error) {
console.error('[ERROR] Get interview status failed:', error);
return null;
}
}
/**
* Check if fallback to direct AI service should be used
*/
shouldUseFallback(): boolean {
return this.fallbackEnabled;
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,479 @@
import { pool } from '../config/database.js';
import { $log } from '@tsed/logger';
import { randomUUID } from 'crypto';
import { StripeService, PaymentIntentData, CustomerData } from './StripeService.js';
import { TokenService } from './TokenService.js';
/** Input for creating a Stripe payment intent for a token purchase. */
export interface CreatePaymentRequest {
  userId: string;
  packageId?: string;          // preselected token package, if any
  customQuantity?: number;     // ad-hoc token quantity when no package chosen
  paymentFlowType: 'card' | 'ideal' | 'bank_transfer';
  userEmail: string;
  userName: string;
}
/**
 * Row shape of `payment_records`, optionally extended with user identity
 * and package name by the list/detail queries (LEFT JOIN columns).
 */
export interface PaymentRecord {
  id: string;
  user_id: string;
  token_package_id?: string;
  amount: number;
  currency: string;
  // Written by this service as 'pending' | 'paid' | 'failed' | 'refunded'.
  status: string;
  payment_method?: string;
  payment_reference?: string;
  invoice_url?: string;
  paid_at?: string;
  created_at: string;
  updated_at: string;
  stripe_payment_intent_id?: string;
  stripe_payment_method_id?: string;
  stripe_customer_id?: string;
  payment_flow_type: string;
  stripe_metadata?: any;
  refund_reason?: string;
  refunded_amount?: number;
  custom_quantity?: number;
  applied_discount_percentage?: number;
  // Joined columns (present only on list/detail queries):
  first_name?: string;
  last_name?: string;
  email?: string;
  package_name?: string;
}
/** Result of a token price calculation; all amounts in major currency units. */
export interface PaymentCalculation {
  quantity: number;
  basePrice: number;           // quantity * per-token price, before discount
  discountPercentage: number;
  finalPrice: number;          // basePrice minus discount
  savings: number;             // discount amount in currency units
  packageId?: string;          // package whose pricing was applied, if any
  packageName?: string;
}
/**
 * Orchestrates token purchases end to end: price calculation, Stripe
 * payment-intent creation, webhook-driven settlement, refunds and basic
 * reporting.
 *
 * All monetary amounts are handled in major currency units (EUR);
 * StripeService converts to cents at the API boundary.
 */
export class PaymentService {
  private stripeService: StripeService;
  private tokenService: TokenService;
  constructor() {
    this.stripeService = new StripeService();
    this.tokenService = new TokenService();
  }
  /**
   * Calculate the best price for a given quantity of tokens.
   *
   * When `packageId` is supplied, that package's pricing is used directly;
   * otherwise the cheapest applicable active package (quantity tier <=
   * requested quantity) wins, falling back to a flat default price of
   * 5.00/token when no packages exist.
   *
   * MySQL DECIMAL columns arrive from mysql2 as strings, so every price
   * and percentage is coerced with Number() — otherwise callers receive
   * strings where PaymentCalculation declares numbers (the cause of the
   * earlier `.toFixed` crash in the frontend).
   */
  async calculateTokenPrice(quantity: number, packageId?: string): Promise<PaymentCalculation> {
    const connection = await pool.getConnection();
    try {
      // If a specific package is selected, use its pricing
      if (packageId) {
        const [rows] = await connection.execute(
          'SELECT * FROM token_packages WHERE id = ? AND is_active = 1',
          [packageId]
        );
        if (Array.isArray(rows) && rows.length > 0) {
          const pkg = rows[0] as any;
          const pricePerToken = Number(pkg.price_per_token);
          const discountPercentage = Number(pkg.discount_percentage);
          const basePrice = quantity * pricePerToken;
          const discountAmount = (basePrice * discountPercentage) / 100;
          const finalPrice = basePrice - discountAmount;
          return {
            quantity,
            basePrice,
            discountPercentage,
            finalPrice,
            savings: discountAmount,
            packageId: pkg.id,
            packageName: pkg.name,
          };
        }
      }
      // Find the best package for the given quantity
      const [rows] = await connection.execute(
        'SELECT * FROM token_packages WHERE is_active = 1 ORDER BY quantity ASC'
      );
      const packages = Array.isArray(rows) ? rows as any[] : [];
      if (packages.length === 0) {
        // No packages available, use base price
        const basePrice = quantity * 5.00; // Default price per token
        return {
          quantity,
          basePrice,
          discountPercentage: 0,
          finalPrice: basePrice,
          savings: 0,
        };
      }
      // Find the package that gives the best discount for this quantity
      let bestPackage = null;
      let bestPrice = quantity * 5.00; // Default base price
      let bestDiscount = 0;
      let bestSavings = 0;
      for (const pkg of packages) {
        // A package only applies when buying at least its quantity tier.
        if (quantity >= Number(pkg.quantity)) {
          const basePrice = quantity * Number(pkg.price_per_token);
          const discountAmount = (basePrice * Number(pkg.discount_percentage)) / 100;
          const finalPrice = basePrice - discountAmount;
          if (finalPrice < bestPrice) {
            bestPackage = pkg;
            bestPrice = finalPrice;
            bestDiscount = Number(pkg.discount_percentage);
            bestSavings = discountAmount;
          }
        }
      }
      return {
        quantity,
        basePrice: quantity * 5.00,
        discountPercentage: bestDiscount,
        finalPrice: bestPrice,
        savings: bestSavings,
        packageId: bestPackage?.id,
        packageName: bestPackage?.name,
      };
    } catch (error) {
      $log.error('Error calculating token price:', error);
      throw error;
    } finally {
      connection.release();
    }
  }
  /**
   * Create a payment intent for a token purchase.
   *
   * Steps: price the requested quantity, get-or-create the Stripe
   * customer, create the Stripe payment intent, then persist a 'pending'
   * payment_records row keyed by the intent id for webhook settlement.
   */
  async createPaymentIntent(request: CreatePaymentRequest): Promise<{
    paymentIntent: any;
    paymentRecord: PaymentRecord;
    calculation: PaymentCalculation;
  }> {
    const connection = await pool.getConnection();
    try {
      // Calculate pricing
      const calculation = await this.calculateTokenPrice(
        request.customQuantity || 1,
        request.packageId
      );
      // Get or create Stripe customer
      const customerData: CustomerData = {
        email: request.userEmail,
        name: request.userName,
        userId: request.userId,
      };
      const customer = await this.stripeService.getOrCreateCustomer(customerData);
      // Create payment intent
      const paymentIntentData: PaymentIntentData = {
        amount: calculation.finalPrice,
        currency: 'eur', // Default to EUR for European market
        customerId: customer.id,
        metadata: {
          userId: request.userId,
          quantity: calculation.quantity.toString(),
          packageId: calculation.packageId || '',
          packageName: calculation.packageName || '',
          discountPercentage: calculation.discountPercentage.toString(),
        },
        paymentMethodTypes: this.stripeService.getAvailablePaymentMethods(),
      };
      const paymentIntent = await this.stripeService.createPaymentIntent(paymentIntentData);
      // Create payment record (status 'pending' until the webhook settles it)
      const paymentRecordId = randomUUID();
      await connection.execute(`
        INSERT INTO payment_records (
          id, user_id, token_package_id, amount, currency, status,
          payment_method, payment_reference, stripe_payment_intent_id,
          stripe_customer_id, payment_flow_type, custom_quantity,
          applied_discount_percentage, created_at, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW())
      `, [
        paymentRecordId,
        request.userId,
        calculation.packageId || null,
        calculation.finalPrice,
        'eur',
        'pending',
        request.paymentFlowType,
        paymentIntent.id,
        paymentIntent.id,
        customer.id,
        request.paymentFlowType,
        calculation.quantity,
        calculation.discountPercentage,
      ]);
      // Get the created payment record
      const [rows] = await connection.execute(
        'SELECT * FROM payment_records WHERE id = ?',
        [paymentRecordId]
      );
      const paymentRecord = Array.isArray(rows) ? rows[0] as PaymentRecord : null;
      $log.info(`Created payment intent: ${paymentIntent.id} for user: ${request.userId}`);
      return {
        paymentIntent,
        paymentRecord: paymentRecord!,
        calculation,
      };
    } catch (error) {
      $log.error('Error creating payment intent:', error);
      throw error;
    } finally {
      connection.release();
    }
  }
  /**
   * Process a successful payment (webhook handler path).
   *
   * Marks the record 'paid' and allocates the purchased tokens.
   * Idempotent: Stripe retries webhook deliveries, so a record that is
   * already 'paid' is returned as-is without allocating tokens twice
   * (the original re-ran the allocation on every delivery).
   */
  async processSuccessfulPayment(paymentIntentId: string): Promise<PaymentRecord> {
    const connection = await pool.getConnection();
    try {
      // Get payment record
      const [rows] = await connection.execute(
        'SELECT * FROM payment_records WHERE stripe_payment_intent_id = ?',
        [paymentIntentId]
      );
      if (!Array.isArray(rows) || rows.length === 0) {
        throw new Error('Payment record not found');
      }
      const paymentRecord = rows[0] as PaymentRecord;
      // Idempotency guard against duplicate webhook deliveries.
      if (paymentRecord.status === 'paid') {
        $log.warn(`Payment ${paymentIntentId} already processed; skipping token allocation`);
        return paymentRecord;
      }
      // Update payment record status
      await connection.execute(`
        UPDATE payment_records
        SET status = 'paid', paid_at = NOW(), updated_at = NOW()
        WHERE stripe_payment_intent_id = ?
      `, [paymentIntentId]);
      // Allocate tokens to user
      const quantity = paymentRecord.custom_quantity || 1;
      const pricePerToken = paymentRecord.amount / quantity;
      await this.tokenService.addTokensToUser(
        paymentRecord.user_id,
        quantity,
        pricePerToken
      );
      // Update user usage (insert-or-increment upsert)
      await connection.execute(`
        INSERT INTO user_usage (user_id, tokens_purchased)
        VALUES (?, ?)
        ON DUPLICATE KEY UPDATE tokens_purchased = tokens_purchased + ?
      `, [paymentRecord.user_id, quantity, quantity]);
      $log.info(`Processed successful payment: ${paymentIntentId} for user: ${paymentRecord.user_id}`);
      return paymentRecord;
    } catch (error) {
      $log.error('Error processing successful payment:', error);
      throw error;
    } finally {
      connection.release();
    }
  }
  /**
   * Process a failed payment: mark the record 'failed' and return it.
   * @param reason - optional failure description (logged only)
   */
  async processFailedPayment(paymentIntentId: string, reason?: string): Promise<PaymentRecord> {
    const connection = await pool.getConnection();
    try {
      // Update payment record status
      await connection.execute(`
        UPDATE payment_records
        SET status = 'failed', updated_at = NOW()
        WHERE stripe_payment_intent_id = ?
      `, [paymentIntentId]);
      // Get updated payment record
      const [rows] = await connection.execute(
        'SELECT * FROM payment_records WHERE stripe_payment_intent_id = ?',
        [paymentIntentId]
      );
      const paymentRecord = Array.isArray(rows) ? rows[0] as PaymentRecord : null;
      $log.info(`Processed failed payment: ${paymentIntentId}, reason: ${reason}`);
      return paymentRecord!;
    } catch (error) {
      $log.error('Error processing failed payment:', error);
      throw error;
    } finally {
      connection.release();
    }
  }
  /**
   * Get a user's payment history, newest first, with joined identity and
   * package-name columns.
   */
  async getUserPaymentHistory(userId: string): Promise<PaymentRecord[]> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(`
        SELECT
          pr.*,
          u.first_name,
          u.last_name,
          u.email,
          tp.name as package_name
        FROM payment_records pr
        LEFT JOIN users u ON pr.user_id = u.id
        LEFT JOIN token_packages tp ON pr.token_package_id = tp.id
        WHERE pr.user_id = ?
        ORDER BY pr.created_at DESC
      `, [userId]);
      return Array.isArray(rows) ? rows as PaymentRecord[] : [];
    } catch (error) {
      $log.error('Error getting user payment history:', error);
      throw error;
    } finally {
      connection.release();
    }
  }
  /**
   * Get a single payment by its record id (not the Stripe intent id).
   * @returns the joined record, or null when not found
   */
  async getPaymentById(paymentId: string): Promise<PaymentRecord | null> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(`
        SELECT
          pr.*,
          u.first_name,
          u.last_name,
          u.email,
          tp.name as package_name
        FROM payment_records pr
        LEFT JOIN users u ON pr.user_id = u.id
        LEFT JOIN token_packages tp ON pr.token_package_id = tp.id
        WHERE pr.id = ?
      `, [paymentId]);
      if (Array.isArray(rows) && rows.length > 0) {
        return rows[0] as PaymentRecord;
      }
      return null;
    } catch (error) {
      $log.error('Error getting payment by ID:', error);
      throw error;
    } finally {
      connection.release();
    }
  }
  /**
   * Refund a payment via Stripe and mark the record 'refunded'.
   * @param amount - partial refund amount in major units; defaults to full
   * @param reason - Stripe refund reason ('duplicate' | 'fraudulent' |
   *                 'requested_by_customer')
   * @throws Error when the record or its Stripe intent is missing
   */
  async processRefund(paymentId: string, amount?: number, reason?: string): Promise<any> {
    const connection = await pool.getConnection();
    try {
      // Get payment record
      const paymentRecord = await this.getPaymentById(paymentId);
      if (!paymentRecord) {
        throw new Error('Payment record not found');
      }
      if (!paymentRecord.stripe_payment_intent_id) {
        throw new Error('No Stripe payment intent found for this payment');
      }
      // Create refund via Stripe
      const refund = await this.stripeService.createRefund({
        paymentIntentId: paymentRecord.stripe_payment_intent_id,
        amount: amount || paymentRecord.amount,
        reason: reason as any,
        metadata: {
          paymentId: paymentId,
          refundedBy: 'admin', // This should come from the admin user context
        },
      });
      // Update payment record
      await connection.execute(`
        UPDATE payment_records
        SET status = 'refunded', refunded_amount = ?, refund_reason = ?, updated_at = NOW()
        WHERE id = ?
      `, [amount || paymentRecord.amount, reason, paymentId]);
      $log.info(`Processed refund: ${refund.id} for payment: ${paymentId}`);
      return refund;
    } catch (error) {
      $log.error('Error processing refund:', error);
      throw error;
    } finally {
      connection.release();
    }
  }
  /**
   * Payment statistics for the last 30 days: count, revenue from paid
   * records, success rate (%) and average paid transaction value.
   */
  async getPaymentStatistics(): Promise<{
    totalPayments: number;
    totalRevenue: number;
    successRate: number;
    averageTransactionValue: number;
  }> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(`
        SELECT
          COUNT(*) as total_payments,
          SUM(CASE WHEN status = 'paid' THEN amount ELSE 0 END) as total_revenue,
          SUM(CASE WHEN status = 'paid' THEN 1 ELSE 0 END) as successful_payments,
          AVG(CASE WHEN status = 'paid' THEN amount ELSE NULL END) as avg_transaction_value
        FROM payment_records
        WHERE created_at >= DATE_SUB(NOW(), INTERVAL 30 DAY)
      `);
      const stats = Array.isArray(rows) ? rows[0] as any : {};
      const totalPayments = stats.total_payments || 0;
      const successfulPayments = stats.successful_payments || 0;
      const successRate = totalPayments > 0 ? (successfulPayments / totalPayments) * 100 : 0;
      return {
        totalPayments,
        totalRevenue: stats.total_revenue || 0,
        successRate: Math.round(successRate * 100) / 100, // 2-decimal rounding
        averageTransactionValue: stats.avg_transaction_value || 0,
      };
    } catch (error) {
      $log.error('Error getting payment statistics:', error);
      throw error;
    } finally {
      connection.release();
    }
  }
}

View File

@ -0,0 +1,321 @@
import Stripe from 'stripe';
import { $log } from '@tsed/logger';
/** Parameters for creating a Stripe PaymentIntent. */
export interface PaymentIntentData {
  amount: number;              // major currency units; converted to cents by StripeService
  currency: string;            // ISO currency code, e.g. 'eur'
  customerId?: string;         // existing Stripe customer id
  paymentMethodId?: string;    // forces manual confirmation when supplied
  metadata: Record<string, string>;
  paymentMethodTypes?: string[]; // defaults to ['card'] when omitted
  confirmationMethod?: 'automatic' | 'manual';
}
/** Identity used to create or look up a Stripe customer. */
export interface CustomerData {
  email: string;
  name: string;
  userId: string;              // stored in the Stripe customer's metadata
  metadata?: Record<string, string>;
}
/** Parameters for refunding a payment intent. */
export interface RefundData {
  paymentIntentId: string;
  amount?: number;             // major units; omitted = full refund
  reason?: 'duplicate' | 'fraudulent' | 'requested_by_customer';
  metadata?: Record<string, string>;
}
export class StripeService {
private stripe: Stripe;
constructor() {
const secretKey = process.env.STRIPE_SECRET_KEY?.trim();
if (!secretKey ||
secretKey.includes('your_secret_key_here') ||
secretKey.includes('sk_test_your_secret_key_here') ||
secretKey.includes('placeholder') ||
!secretKey.startsWith('sk_test_') && !secretKey.startsWith('sk_live_')) {
$log.warn('STRIPE_SECRET_KEY is not properly configured. Payment features will be disabled.');
// Create a mock Stripe instance for development
this.stripe = null as any;
return;
}
try {
this.stripe = new Stripe(secretKey, {
apiVersion: '2024-12-18.acacia',
typescript: true,
});
$log.info('Stripe service initialized successfully');
} catch (error) {
$log.error('Failed to initialize Stripe service:', error);
this.stripe = null as any;
}
}
/**
* Create a Stripe customer
*/
async createCustomer(customerData: CustomerData): Promise<Stripe.Customer> {
if (!this.stripe) {
throw new Error('Stripe is not configured. Please set up STRIPE_SECRET_KEY.');
}
try {
const customer = await this.stripe.customers.create({
email: customerData.email,
name: customerData.name,
metadata: {
userId: customerData.userId,
...customerData.metadata,
},
});
$log.info(`Created Stripe customer: ${customer.id} for user: ${customerData.userId}`);
return customer;
} catch (error) {
$log.error('Error creating Stripe customer:', error);
throw new Error('Failed to create customer');
}
}
/**
* Get or create a Stripe customer for a user
*/
async getOrCreateCustomer(customerData: CustomerData): Promise<Stripe.Customer> {
if (!this.stripe) {
throw new Error('Stripe is not configured. Please set up STRIPE_SECRET_KEY.');
}
try {
// First, try to find existing customer by email
const existingCustomers = await this.stripe.customers.list({
email: customerData.email,
limit: 1,
});
if (existingCustomers.data.length > 0) {
const customer = existingCustomers.data[0];
$log.info(`Found existing Stripe customer: ${customer.id} for user: ${customerData.userId}`);
return customer;
}
// Create new customer if not found
return await this.createCustomer(customerData);
} catch (error) {
$log.error('Error getting or creating Stripe customer:', error);
throw new Error('Failed to get or create customer');
}
}
/**
* Create a payment intent
*/
async createPaymentIntent(data: PaymentIntentData): Promise<Stripe.PaymentIntent> {
if (!this.stripe) {
throw new Error('Stripe is not configured. Please set up STRIPE_SECRET_KEY.');
}
try {
const paymentIntentData: Stripe.PaymentIntentCreateParams = {
amount: Math.round(data.amount * 100), // Convert to cents
currency: data.currency,
metadata: data.metadata,
payment_method_types: data.paymentMethodTypes || ['card'],
confirmation_method: data.confirmationMethod || 'automatic',
};
if (data.customerId) {
paymentIntentData.customer = data.customerId;
}
if (data.paymentMethodId) {
paymentIntentData.payment_method = data.paymentMethodId;
paymentIntentData.confirmation_method = 'manual';
}
const paymentIntent = await this.stripe.paymentIntents.create(paymentIntentData);
$log.info(`Created payment intent: ${paymentIntent.id} for amount: ${data.amount}`);
return paymentIntent;
} catch (error) {
$log.error('Error creating payment intent:', error);
throw new Error('Failed to create payment intent');
}
}
/**
* Confirm a payment intent
*/
async confirmPaymentIntent(paymentIntentId: string): Promise<Stripe.PaymentIntent> {
if (!this.stripe) {
throw new Error('Stripe is not configured. Please set up STRIPE_SECRET_KEY.');
}
try {
const paymentIntent = await this.stripe.paymentIntents.confirm(paymentIntentId);
$log.info(`Confirmed payment intent: ${paymentIntentId}, status: ${paymentIntent.status}`);
return paymentIntent;
} catch (error) {
$log.error('Error confirming payment intent:', error);
throw new Error('Failed to confirm payment intent');
}
}
/**
* Retrieve a payment intent
*/
async getPaymentIntent(paymentIntentId: string): Promise<Stripe.PaymentIntent> {
try {
const paymentIntent = await this.stripe.paymentIntents.retrieve(paymentIntentId);
$log.info(`Retrieved payment intent: ${paymentIntentId}, status: ${paymentIntent.status}`);
return paymentIntent;
} catch (error) {
$log.error('Error retrieving payment intent:', error);
throw new Error('Failed to retrieve payment intent');
}
}
/**
* Cancel a payment intent
*/
async cancelPaymentIntent(paymentIntentId: string): Promise<Stripe.PaymentIntent> {
try {
const paymentIntent = await this.stripe.paymentIntents.cancel(paymentIntentId);
$log.info(`Cancelled payment intent: ${paymentIntentId}`);
return paymentIntent;
} catch (error) {
$log.error('Error cancelling payment intent:', error);
throw new Error('Failed to cancel payment intent');
}
}
/**
* Create a refund
*/
async createRefund(data: RefundData): Promise<Stripe.Refund> {
try {
const refundData: Stripe.RefundCreateParams = {
payment_intent: data.paymentIntentId,
metadata: data.metadata,
};
if (data.amount) {
refundData.amount = Math.round(data.amount * 100); // Convert to cents
}
if (data.reason) {
refundData.reason = data.reason;
}
const refund = await this.stripe.refunds.create(refundData);
$log.info(`Created refund: ${refund.id} for payment intent: ${data.paymentIntentId}`);
return refund;
} catch (error) {
$log.error('Error creating refund:', error);
throw new Error('Failed to create refund');
}
}
/**
* Get payment methods for a customer
*/
async getPaymentMethods(customerId: string): Promise<Stripe.PaymentMethod[]> {
try {
const paymentMethods = await this.stripe.paymentMethods.list({
customer: customerId,
type: 'card',
});
$log.info(`Retrieved ${paymentMethods.data.length} payment methods for customer: ${customerId}`);
return paymentMethods.data;
} catch (error) {
$log.error('Error retrieving payment methods:', error);
throw new Error('Failed to retrieve payment methods');
}
}
/**
* Create a setup intent for saving payment methods
*/
async createSetupIntent(customerId: string, metadata?: Record<string, string>): Promise<Stripe.SetupIntent> {
try {
const setupIntent = await this.stripe.setupIntents.create({
customer: customerId,
payment_method_types: ['card'],
metadata: metadata || {},
});
$log.info(`Created setup intent: ${setupIntent.id} for customer: ${customerId}`);
return setupIntent;
} catch (error) {
$log.error('Error creating setup intent:', error);
throw new Error('Failed to create setup intent');
}
}
/**
* Verify webhook signature
*/
verifyWebhookSignature(payload: string | Buffer, signature: string): Stripe.Event {
try {
const webhookSecret = process.env.STRIPE_WEBHOOK_SECRET;
if (!webhookSecret) {
throw new Error('STRIPE_WEBHOOK_SECRET environment variable is required');
}
const event = this.stripe.webhooks.constructEvent(payload, signature, webhookSecret);
$log.info(`Verified webhook event: ${event.type}, id: ${event.id}`);
return event;
} catch (error) {
$log.error('Error verifying webhook signature:', error);
throw new Error('Invalid webhook signature');
}
}
/**
 * Return the Stripe payment-method types offered for a given country.
 *
 * 'card' is always available; NL additionally gets iDEAL, and the listed
 * euro-zone countries additionally get SEPA direct debit.
 *
 * @param countryCode Optional ISO 3166-1 alpha-2 country code.
 * @returns A fresh array of payment-method type identifiers.
 */
getAvailablePaymentMethods(countryCode?: string): string[] {
  switch (countryCode) {
    case 'NL':
      return ['card', 'ideal'];
    case 'DE':
    case 'FR':
    case 'ES':
    case 'IT':
      return ['card', 'sepa_debit'];
    default:
      return ['card'];
  }
}
/**
 * Static configuration for the iDEAL payment method: the list of Dutch banks
 * a customer can pick from, as (bank id, display name) pairs.
 *
 * @returns An object with a `banks` array of `{ id, name }` entries.
 */
getIdealConfiguration(): { banks: Array<{ id: string; name: string }> } {
  const bankEntries: Array<[string, string]> = [
    ['abn_amro', 'ABN AMRO'],
    ['asn_bank', 'ASN Bank'],
    ['bunq', 'bunq'],
    ['handelsbanken', 'Handelsbanken'],
    ['ing', 'ING'],
    ['knab', 'Knab'],
    ['rabobank', 'Rabobank'],
    ['regiobank', 'RegioBank'],
    ['revolut', 'Revolut'],
    ['sns_bank', 'SNS Bank'],
    ['triodos_bank', 'Triodos Bank'],
    ['van_lanschot', 'Van Lanschot'],
  ];
  return { banks: bankEntries.map(([id, name]) => ({ id, name })) };
}
}

View File

@ -0,0 +1,581 @@
import { pool } from '../config/database.js';
import { $log } from '@tsed/logger';
import { randomUUID } from 'crypto';
/**
 * A purchased (or admin-granted) batch of interview tokens owned by one user.
 * Mirrors a row of the `interview_tokens` table; date columns are serialized
 * as strings.
 */
export interface InterviewToken {
  id: string;
  user_id: string;
  // 'single' when quantity === 1, otherwise 'bulk' (set at insert time).
  token_type: 'single' | 'bulk';
  quantity: number;
  // NOTE(review): DECIMAL columns may be returned as strings by the MySQL
  // driver despite this `number` type — confirm driver decimal handling.
  price_per_token: number;
  total_price: number;
  tokens_used: number;
  // NOTE(review): never written by the service's INSERT/UPDATE statements —
  // presumably a DB default or generated column; confirm against the schema.
  tokens_remaining: number;
  status: 'active' | 'exhausted' | 'expired';
  expires_at?: string;
  purchased_at: string;
  created_at: string;
  updated_at: string;
}

/**
 * A purchasable token bundle definition. Mirrors a row of `token_packages`.
 */
export interface TokenPackage {
  id: string;
  name: string;
  description: string;
  // Minimum quantity at which this package's pricing applies.
  quantity: number;
  price_per_token: number;
  total_price: number;
  // Percentage (0-100) taken off the base price.
  discount_percentage: number;
  // Marketing flag for highlighting the package in the UI.
  is_popular: boolean;
  is_active: boolean;
  created_at: string;
  updated_at: string;
}

/** Payload for creating a token package; optional flags default server-side. */
export interface CreateTokenPackageRequest {
  name: string;
  description: string;
  quantity: number;
  price_per_token: number;
  total_price: number;
  discount_percentage?: number;
  is_popular?: boolean;
  is_active?: boolean;
}

/** Partial payload for updating a token package; omitted fields are unchanged. */
export interface UpdateTokenPackageRequest {
  name?: string;
  description?: string;
  quantity?: number;
  price_per_token?: number;
  total_price?: number;
  discount_percentage?: number;
  is_popular?: boolean;
  is_active?: boolean;
}
/**
 * Data-access service for interview-token packages and per-user token
 * balances, backed by MySQL through the shared `pool`.
 *
 * Conventions used throughout:
 *  - Each method checks out a dedicated connection and releases it in a
 *    `finally` block, so connections cannot leak on error.
 *  - Errors are logged and rethrown unchanged for callers to handle.
 *  - NOTE(review): monetary DECIMAL columns may come back as strings from
 *    the MySQL driver; callers should coerce before arithmetic/`toFixed`.
 */
export class TokenService {
  // Token Packages

  /** Return all token packages (active and inactive), newest first. */
  async getTokenPackages(): Promise<TokenPackage[]> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(
        'SELECT * FROM token_packages ORDER BY created_at DESC'
      );
      return Array.isArray(rows) ? rows as TokenPackage[] : [];
    } catch (error) {
      $log.error('Error getting token packages:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /** Look up a single package by id; null when not found. */
  async getTokenPackageById(id: string): Promise<TokenPackage | null> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(
        'SELECT * FROM token_packages WHERE id = ?',
        [id]
      );
      if (Array.isArray(rows) && rows.length > 0) {
        return rows[0] as TokenPackage;
      }
      return null;
    } catch (error) {
      $log.error('Error getting token package by ID:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Insert a new token package and return it re-read from the database.
   * Defaults: discount 0, not popular, active unless explicitly false.
   */
  async createTokenPackage(packageData: CreateTokenPackageRequest): Promise<TokenPackage> {
    const connection = await pool.getConnection();
    try {
      const packageId = randomUUID();
      await connection.execute(`
        INSERT INTO token_packages (
          id, name, description, quantity, price_per_token,
          total_price, discount_percentage, is_popular, is_active,
          created_at, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW())
      `, [
        packageId,
        packageData.name,
        packageData.description,
        packageData.quantity,
        packageData.price_per_token,
        packageData.total_price,
        packageData.discount_percentage || 0,
        packageData.is_popular || false,
        packageData.is_active !== false
      ]);
      // Re-read so DB-populated columns (timestamps) are included.
      return await this.getTokenPackageById(packageId) as TokenPackage;
    } catch (error) {
      $log.error('Error creating token package:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Partially update a package; only provided fields are written.
   * Throws when no updatable field is present; returns the updated row.
   * NOTE(review): name/description/quantity/price fields use truthiness
   * guards, so explicit 0 (or '') values are silently ignored — only the
   * boolean/discount fields use `!== undefined`. Confirm this is intended.
   */
  async updateTokenPackage(id: string, packageData: UpdateTokenPackageRequest): Promise<TokenPackage | null> {
    const connection = await pool.getConnection();
    try {
      const updateFields = [];
      const values = [];
      if (packageData.name) {
        updateFields.push('name = ?');
        values.push(packageData.name);
      }
      if (packageData.description) {
        updateFields.push('description = ?');
        values.push(packageData.description);
      }
      if (packageData.quantity) {
        updateFields.push('quantity = ?');
        values.push(packageData.quantity);
      }
      if (packageData.price_per_token) {
        updateFields.push('price_per_token = ?');
        values.push(packageData.price_per_token);
      }
      if (packageData.total_price) {
        updateFields.push('total_price = ?');
        values.push(packageData.total_price);
      }
      if (packageData.discount_percentage !== undefined) {
        updateFields.push('discount_percentage = ?');
        values.push(packageData.discount_percentage);
      }
      if (packageData.is_popular !== undefined) {
        updateFields.push('is_popular = ?');
        values.push(packageData.is_popular);
      }
      if (packageData.is_active !== undefined) {
        updateFields.push('is_active = ?');
        values.push(packageData.is_active);
      }
      if (updateFields.length === 0) {
        throw new Error('No fields to update');
      }
      updateFields.push('updated_at = NOW()');
      values.push(id);
      // Column names come from the fixed strings above, never from input,
      // so this dynamic SQL is injection-safe.
      await connection.execute(
        `UPDATE token_packages SET ${updateFields.join(', ')} WHERE id = ?`,
        values
      );
      return await this.getTokenPackageById(id);
    } catch (error) {
      $log.error('Error updating token package:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /** Hard-delete a package row (no-op when the id does not exist). */
  async deleteTokenPackage(id: string): Promise<void> {
    const connection = await pool.getConnection();
    try {
      await connection.execute(
        'DELETE FROM token_packages WHERE id = ?',
        [id]
      );
    } catch (error) {
      $log.error('Error deleting token package:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Flip a package's is_active flag and report the new value.
   * NOTE(review): the not-found guard only fires when `rows` IS an array of
   * length 0; a non-array result slips through to the dereference below.
   * The intended condition is likely `!Array.isArray(rows) || rows.length === 0`.
   */
  async toggleTokenPackageStatus(id: string): Promise<{ success: boolean; new_status: boolean }> {
    const connection = await pool.getConnection();
    try {
      // Get current status
      const [rows] = await connection.execute(
        'SELECT is_active FROM token_packages WHERE id = ?',
        [id]
      );
      if (Array.isArray(rows) && rows.length === 0) {
        throw new Error('Token package not found');
      }
      const currentStatus = Array.isArray(rows) ? rows[0] : rows;
      const newStatus = !currentStatus.is_active;
      await connection.execute(
        'UPDATE token_packages SET is_active = ?, updated_at = NOW() WHERE id = ?',
        [newStatus, id]
      );
      return { success: true, new_status: newStatus };
    } catch (error) {
      $log.error('Error toggling token package status:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  // Interview Tokens

  /** All token batches owned by a user, newest first (any status). */
  async getTokensByUserId(userId: string): Promise<InterviewToken[]> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(
        'SELECT * FROM interview_tokens WHERE user_id = ? ORDER BY created_at DESC',
        [userId]
      );
      return Array.isArray(rows) ? rows as InterviewToken[] : [];
    } catch (error) {
      $log.error('Error getting tokens by user ID:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /** Look up a single token batch by id; null when not found. */
  async getTokenById(id: string): Promise<InterviewToken | null> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(
        'SELECT * FROM interview_tokens WHERE id = ?',
        [id]
      );
      if (Array.isArray(rows) && rows.length > 0) {
        return rows[0] as InterviewToken;
      }
      return null;
    } catch (error) {
      $log.error('Error getting token by ID:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Grant tokens to a user without a payment record (admin flow).
   * Creates an 'active' token batch ('single' iff quantity === 1) and bumps
   * the user's aggregate tokens_purchased counter via INSERT..ON DUPLICATE KEY.
   * NOTE(review): tokens_remaining/expires_at are not set here — presumably
   * handled by DB defaults or generated columns; confirm schema.
   */
  async addTokensToUser(userId: string, quantity: number, pricePerToken: number): Promise<InterviewToken> {
    const connection = await pool.getConnection();
    try {
      const totalPrice = quantity * pricePerToken;
      const tokenId = randomUUID();
      // Create token record
      await connection.execute(`
        INSERT INTO interview_tokens (
          id, user_id, token_type, quantity, price_per_token,
          total_price, status, purchased_at, created_at, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, 'active', NOW(), NOW(), NOW())
      `, [
        tokenId,
        userId,
        quantity === 1 ? 'single' : 'bulk',
        quantity,
        pricePerToken,
        totalPrice
      ]);
      // No payment record needed for admin-granted tokens
      // Update user usage
      await connection.execute(`
        INSERT INTO user_usage (user_id, tokens_purchased)
        VALUES (?, ?)
        ON DUPLICATE KEY UPDATE tokens_purchased = tokens_purchased + ?
      `, [userId, quantity, quantity]);
      return await this.getTokenById(tokenId) as InterviewToken;
    } catch (error) {
      $log.error('Error adding tokens to user:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Consume one use from a token batch. Returns false (without throwing)
   * when the batch is missing, inactive, or has no remaining uses; marks the
   * batch 'exhausted' once tokens_used reaches quantity.
   * NOTE(review): the read-check-update sequence is not wrapped in a
   * transaction, so concurrent calls could over-consume — confirm acceptable.
   */
  async useToken(tokenId: string): Promise<boolean> {
    const connection = await pool.getConnection();
    try {
      // Get current token status
      const token = await this.getTokenById(tokenId);
      if (!token || token.status !== 'active') {
        return false;
      }
      // Check if token has remaining uses
      if (token.tokens_remaining <= 0) {
        return false;
      }
      // Update token usage
      const newUsedCount = token.tokens_used + 1;
      const newStatus = newUsedCount >= token.quantity ? 'exhausted' : 'active';
      await connection.execute(`
        UPDATE interview_tokens
        SET tokens_used = ?, status = ?, updated_at = NOW()
        WHERE id = ?
      `, [newUsedCount, newStatus, tokenId]);
      // Update user usage
      await connection.execute(`
        INSERT INTO user_usage (user_id, tokens_used)
        VALUES (?, 1)
        ON DUPLICATE KEY UPDATE tokens_used = tokens_used + 1
      `, [token.user_id]);
      return true;
    } catch (error) {
      $log.error('Error using token:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Aggregate a single user's token totals. Only rows with status 'active'
   * are summed, so fully exhausted batches drop out of total_purchased and
   * total_used as well — the utilization percentage reflects active batches
   * only.
   */
  async getUserTokenSummary(userId: string): Promise<{
    total_purchased: number;
    total_used: number;
    total_available: number;
    utilization_percentage: number;
  }> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(`
        SELECT
          COALESCE(SUM(quantity), 0) as total_purchased,
          COALESCE(SUM(tokens_used), 0) as total_used,
          COALESCE(SUM(tokens_remaining), 0) as total_available
        FROM interview_tokens
        WHERE user_id = ? AND status = 'active'
      `, [userId]);
      const data = Array.isArray(rows) ? rows[0] : rows;
      const totalPurchased = data?.total_purchased || 0;
      const totalUsed = data?.total_used || 0;
      const totalAvailable = data?.total_available || 0;
      const utilizationPercentage = totalPurchased > 0
        ? Math.round((totalUsed / totalPurchased) * 100)
        : 0;
      return {
        total_purchased: totalPurchased,
        total_used: totalUsed,
        total_available: totalAvailable,
        utilization_percentage: utilizationPercentage
      };
    } catch (error) {
      $log.error('Error getting user token summary:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Per-user token totals for every non-deleted user (admin dashboard),
   * computed in SQL with a LEFT JOIN so users without tokens appear with
   * zeros. As above, only 'active' batches are counted.
   */
  async getAllUserTokenSummaries(): Promise<Array<{
    user_id: string;
    first_name: string;
    last_name: string;
    email: string;
    total_purchased: number;
    total_used: number;
    total_available: number;
    utilization_percentage: number;
  }>> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(`
        SELECT
          u.id as user_id,
          u.first_name,
          u.last_name,
          u.email,
          COALESCE(SUM(it.quantity), 0) as total_purchased,
          COALESCE(SUM(it.tokens_used), 0) as total_used,
          COALESCE(SUM(it.tokens_remaining), 0) as total_available,
          CASE
            WHEN SUM(it.quantity) > 0 THEN ROUND((SUM(it.tokens_used) / SUM(it.quantity)) * 100, 2)
            ELSE 0
          END as utilization_percentage
        FROM users u
        LEFT JOIN interview_tokens it ON u.id = it.user_id AND it.status = 'active'
        WHERE u.deleted_at IS NULL
        GROUP BY u.id, u.first_name, u.last_name, u.email
        ORDER BY u.created_at DESC
      `);
      return Array.isArray(rows) ? rows : [];
    } catch (error) {
      $log.error('Error getting all user token summaries:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Calculate custom token price based on quantity.
   *
   * Compares each active package whose minimum quantity is met, applies its
   * per-token price and discount, and keeps the cheapest result; falls back
   * to a hard-coded 5.00/token base price when no package applies.
   * NOTE(review): `bestPackage` is tracked but never used or returned.
   */
  async calculateCustomTokenPrice(quantity: number): Promise<{
    basePrice: number;
    discountPercentage: number;
    finalPrice: number;
    savings: number;
  }> {
    const connection = await pool.getConnection();
    try {
      // Get all active packages
      const [rows] = await connection.execute(
        'SELECT * FROM token_packages WHERE is_active = 1 ORDER BY quantity ASC'
      );
      const packages = Array.isArray(rows) ? rows as TokenPackage[] : [];
      if (packages.length === 0) {
        // No packages available, use base price
        const basePrice = quantity * 5.00; // Default price per token
        return {
          basePrice,
          discountPercentage: 0,
          finalPrice: basePrice,
          savings: 0,
        };
      }
      // Find the package that gives the best discount for this quantity
      let bestPackage = null;
      let bestPrice = quantity * 5.00; // Default base price
      let bestDiscount = 0;
      let bestSavings = 0;
      for (const pkg of packages) {
        if (quantity >= pkg.quantity) {
          const basePrice = quantity * pkg.price_per_token;
          const discountAmount = (basePrice * pkg.discount_percentage) / 100;
          const finalPrice = basePrice - discountAmount;
          if (finalPrice < bestPrice) {
            bestPackage = pkg;
            bestPrice = finalPrice;
            bestDiscount = pkg.discount_percentage;
            bestSavings = discountAmount;
          }
        }
      }
      return {
        basePrice: quantity * 5.00,
        discountPercentage: bestDiscount,
        finalPrice: bestPrice,
        savings: bestSavings,
      };
    } catch (error) {
      $log.error('Error calculating custom token price:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Get the best package for a given quantity.
   *
   * "Best" here means the active package with the largest minimum quantity
   * that does not exceed the requested quantity; null when none qualifies.
   */
  async getBestPackageForQuantity(quantity: number): Promise<TokenPackage | null> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(
        'SELECT * FROM token_packages WHERE is_active = 1 AND quantity <= ? ORDER BY quantity DESC LIMIT 1',
        [quantity]
      );
      if (Array.isArray(rows) && rows.length > 0) {
        return rows[0] as TokenPackage;
      }
      return null;
    } catch (error) {
      $log.error('Error getting best package for quantity:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Add tokens to user account (updated for payment-based tokens).
   *
   * Same insert/usage-bump sequence as addTokensToUser; `paymentId` is only
   * used for the audit log line here — no payment_records row is written by
   * this method.
   */
  async addTokensToUserFromPayment(
    userId: string,
    quantity: number,
    pricePerToken: number,
    paymentId: string
  ): Promise<InterviewToken> {
    const connection = await pool.getConnection();
    try {
      const totalPrice = quantity * pricePerToken;
      const tokenId = randomUUID();
      // Create token record
      await connection.execute(`
        INSERT INTO interview_tokens (
          id, user_id, token_type, quantity, price_per_token,
          total_price, status, purchased_at, created_at, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, 'active', NOW(), NOW(), NOW())
      `, [
        tokenId,
        userId,
        quantity === 1 ? 'single' : 'bulk',
        quantity,
        pricePerToken,
        totalPrice
      ]);
      // Update user usage
      await connection.execute(`
        INSERT INTO user_usage (user_id, tokens_purchased)
        VALUES (?, ?)
        ON DUPLICATE KEY UPDATE tokens_purchased = tokens_purchased + ?
      `, [userId, quantity, quantity]);
      $log.info(`Added ${quantity} tokens to user ${userId} from payment ${paymentId}`);
      return await this.getTokenById(tokenId) as InterviewToken;
    } catch (error) {
      $log.error('Error adding tokens to user from payment:', error);
      throw error;
    } finally {
      connection.release();
    }
  }
}

View File

@ -0,0 +1,333 @@
import { pool } from '../config/database.js';
import { User, CreateUserRequest, UpdateUserRequest, UserResponse } from '../models/User.js';
import { $log } from '@tsed/logger';
import bcrypt from 'bcryptjs';
import { randomUUID } from 'crypto';
/**
 * Data-access service for user accounts (MySQL via the shared `pool`).
 * All lookups exclude soft-deleted rows (`deleted_at IS NULL`); passwords
 * are hashed with bcrypt (cost 10). Each method releases its connection in
 * `finally` and logs-then-rethrows errors.
 */
export class UserService {
  /**
   * Create a user after checking email uniqueness among non-deleted rows.
   * Note: email_verified_at is set to NOW() at insert time, i.e. new
   * accounts are created already email-verified.
   * @throws Error('User with this email already exists') on duplicate email.
   * @throws Error('Failed to create user') if the re-read after insert fails.
   */
  async createUser(userData: CreateUserRequest): Promise<UserResponse> {
    const connection = await pool.getConnection();
    try {
      // Check if user already exists
      const [existingUsers] = await connection.execute(
        'SELECT id FROM users WHERE email = ? AND deleted_at IS NULL',
        [userData.email]
      );
      if (Array.isArray(existingUsers) && existingUsers.length > 0) {
        throw new Error('User with this email already exists');
      }
      // Hash password
      const password_hash = await bcrypt.hash(userData.password, 10);
      // Generate UUID for user ID
      const userId = randomUUID();
      // Insert user
      // NOTE(review): `result` is unused; the row is re-read below instead.
      const [result] = await connection.execute(
        `INSERT INTO users (id, email, password_hash, first_name, last_name, role, company_name, is_active, email_verified_at, created_at, updated_at)
         VALUES (?, ?, ?, ?, ?, ?, ?, ?, NOW(), NOW(), NOW())`,
        [
          userId,
          userData.email,
          password_hash,
          userData.first_name,
          userData.last_name,
          userData.role || 'recruiter',
          userData.company_name || null,
          true
        ]
      );
      // Get the created user
      const user = await this.getUserById(userId);
      if (!user) {
        throw new Error('Failed to create user');
      }
      return this.mapUserToResponse(user);
    } catch (error) {
      $log.error('Error creating user:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /** Find a non-deleted user by email; null when not found. */
  async getUserByEmail(email: string): Promise<User | null> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(
        'SELECT * FROM users WHERE email = ? AND deleted_at IS NULL',
        [email]
      );
      if (Array.isArray(rows) && rows.length > 0) {
        return rows[0] as User;
      }
      return null;
    } catch (error) {
      $log.error('Error getting user by email:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /** Find a non-deleted user by id; null when not found. */
  async getUserById(id: string): Promise<User | null> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(
        'SELECT * FROM users WHERE id = ? AND deleted_at IS NULL',
        [id]
      );
      if (Array.isArray(rows) && rows.length > 0) {
        return rows[0] as User;
      }
      return null;
    } catch (error) {
      $log.error('Error getting user by ID:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Partially update a user's profile; only provided fields are written.
   * NOTE(review): first_name/last_name use truthiness guards, so empty
   * strings are silently ignored; company_name/avatar_url/is_active use
   * `!== undefined` and can be cleared. Confirm this asymmetry is intended.
   * @throws Error('No fields to update') when nothing is provided.
   */
  async updateUser(id: string, userData: UpdateUserRequest): Promise<UserResponse | null> {
    const connection = await pool.getConnection();
    try {
      const updateFields = [];
      const values = [];
      if (userData.first_name) {
        updateFields.push('first_name = ?');
        values.push(userData.first_name);
      }
      if (userData.last_name) {
        updateFields.push('last_name = ?');
        values.push(userData.last_name);
      }
      if (userData.company_name !== undefined) {
        updateFields.push('company_name = ?');
        values.push(userData.company_name);
      }
      if (userData.avatar_url !== undefined) {
        updateFields.push('avatar_url = ?');
        values.push(userData.avatar_url);
      }
      if (userData.is_active !== undefined) {
        updateFields.push('is_active = ?');
        values.push(userData.is_active);
      }
      if (updateFields.length === 0) {
        throw new Error('No fields to update');
      }
      updateFields.push('updated_at = NOW()');
      values.push(id);
      // Column names are fixed strings above, never user input, so this
      // dynamic SQL is injection-safe.
      await connection.execute(
        `UPDATE users SET ${updateFields.join(', ')} WHERE id = ? AND deleted_at IS NULL`,
        values
      );
      const user = await this.getUserById(id);
      return user ? this.mapUserToResponse(user) : null;
    } catch (error) {
      $log.error('Error updating user:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /** Stamp last_login_at = NOW() for a non-deleted user. */
  async updateLastLogin(id: string): Promise<void> {
    const connection = await pool.getConnection();
    try {
      await connection.execute(
        'UPDATE users SET last_login_at = NOW(), updated_at = NOW() WHERE id = ? AND deleted_at IS NULL',
        [id]
      );
    } catch (error) {
      $log.error('Error updating last login:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /** Compare a plaintext password against the user's stored bcrypt hash. */
  async verifyPassword(user: User, password: string): Promise<boolean> {
    return await bcrypt.compare(password, user.password_hash);
  }

  /** Hash and store a new password for a non-deleted user. */
  async changePassword(id: string, newPassword: string): Promise<void> {
    const connection = await pool.getConnection();
    try {
      const password_hash = await bcrypt.hash(newPassword, 10);
      await connection.execute(
        'UPDATE users SET password_hash = ?, updated_at = NOW() WHERE id = ? AND deleted_at IS NULL',
        [password_hash, id]
      );
    } catch (error) {
      $log.error('Error changing password:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /** Soft-delete: set deleted_at and deactivate; the row is retained. */
  async softDeleteUser(id: string): Promise<void> {
    const connection = await pool.getConnection();
    try {
      await connection.execute(
        'UPDATE users SET deleted_at = NOW(), is_active = FALSE, updated_at = NOW() WHERE id = ? AND deleted_at IS NULL',
        [id]
      );
    } catch (error) {
      $log.error('Error soft deleting user:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Get user payment history.
   *
   * Payment records newest first, each joined with the purchased package's
   * name (null for payments without a package).
   */
  async getUserPaymentHistory(userId: string): Promise<any[]> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(`
        SELECT
          pr.*,
          tp.name as package_name
        FROM payment_records pr
        LEFT JOIN token_packages tp ON pr.token_package_id = tp.id
        WHERE pr.user_id = ?
        ORDER BY pr.created_at DESC
      `, [userId]);
      return Array.isArray(rows) ? rows : [];
    } catch (error) {
      $log.error('Error getting user payment history:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Get user by Stripe customer ID.
   */
  async getUserByStripeCustomerId(stripeCustomerId: string): Promise<User | null> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(
        'SELECT * FROM users WHERE stripe_customer_id = ? AND deleted_at IS NULL',
        [stripeCustomerId]
      );
      if (Array.isArray(rows) && rows.length > 0) {
        return rows[0] as User;
      }
      return null;
    } catch (error) {
      $log.error('Error getting user by Stripe customer ID:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Update user's Stripe customer ID.
   * Note: unlike the other writes, this one does not filter on deleted_at.
   */
  async updateUserStripeCustomerId(userId: string, stripeCustomerId: string): Promise<void> {
    const connection = await pool.getConnection();
    try {
      await connection.execute(
        'UPDATE users SET stripe_customer_id = ?, updated_at = NOW() WHERE id = ?',
        [stripeCustomerId, userId]
      );
      $log.info(`Updated Stripe customer ID for user: ${userId}`);
    } catch (error) {
      $log.error('Error updating user Stripe customer ID:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /**
   * Get user payment statistics.
   *
   * Aggregates over payment_records; only rows with status 'paid' count
   * toward spend/average/last-payment. NOTE(review): SUM/AVG over DECIMAL
   * may be returned as strings by the driver despite the number types here
   * — confirm and coerce at the call site if needed.
   */
  async getUserPaymentStatistics(userId: string): Promise<{
    totalSpent: number;
    totalPayments: number;
    averagePaymentValue: number;
    lastPaymentDate: string | null;
  }> {
    const connection = await pool.getConnection();
    try {
      const [rows] = await connection.execute(`
        SELECT
          COUNT(*) as total_payments,
          SUM(CASE WHEN status = 'paid' THEN amount ELSE 0 END) as total_spent,
          AVG(CASE WHEN status = 'paid' THEN amount ELSE NULL END) as avg_payment_value,
          MAX(CASE WHEN status = 'paid' THEN paid_at ELSE NULL END) as last_payment_date
        FROM payment_records
        WHERE user_id = ?
      `, [userId]);
      const stats = Array.isArray(rows) ? rows[0] as any : {};
      return {
        totalSpent: stats.total_spent || 0,
        totalPayments: stats.total_payments || 0,
        averagePaymentValue: stats.avg_payment_value || 0,
        lastPaymentDate: stats.last_payment_date || null,
      };
    } catch (error) {
      $log.error('Error getting user payment statistics:', error);
      throw error;
    } finally {
      connection.release();
    }
  }

  /** Strip sensitive columns (password_hash, deleted_at, …) for API output. */
  private mapUserToResponse(user: User): UserResponse {
    return {
      id: user.id,
      email: user.email,
      first_name: user.first_name,
      last_name: user.last_name,
      role: user.role,
      company_name: user.company_name,
      avatar_url: user.avatar_url,
      is_active: user.is_active,
      last_login_at: user.last_login_at,
      email_verified_at: user.email_verified_at,
      created_at: user.created_at,
      updated_at: user.updated_at
    };
  }
}

171
backend/src/types/admin.ts Normal file
View File

@ -0,0 +1,171 @@
// Admin-specific types.
// Conventions: rows mirror DB tables; date columns are serialized strings;
// fields suffixed first_name/last_name/email on joined shapes come from a
// LEFT JOIN against `users` and may be absent.

/** Platform-wide counters for the admin dashboard. */
export interface SystemStatistics {
  total_users: number;
  active_users: number;
  total_jobs: number;
  total_interviews: number;
  total_tokens_purchased: number;
  total_tokens_used: number;
  total_revenue: number;
  // Timestamp at which this snapshot was computed.
  generated_at: string;
}

/** A user row as shown in admin listings (no password hash). */
export interface UserWithStats {
  id: string;
  email: string;
  first_name: string;
  last_name: string;
  role: 'admin' | 'recruiter';
  company_name?: string;
  avatar_url?: string;
  is_active: boolean;
  last_login_at?: string;
  email_verified_at?: string;
  created_at: string;
  updated_at: string;
}

/** A job posting joined with its owning recruiter's identity fields. */
export interface JobWithUser {
  id: string;
  user_id: string;
  title: string;
  description: string;
  requirements: string;
  skills_required: string[];
  location: string;
  employment_type: string;
  experience_level: string;
  salary_min?: number;
  salary_max?: number;
  currency: string;
  status: string;
  // NOTE(review): loosely typed JSON columns — schema not visible here.
  evaluation_criteria: any;
  interview_questions: any;
  application_deadline?: string;
  created_at: string;
  updated_at: string;
  // Joined owner fields (absent if the join produced no row).
  first_name?: string;
  last_name?: string;
  email?: string;
  company_name?: string;
}

/** Aggregated token totals for one user (admin token overview). */
export interface UserTokenSummary {
  user_id: string;
  first_name: string;
  last_name: string;
  email: string;
  total_purchased: number;
  total_used: number;
  total_available: number;
  utilization_percentage: number;
}

/** A purchasable token bundle row (`token_packages`). */
export interface TokenPackage {
  id: string;
  name: string;
  description: string;
  quantity: number;
  price_per_token: number;
  total_price: number;
  discount_percentage: number;
  is_popular: boolean;
  is_active: boolean;
  created_at: string;
  updated_at: string;
}

/** Admin request to grant tokens directly to a user. */
export interface AddTokensRequest {
  user_id: string;
  quantity: number;
  price_per_token: number;
  total_price: number;
}

/** Admin request to create a user (role is mandatory here, unlike signup). */
export interface CreateUserRequest {
  email: string;
  password: string;
  first_name: string;
  last_name: string;
  role: 'admin' | 'recruiter';
  company_name?: string;
}

/** Partial admin update of a user; omitted fields are unchanged. */
export interface UpdateUserRequest {
  first_name?: string;
  last_name?: string;
  email?: string;
  role?: 'admin' | 'recruiter';
  company_name?: string;
  avatar_url?: string;
  is_active?: boolean;
}

/** Payload for creating a token package; optional flags default server-side. */
export interface CreateTokenPackageRequest {
  name: string;
  description: string;
  quantity: number;
  price_per_token: number;
  total_price: number;
  discount_percentage?: number;
  is_popular?: boolean;
  is_active?: boolean;
}

/** Partial update of a token package; omitted fields are unchanged. */
export interface UpdateTokenPackageRequest {
  name?: string;
  description?: string;
  quantity?: number;
  price_per_token?: number;
  total_price?: number;
  discount_percentage?: number;
  is_popular?: boolean;
  is_active?: boolean;
}

/** An interview joined with candidate identity and the job title. */
export interface InterviewWithDetails {
  id: string;
  user_id: string;
  candidate_id: string;
  job_id: string;
  // Access token the candidate uses to open the interview.
  token: string;
  status: string;
  started_at?: string;
  completed_at?: string;
  duration_minutes: number;
  // NOTE(review): loosely typed JSON columns — schema not visible here.
  ai_questions: any;
  candidate_responses: any;
  ai_evaluation: any;
  overall_score?: number;
  technical_score?: number;
  communication_score?: number;
  culture_fit_score?: number;
  ai_feedback?: string;
  created_at: string;
  updated_at: string;
  // Joined candidate/job fields.
  first_name?: string;
  last_name?: string;
  email?: string;
  job_title?: string;
}

/** A payment row joined with payer identity and package name. */
export interface PaymentRecord {
  id: string;
  user_id: string;
  interview_token_id?: string;
  token_package_id?: string;
  amount: number;
  currency: string;
  status: string;
  payment_method?: string;
  payment_reference?: string;
  invoice_url?: string;
  paid_at?: string;
  created_at: string;
  updated_at: string;
  // Joined fields.
  first_name?: string;
  last_name?: string;
  email?: string;
  package_name?: string;
}

51
backend/src/types/auth.ts Normal file
View File

@ -0,0 +1,51 @@
// Authentication types that will be preserved in JavaScript compilation.
// These are RUNTIME values (plain objects mapping field name -> constructor),
// not TypeScript types: unlike interfaces they survive transpilation and can
// be inspected at runtime (e.g. for request-shape validation).
// NOTE(review): no consumer is visible in this file — confirm these are
// actually used before relying on their field lists.

/** Shape of a login request body. */
export const LoginRequestSchema = {
  email: String,
  password: String
};

/** Shape of a self-service registration request body. */
export const RegisterRequestSchema = {
  email: String,
  password: String,
  first_name: String,
  last_name: String,
  company_name: String
};

/** Shape of an admin create-user request body (adds `role`). */
export const CreateUserRequestSchema = {
  email: String,
  password: String,
  first_name: String,
  last_name: String,
  company_name: String,
  role: String
};

/** Shape of a profile-update request body. */
export const UpdateUserRequestSchema = {
  first_name: String,
  last_name: String,
  company_name: String,
  avatar_url: String,
  is_active: Boolean
};

/** Shape of the sanitized user object returned by the API. */
export const UserResponseSchema = {
  id: String,
  email: String,
  first_name: String,
  last_name: String,
  role: String,
  company_name: String,
  avatar_url: String,
  is_active: Boolean,
  last_login_at: Date,
  email_verified_at: Date,
  created_at: Date,
  updated_at: Date
};

/** Shape of a successful login response: JWT plus the user payload. */
export const LoginResponseSchema = {
  token: String,
  user: UserResponseSchema
};

View File

@ -0,0 +1,29 @@
{
"compilerOptions": {
"module": "NodeNext",
"target": "ESNext",
"experimentalDecorators": true,
"emitDecoratorMetadata": true,
"moduleResolution": "NodeNext",
"downlevelIteration": false,
"isolatedModules": false,
"suppressImplicitAnyIndexErrors": false,
"noImplicitAny": true,
"strictNullChecks": true,
"noUnusedLocals": false,
"noUnusedParameters": false,
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"useDefineForClassFields": false,
"importHelpers": true,
"resolveJsonModule": true,
"newLine": "LF",
"skipLibCheck": true,
"lib": ["ESNext", "esnext.asynciterable"],
"declaration": false,
"noResolve": false,
"preserveConstEnums": true,
"sourceMap": true,
"noEmit": true
}
}

13
backend/tsconfig.json Normal file
View File

@ -0,0 +1,13 @@
{
"extends": "./tsconfig.base.json",
"compilerOptions": {
"baseUrl": ".",
"noEmit": true
},
"include": [],
"references": [
{
"path": "./tsconfig.node.json"
}
]
}

View File

@ -0,0 +1,20 @@
{
"extends": "./tsconfig.base.json",
"compilerOptions": {
"baseUrl": "."
},
"include": [
"src/**/*"
],
"exclude": [
"src/**/*.spec.ts",
"dist",
"node_modules",
"**/helpers/*Fixture.ts",
"**/__mock__/**",
"coverage"
],
"linterOptions": {
"exclude": []
}
}

View File

@ -0,0 +1 @@

95
backend/views/index.ejs Normal file
View File

@ -0,0 +1,95 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Swagger UI</title>
<link href="https://fonts.googleapis.com/css?family=Open+Sans:400,700|Source+Code+Pro:300,600|Titillium+Web:400,600,700"
rel="stylesheet">
<link rel="stylesheet" type="text/css" href="./swagger-ui.css">
<link rel="icon" type="image/png" href="./favicon-32x32.png" sizes="32x32"/>
<link rel="icon" type="image/png" href="./favicon-16x16.png" sizes="16x16"/>
<style>
<% if (!showExplorer) { %>
.swagger-ui .topbar .download-url-wrapper {
display: none
}
<% } %>
</style>
<% if (cssPath) { %>
<link rel="stylesheet" type="text/css" href="<%= cssPath %>">
<% } %>
</head>
<body>
<div id="swagger-ui"></div>
<script src="./swagger-ui-bundle.js"></script>
<script src="./swagger-ui-standalone-preset.js"></script>
<script>
const initialOptions = <%- JSON.stringify(swaggerOptions) %>;
const currentUrl = window.origin + "<%- url %>";
const urls = <%- JSON.stringify(urls) %>
.map(function (o) {
if (!o.url.match(/^https?:/)) {
const url = window.origin + o.url;
return {
name: o.name,
url: url,
selected: url === currentUrl
}
}
return o;
})
.sort(function (a, b) {
return a.selected ? -1 : 1
});
const SwaggerUIBuilder = {
config: Object.assign({
urls: urls,
dom_id: '#swagger-ui',
deepLinking: true,
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIStandalonePreset
],
plugins: [
SwaggerUIBundle.plugins.DownloadUrl
],
layout: "StandaloneLayout",
oauth2RedirectUrl: currentUrl.replace('swagger.json', 'oauth2-redirect.html')
}, initialOptions),
/**
*
*/
init: function () {
this.ui = SwaggerUIBundle(this.config);
if (this.config.oauth) {
this.ui.initOAuth(this.config.oauth);
}
if (this.config.authorize) {
this.ui.authActions.authorize(this.config.authorize);
}
window.ui = this.ui;
const event = new Event('swagger.init');
document.dispatchEvent(event);
}
}
</script>
<% if (jsPath) { %>
<script src="<%= jsPath %>"></script>
<% } %>
<script>
window.onload = function () {
SwaggerUIBuilder.init();
}
</script>
</body>
</html>

100
backend/views/swagger.ejs Normal file
View File

@ -0,0 +1,100 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width,initial-scale=1.0">
<link rel="icon" href="<%= BASE_URL %>favicon.ico">
<title>client</title>
<link href="https://fonts.googleapis.com/css?family=Source+Sans+Pro:300,400,600,700" rel="stylesheet" />
<style>
body, h1 {
font-family: Source Sans Pro,sans-serif;
}
body:after {
content: "";
background-image: radial-gradient(#eef2f5 0,#f4f7f8 40%,transparent 75%);
position: absolute;
top: 0;
right: 0;
width: 60%;
height: 100%;
z-index: 1;
}
.container {
position: fixed;
top: 0;
left: 0;
width: 100vw;
height: 100vh;
z-index: 2;
display: flex;
align-items: center;
justify-content: center;
}
.container-logo {
display: flex;
align-items: center;
justify-content: center;
margin-bottom: 60px;
}
.container-logo img {
max-width: 150px;
border-radius: 50%;
}
ul {
list-style: none;
margin: 0;
padding: 0;
display: flex;
align-items: center;
justify-content: center;
margin-bottom: 40px;
}
ul li a {
padding-left: 1rem;
padding-right: 1rem;
padding-top: .25rem;
padding-bottom: .25rem;
margin-left: 10px;
margin-right: 10px;
border: 2px solid #504747;
min-width: 110px;
border-radius: 10px;
text-align: center;
display: block;
border-radius: 1rem;
color: #504747;
text-decoration: none;
transition: all ease-in-out 0.5s;
}
ul li a:hover {
color: #14a5c2;
border-color: #14a5c2;
}
ul li a span {
margin: .25rem;
display: block;
}
</style>
</head>
<body>
<div class="container">
<div>
<div class="container-logo">
<img src="https://tsed.dev/tsed-og.png" alt="Ts.ED">
</div>
<ul>
<% docs.forEach((doc) => { %>
<li><a href="<%= doc.path %>"><span>OpenSpec <%= doc.specVersion %></span></a></li>
<% }) %>
</ul>
</div>
</div>
<!-- built files will be auto injected -->
</body>
</html>

24
candidat-integrated.sln Normal file
View File

@ -0,0 +1,24 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.5.2.0
MinimumVisualStudioVersion = 10.0.40219.1
# Solution containing a single project, AISApp (the ASP.NET chatbot service).
# First GUID = C# project-type GUID; second GUID = this project's unique id,
# referenced again in the configuration mappings below.
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AISApp", "AISApp\AISApp.csproj", "{1671248C-43AD-2D25-4F0F-918991BEF94A}"
EndProject
Global
	GlobalSection(SolutionConfigurationPlatforms) = preSolution
		Debug|Any CPU = Debug|Any CPU
		Release|Any CPU = Release|Any CPU
	EndGlobalSection
# Maps each solution configuration to the project configuration to build.
	GlobalSection(ProjectConfigurationPlatforms) = postSolution
		{1671248C-43AD-2D25-4F0F-918991BEF94A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
		{1671248C-43AD-2D25-4F0F-918991BEF94A}.Debug|Any CPU.Build.0 = Debug|Any CPU
		{1671248C-43AD-2D25-4F0F-918991BEF94A}.Release|Any CPU.ActiveCfg = Release|Any CPU
		{1671248C-43AD-2D25-4F0F-918991BEF94A}.Release|Any CPU.Build.0 = Release|Any CPU
	EndGlobalSection
	GlobalSection(SolutionProperties) = preSolution
		HideSolutionNode = FALSE
	EndGlobalSection
	GlobalSection(ExtensibilityGlobals) = postSolution
		SolutionGuid = {14FC4F28-B89B-49C6-BD21-D4B1F80AE49C}
	EndGlobalSection
EndGlobal

15
database/Dockerfile Normal file
View File

@ -0,0 +1,15 @@
# Use MySQL 8.0 as base image
FROM mysql:8.0
# Rely on runtime environment variables provided by docker-compose
# (MYSQL_ROOT_PASSWORD, MYSQL_DATABASE, MYSQL_USER, MYSQL_PASSWORD)
# Copy only the deploy_dump.sql file with deterministic name for init order
# (scripts in /docker-entrypoint-initdb.d run in lexical order on first start
# of an empty data dir; the 00_ prefix makes this dump run before any others)
COPY deploy_dump.sql /docker-entrypoint-initdb.d/00_deploy_dump.sql
# Expose MySQL port
EXPOSE 3306
# Health check
# NOTE(review): `mysqladmin ping` runs here without credentials; on some MySQL
# builds it reports the server alive even when auth fails -- confirm intended.
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
CMD mysqladmin ping -h localhost || exit 1

View File

@ -0,0 +1,55 @@
-- mysqldump schema script: table `audit_logs` in `candidb_main`.
-- Append-only log of actions on resources, with optional JSON value snapshots
-- and client metadata (IP address, user agent).
-- /*!NNNNN ... */ are MySQL conditional comments: executed only by servers at
-- or above version NNNNN, ignored as comments by other SQL tools.
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
-- Save session settings and relax uniqueness/FK checks while loading; the
-- mirror block at the end of the script restores them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `audit_logs`
--
DROP TABLE IF EXISTS `audit_logs`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `audit_logs` (
-- Server-generated UUID primary key (MySQL 8 expression default).
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`action` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`resource_type` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`resource_id` varchar(36) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
-- JSON snapshots -- presumably the resource state before/after the action;
-- verify against the code that writes audit rows.
`old_values` json DEFAULT NULL,
`new_values` json DEFAULT NULL,
`ip_address` varchar(45) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`user_agent` text COLLATE utf8mb4_unicode_ci,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `idx_user_action` (`user_id`,`action`),
KEY `idx_resource` (`resource_type`,`resource_id`),
KEY `idx_created_at` (`created_at`),
-- Deleting a user keeps their audit rows; only the reference is nulled.
CONSTRAINT `audit_logs_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE SET NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Restore the session settings saved at the top.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:32

View File

@ -0,0 +1,53 @@
-- mysqldump schema script: table `candidate_responses` in `candidb_main`.
-- One row per answered interview question: the candidate's text (and optional
-- audio URL) plus the AI's score/feedback for that answer.
-- /*!NNNNN ... */ are MySQL conditional comments: executed only by servers at
-- or above version NNNNN, ignored as comments by other SQL tools.
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
-- Save session settings and relax uniqueness/FK checks while loading; the
-- mirror block at the end of the script restores them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `candidate_responses`
--
DROP TABLE IF EXISTS `candidate_responses`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `candidate_responses` (
-- Server-generated UUID primary key (MySQL 8 expression default).
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`interview_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`question_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`response_text` text COLLATE utf8mb4_unicode_ci NOT NULL,
`response_audio_url` varchar(500) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`ai_score` decimal(5,2) DEFAULT NULL,
`ai_feedback` text COLLATE utf8mb4_unicode_ci,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
-- At most one response per question within a given interview.
UNIQUE KEY `unique_response_per_question` (`interview_id`,`question_id`),
KEY `question_id` (`question_id`),
-- Responses are deleted together with their interview or question.
CONSTRAINT `candidate_responses_ibfk_1` FOREIGN KEY (`interview_id`) REFERENCES `interviews` (`id`) ON DELETE CASCADE,
CONSTRAINT `candidate_responses_ibfk_2` FOREIGN KEY (`question_id`) REFERENCES `interview_questions` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Restore the session settings saved at the top.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:32

View File

@ -0,0 +1,63 @@
-- mysqldump schema script: table `candidates` in `candidb_main`.
-- One row per applicant per job, tracking contact details, application
-- artifacts (resume, cover letter) and pipeline status.
-- /*!NNNNN ... */ are MySQL conditional comments: executed only by servers at
-- or above version NNNNN, ignored as comments by other SQL tools.
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
-- Save session settings and relax uniqueness/FK checks while loading; the
-- mirror block at the end of the script restores them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `candidates`
--
DROP TABLE IF EXISTS `candidates`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `candidates` (
-- Server-generated UUID primary key (MySQL 8 expression default).
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`job_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`first_name` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`last_name` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`phone` varchar(20) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`resume_url` varchar(500) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`cover_letter` text COLLATE utf8mb4_unicode_ci,
`source` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
-- Pipeline state of the application.
`status` enum('applied','interviewing','evaluated','hired','rejected') COLLATE utf8mb4_unicode_ci DEFAULT 'applied',
`applied_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`last_activity_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-- Soft-delete marker; presumably application queries filter on it -- verify.
`deleted_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`),
-- An email address may apply to each job only once.
UNIQUE KEY `unique_candidate_per_job` (`job_id`,`email`),
KEY `idx_user_job` (`user_id`,`job_id`),
KEY `idx_status` (`status`),
KEY `idx_candidates_user_job_status` (`user_id`,`job_id`,`status`),
CONSTRAINT `candidates_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE,
CONSTRAINT `candidates_ibfk_2` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Restore the session settings saved at the top.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:32

View File

@ -0,0 +1,52 @@
-- mysqldump schema script: table `conversation_messages` in `candidb_main`.
-- Chat transcript rows for an interview: each message is from either the
-- candidate or the AI, with optional structured payload in `message_data`.
-- /*!NNNNN ... */ are MySQL conditional comments: executed only by servers at
-- or above version NNNNN, ignored as comments by other SQL tools.
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
-- Save session settings and relax uniqueness/FK checks while loading; the
-- mirror block at the end of the script restores them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `conversation_messages`
--
DROP TABLE IF EXISTS `conversation_messages`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `conversation_messages` (
-- Server-generated UUID primary key (MySQL 8 expression default).
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`interview_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
-- NOTE(review): link_id is indexed but has no FOREIGN KEY (unlike
-- interview_id) -- confirm whether it should reference job_links.id.
`link_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`sender` enum('candidate','ai') COLLATE utf8mb4_unicode_ci NOT NULL,
`message` text COLLATE utf8mb4_unicode_ci NOT NULL,
`message_data` json DEFAULT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `idx_interview_id` (`interview_id`),
KEY `idx_link_id` (`link_id`),
KEY `idx_created_at` (`created_at`),
-- Messages are deleted together with their interview.
CONSTRAINT `conversation_messages_ibfk_1` FOREIGN KEY (`interview_id`) REFERENCES `interviews` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Restore the session settings saved at the top.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:31

View File

@ -0,0 +1,52 @@
-- mysqldump schema script: table `interview_events` in `candidb_main`.
-- Generic event stream per job/link (typed by event_type, payload in JSON).
-- /*!NNNNN ... */ are MySQL conditional comments: executed only by servers at
-- or above version NNNNN, ignored as comments by other SQL tools.
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
-- Save session settings and relax uniqueness/FK checks while loading; the
-- mirror block at the end of the script restores them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `interview_events`
--
DROP TABLE IF EXISTS `interview_events`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `interview_events` (
-- NOTE(review): id has no DEFAULT (uuid()) here, so the application must
-- supply the key itself -- confirm this asymmetry with other tables is intended.
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`job_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
-- NOTE(review): link_id is indexed but carries no FOREIGN KEY -- verify.
`link_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`event_type` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`event_data` json DEFAULT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `idx_job_id` (`job_id`),
KEY `idx_link_id` (`link_id`),
KEY `idx_event_type` (`event_type`),
KEY `idx_created_at` (`created_at`),
-- Events are deleted together with their job.
CONSTRAINT `interview_events_ibfk_1` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Restore the session settings saved at the top.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:32

View File

@ -0,0 +1,52 @@
-- mysqldump schema script: table `interview_questions` in `candidb_main`.
-- Ordered questions for an interview, typed and difficulty-rated, with an
-- optional expected answer and JSON evaluation criteria.
-- /*!NNNNN ... */ are MySQL conditional comments: executed only by servers at
-- or above version NNNNN, ignored as comments by other SQL tools.
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
-- Save session settings and relax uniqueness/FK checks while loading; the
-- mirror block at the end of the script restores them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `interview_questions`
--
DROP TABLE IF EXISTS `interview_questions`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `interview_questions` (
-- Server-generated UUID primary key (MySQL 8 expression default).
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`interview_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`question_text` text COLLATE utf8mb4_unicode_ci NOT NULL,
`question_type` enum('technical','behavioral','situational','culture_fit') COLLATE utf8mb4_unicode_ci NOT NULL,
`difficulty_level` enum('easy','medium','hard') COLLATE utf8mb4_unicode_ci DEFAULT 'medium',
`expected_answer` text COLLATE utf8mb4_unicode_ci,
`evaluation_criteria` json DEFAULT NULL,
-- Position of the question within its interview; paired with interview_id in
-- the composite index below for ordered retrieval.
`order_index` int NOT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `idx_interview_order` (`interview_id`,`order_index`),
-- Questions are deleted together with their interview.
CONSTRAINT `interview_questions_ibfk_1` FOREIGN KEY (`interview_id`) REFERENCES `interviews` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Restore the session settings saved at the top.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:32

View File

@ -0,0 +1,79 @@
-- mysqldump schema script: table `interview_tokens` in `candidb_main`,
-- plus its AFTER INSERT trigger. Records token purchases (single or bulk),
-- tracks consumption, and derives the remaining balance.
-- /*!NNNNN ... */ are MySQL conditional comments: executed only by servers at
-- or above version NNNNN, ignored as comments by other SQL tools.
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
-- Save session settings and relax uniqueness/FK checks while loading; the
-- mirror block at the end of the script restores them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `interview_tokens`
--
DROP TABLE IF EXISTS `interview_tokens`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `interview_tokens` (
-- Server-generated UUID primary key (MySQL 8 expression default).
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`token_type` enum('single','bulk') COLLATE utf8mb4_unicode_ci NOT NULL,
`quantity` int NOT NULL DEFAULT '1',
`price_per_token` decimal(10,2) NOT NULL,
`total_price` decimal(10,2) NOT NULL,
`tokens_used` int DEFAULT '0',
-- STORED generated column: always quantity - tokens_used; not writable.
`tokens_remaining` int GENERATED ALWAYS AS ((`quantity` - `tokens_used`)) STORED,
`status` enum('active','exhausted','expired') COLLATE utf8mb4_unicode_ci DEFAULT 'active',
`expires_at` timestamp NULL DEFAULT NULL,
`purchased_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `idx_user_status` (`user_id`,`status`),
KEY `idx_expires_at` (`expires_at`),
KEY `idx_interview_tokens_user_active` (`user_id`,`status`),
CONSTRAINT `interview_tokens_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE,
-- Invariants: quantity > 0 and 0 <= tokens_used <= quantity.
CONSTRAINT `chk_interview_tokens_quantity_positive` CHECK ((`quantity` > 0)),
CONSTRAINT `chk_interview_tokens_used_valid` CHECK (((`tokens_used` >= 0) and (`tokens_used` <= `quantity`)))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
-- DELIMITER is a mysql-client directive so the multi-statement trigger body
-- can itself contain ';'.
DELIMITER ;;
-- Bookkeeping trigger: each purchase upserts the buyer's aggregate row in
-- user_usage, adding the purchased quantity to tokens_purchased.
/*!50003 CREATE*/ /*!50017 DEFINER=`root`@`localhost`*/ /*!50003 TRIGGER `update_token_usage_after_purchase` AFTER INSERT ON `interview_tokens` FOR EACH ROW BEGIN
INSERT INTO user_usage (user_id, tokens_purchased)
VALUES (NEW.user_id, NEW.quantity)
ON DUPLICATE KEY UPDATE tokens_purchased = tokens_purchased + NEW.quantity;
END */;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
-- Restore the session settings saved at the top.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:32

View File

@ -0,0 +1,96 @@
-- mysqldump schema script: table `interviews` in `candidb_main`, plus its
-- AFTER UPDATE trigger. One row per interview session: token-based access,
-- lifecycle status, JSON question/response/evaluation payloads and AI scores.
-- /*!NNNNN ... */ are MySQL conditional comments: executed only by servers at
-- or above version NNNNN, ignored as comments by other SQL tools.
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
-- Save session settings and relax uniqueness/FK checks while loading; the
-- mirror block at the end of the script restores them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `interviews`
--
DROP TABLE IF EXISTS `interviews`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `interviews` (
-- Server-generated UUID primary key (MySQL 8 expression default).
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`candidate_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`job_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
-- Globally unique access token (see UNIQUE KEY below); presumably used in
-- candidate-facing interview URLs -- verify against the application routes.
`token` varchar(64) COLLATE utf8mb4_unicode_ci NOT NULL,
`status` enum('scheduled','in_progress','completed','abandoned') COLLATE utf8mb4_unicode_ci DEFAULT 'scheduled',
`started_at` timestamp NULL DEFAULT NULL,
`completed_at` timestamp NULL DEFAULT NULL,
`duration_minutes` int DEFAULT '0',
`ai_questions` json DEFAULT NULL,
`candidate_responses` json DEFAULT NULL,
`ai_evaluation` json DEFAULT NULL,
`overall_score` decimal(5,2) DEFAULT NULL,
`technical_score` decimal(5,2) DEFAULT NULL,
`communication_score` decimal(5,2) DEFAULT NULL,
`culture_fit_score` decimal(5,2) DEFAULT NULL,
`ai_feedback` text COLLATE utf8mb4_unicode_ci,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
UNIQUE KEY `token` (`token`),
KEY `candidate_id` (`candidate_id`),
KEY `job_id` (`job_id`),
KEY `idx_token` (`token`),
KEY `idx_user_candidate` (`user_id`,`candidate_id`),
KEY `idx_status` (`status`),
KEY `idx_interviews_user_status` (`user_id`,`status`),
KEY `idx_interviews_token_status` (`token`,`status`),
CONSTRAINT `interviews_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE,
CONSTRAINT `interviews_ibfk_2` FOREIGN KEY (`candidate_id`) REFERENCES `candidates` (`id`) ON DELETE CASCADE,
CONSTRAINT `interviews_ibfk_3` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`) ON DELETE CASCADE,
-- Each of the four score columns must be NULL or within [0, 100].
CONSTRAINT `chk_scores_valid` CHECK ((((`overall_score` is null) or ((`overall_score` >= 0) and (`overall_score` <= 100))) and ((`technical_score` is null) or ((`technical_score` >= 0) and (`technical_score` <= 100))) and ((`communication_score` is null) or ((`communication_score` >= 0) and (`communication_score` <= 100))) and ((`culture_fit_score` is null) or ((`culture_fit_score` >= 0) and (`culture_fit_score` <= 100)))))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
-- DELIMITER is a mysql-client directive so the multi-statement trigger body
-- can itself contain ';'.
DELIMITER ;;
-- Bookkeeping trigger: only fires work on the transition INTO 'completed';
-- upserts the owner's user_usage row (+1 interview completed, +1 token used).
/*!50003 CREATE*/ /*!50017 DEFINER=`root`@`localhost`*/ /*!50003 TRIGGER `update_interview_usage_after_complete` AFTER UPDATE ON `interviews` FOR EACH ROW BEGIN
IF OLD.status != 'completed' AND NEW.status = 'completed' THEN
-- Update user usage
INSERT INTO user_usage (user_id, interviews_completed, tokens_used)
VALUES (NEW.user_id, 1, 1)
ON DUPLICATE KEY UPDATE
interviews_completed = interviews_completed + 1,
tokens_used = tokens_used + 1;
END IF;
END */;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
-- Restore the session settings saved at the top.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:31

View File

@ -0,0 +1,52 @@
-- mysqldump schema script: table `job_links` in `candidb_main`.
-- Shareable per-job links identified by a unique URL slug, with token
-- availability/usage counters.
-- /*!NNNNN ... */ are MySQL conditional comments: executed only by servers at
-- or above version NNNNN, ignored as comments by other SQL tools.
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
-- Save session settings and relax uniqueness/FK checks while loading; the
-- mirror block at the end of the script restores them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `job_links`
--
DROP TABLE IF EXISTS `job_links`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `job_links` (
-- NOTE(review): id has no DEFAULT (uuid()); the application must supply it.
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`job_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`url_slug` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`tokens_available` int DEFAULT '0',
`tokens_used` int DEFAULT '0',
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
UNIQUE KEY `url_slug` (`url_slug`),
KEY `idx_job_id` (`job_id`),
-- NOTE(review): idx_url_slug duplicates the UNIQUE KEY on url_slug above and
-- is likely redundant.
KEY `idx_url_slug` (`url_slug`),
-- Links are deleted together with their job.
CONSTRAINT `job_links_ibfk_1` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Restore the session settings saved at the top.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:32

View File

@ -0,0 +1,85 @@
-- mysqldump schema script: table `jobs` in `candidb_main`, plus its
-- AFTER INSERT trigger. Job postings with description, requirements, salary
-- band, lifecycle status and JSON interview configuration.
-- /*!NNNNN ... */ are MySQL conditional comments: executed only by servers at
-- or above version NNNNN, ignored as comments by other SQL tools.
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
-- Save session settings and relax uniqueness/FK checks while loading; the
-- mirror block at the end of the script restores them.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `jobs`
--
DROP TABLE IF EXISTS `jobs`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `jobs` (
-- Server-generated UUID primary key (MySQL 8 expression default).
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`title` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`description` text COLLATE utf8mb4_unicode_ci NOT NULL,
`requirements` text COLLATE utf8mb4_unicode_ci NOT NULL,
`skills_required` json DEFAULT NULL,
`location` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`employment_type` enum('full_time','part_time','contract','internship') COLLATE utf8mb4_unicode_ci DEFAULT 'full_time',
`experience_level` enum('entry','mid','senior','lead','executive') COLLATE utf8mb4_unicode_ci DEFAULT 'mid',
`salary_min` decimal(10,2) DEFAULT NULL,
`salary_max` decimal(10,2) DEFAULT NULL,
`currency` varchar(3) COLLATE utf8mb4_unicode_ci DEFAULT 'USD',
`status` enum('draft','active','paused','closed') COLLATE utf8mb4_unicode_ci DEFAULT 'draft',
`evaluation_criteria` json DEFAULT NULL,
`interview_questions` json DEFAULT NULL,
`interview_style` enum('personal','balanced','technical') COLLATE utf8mb4_unicode_ci DEFAULT 'balanced',
`application_deadline` timestamp NULL DEFAULT NULL,
`icon` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT 'briefcase',
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
-- Soft-delete marker; presumably application queries filter on it -- verify.
`deleted_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_user_status` (`user_id`,`status`),
KEY `idx_created_at` (`created_at`),
-- Composite index with DESC created_at (MySQL 8 descending index),
-- presumably for newest-first job listings per user.
KEY `idx_jobs_user_status_created` (`user_id`,`status`,`created_at` DESC),
CONSTRAINT `jobs_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
-- DELIMITER is a mysql-client directive so the multi-statement trigger body
-- can itself contain ';'.
DELIMITER ;;
-- Bookkeeping trigger: each new job upserts the owner's aggregate row in
-- user_usage, incrementing jobs_created.
/*!50003 CREATE*/ /*!50017 DEFINER=`root`@`localhost`*/ /*!50003 TRIGGER `update_job_usage_after_insert` AFTER INSERT ON `jobs` FOR EACH ROW BEGIN
INSERT INTO user_usage (user_id, jobs_created)
VALUES (NEW.user_id, 1)
ON DUPLICATE KEY UPDATE jobs_created = jobs_created + 1;
END */;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
-- Restore the session settings saved at the top.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:31

View File

@ -0,0 +1,59 @@
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `payment_records`
--
DROP TABLE IF EXISTS `payment_records`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
-- Payment ledger: one row per purchase attempt by a user for a token
-- package, with lifecycle status and an external payment reference.
-- NOTE(review): the stored procedure add_tokens_to_user inserts into an
-- `interview_token_id` column and writes NULL into `token_package_id`;
-- this schema has neither that column nor a nullable token_package_id —
-- confirm which schema version is actually deployed.
CREATE TABLE `payment_records` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`token_package_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`amount` decimal(10,2) NOT NULL,
`currency` varchar(3) COLLATE utf8mb4_unicode_ci DEFAULT 'USD',
`status` enum('pending','paid','failed','refunded','cancelled') COLLATE utf8mb4_unicode_ci DEFAULT 'pending',
`payment_method` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`payment_reference` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`invoice_url` varchar(500) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`paid_at` timestamp NULL DEFAULT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `token_package_id` (`token_package_id`),
KEY `idx_user_status` (`user_id`,`status`),
KEY `idx_payment_reference` (`payment_reference`),
KEY `idx_payment_records_user_created` (`user_id`,`created_at` DESC),
CONSTRAINT `payment_records_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE,
CONSTRAINT `payment_records_ibfk_2` FOREIGN KEY (`token_package_id`) REFERENCES `token_packages` (`id`) ON DELETE RESTRICT
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:31

View File

@ -0,0 +1,631 @@
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Dumping events for database 'candidb_main'
--
--
-- Dumping routines for database 'candidb_main'
--
/*!50003 DROP FUNCTION IF EXISTS `can_create_job` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` FUNCTION `can_create_job`(user_uuid VARCHAR(36)) RETURNS tinyint(1)
READS SQL DATA
DETERMINISTIC
BEGIN
-- TRUE while the user is below the hard cap of 100 jobs.
-- A user with no user_usage row counts as zero jobs created
-- (the scalar subquery yields NULL, coalesced to 0).
RETURN COALESCE(
    (SELECT jobs_created FROM user_usage WHERE user_id = user_uuid),
    0
) < 100;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP FUNCTION IF EXISTS `get_all_users` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` FUNCTION `get_all_users`() RETURNS json
READS SQL DATA
DETERMINISTIC
BEGIN
-- Returns every non-deleted user as a JSON array of objects, newest first.
-- BUG FIX: JSON_ARRAYAGG has no ORDER BY clause of its own, and an outer
-- ORDER BY on a query collapsed to a single aggregated row does not order
-- the array elements. Pre-sort the rows in a derived table so the
-- aggregation consumes them in created_at DESC order.
DECLARE result JSON;
SELECT JSON_ARRAYAGG(
    JSON_OBJECT(
        'id', u.id,
        'email', u.email,
        'first_name', u.first_name,
        'last_name', u.last_name,
        'role', u.role,
        'company_name', u.company_name,
        'is_active', u.is_active,
        'last_login_at', u.last_login_at,
        'email_verified_at', u.email_verified_at,
        'created_at', u.created_at
    )
) INTO result
FROM (
    SELECT id, email, first_name, last_name, role, company_name,
           is_active, last_login_at, email_verified_at, created_at
    FROM users
    WHERE deleted_at IS NULL
    ORDER BY created_at DESC
) AS u;
RETURN result;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP FUNCTION IF EXISTS `get_token_usage_summary` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` FUNCTION `get_token_usage_summary`(user_uuid VARCHAR(36)) RETURNS json
READS SQL DATA
DETERMINISTIC
BEGIN
-- Summarises a user's interview tokens as JSON: total_purchased and
-- total_used over all token rows, total_available over active + unexpired
-- rows, and a used/purchased utilization percentage (0 when nothing
-- was purchased).
DECLARE total_purchased INT DEFAULT 0;
DECLARE total_used INT DEFAULT 0;
DECLARE total_available INT DEFAULT 0;
DECLARE result JSON;
-- PERF: one pass over interview_tokens instead of the original three
-- separate scans; the availability filter moves into a conditional SUM.
SELECT
    COALESCE(SUM(quantity), 0),
    COALESCE(SUM(tokens_used), 0),
    COALESCE(SUM(CASE
        WHEN status = 'active' AND (expires_at IS NULL OR expires_at > NOW())
        THEN tokens_remaining
        ELSE 0
    END), 0)
INTO total_purchased, total_used, total_available
FROM interview_tokens
WHERE user_id = user_uuid;
SET result = JSON_OBJECT(
    'total_purchased', total_purchased,
    'total_used', total_used,
    'total_available', total_available,
    'utilization_percentage', CASE
        WHEN total_purchased > 0 THEN ROUND((total_used / total_purchased) * 100, 2)
        ELSE 0
    END
);
RETURN result;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP FUNCTION IF EXISTS `get_user_statistics` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` FUNCTION `get_user_statistics`(user_uuid VARCHAR(36)) RETURNS json
READS SQL DATA
DETERMINISTIC
BEGIN
-- Combined per-user statistics: usage counters from user_usage plus the
-- token summary produced by get_token_usage_summary().
DECLARE result JSON;
DECLARE user_usage_data JSON;
DECLARE token_summary JSON;
SELECT JSON_OBJECT(
    'jobs_created', COALESCE(jobs_created, 0),
    'interviews_completed', COALESCE(interviews_completed, 0),
    'tokens_purchased', COALESCE(tokens_purchased, 0),
    'tokens_used', COALESCE(tokens_used, 0)
) INTO user_usage_data
FROM user_usage
WHERE user_id = user_uuid;
-- ROBUSTNESS FIX: a user without a user_usage row left user_usage_data
-- NULL, producing "usage": null in the result. Fall back to an all-zero
-- object so callers can always read the counters.
IF user_usage_data IS NULL THEN
    SET user_usage_data = JSON_OBJECT(
        'jobs_created', 0,
        'interviews_completed', 0,
        'tokens_purchased', 0,
        'tokens_used', 0
    );
END IF;
SET token_summary = get_token_usage_summary(user_uuid);
SET result = JSON_OBJECT(
    'usage', user_usage_data,
    'tokens', token_summary
);
RETURN result;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP FUNCTION IF EXISTS `has_available_tokens` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` FUNCTION `has_available_tokens`(user_uuid VARCHAR(36)) RETURNS tinyint(1)
READS SQL DATA
DETERMINISTIC
BEGIN
-- TRUE when the user still has any remaining tokens on rows that are
-- 'active' and either never expire or have not yet expired.
RETURN COALESCE(
    (SELECT SUM(tokens_remaining)
       FROM interview_tokens
      WHERE user_id = user_uuid
        AND status = 'active'
        AND (expires_at IS NULL OR expires_at > NOW())),
    0
) > 0;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP FUNCTION IF EXISTS `is_admin` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` FUNCTION `is_admin`(user_uuid VARCHAR(36)) RETURNS tinyint(1)
READS SQL DATA
DETERMINISTIC
BEGIN
-- TRUE only for an existing, active user whose role is 'admin'.
DECLARE user_role VARCHAR(20) DEFAULT NULL;
SELECT role INTO user_role
FROM users
WHERE id = user_uuid AND is_active = TRUE;
-- SECURITY FIX: when no row matches, user_role stays NULL, and the old
-- `RETURN user_role = 'admin'` returned NULL rather than FALSE. Callers
-- gate with `IF NOT is_admin(...)`; NOT NULL is NULL (falsy), so the
-- check fell through to the privileged ELSE branch for unknown or
-- inactive user ids. COALESCE pins the result to a real boolean.
RETURN COALESCE(user_role = 'admin', FALSE);
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP PROCEDURE IF EXISTS `add_tokens_to_user` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` PROCEDURE `add_tokens_to_user`(
IN p_user_id VARCHAR(36),
IN p_quantity INT,
IN p_price_per_token DECIMAL(10,2),
IN p_admin_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255)
)
BEGIN
-- Admin-only: grants p_quantity interview tokens to a user and records a
-- matching 'paid' payment row flagged as admin_granted.
DECLARE v_total_price DECIMAL(10,2);
DECLARE v_token_id VARCHAR(36);
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
    ROLLBACK;
    SET p_success = FALSE;
    SET p_message = 'An error occurred while adding tokens';
END;
IF NOT is_admin(p_admin_id) THEN
    SET p_success = FALSE;
    SET p_message = 'Access denied: Admin privileges required';
ELSEIF NOT EXISTS (SELECT 1 FROM users WHERE id = p_user_id AND deleted_at IS NULL) THEN
    SET p_success = FALSE;
    SET p_message = 'User not found';
ELSE
    SET v_total_price = p_quantity * p_price_per_token;
    SET v_token_id = UUID();
    -- BUG FIX: the EXIT handler issues ROLLBACK, but no transaction was
    -- ever started, so a failure between the two INSERTs left a token row
    -- without its payment record. Wrap the pair so they commit atomically.
    START TRANSACTION;
    INSERT INTO interview_tokens (
        id, user_id, token_type, quantity, price_per_token,
        total_price, status, purchased_at
    ) VALUES (
        v_token_id, p_user_id,
        CASE WHEN p_quantity = 1 THEN 'single' ELSE 'bulk' END,
        p_quantity, p_price_per_token, v_total_price,
        'active', NOW()
    );
    -- NOTE(review): this INSERT targets an `interview_token_id` column and
    -- writes NULL into `token_package_id`; the payment_records schema shown
    -- in this dump has neither that column nor a nullable token_package_id.
    -- Confirm which schema version is deployed before relying on this path.
    INSERT INTO payment_records (
        user_id, interview_token_id, token_package_id,
        amount, status, payment_method, payment_reference
    ) VALUES (
        p_user_id, v_token_id, NULL, v_total_price,
        'paid', 'admin_granted', CONCAT('ADMIN_', p_admin_id, '_', NOW())
    );
    COMMIT;
    SET p_success = TRUE;
    SET p_message = CONCAT('Successfully added ', p_quantity, ' tokens to user');
END IF;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP PROCEDURE IF EXISTS `change_user_password` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` PROCEDURE `change_user_password`(
IN p_user_id VARCHAR(36),
IN p_new_password_hash VARCHAR(255),
IN p_admin_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255)
)
BEGIN
-- Admin-only: overwrites a user's password hash. Hashing happens in the
-- caller; this routine stores p_new_password_hash verbatim.
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
-- NOTE(review): ROLLBACK without a preceding START TRANSACTION is a
-- no-op; harmless here since only a single UPDATE is issued.
ROLLBACK;
SET p_success = FALSE;
SET p_message = 'An error occurred while changing password';
END;
-- Only admins may change other users' passwords.
IF NOT is_admin(p_admin_id) THEN
SET p_success = FALSE;
SET p_message = 'Access denied: Admin privileges required';
ELSE
-- Target must exist and not be soft-deleted.
IF NOT EXISTS (SELECT 1 FROM users WHERE id = p_user_id AND deleted_at IS NULL) THEN
SET p_success = FALSE;
SET p_message = 'User not found';
ELSE
-- Store the new hash and touch updated_at.
UPDATE users SET
password_hash = p_new_password_hash,
updated_at = NOW()
WHERE id = p_user_id;
SET p_success = TRUE;
SET p_message = 'Password changed successfully';
END IF;
END IF;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP PROCEDURE IF EXISTS `create_user` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` PROCEDURE `create_user`(
IN p_email VARCHAR(255),
IN p_password_hash VARCHAR(255),
IN p_first_name VARCHAR(100),
IN p_last_name VARCHAR(100),
IN p_role ENUM('admin', 'recruiter'),
IN p_company_name VARCHAR(255),
IN p_admin_id VARCHAR(36),
OUT p_user_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255)
)
BEGIN
-- Admin-only: creates an active, email-verified user and its companion
-- user_usage counter row. Returns the new id via p_user_id.
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
    ROLLBACK;
    SET p_success = FALSE;
    SET p_message = 'An error occurred while creating user';
END;
IF NOT is_admin(p_admin_id) THEN
    SET p_success = FALSE;
    SET p_message = 'Access denied: Admin privileges required';
ELSEIF EXISTS (SELECT 1 FROM users WHERE email = p_email AND deleted_at IS NULL) THEN
    SET p_success = FALSE;
    SET p_message = 'Email already exists';
ELSE
    SET p_user_id = UUID();
    -- BUG FIX: the EXIT handler rolls back, but no transaction was ever
    -- started, so a failure on the second INSERT left a user without its
    -- user_usage row. Wrap both INSERTs so they commit atomically.
    START TRANSACTION;
    INSERT INTO users (
        id, email, password_hash, first_name, last_name,
        role, company_name, is_active, email_verified_at
    ) VALUES (
        p_user_id, p_email, p_password_hash, p_first_name, p_last_name,
        p_role, p_company_name, TRUE, NOW()
    );
    -- Initialize the per-user usage counters (all default to 0).
    INSERT INTO user_usage (user_id) VALUES (p_user_id);
    COMMIT;
    SET p_success = TRUE;
    SET p_message = 'User created successfully';
END IF;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP PROCEDURE IF EXISTS `deactivate_user` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` PROCEDURE `deactivate_user`(
IN p_user_id VARCHAR(36),
IN p_admin_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255)
)
BEGIN
-- Admin-only switch-off: sets is_active = FALSE for a user. This is not a
-- soft delete (deleted_at stays NULL), so the row still appears in
-- not-deleted queries, but checks that filter on is_active (e.g. is_admin)
-- will reject the account.
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
-- NOTE(review): ROLLBACK without a preceding START TRANSACTION is a
-- no-op; harmless here since only a single UPDATE is issued.
ROLLBACK;
SET p_success = FALSE;
SET p_message = 'An error occurred while deactivating user';
END;
-- Only admins may deactivate accounts.
IF NOT is_admin(p_admin_id) THEN
SET p_success = FALSE;
SET p_message = 'Access denied: Admin privileges required';
ELSE
-- Target must exist and not be soft-deleted.
IF NOT EXISTS (SELECT 1 FROM users WHERE id = p_user_id AND deleted_at IS NULL) THEN
SET p_success = FALSE;
SET p_message = 'User not found';
ELSE
-- Deactivate user and touch updated_at.
UPDATE users SET
is_active = FALSE,
updated_at = NOW()
WHERE id = p_user_id;
SET p_success = TRUE;
SET p_message = 'User deactivated successfully';
END IF;
END IF;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP PROCEDURE IF EXISTS `get_system_statistics` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` PROCEDURE `get_system_statistics`(
IN p_admin_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255),
OUT p_statistics JSON
)
BEGIN
-- Admin-only dashboard snapshot: user counts, aggregate usage counters,
-- and paid revenue, returned as one JSON object in p_statistics.
DECLARE v_total_users INT DEFAULT 0;
DECLARE v_active_users INT DEFAULT 0;
DECLARE v_total_jobs INT DEFAULT 0;
DECLARE v_total_interviews INT DEFAULT 0;
DECLARE v_total_tokens_purchased INT DEFAULT 0;
DECLARE v_total_tokens_used INT DEFAULT 0;
DECLARE v_total_revenue DECIMAL(10,2) DEFAULT 0;
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
-- NOTE(review): ROLLBACK without START TRANSACTION is a no-op; this
-- procedure is read-only, so the handler only sets the failure outputs.
ROLLBACK;
SET p_success = FALSE;
SET p_message = 'An error occurred while getting statistics';
END;
-- Only admins may read system-wide statistics.
IF NOT is_admin(p_admin_id) THEN
SET p_success = FALSE;
SET p_message = 'Access denied: Admin privileges required';
ELSE
-- User counts exclude soft-deleted rows; usage totals come from the
-- per-user counters in user_usage; revenue counts only 'paid' records.
SELECT COUNT(*) INTO v_total_users FROM users WHERE deleted_at IS NULL;
SELECT COUNT(*) INTO v_active_users FROM users WHERE is_active = TRUE AND deleted_at IS NULL;
SELECT COALESCE(SUM(jobs_created), 0) INTO v_total_jobs FROM user_usage;
SELECT COALESCE(SUM(interviews_completed), 0) INTO v_total_interviews FROM user_usage;
SELECT COALESCE(SUM(tokens_purchased), 0) INTO v_total_tokens_purchased FROM user_usage;
SELECT COALESCE(SUM(tokens_used), 0) INTO v_total_tokens_used FROM user_usage;
SELECT COALESCE(SUM(amount), 0) INTO v_total_revenue FROM payment_records WHERE status = 'paid';
-- Package everything into the output JSON, stamped with NOW().
SET p_statistics = JSON_OBJECT(
'total_users', v_total_users,
'active_users', v_active_users,
'total_jobs', v_total_jobs,
'total_interviews', v_total_interviews,
'total_tokens_purchased', v_total_tokens_purchased,
'total_tokens_used', v_total_tokens_used,
'total_revenue', v_total_revenue,
'generated_at', NOW()
);
SET p_success = TRUE;
SET p_message = 'Statistics retrieved successfully';
END IF;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!50003 DROP PROCEDURE IF EXISTS `update_user` */;
/*!50003 SET @saved_cs_client = @@character_set_client */ ;
/*!50003 SET @saved_cs_results = @@character_set_results */ ;
/*!50003 SET @saved_col_connection = @@collation_connection */ ;
/*!50003 SET character_set_client = utf8mb4 */ ;
/*!50003 SET character_set_results = utf8mb4 */ ;
/*!50003 SET collation_connection = utf8mb4_0900_ai_ci */ ;
/*!50003 SET @saved_sql_mode = @@sql_mode */ ;
/*!50003 SET sql_mode = 'STRICT_TRANS_TABLES,NO_ENGINE_SUBSTITUTION' */ ;
DELIMITER ;;
CREATE DEFINER=`root`@`localhost` PROCEDURE `update_user`(
IN p_user_id VARCHAR(36),
IN p_email VARCHAR(255),
IN p_first_name VARCHAR(100),
IN p_last_name VARCHAR(100),
IN p_role ENUM('admin', 'recruiter'),
IN p_company_name VARCHAR(255),
IN p_is_active BOOLEAN,
IN p_admin_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255)
)
BEGIN
-- Admin-only: replaces a user's profile fields (email, name, role,
-- company, active flag). All IN fields are written unconditionally.
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
    -- ROLLBACK is a no-op here (single statement), kept for parity with
    -- the other admin procedures' handlers.
    ROLLBACK;
    SET p_success = FALSE;
    SET p_message = 'An error occurred while updating user';
END;
IF NOT is_admin(p_admin_id) THEN
    SET p_success = FALSE;
    SET p_message = 'Access denied: Admin privileges required';
ELSEIF NOT EXISTS (SELECT 1 FROM users WHERE id = p_user_id AND deleted_at IS NULL) THEN
    SET p_success = FALSE;
    SET p_message = 'User not found';
-- ROBUSTNESS FIX: reassigning an email already used by another live user
-- previously tripped the UNIQUE KEY on users.email and surfaced only the
-- generic handler message. Pre-check it, mirroring create_user.
ELSEIF EXISTS (SELECT 1 FROM users WHERE email = p_email AND id <> p_user_id AND deleted_at IS NULL) THEN
    SET p_success = FALSE;
    SET p_message = 'Email already exists';
ELSE
    UPDATE users SET
        email = p_email,
        first_name = p_first_name,
        last_name = p_last_name,
        role = p_role,
        company_name = p_company_name,
        is_active = p_is_active,
        updated_at = NOW()
    WHERE id = p_user_id;
    SET p_success = TRUE;
    SET p_message = 'User updated successfully';
END IF;
END ;;
DELIMITER ;
/*!50003 SET sql_mode = @saved_sql_mode */ ;
/*!50003 SET character_set_client = @saved_cs_client */ ;
/*!50003 SET character_set_results = @saved_cs_results */ ;
/*!50003 SET collation_connection = @saved_col_connection */ ;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:32

View File

@ -0,0 +1,55 @@
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `token_packages`
--
DROP TABLE IF EXISTS `token_packages`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
-- Catalog of purchasable interview-token bundles shown to buyers.
-- CHECK constraints keep quantity/price positive and the discount in
-- [0, 100]; is_popular/is_active are display/visibility flags.
CREATE TABLE `token_packages` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`name` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`description` text COLLATE utf8mb4_unicode_ci,
`quantity` int NOT NULL,
`price_per_token` decimal(10,2) NOT NULL,
`total_price` decimal(10,2) NOT NULL,
`discount_percentage` decimal(5,2) DEFAULT '0.00',
`is_popular` tinyint(1) DEFAULT '0',
`is_active` tinyint(1) DEFAULT '1',
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
CONSTRAINT `chk_token_packages_discount_valid` CHECK (((`discount_percentage` >= 0) and (`discount_percentage` <= 100))),
CONSTRAINT `chk_token_packages_price_positive` CHECK ((`price_per_token` > 0)),
CONSTRAINT `chk_token_packages_quantity_positive` CHECK ((`quantity` > 0))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:32

View File

@ -0,0 +1,53 @@
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `user_usage`
--
DROP TABLE IF EXISTS `user_usage`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
-- One-row-per-user usage counters (jobs, interviews, tokens bought/used).
-- The UNIQUE KEY on user_id is what lets the jobs AFTER INSERT trigger and
-- the admin procedures upsert/increment these counters safely.
CREATE TABLE `user_usage` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`jobs_created` int DEFAULT '0',
`interviews_completed` int DEFAULT '0',
`tokens_purchased` int DEFAULT '0',
`tokens_used` int DEFAULT '0',
`last_reset_date` date DEFAULT (curdate()),
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
UNIQUE KEY `unique_user_usage` (`user_id`),
CONSTRAINT `user_usage_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE,
CONSTRAINT `chk_usage_positive` CHECK (((`jobs_created` >= 0) and (`interviews_completed` >= 0) and (`tokens_purchased` >= 0) and (`tokens_used` >= 0)))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:31

View File

@ -0,0 +1,60 @@
CREATE DATABASE IF NOT EXISTS `candidb_main` /*!40100 DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci */ /*!80016 DEFAULT ENCRYPTION='N' */;
USE `candidb_main`;
-- MySQL dump 10.13 Distrib 8.0.38, for Win64 (x86_64)
--
-- Host: localhost Database: candidb_main
-- ------------------------------------------------------
-- Server version 8.0.39
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `users`
--
DROP TABLE IF EXISTS `users`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
-- Accounts table: soft delete via deleted_at, activation via is_active.
CREATE TABLE `users` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`password_hash` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`first_name` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`last_name` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`role` enum('admin','recruiter') COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT 'recruiter',
`company_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`avatar_url` varchar(500) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`is_active` tinyint(1) DEFAULT '1',
`last_login_at` timestamp NULL DEFAULT NULL,
`email_verified_at` timestamp NULL DEFAULT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`deleted_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `email` (`email`),
-- FIX: dropped redundant `KEY idx_email (email)` — the UNIQUE KEY above
-- already indexes email; the duplicate secondary index only added write
-- and storage overhead.
KEY `idx_role` (`role`),
KEY `idx_active` (`is_active`),
KEY `idx_role_active` (`role`,`is_active`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2025-09-16 20:22:32

718
database/deploy_dump.sql Normal file
View File

@ -0,0 +1,718 @@
-- Auto-generated consolidated deployment SQL based on candidb_dump1
-- WARNING(review): destructive — drops the entire database (and all data)
-- before recreating it; never run against a live production instance.
-- The IF NOT EXISTS on CREATE is redundant after the DROP but harmless.
DROP DATABASE IF EXISTS candidb_main;
CREATE DATABASE IF NOT EXISTS `candidb_main`
CHARACTER SET utf8mb4
COLLATE utf8mb4_unicode_ci;
USE `candidb_main`;
-- Core tables (ordered by dependencies)
-- users
-- Account records for recruiters/admins. UUID string PKs (DB-generated via
-- uuid()), bcrypt hash in `password_hash`, soft delete via `deleted_at`.
CREATE TABLE `users` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`password_hash` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`first_name` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`last_name` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`role` enum('admin','recruiter') COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT 'recruiter',
`company_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`avatar_url` varchar(500) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`stripe_customer_id` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Stripe Customer ID for payment processing',
`is_active` tinyint(1) DEFAULT '1',
`last_login_at` timestamp NULL DEFAULT NULL,
`email_verified_at` timestamp NULL DEFAULT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`deleted_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `email` (`email`),
-- NOTE(review): `idx_email` duplicates the UNIQUE KEY `email` on the same
-- column — redundant secondary index, candidate for removal.
KEY `idx_email` (`email`),
KEY `idx_role` (`role`),
KEY `idx_active` (`is_active`),
KEY `idx_role_active` (`role`,`is_active`),
KEY `idx_stripe_customer_id` (`stripe_customer_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- token_packages
-- Purchasable token bundles shown in the pricing UI. CHECK constraints
-- enforce positive quantity/price and a 0-100 discount percentage.
CREATE TABLE `token_packages` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`name` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`description` text COLLATE utf8mb4_unicode_ci,
`quantity` int NOT NULL,
`price_per_token` decimal(10,2) NOT NULL,
`total_price` decimal(10,2) NOT NULL,
`discount_percentage` decimal(5,2) DEFAULT '0.00',
`is_popular` tinyint(1) DEFAULT '0',
`is_active` tinyint(1) DEFAULT '1',
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
CONSTRAINT `chk_token_packages_discount_valid` CHECK (((`discount_percentage` >= 0) and (`discount_percentage` <= 100))),
CONSTRAINT `chk_token_packages_price_positive` CHECK ((`price_per_token` > 0)),
CONSTRAINT `chk_token_packages_quantity_positive` CHECK ((`quantity` > 0))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- jobs
-- Job postings owned by a recruiter (`user_id`). Deleting the owning user
-- cascades here. AI configuration (criteria/questions/style) lives in JSON
-- columns; soft delete via `deleted_at`.
CREATE TABLE `jobs` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`title` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`description` text COLLATE utf8mb4_unicode_ci NOT NULL,
`requirements` text COLLATE utf8mb4_unicode_ci NOT NULL,
`skills_required` json DEFAULT NULL,
`location` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`employment_type` enum('full_time','part_time','contract','internship') COLLATE utf8mb4_unicode_ci DEFAULT 'full_time',
`experience_level` enum('entry','mid','senior','lead','executive') COLLATE utf8mb4_unicode_ci DEFAULT 'mid',
`salary_min` decimal(10,2) DEFAULT NULL,
`salary_max` decimal(10,2) DEFAULT NULL,
`currency` varchar(3) COLLATE utf8mb4_unicode_ci DEFAULT 'USD',
`status` enum('draft','active','paused','closed') COLLATE utf8mb4_unicode_ci DEFAULT 'draft',
`evaluation_criteria` json DEFAULT NULL,
`interview_questions` json DEFAULT NULL,
`interview_style` enum('personal','balanced','technical') COLLATE utf8mb4_unicode_ci DEFAULT 'balanced',
`application_deadline` timestamp NULL DEFAULT NULL,
`icon` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT 'briefcase',
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`deleted_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `idx_user_status` (`user_id`,`status`),
KEY `idx_created_at` (`created_at`),
KEY `idx_jobs_user_status_created` (`user_id`,`status`,`created_at` DESC),
CONSTRAINT `jobs_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- candidates
-- Applicants per job. `unique_candidate_per_job` prevents the same email
-- applying twice to one job; rows cascade away with their user or job.
CREATE TABLE `candidates` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`job_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`first_name` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`last_name` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`phone` varchar(20) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`resume_url` varchar(500) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`cover_letter` text COLLATE utf8mb4_unicode_ci,
`source` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`status` enum('applied','interviewing','evaluated','hired','rejected') COLLATE utf8mb4_unicode_ci DEFAULT 'applied',
`applied_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`last_activity_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`deleted_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `unique_candidate_per_job` (`job_id`,`email`),
KEY `idx_user_job` (`user_id`,`job_id`),
KEY `idx_status` (`status`),
KEY `idx_candidates_user_job_status` (`user_id`,`job_id`,`status`),
CONSTRAINT `candidates_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE,
CONSTRAINT `candidates_ibfk_2` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- interviews
-- One AI interview session per candidate/job, accessed by a unique `token`.
-- chk_scores_valid keeps every score NULL or within 0-100.
CREATE TABLE `interviews` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`candidate_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`job_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`token` varchar(64) COLLATE utf8mb4_unicode_ci NOT NULL,
`status` enum('scheduled','in_progress','completed','abandoned') COLLATE utf8mb4_unicode_ci DEFAULT 'scheduled',
`started_at` timestamp NULL DEFAULT NULL,
`completed_at` timestamp NULL DEFAULT NULL,
`duration_minutes` int DEFAULT '0',
`ai_questions` json DEFAULT NULL,
`candidate_responses` json DEFAULT NULL,
`ai_evaluation` json DEFAULT NULL,
`overall_score` decimal(5,2) DEFAULT NULL,
`technical_score` decimal(5,2) DEFAULT NULL,
`communication_score` decimal(5,2) DEFAULT NULL,
`culture_fit_score` decimal(5,2) DEFAULT NULL,
`ai_feedback` text COLLATE utf8mb4_unicode_ci,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
UNIQUE KEY `token` (`token`),
KEY `candidate_id` (`candidate_id`),
KEY `job_id` (`job_id`),
-- NOTE(review): `idx_token` duplicates the UNIQUE KEY `token` above —
-- redundant secondary index.
KEY `idx_token` (`token`),
KEY `idx_user_candidate` (`user_id`,`candidate_id`),
KEY `idx_status` (`status`),
KEY `idx_interviews_user_status` (`user_id`,`status`),
KEY `idx_interviews_token_status` (`token`,`status`),
CONSTRAINT `interviews_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE,
CONSTRAINT `interviews_ibfk_2` FOREIGN KEY (`candidate_id`) REFERENCES `candidates` (`id`) ON DELETE CASCADE,
CONSTRAINT `interviews_ibfk_3` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`) ON DELETE CASCADE,
CONSTRAINT `chk_scores_valid` CHECK ((((`overall_score` is null) or ((`overall_score` >= 0) and (`overall_score` <= 100))) and ((`technical_score` is null) or ((`technical_score` >= 0) and (`technical_score` <= 100))) and ((`communication_score` is null) or ((`communication_score` >= 0) and (`communication_score` <= 100))) and ((`culture_fit_score` is null) or ((`culture_fit_score` >= 0) and (`culture_fit_score` <= 100)))))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- interview_questions
-- Ordered questions generated for one interview; `order_index` drives
-- presentation order via idx_interview_order.
CREATE TABLE `interview_questions` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`interview_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`question_text` text COLLATE utf8mb4_unicode_ci NOT NULL,
`question_type` enum('technical','behavioral','situational','culture_fit') COLLATE utf8mb4_unicode_ci NOT NULL,
`difficulty_level` enum('easy','medium','hard') COLLATE utf8mb4_unicode_ci DEFAULT 'medium',
`expected_answer` text COLLATE utf8mb4_unicode_ci,
`evaluation_criteria` json DEFAULT NULL,
`order_index` int NOT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `idx_interview_order` (`interview_id`,`order_index`),
CONSTRAINT `interview_questions_ibfk_1` FOREIGN KEY (`interview_id`) REFERENCES `interviews` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- candidate_responses
-- One answer per (interview, question) — enforced by the unique key.
-- Cascades away with the interview or the question.
CREATE TABLE `candidate_responses` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`interview_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`question_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`response_text` text COLLATE utf8mb4_unicode_ci NOT NULL,
`response_audio_url` varchar(500) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`ai_score` decimal(5,2) DEFAULT NULL,
`ai_feedback` text COLLATE utf8mb4_unicode_ci,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
UNIQUE KEY `unique_response_per_question` (`interview_id`,`question_id`),
KEY `question_id` (`question_id`),
CONSTRAINT `candidate_responses_ibfk_1` FOREIGN KEY (`interview_id`) REFERENCES `interviews` (`id`) ON DELETE CASCADE,
CONSTRAINT `candidate_responses_ibfk_2` FOREIGN KEY (`question_id`) REFERENCES `interview_questions` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- interview_tokens
-- Token purchase batches per user. `tokens_remaining` is a STORED generated
-- column (quantity - tokens_used), so callers must never write it directly.
-- CHECKs keep quantity positive and tokens_used within [0, quantity].
CREATE TABLE `interview_tokens` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`token_type` enum('single','bulk') COLLATE utf8mb4_unicode_ci NOT NULL,
`quantity` int NOT NULL DEFAULT '1',
`price_per_token` decimal(10,2) NOT NULL,
`total_price` decimal(10,2) NOT NULL,
`tokens_used` int DEFAULT '0',
`tokens_remaining` int GENERATED ALWAYS AS ((`quantity` - `tokens_used`)) STORED,
`status` enum('active','exhausted','expired') COLLATE utf8mb4_unicode_ci DEFAULT 'active',
`expires_at` timestamp NULL DEFAULT NULL,
`purchased_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
-- NOTE(review): idx_interview_tokens_user_active below covers the same
-- columns as idx_user_status — redundant pair.
KEY `idx_user_status` (`user_id`,`status`),
KEY `idx_expires_at` (`expires_at`),
KEY `idx_interview_tokens_user_active` (`user_id`,`status`),
CONSTRAINT `interview_tokens_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE,
CONSTRAINT `chk_interview_tokens_quantity_positive` CHECK ((`quantity` > 0)),
CONSTRAINT `chk_interview_tokens_used_valid` CHECK (((`tokens_used` >= 0) and (`tokens_used` <= `quantity`)))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- payment_records
-- Payment/refund ledger. Stripe columns are NULL for admin-granted tokens
-- (payment_flow_type defaults to 'admin_granted'). token_package_id uses
-- ON DELETE RESTRICT so packages with payment history cannot be deleted.
CREATE TABLE `payment_records` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`token_package_id` varchar(36) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`amount` decimal(10,2) NOT NULL,
`currency` varchar(3) COLLATE utf8mb4_unicode_ci DEFAULT 'EUR',
`status` enum('pending','processing','paid','failed','refunded','cancelled') COLLATE utf8mb4_unicode_ci DEFAULT 'pending',
`payment_method` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`payment_reference` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`stripe_payment_intent_id` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Stripe Payment Intent ID for tracking payments',
`stripe_payment_method_id` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Stripe Payment Method ID for saved payment methods',
`stripe_customer_id` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Stripe Customer ID for user payment methods',
`payment_flow_type` enum('card','ideal','bank_transfer','admin_granted') COLLATE utf8mb4_unicode_ci DEFAULT 'admin_granted' COMMENT 'Type of payment flow used',
`invoice_url` varchar(500) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`stripe_metadata` json DEFAULT NULL COMMENT 'Additional Stripe metadata and webhook data',
`refund_reason` text COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Reason for payment refund',
`refunded_amount` decimal(10,2) DEFAULT '0.00' COMMENT 'Amount refunded for this payment',
`custom_quantity` int DEFAULT NULL COMMENT 'Custom token quantity for non-package purchases',
`applied_discount_percentage` decimal(5,2) DEFAULT '0.00' COMMENT 'Discount percentage applied to this purchase',
`paid_at` timestamp NULL DEFAULT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `token_package_id` (`token_package_id`),
KEY `idx_user_status` (`user_id`,`status`),
KEY `idx_payment_reference` (`payment_reference`),
KEY `idx_payment_records_user_created` (`user_id`,`created_at` DESC),
KEY `idx_stripe_payment_intent_id` (`stripe_payment_intent_id`),
KEY `idx_stripe_customer_id` (`stripe_customer_id`),
KEY `idx_payment_flow_type` (`payment_flow_type`),
KEY `idx_custom_quantity` (`custom_quantity`),
CONSTRAINT `payment_records_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE,
CONSTRAINT `payment_records_ibfk_2` FOREIGN KEY (`token_package_id`) REFERENCES `token_packages` (`id`) ON DELETE RESTRICT
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- user_usage
-- Denormalized per-user counters, exactly one row per user
-- (unique_user_usage). Maintained by the AFTER INSERT/UPDATE triggers
-- defined later in this file; chk_usage_positive forbids negative counts.
CREATE TABLE `user_usage` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`jobs_created` int DEFAULT '0',
`interviews_completed` int DEFAULT '0',
`tokens_purchased` int DEFAULT '0',
`tokens_used` int DEFAULT '0',
`last_reset_date` date DEFAULT (curdate()),
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
UNIQUE KEY `unique_user_usage` (`user_id`),
CONSTRAINT `user_usage_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE,
CONSTRAINT `chk_usage_positive` CHECK (((`jobs_created` >= 0) and (`interviews_completed` >= 0) and (`tokens_purchased` >= 0) and (`tokens_used` >= 0)))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- audit_logs
-- Append-only audit trail. user_id is SET NULL on user deletion so the
-- log row survives the account; ip_address is varchar(45) to fit IPv6.
CREATE TABLE `audit_logs` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`user_id` varchar(36) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`action` varchar(100) COLLATE utf8mb4_unicode_ci NOT NULL,
`resource_type` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`resource_id` varchar(36) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`old_values` json DEFAULT NULL,
`new_values` json DEFAULT NULL,
`ip_address` varchar(45) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`user_agent` text COLLATE utf8mb4_unicode_ci,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `idx_user_action` (`user_id`,`action`),
KEY `idx_resource` (`resource_type`,`resource_id`),
KEY `idx_created_at` (`created_at`),
CONSTRAINT `audit_logs_ibfk_1` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE SET NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- job_links
-- Public interview links per job, addressed by `url_slug`. Unlike most
-- tables, `id` has no uuid() default here — the application must supply it.
CREATE TABLE `job_links` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`job_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`url_slug` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`tokens_available` int DEFAULT '0',
`tokens_used` int DEFAULT '0',
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
`updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
UNIQUE KEY `url_slug` (`url_slug`),
KEY `idx_job_id` (`job_id`),
-- NOTE(review): `idx_url_slug` duplicates the UNIQUE KEY `url_slug` —
-- redundant secondary index.
KEY `idx_url_slug` (`url_slug`),
CONSTRAINT `job_links_ibfk_1` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- conversation_messages
-- Chat transcript rows for an interview, from either the candidate or the
-- AI. NOTE(review): `link_id` carries no foreign key — presumably it
-- references job_links.id; confirm and consider adding the constraint.
CREATE TABLE `conversation_messages` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT (uuid()),
`interview_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`link_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`sender` enum('candidate','ai') COLLATE utf8mb4_unicode_ci NOT NULL,
`message` text COLLATE utf8mb4_unicode_ci NOT NULL,
`message_data` json DEFAULT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `idx_interview_id` (`interview_id`),
KEY `idx_link_id` (`link_id`),
KEY `idx_created_at` (`created_at`),
CONSTRAINT `conversation_messages_ibfk_1` FOREIGN KEY (`interview_id`) REFERENCES `interviews` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- interview_events
-- Free-form event log per job/link (event_type + JSON payload). `id` has
-- no uuid() default — application-supplied. NOTE(review): `link_id` carries
-- no foreign key — presumably references job_links.id; confirm.
CREATE TABLE `interview_events` (
`id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`job_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`link_id` varchar(36) COLLATE utf8mb4_unicode_ci NOT NULL,
`event_type` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`event_data` json DEFAULT NULL,
`created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`),
KEY `idx_job_id` (`job_id`),
KEY `idx_link_id` (`link_id`),
KEY `idx_event_type` (`event_type`),
KEY `idx_created_at` (`created_at`),
CONSTRAINT `interview_events_ibfk_1` FOREIGN KEY (`job_id`) REFERENCES `jobs` (`id`) ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- Triggers
DELIMITER $$
-- Keeps the denormalized jobs_created counter in user_usage in sync on
-- every job insert; the upsert creates the usage row if one doesn't exist
-- yet (user_usage.user_id is unique, so ON DUPLICATE KEY fires on it).
CREATE TRIGGER `update_job_usage_after_insert` AFTER INSERT ON `jobs` FOR EACH ROW BEGIN
INSERT INTO user_usage (user_id, jobs_created)
VALUES (NEW.user_id, 1)
ON DUPLICATE KEY UPDATE jobs_created = jobs_created + 1;
END$$
DELIMITER ;
DELIMITER $$
-- Adds the purchased quantity to the user's tokens_purchased counter on
-- every interview_tokens insert (upserting the usage row if missing).
CREATE TRIGGER `update_token_usage_after_purchase` AFTER INSERT ON `interview_tokens` FOR EACH ROW BEGIN
INSERT INTO user_usage (user_id, tokens_purchased)
VALUES (NEW.user_id, NEW.quantity)
ON DUPLICATE KEY UPDATE tokens_purchased = tokens_purchased + NEW.quantity;
END$$
DELIMITER ;
DELIMITER $$
-- Counts an interview exactly once: only on the transition INTO the
-- 'completed' status (re-saving an already-completed row does not
-- re-increment). Each completion consumes one token (tokens_used + 1).
CREATE TRIGGER `update_interview_usage_after_complete` AFTER UPDATE ON `interviews` FOR EACH ROW BEGIN
IF OLD.status != 'completed' AND NEW.status = 'completed' THEN
INSERT INTO user_usage (user_id, interviews_completed, tokens_used)
VALUES (NEW.user_id, 1, 1)
ON DUPLICATE KEY UPDATE
interviews_completed = interviews_completed + 1,
tokens_used = tokens_used + 1;
END IF;
END$$
DELIMITER ;
-- Functions
DELIMITER $$
-- TRUE while the user is under the job-creation cap.
-- NOTE(review): the cap (100) is hard-coded in max_jobs rather than stored
-- in configuration. If the user has no user_usage row yet, SELECT ... INTO
-- matches no row and leaves current_jobs at its declared default 0 (with a
-- no-data warning), so brand-new users are allowed — intentional-looking,
-- but confirm.
CREATE FUNCTION `can_create_job`(user_uuid VARCHAR(36)) RETURNS tinyint(1)
READS SQL DATA
DETERMINISTIC
BEGIN
DECLARE current_jobs INT DEFAULT 0;
DECLARE max_jobs INT DEFAULT 100;
SELECT COALESCE(jobs_created, 0) INTO current_jobs FROM user_usage WHERE user_id = user_uuid;
RETURN current_jobs < max_jobs;
END$$
DELIMITER ;
DELIMITER $$
-- Returns all non-deleted users as one JSON array, newest first.
-- FIX: the original put ORDER BY created_at on the outer aggregate query;
-- with an aggregate and no GROUP BY that is rejected under
-- ONLY_FULL_GROUP_BY (the MySQL 8 default sql_mode), and an outer ORDER BY
-- does not define JSON_ARRAYAGG's aggregation order anyway. The ordering
-- now lives in a derived table feeding the aggregate — the standard
-- workaround (aggregation order from a sorted derived table is the common
-- MySQL idiom, though not formally guaranteed by the docs).
CREATE FUNCTION `get_all_users`() RETURNS json
READS SQL DATA
DETERMINISTIC
BEGIN
DECLARE result JSON;
SELECT JSON_ARRAYAGG(
JSON_OBJECT(
'id', u.id,
'email', u.email,
'first_name', u.first_name,
'last_name', u.last_name,
'role', u.role,
'company_name', u.company_name,
'is_active', u.is_active,
'last_login_at', u.last_login_at,
'email_verified_at', u.email_verified_at,
'created_at', u.created_at
)
) INTO result
FROM (
SELECT id, email, first_name, last_name, role, company_name,
is_active, last_login_at, email_verified_at, created_at
FROM users
WHERE deleted_at IS NULL
ORDER BY created_at DESC
) AS u;
RETURN result;
END$$
DELIMITER ;
DELIMITER $$
-- Aggregates a user's token purchases into a JSON summary:
-- {total_purchased, total_used, total_available, utilization_percentage}.
-- Changes from the original:
--  * NOT DETERMINISTIC — the availability window depends on NOW(), so the
--    result varies over time for identical arguments; the original
--    DETERMINISTIC flag was a mislabel.
--  * Single pass — the three separate SELECT ... INTO scans over
--    interview_tokens are folded into one SELECT, with the availability
--    filter expressed as a CASE inside SUM.
CREATE FUNCTION `get_token_usage_summary`(user_uuid VARCHAR(36)) RETURNS json
READS SQL DATA
NOT DETERMINISTIC
BEGIN
DECLARE total_purchased INT DEFAULT 0;
DECLARE total_used INT DEFAULT 0;
DECLARE total_available INT DEFAULT 0;
DECLARE result JSON;
SELECT
COALESCE(SUM(quantity), 0),
COALESCE(SUM(tokens_used), 0),
COALESCE(SUM(CASE
WHEN status = 'active' AND (expires_at IS NULL OR expires_at > NOW())
THEN tokens_remaining ELSE 0 END), 0)
INTO total_purchased, total_used, total_available
FROM interview_tokens
WHERE user_id = user_uuid;
SET result = JSON_OBJECT(
'total_purchased', total_purchased,
'total_used', total_used,
'total_available', total_available,
'utilization_percentage', CASE WHEN total_purchased > 0 THEN ROUND((total_used / total_purchased) * 100, 2) ELSE 0 END
);
RETURN result;
END$$
DELIMITER ;
DELIMITER $$
-- Combines the per-user usage counters with the token summary into one
-- JSON object: {'usage': {...}, 'tokens': {...}}.
-- NOTE(review): declared DETERMINISTIC but delegates to
-- get_token_usage_summary(), whose expiry filter uses NOW() — the flag is
-- optimistic. Also, when the user has no user_usage row, SELECT ... INTO
-- matches nothing and user_usage_data stays NULL, so 'usage' comes back as
-- JSON null rather than zeroed counters; confirm callers handle that.
CREATE FUNCTION `get_user_statistics`(user_uuid VARCHAR(36)) RETURNS json
READS SQL DATA
DETERMINISTIC
BEGIN
DECLARE result JSON;
DECLARE user_usage_data JSON;
DECLARE token_summary JSON;
SELECT JSON_OBJECT(
'jobs_created', COALESCE(jobs_created, 0),
'interviews_completed', COALESCE(interviews_completed, 0),
'tokens_purchased', COALESCE(tokens_purchased, 0),
'tokens_used', COALESCE(tokens_used, 0)
) INTO user_usage_data FROM user_usage WHERE user_id = user_uuid;
SELECT get_token_usage_summary(user_uuid) INTO token_summary;
SET result = JSON_OBJECT('usage', user_usage_data, 'tokens', token_summary);
RETURN result;
END$$
DELIMITER ;
DELIMITER $$
-- TRUE when the user has at least one remaining token in an active,
-- unexpired batch.
-- FIX: declared NOT DETERMINISTIC — the expiry check uses NOW(), so the
-- result changes over time for the same argument; the original
-- DETERMINISTIC flag misrepresented that to the optimizer/replication.
CREATE FUNCTION `has_available_tokens`(user_uuid VARCHAR(36)) RETURNS tinyint(1)
READS SQL DATA
NOT DETERMINISTIC
BEGIN
DECLARE available_tokens INT DEFAULT 0;
SELECT COALESCE(SUM(tokens_remaining), 0) INTO available_tokens FROM interview_tokens WHERE user_id = user_uuid AND status = 'active' AND (expires_at IS NULL OR expires_at > NOW());
RETURN available_tokens > 0;
END$$
DELIMITER ;
DELIMITER $$
-- TRUE only when the given id belongs to an existing, active admin user.
-- FIX (security): when no row matches, SELECT ... INTO leaves user_role
-- NULL, and the original `RETURN user_role = 'admin'` returned NULL — in
-- callers written as `IF NOT is_admin(...) THEN deny ELSE <privileged>`,
-- NOT NULL is NULL (not true), so the deny branch was skipped and the
-- privileged ELSE ran for unknown/invalid admin ids. COALESCE pins the
-- no-row / NULL-role case to FALSE.
CREATE FUNCTION `is_admin`(user_uuid VARCHAR(36)) RETURNS tinyint(1)
READS SQL DATA
DETERMINISTIC
BEGIN
DECLARE user_role VARCHAR(20) DEFAULT NULL;
SELECT role INTO user_role FROM users WHERE id = user_uuid AND is_active = TRUE;
RETURN COALESCE(user_role = 'admin', FALSE);
END$$
DELIMITER ;
-- Procedures (from dump routines)
DELIMITER $$
-- Admin-only: grant p_quantity tokens to a user and record a matching
-- 'paid' payment row. OUT params report success and a human message.
-- FIX: the EXIT HANDLER issues ROLLBACK but the original never started a
-- transaction, so under autocommit each INSERT committed on its own and a
-- failure between them could leave a token batch with no payment record.
-- The two INSERTs now run inside an explicit START TRANSACTION / COMMIT.
CREATE PROCEDURE `add_tokens_to_user`(
IN p_user_id VARCHAR(36),
IN p_quantity INT,
IN p_price_per_token DECIMAL(10,2),
IN p_admin_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255)
)
BEGIN
DECLARE v_total_price DECIMAL(10,2);
DECLARE v_token_id VARCHAR(36);
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
ROLLBACK;
SET p_success = FALSE;
SET p_message = 'An error occurred while adding tokens';
END;
IF NOT is_admin(p_admin_id) THEN
SET p_success = FALSE;
SET p_message = 'Access denied: Admin privileges required';
ELSE
IF NOT EXISTS (SELECT 1 FROM users WHERE id = p_user_id AND deleted_at IS NULL) THEN
SET p_success = FALSE;
SET p_message = 'User not found';
ELSE
SET v_total_price = p_quantity * p_price_per_token;
SET v_token_id = UUID();
START TRANSACTION;
INSERT INTO interview_tokens (
id, user_id, token_type, quantity, price_per_token,
total_price, status, purchased_at
) VALUES (
v_token_id, p_user_id,
CASE WHEN p_quantity = 1 THEN 'single' ELSE 'bulk' END,
p_quantity, p_price_per_token, v_total_price,
'active', NOW()
);
-- NOTE: routines in dump referenced interview_token_id; schema doesn't have it. Keeping minimal insert
INSERT INTO payment_records (
user_id, token_package_id, amount, status, payment_method, payment_reference
) VALUES (
p_user_id, NULL, v_total_price,
'paid', 'admin_granted', CONCAT('ADMIN_', p_admin_id, '_', NOW())
);
COMMIT;
SET p_success = TRUE;
SET p_message = CONCAT('Successfully added ', p_quantity, ' tokens to user');
END IF;
END IF;
END$$
DELIMITER ;
DELIMITER $$
-- Admin-only: replace a user's password hash. The caller supplies an
-- already-hashed value (p_new_password_hash) — no hashing happens here.
-- NOTE(review): the ROLLBACK in the handler is effectively a no-op — no
-- transaction is started and there is only one UPDATE, so autocommit
-- applies; harmless, but misleading.
CREATE PROCEDURE `change_user_password`(
IN p_user_id VARCHAR(36),
IN p_new_password_hash VARCHAR(255),
IN p_admin_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255)
)
BEGIN
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
ROLLBACK;
SET p_success = FALSE;
SET p_message = 'An error occurred while changing password';
END;
IF NOT is_admin(p_admin_id) THEN
SET p_success = FALSE;
SET p_message = 'Access denied: Admin privileges required';
ELSE
IF NOT EXISTS (SELECT 1 FROM users WHERE id = p_user_id AND deleted_at IS NULL) THEN
SET p_success = FALSE;
SET p_message = 'User not found';
ELSE
UPDATE users SET
password_hash = p_new_password_hash,
updated_at = NOW()
WHERE id = p_user_id;
SET p_success = TRUE;
SET p_message = 'Password changed successfully';
END IF;
END IF;
END$$
DELIMITER ;
DELIMITER $$
-- Admin-only: create an account plus its one user_usage row; OUT p_user_id
-- receives the new UUID, p_success/p_message report the outcome.
-- FIX: the two INSERTs now run inside an explicit transaction so the EXIT
-- HANDLER's ROLLBACK is effective — previously, with autocommit, a failure
-- on the user_usage INSERT could leave a users row with no usage row.
-- NOTE(review): the duplicate-email pre-check ignores soft-deleted rows,
-- but the UNIQUE KEY on users.email does not — re-registering a deleted
-- email will hit the handler and report the generic error; confirm intent.
CREATE PROCEDURE `create_user`(
IN p_email VARCHAR(255),
IN p_password_hash VARCHAR(255),
IN p_first_name VARCHAR(100),
IN p_last_name VARCHAR(100),
IN p_role ENUM('admin', 'recruiter'),
IN p_company_name VARCHAR(255),
IN p_admin_id VARCHAR(36),
OUT p_user_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255)
)
BEGIN
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
ROLLBACK;
SET p_success = FALSE;
SET p_message = 'An error occurred while creating user';
END;
IF NOT is_admin(p_admin_id) THEN
SET p_success = FALSE;
SET p_message = 'Access denied: Admin privileges required';
ELSE
IF EXISTS (SELECT 1 FROM users WHERE email = p_email AND deleted_at IS NULL) THEN
SET p_success = FALSE;
SET p_message = 'Email already exists';
ELSE
SET p_user_id = UUID();
START TRANSACTION;
INSERT INTO users (
id, email, password_hash, first_name, last_name,
role, company_name, is_active, email_verified_at
) VALUES (
p_user_id, p_email, p_password_hash, p_first_name, p_last_name,
p_role, p_company_name, TRUE, NOW()
);
INSERT INTO user_usage (user_id) VALUES (p_user_id);
COMMIT;
SET p_success = TRUE;
SET p_message = 'User created successfully';
END IF;
END IF;
END$$
DELIMITER ;
DELIMITER $$
-- Admin-only: flip is_active to FALSE (not a soft delete — deleted_at is
-- untouched, and existing data is preserved).
-- NOTE(review): nothing prevents an admin deactivating their own account;
-- confirm that is acceptable. The handler's ROLLBACK is a no-op (single
-- autocommitted UPDATE, no transaction started).
CREATE PROCEDURE `deactivate_user`(
IN p_user_id VARCHAR(36),
IN p_admin_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255)
)
BEGIN
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
ROLLBACK;
SET p_success = FALSE;
SET p_message = 'An error occurred while deactivating user';
END;
IF NOT is_admin(p_admin_id) THEN
SET p_success = FALSE;
SET p_message = 'Access denied: Admin privileges required';
ELSE
IF NOT EXISTS (SELECT 1 FROM users WHERE id = p_user_id AND deleted_at IS NULL) THEN
SET p_success = FALSE;
SET p_message = 'User not found';
ELSE
UPDATE users SET
is_active = FALSE,
updated_at = NOW()
WHERE id = p_user_id;
SET p_success = TRUE;
SET p_message = 'User deactivated successfully';
END IF;
END IF;
END$$
DELIMITER ;
DELIMITER $$
-- Admin-only: whole-platform dashboard numbers as one JSON object in
-- p_statistics (user counts, job/interview/token totals, revenue).
-- NOTE(review): 'total_revenue' sums `amount` for status = 'paid' only —
-- partial refunds (refunded_amount on 'paid' rows) are not subtracted and
-- 'refunded' rows are excluded entirely; confirm that matches the intended
-- accounting. Job/interview/token totals come from the denormalized
-- user_usage counters, not from the source tables.
CREATE PROCEDURE `get_system_statistics`(
IN p_admin_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255),
OUT p_statistics JSON
)
BEGIN
DECLARE v_total_users INT DEFAULT 0;
DECLARE v_active_users INT DEFAULT 0;
DECLARE v_total_jobs INT DEFAULT 0;
DECLARE v_total_interviews INT DEFAULT 0;
DECLARE v_total_tokens_purchased INT DEFAULT 0;
DECLARE v_total_tokens_used INT DEFAULT 0;
DECLARE v_total_revenue DECIMAL(10,2) DEFAULT 0;
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
ROLLBACK;
SET p_success = FALSE;
SET p_message = 'An error occurred while getting statistics';
END;
IF NOT is_admin(p_admin_id) THEN
SET p_success = FALSE;
SET p_message = 'Access denied: Admin privileges required';
ELSE
SELECT COUNT(*) INTO v_total_users FROM users WHERE deleted_at IS NULL;
SELECT COUNT(*) INTO v_active_users FROM users WHERE is_active = TRUE AND deleted_at IS NULL;
SELECT COALESCE(SUM(jobs_created), 0) INTO v_total_jobs FROM user_usage;
SELECT COALESCE(SUM(interviews_completed), 0) INTO v_total_interviews FROM user_usage;
SELECT COALESCE(SUM(tokens_purchased), 0) INTO v_total_tokens_purchased FROM user_usage;
SELECT COALESCE(SUM(tokens_used), 0) INTO v_total_tokens_used FROM user_usage;
SELECT COALESCE(SUM(amount), 0) INTO v_total_revenue FROM payment_records WHERE status = 'paid';
SET p_statistics = JSON_OBJECT(
'total_users', v_total_users,
'active_users', v_active_users,
'total_jobs', v_total_jobs,
'total_interviews', v_total_interviews,
'total_tokens_purchased', v_total_tokens_purchased,
'total_tokens_used', v_total_tokens_used,
'total_revenue', v_total_revenue,
'generated_at', NOW()
);
SET p_success = TRUE;
SET p_message = 'Statistics retrieved successfully';
END IF;
END$$
DELIMITER ;
DELIMITER $$
-- Admin-only: overwrite a user's profile fields (full replace — callers
-- must pass every field, not just changed ones).
-- NOTE(review): no uniqueness pre-check on p_email; changing it to an
-- address already taken trips the UNIQUE KEY and surfaces only as the
-- generic handler message. The handler's ROLLBACK is a no-op (single
-- autocommitted UPDATE).
CREATE PROCEDURE `update_user`(
IN p_user_id VARCHAR(36),
IN p_email VARCHAR(255),
IN p_first_name VARCHAR(100),
IN p_last_name VARCHAR(100),
IN p_role ENUM('admin', 'recruiter'),
IN p_company_name VARCHAR(255),
IN p_is_active BOOLEAN,
IN p_admin_id VARCHAR(36),
OUT p_success BOOLEAN,
OUT p_message VARCHAR(255)
)
BEGIN
DECLARE EXIT HANDLER FOR SQLEXCEPTION
BEGIN
ROLLBACK;
SET p_success = FALSE;
SET p_message = 'An error occurred while updating user';
END;
IF NOT is_admin(p_admin_id) THEN
SET p_success = FALSE;
SET p_message = 'Access denied: Admin privileges required';
ELSE
IF NOT EXISTS (SELECT 1 FROM users WHERE id = p_user_id AND deleted_at IS NULL) THEN
SET p_success = FALSE;
SET p_message = 'User not found';
ELSE
UPDATE users SET
email = p_email,
first_name = p_first_name,
last_name = p_last_name,
role = p_role,
company_name = p_company_name,
is_active = p_is_active,
updated_at = NOW()
WHERE id = p_user_id;
SET p_success = TRUE;
SET p_message = 'User updated successfully';
END IF;
END IF;
END$$
DELIMITER ;
-- Insert default admin user (password: admin123 - CHANGE THIS!)
-- SECURITY NOTE(review): this well-known default credential is recreated
-- on every deploy of this script; rotate the password (or remove this seed
-- row) before exposing any instance publicly.
INSERT INTO users (id, email, password_hash, first_name, last_name, role, is_active, email_verified_at) VALUES
(UUID(), 'admin@candivista.com', '$2b$10$rcKrXbkDjjjT3vA3kMH78OkyUFNTn6nuCsqK90JEA2.S2p0dVjFUi', 'Admin', 'User', 'admin', TRUE, NOW());

View File

@ -1,13 +1,196 @@
version: '3.8'
services:
# Database Service
database:
build:
context: ./database
dockerfile: Dockerfile
image: candidat/database:${APP_VERSION:-latest}
container_name: candidat-database
environment:
MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD}
MYSQL_DATABASE: ${MYSQL_DATABASE}
MYSQL_USER: ${MYSQL_USER}
MYSQL_PASSWORD: ${MYSQL_PASSWORD}
ports:
- "${DB_PORT:-3307}:3306"
- "${DB_X_PORT:-33061}:33060" # MySQL X Protocol for development
volumes:
- db_data:/var/lib/mysql
networks:
- candidat-network
restart: unless-stopped
command: --default-authentication-plugin=mysql_native_password
healthcheck:
test: ["CMD", "mysqladmin", "ping", "-h", "localhost"]
timeout: 20s
retries: 10
deploy:
resources:
limits:
memory: 1G
reservations:
memory: 512M
# Backend Service
backend:
build:
context: ./backend
dockerfile: Dockerfile
image: candidat/backend:${APP_VERSION:-latest}
container_name: candidat-backend
environment:
NODE_ENV: ${NODE_ENV:-production}
DB_HOST: database
DB_PORT: 3306
DB_NAME: ${MYSQL_DATABASE}
DB_USER: ${MYSQL_USER}
DB_PASSWORD: ${MYSQL_PASSWORD}
AI_PROVIDER: ${AI_PROVIDER}
OPENROUTER_API_KEY: ${OPENROUTER_API_KEY}
OPENROUTER_MODEL: ${OPENROUTER_MODEL}
OPENROUTER_BASE_URL: ${OPENROUTER_BASE_URL}
OPENROUTER_REL_PATH: ${OPENROUTER_REL_PATH}
OPENROUTER_TEMPERATURE: ${OPENROUTER_TEMPERATURE}
AI_PORT: ${AI_PORT}
AI_MODEL: ${AI_MODEL}
# Stripe Payment Configuration
STRIPE_PUBLISHABLE_KEY: ${STRIPE_PUBLISHABLE_KEY}
STRIPE_SECRET_KEY: ${STRIPE_SECRET_KEY}
STRIPE_WEBHOOK_SECRET: ${STRIPE_WEBHOOK_SECRET}
ports:
- "${BACKEND_PORT:-8083}:8083"
volumes:
# Development hot-reload source mount — NOTE: this bind mount is applied unconditionally, not only when NODE_ENV=development; remove it for production deployments
- ./backend/src:/app/src:ro
depends_on:
database:
condition: service_healthy
networks:
- candidat-network
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8083/rest/ai/test-ai"]
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
deploy:
resources:
limits:
memory: 512M
reservations:
memory: 256M
# Frontend Service
frontend:
build:
context: ./frontend
dockerfile: Dockerfile
image: candidat/frontend:${APP_VERSION:-latest}
container_name: candidat-frontend
environment:
NODE_ENV: ${NODE_ENV:-production}
NEXT_PUBLIC_API_URL: ${NEXT_PUBLIC_API_URL}
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY: ${NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY}
ports:
- "${FRONTEND_PORT:-3000}:3000"
volumes:
# Development hot-reload source mount — NOTE: this bind mount is applied unconditionally, not only when NODE_ENV=development; remove it for production deployments
- ./frontend/src:/app/src:ro
depends_on:
backend:
condition: service_healthy
networks:
- candidat-network
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:3000"]
interval: 30s
timeout: 10s
retries: 3
start_period: 30s
deploy:
resources:
limits:
memory: 256M
reservations:
memory: 128M
# Chatbot Service
chatbot:
build:
context: ./AISApp
dockerfile: Dockerfile
image: candidat/chatbot:${APP_VERSION:-latest}
container_name: candidat-chatbot
environment:
ASPNETCORE_ENVIRONMENT: ${NODE_ENV:-production}
OPENROUTER_API_KEY: ${OPENROUTER_API_KEY}
CHATBOT_DB_HOST: database
CHATBOT_DB_NAME: ${MYSQL_DATABASE}
CHATBOT_DB_USER: ${MYSQL_USER}
CHATBOT_DB_PASSWORD: ${MYSQL_PASSWORD}
CHATBOT_DB_PORT: 3306
ports:
- "${CHATBOT_PORT:-5000}:80"
depends_on:
database:
condition: service_healthy
networks:
- candidat-network
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost/api/chat"]
interval: 30s
timeout: 10s
retries: 3
start_period: 40s
deploy:
resources:
limits:
memory: 512M
reservations:
memory: 256M
# Nginx Reverse Proxy
nginx:
image: nginx:alpine
container_name: candidat-nginx
ports:
- "${NGINX_PORT:-80}:80"
- "${NGINX_SSL_PORT:-443}:443"
volumes:
- ./nginx/nginx.conf:/etc/nginx/nginx.conf:ro
- ./nginx/ssl:/etc/nginx/ssl:ro
- nginx_logs:/var/log/nginx
depends_on:
- frontend
- backend
- chatbot
networks:
- candidat-network
restart: unless-stopped
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://localhost/health"]
interval: 30s
timeout: 10s
retries: 3
start_period: 10s
deploy:
resources:
limits:
memory: 128M
reservations:
memory: 64M
volumes:
db_data:
driver: local
nginx_logs:
driver: local
networks:
candidat-network:
driver: bridge

38
env.cloudflare Normal file
View File

@ -0,0 +1,38 @@
# Cloudflare Environment Configuration for VPS
APP_VERSION=1.0.0
NODE_ENV=production
# Database (Docker)
MYSQL_ROOT_PASSWORD=your_secure_root_password_here
MYSQL_DATABASE=candidb_main
MYSQL_USER=candidat
MYSQL_PASSWORD=your_secure_db_password_here
# Database ports (internal only, not exposed externally)
DB_PORT=3306
DB_X_PORT=33060
# Application URLs and Ports (Cloudflare handles SSL termination)
NEXT_PUBLIC_API_URL=https://candivista.com
BACKEND_PORT=8083
FRONTEND_PORT=3000
NGINX_PORT=80
NGINX_SSL_PORT=443
# AI Configuration
AI_PROVIDER=openrouter
OPENROUTER_API_KEY=your_openrouter_api_key_here
OPENROUTER_MODEL=gemma
OPENROUTER_BASE_URL=openrouter.ai
OPENROUTER_REL_PATH=/api
OPENROUTER_TEMPERATURE=0.7
# Fallback AI (if needed)
AI_PORT=11434
AI_MODEL=gpt-oss:20b
# Chatbot Service Configuration
CHATBOT_SERVICE_URL=http://chatbot:80
CHATBOT_SERVICE_TIMEOUT=30000
CHATBOT_FALLBACK_ENABLED=true
CHATBOT_PORT=5000

44
env.example Normal file
View File

@ -0,0 +1,44 @@
# Environment
APP_VERSION=1.0.0
NODE_ENV=development
# Database (Docker)
MYSQL_ROOT_PASSWORD=musicisoverrated
MYSQL_DATABASE=candidb_main
MYSQL_USER=candidat
MYSQL_PASSWORD=StrongLocalDevPass123
DB_PORT=3306
DB_X_PORT=33060
# Application URLs and Ports
NEXT_PUBLIC_API_URL=https://candivista.com
BACKEND_PORT=8083
FRONTEND_PORT=3000
NGINX_PORT=80
NGINX_SSL_PORT=443
# AI Configuration
AI_PROVIDER=openrouter
OPENROUTER_API_KEY=your_openrouter_api_key_here
# Any model from the predefined OpenRouter model list
OPENROUTER_MODEL=gemma
OPENROUTER_BASE_URL=openrouter.ai
OPENROUTER_REL_PATH=/api
OPENROUTER_TEMPERATURE=0.7
# Fallback AI (if needed)
AI_PORT=11434
AI_MODEL=gpt-oss:20b
# Chatbot Service Configuration
CHATBOT_SERVICE_URL=http://chatbot:80
CHATBOT_SERVICE_TIMEOUT=30000
CHATBOT_FALLBACK_ENABLED=true
CHATBOT_PORT=5000
# Stripe Payment Configuration
STRIPE_PUBLISHABLE_KEY=pk_test_your_publishable_key_here
STRIPE_SECRET_KEY=sk_test_your_secret_key_here
STRIPE_WEBHOOK_SECRET=whsec_your_webhook_secret_here
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_your_publishable_key_here

44
env.production Normal file
View File

@ -0,0 +1,44 @@
# Production Environment Configuration for VPS
APP_VERSION=1.0.0
NODE_ENV=production
# Database (Docker)
MYSQL_ROOT_PASSWORD=your_secure_root_password_here
MYSQL_DATABASE=candidb_main
MYSQL_USER=candidat
MYSQL_PASSWORD=your_secure_db_password_here
# Database ports (internal only, not exposed externally)
DB_PORT=3306
DB_X_PORT=33060
# Application URLs and Ports
NEXT_PUBLIC_API_URL=https://candivista.com
BACKEND_PORT=8083
FRONTEND_PORT=3000
NGINX_PORT=80
NGINX_SSL_PORT=443
# AI Configuration
AI_PROVIDER=openrouter
OPENROUTER_API_KEY=your_openrouter_api_key_here
OPENROUTER_MODEL=gemma
OPENROUTER_BASE_URL=openrouter.ai
OPENROUTER_REL_PATH=/api
OPENROUTER_TEMPERATURE=0.7
# Fallback AI (if needed)
AI_PORT=11434
AI_MODEL=gpt-oss:20b
# Chatbot Service Configuration
CHATBOT_SERVICE_URL=http://chatbot:80
CHATBOT_SERVICE_TIMEOUT=30000
CHATBOT_FALLBACK_ENABLED=true
CHATBOT_PORT=5000
# Stripe Payment Configuration
STRIPE_PUBLISHABLE_KEY=pk_test_your_publishable_key_here
STRIPE_SECRET_KEY=sk_test_your_secret_key_here
STRIPE_WEBHOOK_SECRET=whsec_YOUR_WEBHOOK_SECRET_HERE
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=pk_test_your_publishable_key_here

13
frontend/.dockerignore Normal file
View File

@ -0,0 +1,13 @@
node_modules
.next
.git
.gitignore
README.md
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
.DS_Store
*.log

41
frontend/.gitignore vendored Normal file
View File

@ -0,0 +1,41 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions
# testing
/coverage
# next.js
/.next/
/out/
# production
/build
# misc
.DS_Store
*.pem
# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# env files (can opt-in for committing if needed)
.env*
# vercel
.vercel
# typescript
*.tsbuildinfo
next-env.d.ts

55
frontend/Dockerfile Normal file
View File

@ -0,0 +1,55 @@
# Multi-stage build for Next.js
FROM node:18-alpine AS base

# Install dependencies only when needed
FROM base AS deps
# libc6-compat is commonly required by native npm modules on Alpine.
RUN apk add --no-cache libc6-compat
WORKDIR /app

# Copy package files
COPY package*.json ./
RUN npm ci

# Rebuild the source code only when needed
FROM base AS builder
WORKDIR /app
COPY --from=deps /app/node_modules ./node_modules
COPY . .

# Build the application
RUN npm run build

# Production image, copy all the files and run next
FROM base AS runner
WORKDIR /app

ENV NODE_ENV=production

# Install curl for health checks
RUN apk add --no-cache curl

# Run the server as an unprivileged user.
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs

COPY --from=builder /app/public ./public

# Set the correct permission for prerender cache
RUN mkdir .next
RUN chown nextjs:nodejs .next

# Automatically leverage output traces to reduce image size
# (relies on `output: 'standalone'` being enabled in next.config.js)
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static

USER nextjs

EXPOSE 3000

ENV PORT=3000
ENV HOSTNAME="0.0.0.0"

# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
  CMD curl -f http://localhost:3000 || exit 1

CMD ["node", "server.js"]

258
frontend/FRONTEND_README.md Normal file
View File

@ -0,0 +1,258 @@
# Candivista Frontend - Modern AI Recruitment Platform
## 🎨 **Beautiful, Modern Frontend**
A stunning, responsive frontend built with Next.js 15, TypeScript, and Tailwind CSS that showcases the complete Candivista AI-powered recruitment platform.
## ✨ **Key Features**
### 🎯 **Modern Design**
- **Gradient animations** and smooth transitions
- **Glass morphism effects** with backdrop blur
- **Interactive hover animations** and micro-interactions
- **Responsive design** for all devices
- **Custom CSS animations** for enhanced UX
### 🚀 **Performance Optimized**
- **Next.js 15** with App Router
- **TypeScript** for type safety
- **Tailwind CSS** for utility-first styling
- **Optimized images** and lazy loading
- **Smooth scrolling** and navigation
### 🎭 **Interactive Components**
- **AnimatedCounter** - Smooth number animations
- **FeatureCard** - Hover effects and gradients
- **PricingCard** - Interactive pricing plans
- **TechStackCard** - Technology showcase
## 🏗️ **Project Structure**
```
frontend/
├── src/
│ ├── app/
│ │ ├── page.tsx # Main landing page
│ │ ├── globals.css # Global styles & animations
│ │ ├── layout.tsx # Root layout
│ │ └── favicon.ico
│ └── components/
│ ├── AnimatedCounter.tsx # Animated number counter
│ ├── FeatureCard.tsx # Feature showcase card
│ ├── PricingCard.tsx # Pricing plan card
│ └── TechStackCard.tsx # Technology stack card
├── public/ # Static assets
├── package.json
├── next.config.js
├── tailwind.config.js
└── tsconfig.json
```
## 🎨 **Design System**
### **Color Palette**
- **Primary**: Blue (#3B82F6) to Indigo (#6366F1)
- **Secondary**: Purple (#8B5CF6) to Pink (#EC4899)
- **Accent**: Green (#10B981) for success states
- **Neutral**: Gray scale for text and backgrounds
### **Typography**
- **Headings**: Bold, large sizes with gradient text
- **Body**: Clean, readable font with proper line height
- **Responsive**: Scales appropriately on all devices
### **Animations**
- **Gradient animations** for text and backgrounds
- **Hover effects** with scale and shadow transitions
- **Smooth scrolling** between sections
- **Loading animations** with custom spinners
## 🚀 **Getting Started**
### **Prerequisites**
- Node.js 18+
- npm or yarn
- Next.js 15
### **Installation**
```bash
# Install dependencies
npm install
# Start development server
npm run dev
# Build for production
npm run build
# Start production server
npm start
```
### **Development**
```bash
# Run with hot reload
npm run dev
# Type checking
npm run type-check
# Linting
npm run lint
```
## 🎯 **Key Sections**
### **1. Hero Section**
- **Compelling headline** with gradient text animation
- **Clear value proposition** for AI recruitment
- **Call-to-action buttons** with hover effects
- **Interactive illustration** showing the workflow
### **2. Features Section**
- **Multi-tenant architecture** explanation
- **Flexible link system** showcase
- **AI-powered intelligence** highlights
- **Visual dashboard mockup** with animations
### **3. Pricing Section**
- **Token-based pricing** with clear tiers
- **Interactive pricing cards** with hover effects
- **Feature comparison** for each plan
- **Popular plan highlighting**
### **4. Technology Stack**
- **Modern tech showcase** with icons
- **Hover animations** for each technology
- **Performance benefits** explanation
- **Developer experience** highlights
### **5. Stats Section**
- **Animated counters** showing platform success
- **Trust indicators** for credibility
- **Social proof** elements
### **6. Call-to-Action**
- **Compelling final CTA** with gradient background
- **Multiple action options** for different users
- **Urgency and value** messaging
## 🎨 **Custom Animations**
### **CSS Animations**
```css
/* Gradient text animation */
.animate-gradient-x {
animation: gradient-x 3s ease infinite;
}
/* Floating animation */
.animate-float {
animation: float 6s ease-in-out infinite;
}
/* Pulse glow effect */
.animate-pulse-glow {
animation: pulse-glow 2s ease-in-out infinite;
}
```
### **Component Animations**
- **Staggered animations** for feature cards
- **Hover transformations** for interactive elements
- **Smooth transitions** between states
- **Loading states** with custom spinners
## 📱 **Responsive Design**
### **Breakpoints**
- **Mobile**: 320px - 768px
- **Tablet**: 768px - 1024px
- **Desktop**: 1024px+
### **Mobile Optimizations**
- **Touch-friendly** button sizes
- **Optimized typography** for small screens
- **Swipe gestures** for carousels
- **Fast loading** on mobile networks
## 🎯 **Performance Features**
### **Optimization**
- **Image optimization** with Next.js Image component
- **Code splitting** for faster loading
- **Lazy loading** for below-the-fold content
- **Minimal bundle size** with tree shaking
### **SEO Ready**
- **Semantic HTML** structure
- **Meta tags** for social sharing
- **Structured data** for search engines
- **Fast loading** for better rankings
## 🔧 **Customization**
### **Theming**
- **CSS variables** for easy color changes
- **Tailwind config** for design system
- **Component props** for flexibility
- **Dark mode** support ready
### **Content Management**
- **Easy text updates** in components
- **Image replacement** in public folder
- **Configuration** in separate files
- **Environment variables** for API URLs
## 🚀 **Deployment**
### **Production Build**
```bash
# Build optimized production bundle
npm run build
# Start production server
npm start
```
### **Docker Support**
```dockerfile
# Multi-stage build for optimization
FROM node:18-alpine AS builder
WORKDIR /app
COPY package*.json ./
RUN npm ci --only=production
COPY . .
RUN npm run build
FROM node:18-alpine AS runner
WORKDIR /app
COPY --from=builder /app/.next ./.next
COPY --from=builder /app/public ./public
COPY --from=builder /app/package*.json ./
EXPOSE 3000
CMD ["npm", "start"]
```
## 🎉 **Result**
A stunning, modern frontend that:
- ✅ **Showcases** the complete Candivista platform
- ✅ **Engages** users with beautiful animations
- ✅ **Converts** visitors with clear value propositions
- ✅ **Performs** excellently on all devices
- ✅ **Scales** for future feature additions
The frontend perfectly represents the sophisticated AI recruitment platform with a professional, modern design that will impress users and drive conversions.
## 📞 **Support**
For questions or support regarding the frontend:
- **Documentation**: Check component READMEs
- **Issues**: Create GitHub issues
- **Contributions**: Submit pull requests
- **Contact**: Reach out to the development team
---
**Built with ❤️ using Next.js 15, TypeScript, and Tailwind CSS**

36
frontend/README.md Normal file
View File

@ -0,0 +1,36 @@
This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/cli/create-next-app).
## Getting Started
First, run the development server:
```bash
npm run dev
# or
yarn dev
# or
pnpm dev
# or
bun dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.
This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel.
## Learn More
To learn more about Next.js, take a look at the following resources:
- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome!
## Deploy on Vercel
The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.
Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details.

25
frontend/next.config.js Normal file
View File

@ -0,0 +1,25 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
  // Only use standalone output for Docker builds
  // NOTE(review): `next build` runs with NODE_ENV=production even for local
  // builds, so this spread is effectively always applied at build time —
  // confirm that is intended.
  ...(process.env.NODE_ENV === 'production' && { output: 'standalone' }),
  env: {
    // Base URL the browser uses for API calls; baked in at build time.
    NEXT_PUBLIC_API_URL: process.env.NEXT_PUBLIC_API_URL || 'https://candivista.com',
    NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY: process.env.NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY || 'pk_test_your_publishable_key_here',
  },
  // Only add rewrites for production (Docker)
  ...(process.env.NODE_ENV === 'production' && {
    async rewrites() {
      // Proxy /rest/* through Next so the browser avoids cross-origin calls.
      const apiUrl = process.env.NEXT_PUBLIC_API_URL || 'https://candivista.com';
      return [
        {
          source: '/rest/:path*',
          destination: `${apiUrl}/rest/:path*`,
        },
      ];
    },
  }),
}

module.exports = nextConfig
7
frontend/next.config.ts Normal file
View File

@ -0,0 +1,7 @@
import type { NextConfig } from "next";

// NOTE(review): a next.config.js also exists in this directory; Next.js loads
// only one config file, so one of the two should be removed — confirm which
// configuration is the live one before deleting.
const nextConfig: NextConfig = {
  /* config options here */
};

export default nextConfig;

3784
frontend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

32
frontend/package.json Normal file
View File

@ -0,0 +1,32 @@
{
"name": "candivista-frontend",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev --turbopack",
"build": "next build",
"start": "next start"
},
"dependencies": {
"@hookform/resolvers": "^5.2.1",
"@stripe/react-stripe-js": "^4.0.2",
"@stripe/stripe-js": "^7.9.0",
"axios": "^1.11.0",
"next": "15.5.2",
"next-themes": "^0.4.6",
"react": "19.1.0",
"react-dom": "19.1.0",
"react-hook-form": "^7.62.0",
"swagger-ui-react": "^5.29.0",
"zod": "^4.1.5"
},
"devDependencies": {
"@tailwindcss/postcss": "^4",
"@types/node": "^20",
"@types/react": "^19",
"@types/react-dom": "^19",
"@types/swagger-ui-react": "^5.18.0",
"tailwindcss": "^4",
"typescript": "^5"
}
}

View File

@ -0,0 +1,5 @@
// PostCSS configuration — Tailwind CSS v4 performs all processing.
const plugins = ["@tailwindcss/postcss"];

const config = { plugins };

export default config;

1
frontend/public/file.svg Normal file
View File

@ -0,0 +1 @@
<svg fill="none" viewBox="0 0 16 16" xmlns="http://www.w3.org/2000/svg"><path d="M14.5 13.5V5.41a1 1 0 0 0-.3-.7L9.8.29A1 1 0 0 0 9.08 0H1.5v13.5A2.5 2.5 0 0 0 4 16h8a2.5 2.5 0 0 0 2.5-2.5m-1.5 0v-7H8v-5H3v12a1 1 0 0 0 1 1h8a1 1 0 0 0 1-1M9.5 5V2.12L12.38 5zM5.13 5h-.62v1.25h2.12V5zm-.62 3h7.12v1.25H4.5zm.62 3h-.62v1.25h7.12V11z" clip-rule="evenodd" fill="#666" fill-rule="evenodd"/></svg>

After

Width:  |  Height:  |  Size: 391 B

View File

@ -0,0 +1 @@
<svg fill="none" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><g clip-path="url(#a)"><path fill-rule="evenodd" clip-rule="evenodd" d="M10.27 14.1a6.5 6.5 0 0 0 3.67-3.45q-1.24.21-2.7.34-.31 1.83-.97 3.1M8 16A8 8 0 1 0 8 0a8 8 0 0 0 0 16m.48-1.52a7 7 0 0 1-.96 0H7.5a4 4 0 0 1-.84-1.32q-.38-.89-.63-2.08a40 40 0 0 0 3.92 0q-.25 1.2-.63 2.08a4 4 0 0 1-.84 1.31zm2.94-4.76q1.66-.15 2.95-.43a7 7 0 0 0 0-2.58q-1.3-.27-2.95-.43a18 18 0 0 1 0 3.44m-1.27-3.54a17 17 0 0 1 0 3.64 39 39 0 0 1-4.3 0 17 17 0 0 1 0-3.64 39 39 0 0 1 4.3 0m1.1-1.17q1.45.13 2.69.34a6.5 6.5 0 0 0-3.67-3.44q.65 1.26.98 3.1M8.48 1.5l.01.02q.41.37.84 1.31.38.89.63 2.08a40 40 0 0 0-3.92 0q.25-1.2.63-2.08a4 4 0 0 1 .85-1.32 7 7 0 0 1 .96 0m-2.75.4a6.5 6.5 0 0 0-3.67 3.44 29 29 0 0 1 2.7-.34q.31-1.83.97-3.1M4.58 6.28q-1.66.16-2.95.43a7 7 0 0 0 0 2.58q1.3.27 2.95.43a18 18 0 0 1 0-3.44m.17 4.71q-1.45-.12-2.69-.34a6.5 6.5 0 0 0 3.67 3.44q-.65-1.27-.98-3.1" fill="#666"/></g><defs><clipPath id="a"><path fill="#fff" d="M0 0h16v16H0z"/></clipPath></defs></svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

1
frontend/public/next.svg Normal file
View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>

After

Width:  |  Height:  |  Size: 1.3 KiB

View File

@ -0,0 +1 @@
<svg fill="none" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 1155 1000"><path d="m577.3 0 577.4 1000H0z" fill="#fff"/></svg>

After

Width:  |  Height:  |  Size: 128 B

View File

@ -0,0 +1 @@
<svg fill="none" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16"><path fill-rule="evenodd" clip-rule="evenodd" d="M1.5 2.5h13v10a1 1 0 0 1-1 1h-11a1 1 0 0 1-1-1zM0 1h16v11.5a2.5 2.5 0 0 1-2.5 2.5h-11A2.5 2.5 0 0 1 0 12.5zm3.75 4.5a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5M7 4.75a.75.75 0 1 1-1.5 0 .75.75 0 0 1 1.5 0m1.75.75a.75.75 0 1 0 0-1.5.75.75 0 0 0 0 1.5" fill="#666"/></svg>

After

Width:  |  Height:  |  Size: 385 B

View File

@ -0,0 +1,145 @@
"use client";

// Admin panel page: verifies the visitor holds an admin JWT, loads
// platform-wide statistics, and renders the tab chosen in the sidebar.

import { useEffect, useState } from "react";
import { useRouter } from "next/navigation";
import axios from "axios";
import AdminLayout from "../../components/AdminLayout";
import AdminDashboard from "../../components/AdminDashboard";
import UserManagement from "../../components/UserManagement";
import JobManagement from "../../components/JobManagement";
import TokenManagement from "../../components/TokenManagement";
import SystemStats from "../../components/SystemStats";
import DeveloperTools from "../../components/DeveloperTools";

// Shape of the user object returned by GET /rest/auth/me.
interface User {
  id: string;
  email: string;
  first_name: string;
  last_name: string;
  role: string;
  company_name?: string;
  avatar_url?: string;
  is_active: boolean;
  last_login_at?: string;
  email_verified_at?: string;
  created_at: string;
  updated_at: string;
}

// Aggregate platform metrics returned by GET /rest/admin/statistics.
interface SystemStatistics {
  total_users: number;
  active_users: number;
  total_jobs: number;
  total_interviews: number;
  total_tokens_purchased: number;
  total_tokens_used: number;
  total_revenue: number;
  generated_at: string;
}

export default function AdminPage() {
  const [user, setUser] = useState<User | null>(null);
  const [loading, setLoading] = useState(true);
  const [activeTab, setActiveTab] = useState("dashboard");
  const [systemStats, setSystemStats] = useState<SystemStatistics | null>(null);
  const router = useRouter();

  // On mount: require a stored JWT, confirm the caller is an admin,
  // then load the system statistics.
  useEffect(() => {
    const token = localStorage.getItem("token");
    if (!token) {
      router.push("/login");
      return;
    }

    // Verify token and check if user is admin
    axios.get(`${process.env.NEXT_PUBLIC_API_URL}/rest/auth/me`, {
      headers: {
        Authorization: `Bearer ${token}`
      }
    })
      .then(response => {
        const userData = response.data;
        // Non-admins are sent to the regular dashboard instead.
        if (userData.role !== 'admin') {
          router.push("/dashboard");
          return;
        }
        setUser(userData);
        // Fetch system statistics
        fetchSystemStats();
      })
      .catch(() => {
        // Invalid/expired token: clear stored credentials and re-authenticate.
        localStorage.removeItem("token");
        localStorage.removeItem("user");
        router.push("/login");
      })
      .finally(() => {
        setLoading(false);
      });
  }, [router]);

  // Load aggregate platform metrics for the dashboard/stats tabs.
  const fetchSystemStats = async () => {
    try {
      const token = localStorage.getItem("token");
      const response = await axios.get(`${process.env.NEXT_PUBLIC_API_URL}/rest/admin/statistics`, {
        headers: {
          Authorization: `Bearer ${token}`
        }
      });
      setSystemStats(response.data);
    } catch (error) {
      console.error("Failed to fetch system statistics:", error);
    }
  };

  // Clear stored credentials and return to the login screen.
  const handleLogout = () => {
    localStorage.removeItem("token");
    localStorage.removeItem("user");
    router.push("/login");
  };

  const handleTabChange = (tab: string) => {
    setActiveTab(tab);
  };

  // Full-screen spinner while the auth check is in flight.
  if (loading) {
    return (
      <div className="min-h-screen bg-gray-50 flex items-center justify-center">
        <div className="text-center">
          <div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600 mx-auto"></div>
          <p className="mt-4 text-gray-600">Loading admin dashboard...</p>
        </div>
      </div>
    );
  }

  // Map the active sidebar tab to its management component.
  const renderContent = () => {
    switch (activeTab) {
      case "dashboard":
        return <AdminDashboard stats={systemStats} onRefresh={fetchSystemStats} />;
      case "users":
        return <UserManagement />;
      case "jobs":
        return <JobManagement />;
      case "tokens":
        return <TokenManagement />;
      case "stats":
        return <SystemStats stats={systemStats} onRefresh={fetchSystemStats} />;
      case "devtools":
        return <DeveloperTools />;
      default:
        return <AdminDashboard stats={systemStats} onRefresh={fetchSystemStats} />;
    }
  };

  return (
    <AdminLayout
      user={user || undefined}
      activeTab={activeTab}
      onTabChange={handleTabChange}
      onLogout={handleLogout}
    >
      {renderContent()}
    </AdminLayout>
  );
}

View File

@ -0,0 +1,204 @@
"use client";

// Employer dashboard: authenticates the visitor, loads their job postings,
// and renders them in a sidebar layout. Admins are redirected to /admin.

import { useEffect, useState } from "react";
import { useRouter } from "next/navigation";
import axios from "axios";
import Layout from "../../components/Layout";
import JobsList from "../../components/JobsList";

// Shape of the user object returned by GET /rest/auth/me.
interface User {
  id: string;
  email: string;
  first_name: string;
  last_name: string;
  role: string;
  company_name?: string;
  avatar_url?: string;
  is_active: boolean;
  last_login_at?: string;
  email_verified_at?: string;
  created_at: string;
  updated_at: string;
}

// A job posting as returned by GET /rest/jobs, including optional
// dashboard metrics appended by the backend.
interface Job {
  id: string;
  title: string;
  description: string;
  requirements: string;
  skills_required?: string[];
  location?: string;
  employment_type: string;
  experience_level: string;
  salary_min?: number;
  salary_max?: number;
  currency: string;
  status: string;
  icon?: string;
  created_at: string;
  updated_at: string;
  // Metrics
  total_interviews?: number;
  interviews_completed?: number;
  available_interviews?: number;
  running_days?: number;
  applications?: number;
}

export default function DashboardPage() {
  const [user, setUser] = useState<User | null>(null);
  const [jobs, setJobs] = useState<Job[]>([]);
  const [loading, setLoading] = useState(true);
  const [activeSidebarItem, setActiveSidebarItem] = useState("jobs");
  const router = useRouter();

  // On mount: verify the stored JWT, bounce admins to /admin, then fetch jobs.
  // NOTE(review): debug console.log calls are left in throughout this effect —
  // remove or gate them before production.
  useEffect(() => {
    console.log("Dashboard useEffect triggered");
    const token = localStorage.getItem("token");
    console.log("Token found:", !!token);
    if (!token) {
      console.log("No token, redirecting to login");
      router.push("/login");
      return;
    }

    // Verify token with backend
    // NOTE(review): auth uses axios while the jobs request below uses fetch —
    // consider standardizing on one HTTP client.
    console.log("Verifying token with backend...");
    axios.get(`${process.env.NEXT_PUBLIC_API_URL}/rest/auth/me`, {
      headers: {
        Authorization: `Bearer ${token}`
      }
    })
      .then(async response => {
        console.log("Auth response received:", response.data);
        const userData = response.data;
        setUser(userData);

        // Redirect admins to admin panel
        if (userData.role === 'admin') {
          console.log("Admin user, redirecting to admin panel");
          router.push("/admin");
          return;
        }

        // Fetch jobs from backend
        try {
          console.log("Fetching jobs from backend...");
          const jobsResponse = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/rest/jobs`, {
            headers: {
              Authorization: `Bearer ${token}`
            }
          });
          console.log("Jobs response status:", jobsResponse.status);
          if (jobsResponse.ok) {
            const jobsData = await jobsResponse.json();
            console.log("Jobs data received:", jobsData);
            console.log("Jobs array:", jobsData.jobs);
            console.log("Jobs count:", jobsData.jobs?.length || 0);
            setJobs(jobsData.jobs || []);
          } else {
            // Silencing console usage to satisfy linter in Server Components
            await jobsResponse.text().catch(() => undefined);
            setJobs([]);
          }
        } catch (error) {
          console.error("Error fetching jobs:", error);
          setJobs([]);
        }
      })
      .catch((error) => {
        // Invalid/expired token: clear stored credentials and re-authenticate.
        console.error("Auth error:", error);
        localStorage.removeItem("token");
        localStorage.removeItem("user");
        router.push("/login");
      })
      .finally(() => {
        console.log("Setting loading to false");
        setLoading(false);
      });
  }, [router]);

  // Clear stored credentials and return to the login screen.
  const handleLogout = () => {
    localStorage.removeItem("token");
    localStorage.removeItem("user");
    router.push("/login");
  };

  const handleSidebarItemClick = (item: string) => {
    setActiveSidebarItem(item);
    // TODO: Handle navigation to different pages
    console.log("Navigate to:", item);
  };

  const handleEditJob = (job: Job) => {
    // TODO: Navigate to edit job page or open modal
    console.log("Edit job:", job.id);
  };

  const handleDeleteJob = (job: Job) => {
    // TODO: Show confirmation dialog and delete job
    console.log("Delete job:", job.id);
  };

  const handleViewJob = (job: Job) => {
    // This will be handled by the JobsList component now
    console.log("View job:", job.id);
  };

  // Re-fetch the job list (invoked by JobsList after create/update/delete).
  const refreshJobs = async () => {
    try {
      const token = localStorage.getItem("token");
      if (!token) return;
      console.log("Refreshing jobs from backend...");
      const jobsResponse = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/rest/jobs`, {
        headers: {
          Authorization: `Bearer ${token}`
        }
      });
      if (jobsResponse.ok) {
        const jobsData = await jobsResponse.json();
        console.log("Jobs refreshed:", jobsData);
        setJobs(jobsData.jobs || []);
      } else {
        console.error("Failed to refresh jobs:", jobsResponse.statusText);
      }
    } catch (error) {
      console.error("Error refreshing jobs:", error);
    }
  };

  // Full-screen spinner while the auth check is in flight.
  if (loading) {
    return (
      <div className="min-h-screen bg-gray-50 flex items-center justify-center">
        <div className="text-center">
          <div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600 mx-auto"></div>
          <p className="mt-4 text-gray-600">Loading...</p>
        </div>
      </div>
    );
  }

  return (
    <Layout
      title="Jobs"
      user={user || undefined}
      activeSidebarItem={activeSidebarItem}
      onSidebarItemClick={handleSidebarItemClick}
      onLogout={handleLogout}
    >
      <JobsList
        jobs={jobs}
        onEditJob={handleEditJob}
        onDeleteJob={handleDeleteJob}
        onViewJob={handleViewJob}
        onRefreshJobs={refreshJobs}
      />
    </Layout>
  );
}

View File

@ -0,0 +1,101 @@
"use client";

// Interactive API documentation page: fetches the backend's Swagger/OpenAPI
// spec and renders it with swagger-ui-react.

import { useEffect, useState } from "react";
import dynamic from "next/dynamic";

// Dynamically import SwaggerUI to avoid SSR issues
const SwaggerUI = dynamic(() => import("swagger-ui-react"), { ssr: false });

export default function DocsPage() {
  const [swaggerSpec, setSwaggerSpec] = useState(null);
  const [loading, setLoading] = useState(true);
  const [error, setError] = useState<string | null>(null);

  // Fetch the OpenAPI document once on mount.
  // NOTE(review): the fallback here is http://localhost:8083, while
  // next.config.js defaults NEXT_PUBLIC_API_URL to https://candivista.com —
  // confirm which default is intended.
  useEffect(() => {
    const fetchSwaggerSpec = async () => {
      try {
        const apiUrl = process.env.NEXT_PUBLIC_API_URL || "http://localhost:8083";
        const response = await fetch(`${apiUrl}/doc/swagger.json`);
        if (!response.ok) {
          throw new Error(`Failed to fetch API spec: ${response.status}`);
        }
        const spec = await response.json();
        setSwaggerSpec(spec);
      } catch (err) {
        console.error("Error fetching Swagger spec:", err);
        setError(err instanceof Error ? err.message : "Failed to load API documentation");
      } finally {
        setLoading(false);
      }
    };

    fetchSwaggerSpec();
  }, []);

  // Spinner while the spec downloads.
  if (loading) {
    return (
      <div className="min-h-screen bg-gray-50 dark:bg-gray-900 flex items-center justify-center">
        <div className="text-center">
          <div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600 mx-auto"></div>
          <p className="mt-4 text-gray-600 dark:text-gray-300">Loading API documentation...</p>
        </div>
      </div>
    );
  }

  // Troubleshooting screen shown when the spec cannot be fetched.
  if (error) {
    return (
      <div className="min-h-screen bg-gray-50 dark:bg-gray-900 flex items-center justify-center">
        <div className="text-center max-w-md mx-auto p-6">
          <div className="text-red-500 text-6xl mb-4"></div>
          <h1 className="text-2xl font-bold text-gray-900 dark:text-white mb-2">API Documentation Unavailable</h1>
          <p className="text-gray-600 dark:text-gray-300 mb-4">{error}</p>
          <div className="space-y-2 text-sm text-gray-500 dark:text-gray-400">
            <p>Make sure the backend is running on:</p>
            <code className="block bg-gray-100 dark:bg-gray-800 p-2 rounded">
              {process.env.NEXT_PUBLIC_API_URL || "http://localhost:8083"}
            </code>
            <p>And Swagger is available at:</p>
            <code className="block bg-gray-100 dark:bg-gray-800 p-2 rounded">
              /doc and /doc/swagger.json
            </code>
          </div>
        </div>
      </div>
    );
  }

  return (
    <div className="min-h-screen bg-white dark:bg-gray-900">
      <div className="bg-gray-50 dark:bg-gray-800 border-b border-gray-200 dark:border-gray-700 px-6 py-4">
        <div className="max-w-7xl mx-auto">
          <h1 className="text-2xl font-bold text-gray-900 dark:text-white">API Documentation</h1>
          <p className="text-gray-600 dark:text-gray-300 mt-1">
            Interactive API documentation for Candivista backend services
          </p>
        </div>
      </div>
      <div className="max-w-7xl mx-auto">
        {swaggerSpec && (
          <SwaggerUI
            spec={swaggerSpec}
            docExpansion="list"
            defaultModelsExpandDepth={2}
            defaultModelExpandDepth={2}
            tryItOutEnabled={true}
            requestInterceptor={(request) => {
              // Forward the stored JWT so "Try it out" calls are authenticated.
              const token = localStorage.getItem("token");
              if (token) {
                request.headers.Authorization = `Bearer ${token}`;
              }
              return request;
            }}
          />
        )}
      </div>
    </div>
  );
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

View File

@ -0,0 +1,236 @@
@import "tailwindcss";
/* Custom animations */
@keyframes gradient-x {
0%, 100% {
background-size: 200% 200%;
background-position: left center;
}
50% {
background-size: 200% 200%;
background-position: right center;
}
}
@keyframes float {
0%, 100% {
transform: translateY(0px);
}
50% {
transform: translateY(-20px);
}
}
@keyframes pulse-glow {
0%, 100% {
box-shadow: 0 0 20px rgba(59, 130, 246, 0.3);
}
50% {
box-shadow: 0 0 40px rgba(59, 130, 246, 0.6);
}
}
@keyframes slide-up {
from {
opacity: 0;
transform: translateY(30px);
}
to {
opacity: 1;
transform: translateY(0);
}
}
@keyframes fade-in {
from {
opacity: 0;
}
to {
opacity: 1;
}
}
@keyframes scale-in {
from {
opacity: 0;
transform: scale(0.9);
}
to {
opacity: 1;
transform: scale(1);
}
}
/* Utility classes — bind the keyframes defined above to elements.
   The gradient/float/glow variants loop forever; slide/fade/scale
   run once as entrance animations. */
.animate-gradient-x {
  animation: gradient-x 3s ease infinite;
}
.animate-float {
  animation: float 6s ease-in-out infinite;
}
.animate-pulse-glow {
  animation: pulse-glow 2s ease-in-out infinite;
}
.animate-slide-up {
  animation: slide-up 0.6s ease-out;
}
.animate-fade-in {
  animation: fade-in 0.8s ease-out;
}
.animate-scale-in {
  animation: scale-in 0.5s ease-out;
}
/* Custom scrollbar (WebKit/Blink only) — slim 8px bar with a
   blue→purple gradient thumb that darkens on hover. */
::-webkit-scrollbar {
  width: 8px;
}
::-webkit-scrollbar-track {
  background: #f1f5f9;
}
::-webkit-scrollbar-thumb {
  background: linear-gradient(to bottom, #3b82f6, #8b5cf6);
  border-radius: 4px;
}
::-webkit-scrollbar-thumb:hover {
  background: linear-gradient(to bottom, #2563eb, #7c3aed);
}

/* Glass morphism effect — translucent white panel with a background
   blur and a faint border. */
.glass {
  background: rgba(255, 255, 255, 0.1);
  backdrop-filter: blur(10px);
  border: 1px solid rgba(255, 255, 255, 0.2);
}

/* Gradient text — paints glyphs with a blue→purple→pink gradient by
   clipping the background to the text and hiding the fill color. */
.gradient-text {
  background: linear-gradient(135deg, #3b82f6, #8b5cf6, #ec4899);
  -webkit-background-clip: text;
  -webkit-text-fill-color: transparent;
  background-clip: text;
}

/* Hover effects — lift the element 5px and add a soft drop shadow. */
.hover-lift {
  transition: transform 0.3s ease, box-shadow 0.3s ease;
}
.hover-lift:hover {
  transform: translateY(-5px);
  box-shadow: 0 20px 40px rgba(0, 0, 0, 0.1);
}
/* Custom button styles — gradient primary button that brightens,
   lifts 2px, and gains a blue glow on hover. */
.btn-primary {
  background: linear-gradient(135deg, #3b82f6, #8b5cf6);
  transition: all 0.3s ease;
}
.btn-primary:hover {
  background: linear-gradient(135deg, #2563eb, #7c3aed);
  transform: translateY(-2px);
  box-shadow: 0 10px 25px rgba(59, 130, 246, 0.3);
}

/* Card hover effects — stronger lift/shadow variant intended for cards. */
.card-hover {
  transition: all 0.3s ease;
}
.card-hover:hover {
  transform: translateY(-8px);
  box-shadow: 0 25px 50px rgba(0, 0, 0, 0.15);
}

/* Loading animation — appends an animated ellipsis (0 → 3 dots) after
   the element by cycling the ::after pseudo-element's content. */
.loading-dots {
  display: inline-block;
}
.loading-dots::after {
  content: '';
  animation: loading-dots 1.5s infinite;
}
@keyframes loading-dots {
  0%, 20% {
    content: '';
  }
  40% {
    content: '.';
  }
  60% {
    content: '..';
  }
  80%, 100% {
    content: '...';
  }
}
/* Responsive text — hero title scales 3rem → 4rem → 5rem across the
   mobile / tablet / desktop breakpoints. */
@media (max-width: 640px) {
  .hero-title {
    font-size: 3rem;
    line-height: 1.1;
  }
}
@media (min-width: 641px) {
  .hero-title {
    font-size: 4rem;
    line-height: 1.1;
  }
}
@media (min-width: 1024px) {
  .hero-title {
    font-size: 5rem;
    line-height: 1.1;
  }
}

/* Smooth scrolling for in-page anchor navigation. */
html {
  scroll-behavior: smooth;
}
/* Focus styles */
.focus-ring:focus {
  outline: none;
  /* FIX: `ring`, `ring-color`, and `ring-offset` are Tailwind utility
     names, not CSS properties — browsers dropped them, so removing the
     outline left focused elements with NO visible focus indicator.
     Emulate Tailwind's ring-2 + ring-offset-2 with a layered box-shadow:
     an inner 2px white "offset" layer beneath a 2px blue ring. */
  box-shadow: 0 0 0 2px #ffffff, 0 0 0 4px #3b82f6;
}

/* Custom selection — tint selected text light blue. */
::selection {
  background: rgba(59, 130, 246, 0.2);
  color: #1e40af;
}

/* Dark mode support — helpers that follow the OS color-scheme preference
   (independent of the class-based theme toggle used elsewhere). */
@media (prefers-color-scheme: dark) {
  .dark-mode-text {
    color: #f8fafc;
  }
  .dark-mode-bg {
    background: #0f172a;
  }
}

/* Print styles — hide on-screen-only chrome when printing. */
@media print {
  .no-print {
    display: none !important;
  }
}

View File

@ -0,0 +1,275 @@
"use client";
import { useEffect, useState, Suspense } from "react";
import { useSearchParams } from "next/navigation";
import ConsentScreen from "../../components/ConsentScreen";
import NameInputScreen from "../../components/NameInputScreen";
import MandatoryQuestionsScreen from "../../components/MandatoryQuestionsScreen";
import ChatScreen from "../../components/ChatScreen";
// Job posting as returned by the backend's interview-link endpoint.
// Optional fields are those the backend may omit from the response.
interface Job {
  id: string;
  title: string;
  description: string;
  requirements: string;
  skills_required?: string[];
  location?: string;
  employment_type: string;
  experience_level: string;
  salary_min?: number;
  salary_max?: number;
  currency: string;
  status: string;
  icon?: string;
  created_at: string;   // timestamps arrive as strings — presumably ISO; confirm against API
  updated_at: string;
}

// Aggregate UI state for the interview flow's linear state machine.
interface InterviewState {
  // Which screen is currently rendered.
  step: 'loading' | 'consent' | 'name_input' | 'mandatory_questions' | 'chat' | 'completed' | 'error';
  job: Job | null;             // resolved job; null until the link lookup succeeds
  candidateName: string;       // captured on the name_input step
  error: string | null;        // message shown on the error step
  consentGiven: boolean;       // distinguishes completed vs declined on the final screen
  mandatoryAnswers: string[];  // answers collected before the chat starts
}
/**
 * Candidate-facing interview flow, driven by the `id` (interview link) and
 * optional `test=true` query params. Walks a linear state machine:
 * loading → consent → name_input → mandatory_questions → chat → completed,
 * with an `error` step for missing/invalid/expired links.
 */
function InterviewPageInner() {
  const searchParams = useSearchParams();
  const linkId = searchParams.get('id');
  // Test mode is forwarded to the question/chat screens (behavior defined there).
  const isTestMode = searchParams.get('test') === 'true';
  const [state, setState] = useState<InterviewState>({
    step: 'loading',
    job: null,
    candidateName: '',
    error: null,
    consentGiven: false,
    mandatoryAnswers: []
  });

  // Resolve the interview link to a job on mount (and if the link changes).
  useEffect(() => {
    if (linkId) {
      fetchJobByLink();
    } else {
      setState(prev => ({
        ...prev,
        step: 'error',
        error: "Invalid interview link"
      }));
    }
  }, [linkId]);

  // Look up the job attached to this interview link; on success advance to
  // the consent step, otherwise fall through to the error screen.
  const fetchJobByLink = async () => {
    try {
      const response = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/rest/jobs/interview/${linkId}`);
      if (response.ok) {
        const data = await response.json();
        setState(prev => ({
          ...prev,
          step: 'consent',
          job: data.job
        }));
      } else {
        setState(prev => ({
          ...prev,
          step: 'error',
          error: "Interview link not found or expired"
        }));
      }
    } catch (err) {
      // Network/parse failure — shown as a generic error screen.
      setState(prev => ({
        ...prev,
        step: 'error',
        error: "Failed to load interview"
      }));
    }
  };

  // Consent gate: accept → ask for the candidate's name; decline → record
  // the refusal server-side and jump to the completed screen, which renders
  // the "declined" variant because consentGiven stays false.
  const handleConsent = (consent: boolean) => {
    if (consent) {
      setState(prev => ({
        ...prev,
        step: 'name_input',
        consentGiven: true
      }));
    } else {
      // Log failed attempt and show sad smiley
      logFailedAttempt();
      setState(prev => ({
        ...prev,
        step: 'completed'
      }));
    }
  };

  // Store the candidate's name and move on to the mandatory questions.
  const handleNameSubmit = (name: string) => {
    setState(prev => ({
      ...prev,
      step: 'mandatory_questions',
      candidateName: name
    }));
  };

  // Keep the mandatory answers so they can seed the chat, then start it.
  const handleMandatoryQuestionsComplete = (answers: string[]) => {
    setState(prev => ({
      ...prev,
      step: 'chat',
      mandatoryAnswers: answers
    }));
  };

  // Chat finished — show the thank-you screen.
  const handleInterviewComplete = () => {
    setState(prev => ({
      ...prev,
      step: 'completed'
    }));
  };

  // Best-effort, fire-and-forget notification that the candidate declined;
  // failures are only logged so the UI flow is never blocked.
  const logFailedAttempt = async () => {
    try {
      await fetch(`${process.env.NEXT_PUBLIC_API_URL}/rest/jobs/interview/${linkId}/failed`, {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        }
      });
    } catch (error) {
      console.error('Failed to log failed attempt:', error);
    }
  };

  // ---- Render: one early-return branch per step of the state machine ----

  if (state.step === 'loading') {
    return (
      <div className="min-h-screen bg-gray-50 flex items-center justify-center">
        <div className="text-center">
          <div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600 mx-auto"></div>
          <p className="mt-4 text-gray-600">Loading interview...</p>
        </div>
      </div>
    );
  }

  // Error screen; the `!state.job` half also narrows `state.job` to non-null
  // for every branch below.
  if (state.step === 'error' || !state.job) {
    return (
      <div className="min-h-screen bg-gray-50 flex items-center justify-center">
        <div className="text-center">
          <div className="w-16 h-16 bg-red-100 rounded-full flex items-center justify-center mx-auto mb-4">
            <svg className="w-8 h-8 text-red-600" fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-2.5L13.732 4c-.77-.833-1.964-.833-2.732 0L3.732 16.5c-.77.833.192 2.5 1.732 2.5z" />
            </svg>
          </div>
          <h1 className="text-xl font-semibold text-gray-900 mb-2">Interview Not Available</h1>
          <p className="text-gray-600">{state.error}</p>
        </div>
      </div>
    );
  }

  if (state.step === 'consent') {
    return (
      <ConsentScreen
        job={state.job}
        onConsent={handleConsent}
      />
    );
  }

  if (state.step === 'name_input') {
    return (
      <NameInputScreen
        onNameSubmit={handleNameSubmit}
      />
    );
  }

  if (state.step === 'mandatory_questions') {
    return (
      <MandatoryQuestionsScreen
        job={state.job!}
        candidateName={state.candidateName}
        linkId={linkId!}
        isTestMode={isTestMode}
        onComplete={handleMandatoryQuestionsComplete}
      />
    );
  }

  if (state.step === 'chat') {
    return (
      <ChatScreen
        job={state.job}
        candidateName={state.candidateName}
        linkId={linkId!}
        isTestMode={isTestMode}
        mandatoryAnswers={state.mandatoryAnswers}
        onComplete={handleInterviewComplete}
      />
    );
  }

  // Final screen: thank-you when consent was given, declined variant otherwise.
  if (state.step === 'completed') {
    return (
      <div className="min-h-screen bg-gray-50 flex items-center justify-center">
        <div className="text-center max-w-md mx-auto p-6">
          {state.consentGiven ? (
            <>
              <div className="w-16 h-16 bg-green-100 rounded-full flex items-center justify-center mx-auto mb-4">
                <svg className="w-8 h-8 text-green-600" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                  <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M5 13l4 4L19 7" />
                </svg>
              </div>
              <h1 className="text-xl font-semibold text-gray-900 mb-2">Interview Completed</h1>
              <p className="text-gray-600 mb-4">
                Thank you for completing the interview. We'll review your responses and get back to you soon.
              </p>
              <div className="bg-blue-50 border border-blue-200 rounded-lg p-4">
                <p className="text-sm text-blue-800">
                  <strong>Position:</strong> {state.job.title}
                </p>
                <p className="text-sm text-blue-800">
                  <strong>Company:</strong> {state.job.location || "Remote"}
                </p>
              </div>
            </>
          ) : (
            <>
              <div className="w-16 h-16 bg-gray-100 rounded-full flex items-center justify-center mx-auto mb-4">
                <span className="text-4xl">😢</span>
              </div>
              <h1 className="text-xl font-semibold text-gray-900 mb-2">Interview Declined</h1>
              <p className="text-gray-600 mb-4">
                We understand you've chosen not to proceed with the interview. Thank you for your time.
              </p>
              <div className="bg-gray-50 border border-gray-200 rounded-lg p-4">
                <p className="text-sm text-gray-600">
                  <strong>Position:</strong> {state.job.title}
                </p>
                <p className="text-sm text-gray-600">
                  <strong>Company:</strong> {state.job.location || "Remote"}
                </p>
              </div>
            </>
          )}
        </div>
      </div>
    );
  }

  // Unreachable given the step union, but satisfies exhaustive returns.
  return null;
}
export default function InterviewPage() {
return (
<Suspense fallback={
<div className="min-h-screen bg-gray-50 flex items-center justify-center">
<div className="text-center">
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600 mx-auto"></div>
<p className="mt-4 text-gray-600">Loading interview...</p>
</div>
</div>
}>
<InterviewPageInner />
</Suspense>
);
}

View File

@ -0,0 +1,40 @@
import type { Metadata } from "next";
import { Geist, Geist_Mono } from "next/font/google";
import "./globals.css";
import { ThemeProvider } from "next-themes";
import StripeProvider from "../components/StripeProvider";
// Geist sans/mono loaded via next/font and exposed as CSS custom properties
// so the <body> className below can reference them.
const geistSans = Geist({
  variable: "--font-geist-sans",
  subsets: ["latin"],
});

const geistMono = Geist_Mono({
  variable: "--font-geist-mono",
  subsets: ["latin"],
});
// Default <head> metadata applied to every route.
// NOTE(review): the description still reads "authentication system" while the
// title says "Candivista App" — presumably a leftover from the starter
// template; confirm the intended marketing copy before changing it.
export const metadata: Metadata = {
  title: "Candivista App",
  description: "A modern authentication system with Next.js and TypeScript",
};
/**
 * Root layout shared by every route: applies the Geist font variables and
 * the app-wide light/dark colors to <body>, then mounts the theme and
 * Stripe providers around the page content.
 */
export default function RootLayout({
  children,
}: Readonly<{
  children: React.ReactNode;
}>) {
  // Font CSS variables plus the base background/text color scheme.
  const bodyClassName = `${geistSans.variable} ${geistMono.variable} antialiased bg-white dark:bg-gray-900 text-gray-900 dark:text-white`;
  return (
    <html lang="en" suppressHydrationWarning>
      <body className={bodyClassName}>
        <ThemeProvider attribute="class" defaultTheme="system" enableSystem>
          <StripeProvider>{children}</StripeProvider>
        </ThemeProvider>
      </body>
    </html>
  );
}

Some files were not shown because too many files have changed in this diff Show More