summary-task/94-backend #20

Merged
Donat merged 2 commits from summary-task/94-backend into main 2025-08-23 04:28:46 +02:00
23911 changed files with 1148085 additions and 2711630 deletions
-18
View File
@@ -1,18 +0,0 @@
DB_PASSWORD=QwEr12345.
Default_User=Admin
Default_Password=Admin.Admin
# Database configuration
DB_1=Default
DB_2=QUESTIONS
DB_3=STATISTICS
DB_4=USERS
DB_PSWD_1=42vM2ftPy1YetSz9AdAHkayxKvQHuV9Wh0nT8c5DKYt
DB_PSWD_2=3cYzPBoLtPindO53Eh3cA80DqqjYvq7CJOzA2Eik00k
DB_PSWD_3=HyJcDLDW3ZYXbMLzlkL4zWqWwyhExQ4XvVCuT8ihld0
DB_PSWD_4=965o6d3Mz7YlgI8GkJkiZD6PIpTZfaBzIheZX3nIpY3
# Ports configuration
BACKEND_PORT=3000
FRONTEND_PORT=5173
DATABASE_PORT=80
-30
View File
@@ -1,30 +0,0 @@
# Multi-stage build: frontend (Vite) is built first, its dist/ output is
# copied into the backend's static dir, then the backend TS is compiled.
FROM node:18 AS base
WORKDIR /usr/local/app

################ FRONTEND BUILD #################
FROM base AS client-build
WORKDIR /usr/local/app/frontend
# Copy lockfiles first so the npm ci layer is cached across source changes.
COPY SerpentRace_Frontend/package.json SerpentRace_Frontend/package-lock.json ./
RUN npm ci
COPY SerpentRace_Frontend/ ./
RUN npm run build

################ BACKEND BUILD #################
FROM base AS backend-build
WORKDIR /usr/local/app/backend
COPY SerpentRace_Backend/package.json SerpentRace_Backend/package-lock.json ./
RUN npm ci
COPY SerpentRace_Backend/ ./
# Copy frontend build output to backend static directory
COPY --from=client-build /usr/local/app/frontend/dist ./src/static
# generate js files from ts files
RUN npm run build

################ PRODUCTION IMAGE #################
FROM backend-build AS prod
WORKDIR /usr/local/app/backend
# FIX: the prod stage previously set NODE_ENV=development and re-ran a plain
# `npm ci`, which shipped devDependencies and dev-mode behavior in the
# production image. Build output (dist/) already exists from backend-build,
# so prune to production dependencies only.
ENV NODE_ENV=production
RUN npm ci --omit=dev
EXPOSE 3000
CMD ["node", "dist/index.js"]
-91
View File
@@ -1,91 +0,0 @@
# Development stack: Traefik reverse proxy, Node backend (hot reload via
# nodemon), Vite frontend dev server, MariaDB, and Adminer.
services:
  proxy:
    image: traefik:v2.11
    command: --providers.docker
    ports:
      # Quoted: colon-separated numbers can be mistyped by YAML 1.1 parsers;
      # Compose convention is to always quote port mappings.
      - "80:80"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
  backend:
    build:
      context: ..
      dockerfile: SerpentRace_Docker/Dockerfile
      target: prod
    environment:
      NODE_ENV: "development"
      DB_HOST: "db"
      DB_PORT: "3306"
      DATABASE_NAME_1: "${DB_1}"
      DATABASE_PSWD_1: "${DB_PSWD_1}"
      # DATABASE_URL_2: "mysql://${DB_2}:${DB_PSWD_2}@db:3306/${DB_2}"
      # DATABASE_URL_3: "mysql://${DB_3}:${DB_PSWD_3}@db:3306/${DB_3}"
      # DATABASE_URL_4: "mysql://${DB_4}:${DB_PSWD_4}@db:3306/${DB_4}"
    # Overrides the image CMD so the container runs the TS sources directly
    # with hot reload instead of the compiled dist/ output.
    command: ["npx", "nodemon", "src/index.ts", "--watch", "src", "--ext", "ts"]
    develop:
      watch:
        - path: ../SerpentRace_Backend/src
          action: sync
          target: /usr/local/app/backend/src
        - path: ../SerpentRace_Backend/prisma
          action: sync
          target: /usr/local/app/backend/prisma
          # NOTE(review): "actions" is not a documented field of Compose
          # develop.watch entries (only action/path/target/ignore are) —
          # confirm the tooling that consumes this, or remove it.
          actions:
            - migrate:all
        - path: ../SerpentRace_Backend/package.json
          action: rebuild
    labels:
      traefik.http.routers.backend.rule: Host(`api.localhost`)
      # Label values are strings; quote so YAML does not emit an integer.
      traefik.http.services.backend.loadbalancer.server.port: "3000"
    depends_on:
      - db
  frontend:
    build:
      context: ..
      dockerfile: SerpentRace_Docker/Dockerfile
      target: client-build
    command: ["npm", "run", "dev", "--", "--host", "0.0.0.0", "--port", "5173"]
    working_dir: /usr/local/app/frontend
    develop:
      watch:
        - path: ../SerpentRace_Frontend/src
          action: sync
          target: /usr/local/app/frontend/src
        - path: ../SerpentRace_Frontend/package.json
          action: rebuild
    labels:
      traefik.http.routers.frontend.rule: Host(`localhost`)
      traefik.http.services.frontend.loadbalancer.server.port: "5173"
  db:
    image: mariadb:latest
    restart: always
    environment:
      MYSQL_ROOT_PASSWORD: ${DB_PASSWORD}
      MYSQL_DATABASE: ${DB_1}
      MYSQL_USER: ${Default_User}
      MYSQL_PASSWORD: ${Default_Password}
      # DB_*/PSWD_* are consumed by the (currently disabled) init scripts
      # mounted below; harmless extras for the mariadb image itself.
      DB_1: ${DB_1}
      DB_2: ${DB_2}
      DB_3: ${DB_3}
      DB_4: ${DB_4}
      PSWD_1: ${DB_PSWD_1}
      PSWD_2: ${DB_PSWD_2}
      PSWD_3: ${DB_PSWD_3}
      PSWD_4: ${DB_PSWD_4}
    volumes:
      # - ./init-multi-db.sh:/docker-entrypoint-initdb.d/init-multi-db.sh:ro
      # - ./init-db-users.sh:/docker-entrypoint-initdb.d/init-db-users.sh:ro
      - db_data:/var/lib/mysql
  adminer:
    image: adminer
    restart: always
    labels:
      traefik.http.routers.adminer.rule: Host(`db.localhost`)
      traefik.http.services.adminer.loadbalancer.server.port: "8080"
volumes:
  db_data:
@@ -1,17 +0,0 @@
#!/bin/bash
# Creates one MariaDB user per application database (DB_1..DB_4) and grants
# each user full privileges on the database of the same name.
# Expects env vars: MYSQL_ROOT_PASSWORD, DB_1..DB_4 (database/user names),
# PSWD_1..PSWD_4 (matching passwords) — supplied by docker-compose.
# Intended to run from /docker-entrypoint-initdb.d on first container start.
# assumes the databases already exist (DB_1 via MYSQL_DATABASE, DB_2..4 via
# init-multi-db.sh) — TODO confirm script ordering in the entrypoint dir.
set -e
mariadb -uroot -p"$MYSQL_ROOT_PASSWORD" <<EOSQL
CREATE USER IF NOT EXISTS '$DB_1'@'%' IDENTIFIED BY '$PSWD_1';
GRANT ALL PRIVILEGES ON \`$DB_1\`.* TO '$DB_1'@'%';
CREATE USER IF NOT EXISTS '$DB_2'@'%' IDENTIFIED BY '$PSWD_2';
GRANT ALL PRIVILEGES ON \`$DB_2\`.* TO '$DB_2'@'%';
CREATE USER IF NOT EXISTS '$DB_3'@'%' IDENTIFIED BY '$PSWD_3';
GRANT ALL PRIVILEGES ON \`$DB_3\`.* TO '$DB_3'@'%';
CREATE USER IF NOT EXISTS '$DB_4'@'%' IDENTIFIED BY '$PSWD_4';
GRANT ALL PRIVILEGES ON \`$DB_4\`.* TO '$DB_4'@'%';
FLUSH PRIVILEGES;
EOSQL
@@ -1,7 +0,0 @@
#!/bin/bash
# Creates the secondary application databases (DB_2..DB_4). DB_1 is created
# by the mariadb image itself via the MYSQL_DATABASE env var.
# Expects env vars: MYSQL_ROOT_PASSWORD, DB_2..DB_4 — supplied by
# docker-compose. Intended to run from /docker-entrypoint-initdb.d on the
# first container start (a populated data volume skips init scripts).
set -e
mariadb -uroot -p"$MYSQL_ROOT_PASSWORD" <<EOSQL
CREATE DATABASE IF NOT EXISTS \`${DB_2}\`;
CREATE DATABASE IF NOT EXISTS \`${DB_3}\`;
CREATE DATABASE IF NOT EXISTS \`${DB_4}\`;
EOSQL
-2
View File
@@ -1,2 +0,0 @@
REM Tear down the compose stack; -v also deletes named volumes, which wipes
REM the MariaDB data volume (db_data). Destructive — use deliberately.
docker compose down -v
REM Keep the console window open so the output can be read.
pause
+297
View File
@@ -0,0 +1,297 @@
# SerpentRace Backend Build System
## Overview
This document describes the comprehensive build system for the SerpentRace backend application. The build system handles TypeScript compilation, database migrations, asset management, testing, and deployment.
## Quick Start
```bash
# Development build
npm run build
# Production build with full validation
npm run build:production
# Advanced build with migrations and tests
npm run build:advanced:prod
# Development server with hot reload
npm run dev
```
## Build Scripts
### Basic Build Commands
| Command | Description |
|---------|-------------|
| `npm run build` | Standard build: clean → compile → copy assets |
| `npm run build:clean` | Clean the dist directory |
| `npm run build:compile` | Compile TypeScript to JavaScript |
| `npm run build:copy-assets` | Copy non-TS files to dist directory |
| `npm run build:docker` | Build for Docker (no tests/migrations) |
### Production Build Commands
| Command | Description |
|---------|-------------|
| `npm run build:production` | Full production build with linting, tests, and migrations |
| `npm run build:advanced` | Advanced build script with custom options |
| `npm run build:advanced:prod` | Advanced production build with all validations |
| `npm run build:advanced:ci` | CI/CD friendly build (skips linting) |
### Development Commands
| Command | Description |
|---------|-------------|
| `npm run dev` | Start development server with hot reload |
| `npm run watch` | Watch mode TypeScript compilation |
| `npm run typecheck` | Type checking without code generation |
### Database Commands
| Command | Description |
|---------|-------------|
| `npm run migration:run` | Run pending database migrations |
| `npm run migration:show` | Show migration status |
| `npm run migration:generate <name>` | Generate new migration |
| `npm run migration:create <name>` | Create empty migration |
| `npm run migration:revert` | Revert last migration |
| `npm run migration:full <name>` | Create, generate, and run migration |
### Testing Commands
| Command | Description |
|---------|-------------|
| `npm test` | Run all tests |
| `npm run test:watch` | Run tests in watch mode |
| `npm run test:coverage` | Run tests with coverage report |
| `npm run test:redis` | Run Redis-specific tests |
### Deployment Commands
| Command | Description |
|---------|-------------|
| `npm run deploy:prod` | Build for production deployment |
| `scripts/deploy.sh` | Full Linux/Mac deployment script |
| `scripts/deploy.bat` | Full Windows deployment script |
## Advanced Build Script
The advanced build script (`scripts/build.ts`) supports various options:
```bash
# Basic advanced build
npm run build:advanced
# Production build with migrations and tests
npm run build:advanced:prod
# CI/CD build (skips linting, includes tests and migrations)
npm run build:advanced:ci
```
### Build Options
- `--migrations`: Run database migrations during build
- `--test`: Run tests during build
- `--skip-lint`: Skip linting step
- `--production`: Enable production mode (strict validation)
## Deployment Scripts
### Linux/Mac Deployment
```bash
./scripts/deploy.sh [deploy|build-only|test-connections]
```
Options:
- `deploy` (default): Full deployment with validation
- `build-only`: Build without connection testing
- `test-connections`: Test database and Redis connections only
### Windows Deployment
```cmd
scripts\deploy.bat [deploy|build-only|test-connections]
```
Same options as Linux/Mac version.
### Required Environment Variables
The deployment scripts require these environment variables:
```bash
DB_HOST=localhost
DB_PORT=5432
DB_USERNAME=postgres
DB_PASSWORD=your_password
DB_NAME=serpentrace
JWT_SECRET=your_jwt_secret
REDIS_HOST=localhost
REDIS_PORT=6379
```
## Build Process Flow
### Standard Build (`npm run build`)
1. **Clean** - Remove previous build artifacts
2. **Lint** - Code quality checks (if configured)
3. **Compile** - TypeScript compilation
4. **Copy Assets** - Copy non-TS files to dist
5. **Post-build** - Validation and cleanup
### Production Build (`npm run build:production`)
1. **Clean** - Remove previous build artifacts
2. **Lint** - Code quality checks
3. **Test** - Run test suite
4. **Migrations** - Apply database migrations
5. **Compile** - TypeScript compilation
6. **Copy Assets** - Copy non-TS files to dist
7. **Validate** - Ensure build integrity
### Advanced Build (`npm run build:advanced`)
Provides fine-grained control over the build process with comprehensive logging and error handling.
## Asset Management
The build system automatically copies these file types to the dist directory:
- `.json` files (configuration, data)
- `.html` files (templates)
- `.css` files (stylesheets)
- Image files (`.png`, `.jpg`, `.jpeg`, `.gif`, `.svg`, `.ico`)
- Font files (`.woff`, `.woff2`, `.ttf`, `.eot`)
Excluded directories:
- `node_modules`
- `.git`
- `tests`
- `__tests__`
## TypeScript Configuration
The build system uses the following TypeScript settings:
- **Target**: ES2020
- **Module**: CommonJS
- **Output Directory**: `./dist`
- **Source Maps**: Enabled
- **Declarations**: Enabled for type definitions
- **Strict Mode**: Enabled for type safety
## Migration Management
### Creating Migrations
```bash
# Create empty migration
npm run migration:create AddNewTable
# Generate migration from entity changes
npm run migration:generate AddNewTable
# Full migration workflow (create + generate + run)
npm run migration:full AddNewTable
```
### Migration Best Practices
1. Always backup database before running migrations in production
2. Test migrations in development environment first
3. Use descriptive migration names
4. Review generated migrations before running them
## Docker Integration
The build system is optimized for Docker deployments:
```dockerfile
# Use build:docker for container builds
RUN npm run build:docker
# Or use production build for full validation
RUN npm run build:production
```
## Troubleshooting
### Common Issues
1. **Build fails with "Cannot find module"**
- Run `npm ci` to ensure all dependencies are installed
- Check TypeScript paths configuration
2. **Migration errors during build**
- Verify database connection parameters
- Ensure database exists and is accessible
- Check migration files for syntax errors
3. **Asset copying fails**
- Verify file permissions
- Check disk space availability
- Ensure source files exist
4. **TypeScript compilation errors**
- Run `npm run typecheck` for detailed error messages
- Check tsconfig.json configuration
- Verify all type definitions are installed
### Debug Mode
Enable verbose logging by setting the environment variable:
```bash
export DEBUG=serpentrace:*
npm run build:advanced
```
## Performance Optimization
### Build Performance Tips
1. Use `npm ci` instead of `npm install` in CI/CD
2. Enable TypeScript incremental compilation for development
3. Use `--skip-lint` in CI if linting is handled separately
4. Cache node_modules in CI/CD pipelines
### Runtime Performance
The build system optimizes the output for production:
- Source maps for debugging (can be disabled in production)
- Type declarations for library usage
- Compressed and optimized JavaScript output
## Monitoring and Logging
Build logs include:
- Timestamps for each build step
- Error details with stack traces
- Performance metrics (build duration)
- Validation results
Production builds create detailed logs in the `logs/` directory.
## Contributing
When modifying the build system:
1. Test changes with both development and production builds
2. Update this documentation for any new scripts or options
3. Ensure backward compatibility
4. Add appropriate error handling and logging
## Support
For build system issues:
1. Check this documentation
2. Review error logs in the console
3. Verify environment variables are set correctly
4. Test with a clean `node_modules` installation
+392
View File
@@ -0,0 +1,392 @@
# 🗄️ SerpentRace Database Management Guide
## 🎯 Overview
This guide provides comprehensive information about managing all database services in the SerpentRace project, including PostgreSQL, Redis, MinIO, and administration tools.
## 📊 Quick Status Check
### Check All Services
```bash
npm run db:status
```
### Check Individual Services
```bash
npm run db:status:pg # PostgreSQL only
npm run db:status:redis # Redis only
npm run db:status:docker # Docker containers only
```
### Simple Connection Test
```bash
npm run test:connections
```
## 🐘 PostgreSQL Database
### Connection Details
- **Host**: localhost:5432
- **Database**: serpentrace
- **Username**: postgres
- **Password**: postgres
- **Admin Tool**: pgAdmin at http://localhost:8080
### Database Operations
#### Run Migrations
```bash
npm run migration:run
```
#### Create New Migration
```bash
npm run migration:create src/migrations/YourMigrationName
```
#### Generate Migration from Entity Changes
```bash
npm run migration:generate src/migrations/YourMigrationName
```
#### Check Migration Status
```bash
npm run migration:show
```
#### Rollback Last Migration
```bash
npm run migration:revert
```
### Direct Database Access
#### Using psql (if installed)
```bash
psql -h localhost -p 5432 -U postgres -d serpentrace
```
#### Using pgAdmin
1. Open http://localhost:8080
2. Login with: admin@serpentrace.dev / admin
3. Server should be pre-configured as "SerpentRace"
### Common SQL Queries
#### Check Database Size
```sql
SELECT pg_size_pretty(pg_database_size('serpentrace')) as size;
```
#### List All Tables
```sql
SELECT tablename FROM pg_tables WHERE schemaname = 'public';
```
#### Check Active Connections
```sql
SELECT count(*) FROM pg_stat_activity WHERE datname = 'serpentrace';
```
## 🔴 Redis Cache
### Connection Details
- **Host**: localhost:6379
- **No Authentication**: Default Redis setup
- **Admin Tool**: Redis Commander at http://localhost:8081
### Redis Operations
#### Direct Redis Access (if redis-cli installed)
```bash
redis-cli -h localhost -p 6379
```
#### Common Redis Commands
```bash
# Get all keys
KEYS *
# Get key count
DBSIZE
# Check memory usage
INFO memory
# Flush all data (careful!)
FLUSHALL
```
### Using Redis Commander
1. Open http://localhost:8081
2. Browse keys, view data, execute commands
## 🗄️ MinIO Object Storage
### Connection Details
- **Endpoint**: localhost:9000
- **Console**: http://localhost:9001
- **Access Key**: serpentrace
- **Secret Key**: serpentrace123
- **Default Bucket**: serpentrace
### MinIO Operations
#### Access MinIO Console
1. Open http://localhost:9001
2. Login with: serpentrace / serpentrace123
3. Create buckets, upload files, manage storage
#### Health Check
```bash
curl http://localhost:9000/minio/health/live
```
### File Upload Example (Node.js)
```javascript
const Minio = require('minio');
const minioClient = new Minio.Client({
endPoint: 'localhost',
port: 9000,
useSSL: false,
accessKey: 'serpentrace',
secretKey: 'serpentrace123'
});
// Upload file
minioClient.fPutObject('serpentrace', 'test-file.txt', './file.txt');
```
## 🐳 Docker Container Management
### View All Containers
```bash
docker ps -a
```
### View SerpentRace Containers Only
```bash
docker ps -a --filter "name=serpentrace"
```
### Container Operations
#### Restart All Services
```bash
cd d:\munka\SzeSnake\SerpentRace_Docker
docker-compose -f docker-compose.dev.yml restart
```
#### Restart Individual Service
```bash
docker restart serpentrace-postgres-dev # PostgreSQL
docker restart serpentrace-redis-dev # Redis
docker restart serpentrace-minio-dev # MinIO
docker restart serpentrace-pgadmin-dev # pgAdmin
```
#### View Container Logs
```bash
docker logs serpentrace-postgres-dev
docker logs serpentrace-redis-dev -f # Follow logs
```
#### Stop All Services
```bash
cd d:\munka\SzeSnake\SerpentRace_Docker
docker-compose -f docker-compose.dev.yml down
```
#### Start All Services
```bash
cd d:\munka\SzeSnake\SerpentRace_Docker
docker-compose -f docker-compose.dev.yml up -d
```
## 🛠️ Troubleshooting
### PostgreSQL Issues
#### Connection Refused
```bash
# Check if container is running
docker ps | grep postgres
# Check container logs
docker logs serpentrace-postgres-dev
# Restart if needed
docker restart serpentrace-postgres-dev
```
#### Migration Errors
```bash
# Check migration status
npm run migration:show
# Revert last migration if problematic
npm run migration:revert
# Re-run migrations
npm run migration:run
```
### Redis Issues
#### Cannot Connect
```bash
# Check Redis container
docker ps | grep redis
# Test connection
redis-cli -h localhost -p 6379 ping
# Expected response: PONG
```
### MinIO Issues
#### Health Check Failed
```bash
# Check MinIO container
docker ps | grep minio
# Test health endpoint
curl http://localhost:9000/minio/health/live
# Expected response: 200 OK
```
### pgAdmin Issues
#### Cannot Login
- Default credentials: admin@serpentrace.dev / admin
- If issues persist, restart container:
```bash
docker restart serpentrace-pgadmin-dev
```
#### Server Not Found
- pgAdmin should auto-configure the PostgreSQL server
- If not visible, add manually:
- Host: postgres
- Port: 5432
- Database: serpentrace
- Username: postgres
- Password: postgres
## 🔧 Environment Variables
### Default Development Settings
```bash
# PostgreSQL
DB_HOST=localhost
DB_PORT=5432
DB_NAME=serpentrace
DB_USERNAME=postgres
DB_PASSWORD=postgres
# Redis
REDIS_HOST=localhost
REDIS_PORT=6379
# MinIO
MINIO_ENDPOINT=localhost
MINIO_PORT=9000
MINIO_ACCESS_KEY=serpentrace
MINIO_SECRET_KEY=serpentrace123
```
### Production Configuration
Create `.env.production` with secure values:
```bash
DB_HOST=your-production-host
DB_PASSWORD=secure-password
REDIS_PASSWORD=secure-redis-password
MINIO_SECRET_KEY=secure-minio-secret
```
## 📈 Monitoring & Maintenance
### Daily Health Check
```bash
npm run db:status
```
### Weekly Maintenance
```bash
# Check database size growth
npm run db:status:pg
# Review Redis memory usage
npm run db:status:redis
# Clean up old Docker logs
docker system prune
```
### Backup Procedures
#### PostgreSQL Backup
```bash
docker exec serpentrace-postgres-dev pg_dump -U postgres serpentrace > backup.sql
```
#### Redis Backup
```bash
docker exec serpentrace-redis-dev redis-cli BGSAVE
```
#### MinIO Backup
Use MinIO Console or mc client to backup buckets.
## 🎯 Performance Optimization
### PostgreSQL
- Monitor active connections with `npm run db:status:pg`
- Use connection pooling in production
- Regular VACUUM and ANALYZE operations
### Redis
- Monitor memory usage
- Configure appropriate eviction policies
- Use Redis persistence (RDB/AOF) in production
### MinIO
- Configure appropriate bucket policies
- Use lifecycle management for old files
- Monitor storage usage through console
## 🚀 Quick Reference Commands
```bash
# Status and Health
npm run db:status # Full system status
npm run test:connections # Quick connection test
# Database Operations
npm run migration:run # Apply migrations
npm run migration:show # Check migration status
# Docker Management
docker ps # Show running containers
docker logs <container> # View logs
docker restart <container> # Restart service
# Direct Access
psql -h localhost -U postgres -d serpentrace # PostgreSQL CLI
redis-cli -h localhost # Redis CLI
```
## 🌐 Web Interfaces Summary
| Service | URL | Credentials |
|---------|-----|------------|
| pgAdmin | http://localhost:8080 | admin@serpentrace.dev / admin |
| Redis Commander | http://localhost:8081 | No auth required |
| MinIO Console | http://localhost:9001 | serpentrace / serpentrace123 |
| Backend API | http://localhost:3000 | When running |
| Frontend | http://localhost:5173 | When running |
---
*This guide is automatically updated when database configurations change. Last updated: 2025-08-23*
+235
View File
@@ -0,0 +1,235 @@
# Docker Watcher Implementation Guide
## Overview
This document explains the Docker watcher implementation for the SerpentRace project, which automatically synchronizes local file changes with Docker containers and rebuilds images when necessary.
## What's Implemented
### Docker Compose Watch Configuration
The development Docker Compose configuration now includes `develop.watch` sections for both frontend and backend services that provide:
1. **File Synchronization**: Automatically sync source code changes to running containers
2. **Selective Rebuilding**: Rebuild containers when critical configuration files change
3. **Intelligent Ignore Patterns**: Exclude unnecessary files like `node_modules`
### Backend Watcher Configuration
```yaml
develop:
watch:
- action: sync
path: ../SerpentRace_Backend/src
target: /app/src
ignore:
- node_modules/
- action: sync
path: ../SerpentRace_Backend/package.json
target: /app/package.json
- action: rebuild
path: ../SerpentRace_Backend/package-lock.json
- action: rebuild
path: ../SerpentRace_Docker/Dockerfile_backend.dev
```
### Frontend Watcher Configuration
```yaml
develop:
watch:
- action: sync
path: ../SerpentRace_Frontend/src
target: /app/src
ignore:
- node_modules/
- action: sync
path: ../SerpentRace_Frontend/public
target: /app/public
- action: sync
path: ../SerpentRace_Frontend/package.json
target: /app/package.json
- action: rebuild
path: ../SerpentRace_Frontend/package-lock.json
- action: rebuild
path: ../SerpentRace_Frontend/vite.config.js
- action: rebuild
path: ../SerpentRace_Docker/Dockerfile_frontend.dev
```
## How It Works
### Sync Actions
- **Purpose**: Instantly copy changed files from host to container
- **Use Cases**: Source code files, static assets, configuration files that don't require rebuild
- **Performance**: Near-instant updates, no container restart needed
### Rebuild Actions
- **Purpose**: Trigger full container rebuild when critical files change
- **Use Cases**: Package files, Docker configuration, build configuration
- **Performance**: Takes longer but ensures consistency
## Usage
### New Commands Added
#### Windows (docker-manage.bat)
```bash
# Start with file watchers
.\docker-manage.bat dev:watch
# Traditional start (without watchers)
.\docker-manage.bat dev:start
```
#### Linux/Mac (docker-manage.sh)
```bash
# Start with file watchers
./docker-manage.sh dev:watch
# Traditional start (without watchers)
./docker-manage.sh dev:start
```
### Command Differences
| Command | Mode | File Watching | Container Rebuild | Use Case |
|---------|------|---------------|-------------------|----------|
| `dev:start` | Background (-d) | No | Manual only | Traditional development |
| `dev:watch` | Foreground | Yes | Automatic | Modern development with live sync |
## Benefits
### 1. Instant File Synchronization
- Source code changes are immediately available in containers
- No manual rebuild or restart required for code changes
- Maintains all existing hot-reload functionality (nodemon, Vite HMR)
### 2. Smart Rebuilding
- Automatically rebuilds when package.json or Dockerfile changes
- Ensures containers stay consistent with dependency updates
- Prevents common issues with stale dependencies
### 3. Development Efficiency
- Combines Docker's isolation with native-like development speed
- Reduces context switching between local and containerized development
- Maintains consistent environment across team members
## File Patterns Watched
### Backend
- **Synced Files**:
- `src/` directory (all TypeScript source files)
- `package.json` (for runtime reference)
- **Rebuild Triggers**:
- `package-lock.json` (dependency changes)
- `Dockerfile_backend.dev` (container configuration)
### Frontend
- **Synced Files**:
- `src/` directory (React components, styles, etc.)
- `public/` directory (static assets)
- `package.json` (for runtime reference)
- **Rebuild Triggers**:
- `package-lock.json` (dependency changes)
- `vite.config.js` (build configuration)
- `Dockerfile_frontend.dev` (container configuration)
## Performance Considerations
### Sync Performance
- File synchronization is near-instantaneous
- Uses Docker's built-in file watching mechanisms
- Optimized for development workloads
### Rebuild Performance
- Rebuilds only occur when necessary
- Docker layer caching reduces rebuild times
- Can be resource-intensive for large dependency changes
## Troubleshooting
### Common Issues
1. **File Changes Not Reflected**
- Ensure you're using `dev:watch` command
- Check that files are not in ignore patterns
- Verify file paths are correct
2. **Excessive Rebuilds**
- Check for unnecessary changes to rebuild trigger files
- Consider moving files to sync-only patterns if appropriate
3. **Performance Issues**
- Monitor Docker resource usage
- Consider excluding large directories from watching
- Use `.dockerignore` for files that should never be synced
### Debugging Commands
```bash
# Check container status
docker-compose -f SerpentRace_Docker/docker-compose.dev.yml ps
# View watcher logs
docker-compose -f SerpentRace_Docker/docker-compose.dev.yml logs -f backend
docker-compose -f SerpentRace_Docker/docker-compose.dev.yml logs -f frontend
# Check file synchronization
docker exec -it serpentrace-backend-dev ls -la /app/src
docker exec -it serpentrace-frontend-dev ls -la /app/src
```
## Requirements
### Docker Compose Version
- Requires Docker Compose v2.22+ for `develop.watch` support
- Check version: `docker compose version` (the `develop.watch` feature requires the Compose v2 plugin; standalone v1 `docker-compose` does not support it)
### File System
- Works on Windows, Linux, and macOS
- Performance may vary based on file system type
- WSL2 recommended for Windows users
## Migration from Traditional Setup
### No Breaking Changes
- Existing `dev:start` command continues to work
- All volume mounts remain functional
- Hot reload functionality preserved
### Gradual Adoption
1. Try `dev:watch` for active development
2. Use `dev:start` for background services
3. Gradually migrate team to new workflow
## Best Practices
### Development Workflow
1. Use `dev:watch` during active development
2. Make code changes normally
3. Watch for automatic synchronization
4. Monitor logs for any sync issues
### File Organization
- Keep frequently changed files in sync patterns
- Place build configuration in rebuild patterns
- Use `.dockerignore` for files that should never sync
### Team Collaboration
- Document which command team members should use
- Ensure consistent Docker Compose version across team
- Share troubleshooting steps for common issues
## Future Enhancements
### Potential Improvements
1. **Selective Service Watching**: Watch only specific services
2. **Custom Ignore Patterns**: Per-developer ignore configurations
3. **Performance Monitoring**: Built-in sync performance metrics
4. **Integration with IDEs**: Better editor integration for sync status
### Configuration Expansion
- Additional file patterns as needed
- Service-specific watch configurations
- Environment-based watch rules
File diff suppressed because it is too large Load Diff
+117
View File
@@ -0,0 +1,117 @@
# pgAdmin Database Administration Guide
## Access pgAdmin
- **URL**: http://localhost:8080
- **Email**: admin@serpentrace.dev
- **Password**: admin
## Pre-configured Server
The pgAdmin interface should have a pre-configured server named **"SerpentRace PostgreSQL Dev"** in the "Development" group.
## Manual Server Configuration (If Needed)
If the server is not automatically configured, add it manually:
### Server Details
- **Name**: SerpentRace PostgreSQL Dev
- **Host**: postgres (or localhost if connecting from outside Docker)
- **Port**: 5432
- **Database**: serpentrace
- **Username**: postgres
- **Password**: postgres
### Steps to Add Server Manually
1. Right-click on "Servers" in the left panel
2. Select "Register" > "Server..."
3. Fill in the "General" tab:
- Name: `SerpentRace PostgreSQL Dev`
- Server group: `Development`
4. Fill in the "Connection" tab:
- Host name/address: `postgres`
- Port: `5432`
- Maintenance database: `serpentrace`
- Username: `postgres`
- Password: `postgres`
5. Click "Save"
## Common Database Operations
### View Tables
1. Expand the server connection
2. Expand "Databases" > "serpentrace"
3. Expand "Schemas" > "public"
4. Expand "Tables"
### Run SQL Queries
1. Right-click on the database name
2. Select "Query Tool"
3. Write your SQL queries in the editor
4. Click the "Execute" button or press F5
### View Data
1. Right-click on any table
2. Select "View/Edit Data" > "All Rows"
## Troubleshooting
### Connection Issues
- Ensure Docker containers are running: `docker ps`
- Check container logs: `docker logs serpentrace-postgres-dev`
- Test connections: `npm run test:connections`
### Authentication Failed
- Verify the password is correct: `postgres`
- Check if you're using the correct hostname: `postgres` (inside Docker) vs `localhost` (outside Docker)
### Server Not Appearing
- Restart pgAdmin container:
```bash
docker-compose -f docker-compose.dev.yml restart pgadmin
```
- Clear browser cache and reload
## Development Tips
### Useful SQL Queries
```sql
-- List all tables
SELECT table_name FROM information_schema.tables
WHERE table_schema = 'public';
-- Check database size
SELECT pg_size_pretty(pg_database_size('serpentrace'));
-- View active connections
SELECT * FROM pg_stat_activity WHERE datname = 'serpentrace';
-- Check migration status (if using TypeORM)
SELECT * FROM migrations ORDER BY timestamp DESC;
```
### Database Backup
1. Right-click on database name
2. Select "Backup..."
3. Choose format (Custom recommended for pgAdmin restore)
4. Set filename and location
5. Click "Backup"
### Database Restore
1. Right-click on "Databases"
2. Select "Restore..."
3. Choose the backup file
4. Configure options as needed
5. Click "Restore"
## Security Notes
⚠️ **Development Only**: The current configuration uses default credentials and is intended for development only. For production:
- Use strong, unique passwords
- Enable SSL connections
- Restrict network access
- Use environment variables for credentials
- Enable authentication and authorization features
+25 -1
View File
@@ -1,4 +1,28 @@
# SerpentRace
- Frontend: React (Vite)
- Backend: Node.js (Express.js)
- Backend: Node.js (Express.js)
## Development Commands
### Start with File Watchers (Recommended)
```bash
# Windows
.\docker-manage.bat dev:watch
# Linux/Mac
./docker-manage.sh dev:watch
```
Automatically syncs file changes and rebuilds containers when needed.
### Traditional Start
```bash
# Windows
.\docker-manage.bat dev:start
# Linux/Mac
./docker-manage.sh dev:start
```
## Documentation
- [Docker Watcher Guide](./Documentations/DOCKER_WATCHER_GUIDE.md) - Comprehensive guide for file watching functionality
+27
View File
@@ -0,0 +1,27 @@
node_modules
npm-debug.log
.git
.gitignore
README.md
.env
.nyc_output
coverage
.coverage
.coverage.*
.cache
logs
*.log
.DS_Store
.vscode
.idea
*.swp
*.swo
dist
build
.next
.nuxt
.vuepress/dist
.serverless
.fusebox/
.dynamodb/
.tern-port
+29
View File
@@ -0,0 +1,29 @@
# Development Environment Variables for Local Build
# These are used when running build scripts outside of Docker containers
NODE_ENV=development
PORT=3000
# Database Configuration (Docker containers)
DB_HOST=localhost
DB_PORT=5432
DB_NAME=serpentrace
DB_USERNAME=postgres
DB_PASSWORD=postgres
# Redis Configuration (Docker containers)
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_URL=redis://localhost:6379
# JWT Configuration
JWT_SECRET=dev_jwt_secret_change_in_production
JWT_EXPIRATION=24h
JWT_REFRESH_EXPIRATION=7d
# MinIO Configuration (Docker containers)
MINIO_ENDPOINT=localhost
MINIO_PORT=9000
MINIO_ACCESS_KEY=serpentrace
MINIO_SECRET_KEY=serpentrace123!
MINIO_USE_SSL=false
+4
View File
@@ -0,0 +1,4 @@
./dist/*
./node_modules/*
./Archive_*/*
./Archive_*
+4
View File
@@ -0,0 +1,4 @@
import { WebSocketService } from '../Application/Services/WebSocketService';
declare let webSocketService: WebSocketService;
export { webSocketService };
//# sourceMappingURL=index.d.ts.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/Api/index.ts"],"names":[],"mappings":"AAaA,OAAO,EAAE,gBAAgB,EAAE,MAAM,0CAA0C,CAAC;AAmJ5E,QAAA,IAAI,gBAAgB,EAAE,gBAAgB,CAAC;AAyFvC,OAAO,EAAE,gBAAgB,EAAE,CAAC"}
+225
View File
@@ -0,0 +1,225 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.webSocketService = void 0;
const express_1 = __importDefault(require("express"));
const http_1 = require("http");
const cookie_parser_1 = __importDefault(require("cookie-parser"));
const helmet_1 = __importDefault(require("helmet"));
const ormconfig_1 = require("../Infrastructure/ormconfig");
const userRouter_1 = __importDefault(require("./routers/userRouter"));
const organizationRouter_1 = __importDefault(require("./routers/organizationRouter"));
const deckRouter_1 = __importDefault(require("./routers/deckRouter"));
const chatRouter_1 = __importDefault(require("./routers/chatRouter"));
const contactRouter_1 = __importDefault(require("./routers/contactRouter"));
const adminRouter_1 = __importDefault(require("./routers/adminRouter"));
const deckImportExportRouter_1 = __importDefault(require("./routers/deckImportExportRouter"));
const Logger_1 = require("../Application/Services/Logger");
const WebSocketService_1 = require("../Application/Services/WebSocketService");
const swaggerUiSetup_1 = require("./swagger/swaggerUiSetup");
const app = (0, express_1.default)();
const httpServer = (0, http_1.createServer)(app);
const PORT = process.env.PORT || 3000;
const isDevelopment = process.env.NODE_ENV === 'development';
const loggingService = Logger_1.LoggingService.getInstance();
(0, Logger_1.logStartup)('SerpentRace Backend starting up', {
environment: process.env.NODE_ENV || 'development',
port: PORT,
nodeVersion: process.version,
chatInactivityTimeout: process.env.CHAT_INACTIVITY_TIMEOUT_MINUTES || '30'
});
app.use((0, helmet_1.default)({
contentSecurityPolicy: isDevelopment ? false : undefined
}));
app.use(express_1.default.json({ limit: '10mb' }));
app.use(express_1.default.urlencoded({ extended: true, limit: '10mb' }));
app.use((0, cookie_parser_1.default)());
app.use(loggingService.requestLoggingMiddleware());
// CORS middleware (hand-rolled): echo the request Origin only when it is in
// the allowlist. Fix: never emit the "*" wildcard alongside
// Access-Control-Allow-Credentials — the Fetch spec forbids that pairing and
// browsers silently reject the response, breaking all credentialed requests.
app.use((req, res, next) => {
    const origin = req.headers.origin;
    const allowedOrigins = ['http://localhost:3000', 'http://localhost:3001', 'http://localhost:8080'];
    if (origin && allowedOrigins.includes(origin)) {
        // Credentialed CORS requires reflecting the specific origin.
        res.setHeader('Access-Control-Allow-Origin', origin);
        // Response varies by Origin, so shared caches must key on it.
        res.setHeader('Vary', 'Origin');
    }
    // Requests without an Origin header (same-origin, curl, server-to-server)
    // are not subject to CORS; they ignore these headers either way.
    res.setHeader('Access-Control-Allow-Credentials', 'true');
    res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, PATCH, OPTIONS');
    res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Authorization, Cookie');
    // Short-circuit CORS preflight requests with an empty 200.
    if (req.method === 'OPTIONS') {
        res.status(200).end();
        return;
    }
    next();
});
if (isDevelopment) {
app.use((req, res, next) => {
(0, Logger_1.logRequest)(`${req.method} ${req.path}`, req, res);
next();
});
}
// Setup Swagger documentation
(0, swaggerUiSetup_1.setupSwagger)(app);
app.get('/', (req, res) => {
res.json({
service: 'SerpentRace Backend API',
status: 'running',
version: '1.0.0',
endpoints: {
swagger: '/api-docs',
users: '/api/users',
organizations: '/api/organizations',
decks: '/api/decks',
chats: '/api/chats',
contacts: '/api/contacts',
admin: '/api/admin',
deckImportExport: '/api/deck-import-export',
health: '/health'
},
websocket: {
enabled: true,
events: [
'chat:join', 'chat:leave', 'message:send',
'group:create', 'chat:direct', 'game:chat:create',
'chat:history'
]
}
});
});
app.get('/health', async (req, res) => {
try {
const isDbConnected = ormconfig_1.AppDataSource.isInitialized;
res.json({
status: 'healthy',
timestamp: new Date().toISOString(),
service: 'SerpentRace Backend API',
version: '1.0.0',
environment: process.env.NODE_ENV || 'development',
database: {
connected: isDbConnected,
type: ormconfig_1.AppDataSource.options.type
},
websocket: {
enabled: true
},
uptime: process.uptime()
});
}
catch (error) {
res.status(503).json({
status: 'unhealthy',
timestamp: new Date().toISOString(),
error: 'Service health check failed'
});
}
});
// API Routes
app.use('/api/users', userRouter_1.default);
app.use('/api/organizations', organizationRouter_1.default);
app.use('/api/decks', deckRouter_1.default);
app.use('/api/chats', chatRouter_1.default);
app.use('/api/contacts', contactRouter_1.default);
app.use('/api/admin', adminRouter_1.default);
app.use('/api/deck-import-export', deckImportExportRouter_1.default);
// Global error handler (must be after routes)
app.use(loggingService.errorLoggingMiddleware());
app.use((error, req, res, next) => {
(0, Logger_1.logError)('Global error handler caught unhandled error', error, req, res);
// Don't expose internal error details in production
const isDevelopment = process.env.NODE_ENV === 'development';
res.status(500).json({
error: 'Internal server error',
timestamp: new Date().toISOString(),
...(isDevelopment && { details: error.message, stack: error.stack })
});
});
// Handle 404 routes
app.use((req, res) => {
res.status(404).json({
error: 'Route not found',
path: req.originalUrl,
method: req.method,
timestamp: new Date().toISOString()
});
});
// Initialize WebSocket service after database connection
let webSocketService;
// Initialize database connection
ormconfig_1.AppDataSource.initialize()
.then(() => {
const dbOptions = ormconfig_1.AppDataSource.options;
(0, Logger_1.logConnection)('Database connection established', 'postgresql', 'success', {
type: dbOptions.type,
host: dbOptions.host,
database: dbOptions.database
});
// Initialize WebSocket service after database is connected
exports.webSocketService = webSocketService = new WebSocketService_1.WebSocketService(httpServer);
(0, Logger_1.logStartup)('WebSocket service initialized', {
chatInactivityTimeout: process.env.CHAT_INACTIVITY_TIMEOUT_MINUTES || '30'
});
})
.catch((error) => {
const dbOptions = ormconfig_1.AppDataSource.options;
(0, Logger_1.logConnection)('Database connection failed', 'postgresql', 'failure', {
error: error.message,
type: dbOptions.type,
host: dbOptions.host,
database: dbOptions.database
});
process.exit(1);
});
// Start server with WebSocket support
const server = httpServer.listen(PORT, () => {
(0, Logger_1.logStartup)('Server started successfully', {
port: PORT,
environment: process.env.NODE_ENV || 'development',
timestamp: new Date().toISOString(),
endpoints: {
health: `/health`,
swagger: `/api-docs`,
users: `/api/users`,
organizations: `/api/organizations`,
decks: `/api/decks`,
chats: `/api/chats`
},
websocket: {
enabled: true,
chatInactivityTimeout: `${process.env.CHAT_INACTIVITY_TIMEOUT_MINUTES || '30'} minutes`
}
});
});
// Graceful shutdown
// Closes the HTTP server first (stops accepting new connections), then tears
// down the TypeORM data source, and only then exits: code 0 on a clean
// shutdown, 1 if the database teardown fails.
// NOTE(review): there is no timeout fallback — if open connections never
// drain, server.close()'s callback never fires and the process hangs; confirm
// the process manager (Docker/k8s) sends SIGKILL after a grace period.
const gracefulShutdown = async (signal) => {
    (0, Logger_1.logStartup)(`Received ${signal}. Shutting down gracefully...`);
    server.close(() => {
        (0, Logger_1.logStartup)('HTTP server closed');
        // Skip teardown if startup never finished initializing the data source.
        if (ormconfig_1.AppDataSource.isInitialized) {
            ormconfig_1.AppDataSource.destroy()
                .then(() => {
                (0, Logger_1.logConnection)('Database connection closed', 'postgresql', 'success');
                process.exit(0);
            })
                .catch((error) => {
                // Database failed to close cleanly — exit non-zero so
                // supervisors notice the unclean shutdown.
                (0, Logger_1.logError)('Error during database shutdown', error);
                process.exit(1);
            });
        }
        else {
            process.exit(0);
        }
    });
};
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
process.on('SIGINT', () => gracefulShutdown('SIGINT'));
// Handle uncaught exceptions
process.on('uncaughtException', (error) => {
(0, Logger_1.logError)('Uncaught Exception - Server will shut down', error);
process.exit(1);
});
// Handle unhandled promise rejections
process.on('unhandledRejection', (reason, promise) => {
(0, Logger_1.logError)('Unhandled Rejection - Server will shut down', new Error(String(reason)), undefined, undefined);
process.exit(1);
});
//# sourceMappingURL=index.js.map
File diff suppressed because one or more lines are too long
+10
View File
@@ -0,0 +1,10 @@
declare global {
namespace Express {
interface Request {
file?: Express.Multer.File;
}
}
}
declare const router: import("express-serve-static-core").Router;
export default router;
//# sourceMappingURL=adminRouter.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"adminRouter.d.ts","sourceRoot":"","sources":["../../../src/Api/routers/adminRouter.ts"],"names":[],"mappings":"AAYA,OAAO,CAAC,MAAM,CAAC;IACX,UAAU,OAAO,CAAC;QACd,UAAU,OAAO;YACb,IAAI,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;SAC9B;KACJ;CACJ;AAED,QAAA,MAAM,MAAM,4CAAmB,CAAC;AA4kChC,eAAe,MAAM,CAAC"}
+932
View File
@@ -0,0 +1,932 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const express_1 = __importDefault(require("express"));
const multer_1 = __importDefault(require("multer"));
const DIContainer_1 = require("../../Application/Services/DIContainer");
const AuthMiddleware_1 = require("../../Application/Services/AuthMiddleware");
const ValidationMiddleware_1 = require("../../Application/Services/ValidationMiddleware");
const AdminBypassService_1 = require("../../Application/Services/AdminBypassService");
const Logger_1 = require("../../Application/Services/Logger");
const router = express_1.default.Router();
const container = DIContainer_1.DIContainer.getInstance();
// Configure multer for file uploads
const upload = (0, multer_1.default)({
storage: multer_1.default.memoryStorage(),
limits: {
fileSize: 10 * 1024 * 1024, // 10MB limit
},
fileFilter: (req, file, cb) => {
if (file.mimetype === 'application/json' || file.originalname.endsWith('.spr')) {
cb(null, true);
}
else {
cb(new Error('Only JSON and .spr files are allowed'));
}
}
});
// Helper function to extract language from Accept-Language header
// Helper function to extract language from Accept-Language header.
// Returns the two-letter code of the first language entry (quality factors
// such as ";q=0.9" are discarded), or null when the header is absent/empty.
function extractLanguageFromAcceptHeader(acceptLanguage) {
    if (!acceptLanguage) {
        return null;
    }
    const entries = acceptLanguage.split(',');
    if (entries.length === 0) {
        return null;
    }
    // Drop any ";q=..." suffix, then trim and keep the first two characters.
    const [firstEntry] = entries;
    const withoutQuality = firstEntry.split(';')[0];
    return withoutQuality.trim().slice(0, 2);
}
// =============================================================================
// USER MANAGEMENT ROUTES
// =============================================================================
// Get users with pagination (RECOMMENDED)
router.get('/users/page/:from/:to', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const from = parseInt(req.params.from);
const to = parseInt(req.params.to);
const includeDeleted = req.query.includeDeleted === 'true';
if (isNaN(from) || isNaN(to) || from < 0 || to < from) {
return res.status(400).json({
error: 'Invalid pagination parameters. From and to must be valid numbers with from <= to.'
});
}
const limit = to - from + 1;
if (limit > 100) {
return res.status(400).json({
error: 'Page size too large. Maximum 100 records per request.'
});
}
(0, Logger_1.logRequest)('Admin paginated users endpoint accessed', req, res, { from, to, includeDeleted });
const result = await container.getUsersByPageQueryHandler.execute({
from,
to,
includeDeleted
});
const response = {
users: result.users,
pagination: {
from,
to,
returned: result.users.length,
totalCount: result.totalCount,
includeDeleted
}
};
(0, Logger_1.logRequest)('Admin users retrieved successfully', req, res, {
returnedUsers: result.users.length,
totalCount: result.totalCount,
from,
to,
includeDeleted
});
return res.status(200).json(response);
}
catch (error) {
(0, Logger_1.logError)('Error in admin get users endpoint', error, req, res);
return res.status(500).json({ error: 'Internal server error' });
}
});
// Get users by page (admin only) - RECOMMENDED
// NOTE(review): DEAD CODE — an identical route 'GET /users/page/:from/:to' is
// already registered earlier in this file; Express dispatches to the first
// matching handler, so this second registration is never reached. It also
// behaves differently (queries userRepository directly instead of going
// through getUsersByPageQueryHandler, and omits the 100-record page-size
// cap), so decide which copy is intended and delete the other — in the
// TypeScript source, since this file is compiled output.
router.get('/users/page/:from/:to', AuthMiddleware_1.adminRequired, async (req, res) => {
    try {
        const from = parseInt(req.params.from);
        const to = parseInt(req.params.to);
        const includeDeleted = req.query.includeDeleted === 'true';
        if (isNaN(from) || isNaN(to) || from < 0 || to < from) {
            return res.status(400).json({ error: 'Invalid page parameters. "from" and "to" must be valid numbers with to >= from >= 0' });
        }
        (0, Logger_1.logRequest)('Admin get users by page endpoint accessed', req, res, { from, to, includeDeleted });
        // Bypasses the query handler used by the first registration — if this
        // copy is kept, confirm that is intentional.
        const result = includeDeleted
            ? await container.userRepository.findByPageIncludingDeleted(from, to)
            : await container.userRepository.findByPage(from, to);
        (0, Logger_1.logRequest)('Admin users page retrieved successfully', req, res, {
            from,
            to,
            count: result.users.length,
            total: result.totalCount,
            includeDeleted
        });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Admin get users by page endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// Get user by ID including soft-deleted ones
router.get('/users/:userId', AuthMiddleware_1.adminRequired, ValidationMiddleware_1.ValidationMiddleware.validateUUIDFormat(['userId']), async (req, res) => {
try {
const targetUserId = req.params.userId;
const includeDeleted = req.query.includeDeleted === 'true';
(0, Logger_1.logRequest)('Admin get user by id endpoint accessed', req, res, { targetUserId, includeDeleted });
const user = includeDeleted
? await container.userRepository.findByIdIncludingDeleted(targetUserId)
: await container.userRepository.findById(targetUserId);
if (!user) {
(0, Logger_1.logWarning)('User not found', { targetUserId, includeDeleted }, req, res);
return res.status(404).json({ error: 'User not found' });
}
(0, Logger_1.logRequest)('Admin user retrieved successfully', req, res, {
targetUserId,
username: user.username,
includeDeleted
});
res.json(user);
}
catch (error) {
(0, Logger_1.logError)('Admin get user by id endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// Search users including soft-deleted ones
router.get('/users/search/:searchTerm', AuthMiddleware_1.adminRequired, ValidationMiddleware_1.ValidationMiddleware.validateStringLength({ searchTerm: { min: 2, max: 100 } }), async (req, res) => {
try {
const { searchTerm } = req.params;
const includeDeleted = req.query.includeDeleted === 'true';
(0, Logger_1.logRequest)('Admin search users endpoint accessed', req, res, { searchTerm, includeDeleted });
const users = includeDeleted
? await container.userRepository.searchIncludingDeleted(searchTerm)
: await container.userRepository.search(searchTerm);
(0, Logger_1.logRequest)('Admin user search completed', req, res, {
searchTerm,
resultCount: Array.isArray(users) ? users.length : (users.totalCount || 0),
includeDeleted
});
res.json(users);
}
catch (error) {
(0, Logger_1.logError)('Admin search users endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// Update any user (admin only)
router.patch('/users/:userId', AuthMiddleware_1.adminRequired, ValidationMiddleware_1.ValidationMiddleware.validateUUIDFormat(['userId']), async (req, res) => {
try {
const targetUserId = req.params.userId;
const adminUserId = req.user.userId;
(0, Logger_1.logRequest)('Admin update user endpoint accessed', req, res, {
adminUserId,
targetUserId,
fieldsToUpdate: Object.keys(req.body)
});
const result = await container.updateUserCommandHandler.execute({ id: targetUserId, ...req.body });
if (!result) {
return res.status(404).json({ error: 'User not found' });
}
(0, Logger_1.logRequest)('User updated by admin', req, res, {
adminUserId,
targetUserId,
username: result.username
});
res.json(result);
}
catch (error) {
(0, Logger_1.logError)('Admin update user endpoint error', error, req, res);
if (error instanceof Error) {
if (error.message.includes('already exists')) {
return res.status(409).json({ error: error.message });
}
if (error.message.includes('validation')) {
return res.status(400).json({ error: error.message });
}
}
res.status(500).json({ error: 'Internal server error' });
}
});
// Deactivate user (admin only)
router.post('/users/:userId/deactivate', AuthMiddleware_1.adminRequired, ValidationMiddleware_1.ValidationMiddleware.validateUUIDFormat(['userId']), async (req, res) => {
try {
const targetUserId = req.params.userId;
const adminUserId = req.user.userId;
(0, Logger_1.logRequest)('Deactivate user endpoint accessed', req, res, { adminUserId, targetUserId });
const result = await container.deactivateUserCommandHandler.execute({ id: targetUserId });
if (!result) {
return res.status(404).json({ error: 'User not found' });
}
(0, Logger_1.logAuth)('User deactivated by admin', targetUserId, { adminUserId }, req, res);
res.json({ message: 'User deactivated successfully', user: result });
}
catch (error) {
(0, Logger_1.logError)('Deactivate user endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// Delete user (admin only)
router.delete('/users/:userId', AuthMiddleware_1.adminRequired, ValidationMiddleware_1.ValidationMiddleware.validateUUIDFormat(['userId']), async (req, res) => {
try {
const targetUserId = req.params.userId;
const adminUserId = req.user.userId;
(0, Logger_1.logRequest)('Delete user endpoint accessed', req, res, { adminUserId, targetUserId });
const result = await container.deleteUserCommandHandler.execute({ id: targetUserId });
if (!result) {
return res.status(404).json({ error: 'User not found' });
}
(0, Logger_1.logAuth)('User deleted by admin', targetUserId, { adminUserId }, req, res);
res.json({ message: 'User deleted successfully' });
}
catch (error) {
(0, Logger_1.logError)('Delete user endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// =============================================================================
// DECK MANAGEMENT ROUTES
// =============================================================================
// Get decks by page (admin only) - RECOMMENDED
router.get('/decks/page/:from/:to', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const from = parseInt(req.params.from);
const to = parseInt(req.params.to);
const includeDeleted = req.query.includeDeleted === 'true';
if (isNaN(from) || isNaN(to) || from < 0 || to < from) {
return res.status(400).json({ error: 'Invalid page parameters. "from" and "to" must be valid numbers with to >= from >= 0' });
}
(0, Logger_1.logRequest)('Admin get decks by page endpoint accessed', req, res, { from, to, includeDeleted });
// For admin, we need to pass admin context to get unrestricted decks
const adminUserId = req.user.userId;
const result = await container.getDecksByPageQueryHandler.execute({
userId: adminUserId,
userOrgId: undefined,
isAdmin: true,
from,
to,
includeDeleted
});
(0, Logger_1.logRequest)('Admin decks page retrieved successfully', req, res, {
from,
to,
count: result.decks.length,
total: result.totalCount,
includeDeleted
});
res.json(result);
}
catch (error) {
(0, Logger_1.logError)('Admin get decks by page endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// Get deck by ID including soft-deleted ones
router.get('/decks/:id', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const { id } = req.params;
const includeDeleted = req.query.includeDeleted === 'true';
(0, Logger_1.logRequest)('Admin get deck by id endpoint accessed', req, res, { deckId: id, includeDeleted });
const deck = includeDeleted
? await container.deckRepository.findByIdIncludingDeleted(id)
: await container.deckRepository.findById(id);
if (!deck) {
(0, Logger_1.logWarning)('Deck not found', { deckId: id, includeDeleted }, req, res);
return res.status(404).json({ error: 'Deck not found' });
}
(0, Logger_1.logRequest)('Admin deck retrieved successfully', req, res, { deckId: id, includeDeleted });
res.json(deck);
}
catch (error) {
(0, Logger_1.logError)('Admin get deck by id endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// Search decks including soft-deleted ones
router.get('/decks/search/:searchTerm', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const { searchTerm } = req.params;
const includeDeleted = req.query.includeDeleted === 'true';
(0, Logger_1.logRequest)('Admin search decks endpoint accessed', req, res, { searchTerm, includeDeleted });
const decks = includeDeleted
? await container.deckRepository.searchIncludingDeleted(searchTerm)
: await container.deckRepository.search(searchTerm);
(0, Logger_1.logRequest)('Admin deck search completed', req, res, {
searchTerm,
resultCount: Array.isArray(decks) ? decks.length : (decks.totalCount || 0),
includeDeleted
});
res.json(decks);
}
catch (error) {
(0, Logger_1.logError)('Admin search decks endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// Hard delete deck (admin only)
router.delete('/decks/:id/hard', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const deckId = req.params.id;
(0, Logger_1.logRequest)('Admin hard delete deck endpoint accessed', req, res, { deckId });
const result = await container.deleteDeckCommandHandler.execute({ id: deckId, soft: false });
(0, Logger_1.logRequest)('Admin deck hard delete successful', req, res, { deckId, success: result });
res.json({ success: result });
}
catch (error) {
(0, Logger_1.logError)('Admin hard delete deck endpoint error', error, req, res);
if (error instanceof Error && error.message.includes('not found')) {
return res.status(404).json({ error: 'Deck not found' });
}
res.status(500).json({ error: 'Internal server error' });
}
});
// =============================================================================
// ORGANIZATION MANAGEMENT ROUTES
// =============================================================================
// Create organization (admin only)
router.post('/organizations', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const adminUserId = req.user.userId;
(0, Logger_1.logRequest)('Admin create organization endpoint accessed', req, res, { name: req.body.name, adminUserId });
const result = await container.createOrganizationCommandHandler.execute(req.body);
AdminBypassService_1.AdminAuditService.logAdminAction('CREATE_ORGANIZATION', adminUserId, {
targetType: 'organization',
targetId: result.id,
operation: 'create',
changes: req.body
}, req, res);
(0, Logger_1.logRequest)('Admin organization created successfully', req, res, { organizationId: result.id, name: req.body.name, adminUserId });
res.json(result);
}
catch (error) {
(0, Logger_1.logError)('Admin create organization endpoint error', error, req, res);
if (error instanceof Error && (error.message.includes('duplicate') || error.message.includes('unique constraint'))) {
return res.status(409).json({ error: 'Organization with this name already exists' });
}
if (error instanceof Error && error.message.includes('validation')) {
return res.status(400).json({ error: 'Invalid input data', details: error.message });
}
res.status(500).json({ error: 'Internal server error' });
}
});
// Update organization (admin only) - NEW ENDPOINT
router.patch('/organizations/:id', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const organizationId = req.params.id;
const adminUserId = req.user.userId;
(0, Logger_1.logRequest)('Admin update organization endpoint accessed', req, res, {
adminUserId,
organizationId,
fieldsToUpdate: Object.keys(req.body)
});
const result = await container.updateOrganizationCommandHandler.execute({
id: organizationId,
...req.body
});
if (!result) {
return res.status(404).json({ error: 'Organization not found' });
}
AdminBypassService_1.AdminAuditService.logAdminAction('UPDATE_ORGANIZATION', adminUserId, {
targetType: 'organization',
targetId: organizationId,
operation: 'update',
changes: req.body,
sensitive: req.body.maxOrganizationalDecks !== undefined
}, req, res);
(0, Logger_1.logRequest)('Organization updated by admin', req, res, {
adminUserId,
organizationId,
organizationName: result.name
});
res.json(result);
}
catch (error) {
(0, Logger_1.logError)('Admin update organization endpoint error', error, req, res);
if (error instanceof Error) {
if (error.message.includes('already exists')) {
return res.status(409).json({ error: error.message });
}
if (error.message.includes('validation')) {
return res.status(400).json({ error: error.message });
}
}
res.status(500).json({ error: 'Internal server error' });
}
});
// Get organizations by page (admin only) - RECOMMENDED
router.get('/organizations/page/:from/:to', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const from = parseInt(req.params.from);
const to = parseInt(req.params.to);
const includeDeleted = req.query.includeDeleted === 'true';
if (isNaN(from) || isNaN(to) || from < 0 || to < from) {
return res.status(400).json({ error: 'Invalid page parameters. "from" and "to" must be valid numbers with to >= from >= 0' });
}
(0, Logger_1.logRequest)('Admin get organizations by page endpoint accessed', req, res, { from, to, includeDeleted });
const result = await container.getOrganizationsByPageQueryHandler.execute({
from,
to,
includeDeleted
});
(0, Logger_1.logRequest)('Admin organizations page retrieved successfully', req, res, {
from,
to,
count: result.organizations.length,
total: result.totalCount,
includeDeleted
});
res.json(result);
}
catch (error) {
(0, Logger_1.logError)('Admin get organizations by page endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// Get organization by ID including soft-deleted ones
router.get('/organizations/:id', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const organizationId = req.params.id;
const includeDeleted = req.query.includeDeleted === 'true';
(0, Logger_1.logRequest)('Admin get organization by id endpoint accessed', req, res, { organizationId, includeDeleted });
const organization = includeDeleted
? await container.organizationRepository.findByIdIncludingDeleted(organizationId)
: await container.organizationRepository.findById(organizationId);
if (!organization) {
(0, Logger_1.logWarning)('Organization not found', { organizationId, includeDeleted }, req, res);
return res.status(404).json({ error: 'Organization not found' });
}
(0, Logger_1.logRequest)('Admin organization retrieved successfully', req, res, { organizationId, includeDeleted });
res.json(organization);
}
catch (error) {
(0, Logger_1.logError)('Admin get organization by id endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// Search organizations including soft-deleted ones
router.get('/organizations/search/:searchTerm', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const { searchTerm } = req.params;
const includeDeleted = req.query.includeDeleted === 'true';
(0, Logger_1.logRequest)('Admin search organizations endpoint accessed', req, res, { searchTerm, includeDeleted });
const organizations = includeDeleted
? await container.organizationRepository.searchIncludingDeleted(searchTerm)
: await container.organizationRepository.search(searchTerm);
(0, Logger_1.logRequest)('Admin organization search completed', req, res, {
searchTerm,
resultCount: Array.isArray(organizations) ? organizations.length : (organizations.totalCount || 0),
includeDeleted
});
res.json(organizations);
}
catch (error) {
(0, Logger_1.logError)('Admin search organizations endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// Soft delete organization (admin only)
router.delete('/organizations/:id', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const organizationId = req.params.id;
(0, Logger_1.logRequest)('Admin soft delete organization endpoint accessed', req, res, { organizationId });
const result = await container.deleteOrganizationCommandHandler.execute({ id: organizationId, soft: true });
(0, Logger_1.logRequest)('Admin organization soft delete successful', req, res, { organizationId, success: result });
res.json({ success: result });
}
catch (error) {
(0, Logger_1.logError)('Admin soft delete organization endpoint error', error, req, res);
if (error instanceof Error && error.message.includes('not found')) {
return res.status(404).json({ error: 'Organization not found' });
}
res.status(500).json({ error: 'Internal server error' });
}
});
// Hard delete organization (admin only)
router.delete('/organizations/:id/hard', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const organizationId = req.params.id;
(0, Logger_1.logRequest)('Admin hard delete organization endpoint accessed', req, res, { organizationId });
const result = await container.deleteOrganizationCommandHandler.execute({ id: organizationId, soft: false });
(0, Logger_1.logRequest)('Admin organization hard delete successful', req, res, { organizationId, success: result });
res.json({ success: result });
}
catch (error) {
(0, Logger_1.logError)('Admin hard delete organization endpoint error', error, req, res);
if (error instanceof Error && error.message.includes('not found')) {
return res.status(404).json({ error: 'Organization not found' });
}
res.status(500).json({ error: 'Internal server error' });
}
});
// =============================================================================
// CHAT MANAGEMENT ROUTES
// =============================================================================
// Get chats with pagination (RECOMMENDED)
router.get('/chats/page/:from/:to', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const from = parseInt(req.params.from);
const to = parseInt(req.params.to);
const includeDeleted = req.query.includeDeleted === 'true';
if (isNaN(from) || isNaN(to) || from < 0 || to < from) {
return res.status(400).json({
error: 'Invalid pagination parameters. From and to must be valid numbers with from <= to.'
});
}
const limit = to - from + 1;
if (limit > 100) {
return res.status(400).json({
error: 'Page size too large. Maximum 100 records per request.'
});
}
(0, Logger_1.logRequest)('Admin paginated chats endpoint accessed', req, res, { from, to, includeDeleted });
const result = await container.getChatsByPageQueryHandler.execute({
from,
to,
includeDeleted
});
const response = {
chats: result.chats,
pagination: {
from,
to,
returned: result.chats.length,
totalCount: result.totalCount,
includeDeleted
}
};
(0, Logger_1.logRequest)('Admin chats retrieved successfully', req, res, {
returnedChats: result.chats.length,
totalCount: result.totalCount,
from,
to,
includeDeleted
});
return res.status(200).json(response);
}
catch (error) {
(0, Logger_1.logError)('Error in admin get chats endpoint', error, req, res);
return res.status(500).json({ error: 'Internal server error' });
}
});
// Get chat by ID including soft-deleted ones
router.get('/chats/:id', AuthMiddleware_1.adminRequired, async (req, res) => {
try {
const { id } = req.params;
const includeDeleted = req.query.includeDeleted === 'true';
(0, Logger_1.logRequest)('Admin get chat by id endpoint accessed', req, res, { chatId: id, includeDeleted });
const chat = includeDeleted
? await container.chatRepository.findByIdIncludingDeleted(id)
: await container.chatRepository.findById(id);
if (!chat) {
(0, Logger_1.logWarning)('Chat not found', { chatId: id, includeDeleted }, req, res);
return res.status(404).json({ error: 'Chat not found' });
}
(0, Logger_1.logRequest)('Admin chat retrieved successfully', req, res, { chatId: id, includeDeleted });
res.json(chat);
}
catch (error) {
(0, Logger_1.logError)('Admin get chat by id endpoint error', error, req, res);
res.status(500).json({ error: 'Internal server error' });
}
});
// =============================================================================
// CONTACT MANAGEMENT ROUTES
// =============================================================================
// Get contacts by page (admin only) - RECOMMENDED (already exists, enhanced)
// Get contacts by page (admin only) — inclusive [from, to] index range;
// ?includeDeleted=true also returns soft-deleted records.
// 400 on malformed/negative/inverted bounds or an oversized page.
router.get('/contacts/page/:from/:to', AuthMiddleware_1.adminRequired, async (req, res) => {
    try {
        const from = parseInt(req.params.from);
        const to = parseInt(req.params.to);
        const includeDeleted = req.query.includeDeleted === 'true';
        if (isNaN(from) || isNaN(to) || from < 0 || to < from) {
            return res.status(400).json({ error: 'Invalid page parameters. "from" and "to" must be valid numbers with to >= from >= 0' });
        }
        // Cap the page size at 100, matching the /chats/page/:from/:to endpoint,
        // so a single request cannot pull an unbounded number of records.
        if (to - from + 1 > 100) {
            return res.status(400).json({ error: 'Page size too large. Maximum 100 records per request.' });
        }
        (0, Logger_1.logRequest)('Admin get contacts by page endpoint accessed', req, res, { from, to, includeDeleted });
        const result = includeDeleted
            ? await container.contactRepository.findByPageIncludingDeleted(from, to)
            : await container.contactRepository.findByPage(from, to);
        (0, Logger_1.logRequest)('Admin contacts page retrieved successfully', req, res, {
            from,
            to,
            count: result.contacts.length,
            total: result.totalCount,
            includeDeleted
        });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Admin get contacts by page endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// Fetch a single contact by ID (admin only); ?includeDeleted=true routes the
// lookup through the repository (soft-deleted included) instead of the
// query handler.
router.get('/contacts/:id', AuthMiddleware_1.adminRequired, async (req, res) => {
    const contactId = req.params.id;
    const includeDeleted = req.query.includeDeleted === 'true';
    try {
        (0, Logger_1.logRequest)('Admin get contact by ID endpoint accessed', req, res, { contactId, includeDeleted });
        let contact;
        if (includeDeleted) {
            contact = await container.contactRepository.findByIdIncludingDeleted(contactId);
        }
        else {
            contact = await container.getContactByIdQueryHandler.execute({ id: contactId });
        }
        if (contact) {
            (0, Logger_1.logRequest)('Admin contact retrieved successfully', req, res, { contactId, includeDeleted });
            return res.json(contact);
        }
        (0, Logger_1.logRequest)('Contact not found', req, res, { contactId, includeDeleted });
        return res.status(404).json({ error: 'Contact not found' });
    }
    catch (error) {
        (0, Logger_1.logError)('Admin get contact by ID endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// Search contacts by term (admin only); ?includeDeleted=true extends the
// search to soft-deleted records.
router.get('/contacts/search/:searchTerm', AuthMiddleware_1.adminRequired, async (req, res) => {
    try {
        const searchTerm = req.params.searchTerm;
        const includeDeleted = req.query.includeDeleted === 'true';
        (0, Logger_1.logRequest)('Admin search contacts endpoint accessed', req, res, { searchTerm, includeDeleted });
        const repo = container.contactRepository;
        const matches = await (includeDeleted
            ? repo.searchIncludingDeleted(searchTerm)
            : repo.search(searchTerm));
        (0, Logger_1.logRequest)('Admin contact search completed', req, res, {
            searchTerm,
            resultCount: matches.length,
            includeDeleted
        });
        res.json(matches);
    }
    catch (error) {
        (0, Logger_1.logError)('Admin search contacts endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// Respond to contact (admin only)
//
// PUT /contacts/:id/respond — stores an admin response on a contact record
// and optionally emails it to the contact.
// Body: { adminResponse: string, sendEmail?: boolean, language?: 'en'|'hu'|'de' }.
// 400 when adminResponse is missing, 404 when the contact does not exist;
// on success returns { success, message, contact, emailSent, emailError }.
// Note: a failed email does NOT fail the request — the response is still
// saved and emailError is reported in the payload.
router.put('/contacts/:id/respond', AuthMiddleware_1.adminRequired, async (req, res) => {
    try {
        const contactId = req.params.id;
        const adminUserId = req.user.userId;
        const { adminResponse, sendEmail, language } = req.body;
        if (!adminResponse) {
            return res.status(400).json({ error: 'Admin response is required' });
        }
        // Determine language from body, headers, or default to English
        let selectedLanguage = language;
        if (!selectedLanguage) {
            // Try to get language from Accept-Language header
            const acceptLanguage = req.headers['accept-language'];
            // Try to get language from custom headers (common frontend patterns)
            const regionHeader = req.headers['x-region'];
            const languageHeader = req.headers['x-language'];
            const localeHeader = req.headers['x-locale'];
            // Precedence: x-language > x-locale > x-region > Accept-Language > 'en'.
            // NOTE(review): extractLanguageFromAcceptHeader is defined elsewhere in
            // this file — presumably it returns the first language tag; confirm.
            selectedLanguage = languageHeader ||
                localeHeader ||
                regionHeader ||
                extractLanguageFromAcceptHeader(acceptLanguage) ||
                'en';
        }
        // Validate and normalize language parameter
        // NOTE(review): repeated headers can arrive as string arrays in Express;
        // .toLowerCase() would then throw — confirm upstream guarantees a string.
        if (!['en', 'hu', 'de'].includes(selectedLanguage.toLowerCase())) {
            selectedLanguage = 'en'; // Fallback to English for unsupported languages
        }
        else {
            selectedLanguage = selectedLanguage.toLowerCase();
        }
        (0, Logger_1.logRequest)('Admin respond to contact endpoint accessed', req, res, {
            contactId,
            adminUserId,
            sendEmail,
            language: selectedLanguage,
            headerLanguage: req.headers['accept-language'] || req.headers['x-language'] || 'none'
        });
        // Update contact with response
        const result = await container.updateContactCommandHandler.execute({
            id: contactId,
            adminResponse,
            respondedBy: adminUserId
        });
        if (!result) {
            (0, Logger_1.logWarning)('Contact not found for response', { contactId }, req, res);
            return res.status(404).json({ error: 'Contact not found' });
        }
        // Send email if requested
        // Email failure is captured into emailError rather than propagated, so the
        // saved response is never rolled back by a mail outage.
        let emailSent = false;
        let emailError = null;
        if (sendEmail === true && adminResponse) {
            try {
                await container.contactEmailService.sendResponse({
                    to: result.email,
                    message: adminResponse,
                    contactId: contactId,
                    adminUserId: adminUserId,
                    contactName: result.name,
                    contactType: result.type,
                    originalMessage: result.txt,
                    language: selectedLanguage
                });
                emailSent = true;
                (0, Logger_1.logRequest)('Contact response email sent successfully', req, res, {
                    contactId,
                    recipientEmail: result.email,
                    language: selectedLanguage
                });
            }
            catch (emailErr) {
                emailError = emailErr instanceof Error ? emailErr.message : 'Email sending failed';
                (0, Logger_1.logError)('Contact response email failed', emailErr, req, res);
            }
        }
        // Audit trail entry is written after the update succeeds, including the
        // email outcome in its metadata.
        AdminBypassService_1.AdminAuditService.logAdminAction('RESPOND_TO_CONTACT', adminUserId, {
            targetType: 'contact',
            targetId: contactId,
            operation: 'update',
            changes: { adminResponse, sendEmail, language: selectedLanguage },
            metadata: { emailSent, emailError }
        }, req, res);
        (0, Logger_1.logRequest)('Admin contact response saved successfully', req, res, {
            contactId,
            sendEmail,
            emailSent,
            language: selectedLanguage
        });
        res.json({
            success: true,
            message: 'Response saved successfully',
            contact: result,
            emailSent,
            emailError: emailSent ? null : emailError
        });
    }
    catch (error) {
        (0, Logger_1.logError)('Admin respond to contact endpoint error', error, req, res);
        // Map known domain errors (matched by message substring) onto HTTP codes.
        if (error instanceof Error && error.message.includes('not found')) {
            return res.status(404).json({ error: 'Contact not found' });
        }
        if (error instanceof Error && error.message.includes('validation')) {
            return res.status(400).json({ error: 'Invalid input data', details: error.message });
        }
        res.status(500).json({ error: 'Internal server error' });
    }
});
// Resend contact email (admin only) - NEW ENDPOINT
//
// POST /contacts/:id/resend-email — re-sends a previously saved admin response
// to the contact's email address. Body: { language?: 'en'|'hu'|'de' }.
// 404 when the contact does not exist, 400 when no response has been saved,
// 500 with details when the email service fails.
router.post('/contacts/:id/resend-email', AuthMiddleware_1.adminRequired, async (req, res) => {
    try {
        const contactId = req.params.id;
        const adminUserId = req.user.userId;
        const { language } = req.body;
        (0, Logger_1.logRequest)('Admin resend contact email endpoint accessed', req, res, {
            contactId,
            adminUserId,
            language
        });
        // Get contact details
        const contact = await container.getContactByIdQueryHandler.execute({ id: contactId });
        if (!contact) {
            return res.status(404).json({ error: 'Contact not found' });
        }
        if (!contact.adminResponse) {
            return res.status(400).json({ error: 'No admin response found to resend' });
        }
        // Normalize the language the same way as the respond endpoint: only
        // en/hu/de are supported, anything else (including non-string bodies)
        // falls back to English. Previously the raw body value was passed
        // through to the email service unvalidated.
        const requestedLanguage = typeof language === 'string' ? language.toLowerCase() : 'en';
        const selectedLanguage = ['en', 'hu', 'de'].includes(requestedLanguage) ? requestedLanguage : 'en';
        try {
            await container.contactEmailService.sendResponse({
                to: contact.email,
                message: contact.adminResponse,
                contactId: contactId,
                adminUserId: adminUserId,
                contactName: contact.name,
                contactType: contact.type,
                originalMessage: contact.txt,
                language: selectedLanguage
            });
            // Audit only after a successful send.
            AdminBypassService_1.AdminAuditService.logAdminAction('RESEND_CONTACT_EMAIL', adminUserId, {
                targetType: 'contact',
                targetId: contactId,
                operation: 'create',
                metadata: { language: selectedLanguage, action: 'resend' }
            }, req, res);
            (0, Logger_1.logRequest)('Contact email resent successfully', req, res, {
                contactId,
                recipientEmail: contact.email,
                language: selectedLanguage
            });
            res.json({
                success: true,
                message: 'Email resent successfully'
            });
        }
        catch (emailErr) {
            (0, Logger_1.logError)('Contact email resend failed', emailErr, req, res);
            res.status(500).json({
                error: 'Failed to resend email',
                details: emailErr instanceof Error ? emailErr.message : 'Unknown error'
            });
        }
    }
    catch (error) {
        (0, Logger_1.logError)('Admin resend contact email endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// Soft delete a contact (admin only): marks the record deleted without
// removing it, then writes an admin audit entry.
router.delete('/contacts/:id', AuthMiddleware_1.adminRequired, async (req, res) => {
    const contactId = req.params.id;
    try {
        const adminUserId = req.user.userId;
        (0, Logger_1.logRequest)('Admin soft delete contact endpoint accessed', req, res, { contactId, adminUserId });
        const deleted = await container.deleteContactCommandHandler.execute({ id: contactId, hard: false });
        AdminBypassService_1.AdminAuditService.logAdminAction('SOFT_DELETE_CONTACT', adminUserId, {
            targetType: 'contact',
            targetId: contactId,
            operation: 'update'
        }, req, res);
        (0, Logger_1.logAuth)('Contact soft deleted by admin', contactId, { adminUserId }, req, res);
        res.json({ success: deleted });
    }
    catch (error) {
        (0, Logger_1.logError)('Admin soft delete contact endpoint error', error, req, res);
        if (error instanceof Error && error.message.includes('not found')) {
            return res.status(404).json({ error: 'Contact not found' });
        }
        res.status(500).json({ error: 'Internal server error' });
    }
});
// Hard delete a contact (admin only): permanently removes the record and
// writes a sensitive-flagged audit entry.
router.delete('/contacts/:id/hard', AuthMiddleware_1.adminRequired, async (req, res) => {
    const contactId = req.params.id;
    try {
        const adminUserId = req.user.userId;
        (0, Logger_1.logRequest)('Admin hard delete contact endpoint accessed', req, res, { contactId, adminUserId });
        const removed = await container.deleteContactCommandHandler.execute({ id: contactId, hard: true });
        AdminBypassService_1.AdminAuditService.logAdminAction('HARD_DELETE_CONTACT', adminUserId, {
            targetType: 'contact',
            targetId: contactId,
            operation: 'delete',
            sensitive: true
        }, req, res);
        (0, Logger_1.logAuth)('Contact hard deleted by admin', contactId, { adminUserId }, req, res);
        res.json({ success: removed });
    }
    catch (error) {
        (0, Logger_1.logError)('Admin hard delete contact endpoint error', error, req, res);
        if (error instanceof Error && error.message.includes('not found')) {
            return res.status(404).json({ error: 'Contact not found' });
        }
        res.status(500).json({ error: 'Internal server error' });
    }
});
// =============================================================================
// DECK IMPORT/EXPORT ROUTES (ADMIN)
// =============================================================================
// Import deck from JSON file (unencrypted, admin only)
//
// POST /decks/import — multipart upload ("file" field, buffered in memory by
// multer) containing a plain-JSON deck. 400 on missing file or unparsable
// JSON; on success the imported deck is owned by the importing admin and its
// new ID is returned.
router.post('/decks/import', AuthMiddleware_1.adminRequired, upload.single('file'), async (req, res) => {
    try {
        if (!req.file) {
            return res.status(400).json({ error: 'No file uploaded' });
        }
        const userId = req.user.userId;
        const fileContent = req.file.buffer.toString('utf-8');
        (0, Logger_1.logRequest)('Admin deck import from JSON endpoint accessed', req, res, { fileName: req.file.originalname });
        let jsonData;
        try {
            jsonData = JSON.parse(fileContent);
        }
        catch (parseError) {
            return res.status(400).json({ error: 'Invalid JSON format' });
        }
        // For admin import, we need to specify both target user and admin user
        // Let's assume the deck will be owned by the admin user doing the import
        // NOTE(review): both arguments here are the admin's userId (owner and
        // actor). Confirm adminImportFromJson(data, targetUserId, adminUserId)
        // is the intended parameter order and that admin-owned decks are desired.
        const result = await container.deckImportExportService.adminImportFromJson(jsonData, userId, userId);
        (0, Logger_1.logRequest)('Admin deck import successful', req, res, { deckId: result.id, fileName: req.file.originalname });
        res.json({
            success: true,
            message: 'Deck imported successfully',
            deckId: result.id
        });
    }
    catch (error) {
        (0, Logger_1.logError)('Admin deck import from JSON error', error, req, res);
        // Domain validation failures are mapped by message substring to 400.
        if (error instanceof Error && error.message.includes('Invalid')) {
            res.status(400).json({ error: 'Invalid deck data structure' });
        }
        else {
            res.status(500).json({ error: 'Internal server error' });
        }
    }
});
// Export deck as JSON (unencrypted, admin only)
//
// GET /decks/:deckId/export — returns the raw deck document as a JSON
// attachment download. 404 when the deck does not exist.
router.get('/decks/:deckId/export', AuthMiddleware_1.adminRequired, async (req, res) => {
    try {
        const { deckId } = req.params;
        (0, Logger_1.logRequest)('Admin deck export as JSON endpoint accessed', req, res, { deckId });
        const deck = await container.deckRepository.findById(deckId);
        if (!deck) {
            (0, Logger_1.logWarning)('Deck not found for export', { deckId }, req, res);
            return res.status(404).json({ error: 'Deck not found' });
        }
        (0, Logger_1.logRequest)('Admin deck export successful', req, res, { deckId, deckName: deck.name });
        // deck.name is user-controlled; strip quotes/control/special characters so
        // it cannot break out of — or inject CRLF into — the Content-Disposition
        // header value.
        const safeName = String(deck.name || 'deck').replace(/[^\w .-]+/g, '_');
        // Return deck as JSON for admin export
        res.setHeader('Content-Type', 'application/json');
        res.setHeader('Content-Disposition', `attachment; filename="${safeName}.json"`);
        res.json(deck);
    }
    catch (error) {
        (0, Logger_1.logError)('Admin deck export as JSON error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
exports.default = router;
//# sourceMappingURL=adminRouter.js.map
File diff suppressed because one or more lines are too long
+3
View File
@@ -0,0 +1,3 @@
declare const chatRouter: import("express-serve-static-core").Router;
export default chatRouter;
//# sourceMappingURL=chatRouter.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"chatRouter.d.ts","sourceRoot":"","sources":["../../../src/Api/routers/chatRouter.ts"],"names":[],"mappings":"AAOA,QAAA,MAAM,UAAU,4CAAmB,CAAC;AAuRpC,eAAe,UAAU,CAAC"}
+231
View File
@@ -0,0 +1,231 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const express_1 = __importDefault(require("express"));
const AuthMiddleware_1 = require("../../Application/Services/AuthMiddleware");
const DIContainer_1 = require("../../Application/Services/DIContainer");
const ErrorResponseService_1 = require("../../Application/Services/ErrorResponseService");
const ValidationMiddleware_1 = require("../../Application/Services/ValidationMiddleware");
const Logger_1 = require("../../Application/Services/Logger");
const chatRouter = express_1.default.Router();
// List the authenticated user's chats; ?includeArchived=true also returns
// archived conversations.
chatRouter.get('/user-chats', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const userId = req.user.userId;
        const includeArchived = req.query.includeArchived === 'true';
        (0, Logger_1.logRequest)('Get user chats endpoint accessed', req, res, { userId, includeArchived });
        const userChats = await DIContainer_1.container.getUserChatsQueryHandler.execute({ userId, includeArchived });
        (0, Logger_1.logRequest)('User chats retrieved successfully', req, res, { userId, chatCount: userChats.length });
        res.json(userChats);
    }
    catch (error) {
        (0, Logger_1.logError)('Get user chats endpoint error', error, req, res);
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
// Fetch a chat's message history. The query handler also enforces membership,
// so a missing result covers both "not found" and "unauthorized".
chatRouter.get('/history/:chatId', AuthMiddleware_1.authRequired, ValidationMiddleware_1.ValidationMiddleware.validateUUIDFormat(['chatId']), async (req, res) => {
    try {
        const { chatId } = req.params;
        const userId = req.user.userId;
        (0, Logger_1.logRequest)('Get chat history endpoint accessed', req, res, { userId, chatId });
        const history = await DIContainer_1.container.getChatHistoryQueryHandler.execute({ chatId, userId });
        if (history) {
            (0, Logger_1.logRequest)('Chat history retrieved successfully', req, res, {
                userId,
                chatId,
                messageCount: history.messages.length,
                isArchived: history.isArchived
            });
            return res.json(history);
        }
        (0, Logger_1.logWarning)('Chat history not found or unauthorized', { userId, chatId }, req, res);
        return ErrorResponseService_1.ErrorResponseService.sendNotFound(res, 'Chat not found or unauthorized');
    }
    catch (error) {
        (0, Logger_1.logError)('Get chat history endpoint error', error, req, res);
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
// Create new chat (direct/group)
//
// POST /create — body: { type: 'direct'|'group', name?: string, userIds: string[] }.
// Middleware enforces required fields, the allowed type values, and a
// non-empty userIds array; group chats additionally require a non-blank name.
// Returns a trimmed projection of the created chat (id/type/name/users/messages).
chatRouter.post('/create', AuthMiddleware_1.authRequired, ValidationMiddleware_1.ValidationMiddleware.combine([
    ValidationMiddleware_1.ValidationMiddleware.validateRequiredFields(['type', 'userIds']),
    ValidationMiddleware_1.ValidationMiddleware.validateAllowedValues({ type: ['direct', 'group'] }),
    ValidationMiddleware_1.ValidationMiddleware.validateNonEmptyArrays(['userIds'])
]), async (req, res) => {
    try {
        const userId = req.user.userId;
        const { type, name, userIds } = req.body;
        (0, Logger_1.logRequest)('Create chat endpoint accessed', req, res, {
            userId,
            type,
            targetUserCount: userIds?.length || 0
        });
        // Group chats must carry a non-blank display name.
        if (type === 'group' && !name?.trim()) {
            return ErrorResponseService_1.ErrorResponseService.sendBadRequest(res, 'Group name is required');
        }
        const chat = await DIContainer_1.container.createChatCommandHandler.execute({
            type,
            name: name?.trim(),
            createdBy: userId,
            userIds
        });
        if (!chat) {
            return ErrorResponseService_1.ErrorResponseService.sendBadRequest(res, 'Failed to create chat');
        }
        (0, Logger_1.logRequest)('Chat created successfully', req, res, {
            userId,
            chatId: chat.id,
            chatType: chat.type
        });
        res.json({
            id: chat.id,
            type: chat.type,
            name: chat.name,
            users: chat.users,
            messages: chat.messages
        });
    }
    catch (error) {
        (0, Logger_1.logError)('Create chat endpoint error', error, req, res);
        // Known domain errors are mapped by message substring: premium
        // restriction -> 403, unknown participant -> 404; anything else -> 500.
        if (error instanceof Error) {
            if (error.message.includes('Premium subscription required')) {
                return ErrorResponseService_1.ErrorResponseService.sendForbidden(res, 'Premium subscription required to create groups');
            }
            if (error.message.includes('not found')) {
                return ErrorResponseService_1.ErrorResponseService.sendNotFound(res, 'One or more users not found');
            }
        }
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
// Send message (REST endpoint - mainly for testing, real messaging is via WebSocket)
//
// POST /message — body: { chatId: UUID, message: string (1..2000 chars) },
// both enforced by the validation middleware chain below.
// Returns the persisted message object on success.
chatRouter.post('/message', AuthMiddleware_1.authRequired, ValidationMiddleware_1.ValidationMiddleware.combine([
    ValidationMiddleware_1.ValidationMiddleware.validateRequiredFields(['chatId', 'message']),
    ValidationMiddleware_1.ValidationMiddleware.validateUUIDFormat(['chatId']),
    ValidationMiddleware_1.ValidationMiddleware.validateStringLength({ message: { min: 1, max: 2000 } })
]), async (req, res) => {
    try {
        const userId = req.user.userId;
        const { chatId, message } = req.body;
        (0, Logger_1.logRequest)('Send message endpoint accessed', req, res, {
            userId,
            chatId,
            messageLength: message?.length || 0
        });
        const sentMessage = await DIContainer_1.container.sendMessageCommandHandler.execute({
            chatId,
            userId,
            message
        });
        if (!sentMessage) {
            return ErrorResponseService_1.ErrorResponseService.sendBadRequest(res, 'Failed to send message');
        }
        (0, Logger_1.logRequest)('Message sent successfully', req, res, {
            userId,
            chatId,
            messageId: sentMessage.id
        });
        res.json(sentMessage);
    }
    catch (error) {
        (0, Logger_1.logError)('Send message endpoint error', error, req, res);
        // Known domain errors are mapped by message substring onto HTTP codes:
        // missing chat -> 404, non-membership -> 403, empty message -> 400.
        if (error instanceof Error) {
            if (error.message.includes('Chat not found')) {
                return ErrorResponseService_1.ErrorResponseService.sendNotFound(res, 'Chat not found');
            }
            if (error.message.includes('not a member')) {
                return ErrorResponseService_1.ErrorResponseService.sendForbidden(res, 'Not authorized to send messages to this chat');
            }
            if (error.message.includes('non-empty string')) {
                return ErrorResponseService_1.ErrorResponseService.sendBadRequest(res, 'Message must be a non-empty string');
            }
        }
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
// Manually archive a chat. The caller must be a member of the chat;
// the archive command itself may still refuse (-> 400).
chatRouter.post('/archive/:chatId', AuthMiddleware_1.authRequired, ValidationMiddleware_1.ValidationMiddleware.validateUUIDFormat(['chatId']), async (req, res) => {
    try {
        const { chatId } = req.params;
        const userId = req.user.userId;
        (0, Logger_1.logRequest)('Archive chat endpoint accessed', req, res, { userId, chatId });
        // Membership check before archiving.
        const chat = await DIContainer_1.container.chatRepository.findById(chatId);
        if (!chat) {
            return ErrorResponseService_1.ErrorResponseService.sendNotFound(res, 'Chat not found');
        }
        const isMember = chat.users.includes(userId);
        if (!isMember) {
            return ErrorResponseService_1.ErrorResponseService.sendForbidden(res, 'Not authorized to archive this chat');
        }
        const archived = await DIContainer_1.container.archiveChatCommandHandler.execute({ chatId });
        if (!archived) {
            return ErrorResponseService_1.ErrorResponseService.sendBadRequest(res, 'Failed to archive chat');
        }
        (0, Logger_1.logRequest)('Chat archived successfully', req, res, { userId, chatId });
        res.json({ success: true, message: 'Chat archived successfully' });
    }
    catch (error) {
        (0, Logger_1.logError)('Archive chat endpoint error', error, req, res);
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
// Restore chat from archive
//
// POST /restore/:chatId — restores an archived chat the caller participated
// in. 404 when no archive entry lists the caller; 400 when the restore is
// refused (game chats cannot be restored).
chatRouter.post('/restore/:chatId', AuthMiddleware_1.authRequired, ValidationMiddleware_1.ValidationMiddleware.validateUUIDFormat(['chatId']), async (req, res) => {
    try {
        const userId = req.user.userId;
        const chatId = req.params.chatId;
        (0, Logger_1.logRequest)('Restore chat endpoint accessed', req, res, { userId, chatId });
        // Check if user has access to this archived chat.
        // Guard against a null/undefined repository result: previously a missing
        // archive list made .find() throw, surfacing as a 500 instead of a 404.
        const archive = (await DIContainer_1.container.chatArchiveRepository.findByChatId(chatId)) || [];
        const userArchive = archive.find((a) => a.participants.includes(userId));
        if (!userArchive) {
            return ErrorResponseService_1.ErrorResponseService.sendNotFound(res, 'Archived chat not found or unauthorized');
        }
        const success = await DIContainer_1.container.restoreChatCommandHandler.execute({ chatId });
        if (!success) {
            return ErrorResponseService_1.ErrorResponseService.sendBadRequest(res, 'Failed to restore chat (game chats cannot be restored)');
        }
        (0, Logger_1.logRequest)('Chat restored successfully', req, res, { userId, chatId });
        res.json({ success: true, message: 'Chat restored successfully' });
    }
    catch (error) {
        (0, Logger_1.logError)('Restore chat endpoint error', error, req, res);
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
// List archived chats belonging to a game that the caller participated in.
chatRouter.get('/archived/game/:gameId', AuthMiddleware_1.authRequired, ValidationMiddleware_1.ValidationMiddleware.validateUUIDFormat(['gameId']), async (req, res) => {
    try {
        const { gameId } = req.params;
        const userId = req.user.userId;
        (0, Logger_1.logRequest)('Get archived game chats endpoint accessed', req, res, { userId, gameId });
        const archives = await DIContainer_1.container.getArchivedChatsQueryHandler.execute({ userId, gameId });
        (0, Logger_1.logRequest)('Archived game chats retrieved successfully', req, res, {
            userId,
            gameId,
            chatCount: archives.length
        });
        res.json(archives);
    }
    catch (error) {
        (0, Logger_1.logError)('Get archived game chats endpoint error', error, req, res);
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
exports.default = chatRouter;
//# sourceMappingURL=chatRouter.js.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,3 @@
declare const contactRouter: import("express-serve-static-core").Router;
export default contactRouter;
//# sourceMappingURL=contactRouter.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"contactRouter.d.ts","sourceRoot":"","sources":["../../../src/Api/routers/contactRouter.ts"],"names":[],"mappings":"AAKA,QAAA,MAAM,aAAa,4CAAW,CAAC;AA+C/B,eAAe,aAAa,CAAC"}
+46
View File
@@ -0,0 +1,46 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const express_1 = require("express");
const DIContainer_1 = require("../../Application/Services/DIContainer");
const Logger_1 = require("../../Application/Services/Logger");
const ContactAggregate_1 = require("../../Domain/Contact/ContactAggregate");
const contactRouter = (0, express_1.Router)();
// Public endpoint - anyone can create a contact. If the request carries an
// authenticated user the contact is linked to them; otherwise userid is null.
contactRouter.post('/', async (req, res) => {
    try {
        const userId = req.user?.userId || null;
        const { name, email, type, txt } = req.body;
        // Reject requests missing any required field. type 0 is a valid value,
        // so it is compared against undefined rather than tested for truthiness.
        const missingField = !name || !email || type === undefined || !txt;
        if (missingField) {
            return res.status(400).json({
                error: 'Missing required fields: name, email, type, and txt are required'
            });
        }
        const numericType = Number(type);
        if (!Object.values(ContactAggregate_1.ContactType).includes(numericType)) {
            return res.status(400).json({
                error: 'Invalid contact type. Must be one of: 0 (Bug), 1 (Problem), 2 (Question), 3 (Sales), 4 (Other)'
            });
        }
        (0, Logger_1.logRequest)('Create contact endpoint accessed', req, res, { name, email, type, userId });
        const created = await DIContainer_1.container.createContactCommandHandler.execute({
            name,
            email,
            userid: userId,
            type: numericType,
            txt
        });
        (0, Logger_1.logRequest)('Contact created successfully', req, res, { contactId: created.id, name, email, type });
        res.status(201).json(created);
    }
    catch (error) {
        (0, Logger_1.logError)('Create contact endpoint error', error, req, res);
        if (error instanceof Error && error.message.includes('validation')) {
            return res.status(400).json({ error: 'Invalid input data', details: error.message });
        }
        res.status(500).json({ error: 'Internal server error' });
    }
});
exports.default = contactRouter;
//# sourceMappingURL=contactRouter.js.map
@@ -0,0 +1 @@
{"version":3,"file":"contactRouter.js","sourceRoot":"","sources":["../../../src/Api/routers/contactRouter.ts"],"names":[],"mappings":";;AAAA,qCAAiC;AACjC,wEAAmE;AACnE,8DAAyE;AACzE,4EAAoE;AAEpE,MAAM,aAAa,GAAG,IAAA,gBAAM,GAAE,CAAC;AAE/B,gDAAgD;AAChD,aAAa,CAAC,IAAI,CAAC,GAAG,EAAE,KAAK,EAAE,GAAG,EAAE,GAAG,EAAE,EAAE;IAC1C,IAAI,CAAC;QACJ,0CAA0C;QAC1C,MAAM,MAAM,GAAI,GAAW,CAAC,IAAI,EAAE,MAAM,IAAI,IAAI,CAAC;QAEjD,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,EAAE,GAAG,GAAG,CAAC,IAAI,CAAC;QAE5C,2BAA2B;QAC3B,IAAI,CAAC,IAAI,IAAI,CAAC,KAAK,IAAI,IAAI,KAAK,SAAS,IAAI,CAAC,GAAG,EAAE,CAAC;YACnD,OAAO,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC;gBAC3B,KAAK,EAAE,kEAAkE;aACzE,CAAC,CAAC;QACJ,CAAC;QAED,gBAAgB;QAChB,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,8BAAW,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC;YACxD,OAAO,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC;gBAC3B,KAAK,EAAE,gGAAgG;aACvG,CAAC,CAAC;QACJ,CAAC;QAED,IAAA,mBAAU,EAAC,kCAAkC,EAAE,GAAG,EAAE,GAAG,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAC;QAExF,MAAM,MAAM,GAAG,MAAM,uBAAS,CAAC,2BAA2B,CAAC,OAAO,CAAC;YAClE,IAAI;YACJ,KAAK;YACL,MAAM,EAAE,MAAM;YACd,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC;YAClB,GAAG;SACH,CAAC,CAAC;QAEH,IAAA,mBAAU,EAAC,8BAA8B,EAAE,GAAG,EAAE,GAAG,EAAE,EAAE,SAAS,EAAE,MAAM,CAAC,EAAE,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QAClG,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC9B,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QAChB,IAAA,iBAAQ,EAAC,+BAA+B,EAAE,KAAc,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAEpE,IAAI,KAAK,YAAY,KAAK,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC;YACpE,OAAO,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,oBAAoB,EAAE,OAAO,EAAE,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;QACtF,CAAC;QAED,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,uBAAuB,EAAE,CAAC,CAAC;IAC1D,CAAC;AACF,CAAC,CAAC,CAAC;AAEH,kBAAe,aAAa,CAAC"}
@@ -0,0 +1,10 @@
declare global {
namespace Express {
interface Request {
file?: Express.Multer.File;
}
}
}
declare const router: import("express-serve-static-core").Router;
export default router;
//# sourceMappingURL=deckImportExportRouter.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"deckImportExportRouter.d.ts","sourceRoot":"","sources":["../../../src/Api/routers/deckImportExportRouter.ts"],"names":[],"mappings":"AAOA,OAAO,CAAC,MAAM,CAAC;IACX,UAAU,OAAO,CAAC;QACd,UAAU,OAAO;YACb,IAAI,CAAC,EAAE,OAAO,CAAC,MAAM,CAAC,IAAI,CAAC;SAC9B;KACJ;CACJ;AAED,QAAA,MAAM,MAAM,4CAAmB,CAAC;AA4GhC,eAAe,MAAM,CAAC"}
@@ -0,0 +1,106 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const express_1 = __importDefault(require("express"));
const multer_1 = __importDefault(require("multer"));
const DIContainer_1 = require("../../Application/Services/DIContainer");
const AuthMiddleware_1 = require("../../Application/Services/AuthMiddleware");
const Logger_1 = require("../../Application/Services/Logger");
const router = express_1.default.Router();
const container = DIContainer_1.DIContainer.getInstance();
// Configure multer for file uploads
// In-memory storage: uploaded files are exposed as req.file.buffer (never
// written to disk) and capped at 10MB.
const upload = (0, multer_1.default)({
    storage: multer_1.default.memoryStorage(),
    limits: {
        fileSize: 10 * 1024 * 1024, // 10MB limit
    },
    // Accept only JSON payloads or .spr deck files; anything else is rejected
    // with a multer error before the route handler runs.
    fileFilter: (req, file, cb) => {
        if (file.mimetype === 'application/json' || file.originalname.endsWith('.spr')) {
            cb(null, true);
        }
        else {
            cb(new Error('Only JSON and .spr files are allowed'));
        }
    }
});
// Export deck to .spr file (encrypted) - users can only export their own decks
//
// GET /export/:deckId — streams the encrypted .spr representation of a deck
// as an attachment. 404 when the deck does not exist, 403 when the caller is
// not the owner.
router.get('/export/:deckId', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const { deckId } = req.params;
        const userId = req.user.userId;
        (0, Logger_1.logRequest)('Export deck endpoint accessed', req, res, { deckId, userId });
        // Check if user owns the deck
        const deck = await container.deckRepository.findById(deckId);
        if (!deck) {
            (0, Logger_1.logWarning)('Deck not found for export', { deckId, userId }, req, res);
            return res.status(404).json({ error: 'Deck not found' });
        }
        // Users can only export their own decks
        if (deck.userid !== userId) {
            (0, Logger_1.logWarning)('Access denied - user attempted to export deck they do not own', {
                deckId,
                userId,
                deckOwnerId: deck.userid
            }, req, res);
            return res.status(403).json({ error: 'Access denied - you can only export your own decks' });
        }
        const sprData = await container.deckImportExportService.exportDeckToSpr(deckId, userId);
        // deck.name is user-controlled; strip quotes/control/special characters so
        // it cannot break out of — or inject CRLF into — the Content-Disposition
        // header value.
        const safeName = String(deck.name || 'deck').replace(/[^\w .-]+/g, '_');
        res.setHeader('Content-Type', 'application/octet-stream');
        res.setHeader('Content-Disposition', `attachment; filename="${safeName}.spr"`);
        (0, Logger_1.logRequest)('Deck exported successfully', req, res, {
            deckId,
            userId,
            deckName: deck.name,
            fileSize: sprData.length
        });
        res.send(sprData);
    }
    catch (error) {
        (0, Logger_1.logError)('Export deck endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// Import deck from .spr file (encrypted) - imported deck will be owned by the importing user
router.post('/import', AuthMiddleware_1.authRequired, upload.single('file'), async (req, res) => {
    try {
        const userId = req.user.userId;
        (0, Logger_1.logRequest)('Import deck endpoint accessed', req, res, {
            userId,
            hasFile: !!req.file,
            fileName: req.file?.originalname,
            fileSize: req.file?.size
        });
        if (!req.file) {
            (0, Logger_1.logWarning)('No file uploaded for deck import', { userId }, req, res);
            return res.status(400).json({ error: 'No file uploaded' });
        }
        // Hand the encrypted buffer to the import service; ownership of the
        // resulting deck is assigned to the uploading user.
        const imported = await container.deckImportExportService.importDeckFromSpr(req.file.buffer, userId);
        (0, Logger_1.logRequest)('Deck imported successfully', req, res, {
            userId,
            deckId: imported.id,
            deckName: imported.name || 'Unknown',
            fileName: req.file.originalname,
            fileSize: req.file.size
        });
        res.json({
            success: true,
            message: 'Deck imported successfully and added to your collection',
            deckId: imported.id
        });
    }
    catch (error) {
        (0, Logger_1.logError)('Import deck endpoint error', error, req, res);
        if (error instanceof Error && error.message.includes('Invalid')) {
            return res.status(400).json({ error: 'Invalid file format or corrupted data' });
        }
        else {
            res.status(500).json({ error: 'Internal server error' });
        }
    }
});
exports.default = router;
//# sourceMappingURL=deckImportExportRouter.js.map
@@ -0,0 +1 @@
{"version":3,"file":"deckImportExportRouter.js","sourceRoot":"","sources":["../../../src/Api/routers/deckImportExportRouter.ts"],"names":[],"mappings":";;;;;AAAA,sDAAqD;AACrD,oDAA4B;AAC5B,wEAAqE;AACrE,8EAAyE;AACzE,8DAAqF;AAWrF,MAAM,MAAM,GAAG,iBAAO,CAAC,MAAM,EAAE,CAAC;AAChC,MAAM,SAAS,GAAG,yBAAW,CAAC,WAAW,EAAE,CAAC;AAE5C,oCAAoC;AACpC,MAAM,MAAM,GAAG,IAAA,gBAAM,EAAC;IAClB,OAAO,EAAE,gBAAM,CAAC,aAAa,EAAE;IAC/B,MAAM,EAAE;QACJ,QAAQ,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI,EAAE,aAAa;KAC5C;IACD,UAAU,EAAE,CAAC,GAAQ,EAAE,IAAS,EAAE,EAAO,EAAE,EAAE;QACzC,IAAI,IAAI,CAAC,QAAQ,KAAK,kBAAkB,IAAI,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC;YAC7E,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;QACnB,CAAC;aAAM,CAAC;YACJ,EAAE,CAAC,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC,CAAC;QAC1D,CAAC;IACL,CAAC;CACJ,CAAC,CAAC;AAEH,+EAA+E;AAC/E,MAAM,CAAC,GAAG,CAAC,iBAAiB,EAAE,6BAAY,EAAE,KAAK,EAAE,GAAY,EAAE,GAAa,EAAE,EAAE;IAC9E,IAAI,CAAC;QACD,MAAM,EAAE,MAAM,EAAE,GAAG,GAAG,CAAC,MAAM,CAAC;QAC9B,MAAM,MAAM,GAAI,GAAW,CAAC,IAAI,CAAC,MAAM,CAAC;QAExC,IAAA,mBAAU,EAAC,+BAA+B,EAAE,GAAG,EAAE,GAAG,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;QAE1E,8BAA8B;QAC9B,MAAM,IAAI,GAAG,MAAM,SAAS,CAAC,cAAc,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;QAC7D,IAAI,CAAC,IAAI,EAAE,CAAC;YACR,IAAA,mBAAU,EAAC,2BAA2B,EAAE,EAAE,MAAM,EAAE,MAAM,EAAE,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;YACtE,OAAO,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,gBAAgB,EAAE,CAAC,CAAC;QAC7D,CAAC;QAED,wCAAwC;QACxC,IAAI,IAAI,CAAC,MAAM,KAAK,MAAM,EAAE,CAAC;YACzB,IAAA,mBAAU,EAAC,+DAA+D,EAAE;gBACxE,MAAM;gBACN,MAAM;gBACN,WAAW,EAAE,IAAI,CAAC,MAAM;aAC3B,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;YACb,OAAO,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,oDAAoD,EAAE,CAAC,CAAC;QACjG,CAAC;QAED,MAAM,OAAO,GAAG,MAAM,SAAS,CAAC,uBAAuB,CAAC,eAAe,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAExF,GAAG,CAAC,SAAS,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;QAC1D,GAAG,CAAC,SAAS,CAAC,qBAAqB,EAAE,yBAAyB,IAAI,CAAC,IAAI,IAAI,MAAM,OAAO,CAAC,CAAC;QAE1F,IAAA,mBAAU,EAAC,4BAA4B,EAAE,GAAG,EAAE,GAAG,EAAE;YAC/C,MAAM;YACN,MAAM;YACN,QAAQ,EAAE,IAAI,CAAC,IAAI;YACnB,QAA
Q,EAAE,OAAO,CAAC,MAAM;SAC3B,CAAC,CAAC;QAEH,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IACtB,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACb,IAAA,iBAAQ,EAAC,4BAA4B,EAAE,KAAc,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QACjE,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,uBAAuB,EAAE,CAAC,CAAC;IAC7D,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,6FAA6F;AAC7F,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE,6BAAY,EAAE,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,EAAE,KAAK,EAAE,GAAY,EAAE,GAAa,EAAE,EAAE;IAC9F,IAAI,CAAC;QACD,MAAM,MAAM,GAAI,GAAW,CAAC,IAAI,CAAC,MAAM,CAAC;QAExC,IAAA,mBAAU,EAAC,+BAA+B,EAAE,GAAG,EAAE,GAAG,EAAE;YAClD,MAAM;YACN,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,IAAI;YACnB,QAAQ,EAAE,GAAG,CAAC,IAAI,EAAE,YAAY;YAChC,QAAQ,EAAE,GAAG,CAAC,IAAI,EAAE,IAAI;SAC3B,CAAC,CAAC;QAEH,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC;YACZ,IAAA,mBAAU,EAAC,kCAAkC,EAAE,EAAE,MAAM,EAAE,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;YACrE,OAAO,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,kBAAkB,EAAE,CAAC,CAAC;QAC/D,CAAC;QAED,MAAM,UAAU,GAAG,GAAG,CAAC,IAAK,CAAC,MAAM,CAAC;QAEpC,2DAA2D;QAC3D,MAAM,MAAM,GAAG,MAAM,SAAS,CAAC,uBAAuB,CAAC,iBAAiB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;QAE7F,IAAA,mBAAU,EAAC,4BAA4B,EAAE,GAAG,EAAE,GAAG,EAAE;YAC/C,MAAM;YACN,MAAM,EAAE,MAAM,CAAC,EAAE;YACjB,QAAQ,EAAE,MAAM,CAAC,IAAI,IAAI,SAAS;YAClC,QAAQ,EAAE,GAAG,CAAC,IAAI,CAAC,YAAY;YAC/B,QAAQ,EAAE,GAAG,CAAC,IAAI,CAAC,IAAI;SAC1B,CAAC,CAAC;QAEH,GAAG,CAAC,IAAI,CAAC;YACL,OAAO,EAAE,IAAI;YACb,OAAO,EAAE,yDAAyD;YAClE,MAAM,EAAE,MAAM,CAAC,EAAE;SACpB,CAAC,CAAC;IACP,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACb,IAAA,iBAAQ,EAAC,4BAA4B,EAAE,KAAc,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAEjE,IAAI,KAAK,YAAY,KAAK,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE,CAAC;YAC9D,OAAO,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,uCAAuC,EAAE,CAAC,CAAC;QACpF,CAAC;aAAM,CAAC;YACJ,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,uBAAuB,EAAE,CAAC,CAAC;QAC7D,CAAC;IACL,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,kBAAe,MAAM,CAAC"}
+3
View File
@@ -0,0 +1,3 @@
declare const deckRouter: import("express-serve-static-core").Router;
export default deckRouter;
//# sourceMappingURL=deckRouter.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"deckRouter.d.ts","sourceRoot":"","sources":["../../../src/Api/routers/deckRouter.ts"],"names":[],"mappings":"AAQA,QAAA,MAAM,UAAU,4CAAW,CAAC;AAwL5B,eAAe,UAAU,CAAC"}
+162
View File
@@ -0,0 +1,162 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const express_1 = require("express");
const AuthMiddleware_1 = require("../../Application/Services/AuthMiddleware");
const DIContainer_1 = require("../../Application/Services/DIContainer");
const Generalsearch_1 = require("../../Application/Search/Generalsearch");
const Logger_1 = require("../../Application/Services/Logger");
const deckRouter = (0, express_1.Router)();
// Create search service that isn't in the container yet
const searchService = new Generalsearch_1.GeneralSearchService(DIContainer_1.container.userRepository, DIContainer_1.container.organizationRepository, DIContainer_1.container.deckRepository);
// Authenticated routes - Get decks with pagination (RECOMMENDED)
// GET /page/:from/:to — paginated deck listing scoped by the caller's
// user/org/admin context; delegates to getDecksByPageQueryHandler.
deckRouter.get('/page/:from/:to', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const userId = req.user.userId;
        const userOrgId = req.user.orgId;
        // authLevel === 1 is treated as admin here — TODO confirm against the
        // auth middleware's level definitions.
        const isAdmin = req.user.authLevel === 1;
        const from = parseInt(req.params.from);
        const to = parseInt(req.params.to);
        // Rejects non-numeric, negative, or inverted ranges (NaN fails isNaN check).
        if (isNaN(from) || isNaN(to) || from < 0 || to < from) {
            return res.status(400).json({ error: 'Invalid page parameters. "from" and "to" must be valid numbers with to >= from >= 0' });
        }
        (0, Logger_1.logRequest)('Get decks by page endpoint accessed', req, res, {
            userId,
            userOrgId,
            isAdmin,
            from,
            to
        });
        // Use paginated query handler for memory efficiency
        const result = await DIContainer_1.container.getDecksByPageQueryHandler.execute({
            userId,
            userOrgId,
            isAdmin,
            from,
            to
        });
        (0, Logger_1.logRequest)('Get decks page completed successfully', req, res, {
            userId,
            from,
            to,
            returnedCount: result.decks.length,
            totalCount: result.totalCount
        });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Get decks by page endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// POST / — create a new deck from the request body.
// NOTE(review): userId is logged but not merged into the command payload
// (req.body is passed through as-is) — confirm ownership is assigned inside
// createDeckCommandHandler or by upstream middleware.
deckRouter.post('/', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const userId = req.user.userId;
        (0, Logger_1.logRequest)('Create deck endpoint accessed', req, res, { name: req.body.name, userId });
        const result = await DIContainer_1.container.createDeckCommandHandler.execute(req.body);
        (0, Logger_1.logRequest)('Deck created successfully', req, res, { deckId: result.id, name: req.body.name, userId });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Create deck endpoint error', error, req, res);
        // Error-to-status mapping is by message substring; order matters.
        if (error instanceof Error && (error.message.includes('duplicate') || error.message.includes('unique constraint'))) {
            return res.status(409).json({ error: 'Deck with this name already exists' });
        }
        if (error instanceof Error && error.message.includes('validation')) {
            return res.status(400).json({ error: 'Invalid input data', details: error.message });
        }
        res.status(500).json({ error: 'Internal server error' });
    }
});
// GET /search — free-text deck search. Query params: q (required string),
// limit (1-100, default 20), offset (>= 0, default 0). Registered before
// '/:id' so the literal path wins route matching.
deckRouter.get('/search', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const { q: query, limit, offset } = req.query;
        (0, Logger_1.logRequest)('Search decks endpoint accessed', req, res, { query, limit, offset });
        if (!query || typeof query !== 'string') {
            (0, Logger_1.logWarning)('Deck search attempted without query', { query, hasQuery: !!query }, req, res);
            return res.status(400).json({ error: 'Search query is required' });
        }
        const searchQuery = {
            query: query.trim(),
            // Explicit radix; non-numeric input yields NaN and is rejected below.
            limit: limit ? parseInt(limit, 10) : 20,
            offset: offset ? parseInt(offset, 10) : 0
        };
        // Validate pagination parameters.
        // FIX: NaN compares false against every numeric bound, so e.g. ?limit=abc
        // previously slipped through both range checks and reached the search
        // service as NaN. Reject NaN explicitly before the range checks.
        if (Number.isNaN(searchQuery.limit) || searchQuery.limit < 1 || searchQuery.limit > 100) {
            (0, Logger_1.logWarning)('Invalid deck search limit parameter', { limit: searchQuery.limit }, req, res);
            return res.status(400).json({ error: 'Limit must be between 1 and 100' });
        }
        if (Number.isNaN(searchQuery.offset) || searchQuery.offset < 0) {
            (0, Logger_1.logWarning)('Invalid deck search offset parameter', { offset: searchQuery.offset }, req, res);
            return res.status(400).json({ error: 'Offset must be non-negative' });
        }
        const result = await searchService.searchFromUrl(req.originalUrl, searchQuery);
        (0, Logger_1.logRequest)('Deck search completed successfully', req, res, {
            query: searchQuery.query,
            resultCount: Array.isArray(result) ? result.length : 0
        });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Search decks endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// GET /:id — fetch a single deck by id; 404 when the query handler returns
// a falsy result. NOTE(review): no ownership/org scoping visible here —
// confirm the query handler enforces access control.
deckRouter.get('/:id', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const deckId = req.params.id;
        (0, Logger_1.logRequest)('Get deck by id endpoint accessed', req, res, { deckId });
        const result = await DIContainer_1.container.getDeckByIdQueryHandler.execute({ id: deckId });
        if (!result) {
            (0, Logger_1.logWarning)('Deck not found', { deckId }, req, res);
            return res.status(404).json({ error: 'Deck not found' });
        }
        (0, Logger_1.logRequest)('Deck retrieved successfully', req, res, { deckId });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Get deck by id endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// PUT /:id — update a deck; body fields are spread into the command after id.
// NOTE(review): req.body is forwarded unfiltered and no ownership check is
// visible (unlike the export route, which rejects non-owners) — confirm
// updateDeckCommandHandler enforces authorization and field whitelisting.
deckRouter.put('/:id', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const deckId = req.params.id;
        const userId = req.user.userId;
        (0, Logger_1.logRequest)('Update deck endpoint accessed', req, res, { deckId, userId, updateFields: Object.keys(req.body) });
        const result = await DIContainer_1.container.updateDeckCommandHandler.execute({ id: deckId, ...req.body });
        (0, Logger_1.logRequest)('Deck updated successfully', req, res, { deckId, userId });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Update deck endpoint error', error, req, res);
        // Message-substring mapping: not found → 404, duplicate → 409,
        // validation → 400, anything else → 500.
        if (error instanceof Error && error.message.includes('not found')) {
            return res.status(404).json({ error: 'Deck not found' });
        }
        if (error instanceof Error && (error.message.includes('duplicate') || error.message.includes('unique constraint'))) {
            return res.status(409).json({ error: 'Deck with this name already exists' });
        }
        if (error instanceof Error && error.message.includes('validation')) {
            return res.status(400).json({ error: 'Invalid input data', details: error.message });
        }
        res.status(500).json({ error: 'Internal server error' });
    }
});
// DELETE /:id — soft-deletes a deck (soft: true is hard-coded; no hard-delete
// path is exposed by this router). Responds { success: boolean }.
deckRouter.delete('/:id', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const deckId = req.params.id;
        const userId = req.user.userId;
        (0, Logger_1.logRequest)('Soft delete deck endpoint accessed', req, res, { deckId, userId });
        const result = await DIContainer_1.container.deleteDeckCommandHandler.execute({ id: deckId, soft: true });
        (0, Logger_1.logRequest)('Deck soft delete successful', req, res, { deckId, userId, success: result });
        res.json({ success: result });
    }
    catch (error) {
        (0, Logger_1.logError)('Soft delete deck endpoint error', error, req, res);
        if (error instanceof Error && error.message.includes('not found')) {
            return res.status(404).json({ error: 'Deck not found' });
        }
        res.status(500).json({ error: 'Internal server error' });
    }
});
exports.default = deckRouter;
//# sourceMappingURL=deckRouter.js.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,3 @@
declare const organizationRouter: import("express-serve-static-core").Router;
export default organizationRouter;
//# sourceMappingURL=organizationRouter.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"organizationRouter.d.ts","sourceRoot":"","sources":["../../../src/Api/routers/organizationRouter.ts"],"names":[],"mappings":"AAQA,QAAA,MAAM,kBAAkB,4CAAW,CAAC;AAmMpC,eAAe,kBAAkB,CAAC"}
@@ -0,0 +1,179 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const express_1 = require("express");
const AuthMiddleware_1 = require("../../Application/Services/AuthMiddleware");
const DIContainer_1 = require("../../Application/Services/DIContainer");
const ErrorResponseService_1 = require("../../Application/Services/ErrorResponseService");
const Generalsearch_1 = require("../../Application/Search/Generalsearch");
const Logger_1 = require("../../Application/Services/Logger");
const organizationRouter = (0, express_1.Router)();
// Create search service that isn't in the container yet
const searchService = new Generalsearch_1.GeneralSearchService(DIContainer_1.container.userRepository, DIContainer_1.container.organizationRepository, DIContainer_1.container.deckRepository);
// GET /page/:from/:to — paginated organization listing. Unlike the deck
// equivalent, no user/org/admin context is passed to the query handler.
organizationRouter.get('/page/:from/:to', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const from = parseInt(req.params.from);
        const to = parseInt(req.params.to);
        // Rejects non-numeric, negative, or inverted ranges (NaN fails isNaN check).
        if (isNaN(from) || isNaN(to) || from < 0 || to < from) {
            return res.status(400).json({ error: 'Invalid page parameters. "from" and "to" must be valid numbers with to >= from >= 0' });
        }
        (0, Logger_1.logRequest)('Get organizations by page endpoint accessed', req, res, { from, to });
        const result = await DIContainer_1.container.getOrganizationsByPageQueryHandler.execute({ from, to });
        (0, Logger_1.logRequest)('Organizations page retrieved successfully', req, res, {
            from,
            to,
            count: result.organizations.length,
            totalCount: result.totalCount
        });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Get organizations by page endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// GET /search — free-text organization search. Query params: q (required
// string), limit (1-100, default 20), offset (>= 0, default 0). Registered
// before '/:orgId/login-url' so the literal path wins route matching.
organizationRouter.get('/search', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const { q: query, limit, offset } = req.query;
        (0, Logger_1.logRequest)('Search organizations endpoint accessed', req, res, { query, limit, offset });
        if (!query || typeof query !== 'string') {
            (0, Logger_1.logWarning)('Organization search attempted without query', { query, hasQuery: !!query }, req, res);
            return res.status(400).json({ error: 'Search query is required' });
        }
        const searchQuery = {
            query: query.trim(),
            // Explicit radix; non-numeric input yields NaN and is rejected below.
            limit: limit ? parseInt(limit, 10) : 20,
            offset: offset ? parseInt(offset, 10) : 0
        };
        // Validate pagination parameters.
        // FIX: NaN compares false against every numeric bound, so e.g. ?limit=abc
        // previously slipped through both range checks and reached the search
        // service as NaN. Reject NaN explicitly before the range checks.
        if (Number.isNaN(searchQuery.limit) || searchQuery.limit < 1 || searchQuery.limit > 100) {
            (0, Logger_1.logWarning)('Invalid organization search limit parameter', { limit: searchQuery.limit }, req, res);
            return res.status(400).json({ error: 'Limit must be between 1 and 100' });
        }
        if (Number.isNaN(searchQuery.offset) || searchQuery.offset < 0) {
            (0, Logger_1.logWarning)('Invalid organization search offset parameter', { offset: searchQuery.offset }, req, res);
            return res.status(400).json({ error: 'Offset must be non-negative' });
        }
        const result = await searchService.searchFromUrl(req.originalUrl, searchQuery);
        (0, Logger_1.logRequest)('Organization search completed successfully', req, res, {
            query: searchQuery.query,
            resultCount: Array.isArray(result) ? result.length : 0
        });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Search organizations endpoint error', error, req, res);
        res.status(500).json({ error: 'Internal server error' });
    }
});
// GET /:orgId/login-url — look up the third-party login URL configured for an
// organization. Uses ErrorResponseService helpers (404 when absent, 500 on
// failure), unlike the deck routes which build responses inline.
organizationRouter.get('/:orgId/login-url', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const userId = req.user.userId;
        const { orgId } = req.params;
        (0, Logger_1.logRequest)('Get organization login URL endpoint accessed', req, res, {
            userId,
            organizationId: orgId
        });
        const result = await DIContainer_1.container.getOrganizationLoginUrlQueryHandler.execute({
            organizationId: orgId
        });
        if (!result) {
            (0, Logger_1.logWarning)('Organization login URL not found', {
                organizationId: orgId,
                userId
            }, req, res);
            return ErrorResponseService_1.ErrorResponseService.sendNotFound(res, 'Organization login URL not found');
        }
        (0, Logger_1.logRequest)('Organization login URL retrieved successfully', req, res, {
            organizationId: orgId,
            organizationName: result.organizationName,
            hasUrl: !!result.loginUrl,
            userId
        });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Get organization login URL endpoint error', error, req, res);
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
// POST /auth-callback — receive the result of a third-party organization
// authentication flow. Body: { organizationId, status: 'ok'|'not_ok',
// authToken? }. Failure messages from the command handler are mapped to HTTP
// codes by substring match; the order of the checks below is significant.
// NOTE(review): substring-based dispatch is brittle — a handler message
// containing more than one trigger phrase resolves to the first match.
organizationRouter.post('/auth-callback', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const userId = req.user.userId;
        const { organizationId, status, authToken } = req.body;
        (0, Logger_1.logRequest)('Organization auth callback endpoint accessed', req, res, {
            userId,
            organizationId,
            status,
            hasAuthToken: !!authToken
        });
        // Validate required fields
        if (!organizationId || !status) {
            (0, Logger_1.logWarning)('Missing required fields for organization auth callback', {
                organizationId: !!organizationId,
                status: !!status,
                userId
            }, req, res);
            return ErrorResponseService_1.ErrorResponseService.sendBadRequest(res, 'organizationId and status are required');
        }
        if (status !== 'ok' && status !== 'not_ok') {
            (0, Logger_1.logWarning)('Invalid status value for organization auth callback', {
                status,
                userId,
                organizationId
            }, req, res);
            return ErrorResponseService_1.ErrorResponseService.sendBadRequest(res, 'status must be either "ok" or "not_ok"');
        }
        const result = await DIContainer_1.container.processOrgAuthCallbackCommandHandler.execute({
            organizationId,
            userId,
            status,
            authToken
        });
        if (!result.success) {
            // Map handler failure message → HTTP status (checked in this order):
            // 'not found' → 404, 'does not belong' → 403,
            // 'authentication failed' → 401, anything else → 500.
            if (result.message.includes('not found')) {
                (0, Logger_1.logWarning)('Organization auth callback failed - entity not found', {
                    userId,
                    organizationId,
                    message: result.message
                }, req, res);
                return ErrorResponseService_1.ErrorResponseService.sendNotFound(res, result.message);
            }
            if (result.message.includes('does not belong')) {
                (0, Logger_1.logWarning)('Organization auth callback failed - authorization error', {
                    userId,
                    organizationId,
                    message: result.message
                }, req, res);
                return ErrorResponseService_1.ErrorResponseService.sendForbidden(res, result.message);
            }
            if (result.message.includes('authentication failed')) {
                (0, Logger_1.logAuth)('Organization authentication failed via callback', userId, {
                    organizationId,
                    status
                }, req, res);
                return ErrorResponseService_1.ErrorResponseService.sendUnauthorized(res, result.message);
            }
            (0, Logger_1.logError)('Organization auth callback internal error', new Error(result.message), req, res);
            return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
        }
        (0, Logger_1.logAuth)('Organization auth callback processed successfully', userId, {
            organizationId,
            status,
            updatedFields: result.updatedFields
        }, req, res);
        res.json({
            success: result.success,
            message: result.message,
            updatedFields: result.updatedFields
        });
    }
    catch (error) {
        (0, Logger_1.logError)('Organization auth callback endpoint error', error, req, res);
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
exports.default = organizationRouter;
//# sourceMappingURL=organizationRouter.js.map
File diff suppressed because one or more lines are too long
+3
View File
@@ -0,0 +1,3 @@
declare const userRouter: import("express-serve-static-core").Router;
export default userRouter;
//# sourceMappingURL=userRouter.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"userRouter.d.ts","sourceRoot":"","sources":["../../../src/Api/routers/userRouter.ts"],"names":[],"mappings":"AAQA,QAAA,MAAM,UAAU,4CAAW,CAAC;AA+J5B,eAAe,UAAU,CAAC"}
+139
View File
@@ -0,0 +1,139 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const express_1 = require("express");
const AuthMiddleware_1 = require("../../Application/Services/AuthMiddleware");
const DIContainer_1 = require("../../Application/Services/DIContainer");
const ErrorResponseService_1 = require("../../Application/Services/ErrorResponseService");
const ValidationMiddleware_1 = require("../../Application/Services/ValidationMiddleware");
const Generalsearch_1 = require("../../Application/Search/Generalsearch");
const Logger_1 = require("../../Application/Services/Logger");
const userRouter = (0, express_1.Router)();
// Create search service that isn't in the container yet
const searchService = new Generalsearch_1.GeneralSearchService(DIContainer_1.container.userRepository, DIContainer_1.container.organizationRepository, DIContainer_1.container.deckRepository);
// POST /login — authenticate with username/password. Field presence and
// length are pre-validated by middleware; all credential-related failures
// collapse to the same generic 401 so an attacker cannot enumerate which
// part (username vs password) was wrong.
userRouter.post('/login', ValidationMiddleware_1.ValidationMiddleware.combine([
    ValidationMiddleware_1.ValidationMiddleware.validateRequiredFields(['username', 'password']),
    ValidationMiddleware_1.ValidationMiddleware.validateStringLength({
        username: { min: 3, max: 50 },
        password: { min: 6, max: 100 }
    })
]), async (req, res) => {
    try {
        (0, Logger_1.logRequest)('Login endpoint accessed', req, res, { username: req.body.username });
        const { username, password } = req.body;
        const result = await DIContainer_1.container.loginCommandHandler.execute({ username, password });
        if (result) {
            (0, Logger_1.logAuth)('User login successful', result.user.id, { username: result.user.username }, req, res);
            res.json(result);
        }
        else {
            return ErrorResponseService_1.ErrorResponseService.sendUnauthorized(res, 'Invalid username or password');
        }
    }
    catch (error) {
        (0, Logger_1.logError)('Login endpoint error', error, req, res);
        if (error instanceof Error) {
            // Both invalid-username and invalid-password map to the same generic
            // message (deliberate anti-enumeration); verification/deactivation
            // states get distinct, user-actionable messages.
            if (error.message.includes('Invalid username')) {
                return ErrorResponseService_1.ErrorResponseService.sendUnauthorized(res, 'Invalid username or password');
            }
            if (error.message.includes('Invalid password')) {
                return ErrorResponseService_1.ErrorResponseService.sendUnauthorized(res, 'Invalid username or password');
            }
            if (error.message.includes('not verified')) {
                return ErrorResponseService_1.ErrorResponseService.sendUnauthorized(res, 'Please verify your email address');
            }
            if (error.message.includes('deactivated')) {
                return ErrorResponseService_1.ErrorResponseService.sendUnauthorized(res, 'Account has been deactivated');
            }
        }
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
// POST /create — register a new user (no auth required). Email format and
// field lengths are pre-validated; duplicate accounts map to 409.
userRouter.post('/create', ValidationMiddleware_1.ValidationMiddleware.combine([
    ValidationMiddleware_1.ValidationMiddleware.validateRequiredFields(['username', 'email', 'password']),
    ValidationMiddleware_1.ValidationMiddleware.validateEmailFormat(['email']),
    ValidationMiddleware_1.ValidationMiddleware.validateStringLength({
        username: { min: 3, max: 50 },
        password: { min: 6, max: 100 }
    })
]), async (req, res) => {
    try {
        (0, Logger_1.logRequest)('Create user endpoint accessed', req, res, {
            username: req.body.username,
            email: req.body.email
        });
        const result = await DIContainer_1.container.createUserCommandHandler.execute(req.body);
        (0, Logger_1.logRequest)('User created successfully', req, res, {
            userId: result.id,
            username: result.username
        });
        res.status(201).json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Create user endpoint error', error, req, res);
        if (error instanceof Error) {
            if (error.message.includes('already exists')) {
                return ErrorResponseService_1.ErrorResponseService.sendConflict(res, error.message);
            }
            if (error.message.includes('validation')) {
                return ErrorResponseService_1.ErrorResponseService.sendBadRequest(res, error.message);
            }
        }
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
// GET /profile — return the authenticated user's own record (id taken from
// the JWT, never from the URL).
userRouter.get('/profile', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const userId = req.user.userId;
        (0, Logger_1.logRequest)('Get user profile endpoint accessed', req, res, { userId });
        const result = await DIContainer_1.container.getUserByIdQueryHandler.execute({ id: userId });
        if (!result) {
            (0, Logger_1.logWarning)('User profile not found', { userId }, req, res);
            return ErrorResponseService_1.ErrorResponseService.sendNotFound(res, 'User not found');
        }
        (0, Logger_1.logRequest)('User profile retrieved successfully', req, res, {
            userId,
            username: result.username
        });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Get user profile endpoint error', error, req, res);
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
// PATCH /profile — partial update of the authenticated user's own record.
// NOTE(review): req.body is spread into the command unfiltered — confirm
// updateUserCommandHandler whitelists fields (e.g. blocks authLevel changes).
userRouter.patch('/profile', AuthMiddleware_1.authRequired, async (req, res) => {
    try {
        const userId = req.user.userId;
        (0, Logger_1.logRequest)('Update user profile endpoint accessed', req, res, {
            userId,
            fieldsToUpdate: Object.keys(req.body)
        });
        const result = await DIContainer_1.container.updateUserCommandHandler.execute({ id: userId, ...req.body });
        if (!result) {
            return ErrorResponseService_1.ErrorResponseService.sendNotFound(res, 'User not found');
        }
        (0, Logger_1.logRequest)('User profile updated successfully', req, res, {
            userId,
            username: result.username
        });
        res.json(result);
    }
    catch (error) {
        (0, Logger_1.logError)('Update user profile endpoint error', error, req, res);
        if (error instanceof Error) {
            if (error.message.includes('already exists')) {
                return ErrorResponseService_1.ErrorResponseService.sendConflict(res, error.message);
            }
            if (error.message.includes('validation')) {
                return ErrorResponseService_1.ErrorResponseService.sendBadRequest(res, error.message);
            }
        }
        return ErrorResponseService_1.ErrorResponseService.sendInternalServerError(res);
    }
});
exports.default = userRouter;
//# sourceMappingURL=userRouter.js.map
File diff suppressed because one or more lines are too long
+42
View File
@@ -0,0 +1,42 @@
// Generated declaration (tsc output) for swaggerConfig.ts: the swagger-jsdoc
// options object and the resulting OpenAPI spec.
// NOTE(review): this is a build artifact — fix issues in the .ts source, not here.
export declare const swaggerOptions: {
    definition: {
        openapi: string;
        info: {
            title: string;
            version: string;
            description: string;
            contact: {
                name: string;
                email: string;
            };
            license: {
                name: string;
                url: string;
            };
        };
        servers: {
            url: string;
            description: string;
        }[];
        components: {
            securitySchemes: {
                bearerAuth: {
                    type: string;
                    scheme: string;
                    bearerFormat: string;
                    description: string;
                };
            };
        };
        security: {
            bearerAuth: never[];
        }[];
        tags: {
            name: string;
            description: string;
        }[];
    };
    apis: string[];
};
// Spec produced by swagger-jsdoc from swaggerOptions (opaque object type).
export declare const swaggerSpec: object;
//# sourceMappingURL=swaggerConfig.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"swaggerConfig.d.ts","sourceRoot":"","sources":["../../../src/Api/swagger/swaggerConfig.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,cAAc;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAmE1B,CAAC;AAEF,eAAO,MAAM,WAAW,QAA+B,CAAC"}
+77
View File
@@ -0,0 +1,77 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.swaggerSpec = exports.swaggerOptions = void 0;
const swagger_jsdoc_1 = __importDefault(require("swagger-jsdoc"));
// swagger-jsdoc configuration: OpenAPI 3.0 metadata, JWT bearer security
// scheme (applied globally via `security`), and the tag taxonomy used by the
// route annotations.
exports.swaggerOptions = {
    definition: {
        openapi: '3.0.0',
        info: {
            title: 'SerpentRace API',
            version: '1.0.0',
            description: 'Comprehensive API documentation for SerpentRace Backend',
            contact: {
                name: 'SerpentRace Development Team',
                email: 'dev@serpentrace.com'
            },
            license: {
                name: 'MIT',
                url: 'https://opensource.org/licenses/MIT'
            }
        },
        servers: [
            {
                url: 'http://localhost:3000',
                description: 'Local development server'
            },
            {
                url: 'https://api.serpentrace.com',
                description: 'Production server'
            }
        ],
        components: {
            securitySchemes: {
                bearerAuth: {
                    type: 'http',
                    scheme: 'bearer',
                    bearerFormat: 'JWT',
                    description: 'Enter JWT token obtained from /api/users/login'
                }
            }
        },
        // Applies bearerAuth to every operation by default.
        security: [{ bearerAuth: [] }],
        tags: [
            {
                name: 'Users',
                description: 'User authentication and profile management'
            },
            {
                name: 'Organizations',
                description: 'Organization management and authentication'
            },
            {
                name: 'Decks',
                description: 'Deck creation, management, and gameplay'
            },
            {
                name: 'Chats',
                description: 'Real-time chat and messaging system'
            },
            {
                name: 'Contacts',
                description: 'Contact form and support requests'
            },
            {
                name: 'Deck Import/Export',
                description: 'Import and export deck functionality'
            }
        ]
    },
    // NOTE(review): CWD-relative path pointing at the .ts source; in the
    // compiled/deployed image this only resolves if src/ is shipped and the
    // process CWD is the project root — confirm in the Docker image.
    apis: [
        './src/Api/swagger/swaggerDefinitions.ts'
    ],
};
// Build the OpenAPI spec once at module load.
exports.swaggerSpec = (0, swagger_jsdoc_1.default)(exports.swaggerOptions);
//# sourceMappingURL=swaggerConfig.js.map
@@ -0,0 +1 @@
{"version":3,"file":"swaggerConfig.js","sourceRoot":"","sources":["../../../src/Api/swagger/swaggerConfig.ts"],"names":[],"mappings":";;;;;;AAAA,kEAAyC;AAE5B,QAAA,cAAc,GAAG;IAC5B,UAAU,EAAE;QACV,OAAO,EAAE,OAAO;QAChB,IAAI,EAAE;YACJ,KAAK,EAAE,iBAAiB;YACxB,OAAO,EAAE,OAAO;YAChB,WAAW,EAAE,yDAAyD;YACtE,OAAO,EAAE;gBACP,IAAI,EAAE,8BAA8B;gBACpC,KAAK,EAAE,qBAAqB;aAC7B;YACD,OAAO,EAAE;gBACP,IAAI,EAAE,KAAK;gBACX,GAAG,EAAE,qCAAqC;aAC3C;SACF;QACD,OAAO,EAAE;YACP;gBACE,GAAG,EAAE,uBAAuB;gBAC5B,WAAW,EAAE,0BAA0B;aACxC;YACD;gBACE,GAAG,EAAE,6BAA6B;gBAClC,WAAW,EAAE,mBAAmB;aACjC;SACF;QACD,UAAU,EAAE;YACV,eAAe,EAAE;gBACf,UAAU,EAAE;oBACV,IAAI,EAAE,MAAM;oBACZ,MAAM,EAAE,QAAQ;oBAChB,YAAY,EAAE,KAAK;oBACnB,WAAW,EAAE,gDAAgD;iBAC9D;aACF;SACF;QACD,QAAQ,EAAE,CAAC,EAAE,UAAU,EAAE,EAAE,EAAE,CAAC;QAC9B,IAAI,EAAE;YACJ;gBACE,IAAI,EAAE,OAAO;gBACb,WAAW,EAAE,4CAA4C;aAC1D;YACD;gBACE,IAAI,EAAE,eAAe;gBACrB,WAAW,EAAE,4CAA4C;aAC1D;YACD;gBACE,IAAI,EAAE,OAAO;gBACb,WAAW,EAAE,yCAAyC;aACvD;YACD;gBACE,IAAI,EAAE,OAAO;gBACb,WAAW,EAAE,qCAAqC;aACnD;YACD;gBACE,IAAI,EAAE,UAAU;gBAChB,WAAW,EAAE,mCAAmC;aACjD;YACD;gBACE,IAAI,EAAE,oBAAoB;gBAC1B,WAAW,EAAE,sCAAsC;aACpD;SACF;KACF;IACD,IAAI,EAAE;QACJ,yCAAyC;KAC1C;CACF,CAAC;AAEW,QAAA,WAAW,GAAG,IAAA,uBAAY,EAAC,sBAAc,CAAC,CAAC"}
File diff suppressed because it is too large Load Diff
@@ -0,0 +1 @@
{"version":3,"file":"swaggerDefinitions.d.ts","sourceRoot":"","sources":["../../../src/Api/swagger/swaggerDefinitions.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA81CG;AAEH,OAAO,EAAE,CAAC"}
File diff suppressed because it is too large Load Diff
@@ -0,0 +1 @@
{"version":3,"file":"swaggerDefinitions.js","sourceRoot":"","sources":["../../../src/Api/swagger/swaggerDefinitions.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA81CG"}
@@ -0,0 +1,3 @@
import express from 'express';
/** Mounts the Swagger UI onto the given Express application (see swaggerUiSetup.js). */
export declare function setupSwagger(app: express.Application): void;
//# sourceMappingURL=swaggerUiSetup.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"swaggerUiSetup.d.ts","sourceRoot":"","sources":["../../../src/Api/swagger/swaggerUiSetup.ts"],"names":[],"mappings":"AAAA,OAAO,OAAO,MAAM,SAAS,CAAC;AAI9B,wBAAgB,YAAY,CAAC,GAAG,EAAE,OAAO,CAAC,WAAW,QAEpD"}
+12
View File
@@ -0,0 +1,12 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.setupSwagger = setupSwagger;
const swagger_ui_express_1 = __importDefault(require("swagger-ui-express"));
const swaggerConfig_1 = require("./swaggerConfig");
function setupSwagger(app) {
app.use('/api-docs', swagger_ui_express_1.default.serve, swagger_ui_express_1.default.setup(swaggerConfig_1.swaggerSpec));
}
//# sourceMappingURL=swaggerUiSetup.js.map
@@ -0,0 +1 @@
{"version":3,"file":"swaggerUiSetup.js","sourceRoot":"","sources":["../../../src/Api/swagger/swaggerUiSetup.ts"],"names":[],"mappings":";;;;;AAIA,oCAEC;AALD,4EAA2C;AAC3C,mDAA8C;AAE9C,SAAgB,YAAY,CAAC,GAAwB;IACnD,GAAG,CAAC,GAAG,CAAC,WAAW,EAAE,4BAAS,CAAC,KAAK,EAAE,4BAAS,CAAC,KAAK,CAAC,2BAAW,CAAC,CAAC,CAAC;AACtE,CAAC"}
@@ -0,0 +1,13 @@
import { ArchiveChatCommand, RestoreChatCommand } from './ChatCommands';
import { IChatRepository } from '../../../Domain/IRepository/IChatRepository';
/** Archives an active chat. execute resolves true on success, false on any error. */
export declare class ArchiveChatCommandHandler {
private chatRepository;
constructor(chatRepository: IChatRepository);
execute(command: ArchiveChatCommand): Promise<boolean>;
}
/** Restores an archived chat. execute resolves true on success, false on error or for game chats. */
export declare class RestoreChatCommandHandler {
private chatRepository;
constructor(chatRepository: IChatRepository);
execute(command: RestoreChatCommand): Promise<boolean>;
}
//# sourceMappingURL=ChatArchiveCommandHandlers.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"ChatArchiveCommandHandlers.d.ts","sourceRoot":"","sources":["../../../../src/Application/Chat/commands/ChatArchiveCommandHandlers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,kBAAkB,EAAE,MAAM,gBAAgB,CAAC;AACxE,OAAO,EAAE,eAAe,EAAE,MAAM,6CAA6C,CAAC;AAI9E,qBAAa,yBAAyB;IACtB,OAAO,CAAC,cAAc;gBAAd,cAAc,EAAE,eAAe;IAE7C,OAAO,CAAC,OAAO,EAAE,kBAAkB,GAAG,OAAO,CAAC,OAAO,CAAC;CAsB/D;AAED,qBAAa,yBAAyB;IACtB,OAAO,CAAC,cAAc;gBAAd,cAAc,EAAE,eAAe;IAE7C,OAAO,CAAC,OAAO,EAAE,kBAAkB,GAAG,OAAO,CAAC,OAAO,CAAC;CAiC/D"}
@@ -0,0 +1,66 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RestoreChatCommandHandler = exports.ArchiveChatCommandHandler = void 0;
const ChatAggregate_1 = require("../../../Domain/Chat/ChatAggregate");
const Logger_1 = require("../../Services/Logger");
// Handles ArchiveChatCommand: moves an existing chat into the archive via the
// repository. All failures (chat missing, repository errors) are logged and
// collapsed into a `false` return — this handler never throws to its caller.
class ArchiveChatCommandHandler {
constructor(chatRepository) {
this.chatRepository = chatRepository;
}
// Returns true when the chat was found and archived, false otherwise.
async execute(command) {
try {
const chat = await this.chatRepository.findById(command.chatId);
if (!chat) {
throw new Error('Chat not found');
}
// Repository performs the actual archival of the aggregate.
await this.chatRepository.archiveChat(chat);
(0, Logger_1.logAuth)('Chat archived manually', undefined, {
chatId: command.chatId,
chatType: chat.type,
messageCount: chat.messages.length
});
return true;
}
catch (error) {
// Swallow and log: callers only see the boolean outcome.
(0, Logger_1.logError)('ArchiveChatCommandHandler error', error);
return false;
}
}
}
exports.ArchiveChatCommandHandler = ArchiveChatCommandHandler;
// Handles RestoreChatCommand: brings an archived chat back to the active set.
// Game chats are explicitly excluded — they can be viewed in the archive but
// never restored (the ChatType.GAME guard below returns false with a warning).
class RestoreChatCommandHandler {
constructor(chatRepository) {
this.chatRepository = chatRepository;
}
// Returns true when the archived chat was restored; false when the archive is
// missing, the chat is a game chat, or restoration fails. Never throws.
async execute(command) {
try {
const archive = await this.chatRepository.getArchivedChat(command.chatId);
if (!archive) {
throw new Error('Archived chat not found');
}
// Game chats cannot be restored, only viewed
if (archive.chatType === ChatAggregate_1.ChatType.GAME) {
(0, Logger_1.logWarning)('Attempt to restore game chat blocked', {
chatId: command.chatId,
chatType: archive.chatType
});
return false;
}
const restoredChat = await this.chatRepository.restoreFromArchive(command.chatId);
if (!restoredChat) {
throw new Error('Failed to restore chat from archive');
}
(0, Logger_1.logAuth)('Chat restored from archive', undefined, {
chatId: command.chatId,
messageCount: archive.archivedMessages.length
});
return true;
}
catch (error) {
// Swallow and log: callers only see the boolean outcome.
(0, Logger_1.logError)('RestoreChatCommandHandler error', error);
return false;
}
}
}
exports.RestoreChatCommandHandler = RestoreChatCommandHandler;
//# sourceMappingURL=ChatArchiveCommandHandlers.js.map
@@ -0,0 +1 @@
{"version":3,"file":"ChatArchiveCommandHandlers.js","sourceRoot":"","sources":["../../../../src/Application/Chat/commands/ChatArchiveCommandHandlers.ts"],"names":[],"mappings":";;;AAEA,sEAA8D;AAC9D,kDAAsE;AAEtE,MAAa,yBAAyB;IAClC,YAAoB,cAA+B;QAA/B,mBAAc,GAAd,cAAc,CAAiB;IAAG,CAAC;IAEvD,KAAK,CAAC,OAAO,CAAC,OAA2B;QACrC,IAAI,CAAC;YACD,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAChE,IAAI,CAAC,IAAI,EAAE,CAAC;gBACR,MAAM,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC;YACtC,CAAC;YAED,MAAM,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;YAE5C,IAAA,gBAAO,EAAC,wBAAwB,EAAE,SAAS,EAAE;gBACzC,MAAM,EAAE,OAAO,CAAC,MAAM;gBACtB,QAAQ,EAAE,IAAI,CAAC,IAAI;gBACnB,YAAY,EAAE,IAAI,CAAC,QAAQ,CAAC,MAAM;aACrC,CAAC,CAAC;YAEH,OAAO,IAAI,CAAC;QAEhB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,IAAA,iBAAQ,EAAC,iCAAiC,EAAE,KAAc,CAAC,CAAC;YAC5D,OAAO,KAAK,CAAC;QACjB,CAAC;IACL,CAAC;CACJ;AAzBD,8DAyBC;AAED,MAAa,yBAAyB;IAClC,YAAoB,cAA+B;QAA/B,mBAAc,GAAd,cAAc,CAAiB;IAAG,CAAC;IAEvD,KAAK,CAAC,OAAO,CAAC,OAA2B;QACrC,IAAI,CAAC;YACD,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE,CAAC;gBACX,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;YAC/C,CAAC;YAED,6CAA6C;YAC7C,IAAI,OAAO,CAAC,QAAQ,KAAK,wBAAQ,CAAC,IAAI,EAAE,CAAC;gBACrC,IAAA,mBAAU,EAAC,sCAAsC,EAAE;oBAC/C,MAAM,EAAE,OAAO,CAAC,MAAM;oBACtB,QAAQ,EAAE,OAAO,CAAC,QAAQ;iBAC7B,CAAC,CAAC;gBACH,OAAO,KAAK,CAAC;YACjB,CAAC;YAED,MAAM,YAAY,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,kBAAkB,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAClF,IAAI,CAAC,YAAY,EAAE,CAAC;gBAChB,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;YAC3D,CAAC;YAED,IAAA,gBAAO,EAAC,4BAA4B,EAAE,SAAS,EAAE;gBAC7C,MAAM,EAAE,OAAO,CAAC,MAAM;gBACtB,YAAY,EAAE,OAAO,CAAC,gBAAgB,CAAC,MAAM;aAChD,CAAC,CAAC;YAEH,OAAO,IAAI,CAAC;QAEhB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,IAAA,iBAAQ,EAAC,iCAAiC,EAAE,KAAc,CAAC,CAAC;YAC5D,OAAO,KAAK,CAAC;QACjB,CAAC;IACL,CAAC;CACJ;AApCD,8DAoCC"}
@@ -0,0 +1,19 @@
/** Request to create a chat. `name` applies to group chats; `gameId` to game chats. */
export interface CreateChatCommand {
type: 'direct' | 'group' | 'game';
name?: string;
gameId?: string;
createdBy: string;
userIds: string[];
}
/** Request to post one message (`message` text) as `userId` into `chatId`. */
export interface SendMessageCommand {
chatId: string;
userId: string;
message: string;
}
/** Request to archive the chat identified by `chatId`. */
export interface ArchiveChatCommand {
chatId: string;
}
/** Request to restore the archived chat identified by `chatId`. */
export interface RestoreChatCommand {
chatId: string;
}
//# sourceMappingURL=ChatCommands.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"ChatCommands.d.ts","sourceRoot":"","sources":["../../../../src/Application/Chat/commands/ChatCommands.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,iBAAiB;IAC9B,IAAI,EAAE,QAAQ,GAAG,OAAO,GAAG,MAAM,CAAC;IAClC,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,SAAS,EAAE,MAAM,CAAC;IAClB,OAAO,EAAE,MAAM,EAAE,CAAC;CACrB;AAED,MAAM,WAAW,kBAAkB;IAC/B,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,kBAAkB;IAC/B,MAAM,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,kBAAkB;IAC/B,MAAM,EAAE,MAAM,CAAC;CAClB"}
@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=ChatCommands.js.map
@@ -0,0 +1 @@
{"version":3,"file":"ChatCommands.js","sourceRoot":"","sources":["../../../../src/Application/Chat/commands/ChatCommands.ts"],"names":[],"mappings":""}
@@ -0,0 +1,11 @@
import { CreateChatCommand } from './ChatCommands';
import { IChatRepository } from '../../../Domain/IRepository/IChatRepository';
import { IUserRepository } from '../../../Domain/IRepository/IUserRepository';
import { ChatAggregate } from '../../../Domain/Chat/ChatAggregate';
/** Creates a chat (direct/group/game), deduplicating existing direct and game chats. Resolves null on failure. */
export declare class CreateChatCommandHandler {
private chatRepository;
private userRepository;
constructor(chatRepository: IChatRepository, userRepository: IUserRepository);
execute(command: CreateChatCommand): Promise<ChatAggregate | null>;
}
//# sourceMappingURL=CreateChatCommandHandler.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"CreateChatCommandHandler.d.ts","sourceRoot":"","sources":["../../../../src/Application/Chat/commands/CreateChatCommandHandler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,gBAAgB,CAAC;AACnD,OAAO,EAAE,eAAe,EAAE,MAAM,6CAA6C,CAAC;AAC9E,OAAO,EAAE,eAAe,EAAE,MAAM,6CAA6C,CAAC;AAC9E,OAAO,EAAY,aAAa,EAAE,MAAM,oCAAoC,CAAC;AAI7E,qBAAa,wBAAwB;IAE7B,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,cAAc;gBADd,cAAc,EAAE,eAAe,EAC/B,cAAc,EAAE,eAAe;IAGrC,OAAO,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC;CAuE3E"}
@@ -0,0 +1,71 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CreateChatCommandHandler = void 0;
const ChatAggregate_1 = require("../../../Domain/Chat/ChatAggregate");
const UserAggregate_1 = require("../../../Domain/User/UserAggregate");
const Logger_1 = require("../../Services/Logger");
// Handles CreateChatCommand. Validation pipeline: creator must exist; group
// creation requires a VERIFIED_PREMIUM creator; all target users must exist.
// Dedup: an existing two-member direct chat with the same peer, or an existing
// chat for the same gameId, is returned instead of creating a duplicate.
// Errors are logged and collapsed into a `null` return — never thrown.
class CreateChatCommandHandler {
constructor(chatRepository, userRepository) {
this.chatRepository = chatRepository;
this.userRepository = userRepository;
}
// Returns the created (or pre-existing, when deduplicated) chat aggregate,
// or null on any validation/repository failure.
async execute(command) {
try {
// Validate creator exists
const creator = await this.userRepository.findById(command.createdBy);
if (!creator) {
throw new Error('Creator not found');
}
// For group chats, check if creator is premium
if (command.type === 'group' && creator.state !== UserAggregate_1.UserState.VERIFIED_PREMIUM) {
throw new Error('Premium subscription required to create groups');
}
// Validate all target users exist
const targetUsers = await Promise.all(command.userIds.map(id => this.userRepository.findById(id)));
if (targetUsers.some(user => !user)) {
throw new Error('One or more target users not found');
}
// For direct chats, check if already exists
// (only when exactly one peer is given — a 2-member DIRECT chat with that peer)
if (command.type === 'direct' && command.userIds.length === 1) {
const existingChats = await this.chatRepository.findByUserId(command.createdBy);
const existingDirectChat = existingChats.find(chat => chat.type === ChatAggregate_1.ChatType.DIRECT &&
chat.users.length === 2 &&
chat.users.includes(command.userIds[0]));
if (existingDirectChat) {
return existingDirectChat;
}
}
// For game chats, check if already exists
if (command.type === 'game' && command.gameId) {
const existingGameChat = await this.chatRepository.findByGameId(command.gameId);
if (existingGameChat) {
return existingGameChat;
}
}
// Create chat
// NOTE(review): creator is prepended to users; if userIds already contains
// createdBy the creator appears twice — TODO confirm the repository dedupes.
const chatData = {
type: command.type,
name: command.name,
gameId: command.gameId,
createdBy: command.createdBy,
users: [command.createdBy, ...command.userIds],
messages: [],
lastActivity: new Date()
};
const chat = await this.chatRepository.create(chatData);
(0, Logger_1.logAuth)('Chat created successfully', command.createdBy, {
chatId: chat.id,
chatType: command.type,
participantCount: chat.users.length,
gameId: command.gameId
});
return chat;
}
catch (error) {
// Swallow and log: callers distinguish success solely by null vs. aggregate.
(0, Logger_1.logError)('CreateChatCommandHandler error', error);
return null;
}
}
}
exports.CreateChatCommandHandler = CreateChatCommandHandler;
//# sourceMappingURL=CreateChatCommandHandler.js.map
@@ -0,0 +1 @@
{"version":3,"file":"CreateChatCommandHandler.js","sourceRoot":"","sources":["../../../../src/Application/Chat/commands/CreateChatCommandHandler.ts"],"names":[],"mappings":";;;AAGA,sEAA6E;AAC7E,sEAA+D;AAC/D,kDAA0D;AAE1D,MAAa,wBAAwB;IACjC,YACY,cAA+B,EAC/B,cAA+B;QAD/B,mBAAc,GAAd,cAAc,CAAiB;QAC/B,mBAAc,GAAd,cAAc,CAAiB;IACxC,CAAC;IAEJ,KAAK,CAAC,OAAO,CAAC,OAA0B;QACpC,IAAI,CAAC;YACD,0BAA0B;YAC1B,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;YACtE,IAAI,CAAC,OAAO,EAAE,CAAC;gBACX,MAAM,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;YACzC,CAAC;YAED,+CAA+C;YAC/C,IAAI,OAAO,CAAC,IAAI,KAAK,OAAO,IAAI,OAAO,CAAC,KAAK,KAAK,yBAAS,CAAC,gBAAgB,EAAE,CAAC;gBAC3E,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;YACtE,CAAC;YAED,kCAAkC;YAClC,MAAM,WAAW,GAAG,MAAM,OAAO,CAAC,GAAG,CACjC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,CAC9D,CAAC;YAEF,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;gBAClC,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;YAC1D,CAAC;YAED,4CAA4C;YAC5C,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,CAAC,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;gBAC5D,MAAM,aAAa,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;gBAChF,MAAM,kBAAkB,GAAG,aAAa,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CACjD,IAAI,CAAC,IAAI,KAAK,wBAAQ,CAAC,MAAM;oBAC7B,IAAI,CAAC,KAAK,CAAC,MAAM,KAAK,CAAC;oBACvB,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAC1C,CAAC;gBAEF,IAAI,kBAAkB,EAAE,CAAC;oBACrB,OAAO,kBAAkB,CAAC;gBAC9B,CAAC;YACL,CAAC;YAED,0CAA0C;YAC1C,IAAI,OAAO,CAAC,IAAI,KAAK,MAAM,IAAI,OAAO,CAAC,MAAM,EAAE,CAAC;gBAC5C,MAAM,gBAAgB,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;gBAChF,IAAI,gBAAgB,EAAE,CAAC;oBACnB,OAAO,gBAAgB,CAAC;gBAC5B,CAAC;YACL,CAAC;YAED,cAAc;YACd,MAAM,QAAQ,GAA2B;gBACrC,IAAI,EAAE,OAAO,CAAC,IAAW;gBACzB,IAAI,EAAE,OAAO,CAAC,IAAI;gBAClB,MAAM,EAAE,OAAO,CAAC,MAAM;gBACtB,SAAS,EAAE,OAAO,CAAC,SAAS;gBAC5B,KAAK,EAAE,CAAC,OAAO,CAAC,SAAS,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC;gBAC9C,QAAQ,EAAE,EAAE;gBACZ,YAAY,EAAE,IAAI,IAAI,EAAE;
aAC3B,CAAC;YAEF,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;YAExD,IAAA,gBAAO,EAAC,2BAA2B,EAAE,OAAO,CAAC,SAAS,EAAE;gBACpD,MAAM,EAAE,IAAI,CAAC,EAAE;gBACf,QAAQ,EAAE,OAAO,CAAC,IAAI;gBACtB,gBAAgB,EAAE,IAAI,CAAC,KAAK,CAAC,MAAM;gBACnC,MAAM,EAAE,OAAO,CAAC,MAAM;aACzB,CAAC,CAAC;YAEH,OAAO,IAAI,CAAC;QAEhB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,IAAA,iBAAQ,EAAC,gCAAgC,EAAE,KAAc,CAAC,CAAC;YAC3D,OAAO,IAAI,CAAC;QAChB,CAAC;IACL,CAAC;CACJ;AA7ED,4DA6EC"}
@@ -0,0 +1,10 @@
import { SendMessageCommand } from './ChatCommands';
import { IChatRepository } from '../../../Domain/IRepository/IChatRepository';
import { Message } from '../../../Domain/Chat/ChatAggregate';
/** Appends a message to a chat, pruning history (last 10 per user, max 2 weeks). Resolves null on failure. */
export declare class SendMessageCommandHandler {
private chatRepository;
constructor(chatRepository: IChatRepository);
execute(command: SendMessageCommand): Promise<Message | null>;
private pruneMessages;
}
//# sourceMappingURL=SendMessageCommandHandler.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"SendMessageCommandHandler.d.ts","sourceRoot":"","sources":["../../../../src/Application/Chat/commands/SendMessageCommandHandler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,MAAM,gBAAgB,CAAC;AACpD,OAAO,EAAE,eAAe,EAAE,MAAM,6CAA6C,CAAC;AAC9E,OAAO,EAAE,OAAO,EAAE,MAAM,oCAAoC,CAAC;AAI7D,qBAAa,yBAAyB;IACtB,OAAO,CAAC,cAAc;gBAAd,cAAc,EAAE,eAAe;IAE7C,OAAO,CAAC,OAAO,EAAE,kBAAkB,GAAG,OAAO,CAAC,OAAO,GAAG,IAAI,CAAC;IAiDnE,OAAO,CAAC,aAAa;CAyBxB"}
@@ -0,0 +1,74 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SendMessageCommandHandler = void 0;
const Logger_1 = require("../../Services/Logger");
const uuid_1 = require("uuid");
// Handles SendMessageCommand: validates the text and sender membership,
// appends a new uuid-stamped message, prunes history (drop messages older
// than 2 weeks, then keep at most the last 10 per user), and persists the
// updated message list. Errors are logged and collapsed into `null`.
class SendMessageCommandHandler {
constructor(chatRepository) {
this.chatRepository = chatRepository;
}
// Returns the stored Message on success, null on validation/repository failure.
async execute(command) {
try {
// Validate message is non-empty string
if (typeof command.message !== 'string' || !command.message.trim()) {
throw new Error('Message must be a non-empty string');
}
const chat = await this.chatRepository.findById(command.chatId);
if (!chat) {
throw new Error('Chat not found');
}
// Check if user is member of this chat
if (!chat.users.includes(command.userId)) {
throw new Error('User is not a member of this chat');
}
// Create message
// The stored text is trimmed; messageLength logged below is the raw length.
const message = {
id: (0, uuid_1.v4)(),
date: new Date(),
userid: command.userId,
text: command.message.trim()
};
// Manage message history (keep last 10 per user, up to 2 weeks)
let updatedMessages = [...chat.messages, message];
updatedMessages = this.pruneMessages(updatedMessages);
// Update chat
await this.chatRepository.update(command.chatId, {
messages: updatedMessages,
lastActivity: new Date()
});
(0, Logger_1.logAuth)('Message sent successfully', command.userId, {
chatId: command.chatId,
messageLength: command.message.length,
totalMessages: updatedMessages.length
});
return message;
}
catch (error) {
// Swallow and log: callers only see null vs. the message.
(0, Logger_1.logError)('SendMessageCommandHandler error', error);
return null;
}
}
// Retention policy: discard messages older than 14 days, then cap each user
// at their 10 most recent messages, and return the result sorted by date
// ascending. Note: the per-user cap relies on `messages` arriving in
// chronological order (slice(-10) takes the tail of each user's group).
pruneMessages(messages) {
const twoWeeksAgo = new Date(Date.now() - 14 * 24 * 60 * 60 * 1000);
// Remove messages older than 2 weeks
let prunedMessages = messages.filter(msg => new Date(msg.date) > twoWeeksAgo);
// Group by user and keep last 10 messages per user
const messagesByUser = new Map();
prunedMessages.forEach(msg => {
if (!messagesByUser.has(msg.userid)) {
messagesByUser.set(msg.userid, []);
}
messagesByUser.get(msg.userid).push(msg);
});
// Keep only last 10 messages per user
const finalMessages = [];
messagesByUser.forEach((userMessages, userId) => {
const last10 = userMessages.slice(-10);
finalMessages.push(...last10);
});
// Sort by date
return finalMessages.sort((a, b) => new Date(a.date).getTime() - new Date(b.date).getTime());
}
}
exports.SendMessageCommandHandler = SendMessageCommandHandler;
//# sourceMappingURL=SendMessageCommandHandler.js.map
@@ -0,0 +1 @@
{"version":3,"file":"SendMessageCommandHandler.js","sourceRoot":"","sources":["../../../../src/Application/Chat/commands/SendMessageCommandHandler.ts"],"names":[],"mappings":";;;AAGA,kDAA0D;AAC1D,+BAAoC;AAEpC,MAAa,yBAAyB;IAClC,YAAoB,cAA+B;QAA/B,mBAAc,GAAd,cAAc,CAAiB;IAAG,CAAC;IAEvD,KAAK,CAAC,OAAO,CAAC,OAA2B;QACrC,IAAI,CAAC;YACD,uCAAuC;YACvC,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC;gBACjE,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;YAC1D,CAAC;YAED,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAChE,IAAI,CAAC,IAAI,EAAE,CAAC;gBACR,MAAM,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC;YACtC,CAAC;YAED,uCAAuC;YACvC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAC;gBACvC,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAC;YACzD,CAAC;YAED,iBAAiB;YACjB,MAAM,OAAO,GAAY;gBACrB,EAAE,EAAE,IAAA,SAAM,GAAE;gBACZ,IAAI,EAAE,IAAI,IAAI,EAAE;gBAChB,MAAM,EAAE,OAAO,CAAC,MAAM;gBACtB,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE;aAC/B,CAAC;YAEF,gEAAgE;YAChE,IAAI,eAAe,GAAG,CAAC,GAAG,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;YAClD,eAAe,GAAG,IAAI,CAAC,aAAa,CAAC,eAAe,CAAC,CAAC;YAEtD,cAAc;YACd,MAAM,IAAI,CAAC,cAAc,CAAC,MAAM,CAAC,OAAO,CAAC,MAAM,EAAE;gBAC7C,QAAQ,EAAE,eAAe;gBACzB,YAAY,EAAE,IAAI,IAAI,EAAE;aAC3B,CAAC,CAAC;YAEH,IAAA,gBAAO,EAAC,2BAA2B,EAAE,OAAO,CAAC,MAAM,EAAE;gBACjD,MAAM,EAAE,OAAO,CAAC,MAAM;gBACtB,aAAa,EAAE,OAAO,CAAC,OAAO,CAAC,MAAM;gBACrC,aAAa,EAAE,eAAe,CAAC,MAAM;aACxC,CAAC,CAAC;YAEH,OAAO,OAAO,CAAC;QAEnB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,IAAA,iBAAQ,EAAC,iCAAiC,EAAE,KAAc,CAAC,CAAC;YAC5D,OAAO,IAAI,CAAC;QAChB,CAAC;IACL,CAAC;IAEO,aAAa,CAAC,QAAmB;QACrC,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC,CAAC;QAEpE,qCAAqC;QACrC,IAAI,cAAc,GAAG,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,CAAC;QAE9E,mDAAmD;QACnD,MAAM,cAAc,GAAG,IAAI,GAAG,EAAqB,CAAC;QACpD,cAAc,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YACzB,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;gBAC
lC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;YACvC,CAAC;YACD,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,MAAM,CAAE,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC9C,CAAC,CAAC,CAAC;QAEH,sCAAsC;QACtC,MAAM,aAAa,GAAc,EAAE,CAAC;QACpC,cAAc,CAAC,OAAO,CAAC,CAAC,YAAY,EAAE,MAAM,EAAE,EAAE;YAC5C,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC;YACvC,aAAa,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,CAAC;QAClC,CAAC,CAAC,CAAC;QAEH,eAAe;QACf,OAAO,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,OAAO,EAAE,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;IACjG,CAAC;CACJ;AA7ED,8DA6EC"}
@@ -0,0 +1 @@
//# sourceMappingURL=SoftDeleteCommandHandlers.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"SoftDeleteCommandHandlers.d.ts","sourceRoot":"","sources":["../../../../src/Application/Chat/commands/SoftDeleteCommandHandlers.ts"],"names":[],"mappings":""}
@@ -0,0 +1,2 @@
"use strict";
//# sourceMappingURL=SoftDeleteCommandHandlers.js.map
@@ -0,0 +1 @@
{"version":3,"file":"SoftDeleteCommandHandlers.js","sourceRoot":"","sources":["../../../../src/Application/Chat/commands/SoftDeleteCommandHandlers.ts"],"names":[],"mappings":""}
@@ -0,0 +1,28 @@
import { GetChatHistoryQuery, GetArchivedChatsQuery } from './ChatQueries';
import { IChatRepository } from '../../../Domain/IRepository/IChatRepository';
import { IChatArchiveRepository } from '../../../Domain/IRepository/IChatArchiveRepository';
import { Message } from '../../../Domain/Chat/ChatAggregate';
/** History of one chat (active or archived) plus summary metadata for display. */
interface ChatHistoryResult {
chatId: string;
messages: Message[];
isArchived: boolean;
chatInfo: {
type: string;
name: string | null;
gameId: string | null;
users: string[];
};
}
/** Fetches a chat's history; falls back to the archive, and returns null for non-members. */
export declare class GetChatHistoryQueryHandler {
private chatRepository;
private chatArchiveRepository;
constructor(chatRepository: IChatRepository, chatArchiveRepository: IChatArchiveRepository);
execute(query: GetChatHistoryQuery): Promise<ChatHistoryResult | null>;
}
/** Lists archived chats visible to a user (currently only when filtered by gameId). */
export declare class GetArchivedChatsQueryHandler {
private chatArchiveRepository;
constructor(chatArchiveRepository: IChatArchiveRepository);
execute(query: GetArchivedChatsQuery): Promise<ChatHistoryResult[]>;
}
export {};
//# sourceMappingURL=ChatHistoryQueryHandlers.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"ChatHistoryQueryHandlers.d.ts","sourceRoot":"","sources":["../../../../src/Application/Chat/queries/ChatHistoryQueryHandlers.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,qBAAqB,EAAE,MAAM,eAAe,CAAC;AAC3E,OAAO,EAAE,eAAe,EAAE,MAAM,6CAA6C,CAAC;AAC9E,OAAO,EAAE,sBAAsB,EAAE,MAAM,oDAAoD,CAAC;AAC5F,OAAO,EAAE,OAAO,EAAE,MAAM,oCAAoC,CAAC;AAG7D,UAAU,iBAAiB;IACvB,MAAM,EAAE,MAAM,CAAC;IACf,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,UAAU,EAAE,OAAO,CAAC;IACpB,QAAQ,EAAE;QACN,IAAI,EAAE,MAAM,CAAC;QACb,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;QACpB,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;QACtB,KAAK,EAAE,MAAM,EAAE,CAAC;KACnB,CAAC;CACL;AAED,qBAAa,0BAA0B;IAE/B,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,qBAAqB;gBADrB,cAAc,EAAE,eAAe,EAC/B,qBAAqB,EAAE,sBAAsB;IAGnD,OAAO,CAAC,KAAK,EAAE,mBAAmB,GAAG,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC;CAwE/E;AAED,qBAAa,4BAA4B;IACzB,OAAO,CAAC,qBAAqB;gBAArB,qBAAqB,EAAE,sBAAsB;IAE3D,OAAO,CAAC,KAAK,EAAE,qBAAqB,GAAG,OAAO,CAAC,iBAAiB,EAAE,CAAC;CAuC5E"}
@@ -0,0 +1,116 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GetArchivedChatsQueryHandler = exports.GetChatHistoryQueryHandler = void 0;
const Logger_1 = require("../../Services/Logger");
// Handles GetChatHistoryQuery. Lookup order: active chats first, then the
// archive. Authorization is membership-based — a requester who is not in the
// chat's user list (or the archive's participants) gets null, the same result
// as "not found", so callers cannot distinguish the two cases.
class GetChatHistoryQueryHandler {
constructor(chatRepository, chatArchiveRepository) {
this.chatRepository = chatRepository;
this.chatArchiveRepository = chatArchiveRepository;
}
// Returns the chat's history plus metadata, flagged with isArchived, or null
// (unauthorized, missing, or repository error — errors are logged, not thrown).
async execute(query) {
try {
// First try to find active chat
const chat = await this.chatRepository.findById(query.chatId);
if (chat) {
// Check authorization
if (!chat.users.includes(query.userId)) {
(0, Logger_1.logWarning)('Unauthorized chat history access attempt', {
chatId: query.chatId,
userId: query.userId
});
return null;
}
(0, Logger_1.logAuth)('Chat history retrieved', query.userId, {
chatId: query.chatId,
messageCount: chat.messages.length,
isArchived: false
});
return {
chatId: query.chatId,
messages: chat.messages,
isArchived: false,
chatInfo: {
type: chat.type,
name: chat.name,
gameId: chat.gameId,
users: chat.users
}
};
}
// Try to find in archives
// (only an archive listing the requester as participant is returned)
const archives = await this.chatArchiveRepository.findByChatId(query.chatId);
const userArchive = archives.find(archive => archive.participants.includes(query.userId));
if (userArchive) {
(0, Logger_1.logAuth)('Archived chat history retrieved', query.userId, {
chatId: query.chatId,
messageCount: userArchive.archivedMessages.length,
isArchived: true
});
return {
chatId: query.chatId,
messages: userArchive.archivedMessages,
isArchived: true,
chatInfo: {
type: userArchive.chatType,
name: userArchive.chatName,
gameId: userArchive.gameId,
users: userArchive.participants
}
};
}
(0, Logger_1.logWarning)('Chat history not found', {
chatId: query.chatId,
userId: query.userId
});
return null;
}
catch (error) {
// Swallow and log: callers only see null.
(0, Logger_1.logError)('GetChatHistoryQueryHandler error', error);
return null;
}
}
}
exports.GetChatHistoryQueryHandler = GetChatHistoryQueryHandler;
// Handles GetArchivedChatsQuery. Currently only the gameId-filtered path is
// implemented; without a gameId the handler returns an empty list (the
// repository lacks a "all archives for user" query — see inline comment).
// Results are filtered to archives where the requester is a participant.
class GetArchivedChatsQueryHandler {
constructor(chatArchiveRepository) {
this.chatArchiveRepository = chatArchiveRepository;
}
// Returns archived-chat histories visible to query.userId; [] on error or
// when no gameId filter is supplied. Never throws.
async execute(query) {
try {
let archives = [];
if (query.gameId) {
// Get archived game chats
archives = await this.chatArchiveRepository.findByGameId(query.gameId);
}
else {
// Get all archived chats for user (would need different query)
// For now, return empty - this would need a new repository method
archives = [];
}
const result = archives
.filter(archive => archive.participants.includes(query.userId))
.map(archive => ({
chatId: archive.chatId,
messages: archive.archivedMessages,
isArchived: true,
chatInfo: {
type: archive.chatType,
name: archive.chatName,
gameId: archive.gameId,
users: archive.participants
}
}));
(0, Logger_1.logAuth)('Archived chats retrieved', query.userId, {
count: result.length,
gameId: query.gameId
});
return result;
}
catch (error) {
// Swallow and log: callers only see the (possibly empty) list.
(0, Logger_1.logError)('GetArchivedChatsQueryHandler error', error);
return [];
}
}
}
exports.GetArchivedChatsQueryHandler = GetArchivedChatsQueryHandler;
//# sourceMappingURL=ChatHistoryQueryHandlers.js.map
@@ -0,0 +1 @@
{"version":3,"file":"ChatHistoryQueryHandlers.js","sourceRoot":"","sources":["../../../../src/Application/Chat/queries/ChatHistoryQueryHandlers.ts"],"names":[],"mappings":";;;AAIA,kDAAsE;AActE,MAAa,0BAA0B;IACnC,YACY,cAA+B,EAC/B,qBAA6C;QAD7C,mBAAc,GAAd,cAAc,CAAiB;QAC/B,0BAAqB,GAArB,qBAAqB,CAAwB;IACtD,CAAC;IAEJ,KAAK,CAAC,OAAO,CAAC,KAA0B;QACpC,IAAI,CAAC;YACD,gCAAgC;YAChC,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;YAE9D,IAAI,IAAI,EAAE,CAAC;gBACP,sBAAsB;gBACtB,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC;oBACrC,IAAA,mBAAU,EAAC,0CAA0C,EAAE;wBACnD,MAAM,EAAE,KAAK,CAAC,MAAM;wBACpB,MAAM,EAAE,KAAK,CAAC,MAAM;qBACvB,CAAC,CAAC;oBACH,OAAO,IAAI,CAAC;gBAChB,CAAC;gBAED,IAAA,gBAAO,EAAC,wBAAwB,EAAE,KAAK,CAAC,MAAM,EAAE;oBAC5C,MAAM,EAAE,KAAK,CAAC,MAAM;oBACpB,YAAY,EAAE,IAAI,CAAC,QAAQ,CAAC,MAAM;oBAClC,UAAU,EAAE,KAAK;iBACpB,CAAC,CAAC;gBAEH,OAAO;oBACH,MAAM,EAAE,KAAK,CAAC,MAAM;oBACpB,QAAQ,EAAE,IAAI,CAAC,QAAQ;oBACvB,UAAU,EAAE,KAAK;oBACjB,QAAQ,EAAE;wBACN,IAAI,EAAE,IAAI,CAAC,IAAI;wBACf,IAAI,EAAE,IAAI,CAAC,IAAI;wBACf,MAAM,EAAE,IAAI,CAAC,MAAM;wBACnB,KAAK,EAAE,IAAI,CAAC,KAAK;qBACpB;iBACJ,CAAC;YACN,CAAC;YAED,0BAA0B;YAC1B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAAC,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;YAC7E,MAAM,WAAW,GAAG,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CACxC,OAAO,CAAC,YAAY,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC,CAC9C,CAAC;YAEF,IAAI,WAAW,EAAE,CAAC;gBACd,IAAA,gBAAO,EAAC,iCAAiC,EAAE,KAAK,CAAC,MAAM,EAAE;oBACrD,MAAM,EAAE,KAAK,CAAC,MAAM;oBACpB,YAAY,EAAE,WAAW,CAAC,gBAAgB,CAAC,MAAM;oBACjD,UAAU,EAAE,IAAI;iBACnB,CAAC,CAAC;gBAEH,OAAO;oBACH,MAAM,EAAE,KAAK,CAAC,MAAM;oBACpB,QAAQ,EAAE,WAAW,CAAC,gBAAgB;oBACtC,UAAU,EAAE,IAAI;oBAChB,QAAQ,EAAE;wBACN,IAAI,EAAE,WAAW,CAAC,QAAQ;wBAC1B,IAAI,EAAE,WAAW,CAAC,QAAQ;wBAC1B,MAAM,EAAE,WAAW,CAAC,MAAM;wBAC1B,KAAK,EAAE,WAAW,CAAC,YAAY;qBAClC;iBACJ,CAAC;YACN,CAAC;YAED,IAAA,mBAAU,EAAC,wBAAwB,EAAE;gBACjC,MAAM,EAAE,KAAK,CAAC,MAAM;gBACpB,MAAM,EAAE,KAAK,CAAC,MAAM;aACvB,CAAC,CAAC;YAEH,OAAO,IAAI,CAAC;QAEhB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,
IAAA,iBAAQ,EAAC,kCAAkC,EAAE,KAAc,CAAC,CAAC;YAC7D,OAAO,IAAI,CAAC;QAChB,CAAC;IACL,CAAC;CACJ;AA9ED,gEA8EC;AAED,MAAa,4BAA4B;IACrC,YAAoB,qBAA6C;QAA7C,0BAAqB,GAArB,qBAAqB,CAAwB;IAAG,CAAC;IAErE,KAAK,CAAC,OAAO,CAAC,KAA4B;QACtC,IAAI,CAAC;YACD,IAAI,QAAQ,GAAU,EAAE,CAAC;YAEzB,IAAI,KAAK,CAAC,MAAM,EAAE,CAAC;gBACf,0BAA0B;gBAC1B,QAAQ,GAAG,MAAM,IAAI,CAAC,qBAAqB,CAAC,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;YAC3E,CAAC;iBAAM,CAAC;gBACJ,+DAA+D;gBAC/D,kEAAkE;gBAClE,QAAQ,GAAG,EAAE,CAAC;YAClB,CAAC;YAED,MAAM,MAAM,GAAG,QAAQ;iBAClB,MAAM,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,CAAC,YAAY,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;iBAC9D,GAAG,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;gBACb,MAAM,EAAE,OAAO,CAAC,MAAM;gBACtB,QAAQ,EAAE,OAAO,CAAC,gBAAgB;gBAClC,UAAU,EAAE,IAAI;gBAChB,QAAQ,EAAE;oBACN,IAAI,EAAE,OAAO,CAAC,QAAQ;oBACtB,IAAI,EAAE,OAAO,CAAC,QAAQ;oBACtB,MAAM,EAAE,OAAO,CAAC,MAAM;oBACtB,KAAK,EAAE,OAAO,CAAC,YAAY;iBAC9B;aACJ,CAAC,CAAC,CAAC;YAER,IAAA,gBAAO,EAAC,0BAA0B,EAAE,KAAK,CAAC,MAAM,EAAE;gBAC9C,KAAK,EAAE,MAAM,CAAC,MAAM;gBACpB,MAAM,EAAE,KAAK,CAAC,MAAM;aACvB,CAAC,CAAC;YAEH,OAAO,MAAM,CAAC;QAElB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,IAAA,iBAAQ,EAAC,oCAAoC,EAAE,KAAc,CAAC,CAAC;YAC/D,OAAO,EAAE,CAAC;QACd,CAAC;IACL,CAAC;CACJ;AA1CD,oEA0CC"}
@@ -0,0 +1,13 @@
/** Lists a user's chats; `includeArchived` additionally pulls archived ones. */
export interface GetUserChatsQuery {
userId: string;
includeArchived?: boolean;
}
/** Fetches one chat's history; `userId` is the requester used for the membership check. */
export interface GetChatHistoryQuery {
chatId: string;
userId: string;
}
/** Lists archived chats for `userId`, optionally filtered to one `gameId`. */
export interface GetArchivedChatsQuery {
userId: string;
gameId?: string;
}
//# sourceMappingURL=ChatQueries.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"ChatQueries.d.ts","sourceRoot":"","sources":["../../../../src/Application/Chat/queries/ChatQueries.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,iBAAiB;IAC9B,MAAM,EAAE,MAAM,CAAC;IACf,eAAe,CAAC,EAAE,OAAO,CAAC;CAC7B;AAED,MAAM,WAAW,mBAAmB;IAChC,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,qBAAqB;IAClC,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,MAAM,CAAC;CACnB"}
@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=ChatQueries.js.map
@@ -0,0 +1 @@
{"version":3,"file":"ChatQueries.js","sourceRoot":"","sources":["../../../../src/Application/Chat/queries/ChatQueries.ts"],"names":[],"mappings":""}
@@ -0,0 +1,6 @@
/** Admin-style paging over chats by inclusive record range [from, to]. */
export interface GetChatsByPageQuery {
from: number;
to: number;
includeDeleted?: boolean;
}
//# sourceMappingURL=GetChatsByPageQuery.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"GetChatsByPageQuery.d.ts","sourceRoot":"","sources":["../../../../src/Application/Chat/queries/GetChatsByPageQuery.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,mBAAmB;IAChC,IAAI,EAAE,MAAM,CAAC;IACb,EAAE,EAAE,MAAM,CAAC;IACX,cAAc,CAAC,EAAE,OAAO,CAAC;CAC5B"}
@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=GetChatsByPageQuery.js.map
@@ -0,0 +1 @@
{"version":3,"file":"GetChatsByPageQuery.js","sourceRoot":"","sources":["../../../../src/Application/Chat/queries/GetChatsByPageQuery.ts"],"names":[],"mappings":""}
@@ -0,0 +1,12 @@
import { IChatRepository } from '../../../Domain/IRepository/IChatRepository';
import { GetChatsByPageQuery } from './GetChatsByPageQuery';
import { ShortChatDto } from '../../DTOs/ChatDto';
/** Pages over chats (inclusive range, max 100 per page) and maps them to short DTOs. */
export declare class GetChatsByPageQueryHandler {
private readonly chatRepo;
constructor(chatRepo: IChatRepository);
execute(query: GetChatsByPageQuery): Promise<{
chats: ShortChatDto[];
totalCount: number;
}>;
}
//# sourceMappingURL=GetChatsByPageQueryHandler.d.ts.map
{"version":3,"file":"GetChatsByPageQueryHandler.d.ts","sourceRoot":"","sources":["../../../../src/Application/Chat/queries/GetChatsByPageQueryHandler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,eAAe,EAAE,MAAM,6CAA6C,CAAC;AAC9E,OAAO,EAAE,mBAAmB,EAAE,MAAM,uBAAuB,CAAC;AAC5D,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAIlD,qBAAa,0BAA0B;IACzB,OAAO,CAAC,QAAQ,CAAC,QAAQ;gBAAR,QAAQ,EAAE,eAAe;IAEhD,OAAO,CAAC,KAAK,EAAE,mBAAmB,GAAG,OAAO,CAAC;QAAE,KAAK,EAAE,YAAY,EAAE,CAAC;QAAC,UAAU,EAAE,MAAM,CAAA;KAAE,CAAC;CA6ClG"}
@@ -0,0 +1,51 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GetChatsByPageQueryHandler = void 0;
const ChatMapper_1 = require("../../DTOs/Mappers/ChatMapper");
const Logger_1 = require("../../Services/Logger");
// Handles GetChatsByPageQuery. The range [from, to] is inclusive (limit is
// to - from + 1) and capped at 100 records. Unlike the other handlers in this
// diff, this one RE-THROWS: validation errors pass through verbatim, every
// other failure is wrapped in a generic 'Failed to retrieve chats page'.
class GetChatsByPageQueryHandler {
constructor(chatRepo) {
this.chatRepo = chatRepo;
}
// Returns mapped short DTOs plus the repository's total count.
// Throws on invalid range, oversized page, or repository failure.
async execute(query) {
try {
// Validate pagination parameters
if (query.from < 0 || query.to < query.from) {
throw new Error('Invalid pagination parameters');
}
const limit = query.to - query.from + 1;
if (limit > 100) {
throw new Error('Page size too large. Maximum 100 records per request');
}
(0, Logger_1.logRequest)('Get chats by page query started', undefined, undefined, {
from: query.from,
to: query.to,
includeDeleted: query.includeDeleted || false
});
// Soft-deleted chats are included only on explicit request.
const result = query.includeDeleted
? await this.chatRepo.findByPageIncludingDeleted(query.from, query.to)
: await this.chatRepo.findByPage(query.from, query.to);
(0, Logger_1.logRequest)('Get chats by page query completed', undefined, undefined, {
from: query.from,
to: query.to,
returned: result.chats.length,
totalCount: result.totalCount,
includeDeleted: query.includeDeleted || false
});
return {
chats: ChatMapper_1.ChatMapper.toShortDtoList(result.chats),
totalCount: result.totalCount
};
}
catch (error) {
(0, Logger_1.logError)('GetChatsByPageQueryHandler error', error instanceof Error ? error : new Error(String(error)));
// Re-throw validation errors as-is
// NOTE(review): matching on message substrings is brittle — TODO consider
// dedicated error classes in the TS source.
if (error instanceof Error && (error.message.includes('Invalid pagination') || error.message.includes('Page size'))) {
throw error;
}
throw new Error('Failed to retrieve chats page');
}
}
}
exports.GetChatsByPageQueryHandler = GetChatsByPageQueryHandler;
//# sourceMappingURL=GetChatsByPageQueryHandler.js.map
@@ -0,0 +1 @@
{"version":3,"file":"GetChatsByPageQueryHandler.js","sourceRoot":"","sources":["../../../../src/Application/Chat/queries/GetChatsByPageQueryHandler.ts"],"names":[],"mappings":";;;AAGA,8DAA2D;AAC3D,kDAA6D;AAE7D,MAAa,0BAA0B;IACrC,YAA6B,QAAyB;QAAzB,aAAQ,GAAR,QAAQ,CAAiB;IAAG,CAAC;IAE1D,KAAK,CAAC,OAAO,CAAC,KAA0B;QACtC,IAAI,CAAC;YACH,iCAAiC;YACjC,IAAI,KAAK,CAAC,IAAI,GAAG,CAAC,IAAI,KAAK,CAAC,EAAE,GAAG,KAAK,CAAC,IAAI,EAAE,CAAC;gBAC5C,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;YACnD,CAAC;YAED,MAAM,KAAK,GAAG,KAAK,CAAC,EAAE,GAAG,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC;YACxC,IAAI,KAAK,GAAG,GAAG,EAAE,CAAC;gBAChB,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAC;YAC1E,CAAC;YAED,IAAA,mBAAU,EAAC,iCAAiC,EAAE,SAAS,EAAE,SAAS,EAAE;gBAClE,IAAI,EAAE,KAAK,CAAC,IAAI;gBAChB,EAAE,EAAE,KAAK,CAAC,EAAE;gBACZ,cAAc,EAAE,KAAK,CAAC,cAAc,IAAI,KAAK;aAC9C,CAAC,CAAC;YAEH,MAAM,MAAM,GAAG,KAAK,CAAC,cAAc;gBACjC,CAAC,CAAC,MAAM,IAAI,CAAC,QAAQ,CAAC,0BAA0B,CAAC,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,EAAE,CAAC;gBACtE,CAAC,CAAC,MAAM,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,EAAE,CAAC,CAAC;YAEzD,IAAA,mBAAU,EAAC,mCAAmC,EAAE,SAAS,EAAE,SAAS,EAAE;gBACpE,IAAI,EAAE,KAAK,CAAC,IAAI;gBAChB,EAAE,EAAE,KAAK,CAAC,EAAE;gBACZ,QAAQ,EAAE,MAAM,CAAC,KAAK,CAAC,MAAM;gBAC7B,UAAU,EAAE,MAAM,CAAC,UAAU;gBAC7B,cAAc,EAAE,KAAK,CAAC,cAAc,IAAI,KAAK;aAC9C,CAAC,CAAC;YAEH,OAAO;gBACL,KAAK,EAAE,uBAAU,CAAC,cAAc,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC9C,UAAU,EAAE,MAAM,CAAC,UAAU;aAC9B,CAAC;QACJ,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,IAAA,iBAAQ,EAAC,kCAAkC,EAAE,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;YAExG,mCAAmC;YACnC,IAAI,KAAK,YAAY,KAAK,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,oBAAoB,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC,EAAE,CAAC;gBACpH,MAAM,KAAK,CAAC;YACd,CAAC;YAED,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAC;QACnD,CAAC;IACH,CAAC;CACF;AAhDD,gEAgDC"}
@@ -0,0 +1,23 @@
import { GetUserChatsQuery } from './ChatQueries';
import { IChatRepository } from '../../../Domain/IRepository/IChatRepository';
import { IChatArchiveRepository } from '../../../Domain/IRepository/IChatArchiveRepository';
/**
 * Shape of each entry in a user's chat list (active or archived).
 * NOTE(review): generated declaration file (tsc output) — edit the
 * corresponding .ts source, not this file.
 */
interface ChatWithMetadata {
    id: string;
    type: string;
    name: string | null;
    gameId: string | null;
    users: string[];
    lastActivity: Date | null;
    // true for entries sourced from the chat archive repository
    isArchived: boolean;
    messageCount: number;
    // optional: present for active chats; archived chats report 0
    unreadCount?: number;
}
export declare class GetUserChatsQueryHandler {
    private chatRepository;
    private chatArchiveRepository;
    constructor(chatRepository: IChatRepository, chatArchiveRepository: IChatArchiveRepository);
    /** Resolves with the user's chats, most recently active first. */
    execute(query: GetUserChatsQuery): Promise<ChatWithMetadata[]>;
    private calculateUnreadMessages;
}
export {};
//# sourceMappingURL=GetUserChatsQueryHandler.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"GetUserChatsQueryHandler.d.ts","sourceRoot":"","sources":["../../../../src/Application/Chat/queries/GetUserChatsQueryHandler.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAClD,OAAO,EAAE,eAAe,EAAE,MAAM,6CAA6C,CAAC;AAC9E,OAAO,EAAE,sBAAsB,EAAE,MAAM,oDAAoD,CAAC;AAK5F,UAAU,gBAAgB;IACtB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;IACpB,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IACtB,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,YAAY,EAAE,IAAI,GAAG,IAAI,CAAC;IAC1B,UAAU,EAAE,OAAO,CAAC;IACpB,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,MAAM,CAAC;CACxB;AAED,qBAAa,wBAAwB;IAE7B,OAAO,CAAC,cAAc;IACtB,OAAO,CAAC,qBAAqB;gBADrB,cAAc,EAAE,eAAe,EAC/B,qBAAqB,EAAE,sBAAsB;IAGnD,OAAO,CAAC,KAAK,EAAE,iBAAiB,GAAG,OAAO,CAAC,gBAAgB,EAAE,CAAC;IAkEpE,OAAO,CAAC,uBAAuB;CAKlC"}
@@ -0,0 +1,76 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GetUserChatsQueryHandler = void 0;
const Logger_1 = require("../../Services/Logger");
/**
 * Returns every chat visible to a user: active chats, plus archived chats
 * when requested, sorted by most recent activity first.
 * Compiled output — the authoritative source is the corresponding .ts file.
 */
class GetUserChatsQueryHandler {
    /**
     * @param chatRepository        active-chat persistence
     * @param chatArchiveRepository archived-chat persistence
     */
    constructor(chatRepository, chatArchiveRepository) {
        this.chatRepository = chatRepository;
        this.chatArchiveRepository = chatArchiveRepository;
    }
    /**
     * Builds the user's chat list.
     * Never rejects: on any failure the error is logged and an empty list is
     * returned (deliberate best-effort behavior — callers see "no chats").
     */
    async execute(query) {
        try {
            const result = [];
            // Active chats carry a live unread count.
            const activeChats = await this.chatRepository.findActiveChatsForUser(query.userId);
            result.push(...activeChats.map(chat => ({
                id: chat.id,
                type: chat.type,
                name: chat.name,
                gameId: chat.gameId,
                users: chat.users,
                lastActivity: chat.lastActivity,
                isArchived: false,
                messageCount: chat.messages.length,
                unreadCount: this.calculateUnreadMessages(chat, query.userId)
            })));
            // Archived chats: find the user's chats that carry an archiveDate,
            // load their archive records, and keep only those the user joined.
            if (query.includeArchived) {
                const userActiveChats = await this.chatRepository.findByUserId(query.userId);
                const archivedChatIds = userActiveChats
                    .filter(chat => chat.archiveDate !== null)
                    .map(chat => chat.id);
                const archives = await Promise.all(archivedChatIds.map(id => this.chatArchiveRepository.findByChatId(id)));
                archives.forEach(archiveArray => {
                    archiveArray.forEach(archive => {
                        if (archive.participants.includes(query.userId)) {
                            result.push({
                                id: archive.chatId,
                                type: archive.chatType,
                                name: archive.chatName,
                                gameId: archive.gameId,
                                users: archive.participants,
                                lastActivity: archive.archivedAt,
                                isArchived: true,
                                messageCount: archive.archivedMessages.length,
                                unreadCount: 0 // Archived chats have no unread messages
                            });
                        }
                    });
                });
            }
            (0, Logger_1.logAuth)('User chats retrieved', query.userId, {
                activeCount: activeChats.length,
                totalCount: result.length,
                includeArchived: query.includeArchived
            });
            // Most recently active first; chats without activity sink to the end.
            return result.sort((a, b) => {
                if (!a.lastActivity)
                    return 1;
                if (!b.lastActivity)
                    return -1;
                return new Date(b.lastActivity).getTime() - new Date(a.lastActivity).getTime();
            });
        }
        catch (error) {
            // FIX: normalize non-Error throwables before logging, consistent with
            // GetChatsByPageQueryHandler; previously the raw value was passed through.
            (0, Logger_1.logError)('GetUserChatsQueryHandler error', error instanceof Error ? error : new Error(String(error)));
            return [];
        }
    }
    /**
     * Counts messages authored by anyone other than the given user.
     * Simple implementation - count messages from other users.
     * In production, you'd store lastSeen timestamp per user per chat.
     */
    calculateUnreadMessages(chat, userId) {
        return chat.messages.filter(msg => msg.userid !== userId).length;
    }
}
exports.GetUserChatsQueryHandler = GetUserChatsQueryHandler;
//# sourceMappingURL=GetUserChatsQueryHandler.js.map
@@ -0,0 +1 @@
{"version":3,"file":"GetUserChatsQueryHandler.js","sourceRoot":"","sources":["../../../../src/Application/Chat/queries/GetUserChatsQueryHandler.ts"],"names":[],"mappings":";;;AAKA,kDAA0D;AAc1D,MAAa,wBAAwB;IACjC,YACY,cAA+B,EAC/B,qBAA6C;QAD7C,mBAAc,GAAd,cAAc,CAAiB;QAC/B,0BAAqB,GAArB,qBAAqB,CAAwB;IACtD,CAAC;IAEJ,KAAK,CAAC,OAAO,CAAC,KAAwB;QAClC,IAAI,CAAC;YACD,MAAM,MAAM,GAAuB,EAAE,CAAC;YAEtC,mBAAmB;YACnB,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,sBAAsB,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;YACnF,MAAM,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;gBACpC,EAAE,EAAE,IAAI,CAAC,EAAE;gBACX,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,IAAI,EAAE,IAAI,CAAC,IAAI;gBACf,MAAM,EAAE,IAAI,CAAC,MAAM;gBACnB,KAAK,EAAE,IAAI,CAAC,KAAK;gBACjB,YAAY,EAAE,IAAI,CAAC,YAAY;gBAC/B,UAAU,EAAE,KAAK;gBACjB,YAAY,EAAE,IAAI,CAAC,QAAQ,CAAC,MAAM;gBAClC,WAAW,EAAE,IAAI,CAAC,uBAAuB,CAAC,IAAI,EAAE,KAAK,CAAC,MAAM,CAAC;aAChE,CAAC,CAAC,CAAC,CAAC;YAEL,kCAAkC;YAClC,IAAI,KAAK,CAAC,eAAe,EAAE,CAAC;gBACxB,MAAM,eAAe,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;gBAC7E,MAAM,eAAe,GAAG,eAAe;qBAClC,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,WAAW,KAAK,IAAI,CAAC;qBACzC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;gBAE1B,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,GAAG,CAC9B,eAAe,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,IAAI,CAAC,qBAAqB,CAAC,YAAY,CAAC,EAAE,CAAC,CAAC,CACzE,CAAC;gBAEF,QAAQ,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;oBAC5B,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;wBAC3B,IAAI,OAAO,CAAC,YAAY,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC;4BAC9C,MAAM,CAAC,IAAI,CAAC;gCACR,EAAE,EAAE,OAAO,CAAC,MAAM;gCAClB,IAAI,EAAE,OAAO,CAAC,QAAQ;gCACtB,IAAI,EAAE,OAAO,CAAC,QAAQ;gCACtB,MAAM,EAAE,OAAO,CAAC,MAAM;gCACtB,KAAK,EAAE,OAAO,CAAC,YAAY;gCAC3B,YAAY,EAAE,OAAO,CAAC,UAAU;gCAChC,UAAU,EAAE,IAAI;gCAChB,YAAY,EAAE,OAAO,CAAC,gBAAgB,CAAC,MAAM;gCAC7C,WAAW,EAAE,CAAC,CAAC,yCAAyC;6BAC3D,CAAC,CAAC;wBACP,CAAC;oBACL,CAAC,CAAC,CAAC;gBACP,CAAC,CAAC,CAAC;YACP,CAAC;YAED,IAAA,gBAAO,EAAC,sBAAsB,EAAE,KAAK,CAAC,MAAM,EAAE;gBAC1C,WAAW,EAAE,WAAW,CAAC,MAAM;gBAC/B,UAAU,EAAE,MAAM,CAAC
,MAAM;gBACzB,eAAe,EAAE,KAAK,CAAC,eAAe;aACzC,CAAC,CAAC;YAEH,OAAO,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;gBACxB,IAAI,CAAC,CAAC,CAAC,YAAY;oBAAE,OAAO,CAAC,CAAC;gBAC9B,IAAI,CAAC,CAAC,CAAC,YAAY;oBAAE,OAAO,CAAC,CAAC,CAAC;gBAC/B,OAAO,IAAI,IAAI,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,OAAO,EAAE,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC,OAAO,EAAE,CAAC;YACnF,CAAC,CAAC,CAAC;QAEP,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACb,IAAA,iBAAQ,EAAC,gCAAgC,EAAE,KAAc,CAAC,CAAC;YAC3D,OAAO,EAAE,CAAC;QACd,CAAC;IACL,CAAC;IAEO,uBAAuB,CAAC,IAAmB,EAAE,MAAc;QAC/D,0DAA0D;QAC1D,kEAAkE;QAClE,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,KAAK,MAAM,CAAC,CAAC,MAAM,CAAC;IACrE,CAAC;CACJ;AA7ED,4DA6EC"}
@@ -0,0 +1,9 @@
import { ContactType } from '../../../Domain/Contact/ContactAggregate';
/**
 * Payload for creating a contact entry.
 * NOTE(review): generated declaration file (tsc output) — edit the
 * corresponding .ts source, not this file.
 */
export interface CreateContactCommand {
    name: string;
    email: string;
    // optional — presumably the submitting user's id; confirm against caller
    userid?: string;
    type: ContactType;
    // presumably the free-text message body — verify semantics in the .ts source
    txt: string;
}
//# sourceMappingURL=CreateContactCommand.d.ts.map

Some files were not shown because too many files have changed in this diff Show More