Merge fixed backend from main
new file: SerpentRace_Backend/node_modules/jest-runner/build/testWorker.js new file: SerpentRace_Backend/node_modules/jest/bin/jest.js new file: SerpentRace_Backend/src/Api/index.ts new file: SerpentRace_Backend/src/Application/Services/LoggingService.ts new file: SerpentRace_Backend/tsconfig.json
This commit is contained in:
+510
@@ -0,0 +1,510 @@
|
||||
/*!
|
||||
* /**
|
||||
* * Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
* *
|
||||
* * This source code is licensed under the MIT license found in the
|
||||
* * LICENSE file in the root directory of this source tree.
|
||||
* * /
|
||||
*/
|
||||
/******/ (() => { // webpackBootstrap
|
||||
/******/ "use strict";
|
||||
/******/ var __webpack_modules__ = ({
|
||||
|
||||
/***/ "./src/runTest.ts":
|
||||
/***/ ((__unused_webpack_module, exports) => {
|
||||
|
||||
|
||||
|
||||
Object.defineProperty(exports, "__esModule", ({
|
||||
value: true
|
||||
}));
|
||||
exports["default"] = runTest;
|
||||
// Lazy, memoized loaders for the worker's runtime dependencies. Each loader
// performs its `require` on the first call, then replaces itself with a
// closure returning the cached module, so nothing is loaded until needed.
function _nodeVm() {
  const mod = require("node:vm");
  _nodeVm = () => mod;
  return mod;
}
function _chalk() {
  const mod = _interopRequireDefault(require("chalk"));
  _chalk = () => mod;
  return mod;
}
function fs() {
  const mod = _interopRequireWildcard(require("graceful-fs"));
  fs = () => mod;
  return mod;
}
function sourcemapSupport() {
  const mod = _interopRequireWildcard(require("source-map-support"));
  sourcemapSupport = () => mod;
  return mod;
}
function _console() {
  const mod = require("@jest/console");
  _console = () => mod;
  return mod;
}
function _transform() {
  const mod = require("@jest/transform");
  _transform = () => mod;
  return mod;
}
function docblock() {
  const mod = _interopRequireWildcard(require("jest-docblock"));
  docblock = () => mod;
  return mod;
}
function _jestLeakDetector() {
  const mod = _interopRequireDefault(require("jest-leak-detector"));
  _jestLeakDetector = () => mod;
  return mod;
}
function _jestMessageUtil() {
  const mod = require("jest-message-util");
  _jestMessageUtil = () => mod;
  return mod;
}
function _jestResolve() {
  const mod = require("jest-resolve");
  _jestResolve = () => mod;
  return mod;
}
function _jestUtil() {
  const mod = require("jest-util");
  _jestUtil = () => mod;
  return mod;
}
|
||||
// Babel helper (minified): converts a CommonJS module object into an
// ES-module-shaped namespace (`{ default: e, ...props }`). Enumerable
// properties are copied, preserving getter/setter descriptors via
// Object.defineProperty, and results are cached in two WeakMaps (one per
// `t` mode) so the same module always yields the same namespace object.
function _interopRequireWildcard(e, t) { if ("function" == typeof WeakMap) var r = new WeakMap(), n = new WeakMap(); return (_interopRequireWildcard = function (e, t) { if (!t && e && e.__esModule) return e; var o, i, f = { __proto__: null, default: e }; if (null === e || "object" != typeof e && "function" != typeof e) return f; if (o = t ? n : r) { if (o.has(e)) return o.get(e); o.set(e, f); } for (const t in e) "default" !== t && {}.hasOwnProperty.call(e, t) && ((i = (o = Object.defineProperty) && Object.getOwnPropertyDescriptor(e, t)) && (i.get || i.set) ? o(f, t, i) : f[t] = e[t]); return f; })(e, t); }
|
||||
// Babel helper: wraps a CommonJS export in `{ default: ... }` unless it is
// already an ES-module namespace (marked by `__esModule`).
function _interopRequireDefault(e) {
  if (e && e.__esModule) {
    return e;
  }
  return { default: e };
}
|
||||
/**
|
||||
* Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*
|
||||
*/
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/consistent-type-imports
|
||||
|
||||
// After a test file has finished, replace the buffered console's internal
// `_log` so any late (post-completion) logging prints a helpful error to
// stderr and marks the whole run as failed via `process.exitCode = 1`.
function freezeConsole(testConsole, config) {
  // @ts-expect-error: `_log` is `private` - we should figure out some proper API here
  testConsole._log = function fakeConsolePush(_type, message) {
    // ErrorWithStack captures the stack at the late logging call site.
    const error = new (_jestUtil().ErrorWithStack)(`${_chalk().default.red(`${_chalk().default.bold('Cannot log after tests are done.')} Did you forget to wait for something async in your test?`)}\nAttempted to log "${message}".`, fakeConsolePush);
    const formattedError = (0, _jestMessageUtil().formatExecError)(error, config, {
      noStackTrace: false
    }, undefined, true);
    process.stderr.write(`\n${formattedError}\n`);
    // Fail the run without aborting the process immediately.
    process.exitCode = 1;
  };
}
|
||||
|
||||
// Keeping the core of "runTest" as a separate function (as "runTestInternal")
|
||||
// is key to be able to detect memory leaks. Since all variables are local to
|
||||
// the function, when "runTestInternal" finishes its execution, they can all be
|
||||
// freed, UNLESS something else is leaking them (and that's why we can detect
|
||||
// the leak!).
|
||||
//
|
||||
// If we had all the code in a single function, we should manually nullify all
|
||||
// references to verify if there is a leak, which is not maintainable and error
|
||||
// prone. That's why "runTestInternal" CANNOT be inlined inside "runTest".
|
||||
/**
 * Loads, configures and executes a single test file in a fresh test
 * environment and runtime, then collects its results.
 *
 * @param path test file path
 * @param globalConfig Jest global configuration
 * @param projectConfig per-project configuration
 * @param resolver module resolver built in `setup()`
 * @param context worker context (changed files, coverage sources)
 * @param sendMessageToJest callback relaying events to the parent process
 * @returns `{ leakDetector, result }` — detector is null unless
 *   `projectConfig.detectLeaks` is set
 */
async function runTestInternal(path, globalConfig, projectConfig, resolver, context, sendMessageToJest) {
  const testSource = fs().readFileSync(path, 'utf8');
  // Docblock pragmas (e.g. `@jest-environment`) can override project config.
  const docblockPragmas = docblock().parse(docblock().extract(testSource));
  const customEnvironment = docblockPragmas['jest-environment'];
  const loadTestEnvironmentStart = Date.now();
  let testEnvironment = projectConfig.testEnvironment;
  if (customEnvironment) {
    if (Array.isArray(customEnvironment)) {
      throw new TypeError(`You can only define a single test environment through docblocks, got "${customEnvironment.join(', ')}"`);
    }
    testEnvironment = (0, _jestResolve().resolveTestEnvironment)({
      ...projectConfig,
      // we wanna avoid webpack trying to be clever
      requireResolveFunction: module => require.resolve(module),
      testEnvironment: customEnvironment
    });
  }
  // Pre-seed the transformer's file cache with the test source already read.
  const cacheFS = new Map([[path, testSource]]);
  const transformer = await (0, _transform().createScriptTransformer)(projectConfig, cacheFS);
  const TestEnvironment = await transformer.requireAndTranspileModule(testEnvironment);
  const testFramework = await transformer.requireAndTranspileModule(process.env.JEST_JASMINE === '1' ? require.resolve('jest-jasmine2') : projectConfig.testRunner);
  const Runtime = (0, _jestUtil().interopRequireDefault)(projectConfig.runtime ? require(projectConfig.runtime) : require('jest-runtime')).default;
  const consoleOut = globalConfig.useStderr ? process.stderr : process.stdout;
  const consoleFormatter = (type, message) => (0, _console().getConsoleOutput)(
  // 4 = the console call is buried 4 stack frames deep
  _console().BufferedConsole.write([], type, message, 4), projectConfig, globalConfig);
  // Console choice: silent -> discard, verbose -> stream, default -> buffer.
  let testConsole;
  if (globalConfig.silent) {
    testConsole = new (_console().NullConsole)(consoleOut, consoleOut, consoleFormatter);
  } else if (globalConfig.verbose) {
    testConsole = new (_console().CustomConsole)(consoleOut, consoleOut, consoleFormatter);
  } else {
    testConsole = new (_console().BufferedConsole)();
  }
  let extraTestEnvironmentOptions;
  const docblockEnvironmentOptions = docblockPragmas['jest-environment-options'];
  if (typeof docblockEnvironmentOptions === 'string') {
    extraTestEnvironmentOptions = JSON.parse(docblockEnvironmentOptions);
  }
  const environment = new TestEnvironment({
    globalConfig,
    projectConfig: extraTestEnvironmentOptions ? {
      ...projectConfig,
      testEnvironmentOptions: {
        ...projectConfig.testEnvironmentOptions,
        ...extraTestEnvironmentOptions
      }
    } : projectConfig
  }, {
    console: testConsole,
    docblockPragmas,
    testPath: path
  });
  const loadTestEnvironmentEnd = Date.now();
  if (typeof environment.getVmContext !== 'function') {
    console.error(`Test environment found at "${testEnvironment}" does not export a "getVmContext" method, which is mandatory from Jest 27. This method is a replacement for "runScript".`);
    process.exit(1);
  }
  const leakDetector = projectConfig.detectLeaks ? new (_jestLeakDetector().default)(environment) : null;
  (0, _jestUtil().setGlobal)(environment.global, 'console', testConsole, 'retain');
  const runtime = new Runtime(projectConfig, environment, resolver, transformer, cacheFS, {
    changedFiles: context.changedFiles,
    collectCoverage: globalConfig.collectCoverage,
    collectCoverageFrom: globalConfig.collectCoverageFrom,
    coverageProvider: globalConfig.coverageProvider,
    sourcesRelatedToTestsInChangedFiles: context.sourcesRelatedToTestsInChangedFiles
  }, path, globalConfig);
  // Teardown is idempotent: it may run on both the success and finally paths.
  let isTornDown = false;
  const tearDownEnv = async () => {
    if (!isTornDown) {
      runtime.teardown();

      // source-map-support keeps memory leftovers in `Error.prepareStackTrace`
      (0, _nodeVm().runInContext)("Error.prepareStackTrace = () => '';", environment.getVmContext());
      sourcemapSupport().resetRetrieveHandlers();
      try {
        await environment.teardown();
      } finally {
        isTornDown = true;
      }
    }
  };
  const start = Date.now();
  const setupFilesStart = Date.now();
  for (const path of projectConfig.setupFiles) {
    const esm = runtime.unstable_shouldLoadAsEsm(path);
    if (esm) {
      await runtime.unstable_importModule(path);
    } else {
      const setupFile = runtime.requireModule(path);
      if (typeof setupFile === 'function') {
        await setupFile();
      }
    }
  }
  const setupFilesEnd = Date.now();
  const sourcemapOptions = {
    environment: 'node',
    handleUncaughtExceptions: false,
    retrieveSourceMap: source => {
      const sourceMapSource = runtime.getSourceMaps()?.get(source);
      if (sourceMapSource) {
        try {
          return {
            map: JSON.parse(fs().readFileSync(sourceMapSource, 'utf8')),
            url: source
          };
        } catch {}
      }
      return null;
    }
  };

  // For tests
  runtime.requireInternalModule(require.resolve('source-map-support')).install(sourcemapOptions);

  // For runtime errors
  sourcemapSupport().install(sourcemapOptions);
  // Intercept in-test `process.exit` so it fails loudly instead of killing
  // the worker silently.
  if (environment.global && environment.global.process && environment.global.process.exit) {
    const realExit = environment.global.process.exit;
    environment.global.process.exit = function exit(...args) {
      const error = new (_jestUtil().ErrorWithStack)(`process.exit called with "${args.join(', ')}"`, exit);
      const formattedError = (0, _jestMessageUtil().formatExecError)(error, projectConfig, {
        noStackTrace: false
      }, undefined, true);
      process.stderr.write(formattedError);
      return realExit(...args);
    };
  }

  // if we don't have `getVmContext` on the env skip coverage
  const collectV8Coverage = globalConfig.collectCoverage && globalConfig.coverageProvider === 'v8' && typeof environment.getVmContext === 'function';

  // Node's error-message stack size is limited at 10, but it's pretty useful
  // to see more than that when a test fails.
  Error.stackTraceLimit = 100;
  try {
    await environment.setup();
    let result;
    try {
      if (collectV8Coverage) {
        await runtime.collectV8Coverage();
      }
      result = await testFramework(globalConfig, projectConfig, environment, runtime, path, sendMessageToJest);
    } catch (error) {
      // Access all stacks before uninstalling sourcemaps
      let e = error;
      while (typeof e === 'object' && e !== null && 'stack' in e) {
        // eslint-disable-next-line @typescript-eslint/no-unused-expressions
        e.stack;
        e = e?.cause;
      }
      throw error;
    } finally {
      if (collectV8Coverage) {
        await runtime.stopCollectingV8Coverage();
      }
    }
    freezeConsole(testConsole, projectConfig);
    const testCount = result.numPassingTests + result.numFailingTests + result.numPendingTests + result.numTodoTests;
    const end = Date.now();
    const testRuntime = end - start;
    result.perfStats = {
      ...result.perfStats,
      end,
      loadTestEnvironmentEnd,
      loadTestEnvironmentStart,
      runtime: testRuntime,
      setupFilesEnd,
      setupFilesStart,
      slow: testRuntime / 1000 > projectConfig.slowTestThreshold,
      start
    };
    result.testFilePath = path;
    result.console = testConsole.getBuffer();
    result.skipped = testCount === result.numPendingTests;
    result.displayName = projectConfig.displayName;
    const coverage = runtime.getAllCoverageInfoCopy();
    if (coverage) {
      const coverageKeys = Object.keys(coverage);
      if (coverageKeys.length > 0) {
        result.coverage = coverage;
      }
    }
    if (collectV8Coverage) {
      const v8Coverage = runtime.getAllV8CoverageInfoCopy();
      if (v8Coverage && v8Coverage.length > 0) {
        result.v8Coverage = v8Coverage;
      }
    }
    if (globalConfig.logHeapUsage) {
      globalThis.gc?.();
      result.memoryUsage = process.memoryUsage().heapUsed;
    }
    await tearDownEnv();

    // Delay the resolution to allow log messages to be output.
    return await new Promise(resolve => {
      setImmediate(() => resolve({
        leakDetector,
        result
      }));
    });
  } finally {
    // Safety net: also tears down when the framework threw (no-op if the
    // success path already ran it — see `isTornDown`).
    await tearDownEnv();
  }
}
|
||||
// Runs one test file via `runTestInternal`, then resolves the leak-detector
// verdict outside that closure so leaked locals can actually be observed.
async function runTest(path, globalConfig, config, resolver, context, sendMessageToJest) {
  const internal = await runTestInternal(path, globalConfig, config, resolver, context, sendMessageToJest);
  const { leakDetector, result } = internal;
  if (leakDetector == null) {
    result.leaks = false;
  } else {
    // We wanna allow a tiny bit of time to pass to allow last-minute cleanup
    await new Promise(done => setTimeout(done, 100));

    // Resolve leak detector, outside the "runTestInternal" closure.
    result.leaks = await leakDetector.isLeaking();
  }
  return result;
}
|
||||
|
||||
/***/ })
|
||||
|
||||
/******/ });
|
||||
/************************************************************************/
|
||||
/******/ // The module cache
|
||||
/******/ var __webpack_module_cache__ = {};
|
||||
/******/
|
||||
/******/ // The require function
|
||||
/******/ 	// Resolves `moduleId` against `__webpack_modules__`, caching each
/******/ 	// module's exports so a factory runs at most once (CommonJS
/******/ 	// `require` semantics inside the bundle).
/******/ 	function __webpack_require__(moduleId) {
/******/ 		// Check if module is in cache
/******/ 		var cachedModule = __webpack_module_cache__[moduleId];
/******/ 		if (cachedModule !== undefined) {
/******/ 			return cachedModule.exports;
/******/ 		}
/******/ 		// Create a new module (and put it into the cache)
/******/ 		// Note: cached BEFORE execution so circular imports see a
/******/ 		// partially-initialized module instead of recursing forever.
/******/ 		var module = __webpack_module_cache__[moduleId] = {
/******/ 			// no module.id needed
/******/ 			// no module.loaded needed
/******/ 			exports: {}
/******/ 		};
/******/
/******/ 		// Execute the module function
/******/ 		__webpack_modules__[moduleId](module, module.exports, __webpack_require__);
/******/
/******/ 		// Return the exports of the module
/******/ 		return module.exports;
/******/ 	}
|
||||
/******/
|
||||
/************************************************************************/
|
||||
var __webpack_exports__ = {};
|
||||
// This entry needs to be wrapped in an IIFE because it uses a non-standard name for the exports (exports).
|
||||
(() => {
|
||||
var exports = __webpack_exports__;
|
||||
|
||||
|
||||
Object.defineProperty(exports, "__esModule", ({
|
||||
value: true
|
||||
}));
|
||||
exports.setup = setup;
|
||||
exports.worker = worker;
|
||||
// Lazy, memoized loaders for the worker entry module's dependencies.
// Each one requires its module on first use and caches it by replacing
// itself with a closure over the loaded module.
function _exitX() {
  const mod = _interopRequireDefault(require("exit-x"));
  _exitX = () => mod;
  return mod;
}
function _jestHasteMap() {
  const mod = _interopRequireDefault(require("jest-haste-map"));
  _jestHasteMap = () => mod;
  return mod;
}
function _jestMessageUtil() {
  const mod = require("jest-message-util");
  _jestMessageUtil = () => mod;
  return mod;
}
function _jestRuntime() {
  const mod = _interopRequireDefault(require("jest-runtime"));
  _jestRuntime = () => mod;
  return mod;
}
function _jestWorker() {
  const mod = require("jest-worker");
  _jestWorker = () => mod;
  return mod;
}
|
||||
var _runTest = _interopRequireDefault(__webpack_require__("./src/runTest.ts"));
|
||||
// Babel helper: normalizes a CommonJS export to an ES-module-like shape.
function _interopRequireDefault(e) {
  if (e && e.__esModule) {
    return e;
  }
  return { default: e };
}
|
||||
/**
|
||||
* Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
*
|
||||
* This source code is licensed under the MIT license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*
|
||||
*/
|
||||
|
||||
// Make sure uncaught errors are logged before we exit.
|
||||
// Make sure uncaught errors are logged before the worker exits.
process.on('uncaughtException', err => {
  const printable = err.stack ? err.stack : err;
  console.error(printable);
  (0, _exitX().default)(1);
});
|
||||
// Normalizes anything thrown during a test run into a plain, serializable
// object that can safely cross the worker IPC boundary.
const formatError = error => {
  if (typeof error !== 'string') {
    return {
      code: error.code || undefined,
      message: error.message,
      stack: error.stack,
      type: 'Error'
    };
  }
  const parsed = (0, _jestMessageUtil().separateMessageFromStack)(error);
  return {
    message: parsed.message,
    stack: parsed.stack,
    type: 'Error'
  };
};
|
||||
// Resolver registry, keyed by project-config id; populated by `setup()`.
const resolvers = new Map();

// Returns the resolver registered for `config`. Throws when `setup()` has
// not installed one for this project yet.
const getResolver = config => {
  const found = resolvers.get(config.id);
  if (found) {
    return found;
  }
  throw new Error(`Cannot find resolver for: ${config.id}`);
};
|
||||
// Worker initialization: rebuilds one module resolver per project from the
// serialized haste module maps handed over by the parent process.
function setup(setupData) {
  for (const entry of setupData.serializableResolvers) {
    const { config, serializableModuleMap } = entry;
    const moduleMap = _jestHasteMap()
      .default.getStatic(config)
      .getModuleMapFromJSON(serializableModuleMap);
    resolvers.set(config.id, _jestRuntime().default.createResolver(config, moduleMap));
  }
}
|
||||
// Relays a named event (plus its payload) from this worker to the parent
// Jest process over the jest-worker channel.
const sendMessageToJest = (eventName, args) => {
  const payload = [eventName, args];
  (0, _jestWorker().messageParent)(payload);
};
|
||||
// Worker entry point: runs a single test file and rethrows any failure in a
// serializable form (see `formatError`) so it survives the IPC boundary.
async function worker({
  config,
  globalConfig,
  path,
  context
}) {
  try {
    // Rebuild Sets that were flattened to arrays for transport.
    const runContext = {
      ...context,
      changedFiles: context.changedFiles && new Set(context.changedFiles),
      sourcesRelatedToTestsInChangedFiles: context.sourcesRelatedToTestsInChangedFiles && new Set(context.sourcesRelatedToTestsInChangedFiles)
    };
    return await (0, _runTest.default)(path, globalConfig, config, getResolver(config), runContext, sendMessageToJest);
  } catch (error) {
    throw formatError(error);
  }
}
|
||||
})();
|
||||
|
||||
module.exports = __webpack_exports__;
|
||||
/******/ })()
|
||||
;
|
||||
+13
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env node
/**
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

// CLI shim: defer to a project-local jest installation when one exists.
const importLocal = require('import-local');

// NOTE(review): `import-local` is expected to invoke the locally-installed
// copy of this binary and return truthy when it did so — confirm against the
// import-local documentation.
if (!importLocal(__filename)) {
  require('jest-cli/bin/jest');
}
|
||||
@@ -0,0 +1,285 @@
|
||||
import express from 'express';
|
||||
import { createServer } from 'http';
|
||||
import cookieParser from 'cookie-parser';
|
||||
import helmet from 'helmet';
|
||||
import { AppDataSource } from '../Infrastructure/ormconfig';
|
||||
import userRouter from './routers/userRouter';
|
||||
import organizationRouter from './routers/organizationRouter';
|
||||
import deckRouter from './routers/deckRouter';
|
||||
import chatRouter from './routers/chatRouter';
|
||||
import contactRouter from './routers/contactRouter';
|
||||
import adminRouter from './routers/adminRouter';
|
||||
import deckImportExportRouter from './routers/deckImportExportRouter';
|
||||
import gameRouter from './routers/gameRouter';
|
||||
import { LoggingService, logStartup, logConnection, logError, logRequest } from '../Application/Services/Logger';
|
||||
import { WebSocketService } from '../Application/Services/WebSocketService';
|
||||
import { GameWebSocketService } from '../Application/Services/GameWebSocketService';
|
||||
import { container } from '../Application/Services/DIContainer';
|
||||
import { GameRepository } from '../Infrastructure/Repository/GameRepository';
|
||||
import { UserRepository } from '../Infrastructure/Repository/UserRepository';
|
||||
import { RedisService } from '../Application/Services/RedisService';
|
||||
import { setupSwagger } from './swagger/swaggerUiSetup';
|
||||
|
||||
// HTTP bootstrap: the Express app is wrapped in a raw http server so the
// WebSocket services can attach to the same listener/port.
const app = express();
const httpServer = createServer(app);
const PORT = process.env.PORT || 3000;
const isDevelopment = process.env.NODE_ENV === 'development';

// Singleton logger; also supplies the request/error logging middlewares.
const loggingService = LoggingService.getInstance();

logStartup('SerpentRace Backend starting up', {
  environment: process.env.NODE_ENV || 'development',
  port: PORT,
  nodeVersion: process.version,
  chatInactivityTimeout: process.env.CHAT_INACTIVITY_TIMEOUT_MINUTES || '30'
});

// Security headers. CSP is disabled in development so local tooling
// (e.g. Swagger UI) is not blocked by it.
app.use(helmet({
  contentSecurityPolicy: isDevelopment ? false : undefined
}));

// Body parsers with a raised 10mb limit — presumably for deck import/export
// payloads; confirm which endpoints actually need it.
app.use(express.json({ limit: '10mb' }));
app.use(express.urlencoded({ extended: true, limit: '10mb' }));
app.use(cookieParser());

// Log every request before any routing happens.
app.use(loggingService.requestLoggingMiddleware());
|
||||
|
||||
app.use((req, res, next) => {
|
||||
const origin = req.headers.origin;
|
||||
const allowedOrigins = ['http://localhost:3000', 'http://localhost:3001', 'http://localhost:8080', process.env.FRONTEND_URL];
|
||||
|
||||
if (!origin || allowedOrigins.includes(origin)) {
|
||||
res.setHeader('Access-Control-Allow-Origin', origin || '*');
|
||||
}
|
||||
|
||||
res.setHeader('Access-Control-Allow-Credentials', 'true');
|
||||
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE, PATCH, OPTIONS');
|
||||
res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Authorization, Cookie');
|
||||
|
||||
if (req.method === 'OPTIONS') {
|
||||
res.status(200).end();
|
||||
return;
|
||||
}
|
||||
|
||||
next();
|
||||
});
|
||||
|
||||
// Extra per-request console logging, development only (the structured
// request logging middleware above runs in every environment).
if (isDevelopment) {
  app.use((req, res, next) => {
    logRequest(`${req.method} ${req.path}`, req, res);
    next();
  });
}

// Setup Swagger documentation
setupSwagger(app);
|
||||
|
||||
// Service index: human-readable overview of the API surface.
// NOTE(review): '/api/games' is mounted below but missing from this
// endpoint list — confirm whether that omission is intentional.
app.get('/', (req, res) => {
  res.json({
    service: 'SerpentRace Backend API',
    status: 'running',
    version: '1.0.0',
    endpoints: {
      swagger: '/api-docs',
      users: '/api/users',
      organizations: '/api/organizations',
      decks: '/api/decks',
      chats: '/api/chats',
      contacts: '/api/contacts',
      admin: '/api/admin',
      deckImportExport: '/api/deck-import-export',
      health: '/health'
    },
    websocket: {
      enabled: true,
      events: [
        'chat:join', 'chat:leave', 'message:send',
        'group:create', 'chat:direct', 'game:chat:create',
        'chat:history'
      ]
    }
  });
});
|
||||
|
||||
app.get('/health', async (req, res) => {
|
||||
try {
|
||||
const isDbConnected = AppDataSource.isInitialized;
|
||||
|
||||
res.json({
|
||||
status: 'healthy',
|
||||
timestamp: new Date().toISOString(),
|
||||
service: 'SerpentRace Backend API',
|
||||
version: '1.0.0',
|
||||
environment: process.env.NODE_ENV || 'development',
|
||||
database: {
|
||||
connected: isDbConnected,
|
||||
type: AppDataSource.options.type
|
||||
},
|
||||
websocket: {
|
||||
enabled: true
|
||||
},
|
||||
uptime: process.uptime()
|
||||
});
|
||||
} catch (error) {
|
||||
res.status(503).json({
|
||||
status: 'unhealthy',
|
||||
timestamp: new Date().toISOString(),
|
||||
error: 'Service health check failed'
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// API Routes
app.use('/api/users', userRouter);
app.use('/api/organizations', organizationRouter);
app.use('/api/decks', deckRouter);
app.use('/api/chats', chatRouter);
app.use('/api/contacts', contactRouter);
app.use('/api/admin', adminRouter);
app.use('/api/deck-import-export', deckImportExportRouter);
app.use('/api/games', gameRouter);

// Global error handler (must be after routes)
app.use(loggingService.errorLoggingMiddleware());
// The 4-argument signature is what marks this as Express error middleware;
// `next` must remain in the signature even though it is unused.
app.use((error: Error, req: express.Request, res: express.Response, next: express.NextFunction) => {
  logError('Global error handler caught unhandled error', error, req, res);

  // Don't expose internal error details in production
  const isDevelopment = process.env.NODE_ENV === 'development';

  res.status(500).json({
    error: 'Internal server error',
    timestamp: new Date().toISOString(),
    ...(isDevelopment && { details: error.message, stack: error.stack })
  });
});
|
||||
|
||||
// Handle 404 routes — registered last, so it only runs when no earlier
// route or middleware produced a response.
app.use((req: express.Request, res: express.Response) => {
  res.status(404).json({
    error: 'Route not found',
    path: req.originalUrl,
    method: req.method,
    timestamp: new Date().toISOString()
  });
});
|
||||
|
||||
// Initialize WebSocket service after database connection.
// These stay undefined until the AppDataSource.initialize() chain below
// succeeds — code touching them must tolerate that (see gracefulShutdown).
let webSocketService: WebSocketService;
let gameWebSocketService: GameWebSocketService;
let server: any; // Declare server variable; assigned once httpServer.listen() runs
|
||||
|
||||
// Initialize database connection and start server.
// Ordering matters: WebSocket services are created and the HTTP listener is
// opened only after the database is ready, so no request can arrive before
// its dependencies exist.
AppDataSource.initialize()
  .then(() => {
    const dbOptions = AppDataSource.options as any;
    logConnection('Database connection established', 'postgresql', 'success', {
      type: dbOptions.type,
      host: dbOptions.host,
      database: dbOptions.database
    });

    // Initialize WebSocket service after database is connected
    webSocketService = new WebSocketService(httpServer);
    logStartup('WebSocket service initialized', {
      chatInactivityTimeout: process.env.CHAT_INACTIVITY_TIMEOUT_MINUTES || '30'
    });

    // Initialize Game WebSocket service for /game namespace via DIContainer.
    // NOTE(review): bracket access reaches into the private `io` field of
    // WebSocketService — consider exposing a proper getter instead.
    container.setSocketIO(webSocketService['io']);
    gameWebSocketService = container.gameWebSocketService;
    logStartup('Game WebSocket service initialized for /game namespace');

    // Restore active games from snapshots (if any exist).
    // Deliberately not awaited: restoration runs in the background while the
    // server starts; failures are logged, not fatal.
    gameWebSocketService.restoreAllActiveGames()
      .then(restoredCount => {
        if (restoredCount > 0) {
          logStartup(`Restored ${restoredCount} active game(s) from snapshots`);
        }
      })
      .catch(error => {
        logError('Failed to restore games from snapshots', error);
      });

    // Start server with WebSocket support AFTER database is ready
    server = httpServer.listen(PORT, () => {
      logStartup('Server started successfully', {
        port: PORT,
        environment: process.env.NODE_ENV || 'development',
        timestamp: new Date().toISOString(),
        endpoints: {
          health: `/health`,
          swagger: `/api-docs`,
          users: `/api/users`,
          organizations: `/api/organizations`,
          decks: `/api/decks`,
          chats: `/api/chats`
        },
        websocket: {
          enabled: true,
          chatInactivityTimeout: `${process.env.CHAT_INACTIVITY_TIMEOUT_MINUTES || '30'} minutes`
        }
      });
    });
  })
  .catch((error) => {
    const dbOptions = AppDataSource.options as any;
    logConnection('Database connection failed', 'postgresql', 'failure', {
      error: error.message,
      type: dbOptions.type,
      host: dbOptions.host,
      database: dbOptions.database
    });
    // Fatal: the API cannot serve anything without its database.
    process.exit(1);
  });
|
||||
|
||||
// Graceful shutdown
|
||||
const gracefulShutdown = async (signal: string) => {
|
||||
logStartup(`Received ${signal}. Shutting down gracefully...`);
|
||||
|
||||
// Snapshot all active games before shutdown
|
||||
if (gameWebSocketService) {
|
||||
try {
|
||||
const snapshotCount = await gameWebSocketService.snapshotAllActiveGames();
|
||||
logStartup(`Created ${snapshotCount} game snapshot(s) before shutdown`);
|
||||
} catch (error) {
|
||||
logError('Failed to snapshot games before shutdown', error as Error);
|
||||
}
|
||||
}
|
||||
|
||||
server.close(() => {
|
||||
logStartup('HTTP server closed');
|
||||
|
||||
if (AppDataSource.isInitialized) {
|
||||
AppDataSource.destroy()
|
||||
.then(() => {
|
||||
logConnection('Database connection closed', 'postgresql', 'success');
|
||||
process.exit(0);
|
||||
})
|
||||
.catch((error) => {
|
||||
logError('Error during database shutdown', error);
|
||||
process.exit(1);
|
||||
});
|
||||
} else {
|
||||
process.exit(0);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
// Trigger the graceful path on the standard termination signals.
// NOTE(review): LoggingService's constructor also registers its own
// SIGTERM/SIGINT handlers (see LoggingService.ts); both sets run.
process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
process.on('SIGINT', () => gracefulShutdown('SIGINT'));

// Handle uncaught exceptions — fail fast rather than run in an unknown state.
process.on('uncaughtException', (error) => {
  logError('Uncaught Exception - Server will shut down', error);
  process.exit(1);
});

// Handle unhandled promise rejections the same way; `reason` may be any
// value, so it is wrapped in an Error for the logger.
process.on('unhandledRejection', (reason, promise) => {
  logError('Unhandled Rejection - Server will shut down', new Error(String(reason)), undefined, undefined);
  process.exit(1);
});

// Export WebSocket services for game integration
export { webSocketService, gameWebSocketService };
|
||||
@@ -0,0 +1,418 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import * as Minio from 'minio';
|
||||
|
||||
// Category tag attached to every log entry; serialized as its string value.
export enum LogLevel {
  REQUEST = 'REQUEST',
  ERROR = 'ERROR',
  WARNING = 'WARNING',
  AUTH = 'AUTH',
  DATABASE = 'DATABASE',
  STARTUP = 'STARTUP',
  CONNECTION = 'CONNECTION',
  OTHER = 'OTHER'
}
||||
|
||||
// Shape of a single structured log record.
// The HTTP-related fields are optional because only request/error logs
// carry them.
export interface LogEntry {
  timestamp: string;      // presumably an ISO-8601 string — confirm producer
  level: LogLevel;
  message: string;
  metadata?: any;         // free-form structured context
  requestId?: string;
  userId?: string;
  ip?: string;
  userAgent?: string;
  method?: string;        // HTTP method, for request-scoped entries
  url?: string;
  statusCode?: number;
  responseTime?: number;  // presumably milliseconds — confirm producer
}
|
||||
|
||||
export class LoggingService {
|
||||
private static instance: LoggingService;
|
||||
private minioClient: Minio.Client | null = null;
|
||||
private logBuffer: LogEntry[] = [];
|
||||
private currentLogFile: string | null = null;
|
||||
private logCount = 0;
|
||||
private readonly maxLogsPerFile = parseInt(process.env.MAX_LOGS_PER_FILE || '10000');
|
||||
private readonly logsDir = path.join(process.cwd(), 'logs');
|
||||
private readonly bucketName = process.env.MINIO_BUCKET_NAME || 'serpentrace-logs';
|
||||
private uploadInterval: NodeJS.Timeout | null = null;
|
||||
|
||||
private constructor() {
|
||||
this.initializeLogsDirectory();
|
||||
this.initializeMinioClient();
|
||||
this.createNewLogFile();
|
||||
|
||||
if (process.env.NODE_ENV !== 'test') {
|
||||
this.startPeriodicUpload();
|
||||
}
|
||||
|
||||
process.on('SIGTERM', () => this.shutdown());
|
||||
process.on('SIGINT', () => this.shutdown());
|
||||
process.on('beforeExit', () => this.shutdown());
|
||||
}
|
||||
|
||||
static getInstance(): LoggingService {
|
||||
if (!LoggingService.instance) {
|
||||
LoggingService.instance = new LoggingService();
|
||||
}
|
||||
return LoggingService.instance;
|
||||
}
|
||||
|
||||
private initializeLogsDirectory(): void {
|
||||
try {
|
||||
if (!fs.existsSync(this.logsDir)) {
|
||||
fs.mkdirSync(this.logsDir, { recursive: true });
|
||||
}
|
||||
|
||||
// Create monthly subdirectory
|
||||
const monthlyDir = this.getMonthlyDirectory();
|
||||
if (!fs.existsSync(monthlyDir)) {
|
||||
fs.mkdirSync(monthlyDir, { recursive: true });
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to initialize logs directory:', error);
|
||||
}
|
||||
}
|
||||
|
||||
private initializeMinioClient(): void {
|
||||
try {
|
||||
// Check if in production or development
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
if (process.env.MINIO_ENDPOINT && process.env.MINIO_ACCESS_KEY && process.env.MINIO_SECRET_KEY) {
|
||||
this.minioClient = new Minio.Client({
|
||||
endPoint: process.env.MINIO_ENDPOINT,
|
||||
port: parseInt(process.env.MINIO_PORT || '9000'),
|
||||
useSSL: process.env.MINIO_USE_SSL === 'true',
|
||||
accessKey: process.env.MINIO_ACCESS_KEY,
|
||||
secretKey: process.env.MINIO_SECRET_KEY
|
||||
});
|
||||
|
||||
this.ensureBucketExists();
|
||||
} else {
|
||||
console.warn('Minio configuration not found. Logs will only be stored locally and in console.');
|
||||
}
|
||||
} else {
|
||||
// Development mode - only use MinIO if explicitly configured
|
||||
if (process.env.MINIO_ENDPOINT || process.env.ENABLE_MINIO === 'true') {
|
||||
this.minioClient = new Minio.Client({
|
||||
endPoint: process.env.MINIO_ENDPOINT || 'localhost',
|
||||
port: parseInt(process.env.MINIO_PORT || '9000'),
|
||||
useSSL: false,
|
||||
accessKey: process.env.MINIO_ACCESS_KEY || 'serpentrace',
|
||||
secretKey: process.env.MINIO_SECRET_KEY || 'serpentrace123!'
|
||||
});
|
||||
|
||||
this.ensureBucketExists();
|
||||
} else {
|
||||
console.log('Development mode: MinIO disabled. Set ENABLE_MINIO=true to enable MinIO logging.');
|
||||
this.minioClient = null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
} catch (error) {
|
||||
console.error('Failed to initialize Minio client:', error);
|
||||
this.minioClient = null;
|
||||
}
|
||||
}
|
||||
|
||||
private async ensureBucketExists(): Promise<void> {
|
||||
if (!this.minioClient) return;
|
||||
|
||||
try {
|
||||
const exists = await this.minioClient.bucketExists(this.bucketName);
|
||||
if (!exists) {
|
||||
await this.minioClient.makeBucket(this.bucketName);
|
||||
this.log(LogLevel.STARTUP, `Created Minio bucket: ${this.bucketName}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('MinIO connection failed - disabling MinIO logging:', (error as Error).message);
|
||||
// Disable MinIO client if connection fails
|
||||
this.minioClient = null;
|
||||
}
|
||||
}
|
||||
|
||||
private startPeriodicUpload(): void {
|
||||
// Upload current log file to Minio every 2 minutes
|
||||
this.uploadInterval = setInterval(async () => {
|
||||
if (this.currentLogFile && this.minioClient) {
|
||||
await this.uploadToMinio(this.currentLogFile);
|
||||
}
|
||||
}, 2 * 60 * 1000); // 2 minutes
|
||||
}
|
||||
|
||||
private getMonthlyDirectory(): string {
|
||||
const now = new Date();
|
||||
const year = now.getFullYear();
|
||||
const month = String(now.getMonth() + 1).padStart(2, '0');
|
||||
return path.join(this.logsDir, `${year}-${month}`);
|
||||
}
|
||||
|
||||
private getMonthlyMinioPrefix(): string {
|
||||
const now = new Date();
|
||||
const year = now.getFullYear();
|
||||
const month = String(now.getMonth() + 1).padStart(2, '0');
|
||||
return `${year}-${month}/`;
|
||||
}
|
||||
|
||||
private createNewLogFile(): void {
|
||||
const now = new Date();
|
||||
const timestamp = now.toISOString().replace(/[:.]/g, '-');
|
||||
const fileName = `serpentrace-${timestamp}.log`;
|
||||
|
||||
this.currentLogFile = path.join(this.getMonthlyDirectory(), fileName);
|
||||
this.logCount = 0;
|
||||
|
||||
// Write log file header
|
||||
const header = `# SerpentRace Backend Logs\n# Started: ${now.toISOString()}\n# Max entries per file: ${this.maxLogsPerFile}\n\n`;
|
||||
try {
|
||||
fs.writeFileSync(this.currentLogFile, header);
|
||||
} catch (error) {
|
||||
console.error('Failed to create log file:', error);
|
||||
}
|
||||
}
|
||||
|
||||
private formatLogEntry(entry: LogEntry): string {
|
||||
const parts = [
|
||||
entry.timestamp,
|
||||
`[${entry.level}]`,
|
||||
entry.message
|
||||
];
|
||||
|
||||
if (entry.requestId) parts.push(`ReqId:${entry.requestId}`);
|
||||
if (entry.userId) parts.push(`UserId:${entry.userId}`);
|
||||
if (entry.ip) parts.push(`IP:${entry.ip}`);
|
||||
if (entry.method && entry.url) parts.push(`${entry.method} ${entry.url}`);
|
||||
if (entry.statusCode) parts.push(`Status:${entry.statusCode}`);
|
||||
if (entry.responseTime) parts.push(`Time:${entry.responseTime}ms`);
|
||||
if (entry.userAgent) parts.push(`UA:${entry.userAgent.substring(0, 50)}`);
|
||||
if (entry.metadata) parts.push(`Meta:${JSON.stringify(entry.metadata)}`);
|
||||
|
||||
return parts.join(' | ');
|
||||
}
|
||||
|
||||
private async writeToLocalFile(entry: LogEntry): Promise<void> {
|
||||
if (!this.currentLogFile) return;
|
||||
|
||||
try {
|
||||
const logLine = this.formatLogEntry(entry) + '\n';
|
||||
fs.appendFileSync(this.currentLogFile, logLine);
|
||||
|
||||
this.logCount++;
|
||||
|
||||
// Check if we need to rotate the log file
|
||||
if (this.logCount >= this.maxLogsPerFile) {
|
||||
await this.rotateLogFile();
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to write to log file:', error);
|
||||
}
|
||||
}
|
||||
|
||||
private async rotateLogFile(): Promise<void> {
|
||||
if (!this.currentLogFile) return;
|
||||
|
||||
try {
|
||||
// Upload current file to Minio before rotating
|
||||
await this.uploadToMinio(this.currentLogFile);
|
||||
|
||||
// Create new log file
|
||||
this.createNewLogFile();
|
||||
|
||||
this.log(LogLevel.OTHER, 'Log file rotated due to size limit');
|
||||
} catch (error) {
|
||||
console.error('Failed to rotate log file:', error);
|
||||
}
|
||||
}
|
||||
|
||||
private async uploadToMinio(filePath: string): Promise<void> {
|
||||
if (!this.minioClient) {
|
||||
console.warn('Minio client not initialized, skipping upload');
|
||||
return;
|
||||
}
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
console.warn(`Log file does not exist: ${filePath}`);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const fileName = path.basename(filePath);
|
||||
const objectName = this.getMonthlyMinioPrefix() + fileName;
|
||||
|
||||
console.log(`Attempting to upload log file to Minio: ${objectName}`);
|
||||
await this.minioClient.fPutObject(this.bucketName, objectName, filePath);
|
||||
console.log(`Successfully uploaded log file to Minio: ${objectName}`);
|
||||
} catch (error) {
|
||||
console.error('Failed to upload to Minio:', error);
|
||||
console.error('Minio config:', {
|
||||
endpoint: this.minioClient ? 'configured' : 'not configured',
|
||||
bucket: this.bucketName
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private logToConsole(entry: LogEntry): void {
|
||||
// In production, skip OTHER, CONNECTION, and REQUEST logs
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
if (entry.level === LogLevel.OTHER ||
|
||||
entry.level === LogLevel.REQUEST) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const formattedEntry = this.formatLogEntry(entry);
|
||||
|
||||
switch (entry.level) {
|
||||
case LogLevel.ERROR:
|
||||
console.error(formattedEntry);
|
||||
break;
|
||||
case LogLevel.WARNING:
|
||||
console.warn(formattedEntry);
|
||||
break;
|
||||
case LogLevel.REQUEST:
|
||||
case LogLevel.AUTH:
|
||||
case LogLevel.DATABASE:
|
||||
case LogLevel.CONNECTION:
|
||||
console.info(formattedEntry);
|
||||
break;
|
||||
case LogLevel.STARTUP:
|
||||
console.log(formattedEntry);
|
||||
break;
|
||||
default:
|
||||
console.log(formattedEntry);
|
||||
}
|
||||
}
|
||||
|
||||
public log(
|
||||
level: LogLevel,
|
||||
message: string,
|
||||
metadata?: any,
|
||||
req?: Request,
|
||||
res?: Response,
|
||||
responseTime?: number
|
||||
): void {
|
||||
// In production, skip OTHER, CONNECTION, and REQUEST logs entirely
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
if (level === LogLevel.OTHER ||
|
||||
level === LogLevel.CONNECTION ||
|
||||
level === LogLevel.REQUEST) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const entry: LogEntry = {
|
||||
timestamp: new Date().toISOString(),
|
||||
level,
|
||||
message,
|
||||
metadata
|
||||
};
|
||||
|
||||
// Add request context if available
|
||||
if (req) {
|
||||
entry.requestId = (req as any).requestId || this.generateRequestId();
|
||||
entry.userId = (req as any).user?.userId;
|
||||
entry.ip = req.ip || req.socket?.remoteAddress || 'unknown';
|
||||
entry.userAgent = req.get ? req.get('User-Agent') : 'unknown';
|
||||
entry.method = req.method;
|
||||
entry.url = req.originalUrl || req.url;
|
||||
}
|
||||
|
||||
if (res) {
|
||||
entry.statusCode = res.statusCode;
|
||||
}
|
||||
|
||||
if (responseTime !== undefined) {
|
||||
entry.responseTime = responseTime;
|
||||
}
|
||||
|
||||
// Log to all three destinations
|
||||
this.logToConsole(entry);
|
||||
this.writeToLocalFile(entry);
|
||||
|
||||
// Add to buffer for potential batch processing
|
||||
this.logBuffer.push(entry);
|
||||
|
||||
// Limit buffer size
|
||||
if (this.logBuffer.length > 1000) {
|
||||
this.logBuffer = this.logBuffer.slice(-500);
|
||||
}
|
||||
}
|
||||
|
||||
private generateRequestId(): string {
|
||||
return Math.random().toString(36).substr(2, 9);
|
||||
}
|
||||
|
||||
public async shutdown(): Promise<void> {
|
||||
try {
|
||||
// Clear the upload interval
|
||||
if (this.uploadInterval) {
|
||||
clearInterval(this.uploadInterval);
|
||||
this.uploadInterval = null;
|
||||
}
|
||||
|
||||
// Upload current log file to Minio
|
||||
if (this.currentLogFile) {
|
||||
await this.uploadToMinio(this.currentLogFile);
|
||||
}
|
||||
|
||||
this.log(LogLevel.STARTUP, 'Logging service shutting down gracefully');
|
||||
|
||||
// Give time for final logs to be written
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
} catch (error) {
|
||||
console.error('Error during logging service shutdown:', error);
|
||||
}
|
||||
}
|
||||
|
||||
// Middleware factory methods
|
||||
public requestLoggingMiddleware() {
|
||||
return (req: Request, res: Response, next: NextFunction) => {
|
||||
const startTime = Date.now();
|
||||
|
||||
// Generate request ID
|
||||
(req as any).requestId = this.generateRequestId();
|
||||
|
||||
// Log request start
|
||||
this.log(LogLevel.REQUEST, `Incoming request`, undefined, req);
|
||||
|
||||
// Override res.end to log response
|
||||
const originalEnd = res.end.bind(res);
|
||||
res.end = (...args: any[]): Response => {
|
||||
const responseTime = Date.now() - startTime;
|
||||
LoggingService.getInstance().log(
|
||||
LogLevel.REQUEST,
|
||||
`Request completed`,
|
||||
undefined,
|
||||
req,
|
||||
res,
|
||||
responseTime
|
||||
);
|
||||
return originalEnd(...args);
|
||||
};
|
||||
|
||||
next();
|
||||
};
|
||||
}
|
||||
|
||||
public errorLoggingMiddleware() {
|
||||
return (error: Error, req: Request, res: Response, next: NextFunction) => {
|
||||
this.log(
|
||||
LogLevel.ERROR,
|
||||
`Unhandled error: ${error.message}`,
|
||||
{
|
||||
stack: error.stack,
|
||||
name: error.name
|
||||
},
|
||||
req,
|
||||
res
|
||||
);
|
||||
next(error);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default LoggingService;
|
||||
@@ -0,0 +1,120 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
/* Visit https://aka.ms/tsconfig to read more about this file */
|
||||
|
||||
/* Projects */
|
||||
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
|
||||
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
|
||||
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
|
||||
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
|
||||
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
|
||||
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
|
||||
|
||||
/* Language and Environment */
|
||||
"target": "ES2020", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
|
||||
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
|
||||
// "jsx": "preserve", /* Specify what JSX code is generated. */
|
||||
// "libReplacement": true, /* Enable lib replacement. */
|
||||
"experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
|
||||
"emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
|
||||
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
|
||||
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
|
||||
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
|
||||
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
|
||||
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
|
||||
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
|
||||
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
|
||||
|
||||
/* Modules */
|
||||
"module": "commonjs", /* Specify what module code is generated. */
|
||||
// "rootDir": "./", /* Specify the root folder within your source files. */
|
||||
// "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */
|
||||
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
|
||||
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
|
||||
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
|
||||
// "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
|
||||
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
|
||||
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
|
||||
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
|
||||
// "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
|
||||
// "rewriteRelativeImportExtensions": true, /* Rewrite '.ts', '.tsx', '.mts', and '.cts' file extensions in relative import paths to their JavaScript equivalent in output files. */
|
||||
// "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
|
||||
// "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
|
||||
// "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
|
||||
// "noUncheckedSideEffectImports": true, /* Check side effect imports. */
|
||||
// "resolveJsonModule": true, /* Enable importing .json files. */
|
||||
// "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
|
||||
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
|
||||
|
||||
/* JavaScript Support */
|
||||
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
|
||||
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
|
||||
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
|
||||
|
||||
/* Emit */
|
||||
"declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
|
||||
"declarationMap": true, /* Create sourcemaps for d.ts files. */
|
||||
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
|
||||
"sourceMap": true, /* Create source map files for emitted JavaScript files. */
|
||||
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
|
||||
// "noEmit": true, /* Disable emitting files from a compilation. */
|
||||
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
|
||||
"outDir": "./dist", /* Specify an output folder for all emitted files. */
|
||||
// "removeComments": true, /* Disable emitting comments. */
|
||||
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
|
||||
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
|
||||
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
|
||||
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
|
||||
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
|
||||
// "newLine": "crlf", /* Set the newline character for emitting files. */
|
||||
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
|
||||
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
|
||||
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
|
||||
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
|
||||
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
|
||||
|
||||
/* Interop Constraints */
|
||||
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
|
||||
// "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
|
||||
// "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */
|
||||
// "erasableSyntaxOnly": true, /* Do not allow runtime constructs that are not part of ECMAScript. */
|
||||
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
|
||||
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
|
||||
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
|
||||
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
|
||||
|
||||
/* Type Checking */
|
||||
"strict": true, /* Enable all strict type-checking options. */
|
||||
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
|
||||
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
|
||||
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
|
||||
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
|
||||
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
|
||||
// "strictBuiltinIteratorReturn": true, /* Built-in iterators are instantiated with a 'TReturn' type of 'undefined' instead of 'any'. */
|
||||
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
|
||||
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
|
||||
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
|
||||
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
|
||||
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
|
||||
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
|
||||
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
|
||||
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
|
||||
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
|
||||
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
|
||||
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
|
||||
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
|
||||
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
|
||||
|
||||
/* Completeness */
|
||||
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
|
||||
"skipLibCheck": true /* Skip type checking all .d.ts files. */
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules",
|
||||
"dist"
|
||||
]
|
||||
}
|
||||
Reference in New Issue
Block a user