Compare commits


No commits in common. "master" and "v1.6.0" have entirely different histories.

36 changed files with 4825 additions and 378 deletions

.env

@@ -1,8 +1,5 @@
 PORT=5000
-TRUST_PROXY = 1
-AUTOMAILER_SMTP_SERVICE
-AUTOMAILER_SMTP_HOST
 AUTOMAILER_ID = "mailerID@mailserver.domain"
 AUTOMAILER_APP_PASSWD = "mailerpasswd"

@@ -1,8 +1,5 @@
-DB_USER = your_database_username
-DB_PASSWD = your_database_password
-DB_NAME = your_database_name
-DB_HOST = localhost
-DB_PORT = your_database_port
-DB_DIALECT = your_database_dialect
-CAPTCHA_SECRET = 'your-captcha-secret'
+DB_USERNAME='your_local_db_username'
+DB_PASSWORD='your_local_db_password'
+DB_NAME='your_local_db_name'
+CAPTCHA_SECRET = "6LeIxAcTAAAAAGG-vFI1TnRWxMZNFuojJ4WifJWe"

@@ -1,3 +0,0 @@
-DB_URL = 'your_database_connection_string'
-CAPTCHA_SECRET = 'your-captcha-secret'

.gitignore

@@ -6,101 +6,14 @@ yarn-debug.log*
 yarn-error.log*
 lerna-debug.log*
-# Diagnostic reports (https://nodejs.org/api/report.html)
-report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
-# Runtime data
-pids
-*.pid
-*.seed
-*.pid.lock
-# Directory for instrumented libs generated by jscoverage/JSCover
-lib-cov
-# Coverage directory used by tools like istanbul
-coverage
-*.lcov
-# nyc test coverage
-.nyc_output
-# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
-.grunt
-# Bower dependency directory (https://bower.io/)
-bower_components
-# node-waf configuration
-.lock-wscript
-# Compiled binary addons (https://nodejs.org/api/addons.html)
-build/Release
 # Dependency directories
 node_modules/
-# TypeScript v1 declaration files
-typings/
-# TypeScript cache
-*.tsbuildinfo
-# Optional npm cache directory
-.npm
-# Optional eslint cache
-.eslintcache
-# Microbundle cache
-.rpt2_cache/
-.rts2_cache_cjs/
-.rts2_cache_es/
-.rts2_cache_umd/
-# Optional REPL history
-.node_repl_history
-# Output of 'npm pack'
-*.tgz
-# Yarn Integrity file
-.yarn-integrity
 # dotenv environment variables file
-.env.local
-.env.*.local
+.env.development
+.env.staging
+.env.production
+*.env
-# parcel-bundler cache (https://parceljs.org/)
-.cache
-# Next.js build output
-.next
-# Nuxt.js build / generate output
-.nuxt
-dist
-# Gatsby files
-.cache/
-# Comment in the public line in if your project uses Gatsby and *not* Next.js
-# https://nextjs.org/blog/next-9-1#public-directory-support
-# public
-# vuepress build output
-.vuepress/dist
-# Serverless directories
-.serverless/
-# FuseBox cache
-.fusebox/
-# DynamoDB Local files
-.dynamodb/
-# TernJS port file
-.tern-port
-# SQLite db
-*.db
+# Data files
+*/*.csv

LICENSE

@@ -1,21 +0,0 @@
-MIT License
-Copyright (c) 2024 Kaushik Narayan R
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.

@@ -1,17 +1,21 @@
 # Express-Sequelize backend server template
-## To get started
+### To get started:
-- Clone this repo: `git clone https://github.com/20kaushik02/express-sequelize-backend-template`
+- Clone this repo: `git clone https://gitlab.com/ctf-tech-2023/backend-template`
 - Reset the git remote repo URL: `git remote rm origin`
-- Set new git remote URL: `git remote add origin https://github.com/20kaushik02/<<new-repo-name>>`
+- Set new git remote URL: `git remote add origin https://gitlab.com/ctf-tech-2023/new-repo-name`
 - Remove the template environment files from git alone: `git rm -r --cached *.env*`
-## Project setup
+### Project setup:
 - Edit `package.json` to reflect the new name and URLs
 - Edit `README.md` to reflect project details
 - Run `npm i` to install all dependencies
 - Before running `sequelize-cli` commands while developing, make sure to set `$env:NODE_ENV='development'` on Windows, or `NODE_ENV=development` on Linux/MacOS
-- [See here](https://github.com/kerimdzhanov/dotenv-flow?tab=readme-ov-file#files-under-version-control) for best practices for .env files configuration
+- Env config:
+  - **.env** - All things common to all environments (port, mailer creds, JWT secret, admin data access creds, etc.)
+  - **.env.development** - Development environment (dev captcha secret, dev DB details)
+  - **.env.staging** - Staging environment (dev captcha secret, staging DB conn. string) - **for sysadmins**
+  - **.env.production** - Production environment (production captcha secret, prod DB conn. string) - **for sysadmins**
 - Staging: `npm run staging_prep` and `npm run staging` to deploy on Render after configuring a new web service on Render dashboard
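
A minimal sketch (not part of the diff) of how the layered env files described in the README above are consumed at startup; it assumes dotenv-flow's usual NODE_ENV-based file resolution and the variable names shown in the env diffs earlier.

```js
// Hedged sketch: dotenv-flow layers .env plus .env.<NODE_ENV> on top of each other.
process.env.NODE_ENV = process.env.NODE_ENV || "development";
require("dotenv-flow").config();

console.log(process.env.PORT);        // from .env (settings common to all environments)
console.log(process.env.DB_USERNAME); // from .env.development (dev DB details)
```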

@@ -1 +1 @@
-# Boilerplates - reusable code templates
+## Boilerplates - reusable code templates

@@ -1,6 +1,5 @@
-const logger = require("../utils/logger")(module);
 const typedefs = require("../typedefs");
+const logger = require("../utils/logger")(module);

 /**
  * Business logic to go in these controller functions.
@@ -13,9 +12,8 @@ const __controller_func = async (req, res) => {
   try {

   } catch (error) {
-    res.sendStatus(500);
     logger.error("__controller_func", { error });
-    return;
+    return res.status(500).send({ message: "Server Error. Try again." });
   }
 }

@@ -1,13 +1,14 @@
 const router = require("express").Router();
 const { validate } = require("../validators");
+const { __controller_func } = require("./controller");

 router.get(
   // URL,
   // middleware,
   // validators,
   // validate,
-  // controller
+  // __controller_func
 );

 router.post(
@@ -24,4 +24,5 @@ const __validator_func = async (req, res, next) => {
 module.exports = {
   __validator_func,
-};
+}
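
An illustrative wiring of the boilerplate route → validators → validate → controller chain shown above; the `/example` path, the `exampleValidators` array, and the module paths are placeholders, not files in this diff.

```js
const router = require("express").Router();
const { validate } = require("../validators");
const { exampleValidators } = require("./exampleValidators"); // hypothetical validator chain
const { __controller_func } = require("./controller");

router.get(
  "/example",          // URL
  exampleValidators,   // validators
  validate,            // shared validation-result handler
  __controller_func,   // controller with the business logic
);

module.exports = router;
```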

@@ -1 +1 @@
-# Configuration files and data
+## Configuration files and data

@@ -1,28 +1,23 @@
-const logger = require("../utils/logger")(module);
-
-const connConfigs = {
-  development: {
-    username: process.env.DB_USERNAME || 'postgres',
-    password: process.env.DB_PASSWORD || '',
-    database: process.env.DB_NAME || 'postgres',
-    host: process.env.DB_HOST || '127.0.0.1',
-    port: process.env.DB_PORT || 5432,
-  },
-  staging: {
-    use_env_variable: "DB_URL", // use connection string for non-dev env
-  },
-  production: {
-    use_env_variable: "DB_URL", // use connection string for non-dev env
-    // dialectOptions: {
-    //   ssl: true,
-    // },
-  }
-}
-
-// common config
-for (const conf in connConfigs) {
-  connConfigs[conf]['logging'] = (msg) => logger.debug(msg);
-  connConfigs[conf]['dialect'] = process.env.DB_DIALECT || 'postgres';
-}
-
-module.exports = connConfigs;
+module.exports = {
+  "development": {
+    "username": process.env.DB_USERNAME, // local PostgreSQL DB username
+    "password": process.env.DB_PASSWORD, // local PostgreSQL DB password
+    "host": "127.0.0.1", // localhost
+    "database": process.env.DB_NAME, // local PostgreSQL DB name
+    "dialect": "postgres"
+  },
+  "staging": {
+    "use_env_variable": "DB_URL", // staging database connection string
+    "dialect": "postgres",
+    "dialectOptions": {
+      "ssl": true,
+    },
+  },
+  "production": {
+    "use_env_variable": "DB_URL", // production database connection string
+    "dialect": "postgres",
+    "dialectOptions": {
+      "ssl": true,
+    },
+  }
+}

@@ -1 +1 @@
-# Controllers - business logic functions, end of the API route
+## Controllers - business logic functions, end of the API route

@@ -1,35 +1,23 @@
 require("dotenv-flow").config();
-const util = require('util');

 const express = require("express");
 const cors = require("cors");
 const helmet = require("helmet");

 const logger = require("./utils/logger")(module);

 const app = express();

-// Enable this if you run behind a proxy (e.g. nginx)
-app.set('trust proxy', process.env.TRUST_PROXY);
+app.use(express.json());
+app.use(express.urlencoded({ extended: true }));
 app.use(cors());
 app.use(helmet());
 app.disable("x-powered-by");
-app.use(express.json());
-app.use(express.urlencoded({ extended: true }));
-
-// Static
-app.use(express.static(__dirname + '/static'));

 // Put routes here

-// Fallbacks
-app.use((req, res) => {
-  res.status(200).send("Back-end for");
-  logger.info("Unrecognized URL", { url: req.url });
-  return;
-});
+app.use((_req, res) => {
+  return res.status(200).send("Back-end for");
+});

 const port = process.env.PORT || 5000;
@@ -37,20 +25,3 @@ const port = process.env.PORT || 5000;
 app.listen(port, () => {
   logger.info(`App Listening on port ${port}`);
 });
-const cleanupFunc = (signal) => {
-  if (signal)
-    logger.info(`${signal} signal received, shutting down now...`);
-  Promise.allSettled([
-    // handle DB conn, sockets, etc. here
-    util.promisify(server.close),
-  ]).then(() => {
-    logger.info("Cleaned up, exiting.");
-    process.exit(0);
-  });
-}
-
-['SIGHUP', 'SIGINT', 'SIGQUIT', 'SIGTERM', 'SIGUSR1', 'SIGUSR2'].forEach((signal) => {
-  process.on(signal, () => cleanupFunc(signal));
-});
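
A hypothetical illustration of what the "// Put routes here" spot in the entry file above usually expands to; the `./routes/ping` module and `/api/ping` prefix are placeholders, not part of this diff.

```js
const express = require("express");
const app = express();

// Mount a router module under a URL prefix where the template says "Put routes here".
app.use("/api/ping", require("./routes/ping"));
```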

@@ -1 +1 @@
-# Keys - public/private key pairs for certificate QR signing and verification
+## Keys - public/private key pairs for certificate QR signing and verification

@@ -1 +1 @@
-# Middleware - functionalities that must be in the middle of the API route control flow
+## Middleware - functionalities that must be in the middle of the API route control flow

@@ -1,6 +1,5 @@
-const logger = require("../utils/logger")(module);
 const typedefs = require("../typedefs");
+const logger = require("../utils/logger")(module);

 const creds = JSON.parse(process.env.ADMIN_CREDS);
@@ -12,7 +11,7 @@ const creds = JSON.parse(process.env.ADMIN_CREDS);
  */
 const adminQueryCreds = async (req, res, next) => {
   try {
-    /** @type {any} */
+    /** @type {JSON} */
     const { user, access } = req.query;
     if (creds[user] === access) {
       logger.info("Admin access - " + user);
@@ -21,17 +20,15 @@ const adminQueryCreds = async (req, res, next) => {
     else {
       // we do a bit of trolling here
       const unauthIP = req.headers['x-real-ip'] || req.ip
-      res.status(401).send("Intruder alert. IP address: " + unauthIP);
       logger.warn("Intruder alert.", { ip: unauthIP });
-      return;
+      return res.status(401).send("Intruder alert. IP address: " + unauthIP);
     }
   } catch (error) {
-    res.sendStatus(500);
     logger.error("adminQueryCreds", { error });
-    return;
+    return res.status(500).send({ message: "Server Error. Try again." });
   }
 }

 module.exports = {
   adminQueryCreds,
-};
+}
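
A hypothetical usage of the `adminQueryCreds` middleware above; the route path and the middleware's file path are placeholders, and `ADMIN_CREDS` is assumed to be a JSON map of user to access key as implied by the `JSON.parse` and `creds[user] === access` lines.

```js
// ADMIN_CREDS example value (env): '{"alice":"s3cret"}'
const router = require("express").Router();
const { adminQueryCreds } = require("../middleware/adminAuth"); // assumed path, not from the diff

router.get("/admin/data", adminQueryCreds, (req, res) => {
  // reached when ?user=...&access=... matches an entry in ADMIN_CREDS
  return res.status(200).send({ ok: true });
});

module.exports = router;
```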

@@ -1,8 +1,7 @@
 const fetch = require("cross-fetch");
-const logger = require("../utils/logger")(module);
 const typedefs = require("../typedefs");
+const logger = require("../utils/logger")(module);

 /**
  * Google ReCAPTCHA v2 verification
@@ -20,20 +19,18 @@ const verifyCaptcha = async (req, res, next) => {
     const captchaResp = await fetch(verifyCaptchaURL);
     const captchaData = await captchaResp.json();
     if (captchaData.success !== undefined && !captchaData.success) {
-      res.status(403).send({
+      logger.error("Recaptcha", { captchaData });
+      return res.status(403).send({
         message: "Failed captcha verification"
       });
-      logger.error("Recaptcha", { captchaData });
-      return;
     }
     next();
   } catch (error) {
-    res.sendStatus(500);
-    logger.error("verifyCaptcha", { error });
-    return;
+    logger.error("Error", { error });
+    return res.status(500).send({ message: "Server Error. Try again." });
   }
 }

 module.exports = {
   verifyCaptcha
-};
+}
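
A hypothetical route protected by the `verifyCaptcha` middleware above; how the middleware extracts the client-supplied captcha token is outside the shown hunk, so this only illustrates the wiring. The `/contact` path and the middleware's file path are placeholders.

```js
const router = require("express").Router();
const { verifyCaptcha } = require("../middleware/captcha"); // assumed path, not from the diff

router.post("/contact", verifyCaptcha, (req, res) => {
  // runs only if Google's siteverify response reported success
  return res.status(200).send({ message: "Captcha passed" });
});

module.exports = router;
```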

@@ -1 +1 @@
-# Sequelize migrations folder
+## Database (mainly Sequelize's) migrations

@@ -1 +1 @@
-# Sequelize model schema
+## Database ORM (mainly Sequelize's) models

@@ -8,39 +8,40 @@ const env = process.env.NODE_ENV || "development";
 const config = require(__dirname + "/../config/sequelize.js")[env];
 const db = {};

+// Create new Sequelize instance
 let sequelize;
 if (config.use_env_variable) {
   sequelize = new Sequelize(process.env[config.use_env_variable], config);
 } else {
   sequelize = new Sequelize(config.database, config.username, config.password, config);
 }

-(async () => {
-  try {
-    await sequelize.authenticate();
-    logger.info("Sequelize auth success");
-  } catch (error) {
-    logger.error("Sequelize auth error", { err });
-    throw error;
-  }
-})();
+sequelize.authenticate()
+  .then(
+    () => {
+      logger.info('Sequelize auth success');
+    },
+    (err) => {
+      logger.error('Sequelize auth error', { err });
+    }
+  )

 // Read model definitions from folder
 fs
   .readdirSync(__dirname)
   .filter(file => {
     return (file.indexOf(".") !== 0) && (file !== basename) && (file.slice(-3) === ".js");
   })
   .forEach(file => {
     const model = require(path.join(__dirname, file))(sequelize, Sequelize.DataTypes);
     db[model.name] = model;
   });

 // Setup defined associations
 Object.keys(db).forEach(modelName => {
   if (db[modelName].associate) {
     db[modelName].associate(db);
   }
 });

 db.sequelize = sequelize;
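
A hypothetical consumer of the models index module above; the `User` model and the `./models` folder name are placeholders, since no concrete model files appear in this diff.

```js
const db = require("./models"); // assumed folder name

async function listUsers() {
  // db.sequelize is the shared connection; model classes are attached to db by name
  const users = await db.User.findAll({ limit: 10 });
  return users.map((u) => u.toJSON());
}

module.exports = { listUsers };
```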

package-lock.json (generated, new file)

File diff suppressed because it is too large

@@ -1,7 +1,7 @@
 {
   "name": "backend-template",
-  "version": "2.0.0",
-  "description": "Template for a back-end server using Express and Sequelize.",
+  "version": "1.3.1",
+  "description": "Template for back-end server using Express and Sequelize.",
   "main": "index.js",
   "scripts": {
     "dev": "cross-env NODE_ENV=development nodemon --exitcrash index.js",
@@ -11,36 +11,31 @@
   },
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/20kaushik02/express-sequelize-backend-template.git"
+    "url": "git+https://gitlab.com/ctf-tech-2023/backend-template.git"
   },
   "author": "Kaushik Ravishankar <rknarayan02@gmail.com>",
-  "license": "MIT",
+  "license": "ISC",
   "bugs": {
-    "url": "https://github.com/20kaushik02/express-sequelize-backend-template/issues"
+    "url": "https://gitlab.com/ctf-tech-2023/backend-template/issues"
   },
-  "homepage": "https://github.com/20kaushik02/express-sequelize-backend-template#readme",
+  "homepage": "https://gitlab.com/ctf-tech-2023/backend-template#readme",
   "dependencies": {
-    "archiver": "^7.0.1",
     "cors": "^2.8.5",
-    "cross-fetch": "^4.0.0",
-    "dotenv-flow": "^4.1.0",
-    "express": "^4.18.2",
-    "express-validator": "^7.2.0",
-    "fast-csv": "^5.0.1",
-    "helmet": "^7.1.0",
-    "jsonwebtoken": "^9.0.2",
-    "nodemailer": "^6.9.14",
-    "pg": "^8.12.0",
-    "qrcode": "^1.5.4",
-    "sequelize": "^6.37.3",
-    "winston": "^3.14.1"
+    "cross-fetch": "^3.1.5",
+    "dotenv-flow": "^3.2.0",
+    "express": "^4.18.1",
+    "express-validator": "^6.14.2",
+    "fast-csv": "^4.3.6",
+    "helmet": "^6.0.0",
+    "jsonwebtoken": "^9.0.0",
+    "nodemailer": "^6.8.0",
+    "pg": "^8.8.0",
+    "sequelize": "^6.24.0",
+    "winston": "^3.8.2"
   },
   "devDependencies": {
-    "@types/express": "^4.17.21",
-    "@types/node": "^22.2.0",
     "cross-env": "^7.0.3",
-    "nodemon": "^3.1.4",
-    "sequelize-cli": "^6.6.2",
-    "typescript": "^5.5.4"
+    "nodemon": "^2.0.20",
+    "sequelize-cli": "^6.5.1"
   }
 }

@@ -1 +1 @@
-# Routes - define control flow of the API route
+## Routes - define control flow of the API route

@@ -1 +1 @@
-# Sequelize seeder scripts - initial data feed, for dummy data and testing
+## Database (mainly Sequelize's) seeders - initial data feed, for dummy data and testing

@@ -6,6 +6,12 @@
  * @typedef {import("express").Request} Req
  * @typedef {import("express").Response} Res
  * @typedef {import("express").NextFunction} Next
+ *
+ * @typedef {import("sequelize")} Sequelize
+ * @typedef {import("sequelize").Model} Model
+ * @typedef {import("sequelize").QueryInterface} QueryInterface
+ *
+ * @typedef {import("winston").Logger} Logger
  */

 exports.unused = {};
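
A short illustration of how the typedefs module above is used for editor type hints via JSDoc, as several files in this diff do; the middleware name here is a placeholder.

```js
const typedefs = require("../typedefs"); // path as required by the middleware/controllers in this diff

/**
 * @param {typedefs.Req} req
 * @param {typedefs.Res} res
 * @param {typedefs.Next} next
 */
const exampleMiddleware = (req, res, next) => next();

module.exports = { exampleMiddleware };
```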

@@ -2,8 +2,8 @@ const fs = require("fs");
 const archiver = require('archiver');

 /**
- * @param {string} sourceDir /some/folder/to/compress
- * @param {string} outPath /path/to/created.zip
+ * @param {String} sourceDir: /some/folder/to/compress
+ * @param {String} outPath: /path/to/created.zip
  * @returns {Promise}
  */
 function zipDirectory(sourceDir, outPath) {
@@ -24,4 +24,4 @@ function zipDirectory(sourceDir, outPath) {
 module.exports = {
   zipDirectory,
-};
+}
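
A hypothetical call of the `zipDirectory` helper above, relying only on its documented Promise return; the paths and the module's own path are placeholders.

```js
const { zipDirectory } = require("./utils/zipper"); // assumed module path, not shown in the diff

zipDirectory("/tmp/some-folder", "/tmp/some-folder.zip")
  .then(() => console.log("archive written"))
  .catch((err) => console.error("zip failed", err));
```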

@@ -1,14 +1,12 @@
 /**
  * Returns a timestamp string to use for timestamped files
- * @returns {string} String of current datetime in YYYYMMDDHHMMSS format
+ * @returns {string} String of current datetime in YYYY.MM.DD-HH:MM:SS format
  */
 const dateForFilename = () => {
-  return new Date().
-    toISOString().slice(-24).
-    replace(/\D/g, '').
-    slice(0, 14);
+  const dt = new Date();
+  return `${dt.getFullYear()}-${dt.getMonth() + 1}-${dt.getDate()}-${dt.getHours()}-${dt.getMinutes()}-${dt.getSeconds()}`;
 }

 module.exports = {
   dateForFilename,
-};
+}

@@ -1,9 +1,3 @@
-/**
- * Recursively build a FormData object from a JSON object
- * @param {FormData} formData
- * @param {any} data
- * @param {string} parentKey
- */
 function buildFormData(formData, data, parentKey) {
   if (data && typeof data === 'object' && !(data instanceof Date)) {
     Object.keys(data).forEach(key => {
@@ -16,11 +10,6 @@ function buildFormData(formData, data, parentKey) {
   }
 }

-/**
- * Converts a JSON object to a FormData object
- * @param {any} data
- * @returns {FormData}
- */
 function jsonToFormData(data) {
   const formData = new FormData();
@@ -32,4 +21,4 @@ function jsonToFormData(data) {
 module.exports = {
   jsonToFormData,
   buildFormData,
-};
+}

@@ -1,11 +1,11 @@
 /**
- * Stringifies only values of a JSON object, including nested ones
+ * String joins all the values of a JSON object, including nested keys
  *
  * @param {any} obj JSON object
  * @param {string} delimiter Delimiter of final string
- * @returns {string}
+ * @returns
  */
-const getNestedValuesString = (obj, delimiter = ', ') => {
+const getNestedValuesString = (obj, delimiter) => {
   let values = [];
   for (key in obj) {
     if (typeof obj[key] !== "object") {
@@ -15,9 +15,9 @@ const getNestedValuesString = (obj, delimiter = ', ') => {
     }
   }
-  return values.join(delimiter);
+  return delimiter ? values.join(delimiter) : values.join();
 }

 module.exports = {
   getNestedValuesString
-};
+}
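
A quick illustration of `getNestedValuesString` above on a nested object; it assumes the elided middle of the function recurses into nested objects as the doc comment describes.

```js
const { getNestedValuesString } = require("./utils/jsonTransformer"); // assumed module path

const errs = { email: "must be valid", nested: { password: "too short" } };
console.log(getNestedValuesString(errs, ", "));
// -> "must be valid, too short"
```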

@@ -1,67 +1,60 @@
+// Whole thing is winston logger stuff, if you want to learn read the docs
 const path = require("path");
-const { createLogger, transports, config, format } = require('winston');
-const { combine, label, timestamp, printf, errors } = format;
+const { createLogger, transports, config, format } = require("winston");
+const { combine, label, timestamp, printf } = format;
 const typedefs = require("../typedefs");

 const getLabel = (callingModule) => {
-  if (!callingModule.filename) return "repl";
-  const parts = callingModule.filename?.split(path.sep);
-  return path.join(parts[parts.length - 2], parts.pop());
+  const parts = callingModule.filename.split(path.sep);
+  return path.join(parts[parts.length - 2], parts.pop());
 };

-const allowedErrorKeys = ["name", "code", "message", "stack"];
+const logMetaReplacer = (key, value) => {
+  if (key === "error") {
+    return value.name + ": " + value.message;
+  }
+  return value;
+}

 const metaFormat = (meta) => {
   if (Object.keys(meta).length > 0)
-    return '\n' + JSON.stringify(meta, null, "\t");
-  return '';
+    return "\n" + JSON.stringify(meta, logMetaReplacer) + "\n";
+  return "\n";
 }

 const logFormat = printf(({ level, message, label, timestamp, ...meta }) => {
-  if (meta.error) { // if the error was passed
+  if (meta.error) {
     for (const key in meta.error) {
-      if (!allowedErrorKeys.includes(key)) {
-        delete meta.error[key];
+      if (typeof key !== "symbol" && key !== "message" && key !== "name") {
+        delete meta.error[key]
       }
     }
-    const { stack, ...rest } = meta.error;
-    return `${timestamp} [${label}] ${level}: ${message}${metaFormat(rest)}\n` +
-      `${stack ?? ''}`;
-  }
-  return `${timestamp} [${label}] ${level}: ${message}${metaFormat(meta)}`;
+  }
+  return `${timestamp} [${label}] ${level}: ${message}${metaFormat(meta)}`;
 });

 /**
  * Creates a curried function, and call it with the module in use to get logs with filename
  * @param {typedefs.Module} callingModule The module from which the logger is called
+ * @returns {typedefs.Logger}
  */
-const curriedLogger = (callingModule) => {
-  let winstonLogger = createLogger({
+const logger = (callingModule) => {
+  return createLogger({
     levels: config.npm.levels,
     format: combine(
-      errors({ stack: true }),
-      label({ label: getLabel(callingModule) }),
-      timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
+      label({ label: getLabel(callingModule) }),
+      timestamp({ format: "YYYY-MM-DD HH:mm:ss" }),
       logFormat,
    ),
     transports: [
-      new transports.Console({ level: 'info' }),
-      new transports.File({
-        filename: __dirname + '/../logs/debug.log',
-        level: 'debug',
-        maxsize: 10485760,
-      }),
-      new transports.File({
-        filename: __dirname + '/../logs/error.log',
-        level: 'error',
-        maxsize: 1048576,
-      }),
-    ]
-  });
-  winstonLogger.on('error', (error) => winstonLogger.error("Error inside logger", { error }));
-  return winstonLogger;
+      new transports.Console(),
+      new transports.File({ filename: __dirname + "/../logs/common.log" }),
+      new transports.File({ filename: __dirname + "/../logs/error.log", level: "error" }),
+    ]
+  });
 }

-module.exports = curriedLogger;
+module.exports = logger;
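
Typical usage of the curried logger above, matching how it is required throughout this diff: the exported function is called with `module` so every log line is tagged with the calling file's name.

```js
const logger = require("./utils/logger")(module);

logger.info("App Listening on port 5000");
try {
  throw new Error("boom");
} catch (error) {
  logger.error("exampleFunction", { error }); // the meta object is serialized into the log line
}
```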

utils/mailer.js (new file)

@@ -0,0 +1,45 @@
+const mailer = require("nodemailer");
+const logger = require("./logger")(module);
+
+// Creates a mailer transporter object with authentication and base config
+const transport = mailer.createTransport({
+  host: "smtp.gmail.com",
+  port: 465,
+  secure: true,
+  service: "gmail",
+  auth: {
+    user: process.env.AUTOMAILER_ID,
+    pass: process.env.AUTOMAILER_APP_PASSWD,
+  }
+});
+
+/**
+ * Sends a mail from web user to a mail inside organization
+ * @param {string} mailTarget Target mail - must be within organization
+ * @param {string} mailSubject Mail subject
+ * @param {{name: string, email: string, message: string}} userData User details: name, email, and message
+ */
+const inboundMailer = (mailTarget, mailSubject, userData) => {
+  if (!mailTarget.endsWith("cegtechforum.in")) {
+    throw new Error("Invalid target mail domain.");
+  }
+  const message = {
+    to: mailTarget,
+    subject: mailSubject,
+    html:
+      "<p>Name: " + userData.name + "</p><p>Email: " + userData.email + "</p><br/><p>Message:<br/>" + userData.message + "</p>"
+  };
+  transport.sendMail(message, (err, info) => {
+    if (err) {
+      logger.error("Failure: QUERY mail NOT sent", { err, userData });
+    } else {
+      logger.info("Success: QUERY mail sent", { info });
+    }
+  });
+};
+
+module.exports = {
+  inboundMailer
+}
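
A hypothetical call of `inboundMailer` above; the target address and payload are placeholders, chosen only to satisfy the organization-domain check in the code.

```js
const { inboundMailer } = require("./utils/mailer");

inboundMailer("queries@cegtechforum.in", "Website query", {
  name: "Jane Doe",
  email: "jane@example.com",
  message: "Hello, I have a question about registration.",
});
// Send errors are logged by the utility itself; only an invalid target domain throws synchronously.
```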

utils/qrGenerator.js (new file)

@@ -0,0 +1,58 @@
+const pathLib = require("path");
+const qr = require("qrcode");
+
+const logger = require("./logger")(module);
+const { getSignedJWT } = require("./token");
+
+/**
+ * Generates QR code from data and writes to file in tmp folder.
+ * To avoid race conditions, use email or other unique attributes for id.
+ * @param {string|any} data String or JSON object
+ */
+const qrPNGFile = (id, data) => {
+  qr.toFile(
+    path = pathLib.join(__dirname, "../tmp/tmpQR-" + id + ".png"),
+    text = (typeof data === "object" ? JSON.stringify(data) : data),
+    options = { type: 'png' },
+    (err) => {
+      if (err) {
+        logger.error("qrPNGFile", err);
+        throw err;
+      }
+    }
+  );
+}
+
+/**
+ * Generates QR code from data after signing and writes to file in tmp or k-qrs folder.
+ *
+ * To avoid race conditions, use email or other unique attributes for ID.
+ * @param {string|any} data String or JSON object
+ */
+const qrSignedPNGFile = (id, data, tmp = true) => {
+  const signedData = getSignedJWT(data);
+  const qrFilename = `${tmp ? 'tmpEncQR' : 'K-QR'}-${id}.png`;
+  const targetPath = pathLib.join(
+    __dirname, "..",
+    tmp ? "tmp" : pathLib.join("uploads", "2023", "k-qrs"),
+    qrFilename,
+  );
+  qr.toFile(
+    path = targetPath,
+    text = (typeof data === "object" ? JSON.stringify(signedData) : signedData),
+    options = { type: 'png' },
+    (err) => {
+      if (err) {
+        logger.error("qrSignedPNGFile", err);
+        throw err;
+      }
+    }
+  )
+  return qrFilename;
+}
+
+module.exports = {
+  qrPNGFile,
+  qrSignedPNGFile,
+}
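
A hypothetical call of the signed-QR helper above; the id and payload are placeholders, and it assumes the `tmp` folder and the RSA signing key (used via `getSignedJWT`) are set up as the template expects.

```js
const { qrSignedPNGFile } = require("./utils/qrGenerator");

// Signs the payload and writes tmp/tmpEncQR-jane@example.com.png, returning the filename.
const filename = qrSignedPNGFile("jane@example.com", { email: "jane@example.com", pass: "general" });
console.log(filename); // "tmpEncQR-jane@example.com.png"
```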

utils/quickEncrypt.js (new file)

@@ -0,0 +1,23 @@
+/* Taken from quick-encrypt package, which is not maintained anymore */
+const crypto = require('crypto')
+
+const acceptableBitSizes = [1024, 2048];
+
+exports.generate = (sizeInBits) => {
+  if (!acceptableBitSizes.includes(sizeInBits))
+    throw Error('Error generating public and private key. Key size can only be 1024 or 2048. Example usage: ` let keys = QuickEncrypt.generate(2048); `')
+  return keypair({ bits: sizeInBits })
+}
+
+exports.encrypt = (payloadString, publicKey) => {
+  if (typeof payloadString !== 'string' || typeof publicKey !== 'string')
+    throw Error("Error encrypting. Payload and Public Key should be in text format. Example usage: ` let encryptedText = QuickEncrypt.encrypt('Some secret text here!', 'the public RSA key in text format here'); ` ")
+  return crypto.publicEncrypt(publicKey, Buffer.from(payloadString, 'utf8')).toString('hex')
+}
+
+exports.decrypt = (encryptedString, privateKey) => {
+  if (typeof encryptedString !== 'string' || typeof privateKey !== 'string')
+    throw Error("Error decrypting. Decrypted Text and Private Key should be in text format. Example usage: ` let decryptedText = QuickEncrypt.decrypt('asddd213d19jenacanscasn', 'the private RSA key in text format here'); ` ")
+  return crypto.privateDecrypt({ key: privateKey }, Buffer.from(encryptedString, 'hex')).toString()
+}
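
A hedged round-trip sketch of the `encrypt`/`decrypt` helpers above. Note that `generate()` in the snippet calls `keypair()`, which is not required anywhere in it, so this sketch generates its own PEM key pair with Node's built-in crypto instead; the key-pair options here are illustrative.

```js
const crypto = require("crypto");
const quickEncrypt = require("./utils/quickEncrypt");

const { publicKey, privateKey } = crypto.generateKeyPairSync("rsa", {
  modulusLength: 2048,
  publicKeyEncoding: { type: "spki", format: "pem" },
  privateKeyEncoding: { type: "pkcs8", format: "pem" },
});

const cipherHex = quickEncrypt.encrypt("Some secret text here!", publicKey);
console.log(quickEncrypt.decrypt(cipherHex, privateKey)); // "Some secret text here!"
```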

@@ -1,8 +1,8 @@
 const fs = require("fs");
 const jwt = require("jsonwebtoken");

-const privateKey = fs.readFileSync(process.env.PRIVKEY_PATH);
-const publicKey = fs.readFileSync(process.env.PUBKEY_PATH);
+const privateKey = fs.readFileSync(process.env.PRIVKEY);
+const publicKey = fs.readFileSync(process.env.PUBKEY);

 /**
  * Sign data into JWT with JWT env secret
@@ -10,7 +10,7 @@ const publicKey = fs.readFileSync(process.env.PUBKEY_PATH);
  * @returns {jwt.JwtPayload}
  */
 const getJWT = (data) => {
-  return jwt.sign({ id: data }, process.env.JWTSECRET, { algorithm: "HS256" }); // symmetric encryption, so simple secret with SHA
+  return jwt.sign({ id: data }, process.env.JWTSECRET, { algorithm: "HS256" }); // symmetric encryption, so simple secret with SHA
 };

@@ -19,7 +19,13 @@ const getJWT = (data) => {
  * @returns {jwt.JwtPayload}
  */
 const getSignedJWT = (data) => {
-  return jwt.sign({ id: data }, privateKey, { algorithm: "RS256" }); // asymmetric signing, so private key with RSA
+  return jwt.sign(
+    { id: data },
+    privateKey,
+    {
+      algorithm: "RS256", // asymmetric signing, so private key with RSA
+    }
+  )
 }

@@ -28,7 +34,7 @@ const getSignedJWT = (data) => {
  * @returns {string|any}
  */
 const verifyJWT = (data) => {
-  return jwt.verify(data, process.env.JWTSECRET, { algorithms: ["HS256"] });
+  return jwt.verify(data, process.env.JWTSECRET, { algorithms: ["HS256"] });
 }

@@ -37,12 +43,18 @@ const verifyJWT = (data) => {
  * @returns {string|any}
  */
 const verifySignedJWT = (signedString) => {
-  return jwt.verify(signedString, publicKey, { algorithms: ["RS256"] });
+  return jwt.verify(
+    signedString,
+    publicKey,
+    {
+      algorithms: ["RS256"]
+    }
+  );
 }

 module.exports = {
   getJWT,
   verifyJWT,
   getSignedJWT,
   verifySignedJWT,
 };
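
A hedged round-trip sketch of the token helpers above; it assumes `JWTSECRET` and the private/public key paths are configured as the env files and keys folder of this template intend, and the payloads are placeholders.

```js
const { getJWT, verifyJWT, getSignedJWT, verifySignedJWT } = require("./utils/token");

const hmacToken = getJWT("jane@example.com");
console.log(verifyJWT(hmacToken).id);      // "jane@example.com" (HS256, shared secret)

const rsaToken = getSignedJWT({ email: "jane@example.com" });
console.log(verifySignedJWT(rsaToken).id); // { email: "jane@example.com" } (RS256, key pair)
```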

@@ -1,9 +1,7 @@
 const { validationResult } = require("express-validator");
-const { getNestedValuesString } = require("../utils/jsonTransformer");
-const logger = require("../utils/logger")(module);
 const typedefs = require("../typedefs");
+const { getNestedValuesString } = require("../utils/jsonTransformer");

 /**
  * Refer: https://stackoverflow.com/questions/58848625/access-messages-in-express-validator
@@ -17,30 +15,17 @@ const validate = (req, res, next) => {
   if (errors.isEmpty()) {
     return next();
   }

-  const extractedErrors = [];
-  errors.array().forEach(err => {
-    if (err.type === 'alternative') {
-      err.nestedErrors.forEach(nestedErr => {
-        extractedErrors.push({
-          [nestedErr.path]: nestedErr.msg
-        });
-      });
-    } else if (err.type === 'field') {
-      extractedErrors.push({
-        [err.path]: err.msg
-      });
-    }
-  });
+  const extractedErrors = []
+  errors.array().map(err => extractedErrors.push({
+    [err.param]: err.msg
+  }));

-  res.status(400).json({
+  return res.status(400).send({
     message: getNestedValuesString(extractedErrors),
     errors: extractedErrors
-  });
-  logger.warn("invalid request", { extractedErrors });
-  return;
+  })
 }

 module.exports = {
-  validate
-};
+  validate,
+}
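
A hedged sketch of how the `validate` handler above is typically paired with express-validator checks on a route; the `/signup` path and field names are placeholders, not routes from this diff.

```js
const router = require("express").Router();
const { body } = require("express-validator");
const { validate } = require("../validators");

router.post(
  "/signup",
  body("email").isEmail(),
  body("password").isLength({ min: 8 }),
  validate, // responds 400 with the collected messages when any check fails
  (req, res) => res.status(201).send({ message: "OK" }),
);

module.exports = router;
```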