Compare commits

...

5 Commits

Author | SHA1 | Message | Date

32735ad7ff | boom! | 2024-08-14 21:08:58 +05:30

    overall: formatting check, jsdoc type hints, express res/return stuff
    utils - changes in logger, dateformatter and removed unneeded ones
    .env file changes
    license check, readme update
    package.json update - version, deps, URLs
    server cleanup
    sequelize config check

7318e8e325 | oops | 2023-09-22 20:27:33 -07:00

Kaushik Narayan R | 47527d443f | Create LICENSE | 2023-09-21 16:48:41 -07:00

Kaushik Narayan R | ba4a4e1fcd | Update README.md before going public | 2023-09-21 16:43:06 -07:00

62ed623c7e | packages fix, validator update | 2023-05-14 04:55:25 +05:30
36 changed files with 379 additions and 4826 deletions

.env (3)

@ -1,5 +1,8 @@
PORT=5000
TRUST_PROXY = 1
AUTOMAILER_SMTP_SERVICE
AUTOMAILER_SMTP_HOST
AUTOMAILER_ID = "mailerID@mailserver.domain"
AUTOMAILER_APP_PASSWD = "mailerpasswd"


@ -1,5 +1,8 @@
DB_USERNAME='your_local_db_username'
DB_PASSWORD='your_local_db_password'
DB_NAME='your_local_db_name'
DB_USER = your_database_username
DB_PASSWD = your_database_password
DB_NAME = your_database_name
DB_HOST = localhost
DB_PORT = your_database_port
DB_DIALECT = your_database_dialect
CAPTCHA_SECRET = "6LeIxAcTAAAAAGG-vFI1TnRWxMZNFuojJ4WifJWe"
CAPTCHA_SECRET = 'your-captcha-secret'

.env.production (Normal file, 3)

@ -0,0 +1,3 @@
DB_URL = 'your_database_connection_string'
CAPTCHA_SECRET = 'your-captcha-secret'

.gitignore (vendored, 101)

@ -6,14 +6,101 @@ yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
# dotenv environment variables file
.env.development
.env.staging
.env.production
*.env
# TypeScript v1 declaration files
typings/
# Data files
*/*.csv
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env.local
.env.*.local
# parcel-bundler cache (https://parceljs.org/)
.cache
# Next.js build output
.next
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and *not* Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# SQLite db
*.db

LICENSE (Normal file, 21)

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2024 Kaushik Narayan R
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@ -1,21 +1,17 @@
# Express-Sequelize backend server template
### To get started:
## To get started
- Clone this repo: `git clone https://gitlab.com/ctf-tech-2023/backend-template`
- Clone this repo: `git clone https://github.com/20kaushik02/express-sequelize-backend-template`
- Reset the git remote repo URL: `git remote rm origin`
- Set new git remote URL: `git remote add origin https://gitlab.com/ctf-tech-2023/new-repo-name`
- Set new git remote URL: `git remote add origin https://github.com/20kaushik02/<<new-repo-name>>`
- Remove the template environment files from git alone: `git rm -r --cached *.env*`
### Project setup:
## Project setup
- Edit `package.json` to reflect the new name and URLs
- Edit `README.md` to reflect project details
- Run `npm i` to install all dependencies
- Before running `sequelize-cli` commands while developing, make sure to set `$env:NODE_ENV='development'` on Windows, or `NODE_ENV=development` on Linux/MacOS
- Env config:
- **.env** - All things common to all environments (port, mailer creds, JWT secret, admin data access creds, etc.)
- **.env.development** - Development environment (dev captcha secret, dev DB details)
- **.env.staging** - Staging environment (dev captcha secret, staging DB conn. string) - **for sysadmins**
- **.env.production** - Production environment (production captcha secret, prod DB conn. string) - **for sysadmins**
- [See here](https://github.com/kerimdzhanov/dotenv-flow?tab=readme-ov-file#files-under-version-control) for best practices for .env files configuration
- Staging: `npm run staging_prep` and `npm run staging` to deploy on Render after configuring a new web service on Render dashboard
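
As a concrete illustration of the layered env files described above, a minimal sketch of how they are consumed at startup (this assumes dotenv-flow v4 semantics, as pulled in by index.js; it is not part of the diff):

    process.env.NODE_ENV = process.env.NODE_ENV || "development";
    require("dotenv-flow").config();

    // .env supplies the shared keys (PORT, mailer creds, ...); .env.development then
    // overrides/extends them with dev-only keys such as the local DB credentials
    console.log(process.env.PORT);        // from .env
    console.log(process.env.DB_USERNAME); // from .env.development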


@ -1 +1 @@
## Boilerplates - reusable code templates
# Boilerplates - reusable code templates


@ -1,6 +1,7 @@
const typedefs = require("../typedefs");
const logger = require("../utils/logger")(module);
const typedefs = require("../typedefs");
/**
* Business logic to go in these controller functions.
* Everything should be contained inside try-catch blocks
@ -12,8 +13,9 @@ const __controller_func = async (req, res) => {
try {
} catch (error) {
res.sendStatus(500);
logger.error("__controller_func", { error });
return res.status(500).send({ message: "Server Error. Try again." });
return;
}
}
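
A filled-in sketch of this controller template (handler name and response are illustrative, not from the repo):

    const logger = require("../utils/logger")(module);

    const getHealth = async (req, res) => {
      try {
        return res.status(200).send({ status: "ok" });
      } catch (error) {
        logger.error("getHealth", { error });
        return res.status(500).send({ message: "Server Error. Try again." });
      }
    };

    module.exports = { getHealth };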


@ -1,14 +1,13 @@
const router = require("express").Router();
const { validate } = require("../validators");
const { __controller_func } = require("./controller");
router.get(
// URL,
// middleware,
// validators,
// validate,
// __controller_func
// controller
);
router.post(


@ -24,5 +24,4 @@ const __validator_func = async (req, res, next) => {
module.exports = {
__validator_func,
}
};


@ -1 +1 @@
## Configuration files and data
# Configuration files and data


@ -1,23 +1,28 @@
module.exports = {
"development": {
"username": process.env.DB_USERNAME, // local PostgreSQL DB username
"password": process.env.DB_PASSWORD, // local PostgreSQL DB password
"host": "127.0.0.1", // localhost
"database": process.env.DB_NAME, // local PostgreSQL DB name
"dialect": "postgres"
const logger = require("../utils/logger")(module);
const connConfigs = {
development: {
username: process.env.DB_USERNAME || 'postgres',
password: process.env.DB_PASSWORD || '',
database: process.env.DB_NAME || 'postgres',
host: process.env.DB_HOST || '127.0.0.1',
port: process.env.DB_PORT || 5432,
},
"staging": {
"use_env_variable": "DB_URL", // staging database connection string
"dialect": "postgres",
"dialectOptions": {
"ssl": true,
},
},
"production": {
"use_env_variable": "DB_URL", // production database connection string
"dialect": "postgres",
"dialectOptions": {
"ssl": true,
staging: {
use_env_variable: "DB_URL", // use connection string for non-dev env
},
production: {
use_env_variable: "DB_URL", // use connection string for non-dev env
// dialectOptions: {
// ssl: true,
// },
}
}
// common config
for (const conf in connConfigs) {
connConfigs[conf]['logging'] = (msg) => logger.debug(msg);
connConfigs[conf]['dialect'] = process.env.DB_DIALECT || 'postgres';
}
module.exports = connConfigs;
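
This map is consumed the same way models/index.js does further down in this diff; with no DB_* variables set, the development entry resolves to the fallbacks (a sketch, not part of the diff):

    const env = process.env.NODE_ENV || "development";
    const config = require("./config/sequelize.js")[env]; // same lookup as in models/index.js

    // with nothing set in the environment, config is roughly:
    // { username: 'postgres', password: '', database: 'postgres',
    //   host: '127.0.0.1', port: 5432, dialect: 'postgres', logging: [Function] }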


@ -1 +1 @@
## Controllers - business logic functions, end of the API route
# Controllers - business logic functions, end of the API route


@ -1,23 +1,35 @@
require("dotenv-flow").config();
const util = require('util');
const express = require("express");
const cors = require("cors");
const helmet = require("helmet");
const logger = require("./utils/logger")(module);
const app = express();
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
// Enable this if you run behind a proxy (e.g. nginx)
app.set('trust proxy', process.env.TRUST_PROXY);
app.use(cors());
app.use(helmet());
app.disable("x-powered-by");
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
// Static
app.use(express.static(__dirname + '/static'));
// Put routes here
app.use((_req, res) => {
return res.status(200).send("Back-end for");
// Fallbacks
app.use((req, res) => {
res.status(200).send("Back-end for");
logger.info("Unrecognized URL", { url: req.url });
return;
});
const port = process.env.PORT || 5000;
@ -25,3 +37,20 @@ const port = process.env.PORT || 5000;
app.listen(port, () => {
logger.info(`App Listening on port ${port}`);
});
const cleanupFunc = (signal) => {
if (signal)
logger.info(`${signal} signal received, shutting down now...`);
Promise.allSettled([
// handle DB conn, sockets, etc. here
util.promisify(server.close),
]).then(() => {
logger.info("Cleaned up, exiting.");
process.exit(0);
});
}
['SIGHUP', 'SIGINT', 'SIGQUIT', 'SIGTERM', 'SIGUSR1', 'SIGUSR2'].forEach((signal) => {
process.on(signal, () => cleanupFunc(signal));
});
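
A self-contained sketch of the shutdown pattern added above, assuming the `server` handle referenced in cleanupFunc is the value returned by app.listen:

    const util = require("util");
    const express = require("express");

    const app = express();
    const port = process.env.PORT || 5000;

    // keep the handle returned by listen() so it can be closed on shutdown
    const server = app.listen(port, () => console.log(`listening on ${port}`));

    const cleanupFunc = (signal) => {
      if (signal) console.log(`${signal} received, shutting down`);
      Promise.allSettled([
        // close DB connections, sockets, etc. here as well
        util.promisify(server.close.bind(server))(),
      ]).then(() => process.exit(0));
    };

    ["SIGINT", "SIGTERM"].forEach((sig) => process.on(sig, () => cleanupFunc(sig)));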


@ -1 +1 @@
## Keys - public/private key pairs for certificate QR signing and verification
# Keys - public/private key pairs for certificate QR signing and verification


@ -1 +1 @@
## Middleware - functionalities that must be in the middle of the API route control flow
# Middleware - functionalities that must be in the middle of the API route control flow


@ -1,6 +1,7 @@
const typedefs = require("../typedefs");
const logger = require("../utils/logger")(module);
const typedefs = require("../typedefs");
const creds = JSON.parse(process.env.ADMIN_CREDS);
/**
@ -11,7 +12,7 @@ const creds = JSON.parse(process.env.ADMIN_CREDS);
*/
const adminQueryCreds = async (req, res, next) => {
try {
/** @type {JSON} */
/** @type {any} */
const { user, access } = req.query;
if (creds[user] === access) {
logger.info("Admin access - " + user);
@ -20,15 +21,17 @@ const adminQueryCreds = async (req, res, next) => {
else {
// we do a bit of trolling here
const unauthIP = req.headers['x-real-ip'] || req.ip
res.status(401).send("Intruder alert. IP address: " + unauthIP);
logger.warn("Intruder alert.", { ip: unauthIP });
return res.status(401).send("Intruder alert. IP address: " + unauthIP);
return;
}
} catch (error) {
res.sendStatus(500);
logger.error("adminQueryCreds", { error });
return res.status(500).send({ message: "Server Error. Try again." });
return;
}
}
module.exports = {
adminQueryCreds,
}
};
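
ADMIN_CREDS is expected to be a JSON map of user to access key, matched against the query string; an illustrative value and request (values made up):

    // in .env: ADMIN_CREDS='{"admin":"super-secret-token"}'
    const creds = JSON.parse('{"admin":"super-secret-token"}');

    // GET /some/admin/route?user=admin&access=super-secret-token
    const { user, access } = { user: "admin", access: "super-secret-token" };
    console.log(creds[user] === access); // true -> next() is called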


@ -1,8 +1,9 @@
const fetch = require("cross-fetch");
const typedefs = require("../typedefs");
const logger = require("../utils/logger")(module);
const typedefs = require("../typedefs");
/**
* Google ReCAPTCHA v2 verification
*
@ -19,18 +20,20 @@ const verifyCaptcha = async (req, res, next) => {
const captchaResp = await fetch(verifyCaptchaURL);
const captchaData = await captchaResp.json();
if (captchaData.success !== undefined && !captchaData.success) {
logger.error("Recaptcha", { captchaData });
return res.status(403).send({
res.status(403).send({
message: "Failed captcha verification"
});
logger.error("Recaptcha", { captchaData });
return;
}
next();
} catch (error) {
logger.error("Error", { error });
return res.status(500).send({ message: "Server Error. Try again." });
res.sendStatus(500);
logger.error("verifyCaptcha", { error });
return;
}
}
module.exports = {
verifyCaptcha
}
};
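
The hunk starts after verifyCaptchaURL has been built; for reCAPTCHA v2 that URL would normally target Google's documented siteverify endpoint, roughly as below (the token field name is an assumption, not from the diff):

    const buildVerifyCaptchaURL = (captchaToken) =>
      "https://www.google.com/recaptcha/api/siteverify" +
      `?secret=${process.env.CAPTCHA_SECRET}&response=${captchaToken}`;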


@ -1 +1 @@
## Database (mainly Sequelize's) migrations
# Sequelize migrations folder


@ -1 +1 @@
## Database ORM (mainly Sequelize's) models
# Sequelize model schema


@ -8,7 +8,6 @@ const env = process.env.NODE_ENV || "development";
const config = require(__dirname + "/../config/sequelize.js")[env];
const db = {};
// Create new Sequelize instance
let sequelize;
if (config.use_env_variable) {
sequelize = new Sequelize(process.env[config.use_env_variable], config);
@ -16,15 +15,15 @@ if (config.use_env_variable) {
sequelize = new Sequelize(config.database, config.username, config.password, config);
}
sequelize.authenticate()
.then(
() => {
logger.info('Sequelize auth success');
},
(err) => {
logger.error('Sequelize auth error', { err });
(async () => {
try {
await sequelize.authenticate();
logger.info("Sequelize auth success");
} catch (error) {
logger.error("Sequelize auth error", { err });
throw error;
}
)
})();
// Read model definitions from folder
fs

package-lock.json (generated, 4495)
File diff suppressed because it is too large


@ -1,7 +1,7 @@
{
"name": "backend-template",
"version": "1.3.1",
"description": "Template for back-end server using Express and Sequelize.",
"version": "2.0.0",
"description": "Template for a back-end server using Express and Sequelize.",
"main": "index.js",
"scripts": {
"dev": "cross-env NODE_ENV=development nodemon --exitcrash index.js",
@ -11,31 +11,36 @@
},
"repository": {
"type": "git",
"url": "git+https://gitlab.com/ctf-tech-2023/backend-template.git"
"url": "git+https://github.com/20kaushik02/express-sequelize-backend-template.git"
},
"author": "Kaushik Ravishankar <rknarayan02@gmail.com>",
"license": "ISC",
"license": "MIT",
"bugs": {
"url": "https://gitlab.com/ctf-tech-2023/backend-template/issues"
"url": "https://github.com/20kaushik02/express-sequelize-backend-template/issues"
},
"homepage": "https://gitlab.com/ctf-tech-2023/backend-template#readme",
"homepage": "https://github.com/20kaushik02/express-sequelize-backend-template#readme",
"dependencies": {
"archiver": "^7.0.1",
"cors": "^2.8.5",
"cross-fetch": "^3.1.5",
"dotenv-flow": "^3.2.0",
"express": "^4.18.1",
"express-validator": "^6.14.2",
"fast-csv": "^4.3.6",
"helmet": "^6.0.0",
"jsonwebtoken": "^9.0.0",
"nodemailer": "^6.8.0",
"pg": "^8.8.0",
"sequelize": "^6.24.0",
"winston": "^3.8.2"
"cross-fetch": "^4.0.0",
"dotenv-flow": "^4.1.0",
"express": "^4.18.2",
"express-validator": "^7.2.0",
"fast-csv": "^5.0.1",
"helmet": "^7.1.0",
"jsonwebtoken": "^9.0.2",
"nodemailer": "^6.9.14",
"pg": "^8.12.0",
"qrcode": "^1.5.4",
"sequelize": "^6.37.3",
"winston": "^3.14.1"
},
"devDependencies": {
"@types/express": "^4.17.21",
"@types/node": "^22.2.0",
"cross-env": "^7.0.3",
"nodemon": "^2.0.20",
"sequelize-cli": "^6.5.1"
"nodemon": "^3.1.4",
"sequelize-cli": "^6.6.2",
"typescript": "^5.5.4"
}
}


@ -1 +1 @@
## Routes - define control flow of the API route
# Routes - define control flow of the API route


@ -1 +1 @@
## Database (mainly Sequelize's) seeders - initial data feed, for dummy data and testing
# Sequelize seeder scripts - initial data feed, for dummy data and testing


@ -6,12 +6,6 @@
* @typedef {import("express").Request} Req
* @typedef {import("express").Response} Res
* @typedef {import("express").NextFunction} Next
*
* @typedef {import("sequelize")} Sequelize
* @typedef {import("sequelize").Model} Model
* @typedef {import("sequelize").QueryInterface} QueryInterface
*
* @typedef {import("winston").Logger} Logger
*/
exports.unused = {};


@ -2,8 +2,8 @@ const fs = require("fs");
const archiver = require('archiver');
/**
* @param {String} sourceDir: /some/folder/to/compress
* @param {String} outPath: /path/to/created.zip
* @param {string} sourceDir /some/folder/to/compress
* @param {string} outPath /path/to/created.zip
* @returns {Promise}
*/
function zipDirectory(sourceDir, outPath) {
@ -24,4 +24,4 @@ function zipDirectory(sourceDir, outPath) {
module.exports = {
zipDirectory,
}
};
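
Usage sketch (module path assumed; the promise resolves once archiver has finished writing the archive):

    const { zipDirectory } = require("./utils/zip");

    zipDirectory("./uploads/2024", "./tmp/uploads-2024.zip")
      .then(() => console.log("archive written"))
      .catch((err) => console.error("zip failed", err));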


@ -1,12 +1,14 @@
/**
* Returns a timestamp string to use for timestamped files
* @returns {string} String of current datetime in YYYY.MM.DD-HH:MM:SS format
* @returns {string} String of current datetime in YYYYMMDDHHMMSS format
*/
const dateForFilename = () => {
const dt = new Date();
return `${dt.getFullYear()}-${dt.getMonth() + 1}-${dt.getDate()}-${dt.getHours()}-${dt.getMinutes()}-${dt.getSeconds()}`;
return new Date().
toISOString().slice(-24).
replace(/\D/g, '').
slice(0, 14);
}
module.exports = {
dateForFilename,
}
};
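
Usage sketch of the reworked helper (UTC-based; module path assumed):

    const { dateForFilename } = require("./utils/dateFormatter");

    // toISOString() "2024-08-14T15:38:58.123Z" becomes "20240814153858"
    const filename = `export-${dateForFilename()}.csv`;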


@ -1,3 +1,9 @@
/**
* Recursively build a FormData object from a JSON object
* @param {FormData} formData
* @param {any} data
* @param {string} parentKey
*/
function buildFormData(formData, data, parentKey) {
if (data && typeof data === 'object' && !(data instanceof Date)) {
Object.keys(data).forEach(key => {
@ -10,6 +16,11 @@ function buildFormData(formData, data, parentKey) {
}
}
/**
* Converts a JSON object to a FormData object
* @param {any} data
* @returns {FormData}
*/
function jsonToFormData(data) {
const formData = new FormData();
@ -21,4 +32,4 @@ function jsonToFormData(data) {
module.exports = {
jsonToFormData,
buildFormData,
}
};
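
Usage sketch (FormData is global in Node 18+; module path assumed):

    const { jsonToFormData } = require("./utils/formData");

    const fd = jsonToFormData({ name: "Kaushik", sentAt: new Date() });
    // fd is a FormData instance with one entry per leaf value;
    // nested objects are flattened by buildFormData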


@ -1,11 +1,11 @@
/**
* String joins all the values of a JSON object, including nested keys
* Stringifies only values of a JSON object, including nested ones
*
* @param {any} obj JSON object
* @param {string} delimiter Delimiter of final string
* @returns
* @returns {string}
*/
const getNestedValuesString = (obj, delimiter) => {
const getNestedValuesString = (obj, delimiter = ', ') => {
let values = [];
for (key in obj) {
if (typeof obj[key] !== "object") {
@ -15,9 +15,9 @@ const getNestedValuesString = (obj, delimiter) => {
}
}
return delimiter ? values.join(delimiter) : values.join();
return values.join(delimiter);
}
module.exports = {
getNestedValuesString
}
};
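
Usage sketch (assuming the else branch outside this hunk recurses into nested objects, as the doc comment says):

    const { getNestedValuesString } = require("./utils/jsonTransformer");

    getNestedValuesString({ a: 1, b: { c: "two" } });        // "1, two" with the new default delimiter
    getNestedValuesString({ a: 1, b: { c: "two" } }, " | "); // "1 | two"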


@ -1,37 +1,34 @@
// Whole thing is winston logger stuff, if you want to learn read the docs
const path = require("path");
const { createLogger, transports, config, format } = require("winston");
const { combine, label, timestamp, printf } = format;
const { createLogger, transports, config, format } = require('winston');
const { combine, label, timestamp, printf, errors } = format;
const typedefs = require("../typedefs");
const getLabel = (callingModule) => {
const parts = callingModule.filename.split(path.sep);
if (!callingModule.filename) return "repl";
const parts = callingModule.filename?.split(path.sep);
return path.join(parts[parts.length - 2], parts.pop());
};
const logMetaReplacer = (key, value) => {
if (key === "error") {
return value.name + ": " + value.message;
}
return value;
}
const allowedErrorKeys = ["name", "code", "message", "stack"];
const metaFormat = (meta) => {
if (Object.keys(meta).length > 0)
return "\n" + JSON.stringify(meta, logMetaReplacer) + "\n";
return "\n";
return '\n' + JSON.stringify(meta, null, "\t");
return '';
}
const logFormat = printf(({ level, message, label, timestamp, ...meta }) => {
if (meta.error) {
if (meta.error) { // if the error was passed
for (const key in meta.error) {
if (typeof key !== "symbol" && key !== "message" && key !== "name") {
delete meta.error[key]
if (!allowedErrorKeys.includes(key)) {
delete meta.error[key];
}
}
const { stack, ...rest } = meta.error;
return `${timestamp} [${label}] ${level}: ${message}${metaFormat(rest)}\n` +
`${stack ?? ''}`;
}
return `${timestamp} [${label}] ${level}: ${message}${metaFormat(meta)}`;
});
@ -39,22 +36,32 @@ const logFormat = printf(({ level, message, label, timestamp, ...meta }) => {
/**
* Creates a curried function, and call it with the module in use to get logs with filename
* @param {typedefs.Module} callingModule The module from which the logger is called
* @returns {typedefs.Logger}
*/
const logger = (callingModule) => {
return createLogger({
const curriedLogger = (callingModule) => {
let winstonLogger = createLogger({
levels: config.npm.levels,
format: combine(
errors({ stack: true }),
label({ label: getLabel(callingModule) }),
timestamp({ format: "YYYY-MM-DD HH:mm:ss" }),
timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
logFormat,
),
transports: [
new transports.Console(),
new transports.File({ filename: __dirname + "/../logs/common.log" }),
new transports.File({ filename: __dirname + "/../logs/error.log", level: "error" }),
new transports.Console({ level: 'info' }),
new transports.File({
filename: __dirname + '/../logs/debug.log',
level: 'debug',
maxsize: 10485760,
}),
new transports.File({
filename: __dirname + '/../logs/error.log',
level: 'error',
maxsize: 1048576,
}),
]
});
winstonLogger.on('error', (error) => winstonLogger.error("Error inside logger", { error }));
return winstonLogger;
}
module.exports = logger;
module.exports = curriedLogger;
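
Typical usage of the curried logger, as seen throughout the other files in this diff:

    const logger = require("./utils/logger")(module); // label becomes e.g. "utils/something.js"

    logger.info("server started", { port: 5000 });
    try {
      throw new Error("boom");
    } catch (error) {
      // errors({ stack: true }) plus the error-key handling above print the stack separately
      logger.error("something failed", { error });
    }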


@ -1,45 +0,0 @@
const mailer = require("nodemailer");
const logger = require("./logger")(module);
// Creates a mailer transporter object with authentication and base config
const transport = mailer.createTransport({
host: "smtp.gmail.com",
port: 465,
secure: true,
service: "gmail",
auth: {
user: process.env.AUTOMAILER_ID,
pass: process.env.AUTOMAILER_APP_PASSWD,
}
});
/**
* Sends a mail from web user to a mail inside organization
* @param {string} mailTarget Target mail - must be within organization
* @param {string} mailSubject Mail subject
* @param {{name: string, email: string, message: string}} userData User details: name, email, and message
*/
const inboundMailer = (mailTarget, mailSubject, userData) => {
if (!mailTarget.endsWith("cegtechforum.in")) {
throw new Error("Invalid target mail domain.");
}
const message = {
to: mailTarget,
subject: mailSubject,
html:
"<p>Name: " + userData.name + "</p><p>Email: " + userData.email + "</p><br/><p>Message:<br/>" + userData.message + "</p>"
};
transport.sendMail(message, (err, info) => {
if (err) {
logger.error("Failure: QUERY mail NOT sent", { err, userData });
} else {
logger.info("Success: QUERY mail sent", { info });
}
});
};
module.exports = {
inboundMailer
}


@ -1,58 +0,0 @@
const pathLib = require("path");
const qr = require("qrcode");
const logger = require("./logger")(module);
const { getSignedJWT } = require("./token");
/**
* Generates QR code from data and writes to file in tmp folder.
* To avoid race conditions, use email or other unique attributes for id.
* @param {string|any} data String or JSON object
*/
const qrPNGFile = (id, data) => {
qr.toFile(
path = pathLib.join(__dirname, "../tmp/tmpQR-" + id + ".png"),
text = (typeof data === "object" ? JSON.stringify(data) : data),
options = { type: 'png' },
(err) => {
if (err) {
logger.error("qrPNGFile", err);
throw err;
}
}
);
}
/**
* Generates QR code from data after signing and writes to file in tmp or k-qrs folder.
*
* To avoid race conditions, use email or other unique attributes for ID.
* @param {string|any} data String or JSON object
*/
const qrSignedPNGFile = (id, data, tmp = true) => {
const signedData = getSignedJWT(data);
const qrFilename = `${tmp ? 'tmpEncQR' : 'K-QR'}-${id}.png`;
const targetPath = pathLib.join(
__dirname, "..",
tmp ? "tmp" : pathLib.join("uploads", "2023", "k-qrs"),
qrFilename,
);
qr.toFile(
path = targetPath,
text = (typeof data === "object" ? JSON.stringify(signedData) : signedData),
options = { type: 'png' },
(err) => {
if (err) {
logger.error("qrSignedPNGFile", err);
throw err;
}
}
)
return qrFilename;
}
module.exports = {
qrPNGFile,
qrSignedPNGFile,
}


@ -1,23 +0,0 @@
/* Taken from quick-encrypt package, which is not maintained anymore */
const crypto = require('crypto')
const acceptableBitSizes = [1024, 2048];
exports.generate = (sizeInBits) => {
if (!acceptableBitSizes.includes(sizeInBits))
throw Error('Error generating public and private key. Key size can only be 1024 or 2048. Example usage: ` let keys = QuickEncrypt.generate(2048); `')
return keypair({ bits: sizeInBits })
}
exports.encrypt = (payloadString, publicKey) => {
if (typeof payloadString !== 'string' || typeof publicKey !== 'string')
throw Error("Error encrypting. Payload and Public Key should be in text format. Example usage: ` let encryptedText = QuickEncrypt.encrypt('Some secret text here!', 'the public RSA key in text format here'); ` ")
return crypto.publicEncrypt(publicKey, Buffer.from(payloadString, 'utf8')).toString('hex')
}
exports.decrypt = (encryptedString, privateKey) => {
if (typeof encryptedString !== 'string' || typeof privateKey !== 'string')
throw Error("Error decrypting. Decrypted Text and Private Key should be in text format. Example usage: ` let decryptedText = QuickEncrypt.decrypt('asddd213d19jenacanscasn', 'the private RSA key in text format here'); ` ")
return crypto.privateDecrypt({ key: privateKey }, Buffer.from(encryptedString, 'hex')).toString()
}


@ -1,8 +1,8 @@
const fs = require("fs");
const jwt = require("jsonwebtoken");
const privateKey = fs.readFileSync(process.env.PRIVKEY);
const publicKey = fs.readFileSync(process.env.PUBKEY);
const privateKey = fs.readFileSync(process.env.PRIVKEY_PATH);
const publicKey = fs.readFileSync(process.env.PUBKEY_PATH);
/**
* Sign data into JWT with JWT env secret
@ -19,13 +19,7 @@ const getJWT = (data) => {
* @returns {jwt.JwtPayload}
*/
const getSignedJWT = (data) => {
return jwt.sign(
{ id: data },
privateKey,
{
algorithm: "RS256", // asymmetric signing, so private key with RSA
}
)
return jwt.sign({ id: data }, privateKey, { algorithm: "RS256" }); // asymmetric signing, so private key with RSA
}
/**
@ -43,13 +37,7 @@ const verifyJWT = (data) => {
* @returns {string|any}
*/
const verifySignedJWT = (signedString) => {
return jwt.verify(
signedString,
publicKey,
{
algorithms: ["RS256"]
}
);
return jwt.verify(signedString, publicKey, { algorithms: ["RS256"] });
}
module.exports = {
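
A round-trip sketch of the RS256 helpers above (module path assumed; the key files are whatever PRIVKEY_PATH and PUBKEY_PATH point to):

    const { getSignedJWT, verifySignedJWT } = require("./utils/token");

    const token = getSignedJWT("user@example.com"); // payload is { id: <data> }, signed with the private key
    const payload = verifySignedJWT(token);         // verified with the public key
    console.log(payload.id);                        // "user@example.com"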


@ -1,7 +1,9 @@
const { validationResult } = require("express-validator");
const typedefs = require("../typedefs");
const { getNestedValuesString } = require("../utils/jsonTransformer");
const logger = require("../utils/logger")(module);
const typedefs = require("../typedefs");
/**
* Refer: https://stackoverflow.com/questions/58848625/access-messages-in-express-validator
@ -15,17 +17,30 @@ const validate = (req, res, next) => {
if (errors.isEmpty()) {
return next();
}
const extractedErrors = []
errors.array().map(err => extractedErrors.push({
[err.param]: err.msg
}));
return res.status(400).send({
const extractedErrors = [];
errors.array().forEach(err => {
if (err.type === 'alternative') {
err.nestedErrors.forEach(nestedErr => {
extractedErrors.push({
[nestedErr.path]: nestedErr.msg
});
});
} else if (err.type === 'field') {
extractedErrors.push({
[err.path]: err.msg
});
}
});
res.status(400).json({
message: getNestedValuesString(extractedErrors),
errors: extractedErrors
})
});
logger.warn("invalid request", { extractedErrors });
return;
}
module.exports = {
validate,
}
validate
};
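
A sketch of how `validate` slots into a route, following the routes template earlier in this diff (route, fields, and handler are illustrative):

    const router = require("express").Router();
    const { body } = require("express-validator");
    const { validate } = require("../validators");

    router.post(
      "/signup",                              // URL
      body("email").isEmail(),                // validators
      body("password").isLength({ min: 8 }),
      validate,                               // responds 400 with extractedErrors on failure
      (req, res) => res.status(201).send({ ok: true }) // controller
    );

    module.exports = router;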