small improvements, bug fixes, OCD formatting
This commit is contained in:
Kaushik Narayan R 2024-12-26 03:13:35 -07:00
parent fa9208940a
commit d999db53ae
28 changed files with 178 additions and 181 deletions

View File

@ -8,7 +8,7 @@ Personal Spotify playlist manager. Features inbound!
- stores links as from-to pairs
- fetches all of the user's playlists and links into memory and then works with that data; assumption is graphs won't be too big
## to-do:
## to-do
- re-evaluate all logging
- DRY all the API calls and surrounding processing
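
The README note above (links stored as from-to pairs and worked on as an in-memory graph) is what the `myGraph` utility models; its JSDoc example appears later in this diff. A minimal sketch with made-up playlist IDs, assuming the require path:

```javascript
// Sketch only: playlist links as a directed graph, per the README above.
// Playlist IDs are made up and the require path is assumed.
const myGraph = require("./utils/graph");

const playlists = ["inbox", "rock", "archive"]; // nodes: playlist IDs
const links = [
	{ from: "inbox", to: "rock" },                // one stored from-to link
	{ from: "rock", to: "archive" },
];

const g = new myGraph(playlists, links);
console.log(g.detectCycle()); // false; adding { from: "archive", to: "inbox" } would make it true
```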

View File

@ -1,14 +1,14 @@
const axios = require('axios');
const axios = require("axios");
const { baseAPIURL, accountsAPIURL } = require("../constants");
const logger = require('../utils/logger')(module);
const logger = require("../utils/logger")(module);
const authInstance = axios.default.create({
baseURL: accountsAPIURL,
timeout: 20000,
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
'Authorization': 'Basic ' + (Buffer.from(process.env.CLIENT_ID + ':' + process.env.CLIENT_SECRET).toString('base64'))
"Content-Type": "application/x-www-form-urlencoded",
"Authorization": "Basic " + (Buffer.from(process.env.CLIENT_ID + ":" + process.env.CLIENT_SECRET).toString("base64"))
},
});
@ -16,7 +16,7 @@ const axiosInstance = axios.default.create({
baseURL: baseAPIURL,
timeout: 20000,
headers: {
'Content-Type': 'application/json'
"Content-Type": "application/json"
},
});

View File

@ -11,9 +11,9 @@ const logPrefix = "Spotify API: ";
* Spotify API - one-off request handler
* @param {typedefs.Req} req request; headers set by middleware are applied automatically (not a good approach?)
* @param {typedefs.Res} res response; failure responses are sent from here directly (not a good approach?)
* @param {import('axios').Method} method HTTP method
* @param {import("axios").Method} method HTTP method
* @param {string} path request path
* @param {import('axios').AxiosRequestConfig} config request params, headers, etc.
* @param {import("axios").AxiosRequestConfig} config request params, headers, etc.
* @param {any} data request body
* @param {boolean} inlineData true if data is to be placed inside config
*/
@ -36,7 +36,7 @@ const singleRequest = async (req, res, method, path, config = {}, data = null, i
let logMsg;
if (error.response.status >= 400 && error.response.status < 600) {
res.status(error.response.status).send(error.response.data);
logMsg = '' + error.response.status
logMsg = "" + error.response.status
}
else {
res.sendStatus(error.response.status);
@ -130,6 +130,27 @@ const removeItemsFromPlaylist = async (req, res, nextBatch, playlistID, snapshot
return res.headersSent ? null : response.data;
}
const checkPlaylistEditable = async (req, res, playlistID, userID) => {
let checkFields = ["collaborative", "owner(id)"];
const checkFromData = await getPlaylistDetailsFirstPage(req, res, checkFields.join(), playlistID);
if (res.headersSent) return false;
// https://web.archive.org/web/20241226081630/https://developer.spotify.com/documentation/web-api/concepts/playlists#:~:text=A%20playlist%20can%20also%20be%20made%20collaborative
// playlist is editable if it's collaborative (and thus private) or owned by the user
if (checkFromData.collaborative !== true &&
checkFromData.owner.id !== userID) {
res.status(403).send({
message: "You cannot edit this playlist, you must be the owner/the playlist must be collaborative",
playlistID: playlistID
});
logger.warn("user cannot edit target playlist", { playlistID: playlistID });
return false;
} else {
return true;
}
}
module.exports = {
singleRequest,
getUserProfile,
@ -139,4 +160,5 @@ module.exports = {
getPlaylistDetailsNextPage,
addItemsToPlaylist,
removeItemsFromPlaylist,
checkPlaylistEditable,
}
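
For reference, `singleRequest` documented above is the shared one-off request handler that the exported wrappers, now including `checkPlaylistEditable`, build on. A hypothetical wrapper in the same style; the actual wrapper bodies are not part of this diff, and the path and config shape are assumptions:

```javascript
// Hypothetical wrapper in the style of the exports above; not the project's
// real getUserProfile. The "/me" path and config shape are assumptions.
const fetchCurrentUser = async (req, res) => {
	// auth headers placed on the request by middleware are forwarded via config
	const response = await singleRequest(req, res, "GET", "/me", { headers: req.sessHeaders });
	// same bail-out pattern as the wrappers above: a failure response was already sent
	return res.headersSent ? null : response;
};
```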

View File

@ -11,7 +11,7 @@ const __controller_func = async (req, res) => {
} catch (error) {
res.sendStatus(500);
logger.error('__controller_func', { error });
logger.error("__controller_func", { error });
return;
}
}

View File

@ -1,4 +1,4 @@
const router = require('express').Router();
const router = require("express").Router();
const { validate } = require("../validators");

View File

@ -8,9 +8,9 @@ const typedefs = require("../typedefs");
* @param {typedefs.Next} next
*/
const __validator_func = async (req, res, next) => {
await body('field_name')
await body("field_name")
.notEmpty()
.withMessage('field_name not defined in body')
.withMessage("field_name not defined in body")
.run(req);
next();

View File

@ -2,10 +2,10 @@ const logger = require("../utils/logger")(module);
const connConfigs = {
development: {
username: process.env.DB_USER || 'postgres',
password: process.env.DB_PASSWD || '',
database: process.env.DB_NAME || 'postgres',
host: process.env.DB_HOST || '127.0.0.1',
username: process.env.DB_USER || "postgres",
password: process.env.DB_PASSWD || "",
database: process.env.DB_NAME || "postgres",
host: process.env.DB_HOST || "127.0.0.1",
port: process.env.DB_PORT || 5432,
},
staging: {
@ -21,8 +21,8 @@ const connConfigs = {
// common config
for (const conf in connConfigs) {
connConfigs[conf]['logging'] = (msg) => logger.debug(msg);
connConfigs[conf]['dialect'] = process.env.DB_DIALECT || 'postgres';
connConfigs[conf]["logging"] = (msg) => logger.debug(msg);
connConfigs[conf]["dialect"] = process.env.DB_DIALECT || "postgres";
}
module.exports = connConfigs;

View File

@ -1,19 +1,19 @@
const accountsAPIURL = 'https://accounts.spotify.com';
const baseAPIURL = 'https://api.spotify.com/v1';
const sessionName = 'spotify-manager';
const stateKey = 'spotify_auth_state';
const accountsAPIURL = "https://accounts.spotify.com";
const baseAPIURL = "https://api.spotify.com/v1";
const sessionName = "spotify-manager";
const stateKey = "spotify_auth_state";
const scopes = {
// ImageUpload: 'ugc-image-upload',
AccessPrivatePlaylists: 'playlist-read-private',
AccessCollaborativePlaylists: 'playlist-read-collaborative',
ModifyPublicPlaylists: 'playlist-modify-public',
ModifyPrivatePlaylists: 'playlist-modify-private',
// ModifyFollow: 'user-follow-modify',
AccessFollow: 'user-follow-read',
ModifyLibrary: 'user-library-modify',
AccessLibrary: 'user-library-read',
AccessUser: 'user-read-private',
// ImageUpload: "ugc-image-upload",
AccessPrivatePlaylists: "playlist-read-private",
AccessCollaborativePlaylists: "playlist-read-collaborative",
ModifyPublicPlaylists: "playlist-modify-public",
ModifyPrivatePlaylists: "playlist-modify-private",
// ModifyFollow: "user-follow-modify",
AccessFollow: "user-follow-read",
ModifyLibrary: "user-library-modify",
AccessLibrary: "user-library-read",
AccessUser: "user-read-private",
};
module.exports = {

View File

@ -1,11 +1,11 @@
const { authInstance } = require("../api/axios");
const typedefs = require("../typedefs");
const { scopes, stateKey, accountsAPIURL, sessionName } = require('../constants');
const { scopes, stateKey, accountsAPIURL, sessionName } = require("../constants");
const generateRandString = require('../utils/generateRandString');
const generateRandString = require("../utils/generateRandString");
const { getUserProfile } = require("../api/spotify");
const logger = require('../utils/logger')(module);
const logger = require("../utils/logger")(module);
/**
* Stateful redirect to Spotify login with credentials
@ -17,11 +17,11 @@ const login = (_req, res) => {
const state = generateRandString(16);
res.cookie(stateKey, state);
const scope = Object.values(scopes).join(' ');
const scope = Object.values(scopes).join(" ");
res.redirect(
`${accountsAPIURL}/authorize?` +
new URLSearchParams({
response_type: 'code',
response_type: "code",
client_id: process.env.CLIENT_ID,
scope: scope,
redirect_uri: process.env.REDIRECT_URI,
@ -31,7 +31,7 @@ const login = (_req, res) => {
return;
} catch (error) {
res.sendStatus(500);
logger.error('login', { error });
logger.error("login", { error });
return;
}
}
@ -48,12 +48,12 @@ const callback = async (req, res) => {
// check state
if (state === null || state !== storedState) {
res.redirect(409, '/');
logger.error('state mismatch');
res.redirect(409, "/");
logger.error("state mismatch");
return;
} else if (error) {
res.status(401).send("Auth callback error");
logger.error('callback error', { error });
logger.error("callback error", { error });
return;
} else {
// get auth tokens
@ -62,21 +62,21 @@ const callback = async (req, res) => {
const authForm = {
code: code,
redirect_uri: process.env.REDIRECT_URI,
grant_type: 'authorization_code'
grant_type: "authorization_code"
}
const authPayload = (new URLSearchParams(authForm)).toString();
const tokenResponse = await authInstance.post('/api/token', authPayload);
const tokenResponse = await authInstance.post("/api/token", authPayload);
if (tokenResponse.status === 200) {
logger.debug('Tokens obtained.');
logger.debug("Tokens obtained.");
req.session.accessToken = tokenResponse.data.access_token;
req.session.refreshToken = tokenResponse.data.refresh_token;
req.session.cookie.maxAge = 7 * 24 * 60 * 60 * 1000 // 1 week
} else {
logger.error('login failed', { statusCode: tokenResponse.status });
res.status(tokenResponse.status).send('Error: Login failed');
logger.error("login failed", { statusCode: tokenResponse.status });
res.status(tokenResponse.status).send("Error: Login failed");
}
const userData = await getUserProfile(req, res);
@ -94,7 +94,7 @@ const callback = async (req, res) => {
}
} catch (error) {
res.sendStatus(500);
logger.error('callback', { error });
logger.error("callback", { error });
return;
}
}
@ -108,28 +108,28 @@ const refresh = async (req, res) => {
try {
const authForm = {
refresh_token: req.session.refreshToken,
grant_type: 'refresh_token',
grant_type: "refresh_token",
}
const authPayload = (new URLSearchParams(authForm)).toString();
const response = await authInstance.post('/api/token', authPayload);
const response = await authInstance.post("/api/token", authPayload);
if (response.status === 200) {
req.session.accessToken = response.data.access_token;
req.session.refreshToken = response.data.refresh_token ?? req.session.refreshToken; // refresh token rotation
res.sendStatus(200);
logger.info(`Access token refreshed${(response.data.refresh_token !== null) ? ' and refresh token updated' : ''}.`);
logger.info(`Access token refreshed${(response.data.refresh_token !== null) ? " and refresh token updated" : ""}.`);
return;
} else {
res.status(response.status).send('Error: Refresh token flow failed.');
logger.error('refresh failed', { statusCode: response.status });
res.status(response.status).send("Error: Refresh token flow failed.");
logger.error("refresh failed", { statusCode: response.status });
return;
}
} catch (error) {
res.sendStatus(500);
logger.error('refresh', { error });
logger.error("refresh", { error });
return;
}
};
@ -155,7 +155,7 @@ const logout = async (req, res) => {
})
} catch (error) {
res.sendStatus(500);
logger.error('logout', { error });
logger.error("logout", { error });
return;
}
}
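
Taken together, the handlers above implement Spotify's authorization-code flow: login redirects with a state cookie, callback verifies state and exchanges the code for session-stored tokens, and refresh renews the access token with rotation. A rough client-side sketch; the routes are mounted at /api/auth (see index.js below), but the sub-paths and HTTP methods are not visible in this diff, so they are assumptions:

```javascript
// Assumed browser-side usage of the auth flow above. The "/login" and "/refresh"
// sub-paths and the GET method are guesses; only the /api/auth mount is shown.
// 1. Navigate to /api/auth/login: redirected to accounts.spotify.com/authorize,
//    with the same state value stored in the stateKey cookie.
// 2. Spotify redirects back to REDIRECT_URI; callback() checks state, exchanges
//    the code for tokens, and keeps them in the session (maxAge: one week).
// 3. Later, renew the access token without re-authenticating:
const refreshSession = async () => {
	const resp = await fetch("/api/auth/refresh", { credentials: "include" });
	if (!resp.ok) throw new Error(`Token refresh failed: ${resp.status}`);
};
```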

View File

@ -1,7 +1,7 @@
const typedefs = require("../typedefs");
const logger = require("../utils/logger")(module);
const { getUserPlaylistsFirstPage, getUserPlaylistsNextPage, getPlaylistDetailsFirstPage, getPlaylistDetailsNextPage, removeItemsFromPlaylist } = require("../api/spotify");
const { getUserPlaylistsFirstPage, getUserPlaylistsNextPage, getPlaylistDetailsFirstPage, getPlaylistDetailsNextPage, addItemsToPlaylist, removeItemsFromPlaylist, checkPlaylistEditable } = require("../api/spotify");
const { parseSpotifyLink } = require("../utils/spotifyURITransformer");
const myGraph = require("../utils/graph");
@ -115,12 +115,12 @@ const updateUser = async (req, res) => {
}
}
res.status(200).send({ removedLinks });
res.status(200).send({ removedLinks: removedLinks > 0 });
logger.info("Updated user data", { delLinks: removedLinks, delPls: cleanedUser, addPls: updatedUser.length });
return;
} catch (error) {
res.sendStatus(500);
logger.error('updateUser', { error });
logger.error("updateUser", { error });
return;
}
}
@ -158,7 +158,7 @@ const fetchUser = async (req, res) => {
return;
} catch (error) {
res.sendStatus(500);
logger.error('fetchUser', { error });
logger.error("fetchUser", { error });
return;
}
}
@ -248,7 +248,7 @@ const createLink = async (req, res) => {
return;
} catch (error) {
res.sendStatus(500);
logger.error('createLink', { error });
logger.error("createLink", { error });
return;
}
}
@ -314,12 +314,11 @@ const removeLink = async (req, res) => {
return;
} catch (error) {
res.sendStatus(500);
logger.error('removeLink', { error });
logger.error("removeLink", { error });
return;
}
}
/**
* Add tracks to the link-head playlist,
* that are present in the link-tail playlist but not in the link-head playlist,
@ -376,20 +375,8 @@ const populateSingleLink = async (req, res) => {
return;
}
let checkFields = ["collaborative", "owner(id)"];
const checkFromData = await getPlaylistDetailsFirstPage(req, res, checkFields.join(), fromPl.id);
if (res.headersSent) return;
// editable = collaborative || user is owner
if (checkFromData.collaborative !== true &&
checkFromData.owner.id !== uID) {
res.status(403).send({
message: "You cannot edit this playlist, you must be owner/playlist must be collaborative",
playlistID: fromPl.id
});
logger.warn("user cannot edit target playlist", { playlistID: fromPl.id });
if (!await checkPlaylistEditable(req, res, fromPl.id, uID))
return;
}
let initialFields = ["tracks(next,items(is_local,track(uri)))"];
let mainFields = ["next", "items(is_local,track(uri))"];
@ -413,7 +400,7 @@ const populateSingleLink = async (req, res) => {
// keep getting batches of 50 till exhausted
while (fromPlaylist.next) {
for (let i = 1; "next" in fromPlaylist; i++) {
const nextData = await getPlaylistDetailsNextPage(req, res, fromPlaylist.next);
if (res.headersSent) return;
@ -449,9 +436,10 @@ const populateSingleLink = async (req, res) => {
});
// keep getting batches of 50 till exhausted
while (toPlaylist.next) {
for (let i = 1; "next" in toPlaylist; i++) {
const nextData = await getPlaylistDetailsNextPage(req, res, toPlaylist.next);
if (res.headersSent) return;
toPlaylist.tracks.push(
...nextData.items.map((playlist_item) => {
return {
@ -476,22 +464,22 @@ const populateSingleLink = async (req, res) => {
const localNum = toPlaylist.tracks.filter(track => track.is_local).length;
// append to end in batches of 100
while (toTrackURIs.length) {
while (toTrackURIs.length > 0) {
const nextBatch = toTrackURIs.splice(0, 100);
const addData = await addItemsToPlaylist(req, res, nextBatch, fromPl.id);
if (res.headersSent) return;
}
res.status(201).send({
message: 'Added tracks.',
message: `Added ${toAddNum} tracks, could not add ${localNum} local files.`,
added: toAddNum,
local: localNum,
});
logger.info(`Backfilled ${result.added} tracks, could not add ${result.local} local files.`);
logger.info(`Backfilled ${toAddNum} tracks, could not add ${localNum} local files.`);
return;
} catch (error) {
res.sendStatus(500);
logger.error('populateSingleLink', { error });
logger.error("populateSingleLink", { error });
return;
}
}
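
The populate flow above boils down to a set difference followed by 100-track add batches. A compact sketch of that core step as a standalone helper, reusing the `addItemsToPlaylist` wrapper; the item shape follows the requested fields (`is_local`, `track(uri)`), since the controller's mapped shape isn't fully visible in this diff:

```javascript
// Sketch of the populate step: find tracks the head playlist is missing,
// skip local files (the Web API cannot add them by URI), append in batches.
const backfillMissingTracks = async (req, res, fromPlaylist, toPlaylist, targetPlaylistID) => {
	// URIs already present in the target playlist
	const haveURIs = new Set(fromPlaylist.tracks.map((item) => item.track.uri));
	// tail-playlist tracks the head lacks; local files are only counted, never added
	const toTrackURIs = toPlaylist.tracks
		.filter((item) => !item.is_local && !haveURIs.has(item.track.uri))
		.map((item) => item.track.uri);

	while (toTrackURIs.length > 0) {
		const nextBatch = toTrackURIs.splice(0, 100); // additions are capped at 100 per request
		await addItemsToPlaylist(req, res, nextBatch, targetPlaylistID);
		if (res.headersSent) return;
	}
};
```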
@ -548,21 +536,8 @@ const pruneSingleLink = async (req, res) => {
return
}
let checkFields = ["collaborative", "owner(id)"];
const checkToData = await getPlaylistDetailsFirstPage(req, res, checkFields.join(), toPl.id);
if (res.headersSent) return;
// editable = collaborative || user is owner
if (checkToData.collaborative !== true &&
checkToData.owner.id !== uID) {
res.status(403).send({
message: "You cannot edit this playlist, you must be owner/playlist must be collaborative",
playlistID: toPl.id
});
logger.error("user cannot edit target playlist");
if (!await checkPlaylistEditable(req, res, toPl.id, uID))
return;
}
let initialFields = ["snapshot_id", "tracks(next,items(is_local,track(uri)))"];
let mainFields = ["next", "items(is_local,track(uri))"];
@ -586,7 +561,7 @@ const pruneSingleLink = async (req, res) => {
});
// keep getting batches of 50 till exhausted
while (fromPlaylist.next) {
for (let i = 1; "next" in fromPlaylist; i++) {
const nextData = await getPlaylistDetailsNextPage(req, res, fromPlaylist.next);
if (res.headersSent) return;
@ -623,7 +598,7 @@ const pruneSingleLink = async (req, res) => {
});
// keep getting batches of 50 till exhausted
while (toPlaylist.next) {
for (let i = 1; "next" in toPlaylist; i++) {
const nextData = await getPlaylistDetailsNextPage(req, res, toPlaylist.next);
if (res.headersSent) return;
@ -651,7 +626,7 @@ const pruneSingleLink = async (req, res) => {
let indexes = indexedToTrackURIs.filter(track => !fromTrackURIs.includes(track.uri)); // only those missing from the 'from' playlist
indexes = indexes.map(track => track.position); // get track positions
const logNum = indexes.length;
const toDelNum = indexes.length;
// remove in batches of 100 (from reverse, to preserve positions while modifying)
let currentSnapshot = toPlaylist.snapshot_id;
@ -662,12 +637,12 @@ const pruneSingleLink = async (req, res) => {
currentSnapshot = delResponse.snapshot_id;
}
res.status(200).send({ message: `Removed ${logNum} tracks.` });
logger.info(`Pruned ${logNum} tracks`);
res.status(200).send({ message: `Removed ${toDelNum} tracks.` });
logger.info(`Pruned ${toDelNum} tracks`);
return;
} catch (error) {
res.sendStatus(500);
logger.error('pruneSingleLink', { error });
logger.error("pruneSingleLink", { error });
return;
}
}
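
The prune flow removes tracks by position, and as the comment above notes, deleting from the highest positions first keeps the remaining indexes valid while the playlist shrinks. A sketch of that reverse-order batching, reusing the `removeItemsFromPlaylist` wrapper and the snapshot chaining shown above; whether the controller sorts or simply iterates the index list backwards isn't visible here:

```javascript
// Sketch of reverse-order batched removal; the snapshot is re-captured after
// every delete request and fed into the next one.
const pruneByPosition = async (req, res, indexes, playlistID, snapshotID) => {
	let currentSnapshot = snapshotID;
	const descending = [...indexes].sort((a, b) => b - a); // highest positions first
	while (descending.length > 0) {
		const nextBatch = descending.splice(0, 100);          // removals capped at 100 per request
		const delResponse = await removeItemsFromPlaylist(req, res, nextBatch, playlistID, currentSnapshot);
		if (res.headersSent) return;
		currentSnapshot = delResponse.snapshot_id;            // chain the new snapshot into the next batch
	}
};
```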

View File

@ -51,11 +51,11 @@ const fetchUserPlaylists = async (req, res) => {
delete userPlaylists.next;
res.status(200).send(userPlaylists);
logger.debug("Fetched user's playlists", { num: userPlaylists.total });
logger.info("Fetched user playlists", { num: userPlaylists.total });
return;
} catch (error) {
res.sendStatus(500);
logger.error('fetchUserPlaylists', { error });
logger.error("fetchUserPlaylists", { error });
return;
}
}
@ -148,7 +148,7 @@ const fetchPlaylistDetails = async (req, res) => {
return;
} catch (error) {
res.sendStatus(500);
logger.error('getPlaylistDetails', { error });
logger.error("getPlaylistDetails", { error });
return;
}
}

View File

@ -1,25 +1,25 @@
require('dotenv-flow').config();
require("dotenv-flow").config();
const util = require('util');
const express = require('express');
const util = require("util");
const express = require("express");
const session = require("express-session");
const cors = require('cors');
const cookieParser = require('cookie-parser');
const cors = require("cors");
const cookieParser = require("cookie-parser");
const helmet = require("helmet");
const SQLiteStore = require("connect-sqlite3")(session);
const { sessionName } = require('./constants');
const { sessionName } = require("./constants");
const db = require("./models");
const { isAuthenticated } = require('./middleware/authCheck');
const { isAuthenticated } = require("./middleware/authCheck");
const logger = require("./utils/logger")(module);
const app = express();
// Enable this if you run behind a proxy (e.g. nginx)
app.set('trust proxy', process.env.TRUST_PROXY);
app.set("trust proxy", process.env.TRUST_PROXY);
// Configure SQLite store file
const sqliteStore = new SQLiteStore({
@ -35,21 +35,21 @@ app.use(session({
resave: false,
saveUninitialized: false,
cookie: {
secure: 'auto', // if true only transmit cookie over https
secure: "auto", // if true only transmit cookie over https
httpOnly: true, // if true prevent client side JS from reading the cookie
}
}));
app.use(cors());
app.use(helmet());
app.disable('x-powered-by');
app.disable("x-powered-by");
app.use(cookieParser());
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
// Static
app.use(express.static(__dirname + '/static'));
app.use(express.static(__dirname + "/static"));
// Routes
app.use("/api/auth/", require("./routes/auth"));
@ -84,6 +84,6 @@ const cleanupFunc = (signal) => {
});
}
['SIGHUP', 'SIGINT', 'SIGQUIT', 'SIGTERM', 'SIGUSR1', 'SIGUSR2'].forEach((signal) => {
["SIGHUP", "SIGINT", "SIGQUIT", "SIGTERM", "SIGUSR1", "SIGUSR2"].forEach((signal) => {
process.on(signal, () => cleanupFunc(signal));
});

View File

@ -11,8 +11,8 @@ const logger = require("../utils/logger")(module);
const isAuthenticated = (req, res, next) => {
if (req.session.accessToken) {
req.sessHeaders = {
'Authorization': `Bearer ${req.session.accessToken}`,
// 'X-RateLimit-SessID': `${req.sessionID}_${req.session.user.username}`
"Authorization": `Bearer ${req.session.accessToken}`,
// "X-RateLimit-SessID": `${req.sessionID}_${req.session.user.username}`
};
next();
} else {

View File

@ -1,8 +1,8 @@
'use strict';
/** @type {import('sequelize-cli').Migration} */
"use strict";
/** @type {import("sequelize-cli").Migration} */
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('playlists', {
await queryInterface.createTable("playlists", {
id: {
allowNull: false,
autoIncrement: true,
@ -29,6 +29,6 @@ module.exports = {
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('playlists');
await queryInterface.dropTable("playlists");
}
};

View File

@ -1,8 +1,8 @@
'use strict';
/** @type {import('sequelize-cli').Migration} */
"use strict";
/** @type {import("sequelize-cli").Migration} */
module.exports = {
async up(queryInterface, Sequelize) {
await queryInterface.createTable('links', {
await queryInterface.createTable("links", {
id: {
allowNull: false,
autoIncrement: true,
@ -29,6 +29,6 @@ module.exports = {
});
},
async down(queryInterface, Sequelize) {
await queryInterface.dropTable('links');
await queryInterface.dropTable("links");
}
};

View File

@ -20,7 +20,7 @@ if (config.use_env_variable) {
await sequelize.authenticate();
logger.info("Sequelize auth success");
} catch (error) {
logger.error("Sequelize auth error", { err });
logger.error("Sequelize auth error", { error });
throw error;
}
})();

View File

@ -1,7 +1,7 @@
'use strict';
"use strict";
const {
Model
} = require('sequelize');
} = require("sequelize");
module.exports = (sequelize, DataTypes) => {
class links extends Model {
/**
@ -19,7 +19,7 @@ module.exports = (sequelize, DataTypes) => {
to: DataTypes.STRING
}, {
sequelize,
modelName: 'links',
modelName: "links",
});
return links;
};

View File

@ -1,7 +1,7 @@
'use strict';
"use strict";
const {
Model
} = require('sequelize');
} = require("sequelize");
module.exports = (sequelize, DataTypes) => {
class playlists extends Model {
/**
@ -19,7 +19,7 @@ module.exports = (sequelize, DataTypes) => {
userID: DataTypes.STRING
}, {
sequelize,
modelName: 'playlists',
modelName: "playlists",
});
return playlists;
};

View File

@ -1,7 +1,7 @@
const router = require('express').Router();
const router = require("express").Router();
const { login, callback, refresh, logout } = require('../controllers/auth');
const { isAuthenticated } = require('../middleware/authCheck');
const { login, callback, refresh, logout } = require("../controllers/auth");
const { isAuthenticated } = require("../middleware/authCheck");
const validator = require("../validators");
router.get(

View File

@ -1,8 +1,8 @@
const router = require('express').Router();
const router = require("express").Router();
const { updateUser, fetchUser, createLink, removeLink, populateSingleLink, pruneSingleLink } = require('../controllers/operations');
const { validate } = require('../validators');
const { createLinkValidator, removeLinkValidator, populateSingleLinkValidator, pruneSingleLinkValidator } = require('../validators/operations');
const { updateUser, fetchUser, createLink, removeLink, populateSingleLink, pruneSingleLink } = require("../controllers/operations");
const { validate } = require("../validators");
const { createLinkValidator, removeLinkValidator, populateSingleLinkValidator, pruneSingleLinkValidator } = require("../validators/operations");
router.put(
"/update",

View File

@ -1,7 +1,7 @@
const router = require('express').Router();
const router = require("express").Router();
const { fetchUserPlaylists, fetchPlaylistDetails } = require('../controllers/playlists');
const { getPlaylistDetailsValidator } = require('../validators/playlists');
const { fetchUserPlaylists, fetchPlaylistDetails } = require("../controllers/playlists");
const { getPlaylistDetailsValidator } = require("../validators/playlists");
const { validate } = require("../validators");
router.get(

View File

@ -1,9 +1,9 @@
/**
* @typedef {import('module')} Module
* @typedef {import("module")} Module
*
* @typedef {import('express').Request} Req
* @typedef {import('express').Response} Res
* @typedef {import('express').NextFunction} Next
* @typedef {import("express").Request} Req
* @typedef {import("express").Response} Res
* @typedef {import("express").NextFunction} Next
*
* @typedef {{
* type: string,

View File

@ -4,8 +4,8 @@
* @return {string} The generated string
*/
module.exports = (length) => {
const possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
let text = '';
const possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
let text = "";
for (let i = 0; i < length; i++) {
text += possible.charAt(Math.floor(Math.random() * possible.length));

View File

@ -9,13 +9,13 @@ const typedefs = require("../typedefs");
*
* Example:
* ```javascript
* let nodes = ['a', 'b', 'c', 'd', 'e'];
* let nodes = ["a", "b", "c", "d", "e"];
* let edges = [
* { from: 'a', to: 'b' },
* { from: 'b', to: 'c' },
* { from: 'c', to: 'd' },
* { from: 'd', to: 'a' },
* { from: 'e', to: 'a' }
* { from: "a", to: "b" },
* { from: "b", to: "c" },
* { from: "c", to: "d" },
* { from: "d", to: "a" },
* { from: "e", to: "a" }
* ];
* let g = new myGraph(nodes, edges);
* console.log(g.detectCycle()); // true

View File

@ -5,7 +5,7 @@
* @param {string} delimiter Delimiter of final string
* @returns {string}
*/
const getNestedValuesString = (obj, delimiter = ', ') => {
const getNestedValuesString = (obj, delimiter = ", ") => {
let values = [];
for (key in obj) {
if (typeof obj[key] !== "object") {

View File

@ -1,6 +1,6 @@
const path = require("path");
const { createLogger, transports, config, format } = require('winston');
const { createLogger, transports, config, format } = require("winston");
const { combine, label, timestamp, printf, errors } = format;
const typedefs = require("../typedefs");
@ -15,8 +15,8 @@ const allowedErrorKeys = ["name", "code", "message", "stack"];
const metaFormat = (meta) => {
if (Object.keys(meta).length > 0)
return '\n' + JSON.stringify(meta, null, "\t");
return '';
return "\n" + JSON.stringify(meta, null, "\t");
return "";
}
const logFormat = printf(({ level, message, label, timestamp, ...meta }) => {
@ -28,7 +28,7 @@ const logFormat = printf(({ level, message, label, timestamp, ...meta }) => {
}
const { stack, ...rest } = meta.error;
return `${timestamp} [${label}] ${level}: ${message}${metaFormat(rest)}\n` +
`${stack ?? ''}`;
`${stack ?? ""}`;
}
return `${timestamp} [${label}] ${level}: ${message}${metaFormat(meta)}`;
});
@ -43,24 +43,24 @@ const curriedLogger = (callingModule) => {
format: combine(
errors({ stack: true }),
label({ label: getLabel(callingModule) }),
timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
timestamp({ format: "YYYY-MM-DD HH:mm:ss" }),
logFormat,
),
transports: [
new transports.Console({ level: 'info' }),
new transports.Console({ level: "info" }),
new transports.File({
filename: __dirname + '/../logs/debug.log',
level: 'debug',
filename: __dirname + "/../logs/debug.log",
level: "debug",
maxsize: 10485760,
}),
new transports.File({
filename: __dirname + '/../logs/error.log',
level: 'error',
filename: __dirname + "/../logs/error.log",
level: "error",
maxsize: 1048576,
}),
]
});
winstonLogger.on('error', (error) => winstonLogger.error("Error inside logger", { error }));
winstonLogger.on("error", (error) => winstonLogger.error("Error inside logger", { error }));
return winstonLogger;
}

View File

@ -27,8 +27,8 @@ const parseSpotifyURI = (uri) => {
}
// URL decode artist, album, and title
const artist = decodeURIComponent(idParts[0] || '');
const album = decodeURIComponent(idParts[1] || '');
const artist = decodeURIComponent(idParts[0] || "");
const album = decodeURIComponent(idParts[1] || "");
const title = decodeURIComponent(idParts[2]);
const duration = parseInt(idParts[3], 10);
@ -71,8 +71,8 @@ const parseSpotifyLink = (link) => {
}
// URL decode artist, album, and title
const artist = decodeURIComponent(matches[1] || '');
const album = decodeURIComponent(matches[2] || '');
const artist = decodeURIComponent(matches[1] || "");
const album = decodeURIComponent(matches[2] || "");
const title = decodeURIComponent(matches[3]);
const duration = parseInt(matches[4], 10);
@ -108,10 +108,10 @@ const parseSpotifyLink = (link) => {
*/
const buildSpotifyURI = (uriObj) => {
if (uriObj.is_local) {
const artist = encodeURIComponent(uriObj.artist ?? '');
const album = encodeURIComponent(uriObj.album ?? '');
const title = encodeURIComponent(uriObj.title ?? '');
const duration = uriObj.duration ? uriObj.duration.toString() : '';
const artist = encodeURIComponent(uriObj.artist ?? "");
const album = encodeURIComponent(uriObj.album ?? "");
const title = encodeURIComponent(uriObj.title ?? "");
const duration = uriObj.duration ? uriObj.duration.toString() : "";
return `spotify:local:${artist}:${album}:${title}:${duration}`;
}
return `spotify:${uriObj.type}:${uriObj.id}`;
@ -124,10 +124,10 @@ const buildSpotifyURI = (uriObj) => {
*/
const buildSpotifyLink = (uriObj) => {
if (uriObj.is_local) {
const artist = encodeURIComponent(uriObj.artist ?? '');
const album = encodeURIComponent(uriObj.album ?? '');
const title = encodeURIComponent(uriObj.title ?? '');
const duration = uriObj.duration ? uriObj.duration.toString() : '';
const artist = encodeURIComponent(uriObj.artist ?? "");
const album = encodeURIComponent(uriObj.album ?? "");
const title = encodeURIComponent(uriObj.title ?? "");
const duration = uriObj.duration ? uriObj.duration.toString() : "";
return `https://open.spotify.com/local/${artist}/${album}/${title}/${duration}`;
}
return `https://open.spotify.com/${uriObj.type}/${uriObj.id}`
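
For local files, the builders above pack artist, album, title, and duration into the URI or link path. A small illustration with a made-up track, based directly on the two functions shown:

```javascript
// Illustration only: a made-up local file run through the builders above.
const localTrack = {
	is_local: true,
	artist: "Some Artist",
	album: "Some Album",
	title: "Some Title",
	duration: 215,
};

console.log(buildSpotifyURI(localTrack));
// spotify:local:Some%20Artist:Some%20Album:Some%20Title:215
console.log(buildSpotifyLink(localTrack));
// https://open.spotify.com/local/Some%20Artist/Some%20Album/Some%20Title/215
```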

View File

@ -20,13 +20,13 @@ const validate = (req, res, next) => {
const extractedErrors = [];
errors.array().forEach(err => {
if (err.type === 'alternative') {
if (err.type === "alternative") {
err.nestedErrors.forEach(nestedErr => {
extractedErrors.push({
[nestedErr.path]: nestedErr.msg
});
});
} else if (err.type === 'field') {
} else if (err.type === "field") {
extractedErrors.push({
[err.path]: err.msg
});