Mirror of https://github.com/20kaushik02/spotify-manager.git
Synced 2025-12-06 07:54:07 +00:00

Commit e39d0381c9 (parent 17e0480f83)
refactoring some op functions, chain populating, type corrections
@@ -1,4 +1,5 @@
 // TODO: rate limit module is busted (CJS types), do something for rate limiting
+// bottleneck (https://npmjs.com/package/bottleneck) looks nice
 import axios, { type AxiosInstance } from "axios";
 import { baseAPIURL, accountsAPIURL } from "../constants.ts";
 import logger from "../utils/logger.ts";
@@ -78,7 +78,7 @@ const singleRequest = async <RespDataType>({
 if (error.response) {
 // Non 2XX response received
 message = message.concat(
-`${error.response.status} - ${error.response.data?.message}`
+`${error.response.status} - ${error.response.data?.error?.message}`
 );
 res?.status(error.response.status).send(error.response.data);
 logger.warn(message, {
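
The changed line above reads the nested error.message field rather than a top-level message. A minimal sketch of the response-body shape this implies; the exact typing is an assumption for illustration, not taken from this repo:

// assumed envelope: Spotify Web API errors arrive as { error: { status, message } }
type SpotifyErrorBody = {
  error?: {
    status: number;
    message: string;
  };
};

// with such a body, data?.error?.message is the right optional chain,
// while data?.message would always be undefined
const describeError = (data?: SpotifyErrorBody): string =>
  data?.error?.message ?? "unknown error";
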
@@ -11,12 +11,7 @@ import {
 } from "../api/spotify.ts";
 
 import type { RequestHandler } from "express";
-import type {
-EndpointHandlerWithResArgs,
-LinkModel_Edge,
-PlaylistModel_Pl,
-URIObject,
-} from "spotify_manager/index.d.ts";
+import type { EndpointHandlerWithResArgs } from "spotify_manager/index.d.ts";
 
 import seqConn from "../models/index.ts";
 
@@ -44,7 +39,8 @@ const updateUser: RequestHandler = async (req, res) => {
 throw new ReferenceError("session does not have auth headers");
 const uID = req.session.user.id;
 
-let currentPlaylists: PlaylistModel_Pl[] = [];
+type PlaylistCore = { playlistID: string; playlistName: string };
+let currentPlaylists: PlaylistCore[] = [];
 
 // get first 50
 const { resp } = await getCurrentUsersPlaylistsFirstPage({
@@ -84,7 +80,7 @@ const updateUser: RequestHandler = async (req, res) => {
 nextURL = nextData.next;
 }
 
-let oldPlaylists = await Playlists.findAll({
+let oldPlaylists: PlaylistCore[] = await Playlists.findAll({
 attributes: ["playlistID", "playlistName"],
 raw: true,
 where: {
@@ -92,8 +88,8 @@ const updateUser: RequestHandler = async (req, res) => {
 },
 });
 
-const deleted: PlaylistModel_Pl[] = [];
-const added: PlaylistModel_Pl[] = [];
+const deleted: PlaylistCore[] = [];
+const added: PlaylistCore[] = [];
 const renamed: { playlistID: string; oldName: string; newName: string }[] =
 [];
 
@@ -286,11 +282,11 @@ const createLink: RequestHandler = async (req, res) => {
 return null;
 }
 
-const playlists = (await Playlists.findAll({
+const playlists = await Playlists.findAll({
 attributes: ["playlistID"],
 raw: true,
 where: { userID: uID },
-})) as unknown as PlaylistModel_Pl[];
+});
 const playlistIDs = playlists.map((pl) => pl.playlistID);
 
 // if playlists are unknown
@@ -312,11 +308,11 @@ const createLink: RequestHandler = async (req, res) => {
 return null;
 }
 
-const allLinks = (await Links.findAll({
+const allLinks = await Links.findAll({
 attributes: ["from", "to"],
 raw: true,
 where: { userID: uID },
-})) as unknown as LinkModel_Edge[];
+});
 
 const newGraph = new myGraph(playlistIDs, [
 ...allLinks,
@@ -413,58 +409,66 @@ const removeLink: RequestHandler = async (req, res) => {
 }
 };
 
+type _TrackObj = { is_local: boolean; uri: string };
 interface _GetPlaylistTracksArgs extends EndpointHandlerWithResArgs {
 playlistID: string;
 }
 interface _GetPlaylistTracks {
-tracks: {
-is_local: boolean;
-uri: string;
-}[];
+tracks: _TrackObj[];
 snapshotID: string;
 }
 const _getPlaylistTracks: (
 opts: _GetPlaylistTracksArgs
 ) => Promise<_GetPlaylistTracks | null> = async ({
-authHeaders,
 res,
+authHeaders,
 playlistID,
 }) => {
-let initialFields = ["snapshot_id,tracks(next,items(is_local,track(uri)))"];
-let mainFields = ["next", "items(is_local,track(uri))"];
-
-const { resp } = await getPlaylistDetailsFirstPage({
-authHeaders,
+// TODO: type this to indicate that only the requested fields are present
+const { resp: snapshotResp } = await getPlaylistDetailsFirstPage({
 res,
-initialFields: initialFields.join(),
+authHeaders,
+initialFields: "snapshot_id",
 playlistID,
 });
-if (!resp) return null;
-const respData = resp.data;
+if (!snapshotResp) return null;
+const currentSnapshotID = snapshotResp.data.snapshot_id;
 
 // check cache
 const cachedSnapshotID = await redisClient.get(
 "playlist_snapshot:" + playlistID
 );
-if (cachedSnapshotID === respData.snapshot_id) {
+if (cachedSnapshotID === currentSnapshotID) {
 const cachedTracksData = (await redisClient.json.get(
 "playlist_tracks:" + playlistID
-)) as _GetPlaylistTracks["tracks"];
+)) as _TrackObj[];
 return { tracks: cachedTracksData, snapshotID: cachedSnapshotID };
 }
+let firstPageFields = ["tracks(next,items(is_local,track(uri)))"];
+let mainFields = ["next", "items(is_local,track(uri))"];
 
+const { resp: firstResp } = await getPlaylistDetailsFirstPage({
+res,
+authHeaders,
+initialFields: firstPageFields.join(),
+playlistID,
+});
+if (!firstResp) return null;
+const firstRespData = firstResp.data;
+
 const pl: _GetPlaylistTracks = {
 tracks: [],
-snapshotID: respData.snapshot_id,
+snapshotID: currentSnapshotID,
 };
 let nextURL;
 
-if (respData.tracks.next) {
-nextURL = new URL(respData.tracks.next);
+if (firstRespData.tracks.next) {
+nextURL = new URL(firstRespData.tracks.next);
 nextURL.searchParams.set("fields", mainFields.join());
 nextURL = nextURL.href;
 }
-pl.tracks = respData.tracks.items.map((playlist_item) => {
+pl.tracks = firstRespData.tracks.items.map((playlist_item) => {
 return {
 is_local: playlist_item.is_local,
 uri: playlist_item.track.uri,
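
The refactor above splits the old single fields request into two calls: a cheap snapshot_id-only request, and the full tracks request only on a cache miss. A minimal sketch of that cache-aside pattern; CacheLike and the fetcher parameters are stand-ins, not the repo's actual redisClient or getPlaylistDetailsFirstPage signatures:

interface CacheLike {
  get(key: string): Promise<string | null>;
  set(key: string, value: string): Promise<void>;
}

const getTracksWithSnapshotCache = async (
  playlistID: string,
  cache: CacheLike,
  fetchSnapshotID: (id: string) => Promise<string>,
  fetchTracks: (id: string) => Promise<string[]>
): Promise<string[]> => {
  // 1. cheap request: only the snapshot_id
  const current = await fetchSnapshotID(playlistID);
  // 2. if the cached snapshot matches, the playlist has not changed
  const cached = await cache.get("playlist_snapshot:" + playlistID);
  if (cached === current) {
    return JSON.parse((await cache.get("playlist_tracks:" + playlistID)) ?? "[]");
  }
  // 3. otherwise do the expensive paged fetch and refresh the cache
  const tracks = await fetchTracks(playlistID);
  await cache.set("playlist_snapshot:" + playlistID, current);
  await cache.set("playlist_tracks:" + playlistID, JSON.stringify(tracks));
  return tracks;
};
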
@@ -494,21 +498,35 @@ const _getPlaylistTracks: (
 }
 
 // cache new data
-await redisClient.set(
-"playlist_snapshot:" + playlistID,
-respData.snapshot_id
-);
+await redisClient.set("playlist_snapshot:" + playlistID, currentSnapshotID);
 await redisClient.json.set("playlist_tracks:" + playlistID, "$", pl.tracks);
 
 return pl;
 };
 
-interface _PopulateSingleLinkCoreArgs extends EndpointHandlerWithResArgs {
-link: {
-from: URIObject;
-to: URIObject;
-};
+interface _TrackFilterArgs {
+/** link head playlist */
+from: _TrackObj[];
+/** link tail playlist */
+to: _TrackObj[];
 }
+type _PopulateFilter = { missing: string[]; localNum: number };
+const _populateSingleLinkCore: (opts: _TrackFilterArgs) => _PopulateFilter = ({
+from,
+to,
+}) => {
+const fromTrackURIs = from.map((track) => track.uri);
+let toTrackURIs = to
+.filter((track) => !track.is_local) // API doesn't support adding local files to playlists yet
+.filter((track) => !fromTrackURIs.includes(track.uri)) // only ones missing from the 'from' playlist
+.map((track) => track.uri);
+
+return {
+missing: toTrackURIs,
+localNum: to.filter((track) => track.is_local).length,
+};
+};
 
 /**
 * Add tracks to the link-head playlist,
 * that are present in the link-tail playlist but not in the link-head playlist,
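
With the playlist fetching pulled out, _populateSingleLinkCore is now a pure function over two track arrays, which makes it easy to exercise in isolation. A small usage sketch, assuming the function is in scope in the same module; the sample data is made up:

const fromTracks = [{ is_local: false, uri: "spotify:track:aaa" }];
const toTracks = [
  { is_local: false, uri: "spotify:track:aaa" }, // already in 'from'
  { is_local: false, uri: "spotify:track:bbb" }, // missing, should be reported
  { is_local: true, uri: "local:track:ccc" },    // local, counted but never added
];

const result = _populateSingleLinkCore({ from: fromTracks, to: toTracks });
// expected: { missing: ["spotify:track:bbb"], localNum: 1 }
console.log(result);
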
@@ -526,59 +544,6 @@ interface _PopulateSingleLinkCoreArgs extends EndpointHandlerWithResArgs {
 *
 * CANNOT populate local files; Spotify API does not support it yet.
 */
-const _populateSingleLinkCore: (opts: _PopulateSingleLinkCoreArgs) => Promise<{
-toAddNum: number;
-addedNum: number;
-localNum: number;
-} | null> = async ({ res, authHeaders, link }) => {
-try {
-const fromPl = link.from,
-toPl = link.to;
-
-const fromPlaylist = await _getPlaylistTracks({
-res,
-authHeaders,
-playlistID: fromPl.id,
-});
-if (!fromPlaylist) return null;
-
-const toPlaylist = await _getPlaylistTracks({
-res,
-authHeaders,
-playlistID: toPl.id,
-});
-if (!toPlaylist) return null;
-
-const fromTrackURIs = fromPlaylist.tracks.map((track) => track.uri);
-let toTrackURIs = toPlaylist.tracks
-.filter((track) => !track.is_local) // API doesn't support adding local files to playlists yet
-.filter((track) => !fromTrackURIs.includes(track.uri)) // only ones missing from the 'from' playlist
-.map((track) => track.uri);
-
-const toAddNum = toTrackURIs.length;
-const localNum = toPlaylist.tracks.filter((track) => track.is_local).length;
-let addedNum = 0;
-
-// append to end in batches of 100
-while (toTrackURIs.length > 0) {
-const nextBatch = toTrackURIs.splice(0, 100);
-const { resp } = await addItemsToPlaylist({
-authHeaders,
-nextBatch,
-playlistID: fromPl.id,
-});
-if (!resp) break;
-addedNum += nextBatch.length;
-}
-
-return { toAddNum, addedNum, localNum };
-} catch (error) {
-res.status(500).send({ message: "Internal Server Error" });
-logger.error("_populateSingleLinkCore", { error });
-return null;
-}
-};
-
 const populateSingleLink: RequestHandler = async (req, res) => {
 try {
 if (!req.session.user)
@@ -628,13 +593,38 @@ const populateSingleLink: RequestHandler = async (req, res) => {
 )
 return null;
 
-const result = await _populateSingleLinkCore({
-authHeaders,
+const fromTracks = await _getPlaylistTracks({
 res,
-link: { from: fromPl, to: toPl },
+authHeaders,
+playlistID: fromPl.id,
 });
-if (result) {
-const { toAddNum, addedNum, localNum } = result;
+if (!fromTracks) return null;
+const toTracks = await _getPlaylistTracks({
+res,
+authHeaders,
+playlistID: toPl.id,
+});
+if (!toTracks) return null;
+
+const { missing, localNum } = _populateSingleLinkCore({
+from: fromTracks.tracks,
+to: toTracks.tracks,
+});
+const toAddNum = missing.length;
+
+// add in batches of 100
+let addedNum = 0;
+while (missing.length > 0) {
+const nextBatch = missing.splice(0, 100);
+const { resp } = await addItemsToPlaylist({
+authHeaders,
+nextBatch,
+playlistID: fromPl.id,
+});
+if (!resp) break;
+addedNum += nextBatch.length;
+}
+
 let message;
 message =
 toAddNum > 0 ? "Added " + addedNum + " tracks" : "No tracks to add";
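
Both the removed core and the new inline loop add tracks in batches of 100 by splicing from the front of the URI list, presumably because the Spotify add-items endpoint accepts at most 100 URIs per request. A generic sketch of that pattern; sendBatch is a hypothetical stand-in for addItemsToPlaylist:

const addInBatches = async (
  uris: string[],
  sendBatch: (batch: string[]) => Promise<boolean>,
  batchSize = 100
): Promise<number> => {
  let added = 0;
  // splice mutates uris, consuming it from the front, like the loop above
  while (uris.length > 0) {
    const batch = uris.splice(0, batchSize);
    const ok = await sendBatch(batch);
    if (!ok) break; // stop on the first failed request, report partial progress
    added += batch.length;
  }
  return added;
};
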
@@ -645,8 +635,7 @@ const populateSingleLink: RequestHandler = async (req, res) => {
 message += localNum > 0 ? ", skipped " + localNum + " local files" : ".";
 
 res.status(200).send({ message, toAddNum, addedNum, localNum });
-logger.debug(message, { toAddNum, localNum });
-}
+logger.debug(message, { toAddNum, addedNum, localNum });
 return null;
 } catch (error) {
 res.status(500).send({ message: "Internal Server Error" });
@@ -655,9 +644,148 @@ const populateSingleLink: RequestHandler = async (req, res) => {
 }
 };
 
-interface _PruneSingleLinkCoreArgs extends EndpointHandlerWithResArgs {
-link: { from: URIObject; to: URIObject };
-}
+const populateChain: RequestHandler = async (req, res) => {
+try {
+if (!req.session.user)
+throw new ReferenceError("session does not have user object");
+const uID = req.session.user.id;
+const { authHeaders } = req.session;
+if (!authHeaders)
+throw new ReferenceError("session does not have auth headers");
+
+const { root } = req.body;
+let rootPl;
+try {
+rootPl = parseSpotifyLink(root);
+if (rootPl.type !== "playlist") {
+res.status(400).send({ message: "Link is not a playlist" });
+logger.info("non-playlist link provided", root);
+return null;
+}
+} catch (error) {
+res.status(400).send({ message: "Could not parse link" });
+logger.warn("parseSpotifyLink", { error });
+return null;
+}
+
+const playlists = await Playlists.findAll({
+attributes: ["playlistID"],
+raw: true,
+where: { userID: uID },
+});
+const playlistIDs = playlists.map((pl) => pl.playlistID);
+
+const allLinks = await Links.findAll({
+attributes: ["from", "to"],
+raw: true,
+where: { userID: uID },
+});
+
+// current idea: only add from the root, don't ripple-propagate
+// for bulk opn, this should be sufficient if this method of
+// chain populating is applied to every leaf node
+// (although that's a challenge of its own)
+const newGraph = new myGraph(playlistIDs, allLinks);
+const affectedPlaylists = newGraph.getAllHeads(rootPl.id);
+const affectedPlaylistsTracks = await Promise.all(
+affectedPlaylists.map((pl) => {
+return _getPlaylistTracks({ res, authHeaders, playlistID: pl });
+})
+);
+if (affectedPlaylistsTracks.some((plTracks) => !plTracks)) return null;
+
+const rootTracks = await _getPlaylistTracks({
+res,
+authHeaders,
+playlistID: rootPl.id,
+});
+if (!rootTracks) return null;
+
+const populateData = affectedPlaylistsTracks.map((plTracks) => {
+return _populateSingleLinkCore({
+from: plTracks!.tracks, // how to have the .some check recognized by typescript?
+to: rootTracks.tracks,
+});
+});
+
+// is map the best way to do this?
+// or should i use a for loop and break on error?
+const populateResult = await Promise.all(
+populateData.map(async ({ missing, localNum }, index) => {
+const toAddNum = missing.length;
+const playlistID = affectedPlaylists[index]!; // ...
+let addedNum = 0;
+while (missing.length > 0) {
+const nextBatch = missing.splice(0, 100);
+const { resp } = await addItemsToPlaylist({
+authHeaders,
+nextBatch,
+playlistID,
+});
+if (!resp) break;
+addedNum += nextBatch.length;
+}
+return { playlistID, toAddNum, addedNum, localNum };
+})
+);
+
+const reducedResult = populateResult.reduce(
+(acc, curr) => {
+return {
+toAddNum: acc.toAddNum + curr.toAddNum,
+addedNum: acc.addedNum + curr.addedNum,
+localNum: acc.localNum + curr.localNum,
+};
+},
+{ toAddNum: 0, addedNum: 0, localNum: 0 }
+);
+
+let message;
+message = `There are ${populateResult.length} playlists up the chain.`;
+message +=
+reducedResult.toAddNum > 0
+? " Added " + reducedResult.addedNum + " tracks"
+: " No tracks to add";
+message +=
+reducedResult.addedNum < reducedResult.toAddNum
+? ", failed to add " +
+(reducedResult.toAddNum - reducedResult.addedNum) +
+" tracks"
+: "";
+message +=
+reducedResult.localNum > 0
+? ", skipped " + reducedResult.localNum + " local files"
+: ".";
+
+res.status(200).send({ message, ...reducedResult });
+logger.debug(message, { ...reducedResult });
+return null;
+} catch (error) {
+res.status(500).send({ message: "Internal Server Error" });
+logger.error("populateChain", { error });
+return null;
+}
+};
+
+type _PruneFilter = { missingPositions: number[] };
+const _pruneSingleLinkCore: (opts: _TrackFilterArgs) => _PruneFilter = ({
+from,
+to,
+}) => {
+const fromTrackURIs = from.map((track) => track.uri);
+const indexedToTrackURIs = to.map((track, index) => {
+return { ...track, position: index };
+});
+
+let indexes = indexedToTrackURIs
+.filter((track) => !fromTrackURIs.includes(track.uri)) // only those missing from the 'from' playlist
+.map((track) => track.position); // get track positions
+
+return {
+missingPositions: indexes,
+};
+};
+
 /**
 * Remove tracks from the link-tail playlist,
 * that are present in the link-tail playlist but not in the link-head playlist.
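
The plTracks!.tracks non-null assertion in populateChain works around the fact that a .some(...) check on the array does not narrow its element type. One way to avoid the assertion, sketched here rather than taken from the commit, is a small helper that narrows with a type predicate:

// generic helper: narrow (T | null)[] to T[], or signal that something was null
const allOrNull = <T>(items: (T | null)[]): T[] | null => {
  const present = items.filter((item): item is T => item !== null);
  return present.length === items.length ? present : null;
};

// usage sketch inside populateChain, replacing the .some check:
// const tracksList = allOrNull(affectedPlaylistsTracks);
// if (!tracksList) return null;
// tracksList[0].tracks is now typed without the `!` assertion
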
@@ -673,66 +801,6 @@ interface _PruneSingleLinkCoreArgs extends EndpointHandlerWithResArgs {
 * after pruneSingleLink, pl_b will have tracks: b, c
 *
 */
-const _pruneSingleLinkCore: (
-opts: _PruneSingleLinkCoreArgs
-) => Promise<{ toDelNum: number; deletedNum: number } | null> = async ({
-authHeaders,
-res,
-link,
-}) => {
-try {
-const fromPl = link.from,
-toPl = link.to;
-
-const fromPlaylist = await _getPlaylistTracks({
-authHeaders,
-res,
-playlistID: fromPl.id,
-});
-if (!fromPlaylist) return null;
-
-const toPlaylist = await _getPlaylistTracks({
-authHeaders,
-res,
-playlistID: toPl.id,
-});
-if (!toPlaylist) return null;
-
-const fromTrackURIs = fromPlaylist.tracks.map((track) => track.uri);
-const indexedToTrackURIs = toPlaylist.tracks.map((track, index) => {
-return { ...track, position: index };
-});
-
-let indexes = indexedToTrackURIs
-.filter((track) => !fromTrackURIs.includes(track.uri)) // only those missing from the 'from' playlist
-.map((track) => track.position); // get track positions
-
-const toDelNum = indexes.length;
-let deletedNum = 0;
-
-// remove in batches of 100 (from reverse, to preserve positions while modifying)
-let currentSnapshot = toPlaylist.snapshotID;
-while (indexes.length > 0) {
-const nextBatch = indexes.splice(Math.max(indexes.length - 100, 0), 100);
-const { resp } = await removePlaylistItems({
-authHeaders,
-nextBatch,
-playlistID: toPl.id,
-snapshotID: currentSnapshot,
-});
-if (!resp) break;
-deletedNum += nextBatch.length;
-currentSnapshot = resp.data.snapshot_id;
-}
-
-return { toDelNum, deletedNum };
-} catch (error) {
-res.status(500).send({ message: "Internal Server Error" });
-logger.error("_pruneSingleLinkCore", { error });
-return null;
-}
-};
-
 const pruneSingleLink: RequestHandler = async (req, res) => {
 try {
 if (!req.session.user)
@@ -782,16 +850,46 @@ const pruneSingleLink: RequestHandler = async (req, res) => {
 )
 return null;
 
-const result = await _pruneSingleLinkCore({
-authHeaders,
+const fromTracks = await _getPlaylistTracks({
 res,
-link: {
-from: fromPl,
-to: toPl,
-},
+authHeaders,
+playlistID: fromPl.id,
 });
-if (result) {
-const { toDelNum, deletedNum } = result;
+if (!fromTracks) return null;
+const toTracks = await _getPlaylistTracks({
+res,
+authHeaders,
+playlistID: toPl.id,
+});
+if (!toTracks) return null;
+
+const { missingPositions } = _pruneSingleLinkCore({
+from: fromTracks.tracks,
+to: toTracks.tracks,
+});
+
+const toDelNum = missingPositions.length;
+let deletedNum = 0;
+
+// remove in batches of 100 (from reverse, to preserve positions while modifying)
+let currentSnapshot = toTracks.snapshotID;
+while (missingPositions.length > 0) {
+const nextBatch = missingPositions.splice(
+Math.max(missingPositions.length - 100, 0),
+100
+);
+const { resp } = await removePlaylistItems({
+authHeaders,
+nextBatch,
+playlistID: toPl.id,
+snapshotID: currentSnapshot,
+});
+if (!resp) break;
+deletedNum += nextBatch.length;
+currentSnapshot = resp.data.snapshot_id;
+}
+
 let message;
 message =
 toDelNum > 0
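
The prune loop takes each batch from the end of the position list so that earlier positions stay valid while the playlist shrinks, and it threads the snapshot_id returned by each removal into the next call so every batch targets a consistent revision. A small worked sketch of why the batch comes from the back; the values are made up for illustration:

// ascending track positions to delete
const positions = [2, 5, 9];
// take the last batch first (batch size 2 here, just for the example)
const batch = positions.splice(Math.max(positions.length - 2, 0), 2); // [5, 9]
// after deleting positions 5 and 9, position 2 still points at the same track;
// deleting position 2 first would have shifted 5 and 9 down to 4 and 8
console.log(batch, positions); // [5, 9] [2]
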
@@ -804,7 +902,6 @@ const pruneSingleLink: RequestHandler = async (req, res) => {
 
 res.status(200).send({ message, toDelNum, deletedNum });
 logger.debug(message, { toDelNum, deletedNum });
-}
 return null;
 } catch (error) {
 res.status(500).send({ message: "Internal Server Error" });
@@ -819,5 +916,6 @@ export {
 createLink,
 removeLink,
 populateSingleLink,
+populateChain,
 pruneSingleLink,
 };
@@ -8,13 +8,9 @@ import {
 removeLink,
 populateSingleLink,
 pruneSingleLink,
+populateChain,
 } from "../controllers/operations.ts";
-import {
-createLinkValidator,
-removeLinkValidator,
-populateSingleLinkValidator,
-pruneSingleLinkValidator,
-} from "../validators/operations.ts";
+import { linkValidator, nodeValidator } from "../validators/operations.ts";
 
 import { validate } from "../validators/index.ts";
 
@@ -22,22 +18,13 @@ opRouter.put("/update", updateUser);
 
 opRouter.get("/fetch", fetchUser);
 
-opRouter.post("/link", createLinkValidator, validate, createLink);
+opRouter.post("/link", linkValidator, validate, createLink);
 
-opRouter.delete("/link", removeLinkValidator, validate, removeLink);
+opRouter.delete("/link", linkValidator, validate, removeLink);
 
-opRouter.put(
-"/populate/link",
-populateSingleLinkValidator,
-validate,
-populateSingleLink
-);
+opRouter.put("/populate/link", linkValidator, validate, populateSingleLink);
+opRouter.put("/populate/chain", nodeValidator, validate, populateChain);
 
-opRouter.put(
-"/prune/link",
-pruneSingleLinkValidator,
-validate,
-pruneSingleLink
-);
+opRouter.put("/prune/link", linkValidator, validate, pruneSingleLink);
 
 export default opRouter;
@@ -13,16 +13,3 @@ export type User = {
 username: string;
 id: string;
 };
-
-export interface PlaylistModel_Pl {
-playlistID: string;
-playlistName: string;
-}
-export interface PlaylistModel extends PlaylistModel_Pl {
-userID: string;
-}
-
-export interface LinkModel_Edge {
-from: string;
-to: string;
-}
@@ -1,25 +1,30 @@
 import { body } from "express-validator";
 import type { RequestHandler } from "express";
 
-const createLinkValidator: RequestHandler = async (req, _res, next) => {
+const linkValidator: RequestHandler = async (req, _res, next) => {
 await body("from")
 .notEmpty()
 .withMessage("from not defined in body")
 .isURL()
-.withMessage("from must be a valid link")
+.withMessage("from must be a valid playlist link")
 .run(req);
 await body("to")
 .notEmpty()
 .withMessage("to not defined in body")
 .isURL()
-.withMessage("to must be a valid link")
+.withMessage("to must be a valid playlist link")
 .run(req);
 next();
 };
 
-export {
-createLinkValidator,
-createLinkValidator as removeLinkValidator,
-createLinkValidator as populateSingleLinkValidator,
-createLinkValidator as pruneSingleLinkValidator,
+const nodeValidator: RequestHandler = async (req, _res, next) => {
+await body("root")
+.notEmpty()
+.withMessage("root not defined in body")
+.isURL()
+.withMessage("root must be a valid playlist link")
+.run(req);
+next();
 };
+
+export { linkValidator, nodeValidator };
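
The consolidated linkValidator/nodeValidator pair relies on a shared validate middleware downstream (imported from ../validators/index.ts in the routes hunk above). A typical shape for such a middleware, assuming it collects errors with express-validator's validationResult; the repo's actual implementation is not part of this diff:

import { validationResult } from "express-validator";
import type { RequestHandler } from "express";

// assumed sketch of a validate middleware, not taken from this commit
const validate: RequestHandler = (req, res, next) => {
  const errors = validationResult(req);
  if (!errors.isEmpty()) {
    // reject the request with the collected field errors
    res.status(400).send({ errors: errors.array() });
    return;
  }
  next();
};

export { validate };
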