mirror of
https://github.com/snobu/destreamer.git
synced 2026-01-24 17:02:20 +00:00
Major code refactoring (#164)
* Added Chromium caching of identity provider cookies * Moved token expiry check in standalone method * Created refreshSession function * Session is now refreshed if the token expires * Linting fixes * Removed debug console.log() * Added CC support * Created function to prompt user for download parameters (interactive mode) * Fix data folder for puppeteer * Fixed multiple session error * Fix token expire time * Moved session refreshing to a more sensible place * Changed Metadata name to Video (to better reflect the data structure) * Complete CLI refactoring * Removed useless sleep function * Added outDir check from CLI * Complete input parsing refactoring (both inline and file) * Fixed and improved tests to work with the new input parsing * Moved and improved output path generation to videoUtils * Main code refactoring, added outpath to video type * Minor changes in spacing and type definition style * Updated readme after code refactoring * Fix if inputFile doesn't start with url on line 1 * Minor naming change * Use module 'winston' for logging * Created logger, changed all console.log and similar to use the logger * Added verbose logging, changed posterUrl property name on Video type * Moved GUID extraction to input parsing * Added support for group links * Fixed test after last input parsing update * Removed debug process.exit() * Changed from desc to asc order for group videos * Updated test to reflect GUIDs output after parsing * Added couple of comments and restyled some imports * More readable verbose GUIDs logging * Removed unused errors * Temporary fix for timeout not working in ApiClient * Explicit class member accessibility * Defined array naming schema to be Array<T> * Defined type/interface schema to be type only * A LOT of type definitions
This commit is contained in:
281
src/Utils.ts
281
src/Utils.ts
@@ -1,152 +1,193 @@
|
||||
import { ApiClient } from './ApiClient';
|
||||
import { ERROR_CODE } from './Errors';
|
||||
import { logger } from './Logger';
|
||||
import { Session } from './Types';
|
||||
|
||||
import { AxiosResponse } from 'axios';
|
||||
import { execSync } from 'child_process';
|
||||
import colors from 'colors';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
function sanitizeUrls(urls: string[]) {
|
||||
// eslint-disable-next-line
|
||||
const rex = new RegExp(/(?:https:\/\/)?.*\/video\/[a-z0-9]{8}-(?:[a-z0-9]{4}\-){3}[a-z0-9]{12}$/, 'i');
|
||||
const sanitized: string[] = [];
|
||||
|
||||
for (let i = 0, l = urls.length; i < l; ++i) {
|
||||
let url = urls[i].split('?')[0];
|
||||
async function extractGuids(url: string, client: ApiClient): Promise<Array<string> | null> {
|
||||
|
||||
if (!rex.test(url)) {
|
||||
if (url !== '') {
|
||||
console.warn(colors.yellow('Invalid URL at line ' + (i + 1) + ', skip..'));
|
||||
}
|
||||
const videoRegex = new RegExp(/https:\/\/.*\/video\/(\w{8}-(?:\w{4}-){3}\w{12})/);
|
||||
const groupRegex = new RegExp(/https:\/\/.*\/group\/(\w{8}-(?:\w{4}-){3}\w{12})/);
|
||||
|
||||
const videoMatch: RegExpExecArray | null = videoRegex.exec(url);
|
||||
const groupMatch: RegExpExecArray | null = groupRegex.exec(url);
|
||||
|
||||
if (videoMatch) {
|
||||
return [videoMatch[1]];
|
||||
}
|
||||
else if (groupMatch) {
|
||||
const videoNumber: number = await client.callApi(`groups/${groupMatch[1]}`, 'get')
|
||||
.then((response: AxiosResponse<any> | undefined) => response?.data.metrics.videos);
|
||||
|
||||
let result: Array<string> = await client.callApi(`groups/${groupMatch[1]}/videos?$top=${videoNumber}&$orderby=publishedDate asc`, 'get')
|
||||
.then((response: AxiosResponse<any> | undefined) => response?.data.value.map((item: any) => item.id));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse the list of url given by the user via console input.
|
||||
* They can either be video urls or group urls, in which case the guids
|
||||
* will be added from oldest to newest.
|
||||
*
|
||||
* @param {Array<string>} urlList list of link to parse
|
||||
 * @param {string} defaultOutDir the directory used to save the videos
|
||||
* @param {Session} session used to call the API to get the GUIDs from group links
|
||||
*
|
||||
* @returns Array of 2 elements, 1st one being the GUIDs array, 2nd one the output directories array
|
||||
*/
|
||||
export async function parseCLIinput(urlList: Array<string>, defaultOutDir: string,
|
||||
session: Session): Promise<Array<Array<string>>> {
|
||||
|
||||
const apiClient: ApiClient = ApiClient.getInstance(session);
|
||||
let guidList: Array<string> = [];
|
||||
|
||||
for (const url of urlList) {
|
||||
const guids: Array<string> | null = await extractGuids(url, apiClient);
|
||||
|
||||
if (guids) {
|
||||
guidList.push(...guids);
|
||||
}
|
||||
else {
|
||||
logger.warn(`Invalid url '${url}', skipping..`);
|
||||
}
|
||||
}
|
||||
|
||||
const outDirList: Array<string> = Array(guidList.length).fill(defaultOutDir);
|
||||
|
||||
return [guidList, outDirList];
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse the input text file.
|
||||
* The urls in the file can either be video urls or group urls, in which case the guids
|
||||
* will be added from oldest to newest.
|
||||
*
|
||||
* @param {string} inputFile path to the text file
|
||||
* @param {string} defaultOutDir the default/fallback directory used to save the videos
|
||||
* @param {Session} session used to call the API to get the GUIDs from group links
|
||||
*
|
||||
* @returns Array of 2 elements, 1st one being the GUIDs array, 2nd one the output directories array
|
||||
*/
|
||||
export async function parseInputFile(inputFile: string, defaultOutDir: string,
|
||||
session: Session): Promise<Array<Array<string>>> {
|
||||
// rawContent is a list of each line of the file
|
||||
const rawContent: Array<string> = fs.readFileSync(inputFile).toString()
|
||||
.split(/\r?\n/);
|
||||
const apiClient: ApiClient = ApiClient.getInstance(session);
|
||||
|
||||
let guidList: Array<string> = [];
|
||||
let outDirList: Array<string> = [];
|
||||
// if the last line was an url set this
|
||||
let foundUrl = false;
|
||||
|
||||
for (let i = 0; i < rawContent.length; i++) {
|
||||
const line: string = rawContent[i];
|
||||
|
||||
// filter out lines with no content
|
||||
if (!line.match(/\S/)) {
|
||||
logger.warn(`Line ${i + 1} is empty, skipping..`);
|
||||
continue;
|
||||
}
|
||||
// parse if line is option
|
||||
else if (line.includes('-dir')) {
|
||||
if (foundUrl) {
|
||||
let outDir: string | null = parseOption('-dir', line);
|
||||
|
||||
if (url.substring(0, 8) !== 'https://') {
|
||||
url = 'https://' + url;
|
||||
}
|
||||
if (outDir && checkOutDir(outDir)) {
|
||||
outDirList.push(...Array(guidList.length - outDirList.length)
|
||||
.fill(outDir));
|
||||
}
|
||||
else {
|
||||
outDirList.push(...Array(guidList.length - outDirList.length)
|
||||
.fill(defaultOutDir));
|
||||
}
|
||||
|
||||
sanitized.push(url);
|
||||
}
|
||||
|
||||
if (!sanitized.length) {
|
||||
process.exit(ERROR_CODE.INVALID_INPUT_URLS);
|
||||
}
|
||||
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
function sanitizeOutDirsList(dirsList: string[]) {
|
||||
const sanitized: string[] = [];
|
||||
|
||||
dirsList.forEach(dir => {
|
||||
if (dir !== '') {
|
||||
sanitized.push(dir);
|
||||
}
|
||||
});
|
||||
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
function readFileToArray(path: string) {
|
||||
return fs.readFileSync(path).toString('utf-8').split(/[\r\n]/);
|
||||
}
|
||||
|
||||
export async function forEachAsync(array: any, callback: any) {
|
||||
for (let i = 0, l = array.length; i < l; ++i) {
|
||||
await callback(array[i], i, array);
|
||||
}
|
||||
}
|
||||
|
||||
export function parseVideoUrls(videoUrls: any) {
|
||||
let input = videoUrls[0] as string;
|
||||
const isPath = input.substring(input.length - 4) === '.txt';
|
||||
let urls: string[];
|
||||
|
||||
if (isPath) {
|
||||
urls = readFileToArray(input);
|
||||
}
|
||||
else {
|
||||
urls = videoUrls as string[];
|
||||
}
|
||||
|
||||
return sanitizeUrls(urls);
|
||||
}
|
||||
|
||||
export function getOutputDirectoriesList(outDirArg: string) {
|
||||
const isList = outDirArg.substring(outDirArg.length - 4) === '.txt';
|
||||
let dirsList: string[];
|
||||
|
||||
if (isList) {
|
||||
dirsList = sanitizeOutDirsList(readFileToArray(outDirArg));
|
||||
}
|
||||
else {
|
||||
dirsList = [outDirArg];
|
||||
}
|
||||
|
||||
return dirsList;
|
||||
}
|
||||
|
||||
export function makeOutputDirectories(dirsList: string[]) {
|
||||
dirsList.forEach(dir => {
|
||||
if (!fs.existsSync(dir)) {
|
||||
console.info(colors.yellow('Creating output directory:'));
|
||||
console.info(colors.green(dir) + '\n');
|
||||
|
||||
try {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
foundUrl = false;
|
||||
continue;
|
||||
}
|
||||
catch (e) {
|
||||
process.exit(ERROR_CODE.INVALID_OUTPUT_DIR);
|
||||
else {
|
||||
logger.warn(`Found options without preceding url at line ${i + 1}, skipping..`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function checkOutDirsUrlsMismatch(dirsList: string[], urlsList: string[]) {
|
||||
const dirsListL = dirsList.length;
|
||||
const urlsListL = urlsList.length;
|
||||
/* now line is not empty nor an option line.
|
||||
If foundUrl is still true last line didn't have a directory option
|
||||
so we stil need to add the default outDir to outDirList to */
|
||||
if (foundUrl) {
|
||||
outDirList.push(...Array(guidList.length - outDirList.length)
|
||||
.fill(defaultOutDir));
|
||||
}
|
||||
|
||||
// single out dir, treat this as the chosen one for all
|
||||
if (dirsListL == 1) {
|
||||
return;
|
||||
const guids: Array<string> | null = await extractGuids(line, apiClient);
|
||||
|
||||
if (guids) {
|
||||
guidList.push(...guids);
|
||||
foundUrl = true;
|
||||
}
|
||||
else {
|
||||
logger.warn(`Invalid url at line ${i + 1}, skipping..`);
|
||||
}
|
||||
}
|
||||
else if (dirsListL != urlsListL) {
|
||||
process.exit(ERROR_CODE.OUTDIRS_URLS_MISMATCH);
|
||||
|
||||
return [guidList, outDirList];
|
||||
}
|
||||
|
||||
|
||||
// This leaves us the option to add more options (badum tss) _Luca
|
||||
function parseOption(optionSyntax: string, item: string): string | null {
|
||||
const match: RegExpMatchArray | null = item.match(
|
||||
RegExp(`^\\s*${optionSyntax}\\s?=\\s?['"](.*)['"]`)
|
||||
);
|
||||
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
|
||||
|
||||
export function checkOutDir(directory: string): boolean {
|
||||
if (!fs.existsSync(directory)) {
|
||||
try {
|
||||
fs.mkdirSync(directory);
|
||||
logger.info('\nCreated directory: '.yellow + directory);
|
||||
}
|
||||
catch (e) {
|
||||
logger.warn('Cannot create directory: '+ directory +
|
||||
'\nFalling back to default directory..');
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
export function sleep(ms: number) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
export function checkRequirements() {
|
||||
export function checkRequirements(): void {
|
||||
try {
|
||||
const ffmpegVer = execSync('ffmpeg -version').toString().split('\n')[0];
|
||||
console.info(colors.green(`Using ${ffmpegVer}\n`));
|
||||
|
||||
const ffmpegVer: string = execSync('ffmpeg -version').toString().split('\n')[0];
|
||||
logger.info(`Using ${ffmpegVer}\n`);
|
||||
}
|
||||
catch (e) {
|
||||
process.exit(ERROR_CODE.MISSING_FFMPEG);
|
||||
}
|
||||
}
|
||||
|
||||
export function makeUniqueTitle(title: string, outDir: string, skip?: boolean, format?: string) {
|
||||
let ntitle = title;
|
||||
let k = 0;
|
||||
|
||||
while (!skip && fs.existsSync(outDir + path.sep + ntitle + '.' + format)) {
|
||||
ntitle = title + ' - ' + (++k).toString();
|
||||
}
|
||||
export function ffmpegTimemarkToChunk(timemark: string): number {
|
||||
const timeVals: Array<string> = timemark.split(':');
|
||||
const hrs: number = parseInt(timeVals[0]);
|
||||
const mins: number = parseInt(timeVals[1]);
|
||||
const secs: number = parseInt(timeVals[2]);
|
||||
|
||||
return ntitle;
|
||||
}
|
||||
|
||||
export function ffmpegTimemarkToChunk(timemark: string) {
|
||||
const timeVals: string[] = timemark.split(':');
|
||||
const hrs = parseInt(timeVals[0]);
|
||||
const mins = parseInt(timeVals[1]);
|
||||
const secs = parseInt(timeVals[2]);
|
||||
const chunk = (hrs * 60) + mins + (secs / 60);
|
||||
|
||||
return chunk;
|
||||
return (hrs * 60) + mins + (secs / 60);
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user