mirror of
https://github.com/snobu/destreamer.git
synced 2026-01-21 07:22:15 +00:00
Major code refactoring (#164)
* Added Chromium caching of identity provider cookies * Moved token expiry check in standalone method * Created refreshSession function * Session is now refreshed if the token expires * Linting fixes * Removed debug console.log() * Added CC support * Created function to prompt user for download parameters (interactive mode) * Fix data folder for puppeteer * Fixed multiple session error * Fix token expire time * Moved session refreshing to a more sensible place * Changed Metadata name to Video (to better reflect the data structure) * Complete CLI refactoring * Removed useless sleep function * Added outDir check from CLI * Complete input parsing refactoring (both inline and file) * Fixed and improved tests to work with the new input parsing * Moved and improved output path generation to videoUtils * Main code refactoring, added outpath to video type * Minor changes in spacing and type definition style * Updated readme after code refactoring * Fix if inputFile doesn't start with url on line 1 * Minor naming change * Use module 'winston' for logging * Created logge, changed all console.log and similar to use the logger * Added verbose logging, changed posterUrl property name on Video type * Moved GUID extraction to input parsing * Added support for group links * Fixed test after last input parsing update * Removed debug proces.exit() * Changed from desc to asc order for group videos * Updated test to reflect GUIDs output after parsing * Added couple of comments and restyled some imports * More readable verbose GUIDs logging * Removed unused errors * Temporary fix for timeout not working in ApiClient * Explicit class member accessibility * Defined array naming schema to be Array<T> * Defined type/interface schema to be type only * A LOT of type definitions
This commit is contained in:
@@ -1,6 +1,9 @@
|
||||
import { logger } from './Logger';
|
||||
import { Session } from './Types';
|
||||
|
||||
import axios, { AxiosRequestConfig, AxiosResponse, AxiosInstance, AxiosError } from 'axios';
|
||||
import axiosRetry, { isNetworkOrIdempotentRequestError } from 'axios-retry';
|
||||
import { Session } from './Types';
|
||||
|
||||
|
||||
export class ApiClient {
|
||||
private static instance: ApiClient;
|
||||
@@ -11,26 +14,30 @@ export class ApiClient {
|
||||
this.session = session;
|
||||
this.axiosInstance = axios.create({
|
||||
baseURL: session?.ApiGatewayUri,
|
||||
timeout: 7000,
|
||||
// timeout: 7000,
|
||||
headers: { 'User-Agent': 'destreamer/2.0 (Hammer of Dawn)' }
|
||||
});
|
||||
|
||||
axiosRetry(this.axiosInstance, {
|
||||
// The following option is not working.
|
||||
// We should open an issue on the relative GitHub
|
||||
shouldResetTimeout: true,
|
||||
retries: 6,
|
||||
retryDelay: (retryCount) => {
|
||||
retryDelay: (retryCount: number) => {
|
||||
return retryCount * 2000;
|
||||
},
|
||||
retryCondition: (err: AxiosError) => {
|
||||
const retryCodes = [429, 500, 502, 503];
|
||||
const retryCodes: Array<number> = [429, 500, 502, 503];
|
||||
if (isNetworkOrIdempotentRequestError(err)) {
|
||||
console.warn(`${err}. Retrying request...`);
|
||||
logger.warn(`${err}. Retrying request...`);
|
||||
|
||||
return true;
|
||||
}
|
||||
console.warn(`Got HTTP ${err?.response?.status}. Retrying request...`);
|
||||
const condition = retryCodes.includes(err?.response?.status ?? 0);
|
||||
}
|
||||
logger.warn(`Got HTTP code ${err?.response?.status ?? undefined}. Retrying request...`);
|
||||
|
||||
return condition;
|
||||
const shouldRetry: boolean = retryCodes.includes(err?.response?.status ?? 0);
|
||||
|
||||
return shouldRetry;
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -52,7 +59,7 @@ export class ApiClient {
|
||||
method: AxiosRequestConfig['method'] = 'get',
|
||||
payload?: any): Promise<AxiosResponse | undefined> {
|
||||
|
||||
const delimiter = path.split('?').length === 1 ? '?' : '&';
|
||||
const delimiter: '?' | '&' = path.split('?').length === 1 ? '?' : '&';
|
||||
|
||||
const headers: object = {
|
||||
'Authorization': 'Bearer ' + this.session?.AccessToken
|
||||
@@ -74,7 +81,7 @@ export class ApiClient {
|
||||
method: AxiosRequestConfig['method'] = 'get',
|
||||
payload?: any,
|
||||
responseType: AxiosRequestConfig['responseType'] = 'json'): Promise<AxiosResponse | undefined> {
|
||||
|
||||
|
||||
const headers: object = {
|
||||
'Authorization': 'Bearer ' + this.session?.AccessToken
|
||||
};
|
||||
@@ -87,5 +94,4 @@ export class ApiClient {
|
||||
responseType: responseType
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,37 +1,43 @@
|
||||
import { CLI_ERROR } from './Errors';
|
||||
import { CLI_ERROR, ERROR_CODE } from './Errors';
|
||||
import { checkOutDir } from './Utils';
|
||||
import { logger } from './Logger';
|
||||
|
||||
import yargs from 'yargs';
|
||||
import colors from 'colors';
|
||||
import fs from 'fs';
|
||||
import readlineSync from 'readline-sync';
|
||||
import yargs from 'yargs';
|
||||
|
||||
export const argv = yargs.options({
|
||||
|
||||
export const argv: any = yargs.options({
|
||||
username: {
|
||||
alias: 'u',
|
||||
type: 'string',
|
||||
describe: 'The username used to log into Microsoft Stream (enabling this will fill in the email field for you)',
|
||||
demandOption: false
|
||||
},
|
||||
videoUrls: {
|
||||
alias: 'i',
|
||||
describe: 'List of video urls',
|
||||
type: 'array',
|
||||
demandOption: false
|
||||
},
|
||||
videoUrlsFile: {
|
||||
inputFile: {
|
||||
alias: 'f',
|
||||
describe: 'Path to txt file containing the urls',
|
||||
type: 'string',
|
||||
demandOption: false
|
||||
},
|
||||
username: {
|
||||
alias: 'u',
|
||||
describe: 'Path to text file containing URLs and optionally outDirs. See the README for more on outDirs.',
|
||||
type: 'string',
|
||||
demandOption: false
|
||||
},
|
||||
outputDirectory: {
|
||||
alias: 'o',
|
||||
describe: 'The directory where destreamer will save your downloads [default: videos]',
|
||||
describe: 'The directory where destreamer will save your downloads',
|
||||
type: 'string',
|
||||
default: 'videos',
|
||||
demandOption: false
|
||||
},
|
||||
outputDirectories: {
|
||||
alias: 'O',
|
||||
describe: 'Path to a txt file containing one output directory per video',
|
||||
type: 'string',
|
||||
keepLoginCookies: {
|
||||
alias: 'k',
|
||||
describe: 'Let Chromium cache identity provider cookies so you can use "Remember me" during login',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
demandOption: false
|
||||
},
|
||||
noExperiments: {
|
||||
@@ -55,6 +61,13 @@ export const argv = yargs.options({
|
||||
default: false,
|
||||
demandOption: false
|
||||
},
|
||||
closedCaptions: {
|
||||
alias: 'cc',
|
||||
describe: 'Check if closed captions are aviable and let the user choose which one to download (will not ask if only one aviable)',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
demandOption: false
|
||||
},
|
||||
noCleanup: {
|
||||
alias: 'nc',
|
||||
describe: 'Do not delete the downloaded video file when an FFmpeg error occurs',
|
||||
@@ -87,147 +100,74 @@ export const argv = yargs.options({
|
||||
demandOption: false
|
||||
}
|
||||
})
|
||||
/**
|
||||
* Do our own argv magic before destreamer starts.
|
||||
* ORDER IS IMPORTANT!
|
||||
* Do not mess with this.
|
||||
*/
|
||||
.check(() => isShowHelpRequest())
|
||||
.check(argv => checkRequiredArgument(argv))
|
||||
.check(argv => checkVideoUrlsArgConflict(argv))
|
||||
.check(argv => checkOutputDirArgConflict(argv))
|
||||
.check(argv => checkVideoUrlsInput(argv))
|
||||
.check(argv => windowsFileExtensionBadBehaviorFix(argv))
|
||||
.check(argv => mergeVideoUrlsArguments(argv))
|
||||
.check(argv => mergeOutputDirArguments(argv))
|
||||
.wrap(120)
|
||||
.check(() => noArguments())
|
||||
.check((argv: any) => inputConflicts(argv.videoUrls, argv.inputFile))
|
||||
.check((argv: any) => {
|
||||
if (checkOutDir(argv.outputDirectory)) {
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
logger.error(CLI_ERROR.INVALID_OUTDIR);
|
||||
|
||||
throw new Error(' ');
|
||||
}
|
||||
})
|
||||
.argv;
|
||||
|
||||
function hasNoArgs() {
|
||||
return process.argv.length === 2;
|
||||
}
|
||||
|
||||
function isShowHelpRequest() {
|
||||
if (hasNoArgs()) {
|
||||
throw new Error(CLI_ERROR.GRACEFULLY_STOP);
|
||||
function noArguments(): boolean {
|
||||
// if only 2 args no other args (0: node path, 1: js script path)
|
||||
if (process.argv.length === 2) {
|
||||
logger.error(CLI_ERROR.MISSING_INPUT_ARG, {fatal: true});
|
||||
|
||||
// so that the output stays clear
|
||||
throw new Error(' ');
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function checkRequiredArgument(argv: any) {
|
||||
if (hasNoArgs()) {
|
||||
return true;
|
||||
|
||||
function inputConflicts(videoUrls: Array<string | number> | undefined,
|
||||
inputFile: string | undefined): boolean {
|
||||
// check if both inputs are declared
|
||||
if ((videoUrls !== undefined) && (inputFile !== undefined)) {
|
||||
logger.error(CLI_ERROR.INPUT_ARG_CONFLICT);
|
||||
|
||||
throw new Error(' ');
|
||||
}
|
||||
// check if no input is declared or if they are declared but empty
|
||||
else if (!(videoUrls || inputFile) || (videoUrls?.length === 0) || (inputFile?.length === 0)) {
|
||||
logger.error(CLI_ERROR.MISSING_INPUT_ARG);
|
||||
|
||||
if (!argv.videoUrls && !argv.videoUrlsFile) {
|
||||
throw new Error(colors.red(CLI_ERROR.MISSING_REQUIRED_ARG));
|
||||
throw new Error(' ');
|
||||
}
|
||||
else if (inputFile) {
|
||||
// check if inputFile doesn't end in '.txt'
|
||||
if (inputFile.substring(inputFile.length - 4) !== '.txt') {
|
||||
logger.error(CLI_ERROR.INPUTFILE_WRONG_EXTENSION);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function checkVideoUrlsArgConflict(argv: any) {
|
||||
if (hasNoArgs()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (argv.videoUrls && argv.videoUrlsFile) {
|
||||
throw new Error(colors.red(CLI_ERROR.VIDEOURLS_ARG_CONFLICT));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function checkOutputDirArgConflict(argv: any) {
|
||||
if (hasNoArgs()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (argv.outputDirectory && argv.outputDirectories) {
|
||||
throw new Error(colors.red(CLI_ERROR.OUTPUTDIR_ARG_CONFLICT));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function checkVideoUrlsInput(argv: any) {
|
||||
if (hasNoArgs() || !argv.videoUrls) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!argv.videoUrls.length) {
|
||||
throw new Error(colors.red(CLI_ERROR.MISSING_REQUIRED_ARG));
|
||||
}
|
||||
|
||||
const t = argv.videoUrls[0] as string;
|
||||
if (t.substring(t.length-4) === '.txt') {
|
||||
throw new Error(colors.red(CLI_ERROR.FILE_INPUT_VIDEOURLS_ARG));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Users see 2 separate options, but we don't really care
|
||||
* cause both options have no difference in code.
|
||||
*
|
||||
* Optimize and make this transparent to destreamer
|
||||
*/
|
||||
function mergeVideoUrlsArguments(argv: any) {
|
||||
if (!argv.videoUrlsFile) {
|
||||
return true;
|
||||
}
|
||||
|
||||
argv.videoUrls = [argv.videoUrlsFile]; // noone will notice ;)
|
||||
|
||||
// these are not valid anymore
|
||||
delete argv.videoUrlsFile;
|
||||
delete argv.F;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Users see 2 separate options, but we don't really care
|
||||
* cause both options have no difference in code.
|
||||
*
|
||||
* Optimize and make this transparent to destreamer
|
||||
*/
|
||||
function mergeOutputDirArguments(argv: any) {
|
||||
if (!argv.outputDirectories && argv.outputDirectory) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!argv.outputDirectory && !argv.outputDirectories) {
|
||||
argv.outputDirectory = 'videos'; // default out dir
|
||||
}
|
||||
else if (argv.outputDirectories) {
|
||||
argv.outputDirectory = argv.outputDirectories;
|
||||
}
|
||||
|
||||
if (argv.outputDirectories) {
|
||||
// these are not valid anymore
|
||||
delete argv.outputDirectories;
|
||||
delete argv.O;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// yeah this is for windows, but lets check everyone, who knows...
|
||||
function windowsFileExtensionBadBehaviorFix(argv: any) {
|
||||
if (hasNoArgs() || !argv.videoUrlsFile || !argv.outputDirectories) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!fs.existsSync(argv.videoUrlsFile)) {
|
||||
if (fs.existsSync(argv.videoUrlsFile + '.txt')) {
|
||||
argv.videoUrlsFile += '.txt';
|
||||
throw new Error(' ');
|
||||
}
|
||||
else {
|
||||
throw new Error(colors.red(CLI_ERROR.INPUT_URLS_FILE_NOT_FOUND));
|
||||
// check if the inputFile exists
|
||||
else if (!fs.existsSync(inputFile)) {
|
||||
logger.error(CLI_ERROR.INPUTFILE_NOT_FOUND);
|
||||
|
||||
throw new Error(' ');
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
export function promptUser(choices: Array<string>): number {
|
||||
let index: number = readlineSync.keyInSelect(choices, 'Which resolution/format do you prefer?');
|
||||
|
||||
if (index === -1) {
|
||||
process.exit(ERROR_CODE.CANCELLED_USER_INPUT);
|
||||
}
|
||||
|
||||
return index;
|
||||
}
|
||||
|
||||
@@ -1,64 +1,47 @@
|
||||
interface IError {
|
||||
[key: number]: string
|
||||
}
|
||||
|
||||
export const enum ERROR_CODE {
|
||||
NO_ERROR,
|
||||
UNHANDLED_ERROR,
|
||||
MISSING_FFMPEG,
|
||||
ELEVATED_SHELL,
|
||||
INVALID_OUTPUT_DIR,
|
||||
INVALID_INPUT_URLS,
|
||||
OUTDIRS_URLS_MISMATCH,
|
||||
INVALID_VIDEO_ID,
|
||||
INVALID_VIDEO_GUID,
|
||||
CANCELLED_USER_INPUT,
|
||||
MISSING_FFMPEG,
|
||||
UNK_FFMPEG_ERROR,
|
||||
INVALID_VIDEO_GUID,
|
||||
NO_SESSION_INFO
|
||||
}
|
||||
|
||||
// TODO: create better errors descriptions
|
||||
export const Error: IError = {
|
||||
[ERROR_CODE.NO_ERROR]: 'Clean exit with code 0',
|
||||
|
||||
export const errors: {[key: number]: string} = {
|
||||
[ERROR_CODE.UNHANDLED_ERROR]: 'Unhandled error!\n' +
|
||||
'Timeout or fatal error, please check your downloads directory and try again',
|
||||
|
||||
[ERROR_CODE.ELEVATED_SHELL]: 'Destreamer cannot run in an elevated (Administrator/root) shell.\n' +
|
||||
'Please run in a regular, non-elevated window.',
|
||||
|
||||
[ERROR_CODE.INVALID_OUTPUT_DIR]: 'Unable to create output directory',
|
||||
[ERROR_CODE.CANCELLED_USER_INPUT]: 'Input was cancelled by user',
|
||||
|
||||
[ERROR_CODE.MISSING_FFMPEG]: 'FFmpeg is missing!\n' +
|
||||
'Destreamer requires a fairly recent release of FFmpeg to download videos',
|
||||
|
||||
[ERROR_CODE.UNK_FFMPEG_ERROR]: 'Unknown FFmpeg error',
|
||||
|
||||
[ERROR_CODE.INVALID_INPUT_URLS]: 'No valid URL from input',
|
||||
|
||||
[ERROR_CODE.OUTDIRS_URLS_MISMATCH]: 'Output directories and URLs mismatch!\n' +
|
||||
'You must input the same number of URLs and output directories',
|
||||
|
||||
[ERROR_CODE.INVALID_VIDEO_ID]: 'Unable to get video ID from URL',
|
||||
|
||||
[ERROR_CODE.INVALID_VIDEO_GUID]: 'Unable to get video GUID from URL',
|
||||
|
||||
[ERROR_CODE.NO_SESSION_INFO]: 'Could not evaluate sessionInfo on the page'
|
||||
};
|
||||
|
||||
|
||||
export const enum CLI_ERROR {
|
||||
GRACEFULLY_STOP = ' ', // gracefully stop execution, yargs way
|
||||
MISSING_INPUT_ARG = 'You must specify a URLs source. \n' +
|
||||
'Valid options are -i for one or more URLs separated by space or -f for input file. \n',
|
||||
|
||||
MISSING_REQUIRED_ARG = 'You must specify a URLs source.\n' +
|
||||
'Valid options are -i for one or more URLs separated by space or -f for URLs from file.',
|
||||
INPUT_ARG_CONFLICT = 'Too many URLs sources specified! \n' +
|
||||
'Please specify a single source, either -i or -f \n',
|
||||
|
||||
VIDEOURLS_ARG_CONFLICT = 'Too many URLs sources specified!\n' +
|
||||
'Please specify a single source, either -i or -f (URLs from file)',
|
||||
INPUTFILE_WRONG_EXTENSION = 'The specified inputFile has the wrong extension \n' +
|
||||
'Please make sure to use path/to/filename.txt when useing the -f option \n',
|
||||
|
||||
OUTPUTDIR_ARG_CONFLICT = 'Too many output arguments specified!\n' +
|
||||
'Please specify a single output argument, either -o or --outputDirectories.',
|
||||
INPUTFILE_NOT_FOUND = 'The specified inputFile does not exists \n'+
|
||||
'Please check the filename and the path you provided \n',
|
||||
|
||||
FILE_INPUT_VIDEOURLS_ARG = 'Wrong input for option --videoUrls.\n' +
|
||||
'To read URLs from file, use --videoUrlsFile option.',
|
||||
|
||||
INPUT_URLS_FILE_NOT_FOUND = 'Input URL list file not found.'
|
||||
}
|
||||
INVALID_OUTDIR = 'Could not create the default/specified output directory \n' +
|
||||
'Please check directory and permissions and try again. \n'
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Error, ERROR_CODE } from './Errors';
|
||||
import { errors, ERROR_CODE } from './Errors';
|
||||
import { logger } from './Logger';
|
||||
|
||||
import colors from 'colors';
|
||||
|
||||
/**
|
||||
* This file contains global destreamer process events
|
||||
@@ -9,20 +9,25 @@ import colors from 'colors';
|
||||
*
|
||||
* @note function is required for non-packaged destreamer, so we can't do better
|
||||
*/
|
||||
export function setProcessEvents() {
|
||||
export function setProcessEvents(): void {
|
||||
// set exit event first so that we can always print cute errors
|
||||
process.on('exit', (code) => {
|
||||
if (code == 0) {
|
||||
process.on('exit', (code: number) => {
|
||||
if (code === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const msg = code in Error ? `\n\n${Error[code]} \n` : `\n\nUnknown error: exit code ${code} \n`;
|
||||
const msg: string = (code in errors) ? `${errors[code]} \n` : `Unknown error: exit code ${code} \n`;
|
||||
|
||||
console.error(colors.bgRed(msg));
|
||||
logger.error({ message: msg, fatal: true });
|
||||
});
|
||||
|
||||
process.on('unhandledRejection', (reason) => {
|
||||
console.error(colors.red(reason as string));
|
||||
process.on('unhandledRejection', (reason: {} | null | undefined) => {
|
||||
if (reason instanceof Error) {
|
||||
logger.error({ message: (reason as Error) });
|
||||
process.exit(ERROR_CODE.UNHANDLED_ERROR);
|
||||
}
|
||||
|
||||
logger.error({ message: (reason as string) });
|
||||
process.exit(ERROR_CODE.UNHANDLED_ERROR);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
40
src/Logger.ts
Normal file
40
src/Logger.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import colors from 'colors';
|
||||
import winston from 'winston';
|
||||
|
||||
|
||||
export const logger: winston.Logger = winston.createLogger({
|
||||
level: 'info',
|
||||
transports: [
|
||||
new winston.transports.Console({
|
||||
format: winston.format.combine(
|
||||
winston.format.errors({ stack: true }),
|
||||
winston.format.timestamp({ format: 'YYYY-MM-DD hh:mm:ss' }),
|
||||
winston.format.printf(
|
||||
(item: winston.Logform.TransformableInfo) => customPrint(item)
|
||||
)
|
||||
)
|
||||
})
|
||||
]
|
||||
});
|
||||
|
||||
|
||||
function customPrint (info: winston.Logform.TransformableInfo): string {
|
||||
if (info.level === 'error') {
|
||||
if (info.fatal) {
|
||||
return colors.red('\n\n[FATAL ERROR] ') + (info.stack ?? info.message);
|
||||
}
|
||||
|
||||
return colors.red('\n[ERROR] ') + (info.stack ?? info.message) + '\n';
|
||||
}
|
||||
else if (info.level === 'warn') {
|
||||
return colors.yellow('\n[WARNING] ') + info.message;
|
||||
}
|
||||
else if (info.level === 'info') {
|
||||
return info.message;
|
||||
}
|
||||
else if (info.level === 'verbose') {
|
||||
return colors.cyan('\n[VERBOSE] ') + info.message;
|
||||
}
|
||||
|
||||
return `${info.level}: ${info.message} - ${info.timestamp}`;
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
import { Metadata, Session } from './Types';
|
||||
import { forEachAsync } from './Utils';
|
||||
import { ApiClient } from './ApiClient';
|
||||
|
||||
import { parse } from 'iso8601-duration';
|
||||
|
||||
|
||||
function publishedDateToString(date: string) {
|
||||
const dateJs = new Date(date);
|
||||
const day = dateJs.getDate().toString().padStart(2, '0');
|
||||
const month = (dateJs.getMonth() + 1).toString(10).padStart(2, '0');
|
||||
const publishedDate = day + '-' + month + '-' + dateJs.getFullYear();
|
||||
|
||||
return publishedDate;
|
||||
}
|
||||
|
||||
function durationToTotalChunks(duration: string) {
|
||||
const durationObj = parse(duration);
|
||||
const hrs = durationObj['hours'] ?? 0;
|
||||
const mins = durationObj['minutes'] ?? 0;
|
||||
const secs = Math.ceil(durationObj['seconds'] ?? 0);
|
||||
|
||||
return (hrs * 60) + mins + (secs / 60);
|
||||
}
|
||||
|
||||
export async function getVideoMetadata(videoGuids: string[], session: Session): Promise<Metadata[]> {
|
||||
let metadata: Metadata[] = [];
|
||||
let title: string;
|
||||
let date: string;
|
||||
let totalChunks: number;
|
||||
let playbackUrl: string;
|
||||
let posterImage: string;
|
||||
|
||||
const apiClient = ApiClient.getInstance(session);
|
||||
|
||||
await forEachAsync(videoGuids, async (guid: string) => {
|
||||
let response = await apiClient.callApi('videos/' + guid, 'get');
|
||||
|
||||
title = response?.data['name'];
|
||||
playbackUrl = response?.data['playbackUrls']
|
||||
.filter((item: { [x: string]: string; }) =>
|
||||
item['mimeType'] == 'application/vnd.apple.mpegurl')
|
||||
.map((item: { [x: string]: string }) => {
|
||||
return item['playbackUrl'];
|
||||
})[0];
|
||||
|
||||
posterImage = response?.data['posterImage']['medium']['url'];
|
||||
date = publishedDateToString(response?.data['publishedDate']);
|
||||
totalChunks = durationToTotalChunks(response?.data.media['duration']);
|
||||
|
||||
metadata.push({
|
||||
date: date,
|
||||
totalChunks: totalChunks,
|
||||
title: title,
|
||||
playbackUrl: playbackUrl,
|
||||
posterImage: posterImage
|
||||
});
|
||||
});
|
||||
|
||||
return metadata;
|
||||
}
|
||||
@@ -1,19 +1,21 @@
|
||||
import path from 'path';
|
||||
import puppeteer from 'puppeteer';
|
||||
|
||||
|
||||
// Thanks pkg-puppeteer [ cleaned up version :) ]
|
||||
export function getPuppeteerChromiumPath() {
|
||||
const isPkg = __filename.includes('snapshot');
|
||||
const macOS_Linux_rex = /^.*?\/node_modules\/puppeteer\/\.local-chromium/;
|
||||
const win32_rex = /^.*?\\node_modules\\puppeteer\\\.local-chromium/;
|
||||
const replaceRegex = process.platform === 'win32' ? win32_rex : macOS_Linux_rex;
|
||||
export function getPuppeteerChromiumPath(): string {
|
||||
const isPkg: boolean = __filename.includes('snapshot');
|
||||
|
||||
const replaceRegex: RegExp = (process.platform === 'win32') ?
|
||||
new RegExp(/^.*?\\node_modules\\puppeteer\\\.local-chromium/) :
|
||||
new RegExp(/^.*?\/node_modules\/puppeteer\/\.local-chromium/);
|
||||
|
||||
if (!isPkg) {
|
||||
return puppeteer.executablePath();
|
||||
}
|
||||
|
||||
const browserPath = puppeteer.executablePath()
|
||||
const browserPath: string = puppeteer.executablePath()
|
||||
.replace(replaceRegex, path.join(path.dirname(process.execPath), 'chromium'));
|
||||
|
||||
return browserPath;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
import { ApiClient } from './ApiClient';
|
||||
import { Session } from './Types';
|
||||
|
||||
import terminalImage from 'terminal-image';
|
||||
import { AxiosResponse } from 'axios';
|
||||
|
||||
|
||||
export async function drawThumbnail(posterImage: string, session: Session): Promise<void> {
|
||||
const apiClient = ApiClient.getInstance(session);
|
||||
let thumbnail = await apiClient.callUrl(posterImage, 'get', null, 'arraybuffer');
|
||||
console.log(await terminalImage.buffer(thumbnail?.data, { width: 70 } ));
|
||||
const apiClient: ApiClient = ApiClient.getInstance(session);
|
||||
|
||||
let thumbnail: Buffer = await apiClient.callUrl(posterImage, 'get', null, 'arraybuffer')
|
||||
.then((response: AxiosResponse<any> | undefined) => response?.data);
|
||||
|
||||
console.log(await terminalImage.buffer(thumbnail, { width: 70 } ));
|
||||
}
|
||||
|
||||
@@ -1,56 +1,104 @@
|
||||
import * as fs from 'fs';
|
||||
import { chromeCacheFolder } from './destreamer';
|
||||
import { ERROR_CODE } from './Errors';
|
||||
import { logger } from './Logger';
|
||||
import { getPuppeteerChromiumPath } from './PuppeteerHelper';
|
||||
import { Session } from './Types';
|
||||
import { bgGreen, bgYellow, green } from 'colors';
|
||||
|
||||
import fs from 'fs';
|
||||
import jwtDecode from 'jwt-decode';
|
||||
import puppeteer from 'puppeteer';
|
||||
|
||||
|
||||
export class TokenCache {
|
||||
private tokenCacheFile: string = '.token_cache';
|
||||
private tokenCacheFile = '.token_cache';
|
||||
|
||||
public Read(): Session | null {
|
||||
let j = null;
|
||||
if (!fs.existsSync(this.tokenCacheFile)) {
|
||||
console.warn(bgYellow.black(`${this.tokenCacheFile} not found.\n`));
|
||||
logger.warn(`${this.tokenCacheFile} not found. \n`);
|
||||
|
||||
return null;
|
||||
}
|
||||
let f = fs.readFileSync(this.tokenCacheFile, 'utf8');
|
||||
j = JSON.parse(f);
|
||||
|
||||
interface Jwt {
|
||||
let session: Session = JSON.parse(fs.readFileSync(this.tokenCacheFile, 'utf8'));
|
||||
|
||||
type Jwt = {
|
||||
[key: string]: any
|
||||
}
|
||||
const decodedJwt: Jwt = jwtDecode(session.AccessToken);
|
||||
|
||||
const decodedJwt: Jwt = jwtDecode(j.AccessToken);
|
||||
let now: number = Math.floor(Date.now() / 1000);
|
||||
let exp: number = decodedJwt['exp'];
|
||||
let timeLeft: number = exp - now;
|
||||
|
||||
let now = Math.floor(Date.now() / 1000);
|
||||
let exp = decodedJwt['exp'];
|
||||
let timeLeft = exp - now;
|
||||
|
||||
let timeLeftInMinutes = Math.floor(timeLeft / 60);
|
||||
if (timeLeft < 120) {
|
||||
console.warn(bgYellow.black('\nAccess token has expired.'));
|
||||
logger.warn('Access token has expired! \n');
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
console.info(bgGreen.black(`\nAccess token still good for ${timeLeftInMinutes} minutes.\n`));
|
||||
|
||||
let session: Session = {
|
||||
AccessToken: j.AccessToken,
|
||||
ApiGatewayUri: j.ApiGatewayUri,
|
||||
ApiGatewayVersion: j.ApiGatewayVersion
|
||||
};
|
||||
logger.info(`Access token still good for ${Math.floor(timeLeft / 60)} minutes.\n`.green);
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
public Write(session: Session): void {
|
||||
let s = JSON.stringify(session, null, 4);
|
||||
let s: string = JSON.stringify(session, null, 4);
|
||||
fs.writeFile('.token_cache', s, (err: any) => {
|
||||
if (err) {
|
||||
return console.error(err);
|
||||
return logger.error(err);
|
||||
}
|
||||
console.info(green('Fresh access token dropped into .token_cache'));
|
||||
logger.info('Fresh access token dropped into .token_cachen \n'.green);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export async function refreshSession(): Promise<Session> {
|
||||
const url = 'https://web.microsoftstream.com';
|
||||
|
||||
const browser: puppeteer.Browser = await puppeteer.launch({
|
||||
executablePath: getPuppeteerChromiumPath(),
|
||||
headless: false, // NEVER TRUE OR IT DOES NOT WORK
|
||||
userDataDir: chromeCacheFolder,
|
||||
args: [
|
||||
'--disable-dev-shm-usage',
|
||||
'--fast-start',
|
||||
'--no-sandbox'
|
||||
]
|
||||
});
|
||||
|
||||
const page: puppeteer.Page = (await browser.pages())[0];
|
||||
await page.goto(url, { waitUntil: 'load' });
|
||||
|
||||
await browser.waitForTarget((target: puppeteer.Target) => target.url().includes(url), { timeout: 30000 });
|
||||
|
||||
let session: Session | null = null;
|
||||
let tries = 1;
|
||||
|
||||
while (!session) {
|
||||
try {
|
||||
let sessionInfo: any;
|
||||
session = await page.evaluate(
|
||||
() => {
|
||||
return {
|
||||
AccessToken: sessionInfo.AccessToken,
|
||||
ApiGatewayUri: sessionInfo.ApiGatewayUri,
|
||||
ApiGatewayVersion: sessionInfo.ApiGatewayVersion
|
||||
};
|
||||
}
|
||||
);
|
||||
}
|
||||
catch (error) {
|
||||
if (tries > 5) {
|
||||
process.exit(ERROR_CODE.NO_SESSION_INFO);
|
||||
}
|
||||
|
||||
session = null;
|
||||
tries++;
|
||||
await page.waitFor(3000);
|
||||
}
|
||||
}
|
||||
browser.close();
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
11
src/Types.ts
11
src/Types.ts
@@ -4,10 +4,13 @@ export type Session = {
|
||||
ApiGatewayVersion: string;
|
||||
}
|
||||
|
||||
export type Metadata = {
|
||||
|
||||
export type Video = {
|
||||
date: string;
|
||||
totalChunks: number; // Abstraction of FFmpeg timemark
|
||||
title: string;
|
||||
outPath: string;
|
||||
totalChunks: number; // Abstraction of FFmpeg timemark
|
||||
playbackUrl: string;
|
||||
posterImage: string;
|
||||
}
|
||||
posterImageUrl: string;
|
||||
captionsUrl?: string
|
||||
}
|
||||
|
||||
281
src/Utils.ts
281
src/Utils.ts
@@ -1,152 +1,193 @@
|
||||
import { ApiClient } from './ApiClient';
|
||||
import { ERROR_CODE } from './Errors';
|
||||
import { logger } from './Logger';
|
||||
import { Session } from './Types';
|
||||
|
||||
import { AxiosResponse } from 'axios';
|
||||
import { execSync } from 'child_process';
|
||||
import colors from 'colors';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
function sanitizeUrls(urls: string[]) {
|
||||
// eslint-disable-next-line
|
||||
const rex = new RegExp(/(?:https:\/\/)?.*\/video\/[a-z0-9]{8}-(?:[a-z0-9]{4}\-){3}[a-z0-9]{12}$/, 'i');
|
||||
const sanitized: string[] = [];
|
||||
|
||||
for (let i = 0, l = urls.length; i < l; ++i) {
|
||||
let url = urls[i].split('?')[0];
|
||||
async function extractGuids(url: string, client: ApiClient): Promise<Array<string> | null> {
|
||||
|
||||
if (!rex.test(url)) {
|
||||
if (url !== '') {
|
||||
console.warn(colors.yellow('Invalid URL at line ' + (i + 1) + ', skip..'));
|
||||
}
|
||||
const videoRegex = new RegExp(/https:\/\/.*\/video\/(\w{8}-(?:\w{4}-){3}\w{12})/);
|
||||
const groupRegex = new RegExp(/https:\/\/.*\/group\/(\w{8}-(?:\w{4}-){3}\w{12})/);
|
||||
|
||||
const videoMatch: RegExpExecArray | null = videoRegex.exec(url);
|
||||
const groupMatch: RegExpExecArray | null = groupRegex.exec(url);
|
||||
|
||||
if (videoMatch) {
|
||||
return [videoMatch[1]];
|
||||
}
|
||||
else if (groupMatch) {
|
||||
const videoNumber: number = await client.callApi(`groups/${groupMatch[1]}`, 'get')
|
||||
.then((response: AxiosResponse<any> | undefined) => response?.data.metrics.videos);
|
||||
|
||||
let result: Array<string> = await client.callApi(`groups/${groupMatch[1]}/videos?$top=${videoNumber}&$orderby=publishedDate asc`, 'get')
|
||||
.then((response: AxiosResponse<any> | undefined) => response?.data.value.map((item: any) => item.id));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse the list of url given by the user via console input.
|
||||
* They can either be video urls or group urls, in which case the guids
|
||||
* will be added from oldest to newest.
|
||||
*
|
||||
* @param {Array<string>} urlList list of link to parse
|
||||
* @param {string} defaultOutDir the directry used to save the videos
|
||||
* @param {Session} session used to call the API to get the GUIDs from group links
|
||||
*
|
||||
* @returns Array of 2 elements, 1st one being the GUIDs array, 2nd one the output directories array
|
||||
*/
|
||||
export async function parseCLIinput(urlList: Array<string>, defaultOutDir: string,
|
||||
session: Session): Promise<Array<Array<string>>> {
|
||||
|
||||
const apiClient: ApiClient = ApiClient.getInstance(session);
|
||||
let guidList: Array<string> = [];
|
||||
|
||||
for (const url of urlList) {
|
||||
const guids: Array<string> | null = await extractGuids(url, apiClient);
|
||||
|
||||
if (guids) {
|
||||
guidList.push(...guids);
|
||||
}
|
||||
else {
|
||||
logger.warn(`Invalid url '${url}', skipping..`);
|
||||
}
|
||||
}
|
||||
|
||||
const outDirList: Array<string> = Array(guidList.length).fill(defaultOutDir);
|
||||
|
||||
return [guidList, outDirList];
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse the input text file.
|
||||
* The urls in the file can either be video urls or group urls, in which case the guids
|
||||
* will be added from oldest to newest.
|
||||
*
|
||||
* @param {string} inputFile path to the text file
|
||||
* @param {string} defaultOutDir the default/fallback directory used to save the videos
|
||||
* @param {Session} session used to call the API to get the GUIDs from group links
|
||||
*
|
||||
* @returns Array of 2 elements, 1st one being the GUIDs array, 2nd one the output directories array
|
||||
*/
|
||||
export async function parseInputFile(inputFile: string, defaultOutDir: string,
|
||||
session: Session): Promise<Array<Array<string>>> {
|
||||
// rawContent is a list of each line of the file
|
||||
const rawContent: Array<string> = fs.readFileSync(inputFile).toString()
|
||||
.split(/\r?\n/);
|
||||
const apiClient: ApiClient = ApiClient.getInstance(session);
|
||||
|
||||
let guidList: Array<string> = [];
|
||||
let outDirList: Array<string> = [];
|
||||
// if the last line was an url set this
|
||||
let foundUrl = false;
|
||||
|
||||
for (let i = 0; i < rawContent.length; i++) {
|
||||
const line: string = rawContent[i];
|
||||
|
||||
// filter out lines with no content
|
||||
if (!line.match(/\S/)) {
|
||||
logger.warn(`Line ${i + 1} is empty, skipping..`);
|
||||
continue;
|
||||
}
|
||||
// parse if line is option
|
||||
else if (line.includes('-dir')) {
|
||||
if (foundUrl) {
|
||||
let outDir: string | null = parseOption('-dir', line);
|
||||
|
||||
if (url.substring(0, 8) !== 'https://') {
|
||||
url = 'https://' + url;
|
||||
}
|
||||
if (outDir && checkOutDir(outDir)) {
|
||||
outDirList.push(...Array(guidList.length - outDirList.length)
|
||||
.fill(outDir));
|
||||
}
|
||||
else {
|
||||
outDirList.push(...Array(guidList.length - outDirList.length)
|
||||
.fill(defaultOutDir));
|
||||
}
|
||||
|
||||
sanitized.push(url);
|
||||
}
|
||||
|
||||
if (!sanitized.length) {
|
||||
process.exit(ERROR_CODE.INVALID_INPUT_URLS);
|
||||
}
|
||||
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
function sanitizeOutDirsList(dirsList: string[]) {
|
||||
const sanitized: string[] = [];
|
||||
|
||||
dirsList.forEach(dir => {
|
||||
if (dir !== '') {
|
||||
sanitized.push(dir);
|
||||
}
|
||||
});
|
||||
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
function readFileToArray(path: string) {
|
||||
return fs.readFileSync(path).toString('utf-8').split(/[\r\n]/);
|
||||
}
|
||||
|
||||
export async function forEachAsync(array: any, callback: any) {
|
||||
for (let i = 0, l = array.length; i < l; ++i) {
|
||||
await callback(array[i], i, array);
|
||||
}
|
||||
}
|
||||
|
||||
export function parseVideoUrls(videoUrls: any) {
|
||||
let input = videoUrls[0] as string;
|
||||
const isPath = input.substring(input.length - 4) === '.txt';
|
||||
let urls: string[];
|
||||
|
||||
if (isPath) {
|
||||
urls = readFileToArray(input);
|
||||
}
|
||||
else {
|
||||
urls = videoUrls as string[];
|
||||
}
|
||||
|
||||
return sanitizeUrls(urls);
|
||||
}
|
||||
|
||||
export function getOutputDirectoriesList(outDirArg: string) {
|
||||
const isList = outDirArg.substring(outDirArg.length - 4) === '.txt';
|
||||
let dirsList: string[];
|
||||
|
||||
if (isList) {
|
||||
dirsList = sanitizeOutDirsList(readFileToArray(outDirArg));
|
||||
}
|
||||
else {
|
||||
dirsList = [outDirArg];
|
||||
}
|
||||
|
||||
return dirsList;
|
||||
}
|
||||
|
||||
export function makeOutputDirectories(dirsList: string[]) {
|
||||
dirsList.forEach(dir => {
|
||||
if (!fs.existsSync(dir)) {
|
||||
console.info(colors.yellow('Creating output directory:'));
|
||||
console.info(colors.green(dir) + '\n');
|
||||
|
||||
try {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
foundUrl = false;
|
||||
continue;
|
||||
}
|
||||
catch (e) {
|
||||
process.exit(ERROR_CODE.INVALID_OUTPUT_DIR);
|
||||
else {
|
||||
logger.warn(`Found options without preceding url at line ${i + 1}, skipping..`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function checkOutDirsUrlsMismatch(dirsList: string[], urlsList: string[]) {
|
||||
const dirsListL = dirsList.length;
|
||||
const urlsListL = urlsList.length;
|
||||
/* now line is not empty nor an option line.
|
||||
If foundUrl is still true last line didn't have a directory option
|
||||
       so we still need to add the default outDir to outDirList */
|
||||
if (foundUrl) {
|
||||
outDirList.push(...Array(guidList.length - outDirList.length)
|
||||
.fill(defaultOutDir));
|
||||
}
|
||||
|
||||
// single out dir, treat this as the chosen one for all
|
||||
if (dirsListL == 1) {
|
||||
return;
|
||||
const guids: Array<string> | null = await extractGuids(line, apiClient);
|
||||
|
||||
if (guids) {
|
||||
guidList.push(...guids);
|
||||
foundUrl = true;
|
||||
}
|
||||
else {
|
||||
logger.warn(`Invalid url at line ${i + 1}, skipping..`);
|
||||
}
|
||||
}
|
||||
else if (dirsListL != urlsListL) {
|
||||
process.exit(ERROR_CODE.OUTDIRS_URLS_MISMATCH);
|
||||
|
||||
return [guidList, outDirList];
|
||||
}
|
||||
|
||||
|
||||
// This leaves us the option to add more options (badum tss) _Luca
|
||||
function parseOption(optionSyntax: string, item: string): string | null {
|
||||
const match: RegExpMatchArray | null = item.match(
|
||||
RegExp(`^\\s*${optionSyntax}\\s?=\\s?['"](.*)['"]`)
|
||||
);
|
||||
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
|
||||
|
||||
export function checkOutDir(directory: string): boolean {
|
||||
if (!fs.existsSync(directory)) {
|
||||
try {
|
||||
fs.mkdirSync(directory);
|
||||
logger.info('\nCreated directory: '.yellow + directory);
|
||||
}
|
||||
catch (e) {
|
||||
logger.warn('Cannot create directory: '+ directory +
|
||||
'\nFalling back to default directory..');
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
export function sleep(ms: number) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
export function checkRequirements() {
|
||||
export function checkRequirements(): void {
|
||||
try {
|
||||
const ffmpegVer = execSync('ffmpeg -version').toString().split('\n')[0];
|
||||
console.info(colors.green(`Using ${ffmpegVer}\n`));
|
||||
|
||||
const ffmpegVer: string = execSync('ffmpeg -version').toString().split('\n')[0];
|
||||
logger.info(`Using ${ffmpegVer}\n`);
|
||||
}
|
||||
catch (e) {
|
||||
process.exit(ERROR_CODE.MISSING_FFMPEG);
|
||||
}
|
||||
}
|
||||
|
||||
export function makeUniqueTitle(title: string, outDir: string, skip?: boolean, format?: string) {
|
||||
let ntitle = title;
|
||||
let k = 0;
|
||||
|
||||
while (!skip && fs.existsSync(outDir + path.sep + ntitle + '.' + format)) {
|
||||
ntitle = title + ' - ' + (++k).toString();
|
||||
}
|
||||
export function ffmpegTimemarkToChunk(timemark: string): number {
|
||||
const timeVals: Array<string> = timemark.split(':');
|
||||
const hrs: number = parseInt(timeVals[0]);
|
||||
const mins: number = parseInt(timeVals[1]);
|
||||
const secs: number = parseInt(timeVals[2]);
|
||||
|
||||
return ntitle;
|
||||
}
|
||||
|
||||
export function ffmpegTimemarkToChunk(timemark: string) {
|
||||
const timeVals: string[] = timemark.split(':');
|
||||
const hrs = parseInt(timeVals[0]);
|
||||
const mins = parseInt(timeVals[1]);
|
||||
const secs = parseInt(timeVals[2]);
|
||||
const chunk = (hrs * 60) + mins + (secs / 60);
|
||||
|
||||
return chunk;
|
||||
return (hrs * 60) + mins + (secs / 60);
|
||||
}
|
||||
|
||||
106
src/VideoUtils.ts
Normal file
106
src/VideoUtils.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import { ApiClient } from './ApiClient';
|
||||
import { promptUser } from './CommandLineParser';
|
||||
import { logger } from './Logger';
|
||||
import { Video, Session } from './Types';
|
||||
|
||||
import { AxiosResponse } from 'axios';
|
||||
import fs from 'fs';
|
||||
import { parse } from 'iso8601-duration';
|
||||
import path from 'path';
|
||||
import sanitize from 'sanitize-filename';
|
||||
|
||||
|
||||
function publishedDateToString(date: string): string {
|
||||
const dateJs: Date = new Date(date);
|
||||
const day: string = dateJs.getDate().toString().padStart(2, '0');
|
||||
const month: string = (dateJs.getMonth() + 1).toString(10).padStart(2, '0');
|
||||
|
||||
return `${dateJs.getFullYear()}-${month}-${day}`;
|
||||
}
|
||||
|
||||
|
||||
function durationToTotalChunks(duration: string): number {
|
||||
const durationObj: any = parse(duration);
|
||||
const hrs: number = durationObj.hours ?? 0;
|
||||
const mins: number = durationObj.minutes ?? 0;
|
||||
const secs: number = Math.ceil(durationObj.seconds ?? 0);
|
||||
|
||||
return (hrs * 60) + mins + (secs / 60);
|
||||
}
|
||||
|
||||
|
||||
/**
 * Query the Stream API for the info needed to download each video.
 *
 * For every GUID a 'videos/<GUID>' call provides the title, the HLS
 * playback URL, the poster image URL, the publish date and the duration
 * (converted to total chunks). When `subtitles` is true an extra
 * 'videos/<GUID>/texttracks' call resolves a captions URL as well; if a
 * video has more than one text track the user is prompted to pick one.
 *
 * @param videoGuids GUIDs of the videos to fetch info for
 * @param session session used to obtain the shared ApiClient instance
 * @param subtitles when true, also look up closed-caption tracks
 *
 * @returns one Video per GUID; outPath is left empty here and is filled
 *          in later by createUniquePath
 */
export async function getVideoInfo(videoGuids: Array<string>, session: Session, subtitles?: boolean): Promise<Array<Video>> {
    let metadata: Array<Video> = [];
    let title: string;
    let date: string;
    let totalChunks: number;
    let playbackUrl: string;
    let posterImageUrl: string;
    let captionsUrl: string | undefined;

    const apiClient: ApiClient = ApiClient.getInstance(session);

    for (const GUID of videoGuids) {
        let response: AxiosResponse<any> | undefined= await apiClient.callApi('videos/' + GUID, 'get');

        title = sanitize(response?.data['name']);
        // keep only the HLS playlist ('application/vnd.apple.mpegurl') among the playback urls
        playbackUrl = response?.data['playbackUrls']
            .filter((item: { [x: string]: string; }) =>
                item['mimeType'] == 'application/vnd.apple.mpegurl')
            .map((item: { [x: string]: string }) => {
                return item['playbackUrl'];
            })[0];

        posterImageUrl = response?.data['posterImage']['medium']['url'];
        date = publishedDateToString(response?.data['publishedDate']);
        totalChunks = durationToTotalChunks(response?.data.media['duration']);

        if (subtitles) {
            let captions: AxiosResponse<any> | undefined = await apiClient.callApi(`videos/${GUID}/texttracks`, 'get');

            // no text tracks at all for this video
            if (!captions?.data.value.length) {
                captionsUrl = undefined;
            }
            // exactly one track: use it without asking
            else if (captions?.data.value.length === 1) {
                logger.info(`Found subtitles for ${title}. \n`);
                captionsUrl = captions?.data.value.pop().url;
            }
            // multiple tracks: let the user pick by language/autogenerated flag
            else {
                const index: number = promptUser(captions.data.value.map((item: { language: string; autoGenerated: string; }) => {
                    return `[${item.language}] autogenerated: ${item.autoGenerated}`;
                }));
                captionsUrl = captions.data.value[index].url;
            }
        }

        metadata.push({
            date: date,
            totalChunks: totalChunks,
            title: title,
            outPath: '', // filled in later by createUniquePath
            playbackUrl: playbackUrl,
            posterImageUrl: posterImageUrl,
            captionsUrl: captionsUrl
        });
    }

    return metadata;
}
|
||||
|
||||
|
||||
export function createUniquePath(videos: Array<Video>, outDirs: Array<string>, format: string, skip?: boolean): Array<Video> {
|
||||
|
||||
videos.forEach((video: Video, index: number) => {
|
||||
let title = `${video.title} - ${video.date}`;
|
||||
let i = 0;
|
||||
|
||||
while (!skip && fs.existsSync(path.join(outDirs[index], title + '.' + format))) {
|
||||
title = `${video.title} - ${video.date}_${++i}`;
|
||||
}
|
||||
|
||||
|
||||
video.outPath = path.join(outDirs[index], title + '.' + format);
|
||||
});
|
||||
|
||||
return videos;
|
||||
}
|
||||
@@ -1,31 +1,32 @@
|
||||
import {
|
||||
sleep, parseVideoUrls, checkRequirements, makeUniqueTitle, ffmpegTimemarkToChunk,
|
||||
makeOutputDirectories, getOutputDirectoriesList, checkOutDirsUrlsMismatch
|
||||
} from './Utils';
|
||||
import { getPuppeteerChromiumPath } from './PuppeteerHelper';
|
||||
import { setProcessEvents } from './Events';
|
||||
import { ERROR_CODE } from './Errors';
|
||||
import { TokenCache } from './TokenCache';
|
||||
import { getVideoMetadata } from './Metadata';
|
||||
import { Metadata, Session } from './Types';
|
||||
import { drawThumbnail } from './Thumbnail';
|
||||
import { argv } from './CommandLineParser';
|
||||
import { ERROR_CODE } from './Errors';
|
||||
import { setProcessEvents } from './Events';
|
||||
import { logger } from './Logger';
|
||||
import { getPuppeteerChromiumPath } from './PuppeteerHelper';
|
||||
import { drawThumbnail } from './Thumbnail';
|
||||
import { TokenCache, refreshSession } from './TokenCache';
|
||||
import { Video, Session } from './Types';
|
||||
import { checkRequirements, ffmpegTimemarkToChunk, parseInputFile, parseCLIinput} from './Utils';
|
||||
import { getVideoInfo, createUniquePath } from './VideoUtils';
|
||||
|
||||
import puppeteer from 'puppeteer';
|
||||
import isElevated from 'is-elevated';
|
||||
import colors from 'colors';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { URL } from 'url';
|
||||
import sanitize from 'sanitize-filename';
|
||||
import cliProgress from 'cli-progress';
|
||||
import fs from 'fs';
|
||||
import isElevated from 'is-elevated';
|
||||
import puppeteer from 'puppeteer';
|
||||
|
||||
|
||||
const { FFmpegCommand, FFmpegInput, FFmpegOutput } = require('@tedconf/fessonia')();
|
||||
const tokenCache = new TokenCache();
|
||||
const tokenCache: TokenCache = new TokenCache();
|
||||
export const chromeCacheFolder = '.chrome_data';
|
||||
|
||||
async function init() {
|
||||
|
||||
async function init(): Promise<void> {
|
||||
setProcessEvents(); // must be first!
|
||||
|
||||
if (argv.verbose) {
|
||||
logger.level = 'verbose';
|
||||
}
|
||||
|
||||
if (await isElevated()) {
|
||||
process.exit(ERROR_CODE.ELEVATED_SHELL);
|
||||
}
|
||||
@@ -33,53 +34,58 @@ async function init() {
|
||||
checkRequirements();
|
||||
|
||||
if (argv.username) {
|
||||
console.info('Username: %s', argv.username);
|
||||
logger.info(`Username: ${argv.username}`);
|
||||
}
|
||||
|
||||
if (argv.simulate) {
|
||||
console.info(colors.yellow('Simulate mode, there will be no video download.\n'));
|
||||
}
|
||||
|
||||
if (argv.verbose) {
|
||||
console.info('Video URLs:');
|
||||
console.info(argv.videoUrls);
|
||||
logger.warn('Simulate mode, there will be no video downloaded. \n');
|
||||
}
|
||||
}
|
||||
|
||||
async function DoInteractiveLogin(url: string, username?: string): Promise<Session> {
|
||||
const videoId = url.split('/').pop() ?? process.exit(ERROR_CODE.INVALID_VIDEO_ID);
|
||||
|
||||
console.log('Launching headless Chrome to perform the OpenID Connect dance...');
|
||||
const browser = await puppeteer.launch({
|
||||
async function DoInteractiveLogin(url: string, username?: string): Promise<Session> {
|
||||
const videoId: string = url.split('/').pop() ?? process.exit(ERROR_CODE.INVALID_VIDEO_GUID);
|
||||
|
||||
logger.info('Launching headless Chrome to perform the OpenID Connect dance...');
|
||||
|
||||
const browser: puppeteer.Browser = await puppeteer.launch({
|
||||
executablePath: getPuppeteerChromiumPath(),
|
||||
headless: false,
|
||||
userDataDir: (argv.keepLoginCookies) ? chromeCacheFolder : undefined,
|
||||
args: [
|
||||
'--disable-dev-shm-usage',
|
||||
'--fast-start',
|
||||
'--no-sandbox'
|
||||
]
|
||||
});
|
||||
const page = (await browser.pages())[0];
|
||||
console.log('Navigating to login page...');
|
||||
const page: puppeteer.Page = (await browser.pages())[0];
|
||||
|
||||
logger.info('Navigating to login page...');
|
||||
await page.goto(url, { waitUntil: 'load' });
|
||||
|
||||
if (username) {
|
||||
await page.waitForSelector('input[type="email"]');
|
||||
await page.keyboard.type(username);
|
||||
await page.click('input[type="submit"]');
|
||||
try {
|
||||
if (username) {
|
||||
await page.waitForSelector('input[type="email"]', {timeout: 3000});
|
||||
await page.keyboard.type(username);
|
||||
await page.click('input[type="submit"]');
|
||||
}
|
||||
else {
|
||||
/* If a username was not provided we let the user take actions that
|
||||
lead up to the video page. */
|
||||
}
|
||||
}
|
||||
else {
|
||||
// If a username was not provided we let the user take actions that
|
||||
// lead up to the video page.
|
||||
catch (e) {
|
||||
/* If there is no email input selector we aren't in the login module,
|
||||
we are probably using the cache to aid the login.
|
||||
It could finish the login on its own if the user said 'yes' when asked to
|
||||
remember the credentials or it could still prompt the user for a password */
|
||||
}
|
||||
|
||||
await browser.waitForTarget(target => target.url().includes(videoId), { timeout: 150000 });
|
||||
console.info('We are logged in.');
|
||||
|
||||
let session = null;
|
||||
let tries: number = 1;
|
||||
await browser.waitForTarget((target: puppeteer.Target) => target.url().includes(videoId), { timeout: 150000 });
|
||||
logger.info('We are logged in.');
|
||||
|
||||
let session: Session | null = null;
|
||||
let tries = 1;
|
||||
while (!session) {
|
||||
try {
|
||||
let sessionInfo: any;
|
||||
@@ -100,85 +106,55 @@ async function DoInteractiveLogin(url: string, username?: string): Promise<Sessi
|
||||
|
||||
session = null;
|
||||
tries++;
|
||||
await sleep(3000);
|
||||
await page.waitFor(3000);
|
||||
}
|
||||
}
|
||||
|
||||
tokenCache.Write(session);
|
||||
console.log('Wrote access token to token cache.');
|
||||
console.log("At this point Chromium's job is done, shutting it down...\n");
|
||||
logger.info('Wrote access token to token cache.');
|
||||
logger.info("At this point Chromium's job is done, shutting it down...\n");
|
||||
|
||||
await browser.close();
|
||||
// --- Ignore all this for now ---
|
||||
// --- hopefully we won't need it ----
|
||||
// await sleep(1000);
|
||||
// let banner = await page.evaluate(
|
||||
// () => {
|
||||
// let topbar = document.getElementsByTagName('body')[0];
|
||||
// topbar.innerHTML =
|
||||
// '<h1 style="color: red">DESTREAMER NEEDS THIS WINDOW ' +
|
||||
// 'TO DO SOME ACCESS TOKEN MAGIC. DO NOT CLOSE IT.</h1>';
|
||||
// });
|
||||
// --------------------------------
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
function extractVideoGuid(videoUrls: string[]): string[] {
|
||||
const videoGuids: string[] = [];
|
||||
let guid: string | undefined = '';
|
||||
|
||||
for (const url of videoUrls) {
|
||||
try {
|
||||
const urlObj = new URL(url);
|
||||
guid = urlObj.pathname.split('/').pop();
|
||||
}
|
||||
catch (e) {
|
||||
console.error(`Unrecognized URL format in ${url}: ${e.message}`);
|
||||
process.exit(ERROR_CODE.INVALID_VIDEO_GUID);
|
||||
}
|
||||
async function downloadVideo(videoGUIDs: Array<string>, outputDirectories: Array<string>, session: Session): Promise<void> {
|
||||
|
||||
if (guid) {
|
||||
videoGuids.push(guid);
|
||||
}
|
||||
}
|
||||
|
||||
if (argv.verbose) {
|
||||
console.info('Video GUIDs:');
|
||||
console.info(videoGuids);
|
||||
}
|
||||
|
||||
return videoGuids;
|
||||
}
|
||||
|
||||
async function downloadVideo(videoUrls: string[], outputDirectories: string[], session: Session) {
|
||||
const videoGuids = extractVideoGuid(videoUrls);
|
||||
|
||||
console.log('Fetching metadata...');
|
||||
|
||||
const metadata: Metadata[] = await getVideoMetadata(videoGuids, session);
|
||||
logger.info('Fetching videos info... \n');
|
||||
const videos: Array<Video> = createUniquePath (
|
||||
await getVideoInfo(videoGUIDs, session, argv.closedCaptions),
|
||||
outputDirectories, argv.format, argv.skip
|
||||
);
|
||||
|
||||
if (argv.simulate) {
|
||||
metadata.forEach(video => {
|
||||
console.log(
|
||||
colors.yellow('\n\nTitle: ') + colors.green(video.title) +
|
||||
colors.yellow('\nPublished Date: ') + colors.green(video.date) +
|
||||
colors.yellow('\nPlayback URL: ') + colors.green(video.playbackUrl)
|
||||
videos.forEach((video: Video) => {
|
||||
logger.info(
|
||||
'\nTitle: '.green + video.title +
|
||||
'\nOutPath: '.green + video.outPath +
|
||||
'\nPublished Date: '.green + video.date +
|
||||
'\nPlayback URL: '.green + video.playbackUrl +
|
||||
((video.captionsUrl) ? ('\nCC URL: '.green + video.captionsUrl) : '')
|
||||
);
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (argv.verbose) {
|
||||
console.log(outputDirectories);
|
||||
}
|
||||
for (const video of videos) {
|
||||
|
||||
const outDirsIdxInc = outputDirectories.length > 1 ? 1:0;
|
||||
if (argv.skip && fs.existsSync(video.outPath)) {
|
||||
logger.info(`File already exists, skipping: ${video.outPath} \n`);
|
||||
continue;
|
||||
}
|
||||
|
||||
for (let i=0, j=0, l=metadata.length; i<l; ++i, j+=outDirsIdxInc) {
|
||||
const video = metadata[i];
|
||||
const pbar = new cliProgress.SingleBar({
|
||||
if (argv.keepLoginCookies) {
|
||||
logger.info('Trying to refresh token...');
|
||||
session = await refreshSession();
|
||||
}
|
||||
|
||||
const pbar: cliProgress.SingleBar = new cliProgress.SingleBar({
|
||||
barCompleteChar: '\u2588',
|
||||
barIncompleteChar: '\u2591',
|
||||
format: 'progress [{bar}] {percentage}% {speed} {eta_formatted}',
|
||||
@@ -188,37 +164,40 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
|
||||
hideCursor: true,
|
||||
});
|
||||
|
||||
console.log(colors.yellow(`\nDownloading Video: ${video.title}\n`));
|
||||
logger.info(`\nDownloading Video: ${video.title} \n`);
|
||||
logger.verbose('Extra video info \n' +
|
||||
'\t Video m3u8 playlist URL: '.cyan + video.playbackUrl + '\n' +
|
||||
'\t Video tumbnail URL: '.cyan + video.posterImageUrl + '\n' +
|
||||
'\t Video subtitle URL (may not exist): '.cyan + video.captionsUrl + '\n' +
|
||||
'\t Video total chunks: '.cyan + video.totalChunks + '\n');
|
||||
|
||||
video.title = makeUniqueTitle(sanitize(video.title) + ' - ' + video.date, outputDirectories[j], argv.skip, argv.format);
|
||||
|
||||
console.info('Spawning ffmpeg with access token and HLS URL. This may take a few seconds...');
|
||||
logger.info('Spawning ffmpeg with access token and HLS URL. This may take a few seconds...\n\n');
|
||||
if (!process.stdout.columns) {
|
||||
console.info(colors.red('Unable to get number of columns from terminal.\n' +
|
||||
'This happens sometimes in Cygwin/MSYS.\n' +
|
||||
'No progress bar can be rendered, however the download process should not be affected.\n\n' +
|
||||
'Please use PowerShell or cmd.exe to run destreamer on Windows.'));
|
||||
logger.warn(
|
||||
'Unable to get number of columns from terminal.\n' +
|
||||
'This happens sometimes in Cygwin/MSYS.\n' +
|
||||
'No progress bar can be rendered, however the download process should not be affected.\n\n' +
|
||||
'Please use PowerShell or cmd.exe to run destreamer on Windows.'
|
||||
);
|
||||
}
|
||||
|
||||
const headers = 'Authorization: Bearer ' + session.AccessToken;
|
||||
const headers: string = 'Authorization: Bearer ' + session.AccessToken;
|
||||
|
||||
// Very experimental inline thumbnail rendering
|
||||
if (!argv.noExperiments) {
|
||||
await drawThumbnail(video.posterImage, session);
|
||||
await drawThumbnail(video.posterImageUrl, session);
|
||||
}
|
||||
|
||||
const outputPath = outputDirectories[j] + path.sep + video.title + '.' + argv.format;
|
||||
const ffmpegInpt = new FFmpegInput(video.playbackUrl, new Map([
|
||||
const ffmpegInpt: any = new FFmpegInput(video.playbackUrl, new Map([
|
||||
['headers', headers]
|
||||
]));
|
||||
const ffmpegOutput = new FFmpegOutput(outputPath, new Map([
|
||||
const ffmpegOutput: any = new FFmpegOutput(video.outPath, new Map([
|
||||
argv.acodec === 'none' ? ['an', null] : ['c:a', argv.acodec],
|
||||
argv.vcodec === 'none' ? ['vn', null] : ['c:v', argv.vcodec],
|
||||
['n', null]
|
||||
]));
|
||||
const ffmpegCmd = new FFmpegCommand();
|
||||
const ffmpegCmd: any = new FFmpegCommand();
|
||||
|
||||
const cleanupFn = (): void => {
|
||||
const cleanupFn: () => void = () => {
|
||||
pbar.stop();
|
||||
|
||||
if (argv.noCleanup) {
|
||||
@@ -226,10 +205,10 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
|
||||
}
|
||||
|
||||
try {
|
||||
fs.unlinkSync(outputPath);
|
||||
fs.unlinkSync(video.outPath);
|
||||
}
|
||||
catch (e) {
|
||||
// Future handling of an error maybe
|
||||
// Future handling of an error (maybe)
|
||||
}
|
||||
};
|
||||
|
||||
@@ -240,9 +219,16 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
|
||||
// prepare ffmpeg command line
|
||||
ffmpegCmd.addInput(ffmpegInpt);
|
||||
ffmpegCmd.addOutput(ffmpegOutput);
|
||||
if (argv.closedCaptions && video.captionsUrl) {
|
||||
const captionsInpt: any = new FFmpegInput(video.captionsUrl, new Map([
|
||||
['headers', headers]
|
||||
]));
|
||||
|
||||
ffmpegCmd.on('update', (data: any) => {
|
||||
const currentChunks = ffmpegTimemarkToChunk(data.out_time);
|
||||
ffmpegCmd.addInput(captionsInpt);
|
||||
}
|
||||
|
||||
ffmpegCmd.on('update', async (data: any) => {
|
||||
const currentChunks: number = ffmpegTimemarkToChunk(data.out_time);
|
||||
|
||||
pbar.update(currentChunks, {
|
||||
speed: data.bitrate
|
||||
@@ -259,22 +245,15 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
|
||||
// let the magic begin...
|
||||
await new Promise((resolve: any) => {
|
||||
ffmpegCmd.on('error', (error: any) => {
|
||||
if (argv.skip && error.message.includes('exists') && error.message.includes(outputPath)) {
|
||||
pbar.update(video.totalChunks); // set progress bar to 100%
|
||||
console.log(colors.yellow(`\nFile already exists, skipping: ${outputPath}`));
|
||||
resolve();
|
||||
}
|
||||
else {
|
||||
cleanupFn();
|
||||
cleanupFn();
|
||||
|
||||
console.log(`\nffmpeg returned an error: ${error.message}`);
|
||||
process.exit(ERROR_CODE.UNK_FFMPEG_ERROR);
|
||||
}
|
||||
logger.error(`FFmpeg returned an error: ${error.message}`);
|
||||
process.exit(ERROR_CODE.UNK_FFMPEG_ERROR);
|
||||
});
|
||||
|
||||
ffmpegCmd.on('success', () => {
|
||||
pbar.update(video.totalChunks); // set progress bar to 100%
|
||||
console.log(colors.green(`\nDownload finished: ${outputPath}`));
|
||||
logger.info(`\nDownload finished: ${video.outPath} \n`);
|
||||
resolve();
|
||||
});
|
||||
|
||||
@@ -285,19 +264,36 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
|
||||
async function main(): Promise<void> {
|
||||
await init(); // must be first
|
||||
|
||||
const outDirs: string[] = getOutputDirectoriesList(argv.outputDirectory as string);
|
||||
const videoUrls: string[] = parseVideoUrls(argv.videoUrls);
|
||||
let session: Session;
|
||||
session = tokenCache.Read() ?? await DoInteractiveLogin('https://web.microsoftstream.com/', argv.username);
|
||||
|
||||
checkOutDirsUrlsMismatch(outDirs, videoUrls);
|
||||
makeOutputDirectories(outDirs); // create all dirs now to prevent ffmpeg panic
|
||||
logger.verbose('Session and API info \n' +
|
||||
'\t API Gateway URL: '.cyan + session.ApiGatewayUri + '\n' +
|
||||
'\t API Gateway version: '.cyan + session.ApiGatewayVersion + '\n');
|
||||
|
||||
session = tokenCache.Read() ?? await DoInteractiveLogin(videoUrls[0], argv.username);
|
||||
let videoGUIDs: Array<string>;
|
||||
let outDirs: Array<string>;
|
||||
|
||||
downloadVideo(videoUrls, outDirs, session);
|
||||
if (argv.videoUrls) {
|
||||
logger.info('Parsing video/group urls');
|
||||
[videoGUIDs, outDirs] = await parseCLIinput(argv.videoUrls as Array<string>, argv.outputDirectory, session);
|
||||
}
|
||||
else {
|
||||
logger.info('Parsing input file');
|
||||
[videoGUIDs, outDirs] = await parseInputFile(argv.inputFile!, argv.outputDirectory, session);
|
||||
}
|
||||
|
||||
logger.verbose('List of GUIDs and corresponding output directory \n' +
|
||||
videoGUIDs.map((guid: string, i: number) =>
|
||||
`\thttps://web.microsoftstream.com/video/${guid} => ${outDirs[i]} \n`).join(''));
|
||||
|
||||
|
||||
downloadVideo(videoGUIDs, outDirs, session);
|
||||
}
|
||||
|
||||
|
||||
main();
|
||||
|
||||
Reference in New Issue
Block a user