diff --git a/.gitignore b/.gitignore
index 29ecadd..d020c14 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,5 +6,4 @@
 node_modules
 videos
 release
-swagger.json
-cc.json
\ No newline at end of file
+build
\ No newline at end of file
diff --git a/.vscode/launch.json b/.vscode/launch.json
index d1d9d8f..f44acb8 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -7,12 +7,13 @@
         {
             "type": "node",
+            "runtimeArgs": ["--max-http-header-size", "32768"],
            "request": "launch",
             "name": "Launch Program",
-            "program": "${workspaceFolder}/destreamer.js",
+            "program": "${workspaceFolder}/build/src/destreamer.js",
             "args": [
-                "--videoUrls",
-                "https://web.microsoftstream.com/video/6f1a382b-e20c-44c0-98fc-5608286e48bc"
+                "-i",
+                "https://web.microsoftstream.com/video/ce4da1ff-0400-86ec-2ad6-f1ea83412074" // Bacon and Eggs
             ]
         }
     ]
 }
diff --git a/README.md b/README.md
index 27b91a2..0740153 100644
--- a/README.md
+++ b/README.md
@@ -17,9 +17,9 @@ This release would not have been possible without the code and time contributed
 - Major code refactoring
 - Dramatically improved error handling
 - We now have a token cache so we can reuse access tokens. This really means that within one hour you need to perform the interactive browser login only once.
-- We removed the dependency on `youtube-dl`.
+- We removed the dependency on `youtube-dl`
 - Getting to the HLS URL is dramatically more reliable as we dropped parsing the DOM for the video element in favor of calling the Microsoft Stream API
-- Fixed access token lifetime bugs (you no longer get a 403 Forbidden midway though your download list). Still one outstanding edge case here, see _Found a bug_ at the bottom for more.
+- Fixed access token lifetime bugs (you no longer get a 403 Forbidden midway though your download list)
 - Fixed a major 2FA bug that would sometimes cause a timeout in our code
 - Fixed a wide variety of other bugs, maybe introduced a few new ones :)
 
@@ -132,9 +132,7 @@ Contributions are welcome. Open an issue first before sending in a pull request.
 
 ## Found a bug?
 
-There is one outstanding bug that you may hit: if you download two or more videos in one go, if one of the videos take more than one hour to complete, the next download will fail as the cookie is now expired. We'll patch this soon.
-
-For other bugs, please open an [issue](https://github.com/snobu/destreamer/issues) and we'll look into it.
+Please open an [issue](https://github.com/snobu/destreamer/issues) and we'll look into it.
 
 
 [ffmpeg]: https://www.ffmpeg.org/download.html
diff --git a/destreamer.cmd b/destreamer.cmd
index 2c922e2..9ba15a6 100644
--- a/destreamer.cmd
+++ b/destreamer.cmd
@@ -1 +1,10 @@
-node.exe build\src\destreamer.js %*
\ No newline at end of file
+@ECHO OFF
+
+node.exe --version | findstr "v8."
+IF %ERRORLEVEL% EQU 0 GOTO Node8
+
+node.exe --max-http-header-size 32768 build\src\destreamer.js %*
+
+:Node8
+node.exe build\src\destreamer.js %*
+
diff --git a/destreamer.ps1 b/destreamer.ps1
index dba5b34..5df7c8d 100644
--- a/destreamer.ps1
+++ b/destreamer.ps1
@@ -1 +1,8 @@
-node.exe build\src\destreamer.js $args
\ No newline at end of file
+$NodeVersion = Invoke-Expression "node.exe --version"
+if ($NodeVersion.StartsWith("v8.")) {
+    node.exe build\src\destreamer.js $args
+}
+else {
+    node.exe --max-http-header-size 32768 build\src\destreamer.js $args
+}
+
diff --git a/destreamer.sh b/destreamer.sh
index 3d65b8c..6c1bf8c 100755
--- a/destreamer.sh
+++ b/destreamer.sh
@@ -1 +1,8 @@
-node build/src/destreamer.js "$@"
\ No newline at end of file
+#!/usr/bin/env bash
+NODE_VERSION=$(node --version)
+
+if [[ $NODE_VERSION == "v8."* ]]; then
+    node build/src/destreamer.js "$@"
+else
+    node --max-http-header-size 32768 build/src/destreamer.js "$@"
+fi
\ No newline at end of file
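The three wrapper scripts above (destreamer.cmd, destreamer.ps1, destreamer.sh) share one idea: pass `--max-http-header-size 32768` to Node, presumably so the large Stream access tokens fit into request headers, but skip the flag on a v8.x runtime, which may not recognize it. Below is a minimal TypeScript sketch of doing the same check from inside the entry point by re-executing Node with the flag; the `FLAG` constant and the re-exec approach are illustrative assumptions, not part of this patch.

```typescript
// Illustrative alternative to the wrapper scripts: re-exec Node with a larger
// HTTP header limit from inside the entry point. Not part of this patch.
import { spawnSync } from 'child_process';

const FLAG = '--max-http-header-size=32768';

// Old Node 8 builds may not know the flag, so skip it there, as the scripts do.
if (!process.version.startsWith('v8.') && !process.execArgv.includes(FLAG)) {
    // Re-run the same script with the flag and forward its exit code.
    const child = spawnSync(process.execPath, [FLAG, ...process.argv.slice(1)], {
        stdio: 'inherit'
    });
    process.exit(child.status === null ? 1 : child.status);
}
```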
diff --git a/package-lock.json b/package-lock.json
index 6abe657..de75fd1 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -481,9 +481,9 @@
       }
     },
     "cli-progress": {
-      "version": "3.7.0",
-      "resolved": "https://registry.npmjs.org/cli-progress/-/cli-progress-3.7.0.tgz",
-      "integrity": "sha512-xo2HeQ3vNyAO2oYF5xfrk5YM6jzaDNEbeJRLAQir6QlH54g4f6AXW+fLyJ/f12gcTaCbJznsOdQcr/yusp/Kjg==",
+      "version": "3.8.0",
+      "resolved": "https://registry.npmjs.org/cli-progress/-/cli-progress-3.8.0.tgz",
+      "integrity": "sha512-3e+m7ecKbVTF2yo186vrrt/5217ZwE64z61kMwhSFmgrF3qZiTUuV9Fdh2RyzSkhLRfsqFf721KiUDEAJlP5pA==",
       "requires": {
         "colors": "^1.1.2",
         "string-width": "^4.2.0"
diff --git a/package.json b/package.json
index b6e933d..325503d 100644
--- a/package.json
+++ b/package.json
@@ -9,7 +9,7 @@
   "main": "build/src/destreamer.js",
   "bin": "build/src/destreamer.js",
   "scripts": {
-    "build": "echo Transpiling TypeScript to JavaScript... & node node_modules/typescript/bin/tsc --listEmittedFiles",
+    "build": "echo Transpiling TypeScript to JavaScript... && node node_modules/typescript/bin/tsc && echo Destreamer was built successfully.",
     "test": "mocha build/test"
   },
   "keywords": [],
diff --git a/src/TokenCache.ts b/src/TokenCache.ts
index c5d47fb..ce3aff7 100644
--- a/src/TokenCache.ts
+++ b/src/TokenCache.ts
@@ -56,16 +56,23 @@ export class TokenCache {
         });
     }
 
-    public async RefreshToken(session: Session): Promise<string | null> {
+    public async RefreshToken(session: Session, cookie?: string | null): Promise<string | null> {
         let endpoint = `${session.ApiGatewayUri}refreshtoken?api-version=${session.ApiGatewayVersion}`;
 
-        let response = await axios.get(endpoint,
-            {
-                headers: {
-                    Authorization: `Bearer ${session.AccessToken}`
-                }
-            });
+        let headers: Function = (): object => {
+            if (cookie) {
+                return {
+                    Cookie: cookie
+                };
+            }
+            else {
+                return {
+                    Authorization: 'Bearer ' + session.AccessToken
+                };
+            }
+        }
+        let response = await axios.get(endpoint, { headers: headers() });
 
         let freshCookie: string | null = null;
 
         try {
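A short usage sketch of the `RefreshToken` signature introduced above: with no cookie it authenticates with the bearer token, and on later calls it can be handed the cookie returned by the previous refresh, which is how `src/destreamer.ts` uses it below. The helper name and the `./src/Types` import path are assumptions for illustration only.

```typescript
import { TokenCache } from './src/TokenCache';
import { Session } from './src/Types'; // assumed location of the Session type

// Illustrative helper, not part of the patch: hands the previously returned
// cookie back to RefreshToken so only the first call relies on the bearer token.
async function refreshTwice(tokenCache: TokenCache, session: Session): Promise<string | null> {
    // No cookie yet: RefreshToken falls back to `Authorization: Bearer ...`.
    let freshCookie: string | null = await tokenCache.RefreshToken(session, null);

    // Cookie available: RefreshToken sends `Cookie: ...` instead.
    freshCookie = await tokenCache.RefreshToken(session, freshCookie);

    return freshCookie;
}
```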
diff --git a/src/destreamer.ts b/src/destreamer.ts
index 2dd49b3..66b9645 100644
--- a/src/destreamer.ts
+++ b/src/destreamer.ts
@@ -23,6 +23,10 @@ import cliProgress from 'cli-progress';
 const { FFmpegCommand, FFmpegInput, FFmpegOutput } = require('@tedconf/fessonia')();
 const tokenCache = new TokenCache();
 
+// The cookie lifetime is one hour,
+// let's refresh every 3000 seconds.
+const REFRESH_TOKEN_INTERVAL = 3000;
+
 async function init() {
     setProcessEvents(); // must be first!
 
@@ -127,6 +131,7 @@ function extractVideoGuid(videoUrls: string[]): string[] {
 
 async function downloadVideo(videoUrls: string[], outputDirectories: string[], session: Session) {
     const videoGuids = extractVideoGuid(videoUrls);
+    let lastTokenRefresh: number;
 
     console.log('Fetching metadata...');
 
@@ -147,7 +152,10 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
 
     if (argv.verbose)
         console.log(outputDirectories);
+
+    let freshCookie: string | null = null;
     const outDirsIdxInc = outputDirectories.length > 1 ? 1:0;
+
     for (let i=0, j=0, l=metadata.length; i<l; ++i, j+=outDirsIdxInc) {
+        let RefreshTokenMaybe: Function = async (): Promise<void> => {
+            let elapsed = Date.now() - lastTokenRefresh;
+            if (elapsed > REFRESH_TOKEN_INTERVAL * 1000) {
+                if (argv.verbose) {
+                    console.info(colors.green('\nRefreshing access token...'));
+                }
+                lastTokenRefresh = Date.now();
+                freshCookie = await tokenCache.RefreshToken(session, freshCookie);
+            }
+        };
+
         const outputPath = outputDirectories[j] + path.sep + video.title + '.mp4';
         const ffmpegInpt = new FFmpegInput(video.playbackUrl, new Map([
             ['headers', headers]
         ]));
@@ -192,7 +219,7 @@
         const ffmpegOutput = new FFmpegOutput(outputPath);
         const ffmpegCmd = new FFmpegCommand();
 
-        const cleanupFn = function () {
+        const cleanupFn = (): void => {
             pbar.stop();
 
             if (argv.noCleanup)
@@ -211,9 +238,9 @@
         ffmpegCmd.addInput(ffmpegInpt);
         ffmpegCmd.addOutput(ffmpegOutput);
 
-        // set events
         ffmpegCmd.on('update', (data: any) => {
             const currentChunks = ffmpegTimemarkToChunk(data.out_time);
+            RefreshTokenMaybe();
 
             pbar.update(currentChunks, {
                 speed: data.bitrate
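The `RefreshTokenMaybe` closure above is a simple time-based throttle: it is cheap to call on every ffmpeg `update` event, but it only hits the refresh endpoint once `REFRESH_TOKEN_INTERVAL` seconds have elapsed since the last refresh. A standalone sketch of that pattern follows; the names `makeThrottledRefresher` and `refreshFn` are illustrative and not part of the patch.

```typescript
// Generic version of the throttled-refresh idea used in downloadVideo().
// The returned function can be invoked very often; it refreshes at most
// once per `intervalSeconds`.
function makeThrottledRefresher(refreshFn: () => Promise<void>, intervalSeconds: number): () => Promise<void> {
    let lastRefresh = Date.now();

    return async (): Promise<void> => {
        const elapsed = Date.now() - lastRefresh;
        if (elapsed > intervalSeconds * 1000) {
            lastRefresh = Date.now();
            await refreshFn();
        }
    };
}

// Example: wire it to a refresh call, then invoke it from a frequent event handler.
const refreshMaybe = makeThrottledRefresher(async () => { /* refresh the token here */ }, 3000);
```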
diff --git a/test/test.ts b/test/test.ts
index dab4395..b023464 100644
--- a/test/test.ts
+++ b/test/test.ts
@@ -7,28 +7,20 @@ import fs from 'fs';
 let browser: any;
 let page: any;
 
-before(async () => {
-    browser = await puppeteer.launch({
-        headless: true,
-        args: ['--disable-dev-shm-usage']
-    });
-    page = await browser.newPage();
-});
-
 describe('Puppeteer', () => {
     it('should grab GitHub page title', async () => {
-        await page.goto("https://github.com/", { waitUntil: 'networkidle2' });
+        browser = await puppeteer.launch({
+            headless: true,
+            args: ['--disable-dev-shm-usage']
+        });
+        page = await browser.newPage();
+        await page.goto("https://github.com/", { waitUntil: 'load' });
         let pageTitle = await page.title();
         assert.equal(true, pageTitle.includes('GitHub'));
-
+        await browser.close();
     }).timeout(15000); // yeah, this may take a while...
 });
 
-after(async () => {
-    await browser.close();
-});
-
-
 describe('Destreamer', () => {
     it('should parse and sanitize URL list from file', () => {
         const testIn: string[] = [
diff --git a/tsconfig.json b/tsconfig.json
index d971e41..47e159d 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -6,6 +6,7 @@
         "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
         "strict": true, /* Enable all strict type-checking options. */
         "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
-        "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
+        "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
+        "sourceMap": true,
     }
 }
\ No newline at end of file