1
0
mirror of https://github.com/snobu/destreamer.git synced 2026-02-09 08:19:41 +00:00

64 Commits

Author SHA1 Message Date
Mahmoud Ashraf
ffd76ba226 Fix a small typo in terminal output (#363) 2021-04-07 21:56:00 +03:00
lukaarma
cea18bbf5e Merge pull request #289 from fulminemizzega/patch-1
Update prereqs and fix quotes in readme.md
2021-02-09 21:31:42 +01:00
lukaarma
3fe64d13a3 Merge branch 'aria2c_forRealNow' into patch-1 2021-02-09 21:31:13 +01:00
Adrian Calinescu
6d8f3c6ee0 Fix return HTTP 403 reason with or without verbose (#316) 2021-01-26 14:52:23 +02:00
lukaarma
1e8db6f9b9 Merge pull request #303 from pastacolsugo/patch-1
Added `aria2` prerequisite
2021-01-08 16:17:14 +01:00
Ugo Baroncini
195d0dd1f8 Added aria2 prerequisite
Added a prerequisite for the `aria2` package to be installed when using this version.
2020-12-27 00:10:55 +01:00
fulminemizzega
66a7f609b3 fix quotes in input file example in README.md
Same as PR snobu#283
2020-12-07 13:08:05 +01:00
fulminemizzega
c1a0994e2a add aria2 to prereqs 2020-12-07 12:47:03 +01:00
Luca Armaroli
6d28b3c397 updated all packages 2020-12-03 18:37:58 +01:00
Luca Armaroli
08364ed635 new version of error retry, couldn't test it 2020-12-03 18:34:11 +01:00
Luca Armaroli
ddafc5091d fixed parsing for group with more than 100 videos
fixed error logging
2020-12-03 17:16:49 +01:00
Adrian Calinescu
0b4c900e3f Update README.md 2020-12-03 00:08:59 +02:00
Adrian Calinescu
461df7b726 Fix for group download IRL this time 2020-11-14 20:09:45 +02:00
Adrian Calinescu
0f2d902d2e Fix verbose logging when we get JSON back
Which i guess was like 99% of the time? :)
2020-11-14 19:59:11 +02:00
Adrian Calinescu
4a45c68943 Fix group download, added 100 video limit 2020-11-14 19:56:59 +02:00
lukaarma
eacb2b63c1 Merge pull request #265 from rpaulucci3/patch-1
Fix typo and capitalization in Errors.ts
2020-10-17 18:30:43 +02:00
Rafael A O Paulucci
a179bdcadc Update Errors.ts
Fix typo in `MISSING_ARIA2` and some capitalization
2020-10-17 11:30:00 -03:00
Luca Armaroli
1763dc8cbd changed exec in favour of spawn for aria2c
(should solve and close #254)
2020-10-11 22:20:45 +02:00
Luca Armaroli
41206a97f0 fixed stupid session refreshing typo 2020-10-11 22:19:55 +02:00
Luca Armaroli
85f3beae71 added access token refresh logic (closes #255) 2020-10-11 21:56:08 +02:00
Luca Armaroli
3249759c29 Merge branch 'master' of https://github.com/snobu/destreamer into aria2c_forRealNow 2020-10-11 21:52:20 +02:00
Luca Armaroli
2a3c3eb225 fixed buffer maxLenght exceded on aria2c daemon 2020-10-09 13:33:11 +02:00
Luca Armaroli
b89e04156f workaround for listeners not removed
fixed typo
2020-09-27 22:48:05 +02:00
Luca Armaroli
331efd9773 fix for *nix platforms 2020-09-25 10:32:46 +02:00
Luca Armaroli
020518e542 minor comments and variables name changes 2020-09-25 10:32:29 +02:00
Luca Armaroli
502565dcea better webSocket initialization
It should solve all timing issues
2020-09-24 02:36:00 +02:00
Luca Armaroli
c7e0415786 process.exit on uncaught exceptions 2020-09-24 02:30:49 +02:00
Luca Armaroli
14cfe7c18e typo fix 2020-09-24 02:30:16 +02:00
Luca Armaroli
95c7150449 improved shutdown sequence
done a couple of TODOs
2020-09-20 23:15:56 +02:00
Luca Armaroli
482a506145 fixed bug on hanging on shutdown
improved shutdown sequence
2020-09-20 23:15:15 +02:00
Luca Armaroli
38edbadf4a check for aria existance
changed ffmpeg error message
2020-09-20 02:01:28 +02:00
Luca Armaroli
af4725c371 fixed linting errors 2020-09-20 01:57:04 +02:00
Luca Armaroli
c9c9fefd2d removed random useless linting test 2020-09-20 01:28:19 +02:00
Luca Armaroli
8df51555f7 implemented port finding
upgraded logging during execution
2020-09-20 01:24:01 +02:00
Luca Armaroli
3e472f9ae0 created error for no port aviable 2020-09-20 01:23:34 +02:00
Luca Armaroli
9453458664 installed portfinder
removed useless packages
moved types to devDependencies

upgraded packages to more recent versions
- axios                             ^0.19.2  →  ^0.20.0
- axios-retry                        ^3.1.8  →   ^3.1.9
- puppeteer                           2.1.1  →    5.3.0
- yargs                             ^15.4.1  →  ^16.0.3
- @types/mocha                       ^7.0.2  →   ^8.0.3
- @types/puppeteer                  ^1.20.6  →   ^3.0.2
- @typescript-eslint/eslint-plugin  ^2.34.0  →   ^4.1.1
- @typescript-eslint/parser         ^2.34.0  →   ^4.1.1
- eslint                             ^6.8.0  →   ^7.9.0
- mocha                              ^7.2.0  →   ^8.1.3
- typescript                         ^3.9.7  →   ^4.0.3
2020-09-20 01:23:08 +02:00
Luca Armaroli
7cab44a2e4 added debug statement 2020-09-19 23:29:44 +02:00
Luca Armaroli
6c8628e5e1 code cleanup 2020-09-19 23:16:55 +02:00
Luca Armaroli
796753f170 change user agent 2020-09-12 13:51:52 +02:00
Luca Armaroli
6f082e163b marked unused code 2020-09-09 21:18:40 +02:00
Luca Armaroli
16a85325d9 removed useless old code 2020-09-09 21:18:17 +02:00
Luca Armaroli
e9dea1484e removed bug in quality selection 2020-09-09 05:40:25 +02:00
Luca Armaroli
a93b32879c updating comments 2020-09-09 05:00:28 +02:00
Luca Armaroli
f1476ffe39 done decryption/merging of video/audio/sub traks 2020-09-09 04:56:12 +02:00
Luca Armaroli
ec099e9124 - fixed progress bar not updating
- fixed comments
2020-09-09 04:55:14 +02:00
Luca Armaroli
96f4c90277 - removed useless function/properties
- added filename property to Video type
2020-09-09 04:54:17 +02:00
Luca Armaroli
a185f51eb5 fixed typo and added comment 2020-09-09 04:52:21 +02:00
Luca Armaroli
aa21e54a3d added quality option 2020-09-09 04:51:35 +02:00
Luca Armaroli
8b61f86639 added debug logging 2020-09-09 04:51:03 +02:00
Luca Armaroli
d037b7cfb2 renamed and finished decrypter 2020-09-09 04:50:24 +02:00
Luca Armaroli
9e25870191 updated packages 2020-09-07 22:17:11 +02:00
Luca Armaroli
6e874f5138 exclude debug launch config 2020-09-07 17:49:44 +02:00
Luca Armaroli
1dff41c1bf Merge branch 'master' of https://github.com/snobu/destreamer into aria2c_forRealNow 2020-09-07 14:34:26 +02:00
Luca Armaroli
903f2bfafc updated destreamer to use the new download method 2020-09-05 12:51:01 +02:00
Luca Armaroli
0c65ff7dfe added parsing of m3u8 file down to a list of links 2020-09-05 12:42:47 +02:00
Luca Armaroli
5350bc324b added debug logging 2020-09-05 12:42:00 +02:00
Luca Armaroli
d29bd54d5b very simple test for SIGINT 2020-09-05 12:41:39 +02:00
Luca Armaroli
29a6fab20b minor formatting changes 2020-09-05 12:41:05 +02:00
Luca Armaroli
6c0e37ad98 created WebSocket/Aria2c errors 2020-09-05 12:40:41 +02:00
Luca Armaroli
0d8b4204fa changed UserAgent version
added debug logging
2020-09-05 12:40:16 +02:00
Luca Armaroli
685fa27cc7 added debug and best quality flags 2020-09-05 12:39:21 +02:00
Luca Armaroli
67573fcf86 installed WebSocket and updated Tmp 2020-09-05 12:37:37 +02:00
Luca Armaroli
53342932d9 first draft of Decrypter 2020-09-05 12:37:07 +02:00
Luca Armaroli
0cbc962bf3 first draft of DownloadManager 2020-09-05 12:36:39 +02:00
23 changed files with 6245 additions and 1685 deletions

View File

@@ -6,9 +6,6 @@ on:
- 'README.md'
branches:
- master
pull_request:
branches:
- master
jobs:
build:
@@ -17,7 +14,7 @@ jobs:
strategy:
matrix:
node-version: [10.x, 12.x, 13.x]
node-version: [8.x, 10.x, 12.x, 13.x]
steps:
- uses: actions/checkout@v1

3
.gitignore vendored
View File

@@ -3,9 +3,8 @@
*.log
*.js
*.zip
*.xml
yarn.lock
.vscode\launch.json
.chrome_data
node_modules

20
.vscode/launch.json vendored
View File

@@ -1,20 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"runtimeArgs": ["--max-http-header-size", "32768"],
"request": "launch",
"name": "Launch Program",
"program": "${workspaceFolder}/build/src/destreamer.js",
"args": [
"-i",
"https://web.microsoftstream.com/video/ce4da1ff-0400-86ec-2ad6-f1ea83412074" // Bacon and Eggs
]
}
]
}

View File

@@ -1,4 +1,3 @@
{
"eslint.enable": true,
"typescript.tsdk": "node_modules\\typescript\\lib"
}
"eslint.enable": true
}

View File

@@ -2,7 +2,9 @@
<img src="https://github.com/snobu/destreamer/workflows/Node%20CI/badge.svg" alt="CI build status" />
</a>
**destreamer v3.0** is just around the corner. You can try out a pre-release today by cloning [this branch](https://github.com/snobu/destreamer/tree/aria2c_forRealNow).
# destreamer v3.0 (aria2c as download manager)
## This is a pre-release branch so don't expect stability, do expect speed improvements. Tons of it.
![destreamer](assets/logo.png)
@@ -14,7 +16,7 @@ _(Alternative artwork proposals are welcome! Submit one through an Issue.)_
This release would not have been possible without the code and time contributed by two distinguished developers: [@lukaarma](https://github.com/lukaarma) and [@kylon](https://github.com/kylon). Thank you!
### Specialized versions
### Specialized vesions
- [Politecnico di Milano][polimi]: fork over at https://github.com/SamanFekri/destreamer
- [Università di Pisa][unipi]: fork over at https://github.com/Guray00/destreamer-unipi
@@ -38,11 +40,12 @@ Hopefully this doesn't break the end user agreement for Microsoft Stream. Since
## Prereqs
- [**Node.js**][node]: You'll need Node.js version 8.0 or higher. A GitHub Action runs tests on all major Node versions on every commit. One caveat for Node 8, if you get a `Parse Error` with `code: HPE_HEADER_OVERFLOW` you're out of luck and you'll need to upgrade to Node 10+. PLEASE NOTE WE NO LONGER TEST BUILDS AGAINST NODE 8.x. YOU ARE ON YOUR OWN.
- [**Node.js**][node]: You'll need Node.js version 8.0 or higher. A GitHub Action runs tests on all major Node versions on every commit. One caveat for Node 8, if you get a `Parse Error` with `code: HPE_HEADER_OVERFLOW` you're out of luck and you'll need to upgrade to Node 10+.
- **npm**: usually comes with Node.js, type `npm` in your terminal to check for its presence
- [**ffmpeg**][ffmpeg]: a recent version (year 2019 or above), in `$PATH` or in the same directory as this README file (project root).
- [**aria2**][aria2]: aria2 is a utility for downloading files with multiple threads, fast.
- [**git**][git]: one or more npm dependencies require git.
- [**aria2**][aria2]: present in your `$PATH`, on Linux you can install via `sudo apt install aria2`.
Destreamer takes a [honeybadger](https://www.youtube.com/watch?v=4r7wHMg5Yjg) approach towards the OS it's running on. We've successfully tested it on Windows, macOS and Linux.
@@ -58,25 +61,29 @@ Note that destreamer won't run in an elevated (Administrator/root) shell. Runnin
## Can i plug in my own browser?
Yes, yes you can. This may be useful if your main browser has some authentication plugins that are required for you to logon to your Microsoft Stream tenant.
To use your own browser for the authentication part, locate the following snippet in `src/destreamer.ts` and `src/TokenCache.ts`:
To use your own browser for the authentication part, locate the following snippet in `src/destreamer.ts`:
```typescript
const browser: puppeteer.Browser = await puppeteer.launch({
executablePath: getPuppeteerChromiumPath(),
// …
});
executablePath: getPuppeteerChromiumPath(),
headless: false,
userDataDir: (argv.keepLoginCookies) ? chromeCacheFolder : undefined,
args: [
'--disable-dev-shm-usage',
'--fast-start',
'--no-sandbox'
]
});
```
Navigate to `chrome://version` in the browser you want to plug in and copy executable path from there. Use double backslash for Windows.
Now, change `executablePath` to reflect the path to your browser and profile (i.e. to use Microsoft Edge on Windows):
```typescript
executablePath: 'C:\\Program Files (x86)\\Microsoft\\Edge\\Application\\msedge.exe',
executablePath: "'C:\Program Files (x86)\Microsoft\Edge\Application\msedge.exe' --profile-directory=Default",
```
You can add `userDataDir` right after `executablePath` with the path to your browser profile (also shown in `chrome://version`) if you want that loaded as well.
Note that for Mac/Linux the path will look a little different but no other changes are necessary.
Remember to rebuild (`npm run build`) every time you change this configuration.
You need to rebuild (`npm run build`) every time you change this configuration.
## How to build
@@ -177,7 +184,7 @@ https://web.microsoftstream.com/video/xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx
```
### Title template
The `-t` option allows user to specify a custom filename for the videos.
The `-t` option allows users to input a template string for the output file names.
You can use one or more of the following magic sequence which will get substituted at runtime. The magic sequence must be surrounded by curly brackets like this: `{title} {publishDate}`
@@ -189,20 +196,8 @@ You can use one or more of the following magic sequence which will get substitut
- `authorEmail`: E-mail of video publisher
- `uniqueId`: An _unique-enough_ ID generated from the video metadata
Examples -
Example -
```
Input:
-t 'This is an example'
Expected filename:
This is an example.mkv
Input:
-t 'This is an example by {author}'
Expected filename:
This is an example by lukaarma.mkv
Input:
-t '{title} - {duration} - {publishDate} - {publishTime} - {author} - {authorEmail} - {uniqueId}'
@@ -222,14 +217,6 @@ iTerm2 on a Mac -
By default, downloads are saved under project root `Destreamer/videos/` ( Not the system media Videos folder ), unless specified by `-o` (output directory).
## KNOWN BUGS
If you get a
```
[FATAL ERROR] Unknown error: exit code 4
````
when running destreamer, then make sure you're running a recent (post year 2019), stable version of **ffmpeg**.
## Contributing
Contributions are welcome. Open an issue first before sending in a pull request. All pull requests require at least one code review before they are merged to master.
@@ -240,10 +227,10 @@ Please open an [issue](https://github.com/snobu/destreamer/issues) and we'll loo
[ffmpeg]: https://www.ffmpeg.org/download.html
[aria2]: https://github.com/aria2/aria2/releases
[xming]: https://sourceforge.net/projects/xming/
[node]: https://nodejs.org/en/download/
[git]: https://git-scm.com/downloads
[aria2]: https://aria2.github.io
[wsl]: https://github.com/snobu/destreamer/issues/90#issuecomment-619377950
[polimi]: https://www.polimi.it
[unipi]: https://www.unipi.it/

5589
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,51 +1,55 @@
{
"name": "destreamer",
"repository": {
"type": "git",
"url": "git://github.com/snobu/destreamer.git"
},
"version": "2.1.0",
"description": "Save Microsoft Stream videos for offline enjoyment.",
"main": "build/src/destreamer.js",
"bin": "build/src/destreamer.js",
"scripts": {
"build": "echo Transpiling TypeScript to JavaScript... && tsc && echo Destreamer was built successfully.",
"watch": "tsc --watch",
"test": "mocha build/test",
"lint": "eslint src/*.ts"
},
"keywords": [],
"author": "snobu",
"license": "MIT",
"devDependencies": {
"@types/mocha": "^8.0.4",
"@types/puppeteer": "^5.4.0",
"@types/readline-sync": "^1.4.3",
"@types/tmp": "^0.2.0",
"@types/yargs": "^15.0.11",
"@typescript-eslint/eslint-plugin": "^4.9.0",
"@typescript-eslint/parser": "^4.9.0",
"eslint": "^7.14.0",
"mocha": "^8.2.1",
"tmp": "^0.2.1"
},
"dependencies": {
"@tedconf/fessonia": "^2.1.2",
"@types/cli-progress": "^3.8.0",
"@types/jwt-decode": "^2.2.1",
"axios": "^0.21.2",
"axios-retry": "^3.1.9",
"cli-progress": "^3.8.2",
"colors": "^1.4.0",
"is-elevated": "^3.0.0",
"iso8601-duration": "^1.3.0",
"jwt-decode": "^3.1.2",
"puppeteer": "5.5.0",
"readline-sync": "^1.4.10",
"sanitize-filename": "^1.6.3",
"terminal-image": "^1.2.1",
"typescript": "^4.1.2",
"winston": "^3.3.3",
"yargs": "^16.1.1"
}
"name": "destreamer",
"repository": {
"type": "git",
"url": "git://github.com/snobu/destreamer.git"
},
"version": "2.1.0",
"description": "Save Microsoft Stream videos for offline enjoyment.",
"main": "build/src/destreamer.js",
"bin": {
"destreamer": "build/src/destreamer.js"
},
"scripts": {
"build": "echo Transpiling TypeScript to JavaScript... && node node_modules/typescript/bin/tsc && echo Destreamer was built successfully.",
"test": "mocha build/test",
"lint": "eslint src/*.ts"
},
"keywords": [],
"author": "snobu",
"license": "MIT",
"devDependencies": {
"@types/cli-progress": "^3.8.0",
"@types/jwt-decode": "^2.2.1",
"@types/mocha": "^8.0.4",
"@types/puppeteer": "^5.4.0",
"@types/readline-sync": "^1.4.3",
"@types/tmp": "^0.2.0",
"@types/ws": "^7.4.0",
"@types/yargs": "^15.0.11",
"@typescript-eslint/eslint-plugin": "^4.9.0",
"@typescript-eslint/parser": "^4.9.0",
"eslint": "^7.14.0",
"mocha": "^8.2.1",
"typescript": "^4.1.2"
},
"dependencies": {
"axios": "^0.21.0",
"axios-retry": "^3.1.9",
"cli-progress": "^3.8.2",
"colors": "^1.4.0",
"is-elevated": "^3.0.0",
"iso8601-duration": "^1.3.0",
"jwt-decode": "^3.1.2",
"m3u8-parser": "^4.5.0",
"portfinder": "^1.0.28",
"puppeteer": "5.5.0",
"readline-sync": "^1.4.10",
"sanitize-filename": "^1.6.3",
"terminal-image": "^1.2.1",
"tmp": "^0.2.1",
"winston": "^3.3.3",
"ws": "^7.4.0",
"yargs": "^16.1.1"
}
}

View File

@@ -1,23 +1,21 @@
import { logger } from './Logger';
import { ShareSession, StreamSession, Video } from './Types';
import { publishedDateToString, publishedTimeToString } from './VideoUtils';
import { Session } from './Types';
import axios, { AxiosRequestConfig, AxiosResponse, AxiosInstance, AxiosError } from 'axios';
import axiosRetry, { isNetworkOrIdempotentRequestError } from 'axios-retry';
// import fs from 'fs';
export class StreamApiClient {
private static instance: StreamApiClient;
export class ApiClient {
private static instance: ApiClient;
private axiosInstance?: AxiosInstance;
private session?: StreamSession;
private session?: Session;
private constructor(session?: StreamSession) {
private constructor(session?: Session) {
this.session = session;
this.axiosInstance = axios.create({
baseURL: session?.ApiGatewayUri,
// timeout: 7000,
headers: { 'User-Agent': 'destreamer/2.0 (Hammer of Dawn)' }
headers: { 'User-Agent': 'destreamer/3.0 (Preview)' }
});
axiosRetry(this.axiosInstance, {
@@ -36,9 +34,10 @@ export class StreamApiClient {
return true;
}
logger.warn(`Got HTTP code ${err?.response?.status ?? undefined}. Retrying request...`);
logger.warn('Here is the error message: ');
console.dir(err.response?.data);
logger.warn('We called this URL: ' + err.response?.config.baseURL + err.response?.config.url);
logger.warn('Here is the error message: \n' +
JSON.stringify(err.response?.data ?? undefined) +
'\nRetrying request...');
logger.warn(`We called this URL: ${err.response?.config.baseURL}${err.response?.config.url}`);
const shouldRetry: boolean = retryCodes.includes(err?.response?.status ?? 0);
@@ -52,16 +51,16 @@ export class StreamApiClient {
*
* @param session used if initializing
*/
public static getInstance(session?: StreamSession): StreamApiClient {
if (!StreamApiClient.instance) {
StreamApiClient.instance = new StreamApiClient(session);
public static getInstance(session?: Session): ApiClient {
if (!ApiClient.instance) {
ApiClient.instance = new ApiClient(session);
}
return StreamApiClient.instance;
return ApiClient.instance;
}
public setSession(session: StreamSession): void {
if (!StreamApiClient.instance) {
public setSession(session: Session): void {
if (!ApiClient.instance) {
logger.warn("Trying to update ApiCient session when it's not initialized!");
}
@@ -85,6 +84,13 @@ export class StreamApiClient {
'Authorization': 'Bearer ' + this.session?.AccessToken
};
logger.debug(
'[ApiClient.callApi]\n' +
'path: ' + path + '\n' +
'method: ' + method + '\n' +
'payload: ' + payload + '\n'
);
return this.axiosInstance?.request({
method: method,
headers: headers,
@@ -106,6 +112,14 @@ export class StreamApiClient {
'Authorization': 'Bearer ' + this.session?.AccessToken
};
logger.debug(
'[ApiClient.callUrl]\n' +
'url: ' + url + '\n' +
'method: ' + method + '\n' +
'payload: ' + payload + '\n' +
'responseType: ' + responseType + '\n'
);
return this.axiosInstance?.request({
method: method,
headers: headers,
@@ -115,134 +129,3 @@ export class StreamApiClient {
});
}
}
export class ShareApiClient {
private axiosInstance: AxiosInstance;
private site: string;
public constructor(domain: string, site: string, session: ShareSession) {
this.axiosInstance = axios.create({
baseURL: domain,
// timeout: 7000,
headers: {
'User-Agent': 'destreamer/3.0 ALPHA',
'Cookie': `rtFa=${session.rtFa}; FedAuth=${session.FedAuth}`
}
});
this.site = site;
// FIXME: disabled because it was messing with the direct download check
// axiosRetry(this.axiosInstance, {
// // The following option is not working.
// // We should open an issue on the relative GitHub
// shouldResetTimeout: true,
// retries: 6,
// retryDelay: (retryCount: number) => {
// return retryCount * 2000;
// },
// retryCondition: (err: AxiosError) => {
// const retryCodes: Array<number> = [429, 500, 502, 503];
// if (isNetworkOrIdempotentRequestError(err)) {
// logger.warn(`${err}. Retrying request...`);
// return true;
// }
// logger.warn(`Got HTTP code ${err?.response?.status ?? undefined}.`);
// logger.warn('Here is the error message: ');
// console.dir(err.response?.data);
// logger.warn('We called this URL: ' + err.response?.config.baseURL + err.response?.config.url);
// const shouldRetry: boolean = retryCodes.includes(err?.response?.status ?? 0);
// return shouldRetry;
// }
// });
}
public async getVideoInfo(filePath: string, outPath: string): Promise<Video> {
let playbackUrl: string;
// TODO: Ripped this straigth from chromium inspector. Don't know don't care what it is right now. Check later
const payload = {
parameters: {
__metadata: {
type: 'SP.RenderListDataParameters'
},
ViewXml: `<View Scope="RecursiveAll"><Query><Where><Eq><FieldRef Name="FileRef" /><Value Type="Text"><![CDATA[${filePath}]]></Value></Eq></Where></Query><RowLimit Paged="TRUE">1</RowLimit></View>`,
RenderOptions: 12295,
AddRequiredFields: true
}
};
const url = `${this.site}/_api/web/GetListUsingPath(DecodedUrl=@a1)/RenderListDataAsStream?@a1='${encodeURIComponent(filePath)}'`;
logger.verbose(`Requesting video info for '${url}'`);
const info = await this.axiosInstance.post(url, payload, {
headers: {
'Content-Type': 'application/json;odata=verbose'
}
}).then(res => res.data);
// fs.writeFileSync('info.json', JSON.stringify(info, null, 4));
// FIXME: very bad but usefull in alpha stage to check for edge cases
if (info.ListData.Row.length !== 1) {
logger.error('More than 1 row in SharePoint video info', { fatal: true });
process.exit(1000);
}
const direct = await this.canDirectDownload(filePath);
const b64VideoMetadata = JSON.parse(
info.ListData.Row[0].MediaServiceFastMetadata
).video.altManifestMetadata;
const durationSeconds = Math.ceil(
(JSON.parse(
Buffer.from(b64VideoMetadata, 'base64').toString()
).Duration100Nano) / 10 / 1000 / 1000
);
if (direct) {
playbackUrl = this.axiosInstance.defaults.baseURL + filePath;
// logger.verbose(playbackUrl);
}
else {
playbackUrl = info.ListSchema['.videoManifestUrl'];
playbackUrl = playbackUrl.replace('{.mediaBaseUrl}', info.ListSchema['.mediaBaseUrl']);
// the only filetype works I found
playbackUrl = playbackUrl.replace('{.fileType}', 'mp4');
playbackUrl = playbackUrl.replace('{.callerStack}', info.ListSchema['.callerStack']);
playbackUrl = playbackUrl.replace('{.spItemUrl}', info.ListData.Row[0]['.spItemUrl']);
playbackUrl = playbackUrl.replace('{.driveAccessToken}', info.ListSchema['.driveAccessToken']);
playbackUrl += '&part=index&format=dash';
}
return {
direct,
title: filePath.split('/').pop() ?? 'video.mp4',
duration: publishedTimeToString(durationSeconds),
publishDate: publishedDateToString(info.ListData.Row[0]['Modified.']),
publishTime: publishedTimeToString(info.ListData.Row[0]['Modified.']),
author: info.ListData.Row[0]['Author.title'],
authorEmail: info.ListData.Row[0]['Author.email'],
uniqueId: info.ListData.Row[0].GUID.substring(1, 9),
outPath,
playbackUrl,
totalChunks: durationSeconds
};
}
private async canDirectDownload(filePath: string): Promise<boolean> {
logger.verbose(`Checking direct download for '${filePath}'`);
return this.axiosInstance.head(
filePath, { maxRedirects: 0 }
).then(
res => (res.status === 200)
).catch(
() => false
);
}
}

View File

@@ -1,14 +1,15 @@
import { CLI_ERROR } from './Errors';
import { makeOutDir } from './Utils';
import { CLI_ERROR, ERROR_CODE } from './Errors';
import { checkOutDir } from './Utils';
import { logger } from './Logger';
import { templateElements } from './Types';
import fs from 'fs';
import readlineSync from 'readline-sync';
import sanitize from 'sanitize-filename';
import yargs from 'yargs';
export const argv = yargs.options({
export const argv: any = yargs.options({
username: {
alias: 'u',
type: 'string',
@@ -70,32 +71,27 @@ export const argv = yargs.options({
default: false,
demandOption: false
},
debug: {
alias: 'd',
describe: 'Set logging level to debug (only use this if you know what are doing)',
type: 'boolean',
default: false,
demandOption: false
},
selectQuality: {
alias: 'q',
describe: 'Select the quality with a number 1 (worst) trough 10 (best), 0 prompt the user for each video',
default: 10,
type: 'number',
demandOption: false
},
closedCaptions: {
alias: 'cc',
describe: 'Check if closed captions are available and let the user choose which one to download (will not ask if only one available).',
describe: 'Check if closed captions are available and let the user choose which one to download (will not ask if only one available)',
type: 'boolean',
default: false,
demandOption: false
},
noCleanup: {
alias: 'nc',
describe: 'Do not delete the downloaded video file when an FFmpeg error occurs.',
type: 'boolean',
default: false,
demandOption: false
},
vcodec: {
describe: 'Re-encode video track. Specify FFmpeg codec (e.g. libx265) or set to "none" to disable video.',
type: 'string',
default: 'copy',
demandOption: false
},
acodec: {
describe: 'Re-encode audio track. Specify FFmpeg codec (e.g. libopus) or set to "none" to disable audio.',
type: 'string',
default: 'copy',
demandOption: false
},
format: {
describe: 'Output container format (mkv, mp4, mov, anything that FFmpeg supports).',
type: 'string',
@@ -112,17 +108,9 @@ export const argv = yargs.options({
.wrap(120)
.check(() => noArguments())
.check((argv: any) => checkInputConflicts(argv.videoUrls, argv.inputFile))
.check((argv: any) => {
if (makeOutDir(argv.outputDirectory)) {
return true;
}
else {
logger.error(CLI_ERROR.INVALID_OUTDIR);
throw new Error(' ');
}
})
.check((argv: any) => checkOutputDirectoryExistance(argv.outputDirectory))
.check((argv: any) => isOutputTemplateValid(argv))
.check((argv: any) => checkQualityValue(argv))
.argv;
@@ -172,9 +160,22 @@ function checkInputConflicts(videoUrls: Array<string | number> | undefined,
}
function checkOutputDirectoryExistance(dir: string): boolean {
if (checkOutDir(dir)) {
return true;
}
else {
logger.error(CLI_ERROR.INVALID_OUTDIR, { fatal: true });
throw new Error(' ');
}
}
function isOutputTemplateValid(argv: any): boolean {
let finalTemplate: string = argv.outputTemplate;
const elementRegEx = RegExp(/{(.*?)}/g);
let match = elementRegEx.exec(argv.outputTemplate);
let match = elementRegEx.exec(finalTemplate);
// if no template elements this fails
if (match) {
@@ -189,11 +190,46 @@ function isOutputTemplateValid(argv: any): boolean {
process.exit(1);
}
match = elementRegEx.exec(argv.outputTemplate);
match = elementRegEx.exec(finalTemplate);
}
}
// bad template from user, switching to default
else {
logger.warn('Empty output template provided, using default one \n');
finalTemplate = '{title} - {publishDate} {uniqueId}';
}
argv.outputTemplate = sanitize(argv.outputTemplate.trim());
argv.outputTemplate = sanitize(finalTemplate.trim());
return true;
}
function checkQualityValue(argv: any): boolean {
if (isNaN(argv.selectQuality)) {
logger.error('The quality value provided was not a number, switching to default');
argv.selectQuality = 10;
return true;
}
else if (argv.selectQuality < 0 || argv.selectQuality > 10) {
logger.error('The quality value provided was outside the valid range, switching to default');
argv.selectQuality = 10;
return true;
}
else {
return true;
}
}
export function promptUser(choices: Array<string>): number {
const index: number = readlineSync.keyInSelect(choices, 'Which resolution/format do you prefer?');
if (index === -1) {
process.exit(ERROR_CODE.CANCELLED_USER_INPUT);
}
return index;
}

36
src/Decrypter.ts Normal file
View File

@@ -0,0 +1,36 @@
import { ApiClient } from './ApiClient';
import { logger } from './Logger';
import { Session } from './Types';
import crypto from 'crypto';
export async function getDecrypter(playlistUrl: string, session: Session): Promise<crypto.Decipher> {
const apiClient = ApiClient.getInstance(session);
const keyOption = await apiClient.callUrl(playlistUrl, 'get', null, 'text')
.then(res => (res?.data as string).split(/\r?\n/)
.find(line => line.startsWith('#EXT-X-KEY')));
if (keyOption) {
logger.debug('[Decrypter] CRIPTO LINE IN M3U8: ' + keyOption);
const match = RegExp(/#EXT-X-KEY:METHOD=(.*?),URI="(.*?)",IV=0X(.*)/).exec(keyOption);
if (!match) {
throw new Error('No match for regex');
}
const algorithm = match[1].toLowerCase().replace('-', '');
const key: Buffer = await apiClient.callUrl(match[2], 'post', null, 'arraybuffer')
.then(res => res?.data);
const iv = Buffer.from(match[3], 'hex');
return crypto.createDecipheriv(algorithm, key, iv);
}
else {
process.exit(555);
}
}

310
src/DownloadManager.ts Normal file
View File

@@ -0,0 +1,310 @@
import { ERROR_CODE } from './Errors';
import { logger } from './Logger';
import cliProgress from 'cli-progress';
import WebSocket from 'ws';
export class DownloadManager {
// it's initalized in this.init()
private webSocket!: WebSocket;
private connected: boolean;
// NOTE: is there a way to fix the ETA? Can't get size nor ETA from aria that I can see
// we initialize this for each download
private progresBar!: cliProgress.Bar;
private completed: number;
private queue: Set<string>;
private index: number;
public constructor() {
this.connected = false;
this.completed = 0;
this.queue = new Set<string>();
this.index = 1;
if (!process.stdout.columns) {
logger.warn(
'Unable to get number of columns from terminal.\n' +
'This happens sometimes in Cygwin/MSYS.\n' +
'No progress bar can be rendered, however the download process should not be affected.\n\n' +
'Please use PowerShell or cmd.exe to run destreamer on Windows.'
);
}
}
/**
 * MUST BE CALLED BEFORE ANY OTHER OPERATION
 *
 * Wait for an established connection between the webSocket and aria2c:
 * up to 10 reconnect attempts (1s apart) on ECONNREFUSED, and up to 20s
 * overall for the socket to report open.
 * Then send aria2c the global config options if specified.
 *
 * @param port    TCP port on which the aria2c JSON-RPC server listens
 * @param options optional aria2 global options applied once connected
 */
public async init(port: number, options?: { [option: string]: string }): Promise<void> {
    let socTries = 0;
    const maxTries = 10;
    let timer = 0;
    const waitTime = 20;

    const errorHandler = async (err: WebSocket.ErrorEvent): Promise<void> => {
        // we try for 10 sec to initialize a socket on the specified port
        if (err.error.code === 'ECONNREFUSED' && socTries < maxTries) {
            logger.debug(`[DownloadManager] trying webSocket init ${socTries}/${maxTries}`);
            await new Promise(r => setTimeout(r, 1000));
            // recreate the socket and re-attach our handlers to the new instance
            this.webSocket = new WebSocket(`http://localhost:${port}/jsonrpc`);
            this.webSocket.onerror = errorHandler;
            this.webSocket.onopen = openHandler;
            socTries++;
        }
        else {
            logger.error(err);
            process.exit(ERROR_CODE.NO_CONNECT_ARIA2C);
        }
    };

    const openHandler = (event: WebSocket.OpenEvent): void => {
        this.connected = true;
        logger.debug(`[DownloadManager] open event received ${event}`);
        logger.info('Connected to aria2 daemon!');
    };

    // create webSocket
    this.webSocket = new WebSocket(`http://localhost:${port}/jsonrpc`);
    this.webSocket.onerror = errorHandler;
    this.webSocket.onopen = openHandler;

    // wait for socket connection (polled once per second up to waitTime)
    while (!this.connected) {
        if (timer < waitTime) {
            timer++;
            await new Promise(r => setTimeout(r, 1000));
        }
        else {
            process.exit(ERROR_CODE.NO_CONNECT_ARIA2C);
        }
    }

    // setup messages handling: log only messages not handled during download
    // NOTE: maybe we could remove this and re-add when the downloads are done
    this.webSocket.on('message', (data: WebSocket.Data) => {
        const parsed = JSON.parse(data.toString());

        if (parsed.method !== 'aria2.onDownloadComplete' &&
            parsed.method !== 'aria2.onDownloadStart' &&
            parsed.method !== 'aria2.onDownloadError' &&
            parsed.id !== 'getSpeed' &&
            parsed.id !== 'addUrl' &&
            parsed.id !== 'shutdown' &&
            parsed.id !== 'getUrlForRetry') {
            logger.info('[INCOMING] \n' + JSON.stringify(parsed, null, 4) + '\n\n');
        }
    });

    if (options) {
        logger.info('Now trying to send configs...');
        this.setOptions(options);
    }

    // ask aria2 to echo back its global options so the config is visible in logs
    this.webSocket.send(JSON.stringify({
        jsonrpc: '2.0',
        id: 'Destreamer',
        method: 'aria2.getGlobalOption'
    }));

    logger.debug('[DownloadManager] Setup listener count on "message": ' + this.webSocket.listenerCount('message'));
}
/**
 * Gracefully shuts down the aria2c daemon and closes the webSocket,
 * waiting up to 10 seconds for both to complete.
 *
 * @throws Error when aria2c did not acknowledge the shutdown and the
 *         socket did not reach CLOSED within the timeout
 */
public async close(): Promise<void> {
    let exited = false;
    let timer = 0;
    const waitTime = 10;

    // watch for aria2c's 'OK' acknowledgement of the shutdown request
    this.webSocket.on('message', (data: WebSocket.Data) => {
        const parsed = JSON.parse(data.toString());

        if (parsed.result === 'OK') {
            exited = true;
            logger.verbose('Aria2c shutdown complete');
        }
    });

    // NOTE(review): close() is initiated right after the shutdown request is
    // sent, so the 'OK' reply races the socket teardown — confirm the reply
    // is still delivered before the CLOSED state is reached
    this.webSocket.send(this.createMessage('aria2.shutdown', null, 'shutdown'));
    this.webSocket.close();

    // wait until the socket is closed AND aria2c confirmed the shutdown
    while ((this.webSocket.readyState !== this.webSocket.CLOSED) || !exited) {
        if (timer < waitTime) {
            timer++;
            await new Promise(r => setTimeout(r, 1000));
        }
        else {
            throw new Error('Timed out while waiting for aria2c to shut down');
        }
    }
}
/**
 * Creates a fresh SingleBar for a new batch of downloads, sized to a
 * third of the terminal width.
 */
private initProgresBar(): void {
    // process.stdout.columns may return undefined in some terminals (Cygwin/MSYS)
    const terminalWidth = process.stdout.columns || 30;

    this.progresBar = new cliProgress.SingleBar({
        barCompleteChar: '\u2588',
        barIncompleteChar: '\u2591',
        format: 'progress [{bar}] {percentage}% {speed} MB/s {eta_formatted}',
        noTTYOutput: true,
        notTTYSchedule: 3000,
        barsize: Math.floor(terminalWidth / 3),
        stopOnComplete: true,
        hideCursor: true,
    });
}
private createMessage(method: 'aria2.addUri', params: [[string]] | [[string], object], id?: string): string;
private createMessage(method: 'aria2.tellStatus', params: [[string]] | [string, object], id?: string): string;
private createMessage(method: 'aria2.changeOption', params: [string, object], id?: string): string;
private createMessage(method: 'aria2.changeGlobalOption', params: [{ [option: string]: string }], id?: string): string;
private createMessage(method: 'system.multicall', params: [Array<object>], id?: string): string;
// FIXME: I don't know how to properly implement this one that doesn't require params..
private createMessage(method: 'aria2.getGlobalStat', params?: null, id?: string): string;
private createMessage(method: 'aria2.shutdown', params?: null, id?: string): string;
/**
 * Serializes a JSON-RPC 2.0 request for aria2c.
 * The 'params' key is attached only when params is truthy, as expected by
 * parameterless aria2 RPC methods. Key insertion order is preserved.
 */
private createMessage(method: string, params?: any, id?: string): string {
    const message: { [key: string]: any } = {
        jsonrpc: '2.0',
        id: id ?? 'Destreamer',
        method: method
    };

    if (params) {
        message.params = params;
    }

    return JSON.stringify(message);
}
/**
 * Builds one element of a 'system.multicall' batch: the method name plus
 * its optional params, left un-serialized so the whole batch can be
 * JSON-stringified later by createMessage.
 */
private createMulticallElement(method: string, params?: any): any {
    const element: { [key: string]: any } = { methodName: method };

    if (params) {
        element.params = params;
    }

    return element;
}
/**
 * For general options see
 * {@link https://aria2.github.io/manual/en/html/aria2c.html#aria2.changeOption here}.
 * For single download options see
 * {@link https://aria2.github.io/manual/en/html/aria2c.html#aria2.changeGlobalOption here}
 *
 * @param options object with key: value pairs
 * @param guid    when given, options apply only to that single download
 */
private setOptions(options: { [option: string]: string }, guid?: string): void {
    let message: string;

    if (guid) {
        message = this.createMessage('aria2.changeOption', [guid, options]);
    }
    else {
        message = this.createMessage('aria2.changeGlobalOption', [options]);
    }

    this.webSocket.send(message);
}
/**
 * Queues every URL for download through aria2c and resolves once the whole
 * queue has completed; failed downloads are automatically re-queued.
 *
 * @param urls      segment URLs to download
 * @param directory destination directory passed to aria2c
 * @returns promise that resolves when all queued downloads are done
 */
public downloadUrls(urls: Array<string>, directory: string): Promise<void> {
    return new Promise(resolve => {
        this.index = 1;
        this.completed = 0;
        // initialize the bar as a new one
        this.initProgresBar();
        let barStarted = false;

        const handleResponse = (data: WebSocket.Data): void => {
            const parsed = JSON.parse(data.toString());

            /* I ordered them in order of (probable) times called so
            that we don't check useless ifs (even if we aren't caring about efficiency) */
            // handle download completions
            if (parsed.method === 'aria2.onDownloadComplete') {
                this.queue.delete(parsed.params.pop().gid.toString());
                this.progresBar.update(++this.completed);

                /* NOTE: probably we could use setInterval because relying on
                a completed download is good in most cases (since the segments
                are small and numerous, so updates are many and frequent) BUT if the user
                internet speed is really low the completed downloads come in
                less frequently and we have fewer updates */
                this.webSocket.send(this.createMessage('aria2.getGlobalStat', null, 'getSpeed'));

                if (this.queue.size === 0) {
                    this.webSocket.off('message', handleResponse);
                    logger.debug('[DownloadManager] End download listener count on "message": ' + this.webSocket.listenerCount('message'));
                    resolve();
                }
            }
            // handle speed update packages
            else if (parsed.id === 'getSpeed') {
                this.progresBar.update(this.completed,
                    { speed: ((parsed.result.downloadSpeed as number) / 1000000).toFixed(2) });
            }
            // handle download errors
            else if (parsed.method === 'aria2.onDownloadError') {
                logger.error('Error while downloading, retrying...');
                const errorGid: string = parsed.params.pop().gid.toString();

                this.queue.delete(errorGid);
                // ask aria2 for the failed download's url/path so we can re-queue it
                // FIXME: I don't know if it's fixed, I was not able to reproduce a fail reliably
                this.webSocket.send(this.createMessage('aria2.tellStatus', [errorGid, ['files']], 'getUrlForRetry'));
            }
            else if (parsed.id === 'getUrlForRetry') {
                const retryUrl = parsed.result.files[0].uris[0].uri;
                const retryTitle = parsed.result.files[0].path;

                this.webSocket.send(this.createMessage('aria2.addUri', [[retryUrl], { out: retryTitle }], 'addUrl'));
            }
            // handle url added to download list in aria
            else if (parsed.id === 'addUrl') {
                // if we receive an array it's the starting list of downloads
                // if it's a single string it's an error download being re-added
                if (typeof parsed.result === 'string') {
                    // BUGFIX: aria2.addUri returns the GID itself as a string;
                    // the old 'parsed.result.gid.toString()' threw a TypeError here
                    this.queue.add(parsed.result.toString());
                }
                else if (Array.isArray(parsed.result)) {
                    parsed.result.forEach((gid: string) =>
                        this.queue.add(gid.toString())
                    );

                    if (!barStarted) {
                        barStarted = true;
                        logger.debug(`[DownloadManager] Starting download queue size: ${this.queue.size}`);
                        this.progresBar.start(this.queue.size, 0, { speed: 0 });
                    }
                }
            }
        };

        // FIXME: terrible workaround for 'https://github.com/snobu/destreamer/issues/232#issuecomment-699642770' :/
        this.webSocket.removeAllListeners('message');
        this.webSocket.on('message', (data: WebSocket.Data) => {
            const parsed = JSON.parse(data.toString());

            if (parsed.method !== 'aria2.onDownloadComplete' &&
                parsed.method !== 'aria2.onDownloadStart' &&
                parsed.method !== 'aria2.onDownloadError' &&
                parsed.id !== 'getSpeed' &&
                parsed.id !== 'addUrl' &&
                parsed.id !== 'shutdown' &&
                parsed.id !== 'getUrlForRetry') {
                logger.info('[INCOMING] \n' + JSON.stringify(parsed, null, 4) + '\n\n');
            }
        });

        logger.debug('[DownloadManager] Start download listener count on "message": ' + this.webSocket.listenerCount('message'));
        this.webSocket.on('message', data => handleResponse(data));

        // one multicall that queues every url with a zero-padded '.encr' filename
        const paramsForDownload: Array<any> = urls.map(url => {
            const title: string = (this.index++).toString().padStart(16, '0') + '.encr';

            return this.createMulticallElement(
                'aria2.addUri', [[url], { out: title, dir: directory }]);
        });

        this.webSocket.send(
            this.createMessage('system.multicall', [paramsForDownload], 'addUrl')
        );
    });
}
}

View File

@@ -1,335 +0,0 @@
import { ShareApiClient, StreamApiClient } from './ApiClient';
import { argv } from './CommandLineParser';
import { ERROR_CODE } from './Errors';
import { logger } from './Logger';
import { doShareLogin, doStreamLogin } from './LoginModules';
import { drawThumbnail } from './Thumbnail';
import { refreshSession, TokenCache } from './TokenCache';
import { Video, VideoUrl } from './Types';
import { ffmpegTimemarkToChunk } from './Utils';
import { createUniquePath, getStreamInfo } from './VideoUtils';
import cliProgress from 'cli-progress';
import fs from 'fs';
import { execSync } from 'child_process';
import path from 'path';
const { FFmpegCommand, FFmpegInput, FFmpegOutput } = require('@tedconf/fessonia')();
// Shared cache of the Microsoft Stream session token (read/written via TokenCache).
const tokenCache: TokenCache = new TokenCache();
/**
 * Downloads the given Microsoft Stream videos by spawning ffmpeg against
 * each video's HLS playlist, authenticating with a bearer access token.
 *
 * @param videoUrls list of video page URLs with their output directories
 */
export async function downloadStreamVideo(videoUrls: Array<VideoUrl>): Promise<void> {
logger.info('Downloading Microsoft Stream videos...');
// reuse a cached session if available, otherwise perform an interactive login
let session = tokenCache.Read() ?? await doStreamLogin('https://web.microsoftstream.com/', tokenCache, argv.username);
logger.verbose(
'Session and API info \n' +
'\t API Gateway URL: '.cyan + session.ApiGatewayUri + '\n' +
'\t API Gateway version: '.cyan + session.ApiGatewayVersion + '\n'
);
logger.info('Fetching videos info... \n');
// resolve metadata and compute unique, templated output paths for each video
const videos: Array<Video> = createUniquePath(
await getStreamInfo(videoUrls, session, argv.closedCaptions),
argv.outputTemplate, argv.format, argv.skip
);
// dry-run mode: print what would be downloaded, then bail out
if (argv.simulate) {
videos.forEach((video: Video) => {
logger.info(
'\nTitle: '.green + video.title +
'\nOutPath: '.green + video.outPath +
'\nPublished Date: '.green + video.publishDate +
'\nPlayback URL: '.green + video.playbackUrl +
((video.captionsUrl) ? ('\nCC URL: '.green + video.captionsUrl) : '')
);
});
return;
}
for (const [index, video] of videos.entries()) {
if (argv.skip && fs.existsSync(video.outPath)) {
logger.info(`File already exists, skipping: ${video.outPath} \n`);
continue;
}
// refresh the access token between videos when login cookies are kept
if (argv.keepLoginCookies && index !== 0) {
logger.info('Trying to refresh token...');
session = await refreshSession('https://web.microsoftstream.com/video/' + video.guid);
StreamApiClient.getInstance().setSession(session);
}
const pbar: cliProgress.SingleBar = new cliProgress.SingleBar({
barCompleteChar: '\u2588',
barIncompleteChar: '\u2591',
format: 'progress [{bar}] {percentage}% {speed} {eta_formatted}',
// process.stdout.columns may return undefined in some terminals (Cygwin/MSYS)
barsize: Math.floor((process.stdout.columns || 30) / 3),
stopOnComplete: true,
hideCursor: true,
});
logger.info(`\nDownloading Video: ${video.title} \n`);
logger.verbose('Extra video info \n' +
'\t Video m3u8 playlist URL: '.cyan + video.playbackUrl + '\n' +
'\t Video tumbnail URL: '.cyan + video.posterImageUrl + '\n' +
'\t Video subtitle URL (may not exist): '.cyan + video.captionsUrl + '\n' +
'\t Video total chunks: '.cyan + video.totalChunks + '\n');
logger.info('Spawning ffmpeg with access token and HLS URL. This may take a few seconds...\n\n');
if (!process.stdout.columns) {
logger.warn(
'Unable to get number of columns from terminal.\n' +
'This happens sometimes in Cygwin/MSYS.\n' +
'No progress bar can be rendered, however the download process should not be affected.\n\n' +
'Please use PowerShell or cmd.exe to run destreamer on Windows.'
);
}
// bearer token passed to ffmpeg as an HTTP header for the HLS requests
const headers: string = 'Authorization: Bearer ' + session.AccessToken;
if (!argv.noExperiments) {
if (video.posterImageUrl) {
await drawThumbnail(video.posterImageUrl, session);
}
}
const ffmpegInpt: any = new FFmpegInput(video.playbackUrl, new Map([
['headers', headers]
]));
// '-n': never overwrite an existing output file
const ffmpegOutput: any = new FFmpegOutput(video.outPath, new Map([
argv.acodec === 'none' ? ['an', null] : ['c:a', argv.acodec],
argv.vcodec === 'none' ? ['vn', null] : ['c:v', argv.vcodec],
['n', null]
]));
const ffmpegCmd: any = new FFmpegCommand();
// on interrupt: stop the bar and (unless --noCleanup) delete the partial file
const cleanupFn: () => void = () => {
pbar.stop();
if (argv.noCleanup) {
return;
}
try {
fs.unlinkSync(video.outPath);
}
catch (e) {
// Future handling of an error (maybe)
}
};
pbar.start(video.totalChunks, 0, {
speed: '0'
});
// prepare ffmpeg command line
ffmpegCmd.addInput(ffmpegInpt);
ffmpegCmd.addOutput(ffmpegOutput);
if (argv.closedCaptions && video.captionsUrl) {
const captionsInpt: any = new FFmpegInput(video.captionsUrl, new Map([
['headers', headers]
]));
ffmpegCmd.addInput(captionsInpt);
}
// map ffmpeg's timemark progress onto the chunk-based progress bar
ffmpegCmd.on('update', async (data: any) => {
const currentChunks: number = ffmpegTimemarkToChunk(data.out_time);
pbar.update(currentChunks, {
speed: data.bitrate
});
// Graceful fallback in case we can't get columns (Cygwin/MSYS)
if (!process.stdout.columns) {
process.stdout.write(`--- Speed: ${data.bitrate}, Cursor: ${data.out_time}\r`);
}
});
process.on('SIGINT', cleanupFn);
// let the magic begin...
await new Promise((resolve: any) => {
ffmpegCmd.on('error', (error: any) => {
cleanupFn();
logger.error(`FFmpeg returned an error: ${error.message}`);
process.exit(ERROR_CODE.UNK_FFMPEG_ERROR);
});
ffmpegCmd.on('success', () => {
pbar.update(video.totalChunks); // set progress bar to 100%
logger.info(`\nDownload finished: ${video.outPath} \n`);
resolve();
});
ffmpegCmd.spawn();
});
process.removeListener('SIGINT', cleanupFn);
}
}
// TODO: complete overhaul of this function
/**
 * Downloads SharePoint-hosted videos: direct MP4s are fetched with aria2c,
 * everything else falls back to an ffmpeg HLS download.
 *
 * @param videoUrls list of SharePoint video URLs with output directories
 */
export async function downloadShareVideo(videoUrls: Array<VideoUrl>): Promise<void> {
// captures the tenant domain, the site base path and the .mp4 file path
// (either directly in the path or in an 'id=' query parameter)
const shareUrlRegex = new RegExp(/(?<domain>https:\/\/.+\.sharepoint\.com).*?(?<baseSite>\/(?:teams|sites|personal)\/.*?)(?:(?<filename>\/.*\.mp4)|\/.*id=(?<paramFilename>.*mp4))/);
logger.info('Downloading SharePoint videos...\n\n');
// FIXME: this may change we need a smart login system if a request fails
const session = await doShareLogin(videoUrls[0].url, argv.username);
for (const videoUrl of videoUrls) {
const match = shareUrlRegex.exec(videoUrl.url);
if (!match) {
logger.error(`Invalid url '${videoUrl.url}', skipping...`);
continue;
}
const shareDomain = match.groups!.domain;
const shareSite = match.groups!.baseSite;
const shareFilepath = decodeURIComponent(match.groups?.filename ? (shareSite + match.groups.filename) : match.groups!.paramFilename);
// FIXME: hardcoded video.mp4
const title = shareFilepath.split('/').pop()?.split('.')[0] ?? 'video';
const apiClient = new ShareApiClient(shareDomain, shareSite, session);
const video = await apiClient.getVideoInfo(shareFilepath, videoUrl.outDir);
createUniquePath(video, title, argv.format, argv.skip);
// dry-run mode: print info, skip the actual download
if (argv.simulate) {
if (argv.verbose) {
console.dir(video);
}
else {
logger.info(
'\nTitle: '.green + video.title +
'\nOutPath: '.green + video.outPath +
'\nPublished Date: '.green + video.publishDate +
'\nPlayback URL: '.green + video.playbackUrl
);
}
continue;
}
if (video.direct) {
// SharePoint auth cookies forwarded to aria2c for the direct MP4 fetch
const headers = `Cookie: rtFa=${session.rtFa}; FedAuth=${session.FedAuth}`;
// FIXME: unstable and bad all-around
// NOTE(review): the command is built by string interpolation — paths and
// cookie values reach the shell unescaped (injection/breakage risk);
// consider spawn() with an argument array instead
try {
execSync(
'aria2c --max-connection-per-server 8 --console-log-level warn ' +
`--header "${headers}" --dir "${path.dirname(video.outPath)}" --out "${path.basename(video.outPath)}" "${shareDomain + shareFilepath}"`,
{ stdio: 'inherit' }
);
}
catch (error: any) {
logger.error(`${error.status} \n\n${error.message} \n\n${error.stdout.toString()} \n\n${error.stderr.toString()}`);
}
}
else {
// FIXME: just a copy-paste, should move to separate function
const pbar: cliProgress.SingleBar = new cliProgress.SingleBar({
barCompleteChar: '\u2588',
barIncompleteChar: '\u2591',
format: 'progress [{bar}] {percentage}% {speed} {eta_formatted}',
// process.stdout.columns may return undefined in some terminals (Cygwin/MSYS)
barsize: Math.floor((process.stdout.columns || 30) / 3),
stopOnComplete: true,
hideCursor: true,
});
logger.info(`\nDownloading Video: ${video.title} \n`);
logger.verbose('Extra video info \n' +
'\t Video m3u8 playlist URL: '.cyan + video.playbackUrl + '\n' +
'\t Video tumbnail URL: '.cyan + video.posterImageUrl + '\n' +
'\t Video subtitle URL (may not exist): '.cyan + video.captionsUrl + '\n' +
'\t Video total chunks: '.cyan + video.totalChunks + '\n');
logger.info('Spawning ffmpeg with access token and HLS URL. This may take a few seconds...\n\n');
if (!process.stdout.columns) {
logger.warn(
'Unable to get number of columns from terminal.\n' +
'This happens sometimes in Cygwin/MSYS.\n' +
'No progress bar can be rendered, however the download process should not be affected.\n\n' +
'Please use PowerShell or cmd.exe to run destreamer on Windows.'
);
}
// NOTE(review): unlike the Stream path, no auth header is passed here —
// presumably the playback URL is pre-authenticated; confirm
const ffmpegInpt: any = new FFmpegInput(video.playbackUrl);
// '-n': never overwrite an existing output file
const ffmpegOutput: any = new FFmpegOutput(video.outPath, new Map([
argv.acodec === 'none' ? ['an', null] : ['c:a', argv.acodec],
argv.vcodec === 'none' ? ['vn', null] : ['c:v', argv.vcodec],
['n', null]
]));
const ffmpegCmd: any = new FFmpegCommand();
// on interrupt: stop the bar and (unless --noCleanup) delete the partial file
const cleanupFn: () => void = () => {
pbar.stop();
if (argv.noCleanup) {
return;
}
try {
fs.unlinkSync(video.outPath);
}
catch (e) {
// Future handling of an error (maybe)
}
};
pbar.start(video.totalChunks, 0, {
speed: '0'
});
// prepare ffmpeg command line
ffmpegCmd.addInput(ffmpegInpt);
ffmpegCmd.addOutput(ffmpegOutput);
// map ffmpeg's timemark progress onto the chunk-based progress bar
ffmpegCmd.on('update', async (data: any) => {
const currentChunks: number = ffmpegTimemarkToChunk(data.out_time);
pbar.update(currentChunks, {
speed: data.bitrate
});
// Graceful fallback in case we can't get columns (Cygwin/MSYS)
if (!process.stdout.columns) {
process.stdout.write(`--- Speed: ${data.bitrate}, Cursor: ${data.out_time}\r`);
}
});
process.on('SIGINT', cleanupFn);
// let the magic begin...
await new Promise((resolve: any) => {
ffmpegCmd.on('error', (error: any) => {
cleanupFn();
logger.error(`FFmpeg returned an error: ${error.message}`);
process.exit(ERROR_CODE.UNK_FFMPEG_ERROR);
});
ffmpegCmd.on('success', () => {
pbar.update(video.totalChunks); // set progress bar to 100%
logger.info(`\nDownload finished: ${video.outPath} \n`);
resolve();
});
ffmpegCmd.spawn();
});
process.removeListener('SIGINT', cleanupFn);
// logger.error('TODO: manifest download');
// continue;
}
}
}

View File

@@ -1,55 +1,64 @@
/* let's start our error codes up high so we
don't exit with the wrong message if other modules exit with some code */
export const enum ERROR_CODE {
UNHANDLED_ERROR = 200,
UNHANDLED_ERROR = 1000,
ELEVATED_SHELL,
CANCELLED_USER_INPUT,
MISSING_FFMPEG,
MISSING_ARIA2,
OUTDATED_FFMPEG,
UNK_FFMPEG_ERROR,
INVALID_VIDEO_GUID,
NO_SESSION_INFO
NO_SESSION_INFO,
NO_ENCRYPTION,
ARIA2C_CRASH,
NO_CONNECT_ARIA2C,
NO_DAEMON_PORT,
MISSING_ARIA2
}
export const errors: { [key: number]: string } = {
[ERROR_CODE.UNHANDLED_ERROR]: 'Unhandled error!\n' +
'Timeout or fatal error, please check your downloads directory and try again',
export const errors: {[key: number]: string} = {
[ERROR_CODE.UNHANDLED_ERROR]: 'Unhandled error or uncaught exception! \n' +
'Please check your download directory/directories and try again. \n' +
'If this keep happening please report it on github "https://github.com/snobu/destreamer/issues"',
[ERROR_CODE.ELEVATED_SHELL]: 'Destreamer cannot run in an elevated (Administrator/root) shell.\n' +
'Please run in a regular, non-elevated window.',
[ERROR_CODE.ELEVATED_SHELL]: 'Destreamer cannot run in an elevated (Administrator/root) shell. \n' +
'Please run in a regular, non-elevated window.',
[ERROR_CODE.CANCELLED_USER_INPUT]: 'Input was cancelled by user',
[ERROR_CODE.CANCELLED_USER_INPUT]: 'Input was cancelled by user',
[ERROR_CODE.MISSING_FFMPEG]: 'FFmpeg is missing!\n' +
'Destreamer requires a fairly recent release of FFmpeg to download videos',
[ERROR_CODE.MISSING_FFMPEG]: 'FFmpeg is missing! Destreamer requires FFmpeg to merge videos',
[ERROR_CODE.MISSING_ARIA2]: 'Aria2 is missing!\n' +
'Destreamer requires a fairly recent release of Aria2 to download videos',
[ERROR_CODE.MISSING_ARIA2]: 'Aria2c is missing! Destreamer requires Aria2c to download videos',
[ERROR_CODE.OUTDATED_FFMPEG]: 'The FFmpeg version currently installed is too old!\n' +
'Destreamer requires a fairly recent release of FFmpeg to download videos',
[ERROR_CODE.UNK_FFMPEG_ERROR]: 'Unknown FFmpeg error',
[ERROR_CODE.UNK_FFMPEG_ERROR]: 'Unknown FFmpeg error',
[ERROR_CODE.INVALID_VIDEO_GUID]: 'Unable to get video GUID from URL',
[ERROR_CODE.INVALID_VIDEO_GUID]: 'Unable to get video GUID from URL',
[ERROR_CODE.NO_SESSION_INFO]: 'Could not evaluate sessionInfo on the page',
[ERROR_CODE.NO_SESSION_INFO]: 'Could not evaluate sessionInfo on the page'
[ERROR_CODE.NO_ENCRYPTION]: 'Could not extract the encryption info from the playlist',
[ERROR_CODE.ARIA2C_CRASH]: 'The Aria2c RPC server crashed with the previous message',
[ERROR_CODE.NO_CONNECT_ARIA2C]: 'Could not connect to Aria2c JSON-RPC WebSocket before timeout!',
[ERROR_CODE.NO_DAEMON_PORT]: 'Could not get a free port to use'
};
export const enum CLI_ERROR {
MISSING_INPUT_ARG = 'You must specify a URLs source. \n' +
'Valid options are -i for one or more URLs separated by space or -f for input file. \n',
MISSING_INPUT_ARG = 'You must specify a URLs source. \n' +
'Valid options are -i for one or more URLs separated by space or -f for input file. \n',
INPUT_ARG_CONFLICT = 'Too many URLs sources specified! \n' +
'Please specify a single source, either -i or -f \n',
INPUT_ARG_CONFLICT = 'Too many URLs sources specified! \n' +
'Please specify a single source, either -i or -f \n',
INPUTFILE_WRONG_EXTENSION = 'The specified inputFile has the wrong extension \n' +
'Please make sure to use path/to/filename.txt when useing the -f option \n',
INPUTFILE_WRONG_EXTENSION = 'The specified inputFile has the wrong extension \n' +
'Please make sure to use path/to/filename.txt when useing the -f option \n',
INPUTFILE_NOT_FOUND = 'The specified inputFile does not exists \n' +
'Please check the filename and the path you provided \n',
INPUTFILE_NOT_FOUND = 'The specified inputFile does not exists \n'+
'Please check the filename and the path you provided \n',
INVALID_OUTDIR = 'Could not create the default/specified output directory \n' +
'Please check directory and permissions and try again. \n'
INVALID_OUTDIR = 'Could not create the default/specified output directory \n' +
'Please check directory and permissions and try again. \n'
}

View File

@@ -5,8 +5,6 @@ import { logger } from './Logger';
/**
* This file contains global destreamer process events
*
* @note SIGINT event is overridden in downloadVideo function
*
* @note function is required for non-packaged destreamer, so we can't do better
*/
export function setProcessEvents(): void {
@@ -21,6 +19,16 @@ export function setProcessEvents(): void {
logger.error({ message: msg, fatal: true });
});
process.on('SIGINT', signal => {
logger.error(signal);
process.exit(777);
});
process.on('uncaughtException', (err: Error) => {
logger.error(err);
process.exit(ERROR_CODE.UNHANDLED_ERROR);
});
process.on('unhandledRejection', (reason: {} | null | undefined) => {
if (reason instanceof Error) {
logger.error({ message: (reason as Error) });

View File

@@ -35,6 +35,9 @@ function customPrint (info: winston.Logform.TransformableInfo): string {
else if (info.level === 'verbose') {
return colors.cyan('\n[VERBOSE] ') + info.message;
}
else if (info.level === 'debug') {
return colors.magenta('\n[debug] ') + info.message;
}
return `${info.level}: ${info.message} - ${info.timestamp}`;
}

View File

@@ -1,176 +0,0 @@
import { logger } from './Logger';
import puppeteer from 'puppeteer';
import { getPuppeteerChromiumPath } from './PuppeteerHelper';
import { chromeCacheFolder } from './destreamer';
import { argv } from './CommandLineParser';
import { ShareSession, StreamSession } from './Types';
import { ERROR_CODE } from './Errors';
import { TokenCache } from './TokenCache';
/**
 * Performs the interactive Microsoft Stream login in a Chromium window and
 * extracts the session (access token + API gateway info) from the page,
 * caching it on success.
 *
 * @param url        login/landing URL to navigate to
 * @param tokenCache cache the resulting session is written to
 * @param username   optional email to pre-fill in the login form
 * @returns the authenticated Stream session
 */
export async function doStreamLogin(url: string, tokenCache: TokenCache, username?: string): Promise<StreamSession> {
logger.info('Launching headless Chrome to perform the OpenID Connect dance...');
const browser: puppeteer.Browser = await puppeteer.launch({
executablePath: getPuppeteerChromiumPath(),
headless: false,
userDataDir: (argv.keepLoginCookies) ? chromeCacheFolder : undefined,
defaultViewport: null,
args: [
'--disable-dev-shm-usage',
'--fast-start',
'--no-sandbox'
]
});
// try-finally because we were leaving zombie processes if there was an error
try {
const page: puppeteer.Page = (await browser.pages())[0];
logger.info('Navigating to login page...');
await page.goto(url, { waitUntil: 'load' });
try {
if (username) {
// pre-fill the email field and submit; password entry stays manual
await page.waitForSelector('input[type="email"]', { timeout: 3000 });
await page.keyboard.type(username);
await page.click('input[type="submit"]');
}
else {
/* If a username was not provided we let the user take actions that
lead up to the video page. */
}
}
catch (e) {
/* If there is no email input selector we aren't in the login module,
we are probably using the cache to aid the login.
It could finish the login on its own if the user said 'yes' when asked to
remember the credentials or it could still prompt the user for a password */
}
// wait (up to 150s) until the browser lands back on the Stream home page
await browser.waitForTarget((target: puppeteer.Target) => target.url().endsWith('microsoftstream.com/'), { timeout: 150000 });
logger.info('We are logged in.');
let session: StreamSession | null = null;
let tries = 1;
// poll the page (max 5 retries, 3s apart) until sessionInfo is available
while (!session) {
try {
/* 'sessionInfo' is never assigned here on purpose: page.evaluate
serializes the function source and runs it in the browser, where the
identifier resolves to the page's own global sessionInfo object.
This local declaration only satisfies the TypeScript compiler. */
let sessionInfo: any;
session = await page.evaluate(
() => {
return {
AccessToken: sessionInfo.AccessToken,
ApiGatewayUri: sessionInfo.ApiGatewayUri,
ApiGatewayVersion: sessionInfo.ApiGatewayVersion
};
}
);
}
catch (error) {
if (tries > 5) {
process.exit(ERROR_CODE.NO_SESSION_INFO);
}
session = null;
tries++;
await page.waitForTimeout(3000);
}
}
tokenCache.Write(session);
logger.info('Wrote access token to token cache.');
logger.info("At this point Chromium's job is done, shutting it down...\n");
return session;
}
finally {
// always close the browser, even on errors, to avoid zombie processes
await browser.close();
}
}
/**
 * Performs the interactive SharePoint login in a Chromium window and
 * extracts the 'rtFa' and 'FedAuth' authentication cookies.
 *
 * @param url      SharePoint URL to navigate to (determines the tenant host)
 * @param username optional email to pre-fill in the login form
 * @returns session holding the two SharePoint auth cookies
 */
export async function doShareLogin(url: string, username?: string): Promise<ShareSession> {
logger.info('Launching headless Chrome to perform the OpenID Connect dance...');
let session: ShareSession | null = null;
const hostname = new URL(url).host;
const browser: puppeteer.Browser = await puppeteer.launch({
executablePath: getPuppeteerChromiumPath(),
headless: false,
devtools: argv.verbose,
userDataDir: (argv.keepLoginCookies) ? chromeCacheFolder : undefined,
defaultViewport: null,
args: [
'--disable-dev-shm-usage',
'--fast-start',
'--no-sandbox'
]
});
// try-finally because we were leaving zombie processes if there was an error
try {
const page: puppeteer.Page = (await browser.pages())[0];
logger.info('Navigating to login page...');
await page.goto(url, { waitUntil: 'load' });
try {
if (username) {
// pre-fill the email field and submit; password entry stays manual
await page.waitForSelector('input[type="email"]', { timeout: 3000 });
await page.keyboard.type(username);
await page.click('input[type="submit"]');
}
else {
/* If a username was not provided we let the user take actions that
lead up to the video page. */
}
}
catch (e) {
/* If there is no email input selector we aren't in the login module,
we are probably using the cache to aid the login.
It could finish the login on its own if the user said 'yes' when asked to
remember the credentials or it could still prompt the user for a password */
}
logger.info('Waiting for target!');
// wait (up to 150s) until the browser is back on the tenant's host
await browser.waitForTarget((target: puppeteer.Target) => target.url().startsWith(`https://${hostname}`), { timeout: 150000 });
logger.info('We are logged in.');
let tries = 1;
// poll (max 5 retries, linear backoff) until both auth cookies are present
while (!session) {
const cookieJar = (await page.cookies()).filter(
biscuit => biscuit.name == 'rtFa' || biscuit.name == 'FedAuth'
);
if (cookieJar.length != 2) {
if (tries > 5) {
process.exit(ERROR_CODE.NO_SESSION_INFO);
}
await page.waitForTimeout(1000 * tries++);
continue;
}
session = {
rtFa: cookieJar.find(biscuit => biscuit.name == 'rtFa')!.value,
FedAuth: cookieJar.find(biscuit => biscuit.name == 'FedAuth')!.value
};
}
logger.info("At this point Chromium's job is done, shutting it down...\n");
// await page.waitForTimeout(1000 * 60 * 60 * 60);
}
finally {
logger.verbose('Stream login browser closing...');
await browser.close();
logger.verbose('Stream login browser closed');
}
return session;
}

View File

@@ -1,12 +1,12 @@
import { StreamApiClient } from './ApiClient';
import { StreamSession } from './Types';
import { ApiClient } from './ApiClient';
import { Session } from './Types';
import terminalImage from 'terminal-image';
import { AxiosResponse } from 'axios';
export async function drawThumbnail(posterImage: string, session: StreamSession): Promise<void> {
const apiClient: StreamApiClient = StreamApiClient.getInstance(session);
export async function drawThumbnail(posterImage: string, session: Session): Promise<void> {
const apiClient: ApiClient = ApiClient.getInstance(session);
const thumbnail: Buffer = await apiClient.callUrl(posterImage, 'get', null, 'arraybuffer')
.then((response: AxiosResponse<any> | undefined) => response?.data);

View File

@@ -2,60 +2,69 @@ import { chromeCacheFolder } from './destreamer';
import { ERROR_CODE } from './Errors';
import { logger } from './Logger';
import { getPuppeteerChromiumPath } from './PuppeteerHelper';
import { StreamSession } from './Types';
import { Session } from './Types';
import fs from 'fs';
import jwtDecode from 'jwt-decode';
import puppeteer from 'puppeteer';
type Jwt = {
[key: string]: any
}
export class TokenCache {
private tokenCacheFile = '.token_cache';
public Read(): StreamSession | null {
public Read(): Session | null {
if (!fs.existsSync(this.tokenCacheFile)) {
logger.warn(`${this.tokenCacheFile} not found. \n`);
return null;
}
const session: StreamSession = JSON.parse(fs.readFileSync(this.tokenCacheFile, 'utf8'));
const session: Session = JSON.parse(fs.readFileSync(this.tokenCacheFile, 'utf8'));
type Jwt = {
[key: string]: any
}
const decodedJwt: Jwt = jwtDecode(session.AccessToken);
const [isExpiring, timeLeft] = this.isExpiring(session);
const now: number = Math.floor(Date.now() / 1000);
const exp: number = decodedJwt['exp'];
const timeLeft: number = exp - now;
if (timeLeft < 120) {
if (isExpiring) {
logger.warn('Access token has expired! \n');
return null;
}
else {
logger.info(`Access token still good for ${Math.floor(timeLeft / 60)} minutes.\n`.green);
logger.info(`Access token still good for ${Math.floor(timeLeft / 60)} minutes.\n`.green);
return session;
return session;
}
}
public Write(session: StreamSession): void {
public Write(session: Session): void {
const s: string = JSON.stringify(session, null, 4);
fs.writeFile(this.tokenCacheFile, s, (err: any) => {
fs.writeFile('.token_cache', s, (err: any) => {
if (err) {
return logger.error(err);
}
logger.info(`Fresh access token dropped into ${this.tokenCacheFile} \n`.green);
logger.info('Fresh access token dropped into .token_cachen \n'.green);
});
}
public isExpiring(session: Session): [boolean, number] {
const decodedJwt: Jwt = jwtDecode(session.AccessToken);
const timeLeft: number = decodedJwt['exp'] - Math.floor(Date.now() / 1000);
if (timeLeft < (5 * 60)) {
return [true, 0];
}
else {
return [false, timeLeft];
}
}
}
export async function refreshSession(url: string): Promise<StreamSession> {
const videoId: string = url.split('/').pop() ?? process.exit(ERROR_CODE.INVALID_VIDEO_GUID);
export async function refreshSession(url: string): Promise<Session> {
const browser: puppeteer.Browser = await puppeteer.launch({
executablePath: getPuppeteerChromiumPath(),
@@ -71,9 +80,9 @@ export async function refreshSession(url: string): Promise<StreamSession> {
const page: puppeteer.Page = (await browser.pages())[0];
await page.goto(url, { waitUntil: 'load' });
await browser.waitForTarget((target: puppeteer.Target) => target.url().includes(videoId), { timeout: 30000 });
await browser.waitForTarget((target: puppeteer.Target) => target.url().endsWith('microsoftstream.com/'), { timeout: 150000 });
let session: StreamSession | null = null;
let session: Session | null = null;
let tries = 1;
while (!session) {

View File

@@ -1,34 +1,12 @@
export type StreamSession = {
export type Session = {
AccessToken: string;
ApiGatewayUri: string;
ApiGatewayVersion: string;
}
export type ShareSession = {
FedAuth: string;
rtFa: string;
}
export type VideoUrl = {
url: string,
outDir: string
}
export type SharepointVideo = {
// if we can download the MP4 or we need to use DASH
direct: boolean;
playbackUrl: string;
title: string;
outPath: string
}
export type Video = {
guid?: string;
direct?: boolean;
// the following properties are all for the title template
title: string;
duration: string;
publishDate: string;
@@ -36,15 +14,20 @@ export type Video = {
author: string;
authorEmail: string;
uniqueId: string;
outPath: string;
totalChunks: number; // Abstraction of FFmpeg timemark
// the following properties are all the urls neede for the download
playbackUrl: string;
posterImageUrl?: string;
posterImageUrl: string;
captionsUrl?: string
// final filename, already sanitized and unique
filename: string;
// complete path to save the video
outPath: string;
}
/* TODO: expand this template once we are all on board with a list
/* NOTE: expand this template once we are all on board with a list
see https://github.com/snobu/destreamer/issues/190#issuecomment-663718010 for list*/
export const templateElements: Array<string> = [
'title',

View File

@@ -1,62 +1,49 @@
import { StreamApiClient } from './ApiClient';
import { ApiClient } from './ApiClient';
import { ERROR_CODE } from './Errors';
import { logger } from './Logger';
import { StreamSession, VideoUrl } from './Types';
import { Session } from './Types';
import { AxiosResponse } from 'axios';
import { execSync } from 'child_process';
import fs from 'fs';
import readlineSync from 'readline-sync';
const streamUrlRegex = new RegExp(/https?:\/\/web\.microsoftstream\.com.*/);
const shareUrlRegex = new RegExp(/https?:\/\/.+\.sharepoint\.com.*/);
async function extractGuids(url: string, client: ApiClient): Promise<Array<string> | null> {
/** we place the guid in the url fild in the return */
export async function extractStreamGuids(urlList: Array<VideoUrl>, session: StreamSession): Promise<Array<VideoUrl>> {
const videoRegex = new RegExp(/https:\/\/.*\/video\/(\w{8}-(?:\w{4}-){3}\w{12})/);
const groupRegex = new RegExp(/https:\/\/.*\/group\/(\w{8}-(?:\w{4}-){3}\w{12})/);
const apiClient: StreamApiClient = StreamApiClient.getInstance(session);
const guidList: Array<VideoUrl> = [];
const videoMatch: RegExpExecArray | null = videoRegex.exec(url);
const groupMatch: RegExpExecArray | null = groupRegex.exec(url);
for (const url of urlList) {
const videoMatch: RegExpExecArray | null = videoRegex.exec(url.url);
const groupMatch: RegExpExecArray | null = groupRegex.exec(url.url);
if (videoMatch) {
return [videoMatch[1]];
}
else if (groupMatch) {
const videoNumber: number = await client.callApi(`groups/${groupMatch[1]}`, 'get')
.then((response: AxiosResponse<any> | undefined) => response?.data.metrics.videos);
const result: Array<string> = [];
if (videoMatch) {
guidList.push({
url: videoMatch[1],
outDir: url.outDir
});
}
else if (groupMatch) {
const videoNumber: number = await apiClient.callApi(`groups/${groupMatch[1]}`, 'get')
.then((response: AxiosResponse<any> | undefined) => response?.data.metrics.videos);
logger.error(videoNumber);
// Anything above $top=100 results in 400 Bad Request
// Use $skip to skip the first 100 and get another 100 and so on
for (let index = 0; index <= Math.floor(videoNumber / 100); index++) {
await apiClient.callApi(
`groups/${groupMatch[1]}/videos?$skip=${100 * index}&` +
'$top=100&$orderby=publishedDate asc', 'get'
).then((response: AxiosResponse<any> | undefined) => {
response?.data.value.forEach((video: { id: string }) =>
guidList.push({
url: video.id,
outDir: url.outDir
})
);
});
}
}
else {
logger.warn(`Invalid url '${url.url}', skipping...`);
// Anything above $top=100 results in 400 Bad Request
// Use $skip to skip the first 100 and get another 100 and so on
for (let index = 0; index <= Math.floor(videoNumber / 100); index++) {
const partial: Array<string> = await client.callApi(
`groups/${groupMatch[1]}/videos?$skip=${100 * index}&` +
'$top=100&$orderby=publishedDate asc', 'get')
.then(
(response: AxiosResponse<any> | undefined) =>
response?.data.value.map((item: any) => item.id)
);
result.push(...partial);
}
return result;
}
return guidList;
return null;
}
@@ -67,32 +54,30 @@ export async function extractStreamGuids(urlList: Array<VideoUrl>, session: Stre
*
* @param {Array<string>} urlList list of link to parse
* @param {string} defaultOutDir the directry used to save the videos
* @param {Session} session used to call the API to get the GUIDs from group links
*
* @returns Array of 2 elements: 1st an array of Microsoft Stream urls, 2nd an array of SharePoint urls
* @returns Array of 2 elements, 1st one being the GUIDs array, 2nd one the output directories array
*/
export function parseCLIinput(urlList: Array<string>, defaultOutDir: string): Array<Array<VideoUrl>> {
const stream: Array<VideoUrl> = [];
const share: Array<VideoUrl> = [];
export async function parseCLIinput(urlList: Array<string>, defaultOutDir: string,
session: Session): Promise<Array<Array<string>>> {
const apiClient: ApiClient = ApiClient.getInstance(session);
const guidList: Array<string> = [];
for (const url of urlList) {
if (streamUrlRegex.test(url)) {
stream.push({
url: url,
outDir: defaultOutDir
});
}
else if (shareUrlRegex.test(url)) {
share.push({
url: url,
outDir: defaultOutDir
});
const guids: Array<string> | null = await extractGuids(url, apiClient);
if (guids) {
guidList.push(...guids);
}
else {
logger.warn(`Invalid url '${url}', skipping..`);
}
}
return [stream, share];
const outDirList: Array<string> = Array(guidList.length).fill(defaultOutDir);
return [guidList, outDirList];
}
@@ -103,84 +88,94 @@ export function parseCLIinput(urlList: Array<string>, defaultOutDir: string): Ar
*
* @param {string} inputFile path to the text file
* @param {string} defaultOutDir the default/fallback directory used to save the videos
* @param {Session} session used to call the API to get the GUIDs from group links
*
* @returns Array of 2 elements, 1st one being the GUIDs array, 2nd one the output directories array
*/
export function parseInputFile(inputFile: string, defaultOutDir: string): Array<Array<VideoUrl>> {
export async function parseInputFile(inputFile: string, defaultOutDir: string,
session: Session): Promise<Array<Array<string>>> {
// rawContent is a list of each line of the file
const rawContent: Array<string> = fs.readFileSync(inputFile).toString().split(/\r?\n/);
const stream: Array<VideoUrl> = [];
const share: Array<VideoUrl> = [];
let streamUrl = false;
const rawContent: Array<string> = fs.readFileSync(inputFile).toString()
.split(/\r?\n/);
const apiClient: ApiClient = ApiClient.getInstance(session);
const guidList: Array<string> = [];
const outDirList: Array<string> = [];
// if the last line was an url set this
let foundUrl = false;
for (let i = 0; i < rawContent.length; i++) {
const line: string = rawContent[i];
const nextLine: string | null = i < rawContent.length ? rawContent[i + 1] : null;
let outDir = defaultOutDir;
// filter out lines with no content
if (!line.match(/\S/)) {
logger.warn(`Line ${i + 1} is empty, skipping..`);
continue;
}
// check for urls
else if (streamUrlRegex.test(line)) {
streamUrl = true;
}
else if (shareUrlRegex.test(line)) {
streamUrl = false;
}
// now invalid line since we skip ahead one line if we find dir option
else {
logger.warn(`Line ${i + 1}: '${line}' is invalid, skipping..`);
// parse if line is option
else if (line.includes('-dir')) {
if (foundUrl) {
const outDir: string | null = parseOption('-dir', line);
continue;
}
if (outDir && checkOutDir(outDir)) {
outDirList.push(...Array(guidList.length - outDirList.length)
.fill(outDir));
}
else {
outDirList.push(...Array(guidList.length - outDirList.length)
.fill(defaultOutDir));
}
// we now have a valid url, check next line for option
if (nextLine) {
const optionDir = parseOption('-dir', nextLine);
if (optionDir && makeOutDir(optionDir)) {
outDir = optionDir;
// if there was an option we skip a line
i++;
foundUrl = false;
continue;
}
else {
logger.warn(`Found options without preceding url at line ${i + 1}, skipping..`);
continue;
}
}
if (streamUrl) {
stream.push({
url: line,
outDir
});
/* now line is not empty nor an option line.
If foundUrl is still true last line didn't have a directory option
so we stil need to add the default outDir to outDirList to */
if (foundUrl) {
outDirList.push(...Array(guidList.length - outDirList.length)
.fill(defaultOutDir));
foundUrl = false;
}
const guids: Array<string> | null = await extractGuids(line, apiClient);
if (guids) {
guidList.push(...guids);
foundUrl = true;
}
else {
share.push({
url: line,
outDir
});
logger.warn(`Invalid url at line ${i + 1}, skipping..`);
}
}
// if foundUrl is still true after the loop we have some url without an outDir
if (foundUrl) {
outDirList.push(...Array(guidList.length - outDirList.length)
.fill(defaultOutDir));
}
return [stream, share];
return [guidList, outDirList];
}
// This leaves us the option to add more options (badum tss) _Luca
function parseOption(optionSyntax: string, item: string): string | null {
const match: RegExpMatchArray | null = item.match(
RegExp(`^\\s+${optionSyntax}\\s*=\\s*['"](.*)['"]`)
RegExp(`^\\s*${optionSyntax}\\s?=\\s?['"](.*)['"]`)
);
return match ? match[1] : null;
}
/**
* @param directory path to create
* @returns true on success, false otherwise
*/
export function makeOutDir(directory: string): boolean {
export function checkOutDir(directory: string): boolean {
if (!fs.existsSync(directory)) {
try {
fs.mkdirSync(directory);
@@ -198,15 +193,16 @@ export function makeOutDir(directory: string): boolean {
}
export async function getUrlsFromPlaylist(playlistUrl: string, session: Session): Promise<Array<string>> {
return await ApiClient.getInstance(session).callUrl(playlistUrl, 'get', null, 'text')
.then(res => (res?.data as string).split(/\r?\n/)
.filter(line => !(line.startsWith('#') || line === '')));
}
export function checkRequirements(): void {
try {
const copyrightYearRe = new RegExp(/\d{4}-(\d{4})/);
const ffmpegVer: string = execSync('ffmpeg -version').toString().split('\n')[0];
if (parseInt(copyrightYearRe.exec(ffmpegVer)?.[1] ?? '0') <= 2019) {
process.exit(ERROR_CODE.OUTDATED_FFMPEG);
}
logger.verbose(`Using ${ffmpegVer}\n`);
}
catch (e) {
@@ -214,38 +210,20 @@ export function checkRequirements(): void {
}
try {
const versionRegex = new RegExp(/aria2 version (.*)/);
const aira2Ver: string = execSync('aria2c --version').toString().split('\n')[0];
if (versionRegex.test(aira2Ver)) {
logger.verbose(`Using ${aira2Ver}\n`);
}
else {
throw new Error();
}
const aria2Ver: string = execSync('aria2c --version').toString().split('\n')[0];
logger.verbose(`Using ${aria2Ver}\n`);
}
catch (e) {
process.exit(ERROR_CODE.MISSING_ARIA2);
}
}
// number of seconds
export function ffmpegTimemarkToChunk(timemark: string): number {
const timeVals: Array<string> = timemark.split(':');
const hrs: number = parseInt(timeVals[0]);
const mins: number = parseInt(timeVals[1]);
const secs: number = parseInt(timeVals[2]);
return (hrs * 60 * 60) + (mins * 60) + secs;
}
export function promptUser(choices: Array<string>): number {
const index: number = readlineSync.keyInSelect(choices, 'Which resolution/format do you prefer?');
if (index === -1) {
process.exit(ERROR_CODE.CANCELLED_USER_INPUT);
}
return index;
return (hrs * 60) + mins + (secs / 60);
}

View File

@@ -1,16 +1,16 @@
import { StreamApiClient } from './ApiClient';
import { promptUser } from './Utils';
import { ApiClient } from './ApiClient';
import { promptUser } from './CommandLineParser';
import { logger } from './Logger';
import { Video, StreamSession, VideoUrl } from './Types';
import { Video, Session } from './Types';
import { AxiosResponse } from 'axios';
import fs from 'fs';
import { parse as parseDuration, Duration } from 'iso8601-duration';
import path from 'path';
import sanitizeWindowsName from 'sanitize-filename';
import { extractStreamGuids } from './Utils';
export function publishedDateToString(date: string): string {
function publishedDateToString(date: string): string {
const dateJs: Date = new Date(date);
const day: string = dateJs.getDate().toString().padStart(2, '0');
const month: string = (dateJs.getMonth() + 1).toString(10).padStart(2, '0');
@@ -18,45 +18,29 @@ export function publishedDateToString(date: string): string {
return `${dateJs.getFullYear()}-${month}-${day}`;
}
export function publishedTimeToString(seconds: number): string
export function publishedTimeToString(date: string): string
export function publishedTimeToString(date: string | number): string {
let dateJs: Date;
if (typeof (date) === 'number') {
dateJs = new Date(0, 0, 0, 0, 0, date);
}
else {
dateJs = new Date(date);
}
function publishedTimeToString(date: string): string {
const dateJs: Date = new Date(date);
const hours: string = dateJs.getHours().toString();
const minutes: string = dateJs.getMinutes().toString();
const seconds: string = dateJs.getSeconds().toString();
return `${hours}h ${minutes}m ${seconds}s`;
return `${hours}.${minutes}.${seconds}`;
}
export function isoDurationToString(time: string): string {
function isoDurationToString(time: string): string {
const duration: Duration = parseDuration(time);
return `${duration.hours ?? '00'}.${duration.minutes ?? '00'}.${duration.seconds?.toFixed(0) ?? '00'}`;
}
// it's the number of seconds in the video
export function durationToTotalChunks(duration: string,): number {
const durationObj: any = parseDuration(duration);
const hrs: number = durationObj.hours ?? 0;
const mins: number = durationObj.minutes ?? 0;
const secs: number = Math.ceil(durationObj.seconds ?? 0);
return (hrs * 60 * 60) + (mins * 60) + secs;
}
export async function getVideosInfo(videoGuids: Array<string>,
session: Session, subtitles?: boolean): Promise<Array<Video>> {
export async function getStreamInfo(videoUrls: Array<VideoUrl>, session: StreamSession, subtitles?: boolean): Promise<Array<Video>> {
const metadata: Array<Video> = [];
let title: string;
let duration: string;
let publishDate: string;
@@ -64,23 +48,19 @@ export async function getStreamInfo(videoUrls: Array<VideoUrl>, session: StreamS
let author: string;
let authorEmail: string;
let uniqueId: string;
let totalChunks: number;
let playbackUrl: string;
let posterImageUrl: string;
let captionsUrl: string | undefined;
const apiClient: StreamApiClient = StreamApiClient.getInstance(session);
const apiClient: ApiClient = ApiClient.getInstance(session);
// we place the guid in the url field
const videoGUIDs = await extractStreamGuids(videoUrls, session);
/* See 'https://github.com/snobu/destreamer/pull/203' for API throttling mitigation */
for (const guid of videoGuids) {
/* TODO: change this to a single guid at a time to ease our footprint on the
MSS servers or we get throttled after 10 sequential reqs */
for (const guid of videoGUIDs) {
const response: AxiosResponse<any> | undefined =
await apiClient.callApi('videos/' + guid.url + '?$expand=creator', 'get');
await apiClient.callApi('videos/' + guid + '?$expand=creator', 'get');
title = sanitizeWindowsName(response?.data['name']);
@@ -94,9 +74,7 @@ export async function getStreamInfo(videoUrls: Array<VideoUrl>, session: StreamS
authorEmail = response?.data['creator'].mail;
uniqueId = '#' + guid.url.split('-')[0];
totalChunks = durationToTotalChunks(response?.data.media['duration']);
uniqueId = '#' + guid.split('-')[0];
playbackUrl = response?.data['playbackUrls']
.filter((item: { [x: string]: string; }) =>
@@ -108,7 +86,7 @@ export async function getStreamInfo(videoUrls: Array<VideoUrl>, session: StreamS
posterImageUrl = response?.data['posterImage']['medium']['url'];
if (subtitles) {
const captions: AxiosResponse<any> | undefined = await apiClient.callApi(`videos/${guid.url}/texttracks`, 'get');
const captions: AxiosResponse<any> | undefined = await apiClient.callApi(`videos/${guid}/texttracks`, 'get');
if (!captions?.data.value.length) {
captionsUrl = undefined;
@@ -126,19 +104,21 @@ export async function getStreamInfo(videoUrls: Array<VideoUrl>, session: StreamS
}
metadata.push({
guid: guid.url,
title,
duration,
publishDate,
publishTime,
author,
authorEmail,
uniqueId,
outPath: guid.outDir,
totalChunks, // Abstraction of FFmpeg timemark
playbackUrl,
posterImageUrl,
captionsUrl
title: title,
duration: duration,
publishDate: publishDate,
publishTime: publishTime,
author: author,
authorEmail: authorEmail,
uniqueId: uniqueId,
// totalChunks: totalChunks, // Abstraction of FFmpeg timemark
playbackUrl: playbackUrl,
posterImageUrl: posterImageUrl,
captionsUrl: captionsUrl,
filename: '',
outPath: '',
});
}
@@ -146,24 +126,17 @@ export async function getStreamInfo(videoUrls: Array<VideoUrl>, session: StreamS
}
export function createUniquePath(videos: Array<Video>, template: string, format: string, skip?: boolean): Array<Video>
export function createUniquePath(videos: Video, template: string, format: string, skip?: boolean): Video
export function createUniquePath(videos: Array<Video> | Video, template: string, format: string, skip?: boolean): Array<Video> | Video {
let singleInput = false;
export function createUniquePaths(videos: Array<Video>, outDirs: Array<string>,
template: string, format: string, skip?: boolean): Array<Video> {
if (!Array.isArray(videos)) {
videos = [videos];
singleInput = true;
}
videos.forEach((video: Video) => {
videos.forEach((video: Video, index: number) => {
let title: string = template;
let finalTitle: string;
const elementRegEx = RegExp(/{(.*?)}/g);
let match = elementRegEx.exec(template);
while (match) {
const value = video[match[1] as keyof (Video)] as string;
const value = video[match[1] as keyof Video] as string;
title = title.replace(match[0], value);
match = elementRegEx.exec(template);
}
@@ -171,23 +144,24 @@ export function createUniquePath(videos: Array<Video> | Video, template: string,
let i = 0;
finalTitle = title;
while (!skip && fs.existsSync(path.join(video.outPath, finalTitle + '.' + format))) {
while (!skip && fs.existsSync(path.join(outDirs[index], finalTitle + '.' + format))) {
finalTitle = `${title}.${++i}`;
}
const finalFileName = `${finalTitle}.${format}`;
const cleanFileName = sanitizeWindowsName(finalFileName, { replacement: '_' });
if (finalFileName !== cleanFileName) {
logger.warn(`Not a valid Windows file name: "${finalFileName}".\nReplacing invalid characters with underscores to preserve cross-platform consistency.`);
logger.warn(
`Not a valid Windows file name: "${finalFileName}"` +
'\nReplacing invalid characters with underscores to ' +
'preserve cross-platform consistency.');
}
video.outPath = path.join(video.outPath, finalFileName);
video.filename = finalFileName;
video.outPath = path.join(outDirs[index], finalFileName);
});
if (singleInput) {
return videos[0];
}
return videos;
}

View File

@@ -1,23 +1,38 @@
import { argv } from './CommandLineParser';
import { ApiClient } from './ApiClient';
import { argv, promptUser } from './CommandLineParser';
import { getDecrypter } from './Decrypter';
import { DownloadManager } from './DownloadManager';
import { ERROR_CODE } from './Errors';
import { setProcessEvents } from './Events';
import { logger } from './Logger';
import { VideoUrl } from './Types';
import { checkRequirements, parseInputFile, parseCLIinput } from './Utils';
import { getPuppeteerChromiumPath } from './PuppeteerHelper';
import { drawThumbnail } from './Thumbnail';
import { TokenCache, refreshSession} from './TokenCache';
import { Video, Session } from './Types';
import { checkRequirements, parseInputFile, parseCLIinput, getUrlsFromPlaylist} from './Utils';
import { getVideosInfo, createUniquePaths } from './VideoUtils';
import { spawn, execSync, ChildProcess } from 'child_process';
import fs from 'fs';
import isElevated from 'is-elevated';
import { downloadShareVideo, downloadStreamVideo } from './Downloaders';
import portfinder from 'portfinder';
import puppeteer from 'puppeteer';
import path from 'path';
import tmp from 'tmp';
// TODO: can we create an export or something for this?
const m3u8Parser: any = require('m3u8-parser');
const tokenCache: TokenCache = new TokenCache();
const downloadManager = new DownloadManager();
export const chromeCacheFolder = '.chrome_data';
tmp.setGracefulCleanup();
async function init(): Promise<void> {
setProcessEvents(); // must be first!
setProcessEvents(); // must be first!
if (argv.verbose) {
logger.level = 'verbose';
}
logger.level = argv.debug ? 'debug' : (argv.verbose ? 'verbose' : 'info');
if (await isElevated()) {
process.exit(ERROR_CODE.ELEVATED_SHELL);
@@ -35,31 +50,353 @@ async function init(): Promise<void> {
}
async function DoInteractiveLogin(url: string, username?: string): Promise<Session> {
logger.info('Launching headless Chrome to perform the OpenID Connect dance...');
const browser: puppeteer.Browser = await puppeteer.launch({
executablePath: getPuppeteerChromiumPath(),
headless: false,
userDataDir: (argv.keepLoginCookies) ? chromeCacheFolder : undefined,
args: [
'--disable-dev-shm-usage',
'--fast-start',
'--no-sandbox'
]
});
const page: puppeteer.Page = (await browser.pages())[0];
logger.info('Navigating to login page...');
await page.goto(url, { waitUntil: 'load' });
try {
if (username) {
await page.waitForSelector('input[type="email"]', {timeout: 3000});
await page.keyboard.type(username);
await page.click('input[type="submit"]');
}
else {
/* If a username was not provided we let the user take actions that
lead up to the video page. */
}
}
catch (e) {
/* If there is no email input selector we aren't in the login module,
we are probably using the cache to aid the login.
It could finish the login on its own if the user said 'yes' when asked to
remember the credentials or it could still prompt the user for a password */
}
await browser.waitForTarget((target: puppeteer.Target) => target.url().endsWith('microsoftstream.com/'), { timeout: 150000 });
logger.info('We are logged in.');
let session: Session | null = null;
let tries = 1;
while (!session) {
try {
let sessionInfo: any;
session = await page.evaluate(
() => {
return {
AccessToken: sessionInfo.AccessToken,
ApiGatewayUri: sessionInfo.ApiGatewayUri,
ApiGatewayVersion: sessionInfo.ApiGatewayVersion
};
}
);
}
catch (error) {
if (tries > 5) {
process.exit(ERROR_CODE.NO_SESSION_INFO);
}
session = null;
tries++;
await page.waitFor(3000);
}
}
tokenCache.Write(session);
logger.info('Wrote access token to token cache.');
logger.info("At this point Chromium's job is done, shutting it down... \n\n");
await browser.close();
return session;
}
async function downloadVideo(videoGUIDs: Array<string>,
outputDirectories: Array<string>, session: Session): Promise<void> {
const apiClient = ApiClient.getInstance(session);
logger.info('Downloading video info, this might take a while...');
const videos: Array<Video> = createUniquePaths (
await getVideosInfo(videoGUIDs, session, argv.closedCaptions),
outputDirectories, argv.outputTemplate ,argv.format, argv.skip
);
if (argv.simulate) {
videos.forEach(video => {
logger.info(
'\nTitle: '.green + video.title +
'\nOutPath: '.green + video.outPath +
'\nPublished Date: '.green + video.publishDate + ' ' + video.publishTime +
'\nPlayback URL: '.green + video.playbackUrl +
((video.captionsUrl) ? ('\nCC URL: '.green + video.captionsUrl) : '')
);
});
return;
}
logger.info('Trying to launch and connect to aria2c...\n');
/* FIXME: aria2Exec must be defined here for the scope but later on it's complaining that it's not
initialized even if we never reach line#361 if we fail the assignment here*/
let aria2cExec: ChildProcess;
let arai2cExited = false;
await portfinder.getPortPromise({ port: 6800 }).then(
async (port: number) => {
logger.debug(`[DESTREAMER] Trying to use port ${port}`);
// Launch aria2c
aria2cExec = spawn(
'aria2c',
['--pause=true', '--enable-rpc', '--allow-overwrite=true', '--auto-file-renaming=false', `--rpc-listen-port=${port}`],
{stdio: 'ignore'}
);
aria2cExec.on('exit', (code: number | null, signal: string) => {
if (code === 0) {
logger.verbose('Aria2c process exited');
arai2cExited = true;
}
else {
logger.error(`aria2c exit code: ${code}` + '\n' + `aria2c exit signal: ${signal}`);
process.exit(ERROR_CODE.ARIA2C_CRASH);
}
});
aria2cExec.on('error', (err) => {
logger.error(err as Error);
});
// init webSocket
await downloadManager.init(port, );
// We are connected
},
error => {
logger.error(error);
process.exit(ERROR_CODE.NO_DAEMON_PORT);
}
);
for (const video of videos) {
const masterParser = new m3u8Parser.Parser();
logger.info(`\nDownloading video no.${videos.indexOf(video) + 1} \n`);
if (argv.skip && fs.existsSync(video.outPath)) {
logger.info(`File already exists, skipping: ${video.outPath} \n`);
continue;
}
const [isSessionExpiring] = tokenCache.isExpiring(session);
if (argv.keepLoginCookies && isSessionExpiring) {
logger.info('Trying to refresh access token...');
session = await refreshSession('https://web.microsoftstream.com/');
apiClient.setSession(session);
}
masterParser.push(await apiClient.callUrl(video.playbackUrl).then(res => res?.data));
masterParser.end();
// video playlist url
let videoPlaylistUrl: string;
const videoPlaylists: Array<any> = (masterParser.manifest.playlists as Array<any>)
.filter(playlist =>
Object.prototype.hasOwnProperty.call(playlist.attributes, 'RESOLUTION'));
if (videoPlaylists.length === 1 || argv.selectQuality === 10) {
videoPlaylistUrl = videoPlaylists.pop().uri;
}
else if (argv.selectQuality === 0) {
const resolutions = videoPlaylists.map(playlist =>
playlist.attributes.RESOLUTION.width + 'x' +
playlist.attributes.RESOLUTION.height
);
videoPlaylistUrl = videoPlaylists[promptUser(resolutions)].uri;
}
else {
let choiche = Math.round((argv.selectQuality * videoPlaylists.length) / 10);
if (choiche === videoPlaylists.length) {
choiche--;
}
logger.debug(`Video quality choiche: ${choiche}`);
videoPlaylistUrl = videoPlaylists[choiche].uri;
}
// audio playlist url
// TODO: better audio playlists parsing? With language maybe?
const audioPlaylists: Array<string> =
Object.keys(masterParser.manifest.mediaGroups.AUDIO.audio);
const audioPlaylistUrl: string =
masterParser.manifest.mediaGroups.AUDIO.audio[audioPlaylists[0]].uri;
// if (audioPlaylists.length === 1){
// audioPlaylistUrl = masterParser.manifest.mediaGroups.AUDIO
// .audio[audioPlaylists[0]].uri;
// }
// else {
// audioPlaylistUrl = masterParser.manifest.mediaGroups.AUDIO
// .audio[audioPlaylists[promptUser(audioPlaylists)]].uri;
// }
const videoUrls = await getUrlsFromPlaylist(videoPlaylistUrl, session);
const audioUrls = await getUrlsFromPlaylist(audioPlaylistUrl, session);
const videoDecrypter = await getDecrypter(videoPlaylistUrl, session);
const audioDecrypter = await getDecrypter(videoPlaylistUrl, session);
if (!argv.noExperiments) {
await drawThumbnail(video.posterImageUrl, session);
}
// video download
const videoSegmentsDir = tmp.dirSync({
prefix: 'video',
tmpdir: path.dirname(video.outPath),
unsafeCleanup: true
});
logger.info('\nDownloading video segments \n');
await downloadManager.downloadUrls(videoUrls, videoSegmentsDir.name);
// audio download
const audioSegmentsDir = tmp.dirSync({
prefix: 'audio',
tmpdir: path.dirname(video.outPath),
unsafeCleanup: true
});
logger.info('\nDownloading audio segments \n');
await downloadManager.downloadUrls(audioUrls, audioSegmentsDir.name);
// subs download
if (argv.closedCaptions && video.captionsUrl) {
logger.info('\nDownloading subtitles \n');
await apiClient.callUrl(video.captionsUrl, 'get', null, 'text')
.then(res => fs.writeFileSync(
path.join(videoSegmentsDir.name, 'CC.vtt'), res?.data));
}
logger.info('\n\nMerging and decrypting video and audio segments...\n');
const cmd = (process.platform == 'win32') ? 'copy /b *.encr ' : 'cat *.encr > ';
execSync(cmd + `"${video.filename}.video.encr"`, { cwd: videoSegmentsDir.name });
const videoDecryptInput = fs.createReadStream(
path.join(videoSegmentsDir.name, video.filename + '.video.encr'));
const videoDecryptOutput = fs.createWriteStream(
path.join(videoSegmentsDir.name, video.filename + '.video'));
const decryptVideoPromise = new Promise(resolve => {
videoDecryptOutput.on('finish', resolve);
videoDecryptInput.pipe(videoDecrypter).pipe(videoDecryptOutput);
});
execSync(cmd + `"${video.filename}.audio.encr"`, {cwd: audioSegmentsDir.name});
const audioDecryptInput = fs.createReadStream(
path.join(audioSegmentsDir.name, video.filename + '.audio.encr'));
const audioDecryptOutput = fs.createWriteStream(
path.join(audioSegmentsDir.name, video.filename + '.audio'));
const decryptAudioPromise = new Promise(resolve => {
audioDecryptOutput.on('finish', resolve);
audioDecryptInput.pipe(audioDecrypter).pipe(audioDecryptOutput);
});
await Promise.all([decryptVideoPromise, decryptAudioPromise]);
logger.info('Decrypted!\n');
logger.info('Merging video and audio together...\n');
const mergeCommand = (
// add video input
`ffmpeg -i "${path.join(videoSegmentsDir.name, video.filename + '.video')}" ` +
// add audio input
`-i "${path.join(audioSegmentsDir.name, video.filename + '.audio')}" ` +
// add subtitles input if present and wanted
((argv.closedCaptions && video.captionsUrl) ?
`-i "${path.join(videoSegmentsDir.name, 'CC.vtt')}" ` : '') +
// copy codec and output path
`-c copy "${video.outPath}"`
);
logger.debug('[destreamer] ' + mergeCommand);
execSync(mergeCommand, { stdio: 'ignore' });
logger.info('Done! Removing temp files...\n');
videoSegmentsDir.removeCallback();
audioSegmentsDir.removeCallback();
logger.info(`Video no.${videos.indexOf(video) + 1} downloaded!!\n\n`);
}
logger.info('Exiting, this will take some seconds...');
logger.debug('[destreamer] closing downloader socket');
await downloadManager.close();
logger.debug('[destreamer] closed downloader. Waiting aria2c deamon exit');
let tries = 0;
while (!arai2cExited) {
if (tries < 10) {
tries++;
await new Promise(r => setTimeout(r, 1000));
}
else {
aria2cExec!.kill('SIGINT');
}
}
logger.debug('[destreamer] stopped aria2c');
return;
}
async function main(): Promise<void> {
await init(); // must be first
let streamVideos: Array<VideoUrl>, shareVideos: Array<VideoUrl>;
const session: Session = tokenCache.Read() ??
await DoInteractiveLogin('https://web.microsoftstream.com/', argv.username);
logger.verbose('Session and API info \n' +
'\t API Gateway URL: '.cyan + session.ApiGatewayUri + '\n' +
'\t API Gateway version: '.cyan + session.ApiGatewayVersion + '\n');
let videoGUIDs: Array<string>;
let outDirs: Array<string>;
if (argv.videoUrls) {
logger.info('Parsing video/group urls');
[streamVideos, shareVideos] = await parseCLIinput(argv.videoUrls as Array<string>, argv.outputDirectory);
[videoGUIDs, outDirs] = await parseCLIinput(argv.videoUrls as Array<string>, argv.outputDirectory, session);
}
else {
logger.info('Parsing input file');
[streamVideos, shareVideos] = await parseInputFile(argv.inputFile!, argv.outputDirectory);
[videoGUIDs, outDirs] = await parseInputFile(argv.inputFile!, argv.outputDirectory, session);
}
logger.verbose(
'List of urls and corresponding output directory \n' +
streamVideos.map(video => `\t${video.url} => ${video.outDir} \n`).join('') +
shareVideos.map(video => `\t${video.url} => ${video.outDir} \n`).join('')
);
logger.verbose('List of videos and corresponding output directory \n' +
videoGUIDs.map((guid: string, i: number) =>
`\thttps://web.microsoftstream.com/video/${guid} => ${outDirs[i]} \n`).join(''));
if (streamVideos.length) {
await downloadStreamVideo(streamVideos);
}
if (shareVideos.length) {
await downloadShareVideo(shareVideos);
}
// fuck you bug, I WON!!!
await downloadVideo(videoGUIDs, outDirs, session);
}

View File

@@ -1,15 +1,32 @@
import { extractStreamGuids, parseInputFile } from '../src/Utils';
import { parseInputFile } from '../src/Utils';
import puppeteer from 'puppeteer';
import assert from 'assert';
import tmp from 'tmp';
import fs from 'fs';
import { StreamSession, VideoUrl } from './Types';
import { Session } from './Types';
describe('Puppeteer', () => {
it('should grab GitHub page title', async () => {
const browser = await puppeteer.launch({
headless: true,
args: ['--disable-dev-shm-usage', '--fast-start', '--no-sandbox']
});
const page = await browser.newPage();
await page.goto('https://github.com/', { waitUntil: 'load' });
let pageTitle = await page.title();
assert.equal(true, pageTitle.includes('GitHub'));
await browser.close();
}).timeout(30000); // yeah, this may take a while...
});
// we cannot test groups parsing as that requires an actual session
// TODO: add SharePoint urls
describe('Destreamer parsing', () => {
it('Input file to arrays of guids', async () => {
const testSession: StreamSession = {
it('Input file to arrays of URLs and DIRs', async () => {
const testSession: Session = {
AccessToken: '',
ApiGatewayUri: '',
ApiGatewayVersion: ''
@@ -27,42 +44,33 @@ describe('Destreamer parsing', () => {
'https://web.microsoftstream.com/video/xxxxxx-gggg-xxxx-xxxx-xxxxxxxxxxxx',
''
];
const expectedStreamOut: Array<VideoUrl> = [
{
url: 'xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx',
outDir: 'videos'
},
{
url: 'xxxxxxxx-bbbb-xxxx-xxxx-xxxxxxxxxxxx',
outDir: 'luca'
},
{
url: 'xxxxxxxx-cccc-xxxx-xxxx-xxxxxxxxxxxx',
outDir: 'videos'
},
{
url: 'xxxxxxxx-dddd-xxxx-xxxx-xxxxxxxxxxxx',
outDir: 'videos'
},
{
url: 'xxxxxxxx-eeee-xxxx-xxxx-xxxxxxxxxxxx',
outDir: 'videos'
},
const expectedGUIDsOut: Array<string> = [
'xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx',
'xxxxxxxx-bbbb-xxxx-xxxx-xxxxxxxxxxxx',
'xxxxxxxx-cccc-xxxx-xxxx-xxxxxxxxxxxx',
'xxxxxxxx-dddd-xxxx-xxxx-xxxxxxxxxxxx',
'xxxxxxxx-eeee-xxxx-xxxx-xxxxxxxxxxxx'
];
const expectedDirOut: Array<string> = [
'videos',
'luca',
'videos',
'videos',
'videos'
];
const tmpFile = tmp.fileSync({ postfix: '.txt' });
fs.writeFileSync(tmpFile.fd, testIn.join('\r\n'));
const [testStreamUrls]: Array<Array<VideoUrl>> = parseInputFile(tmpFile.name, 'videos');
assert.deepStrictEqual(
await extractStreamGuids(testStreamUrls, testSession),
expectedStreamOut,
'Error in parsing the URLs, missmatch between test and expected'.red
);
// assert.deepStrictEqual(testUrlOut, expectedGUIDsOut,
// 'Error in parsing the DIRs, missmatch between test and expected'.red);
const [testUrlOut , testDirOut]: Array<Array<string>> = await parseInputFile(tmpFile.name, 'videos', testSession);
if (testUrlOut.length !== expectedGUIDsOut.length) {
throw "Expected url list and test list don't have the same number of elements".red;
}
else if (testDirOut.length !== expectedDirOut.length) {
throw "Expected dir list and test list don't have the same number of elements".red;
}
assert.deepStrictEqual(testUrlOut, expectedGUIDsOut,
'Error in parsing the URLs, missmatch between test and expected'.red);
assert.deepStrictEqual(testUrlOut, expectedGUIDsOut,
'Error in parsing the DIRs, missmatch between test and expected'.red);
assert.ok('Parsing of input file ok');
});
});