Major code refactoring (#164)
* Added Chromium caching of identity provider cookies
* Moved token expiry check into a standalone method
* Created refreshSession function
* Session is now refreshed if the token expires
* Linting fixes
* Removed debug console.log()
* Added CC support
* Created function to prompt user for download parameters (interactive mode)
* Fixed data folder for Puppeteer
* Fixed multiple session error
* Fixed token expiry time
* Moved session refreshing to a more sensible place
* Changed Metadata name to Video (to better reflect the data structure)
* Complete CLI refactoring
* Removed useless sleep function
* Added outDir check from CLI
* Complete input parsing refactoring (both inline and file)
* Fixed and improved tests to work with the new input parsing
* Moved and improved output path generation to videoUtils
* Main code refactoring, added outPath to the Video type
* Minor changes in spacing and type definition style
* Updated readme after code refactoring
* Fix for when inputFile doesn't start with a URL on line 1
* Minor naming change
* Use module 'winston' for logging
* Created logger, changed all console.log and similar calls to use the logger
* Added verbose logging, changed posterUrl property name on Video type
* Moved GUID extraction to input parsing
* Added support for group links
* Fixed test after last input parsing update
* Removed debug process.exit()
* Changed from desc to asc order for group videos
* Updated test to reflect GUIDs output after parsing
* Added a couple of comments and restyled some imports
* More readable verbose GUIDs logging
* Removed unused errors
* Temporary fix for timeout not working in ApiClient
* Explicit class member accessibility
* Defined array naming schema to be Array<T>
* Defined type/interface schema to be type only
* A LOT of type definitions
This commit is contained in:
parent 89a942eb24
commit 7bfc565a05
19 changed files with 981 additions and 638 deletions
|
@ -1,12 +1,10 @@
|
|||
// NOTE: if you have trouble with a rule not working install eslint as global
|
||||
// then use `eslint --print-config out.txt` and check the output for problems
|
||||
{
|
||||
"env": {
|
||||
"es6": true,
|
||||
"node": true
|
||||
},
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/eslint-recommended"
|
||||
],
|
||||
"globals": {
|
||||
"Atomics": "readonly",
|
||||
"SharedArrayBuffer": "readonly"
|
||||
|
@ -19,6 +17,10 @@
|
|||
"plugins": [
|
||||
"@typescript-eslint"
|
||||
],
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/eslint-recommended"
|
||||
],
|
||||
"rules": {
|
||||
"semi": [2, "always"],
|
||||
"no-unused-vars": "off",
|
||||
|
@ -29,6 +31,14 @@
|
|||
{ "blankLine": "always", "prev": "*", "next": "return" }
|
||||
],
|
||||
"brace-style": [2, "stroustrup", { "allowSingleLine": false }],
|
||||
"curly": ["error", "all"]
|
||||
"curly": ["error", "all"],
|
||||
"@typescript-eslint/consistent-type-definitions": ["error", "type"],
|
||||
"@typescript-eslint/explicit-function-return-type": "error",
|
||||
"@typescript-eslint/explicit-member-accessibility": "error",
|
||||
"@typescript-eslint/array-type": ["error", {"default": "generic"}],
|
||||
|
||||
// these two rules are conflicting, I need some help solving this. I'll disable one for now
|
||||
// "@typescript-eslint/typedef": ["error", { "variableDeclaration": true }],
|
||||
"@typescript-eslint/no-inferrable-types": "error"
|
||||
}
|
||||
}
|
||||
}
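The new @typescript-eslint rules above translate into code like the following minimal sketch (not taken from this commit): explicit member accessibility, explicit return types, the generic `Array<T>` syntax, and `type` aliases instead of `interface`.

```typescript
// Illustrative only — code that satisfies the rules added above.
type Item = {
    id: string;
};

export class Example {
    // explicit-member-accessibility: 'private' must be spelled out
    private items: Array<Item> = [];   // array-type: generic Array<T> form

    // explicit-function-return-type: the return type is mandatory
    public count(): number {
        return this.items.length;
    }
}
```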
|
||||
|
|
4 .gitignore (vendored)
|
@ -3,7 +3,9 @@
|
|||
*.log
|
||||
*.js
|
||||
*.zip
|
||||
|
||||
.chrome_data
|
||||
node_modules
|
||||
videos
|
||||
release
|
||||
build
|
||||
build
|
||||
|
|
73 README.md
|
@ -69,44 +69,44 @@ $ npm run build
|
|||
$ ./destreamer.sh
|
||||
|
||||
Options:
|
||||
--help Show help [boolean]
|
||||
--version Show version number [boolean]
|
||||
--videoUrls, -i List of video urls [array]
|
||||
--videoUrlsFile, -f Path to txt file containing the urls [string]
|
||||
--username, -u [string]
|
||||
--outputDirectory, -o The directory where destreamer will save your
|
||||
downloads [default: videos] [string]
|
||||
--outputDirectories, -O Path to a txt file containing one output directory
|
||||
per video [string]
|
||||
--noExperiments, -x Do not attempt to render video thumbnails in the
|
||||
console [boolean] [default: false]
|
||||
--simulate, -s Disable video download and print metadata information
|
||||
to the console [boolean] [default: false]
|
||||
--verbose, -v Print additional information to the console (use this
|
||||
before opening an issue on GitHub)
|
||||
[boolean] [default: false]
|
||||
--noCleanup, --nc Don't delete the downloaded video file when an FFmpeg
|
||||
error occurs [boolean] [default: false]
|
||||
--vcodec Re-encode video track. Specify FFmpeg codec (e.g.
|
||||
libx265) or set to "none" to disable video.
|
||||
[string] [default: "copy"]
|
||||
--acodec Re-encode audio track. Specify FFmpeg codec (e.g.
|
||||
libopus) or set to "none" to disable audio.
|
||||
[string] [default: "copy"]
|
||||
--format Output container format (mkv, mp4, mov, anything that
|
||||
FFmpeg supports) [string] [default: "mkv"]
|
||||
--skip Skip download if file already exists
|
||||
[boolean] [default: false]
|
||||
--help Show help [boolean]
|
||||
--version Show version number [boolean]
|
||||
--username, -u The username used to log into Microsoft Stream (enabling this will fill in the email field for
|
||||
you) [string]
|
||||
--videoUrls, -i List of video urls [array]
|
||||
--inputFile, -f Path to text file containing URLs and optionally outDirs. See the README for more on outDirs.
|
||||
[string]
|
||||
--outputDirectory, -o The directory where destreamer will save your downloads [string] [default: "videos"]
|
||||
--keepLoginCookies, -k Let Chromium cache identity provider cookies so you can use "Remember me" during login
|
||||
[boolean] [default: false]
|
||||
--noExperiments, -x Do not attempt to render video thumbnails in the console [boolean] [default: false]
|
||||
--simulate, -s Disable video download and print metadata information to the console [boolean] [default: false]
|
||||
--verbose, -v Print additional information to the console (use this before opening an issue on GitHub)
|
||||
[boolean] [default: false]
|
||||
--closedCaptions, --cc Check if closed captions are available and let the user choose which one to download (will not
ask if only one available) [boolean] [default: false]
|
||||
--noCleanup, --nc Do not delete the downloaded video file when an FFmpeg error occurs [boolean] [default: false]
|
||||
--vcodec Re-encode video track. Specify FFmpeg codec (e.g. libx265) or set to "none" to disable video.
|
||||
[string] [default: "copy"]
|
||||
--acodec Re-encode audio track. Specify FFmpeg codec (e.g. libopus) or set to "none" to disable audio.
|
||||
[string] [default: "copy"]
|
||||
--format Output container format (mkv, mp4, mov, anything that FFmpeg supports)
|
||||
[string] [default: "mkv"]
|
||||
--skip Skip download if file already exists [boolean] [default: false]
|
||||
```
|
||||
|
||||
We default to `.mkv` for the output container. If you prefer something else (like `mp4`), pass `--format mp4`.
|
||||
- Passing `--username` is optional. It's there to make logging in faster (the username field will be populated automatically on the login form).
|
||||
|
||||
- You can use an absolute path for `-o` (output directory), for example `/mnt/videos`.
|
||||
|
||||
- We default to `.mkv` for the output container. If you prefer something else (like `mp4`), pass `--format mp4`.
|
||||
|
||||
Download a video -
|
||||
```sh
|
||||
$ ./destreamer.sh -i "https://web.microsoftstream.com/video/VIDEO-1"
|
||||
```
|
||||
|
||||
Download a video and re-encode with HEVC (libx265):
|
||||
Download a video and re-encode with HEVC (libx265) -
|
||||
```sh
|
||||
$ ./destreamer.sh -i "https://web.microsoftstream.com/video/VIDEO-1" --vcodec libx265
|
||||
```
|
||||
|
@ -131,12 +131,19 @@ Download many videos but read URLs from a file -
|
|||
```sh
|
||||
$ ./destreamer.sh -f list.txt
|
||||
```
|
||||
|
||||
### Input file
|
||||
You can create a `.txt` file containing your video URLs, one video per line. The text file can have any name, followed by the `.txt` extension.
|
||||
Additionally you can have destreamer download each video in the input list to a separate directory.
|
||||
These optional lines must start with white space(s).
|
||||
|
||||
Passing `--username` is optional. It's there to make logging in faster (the username field will be populated automatically on the login form).
|
||||
Usage -
|
||||
```
|
||||
https://web.microsoftstream.com/video/xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx
|
||||
 -dir="videos/lessons/week1"
https://web.microsoftstream.com/video/xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx
 -dir="videos/lessons/week2"
|
||||
```
|
||||
|
||||
You can use an absolute path for `-o` (output directory), for example `/mnt/videos`.
|
||||
|
||||
## Expected output
|
||||
|
||||
|
|
192 package-lock.json (generated)
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "destreamer",
|
||||
"version": "2.0.0",
|
||||
"version": "2.1.0",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
|
@ -38,6 +38,16 @@
|
|||
"regenerator-runtime": "^0.13.4"
|
||||
}
|
||||
},
|
||||
"@dabh/diagnostics": {
|
||||
"version": "2.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.2.tgz",
|
||||
"integrity": "sha512-+A1YivoVDNNVCdfozHSR8v/jyuuLTMXwjWuxPFlFlUapXoGc+Gj9mDlTDDfrwl7rXCl2tNZ0kE8sIBO6YOn96Q==",
|
||||
"requires": {
|
||||
"colorspace": "1.1.x",
|
||||
"enabled": "2.0.x",
|
||||
"kuler": "^2.0.0"
|
||||
}
|
||||
},
|
||||
"@jimp/bmp": {
|
||||
"version": "0.10.3",
|
||||
"resolved": "https://registry.npmjs.org/@jimp/bmp/-/bmp-0.10.3.tgz",
|
||||
|
@ -449,6 +459,12 @@
|
|||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"@types/readline-sync": {
|
||||
"version": "1.4.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/readline-sync/-/readline-sync-1.4.3.tgz",
|
||||
"integrity": "sha512-YP9NVli96E+qQLAF2db+VjnAUEeZcFVg4YnMgr8kpDUFwQBnj31rPLOVHmazbKQhaIkJ9cMHsZhpKdzUeL0KTg==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/tmp": {
|
||||
"version": "0.1.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.1.0.tgz",
|
||||
|
@ -646,6 +662,11 @@
|
|||
"integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==",
|
||||
"dev": true
|
||||
},
|
||||
"async": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz",
|
||||
"integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw=="
|
||||
},
|
||||
"async-limiter": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz",
|
||||
|
@ -840,6 +861,30 @@
|
|||
"wrap-ansi": "^6.2.0"
|
||||
}
|
||||
},
|
||||
"color": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/color/-/color-3.0.0.tgz",
|
||||
"integrity": "sha512-jCpd5+s0s0t7p3pHQKpnJ0TpQKKdleP71LWcA0aqiljpiuAkOSUFN/dyH8ZwF0hRmFlrIuRhufds1QyEP9EB+w==",
|
||||
"requires": {
|
||||
"color-convert": "^1.9.1",
|
||||
"color-string": "^1.5.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"color-convert": {
|
||||
"version": "1.9.3",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
|
||||
"integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
|
||||
"requires": {
|
||||
"color-name": "1.1.3"
|
||||
}
|
||||
},
|
||||
"color-name": {
|
||||
"version": "1.1.3",
|
||||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
|
||||
"integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU="
|
||||
}
|
||||
}
|
||||
},
|
||||
"color-convert": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
|
||||
|
@ -853,11 +898,29 @@
|
|||
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
|
||||
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
|
||||
},
|
||||
"color-string": {
|
||||
"version": "1.5.3",
|
||||
"resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.3.tgz",
|
||||
"integrity": "sha512-dC2C5qeWoYkxki5UAXapdjqO672AM4vZuPGRQfO8b5HKuKGBbKWpITyDYN7TOFKvRW7kOgAn3746clDBMDJyQw==",
|
||||
"requires": {
|
||||
"color-name": "^1.0.0",
|
||||
"simple-swizzle": "^0.2.2"
|
||||
}
|
||||
},
|
||||
"colors": {
|
||||
"version": "1.4.0",
|
||||
"resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz",
|
||||
"integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA=="
|
||||
},
|
||||
"colorspace": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.2.tgz",
|
||||
"integrity": "sha512-vt+OoIP2d76xLhjwbBaucYlNSpPsrJWPlBTtwCpQKIu6/CSMutyzX93O/Do0qzpH3YoHEes8YEFXyZ797rEhzQ==",
|
||||
"requires": {
|
||||
"color": "3.0.x",
|
||||
"text-hex": "1.0.x"
|
||||
}
|
||||
},
|
||||
"concat-map": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
|
@ -949,6 +1012,11 @@
|
|||
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
||||
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
|
||||
},
|
||||
"enabled": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz",
|
||||
"integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ=="
|
||||
},
|
||||
"end-of-stream": {
|
||||
"version": "1.4.4",
|
||||
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
|
||||
|
@ -1251,6 +1319,11 @@
|
|||
"integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=",
|
||||
"dev": true
|
||||
},
|
||||
"fast-safe-stringify": {
|
||||
"version": "2.0.7",
|
||||
"resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.7.tgz",
|
||||
"integrity": "sha512-Utm6CdzT+6xsDk2m8S6uL8VHxNwI6Jub+e9NYTcAms28T84pTa25GJQV9j0CY0N1rM8hK4x6grpF2BQf+2qwVA=="
|
||||
},
|
||||
"fd-slicer": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz",
|
||||
|
@ -1259,6 +1332,11 @@
|
|||
"pend": "~1.2.0"
|
||||
}
|
||||
},
|
||||
"fecha": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.0.tgz",
|
||||
"integrity": "sha512-aN3pcx/DSmtyoovUudctc8+6Hl4T+hI9GBBHLjA76jdZl7+b1sgh5g4k+u/GL3dTy1/pnYzKp69FpJ0OicE3Wg=="
|
||||
},
|
||||
"figures": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz",
|
||||
|
@ -1345,6 +1423,11 @@
|
|||
"integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==",
|
||||
"dev": true
|
||||
},
|
||||
"fn.name": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz",
|
||||
"integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw=="
|
||||
},
|
||||
"follow-redirects": {
|
||||
"version": "1.5.10",
|
||||
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz",
|
||||
|
@ -1593,6 +1676,11 @@
|
|||
"execa": "^1.0.0"
|
||||
}
|
||||
},
|
||||
"is-arrayish": {
|
||||
"version": "0.3.2",
|
||||
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
|
||||
"integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="
|
||||
},
|
||||
"is-binary-path": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
|
||||
|
@ -1768,6 +1856,11 @@
|
|||
"resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-2.2.0.tgz",
|
||||
"integrity": "sha1-fYa9VmefWM5qhHBKZX3TkruoGnk="
|
||||
},
|
||||
"kuler": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz",
|
||||
"integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A=="
|
||||
},
|
||||
"levn": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz",
|
||||
|
@ -1823,6 +1916,25 @@
|
|||
"chalk": "^2.4.2"
|
||||
}
|
||||
},
|
||||
"logform": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/logform/-/logform-2.2.0.tgz",
|
||||
"integrity": "sha512-N0qPlqfypFx7UHNn4B3lzS/b0uLqt2hmuoa+PpuXNYgozdJYAyauF5Ky0BWVjrxDlMWiT3qN4zPq3vVAfZy7Yg==",
|
||||
"requires": {
|
||||
"colors": "^1.2.1",
|
||||
"fast-safe-stringify": "^2.0.4",
|
||||
"fecha": "^4.2.0",
|
||||
"ms": "^2.1.1",
|
||||
"triple-beam": "^1.3.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"ms": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
||||
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"mime": {
|
||||
"version": "2.4.5",
|
||||
"resolved": "https://registry.npmjs.org/mime/-/mime-2.4.5.tgz",
|
||||
|
@ -2202,6 +2314,14 @@
|
|||
"wrappy": "1"
|
||||
}
|
||||
},
|
||||
"one-time": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz",
|
||||
"integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==",
|
||||
"requires": {
|
||||
"fn.name": "1.x.x"
|
||||
}
|
||||
},
|
||||
"onetime": {
|
||||
"version": "5.1.0",
|
||||
"resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz",
|
||||
|
@ -2452,6 +2572,11 @@
|
|||
"picomatch": "^2.0.4"
|
||||
}
|
||||
},
|
||||
"readline-sync": {
|
||||
"version": "1.4.10",
|
||||
"resolved": "https://registry.npmjs.org/readline-sync/-/readline-sync-1.4.10.tgz",
|
||||
"integrity": "sha512-gNva8/6UAe8QYepIQH/jQ2qn91Qj0B9sYjMBBs3QOB8F2CXcKgLxQaJRP76sWVRQt+QU+8fAkCbCvjjMFu7Ycw=="
|
||||
},
|
||||
"regenerator-runtime": {
|
||||
"version": "0.13.5",
|
||||
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz",
|
||||
|
@ -2567,6 +2692,14 @@
|
|||
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz",
|
||||
"integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA=="
|
||||
},
|
||||
"simple-swizzle": {
|
||||
"version": "0.2.2",
|
||||
"resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz",
|
||||
"integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=",
|
||||
"requires": {
|
||||
"is-arrayish": "^0.3.1"
|
||||
}
|
||||
},
|
||||
"slice-ansi": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz",
|
||||
|
@ -2616,6 +2749,11 @@
|
|||
"integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=",
|
||||
"dev": true
|
||||
},
|
||||
"stack-trace": {
|
||||
"version": "0.0.10",
|
||||
"resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz",
|
||||
"integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA="
|
||||
},
|
||||
"string-width": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz",
|
||||
|
@ -2799,6 +2937,11 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"text-hex": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz",
|
||||
"integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg=="
|
||||
},
|
||||
"text-table": {
|
||||
"version": "0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
|
||||
|
@ -2839,6 +2982,11 @@
|
|||
"is-number": "^7.0.0"
|
||||
}
|
||||
},
|
||||
"triple-beam": {
|
||||
"version": "1.3.0",
|
||||
"resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz",
|
||||
"integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw=="
|
||||
},
|
||||
"truncate-utf8-bytes": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz",
|
||||
|
@ -2975,6 +3123,48 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"winston": {
|
||||
"version": "3.3.2",
|
||||
"resolved": "https://registry.npmjs.org/winston/-/winston-3.3.2.tgz",
|
||||
"integrity": "sha512-vTOrUZlyQPS8VpCcQ1JT8BumDAUe4awCHZ9nmGgO7LqkV4atj0dKa5suA7Trf7QKtBszE2yUs9d8744Kz9j4jQ==",
|
||||
"requires": {
|
||||
"@dabh/diagnostics": "^2.0.2",
|
||||
"async": "^3.1.0",
|
||||
"is-stream": "^2.0.0",
|
||||
"logform": "^2.2.0",
|
||||
"one-time": "^1.0.0",
|
||||
"readable-stream": "^3.4.0",
|
||||
"stack-trace": "0.0.x",
|
||||
"triple-beam": "^1.3.0",
|
||||
"winston-transport": "^4.4.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"is-stream": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz",
|
||||
"integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw=="
|
||||
},
|
||||
"readable-stream": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
|
||||
"integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
|
||||
"requires": {
|
||||
"inherits": "^2.0.3",
|
||||
"string_decoder": "^1.1.1",
|
||||
"util-deprecate": "^1.0.1"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"winston-transport": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.4.0.tgz",
|
||||
"integrity": "sha512-Lc7/p3GtqtqPBYYtS6KCN3c77/2QCev51DvcJKbkFPQNoj1sinkGwLGFDxkXY9J6p9+EPnYs+D90uwbnaiURTw==",
|
||||
"requires": {
|
||||
"readable-stream": "^2.3.7",
|
||||
"triple-beam": "^1.2.0"
|
||||
}
|
||||
},
|
||||
"word-wrap": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
"type": "git",
|
||||
"url": "git://github.com/snobu/destreamer.git"
|
||||
},
|
||||
"version": "2.0.0",
|
||||
"version": "2.1.0",
|
||||
"description": "Save Microsoft Stream videos for offline enjoyment.",
|
||||
"main": "build/src/destreamer.js",
|
||||
"bin": "build/src/destreamer.js",
|
||||
|
@ -19,6 +19,7 @@
|
|||
"devDependencies": {
|
||||
"@types/mocha": "^7.0.2",
|
||||
"@types/puppeteer": "^1.20.4",
|
||||
"@types/readline-sync": "^1.4.3",
|
||||
"@types/tmp": "^0.1.0",
|
||||
"@types/yargs": "^15.0.3",
|
||||
"@typescript-eslint/eslint-plugin": "^2.25.0",
|
||||
|
@ -39,9 +40,11 @@
|
|||
"iso8601-duration": "^1.2.0",
|
||||
"jwt-decode": "^2.2.0",
|
||||
"puppeteer": "2.1.1",
|
||||
"readline-sync": "^1.4.10",
|
||||
"sanitize-filename": "^1.6.3",
|
||||
"terminal-image": "^1.0.1",
|
||||
"typescript": "^3.8.3",
|
||||
"winston": "^3.3.2",
|
||||
"yargs": "^15.0.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
import { logger } from './Logger';
|
||||
import { Session } from './Types';
|
||||
|
||||
import axios, { AxiosRequestConfig, AxiosResponse, AxiosInstance, AxiosError } from 'axios';
|
||||
import axiosRetry, { isNetworkOrIdempotentRequestError } from 'axios-retry';
|
||||
import { Session } from './Types';
|
||||
|
||||
|
||||
export class ApiClient {
|
||||
private static instance: ApiClient;
|
||||
|
@ -11,26 +14,30 @@ export class ApiClient {
|
|||
this.session = session;
|
||||
this.axiosInstance = axios.create({
|
||||
baseURL: session?.ApiGatewayUri,
|
||||
timeout: 7000,
|
||||
// timeout: 7000,
|
||||
headers: { 'User-Agent': 'destreamer/2.0 (Hammer of Dawn)' }
|
||||
});
|
||||
|
||||
axiosRetry(this.axiosInstance, {
|
||||
// The following option is not working.
|
||||
// We should open an issue on the relevant GitHub repo
|
||||
shouldResetTimeout: true,
|
||||
retries: 6,
|
||||
retryDelay: (retryCount) => {
|
||||
retryDelay: (retryCount: number) => {
|
||||
return retryCount * 2000;
|
||||
},
|
||||
retryCondition: (err: AxiosError) => {
|
||||
const retryCodes = [429, 500, 502, 503];
|
||||
const retryCodes: Array<number> = [429, 500, 502, 503];
|
||||
if (isNetworkOrIdempotentRequestError(err)) {
|
||||
console.warn(`${err}. Retrying request...`);
|
||||
logger.warn(`${err}. Retrying request...`);
|
||||
|
||||
return true;
|
||||
}
|
||||
console.warn(`Got HTTP ${err?.response?.status}. Retrying request...`);
|
||||
const condition = retryCodes.includes(err?.response?.status ?? 0);
|
||||
}
|
||||
logger.warn(`Got HTTP code ${err?.response?.status ?? undefined}. Retrying request...`);
|
||||
|
||||
return condition;
|
||||
const shouldRetry: boolean = retryCodes.includes(err?.response?.status ?? 0);
|
||||
|
||||
return shouldRetry;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -52,7 +59,7 @@ export class ApiClient {
|
|||
method: AxiosRequestConfig['method'] = 'get',
|
||||
payload?: any): Promise<AxiosResponse | undefined> {
|
||||
|
||||
const delimiter = path.split('?').length === 1 ? '?' : '&';
|
||||
const delimiter: '?' | '&' = path.split('?').length === 1 ? '?' : '&';
|
||||
|
||||
const headers: object = {
|
||||
'Authorization': 'Bearer ' + this.session?.AccessToken
|
||||
|
@ -74,7 +81,7 @@ export class ApiClient {
|
|||
method: AxiosRequestConfig['method'] = 'get',
|
||||
payload?: any,
|
||||
responseType: AxiosRequestConfig['responseType'] = 'json'): Promise<AxiosResponse | undefined> {
|
||||
|
||||
|
||||
const headers: object = {
|
||||
'Authorization': 'Bearer ' + this.session?.AccessToken
|
||||
};
|
||||
|
@ -87,5 +94,4 @@ export class ApiClient {
|
|||
responseType: responseType
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
}
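A minimal usage sketch for the refactored ApiClient singleton (not part of the diff). The `videos/<guid>` endpoint and the `name` field mirror the removed getVideoMetadata() code further down, but the helper itself is hypothetical.

```typescript
import { ApiClient } from './ApiClient';
import { Session } from './Types';

import { AxiosResponse } from 'axios';

// Hypothetical helper: fetch a video's title by GUID through the shared client.
async function getVideoTitle(guid: string, session: Session): Promise<string | undefined> {
    // getInstance() reuses the single axios instance configured with retries above
    const apiClient: ApiClient = ApiClient.getInstance(session);

    // callApi() prefixes the API gateway base URL and adds the bearer token
    const response: AxiosResponse | undefined = await apiClient.callApi(`videos/${guid}`, 'get');

    return response?.data['name'];
}
```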
|
||||
|
|
|
@ -1,37 +1,43 @@
|
|||
import { CLI_ERROR } from './Errors';
|
||||
import { CLI_ERROR, ERROR_CODE } from './Errors';
|
||||
import { checkOutDir } from './Utils';
|
||||
import { logger } from './Logger';
|
||||
|
||||
import yargs from 'yargs';
|
||||
import colors from 'colors';
|
||||
import fs from 'fs';
|
||||
import readlineSync from 'readline-sync';
|
||||
import yargs from 'yargs';
|
||||
|
||||
export const argv = yargs.options({
|
||||
|
||||
export const argv: any = yargs.options({
|
||||
username: {
|
||||
alias: 'u',
|
||||
type: 'string',
|
||||
describe: 'The username used to log into Microsoft Stream (enabling this will fill in the email field for you)',
|
||||
demandOption: false
|
||||
},
|
||||
videoUrls: {
|
||||
alias: 'i',
|
||||
describe: 'List of video urls',
|
||||
type: 'array',
|
||||
demandOption: false
|
||||
},
|
||||
videoUrlsFile: {
|
||||
inputFile: {
|
||||
alias: 'f',
|
||||
describe: 'Path to txt file containing the urls',
|
||||
type: 'string',
|
||||
demandOption: false
|
||||
},
|
||||
username: {
|
||||
alias: 'u',
|
||||
describe: 'Path to text file containing URLs and optionally outDirs. See the README for more on outDirs.',
|
||||
type: 'string',
|
||||
demandOption: false
|
||||
},
|
||||
outputDirectory: {
|
||||
alias: 'o',
|
||||
describe: 'The directory where destreamer will save your downloads [default: videos]',
|
||||
describe: 'The directory where destreamer will save your downloads',
|
||||
type: 'string',
|
||||
default: 'videos',
|
||||
demandOption: false
|
||||
},
|
||||
outputDirectories: {
|
||||
alias: 'O',
|
||||
describe: 'Path to a txt file containing one output directory per video',
|
||||
type: 'string',
|
||||
keepLoginCookies: {
|
||||
alias: 'k',
|
||||
describe: 'Let Chromium cache identity provider cookies so you can use "Remember me" during login',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
demandOption: false
|
||||
},
|
||||
noExperiments: {
|
||||
|
@ -55,6 +61,13 @@ export const argv = yargs.options({
|
|||
default: false,
|
||||
demandOption: false
|
||||
},
|
||||
closedCaptions: {
|
||||
alias: 'cc',
|
||||
describe: 'Check if closed captions are available and let the user choose which one to download (will not ask if only one available)',
|
||||
type: 'boolean',
|
||||
default: false,
|
||||
demandOption: false
|
||||
},
|
||||
noCleanup: {
|
||||
alias: 'nc',
|
||||
describe: 'Do not delete the downloaded video file when an FFmpeg error occurs',
|
||||
|
@ -87,147 +100,74 @@ export const argv = yargs.options({
|
|||
demandOption: false
|
||||
}
|
||||
})
|
||||
/**
|
||||
* Do our own argv magic before destreamer starts.
|
||||
* ORDER IS IMPORTANT!
|
||||
* Do not mess with this.
|
||||
*/
|
||||
.check(() => isShowHelpRequest())
|
||||
.check(argv => checkRequiredArgument(argv))
|
||||
.check(argv => checkVideoUrlsArgConflict(argv))
|
||||
.check(argv => checkOutputDirArgConflict(argv))
|
||||
.check(argv => checkVideoUrlsInput(argv))
|
||||
.check(argv => windowsFileExtensionBadBehaviorFix(argv))
|
||||
.check(argv => mergeVideoUrlsArguments(argv))
|
||||
.check(argv => mergeOutputDirArguments(argv))
|
||||
.wrap(120)
|
||||
.check(() => noArguments())
|
||||
.check((argv: any) => inputConflicts(argv.videoUrls, argv.inputFile))
|
||||
.check((argv: any) => {
|
||||
if (checkOutDir(argv.outputDirectory)) {
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
logger.error(CLI_ERROR.INVALID_OUTDIR);
|
||||
|
||||
throw new Error(' ');
|
||||
}
|
||||
})
|
||||
.argv;
|
||||
|
||||
function hasNoArgs() {
|
||||
return process.argv.length === 2;
|
||||
}
|
||||
|
||||
function isShowHelpRequest() {
|
||||
if (hasNoArgs()) {
|
||||
throw new Error(CLI_ERROR.GRACEFULLY_STOP);
|
||||
function noArguments(): boolean {
|
||||
// if only 2 args no other args (0: node path, 1: js script path)
|
||||
if (process.argv.length === 2) {
|
||||
logger.error(CLI_ERROR.MISSING_INPUT_ARG, {fatal: true});
|
||||
|
||||
// so that the output stays clear
|
||||
throw new Error(' ');
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function checkRequiredArgument(argv: any) {
|
||||
if (hasNoArgs()) {
|
||||
return true;
|
||||
|
||||
function inputConflicts(videoUrls: Array<string | number> | undefined,
|
||||
inputFile: string | undefined): boolean {
|
||||
// check if both inputs are declared
|
||||
if ((videoUrls !== undefined) && (inputFile !== undefined)) {
|
||||
logger.error(CLI_ERROR.INPUT_ARG_CONFLICT);
|
||||
|
||||
throw new Error(' ');
|
||||
}
|
||||
// check if no input is declared or if they are declared but empty
|
||||
else if (!(videoUrls || inputFile) || (videoUrls?.length === 0) || (inputFile?.length === 0)) {
|
||||
logger.error(CLI_ERROR.MISSING_INPUT_ARG);
|
||||
|
||||
if (!argv.videoUrls && !argv.videoUrlsFile) {
|
||||
throw new Error(colors.red(CLI_ERROR.MISSING_REQUIRED_ARG));
|
||||
throw new Error(' ');
|
||||
}
|
||||
else if (inputFile) {
|
||||
// check if inputFile doesn't end in '.txt'
|
||||
if (inputFile.substring(inputFile.length - 4) !== '.txt') {
|
||||
logger.error(CLI_ERROR.INPUTFILE_WRONG_EXTENSION);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function checkVideoUrlsArgConflict(argv: any) {
|
||||
if (hasNoArgs()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (argv.videoUrls && argv.videoUrlsFile) {
|
||||
throw new Error(colors.red(CLI_ERROR.VIDEOURLS_ARG_CONFLICT));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function checkOutputDirArgConflict(argv: any) {
|
||||
if (hasNoArgs()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (argv.outputDirectory && argv.outputDirectories) {
|
||||
throw new Error(colors.red(CLI_ERROR.OUTPUTDIR_ARG_CONFLICT));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
function checkVideoUrlsInput(argv: any) {
|
||||
if (hasNoArgs() || !argv.videoUrls) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!argv.videoUrls.length) {
|
||||
throw new Error(colors.red(CLI_ERROR.MISSING_REQUIRED_ARG));
|
||||
}
|
||||
|
||||
const t = argv.videoUrls[0] as string;
|
||||
if (t.substring(t.length-4) === '.txt') {
|
||||
throw new Error(colors.red(CLI_ERROR.FILE_INPUT_VIDEOURLS_ARG));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Users see 2 separate options, but we don't really care
|
||||
* cause both options have no difference in code.
|
||||
*
|
||||
* Optimize and make this transparent to destreamer
|
||||
*/
|
||||
function mergeVideoUrlsArguments(argv: any) {
|
||||
if (!argv.videoUrlsFile) {
|
||||
return true;
|
||||
}
|
||||
|
||||
argv.videoUrls = [argv.videoUrlsFile]; // noone will notice ;)
|
||||
|
||||
// these are not valid anymore
|
||||
delete argv.videoUrlsFile;
|
||||
delete argv.F;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Users see 2 separate options, but we don't really care
|
||||
* cause both options have no difference in code.
|
||||
*
|
||||
* Optimize and make this transparent to destreamer
|
||||
*/
|
||||
function mergeOutputDirArguments(argv: any) {
|
||||
if (!argv.outputDirectories && argv.outputDirectory) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!argv.outputDirectory && !argv.outputDirectories) {
|
||||
argv.outputDirectory = 'videos'; // default out dir
|
||||
}
|
||||
else if (argv.outputDirectories) {
|
||||
argv.outputDirectory = argv.outputDirectories;
|
||||
}
|
||||
|
||||
if (argv.outputDirectories) {
|
||||
// these are not valid anymore
|
||||
delete argv.outputDirectories;
|
||||
delete argv.O;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// yeah this is for windows, but lets check everyone, who knows...
|
||||
function windowsFileExtensionBadBehaviorFix(argv: any) {
|
||||
if (hasNoArgs() || !argv.videoUrlsFile || !argv.outputDirectories) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!fs.existsSync(argv.videoUrlsFile)) {
|
||||
if (fs.existsSync(argv.videoUrlsFile + '.txt')) {
|
||||
argv.videoUrlsFile += '.txt';
|
||||
throw new Error(' ');
|
||||
}
|
||||
else {
|
||||
throw new Error(colors.red(CLI_ERROR.INPUT_URLS_FILE_NOT_FOUND));
|
||||
// check if the inputFile exists
|
||||
else if (!fs.existsSync(inputFile)) {
|
||||
logger.error(CLI_ERROR.INPUTFILE_NOT_FOUND);
|
||||
|
||||
throw new Error(' ');
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
export function promptUser(choices: Array<string>): number {
|
||||
let index: number = readlineSync.keyInSelect(choices, 'Which resolution/format do you prefer?');
|
||||
|
||||
if (index === -1) {
|
||||
process.exit(ERROR_CODE.CANCELLED_USER_INPUT);
|
||||
}
|
||||
|
||||
return index;
|
||||
}
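A usage sketch for the new interactive prompt (not in the diff). The import path is an assumption, since the file header for this section is not visible in the excerpt; the call pattern matches the closed-captions/quality selection described in the commit message.

```typescript
// Assumed module path — the diff does not show this file's name.
import { promptUser } from './CommandLineParser';

// Hypothetical call site: let the user pick one of several caption tracks.
// readline-sync's keyInSelect() blocks until a key is pressed, and promptUser()
// exits the process with CANCELLED_USER_INPUT if the user aborts.
const captionLabels: Array<string> = ['English', 'Italiano'];
const chosen: number = promptUser(captionLabels);

console.info(`Downloading captions: ${captionLabels[chosen]}`);
```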
|
||||
|
|
|
@ -1,64 +1,47 @@
|
|||
interface IError {
|
||||
[key: number]: string
|
||||
}
|
||||
|
||||
export const enum ERROR_CODE {
|
||||
NO_ERROR,
|
||||
UNHANDLED_ERROR,
|
||||
MISSING_FFMPEG,
|
||||
ELEVATED_SHELL,
|
||||
INVALID_OUTPUT_DIR,
|
||||
INVALID_INPUT_URLS,
|
||||
OUTDIRS_URLS_MISMATCH,
|
||||
INVALID_VIDEO_ID,
|
||||
INVALID_VIDEO_GUID,
|
||||
CANCELLED_USER_INPUT,
|
||||
MISSING_FFMPEG,
|
||||
UNK_FFMPEG_ERROR,
|
||||
INVALID_VIDEO_GUID,
|
||||
NO_SESSION_INFO
|
||||
}
|
||||
|
||||
// TODO: create better errors descriptions
|
||||
export const Error: IError = {
|
||||
[ERROR_CODE.NO_ERROR]: 'Clean exit with code 0',
|
||||
|
||||
export const errors: {[key: number]: string} = {
|
||||
[ERROR_CODE.UNHANDLED_ERROR]: 'Unhandled error!\n' +
|
||||
'Timeout or fatal error, please check your downloads directory and try again',
|
||||
|
||||
[ERROR_CODE.ELEVATED_SHELL]: 'Destreamer cannot run in an elevated (Administrator/root) shell.\n' +
|
||||
'Please run in a regular, non-elevated window.',
|
||||
|
||||
[ERROR_CODE.INVALID_OUTPUT_DIR]: 'Unable to create output directory',
|
||||
[ERROR_CODE.CANCELLED_USER_INPUT]: 'Input was cancelled by user',
|
||||
|
||||
[ERROR_CODE.MISSING_FFMPEG]: 'FFmpeg is missing!\n' +
|
||||
'Destreamer requires a fairly recent release of FFmpeg to download videos',
|
||||
|
||||
[ERROR_CODE.UNK_FFMPEG_ERROR]: 'Unknown FFmpeg error',
|
||||
|
||||
[ERROR_CODE.INVALID_INPUT_URLS]: 'No valid URL from input',
|
||||
|
||||
[ERROR_CODE.OUTDIRS_URLS_MISMATCH]: 'Output directories and URLs mismatch!\n' +
|
||||
'You must input the same number of URLs and output directories',
|
||||
|
||||
[ERROR_CODE.INVALID_VIDEO_ID]: 'Unable to get video ID from URL',
|
||||
|
||||
[ERROR_CODE.INVALID_VIDEO_GUID]: 'Unable to get video GUID from URL',
|
||||
|
||||
[ERROR_CODE.NO_SESSION_INFO]: 'Could not evaluate sessionInfo on the page'
|
||||
};
|
||||
|
||||
|
||||
export const enum CLI_ERROR {
|
||||
GRACEFULLY_STOP = ' ', // gracefully stop execution, yargs way
|
||||
MISSING_INPUT_ARG = 'You must specify a URLs source. \n' +
|
||||
'Valid options are -i for one or more URLs separated by space or -f for input file. \n',
|
||||
|
||||
MISSING_REQUIRED_ARG = 'You must specify a URLs source.\n' +
|
||||
'Valid options are -i for one or more URLs separated by space or -f for URLs from file.',
|
||||
INPUT_ARG_CONFLICT = 'Too many URLs sources specified! \n' +
|
||||
'Please specify a single source, either -i or -f \n',
|
||||
|
||||
VIDEOURLS_ARG_CONFLICT = 'Too many URLs sources specified!\n' +
|
||||
'Please specify a single source, either -i or -f (URLs from file)',
|
||||
INPUTFILE_WRONG_EXTENSION = 'The specified inputFile has the wrong extension \n' +
|
||||
'Please make sure to use path/to/filename.txt when using the -f option \n',
|
||||
|
||||
OUTPUTDIR_ARG_CONFLICT = 'Too many output arguments specified!\n' +
|
||||
'Please specify a single output argument, either -o or --outputDirectories.',
|
||||
INPUTFILE_NOT_FOUND = 'The specified inputFile does not exist \n' +
|
||||
'Please check the filename and the path you provided \n',
|
||||
|
||||
FILE_INPUT_VIDEOURLS_ARG = 'Wrong input for option --videoUrls.\n' +
|
||||
'To read URLs from file, use --videoUrlsFile option.',
|
||||
|
||||
INPUT_URLS_FILE_NOT_FOUND = 'Input URL list file not found.'
|
||||
}
|
||||
INVALID_OUTDIR = 'Could not create the default/specified output directory \n' +
|
||||
'Please check directory and permissions and try again. \n'
|
||||
}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import { Error, ERROR_CODE } from './Errors';
|
||||
import { errors, ERROR_CODE } from './Errors';
|
||||
import { logger } from './Logger';
|
||||
|
||||
import colors from 'colors';
|
||||
|
||||
/**
|
||||
* This file contains global destreamer process events
|
||||
|
@ -9,20 +9,25 @@ import colors from 'colors';
|
|||
*
|
||||
* @note function is required for non-packaged destreamer, so we can't do better
|
||||
*/
|
||||
export function setProcessEvents() {
|
||||
export function setProcessEvents(): void {
|
||||
// set exit event first so that we can always print cute errors
|
||||
process.on('exit', (code) => {
|
||||
if (code == 0) {
|
||||
process.on('exit', (code: number) => {
|
||||
if (code === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const msg = code in Error ? `\n\n${Error[code]} \n` : `\n\nUnknown error: exit code ${code} \n`;
|
||||
const msg: string = (code in errors) ? `${errors[code]} \n` : `Unknown error: exit code ${code} \n`;
|
||||
|
||||
console.error(colors.bgRed(msg));
|
||||
logger.error({ message: msg, fatal: true });
|
||||
});
|
||||
|
||||
process.on('unhandledRejection', (reason) => {
|
||||
console.error(colors.red(reason as string));
|
||||
process.on('unhandledRejection', (reason: {} | null | undefined) => {
|
||||
if (reason instanceof Error) {
|
||||
logger.error({ message: (reason as Error) });
|
||||
process.exit(ERROR_CODE.UNHANDLED_ERROR);
|
||||
}
|
||||
|
||||
logger.error({ message: (reason as string) });
|
||||
process.exit(ERROR_CODE.UNHANDLED_ERROR);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
40 src/Logger.ts (new file)
|
@ -0,0 +1,40 @@
|
|||
import colors from 'colors';
|
||||
import winston from 'winston';
|
||||
|
||||
|
||||
export const logger: winston.Logger = winston.createLogger({
|
||||
level: 'info',
|
||||
transports: [
|
||||
new winston.transports.Console({
|
||||
format: winston.format.combine(
|
||||
winston.format.errors({ stack: true }),
|
||||
winston.format.timestamp({ format: 'YYYY-MM-DD hh:mm:ss' }),
|
||||
winston.format.printf(
|
||||
(item: winston.Logform.TransformableInfo) => customPrint(item)
|
||||
)
|
||||
)
|
||||
})
|
||||
]
|
||||
});
|
||||
|
||||
|
||||
function customPrint (info: winston.Logform.TransformableInfo): string {
|
||||
if (info.level === 'error') {
|
||||
if (info.fatal) {
|
||||
return colors.red('\n\n[FATAL ERROR] ') + (info.stack ?? info.message);
|
||||
}
|
||||
|
||||
return colors.red('\n[ERROR] ') + (info.stack ?? info.message) + '\n';
|
||||
}
|
||||
else if (info.level === 'warn') {
|
||||
return colors.yellow('\n[WARNING] ') + info.message;
|
||||
}
|
||||
else if (info.level === 'info') {
|
||||
return info.message;
|
||||
}
|
||||
else if (info.level === 'verbose') {
|
||||
return colors.cyan('\n[VERBOSE] ') + info.message;
|
||||
}
|
||||
|
||||
return `${info.level}: ${info.message} - ${info.timestamp}`;
|
||||
}
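A small sketch of how the new winston logger replaces the old console calls (not part of the commit). The `fatal` flag and the message shapes come from the diffs in this commit; raising the level for `--verbose` is an assumption about how destreamer wires it up.

```typescript
import { logger } from './Logger';

// Default level is 'info', so verbose output is suppressed until enabled.
logger.info('Using ffmpeg version 4.x');
logger.warn('Access token has expired!');
logger.error({ message: 'Unknown FFmpeg error', fatal: true });   // rendered as [FATAL ERROR]

// Assumption: the --verbose flag bumps the level so logger.verbose() lines show up.
logger.level = 'verbose';
logger.verbose('Parsed GUIDs: xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx');
```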
|
|
@ -1,61 +0,0 @@
|
|||
import { Metadata, Session } from './Types';
|
||||
import { forEachAsync } from './Utils';
|
||||
import { ApiClient } from './ApiClient';
|
||||
|
||||
import { parse } from 'iso8601-duration';
|
||||
|
||||
|
||||
function publishedDateToString(date: string) {
|
||||
const dateJs = new Date(date);
|
||||
const day = dateJs.getDate().toString().padStart(2, '0');
|
||||
const month = (dateJs.getMonth() + 1).toString(10).padStart(2, '0');
|
||||
const publishedDate = day + '-' + month + '-' + dateJs.getFullYear();
|
||||
|
||||
return publishedDate;
|
||||
}
|
||||
|
||||
function durationToTotalChunks(duration: string) {
|
||||
const durationObj = parse(duration);
|
||||
const hrs = durationObj['hours'] ?? 0;
|
||||
const mins = durationObj['minutes'] ?? 0;
|
||||
const secs = Math.ceil(durationObj['seconds'] ?? 0);
|
||||
|
||||
return (hrs * 60) + mins + (secs / 60);
|
||||
}
|
||||
|
||||
export async function getVideoMetadata(videoGuids: string[], session: Session): Promise<Metadata[]> {
|
||||
let metadata: Metadata[] = [];
|
||||
let title: string;
|
||||
let date: string;
|
||||
let totalChunks: number;
|
||||
let playbackUrl: string;
|
||||
let posterImage: string;
|
||||
|
||||
const apiClient = ApiClient.getInstance(session);
|
||||
|
||||
await forEachAsync(videoGuids, async (guid: string) => {
|
||||
let response = await apiClient.callApi('videos/' + guid, 'get');
|
||||
|
||||
title = response?.data['name'];
|
||||
playbackUrl = response?.data['playbackUrls']
|
||||
.filter((item: { [x: string]: string; }) =>
|
||||
item['mimeType'] == 'application/vnd.apple.mpegurl')
|
||||
.map((item: { [x: string]: string }) => {
|
||||
return item['playbackUrl'];
|
||||
})[0];
|
||||
|
||||
posterImage = response?.data['posterImage']['medium']['url'];
|
||||
date = publishedDateToString(response?.data['publishedDate']);
|
||||
totalChunks = durationToTotalChunks(response?.data.media['duration']);
|
||||
|
||||
metadata.push({
|
||||
date: date,
|
||||
totalChunks: totalChunks,
|
||||
title: title,
|
||||
playbackUrl: playbackUrl,
|
||||
posterImage: posterImage
|
||||
});
|
||||
});
|
||||
|
||||
return metadata;
|
||||
}
|
|
@ -1,19 +1,21 @@
|
|||
import path from 'path';
|
||||
import puppeteer from 'puppeteer';
|
||||
|
||||
|
||||
// Thanks pkg-puppeteer [ cleaned up version :) ]
|
||||
export function getPuppeteerChromiumPath() {
|
||||
const isPkg = __filename.includes('snapshot');
|
||||
const macOS_Linux_rex = /^.*?\/node_modules\/puppeteer\/\.local-chromium/;
|
||||
const win32_rex = /^.*?\\node_modules\\puppeteer\\\.local-chromium/;
|
||||
const replaceRegex = process.platform === 'win32' ? win32_rex : macOS_Linux_rex;
|
||||
export function getPuppeteerChromiumPath(): string {
|
||||
const isPkg: boolean = __filename.includes('snapshot');
|
||||
|
||||
const replaceRegex: RegExp = (process.platform === 'win32') ?
|
||||
new RegExp(/^.*?\\node_modules\\puppeteer\\\.local-chromium/) :
|
||||
new RegExp(/^.*?\/node_modules\/puppeteer\/\.local-chromium/);
|
||||
|
||||
if (!isPkg) {
|
||||
return puppeteer.executablePath();
|
||||
}
|
||||
|
||||
const browserPath = puppeteer.executablePath()
|
||||
const browserPath: string = puppeteer.executablePath()
|
||||
.replace(replaceRegex, path.join(path.dirname(process.execPath), 'chromium'));
|
||||
|
||||
return browserPath;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,10 +1,15 @@
|
|||
import { ApiClient } from './ApiClient';
|
||||
import { Session } from './Types';
|
||||
|
||||
import terminalImage from 'terminal-image';
|
||||
import { AxiosResponse } from 'axios';
|
||||
|
||||
|
||||
export async function drawThumbnail(posterImage: string, session: Session): Promise<void> {
|
||||
const apiClient = ApiClient.getInstance(session);
|
||||
let thumbnail = await apiClient.callUrl(posterImage, 'get', null, 'arraybuffer');
|
||||
console.log(await terminalImage.buffer(thumbnail?.data, { width: 70 } ));
|
||||
const apiClient: ApiClient = ApiClient.getInstance(session);
|
||||
|
||||
let thumbnail: Buffer = await apiClient.callUrl(posterImage, 'get', null, 'arraybuffer')
|
||||
.then((response: AxiosResponse<any> | undefined) => response?.data);
|
||||
|
||||
console.log(await terminalImage.buffer(thumbnail, { width: 70 } ));
|
||||
}
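Usage sketch (not in the diff). The module path is assumed because the file header is missing here; the signature and the posterImageUrl field match the code above and the Video type shown further down.

```typescript
import { drawThumbnail } from './Thumbnail';   // assumed path
import { Session, Video } from './Types';

// Renders the poster image in the terminal, typically skipped with --noExperiments.
async function preview(video: Video, session: Session): Promise<void> {
    await drawThumbnail(video.posterImageUrl, session);
}
```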
|
||||
|
|
|
@ -1,56 +1,104 @@
|
|||
import * as fs from 'fs';
|
||||
import { chromeCacheFolder } from './destreamer';
|
||||
import { ERROR_CODE } from './Errors';
|
||||
import { logger } from './Logger';
|
||||
import { getPuppeteerChromiumPath } from './PuppeteerHelper';
|
||||
import { Session } from './Types';
|
||||
import { bgGreen, bgYellow, green } from 'colors';
|
||||
|
||||
import fs from 'fs';
|
||||
import jwtDecode from 'jwt-decode';
|
||||
import puppeteer from 'puppeteer';
|
||||
|
||||
|
||||
export class TokenCache {
|
||||
private tokenCacheFile: string = '.token_cache';
|
||||
private tokenCacheFile = '.token_cache';
|
||||
|
||||
public Read(): Session | null {
|
||||
let j = null;
|
||||
if (!fs.existsSync(this.tokenCacheFile)) {
|
||||
console.warn(bgYellow.black(`${this.tokenCacheFile} not found.\n`));
|
||||
logger.warn(`${this.tokenCacheFile} not found. \n`);
|
||||
|
||||
return null;
|
||||
}
|
||||
let f = fs.readFileSync(this.tokenCacheFile, 'utf8');
|
||||
j = JSON.parse(f);
|
||||
|
||||
interface Jwt {
|
||||
let session: Session = JSON.parse(fs.readFileSync(this.tokenCacheFile, 'utf8'));
|
||||
|
||||
type Jwt = {
|
||||
[key: string]: any
|
||||
}
|
||||
const decodedJwt: Jwt = jwtDecode(session.AccessToken);
|
||||
|
||||
const decodedJwt: Jwt = jwtDecode(j.AccessToken);
|
||||
let now: number = Math.floor(Date.now() / 1000);
|
||||
let exp: number = decodedJwt['exp'];
|
||||
let timeLeft: number = exp - now;
|
||||
|
||||
let now = Math.floor(Date.now() / 1000);
|
||||
let exp = decodedJwt['exp'];
|
||||
let timeLeft = exp - now;
|
||||
|
||||
let timeLeftInMinutes = Math.floor(timeLeft / 60);
|
||||
if (timeLeft < 120) {
|
||||
console.warn(bgYellow.black('\nAccess token has expired.'));
|
||||
logger.warn('Access token has expired! \n');
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
console.info(bgGreen.black(`\nAccess token still good for ${timeLeftInMinutes} minutes.\n`));
|
||||
|
||||
let session: Session = {
|
||||
AccessToken: j.AccessToken,
|
||||
ApiGatewayUri: j.ApiGatewayUri,
|
||||
ApiGatewayVersion: j.ApiGatewayVersion
|
||||
};
|
||||
logger.info(`Access token still good for ${Math.floor(timeLeft / 60)} minutes.\n`.green);
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
public Write(session: Session): void {
|
||||
let s = JSON.stringify(session, null, 4);
|
||||
let s: string = JSON.stringify(session, null, 4);
|
||||
fs.writeFile('.token_cache', s, (err: any) => {
|
||||
if (err) {
|
||||
return console.error(err);
|
||||
return logger.error(err);
|
||||
}
|
||||
console.info(green('Fresh access token dropped into .token_cache'));
|
||||
logger.info('Fresh access token dropped into .token_cache \n'.green);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
export async function refreshSession(): Promise<Session> {
|
||||
const url = 'https://web.microsoftstream.com';
|
||||
|
||||
const browser: puppeteer.Browser = await puppeteer.launch({
|
||||
executablePath: getPuppeteerChromiumPath(),
|
||||
headless: false, // NEVER TRUE OR IT DOES NOT WORK
|
||||
userDataDir: chromeCacheFolder,
|
||||
args: [
|
||||
'--disable-dev-shm-usage',
|
||||
'--fast-start',
|
||||
'--no-sandbox'
|
||||
]
|
||||
});
|
||||
|
||||
const page: puppeteer.Page = (await browser.pages())[0];
|
||||
await page.goto(url, { waitUntil: 'load' });
|
||||
|
||||
await browser.waitForTarget((target: puppeteer.Target) => target.url().includes(url), { timeout: 30000 });
|
||||
|
||||
let session: Session | null = null;
|
||||
let tries = 1;
|
||||
|
||||
while (!session) {
|
||||
try {
|
||||
let sessionInfo: any;
|
||||
session = await page.evaluate(
|
||||
() => {
|
||||
return {
|
||||
AccessToken: sessionInfo.AccessToken,
|
||||
ApiGatewayUri: sessionInfo.ApiGatewayUri,
|
||||
ApiGatewayVersion: sessionInfo.ApiGatewayVersion
|
||||
};
|
||||
}
|
||||
);
|
||||
}
|
||||
catch (error) {
|
||||
if (tries > 5) {
|
||||
process.exit(ERROR_CODE.NO_SESSION_INFO);
|
||||
}
|
||||
|
||||
session = null;
|
||||
tries++;
|
||||
await page.waitFor(3000);
|
||||
}
|
||||
}
|
||||
browser.close();
|
||||
|
||||
return session;
|
||||
}
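Not shown in the diff — a sketch of the session flow implied by the commit message ("Session is now refreshed if the token expires"). The import path is an assumption; Read(), Write() and refreshSession() are used exactly as declared above.

```typescript
import { TokenCache, refreshSession } from './TokenCache';   // assumed path
import { Session } from './Types';

async function getSession(): Promise<Session> {
    const tokenCache: TokenCache = new TokenCache();

    // Read() returns null when .token_cache is missing or the token expires in under 2 minutes
    let session: Session | null = tokenCache.Read();

    if (!session) {
        // Opens Chromium (headful, with the cached identity provider cookies) and scrapes sessionInfo
        session = await refreshSession();
        tokenCache.Write(session);
    }

    return session;
}
```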
|
||||
|
|
11 src/Types.ts
|
@ -4,10 +4,13 @@ export type Session = {
|
|||
ApiGatewayVersion: string;
|
||||
}
|
||||
|
||||
export type Metadata = {
|
||||
|
||||
export type Video = {
|
||||
date: string;
|
||||
totalChunks: number; // Abstraction of FFmpeg timemark
|
||||
title: string;
|
||||
outPath: string;
|
||||
totalChunks: number; // Abstraction of FFmpeg timemark
|
||||
playbackUrl: string;
|
||||
posterImage: string;
|
||||
}
|
||||
posterImageUrl: string;
|
||||
captionsUrl?: string
|
||||
}
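For reference, a literal that satisfies the renamed Video type (placeholder values only, not taken from the commit):

```typescript
import { Video } from './Types';

const example: Video = {
    date: '29-06-2020',
    title: 'Lecture 1',
    outPath: 'videos/Lecture 1.mkv',
    totalChunks: 90,                                    // abstraction of the FFmpeg timemark
    playbackUrl: 'https://example.invalid/manifest',    // placeholder
    posterImageUrl: 'https://example.invalid/poster',   // placeholder
    captionsUrl: undefined                              // set only when --closedCaptions finds a track
};
```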
|
||||
|
|
281 src/Utils.ts
|
@ -1,152 +1,193 @@
|
|||
import { ApiClient } from './ApiClient';
|
||||
import { ERROR_CODE } from './Errors';
|
||||
import { logger } from './Logger';
|
||||
import { Session } from './Types';
|
||||
|
||||
import { AxiosResponse } from 'axios';
|
||||
import { execSync } from 'child_process';
|
||||
import colors from 'colors';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
function sanitizeUrls(urls: string[]) {
|
||||
// eslint-disable-next-line
|
||||
const rex = new RegExp(/(?:https:\/\/)?.*\/video\/[a-z0-9]{8}-(?:[a-z0-9]{4}\-){3}[a-z0-9]{12}$/, 'i');
|
||||
const sanitized: string[] = [];
|
||||
|
||||
for (let i = 0, l = urls.length; i < l; ++i) {
|
||||
let url = urls[i].split('?')[0];
|
||||
async function extractGuids(url: string, client: ApiClient): Promise<Array<string> | null> {
|
||||
|
||||
if (!rex.test(url)) {
|
||||
if (url !== '') {
|
||||
console.warn(colors.yellow('Invalid URL at line ' + (i + 1) + ', skip..'));
|
||||
}
|
||||
const videoRegex = new RegExp(/https:\/\/.*\/video\/(\w{8}-(?:\w{4}-){3}\w{12})/);
|
||||
const groupRegex = new RegExp(/https:\/\/.*\/group\/(\w{8}-(?:\w{4}-){3}\w{12})/);
|
||||
|
||||
const videoMatch: RegExpExecArray | null = videoRegex.exec(url);
|
||||
const groupMatch: RegExpExecArray | null = groupRegex.exec(url);
|
||||
|
||||
if (videoMatch) {
|
||||
return [videoMatch[1]];
|
||||
}
|
||||
else if (groupMatch) {
|
||||
const videoNumber: number = await client.callApi(`groups/${groupMatch[1]}`, 'get')
|
||||
.then((response: AxiosResponse<any> | undefined) => response?.data.metrics.videos);
|
||||
|
||||
let result: Array<string> = await client.callApi(`groups/${groupMatch[1]}/videos?$top=${videoNumber}&$orderby=publishedDate asc`, 'get')
|
||||
.then((response: AxiosResponse<any> | undefined) => response?.data.value.map((item: any) => item.id));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse the list of URLs given by the user via console input.
|
||||
* They can either be video urls or group urls, in which case the guids
|
||||
* will be added from oldest to newest.
|
||||
*
|
||||
* @param {Array<string>} urlList list of links to parse
* @param {string} defaultOutDir the directory used to save the videos
|
||||
* @param {Session} session used to call the API to get the GUIDs from group links
|
||||
*
|
||||
* @returns Array of 2 elements, 1st one being the GUIDs array, 2nd one the output directories array
|
||||
*/
|
||||
export async function parseCLIinput(urlList: Array<string>, defaultOutDir: string,
|
||||
session: Session): Promise<Array<Array<string>>> {
|
||||
|
||||
const apiClient: ApiClient = ApiClient.getInstance(session);
|
||||
let guidList: Array<string> = [];
|
||||
|
||||
for (const url of urlList) {
|
||||
const guids: Array<string> | null = await extractGuids(url, apiClient);
|
||||
|
||||
if (guids) {
|
||||
guidList.push(...guids);
|
||||
}
|
||||
else {
|
||||
logger.warn(`Invalid url '${url}', skipping..`);
|
||||
}
|
||||
}
|
||||
|
||||
const outDirList: Array<string> = Array(guidList.length).fill(defaultOutDir);
|
||||
|
||||
return [guidList, outDirList];
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Parse the input text file.
|
||||
* The urls in the file can either be video urls or group urls, in which case the guids
|
||||
* will be added from oldest to newest.
|
||||
*
|
||||
* @param {string} inputFile path to the text file
|
||||
* @param {string} defaultOutDir the default/fallback directory used to save the videos
|
||||
* @param {Session} session used to call the API to get the GUIDs from group links
|
||||
*
|
||||
* @returns Array of 2 elements, 1st one being the GUIDs array, 2nd one the output directories array
|
||||
*/
|
||||
export async function parseInputFile(inputFile: string, defaultOutDir: string,
|
||||
session: Session): Promise<Array<Array<string>>> {
|
||||
// rawContent is a list of each line of the file
|
||||
const rawContent: Array<string> = fs.readFileSync(inputFile).toString()
|
||||
.split(/\r?\n/);
|
||||
const apiClient: ApiClient = ApiClient.getInstance(session);
|
||||
|
||||
let guidList: Array<string> = [];
|
||||
let outDirList: Array<string> = [];
|
||||
// if the last line was an url set this
|
||||
let foundUrl = false;
|
||||
|
||||
for (let i = 0; i < rawContent.length; i++) {
|
||||
const line: string = rawContent[i];
|
||||
|
||||
// filter out lines with no content
|
||||
if (!line.match(/\S/)) {
|
||||
logger.warn(`Line ${i + 1} is empty, skipping..`);
|
||||
continue;
|
||||
}
|
||||
// parse if line is option
|
||||
else if (line.includes('-dir')) {
|
||||
if (foundUrl) {
|
||||
let outDir: string | null = parseOption('-dir', line);
|
||||
|
||||
if (url.substring(0, 8) !== 'https://') {
|
||||
url = 'https://' + url;
|
||||
}
|
||||
if (outDir && checkOutDir(outDir)) {
|
||||
outDirList.push(...Array(guidList.length - outDirList.length)
|
||||
.fill(outDir));
|
||||
}
|
||||
else {
|
||||
outDirList.push(...Array(guidList.length - outDirList.length)
|
||||
.fill(defaultOutDir));
|
||||
}
|
||||
|
||||
sanitized.push(url);
|
||||
}
|
||||
|
||||
if (!sanitized.length) {
|
||||
process.exit(ERROR_CODE.INVALID_INPUT_URLS);
|
||||
}
|
||||
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
function sanitizeOutDirsList(dirsList: string[]) {
|
||||
const sanitized: string[] = [];
|
||||
|
||||
dirsList.forEach(dir => {
|
||||
if (dir !== '') {
|
||||
sanitized.push(dir);
|
||||
}
|
||||
});
|
||||
|
||||
return sanitized;
|
||||
}
|
||||
|
||||
function readFileToArray(path: string) {
|
||||
return fs.readFileSync(path).toString('utf-8').split(/[\r\n]/);
|
||||
}
|
||||
|
||||
export async function forEachAsync(array: any, callback: any) {
|
||||
for (let i = 0, l = array.length; i < l; ++i) {
|
||||
await callback(array[i], i, array);
|
||||
}
|
||||
}
|
||||
|
||||
export function parseVideoUrls(videoUrls: any) {
|
||||
let input = videoUrls[0] as string;
|
||||
const isPath = input.substring(input.length - 4) === '.txt';
|
||||
let urls: string[];
|
||||
|
||||
if (isPath) {
|
||||
urls = readFileToArray(input);
|
||||
}
|
||||
else {
|
||||
urls = videoUrls as string[];
|
||||
}
|
||||
|
||||
return sanitizeUrls(urls);
|
||||
}
|
||||
|
||||
export function getOutputDirectoriesList(outDirArg: string) {
|
||||
const isList = outDirArg.substring(outDirArg.length - 4) === '.txt';
|
||||
let dirsList: string[];
|
||||
|
||||
if (isList) {
|
||||
dirsList = sanitizeOutDirsList(readFileToArray(outDirArg));
|
||||
}
|
||||
else {
|
||||
dirsList = [outDirArg];
|
||||
}
|
||||
|
||||
return dirsList;
|
||||
}
|
||||
|
||||
export function makeOutputDirectories(dirsList: string[]) {
|
||||
dirsList.forEach(dir => {
|
||||
if (!fs.existsSync(dir)) {
|
||||
console.info(colors.yellow('Creating output directory:'));
|
||||
console.info(colors.green(dir) + '\n');
|
||||
|
||||
try {
|
||||
fs.mkdirSync(dir, { recursive: true });
|
||||
foundUrl = false;
|
||||
continue;
|
||||
}
|
||||
catch (e) {
|
||||
process.exit(ERROR_CODE.INVALID_OUTPUT_DIR);
|
||||
else {
|
||||
logger.warn(`Found options without preceding url at line ${i + 1}, skipping..`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function checkOutDirsUrlsMismatch(dirsList: string[], urlsList: string[]) {
|
||||
const dirsListL = dirsList.length;
|
||||
const urlsListL = urlsList.length;
|
||||
/* now the line is neither empty nor an option line.
   If foundUrl is still true, the last line didn't have a directory option,
   so we still need to add the default outDir to outDirList */
|
||||
if (foundUrl) {
|
||||
outDirList.push(...Array(guidList.length - outDirList.length)
|
||||
.fill(defaultOutDir));
|
||||
}
|
||||
|
||||
// single out dir, treat this as the chosen one for all
|
||||
if (dirsListL == 1) {
|
||||
return;
|
||||
const guids: Array<string> | null = await extractGuids(line, apiClient);
|
||||
|
||||
if (guids) {
|
||||
guidList.push(...guids);
|
||||
foundUrl = true;
|
||||
}
|
||||
else {
|
||||
logger.warn(`Invalid url at line ${i + 1}, skipping..`);
|
||||
}
|
||||
}
|
||||
else if (dirsListL != urlsListL) {
|
||||
process.exit(ERROR_CODE.OUTDIRS_URLS_MISMATCH);
|
||||

    return [guidList, outDirList];
}
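
// A rough usage sketch for the parser above (file name, URLs and directories are invented examples):
//
//   videos.txt
//     https://web.microsoftstream.com/video/xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx
//      -dir = "lectures"
//     https://web.microsoftstream.com/video/xxxxxxxx-bbbb-xxxx-xxxx-xxxxxxxxxxxx
//
//   const [guids, outDirs] = await parseInputFile('videos.txt', 'videos', session);
//   guids   -> ['xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx', 'xxxxxxxx-bbbb-xxxx-xxxx-xxxxxxxxxxxx']
//   outDirs -> ['lectures', 'videos']   // the second URL falls back to the default outDir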


// This leaves us the option to add more options (badum tss) _Luca
function parseOption(optionSyntax: string, item: string): string | null {
    const match: RegExpMatchArray | null = item.match(
        RegExp(`^\\s*${optionSyntax}\\s?=\\s?['"](.*)['"]`)
    );

    return match ? match[1] : null;
}
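
// A quick sketch of what the regex above accepts (sample lines are invented):
//   parseOption('-dir', ' -dir = "C:/my videos"')  ->  'C:/my videos'
//   parseOption('-dir', " -dir ='output/math'")    ->  'output/math'
//   parseOption('-dir', 'just some text')          ->  null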


export function checkOutDir(directory: string): boolean {
    if (!fs.existsSync(directory)) {
        try {
            fs.mkdirSync(directory);
            logger.info('\nCreated directory: '.yellow + directory);
        }
        catch (e) {
            logger.warn('Cannot create directory: ' + directory +
                '\nFalling back to default directory..');

            return false;
        }
    }

    return true;
}

export function sleep(ms: number) {
    return new Promise(resolve => setTimeout(resolve, ms));
}
|
||||
|
||||
export function checkRequirements() {
|
||||
export function checkRequirements(): void {
|
||||
try {
|
||||
const ffmpegVer = execSync('ffmpeg -version').toString().split('\n')[0];
|
||||
console.info(colors.green(`Using ${ffmpegVer}\n`));
|
||||
|
||||
const ffmpegVer: string = execSync('ffmpeg -version').toString().split('\n')[0];
|
||||
logger.info(`Using ${ffmpegVer}\n`);
|
||||
}
|
||||
catch (e) {
|
||||
process.exit(ERROR_CODE.MISSING_FFMPEG);
|
||||
}
|
||||
}
|
||||
|
||||
export function makeUniqueTitle(title: string, outDir: string, skip?: boolean, format?: string) {
|
||||
let ntitle = title;
|
||||
let k = 0;
|
||||
|
||||
while (!skip && fs.existsSync(outDir + path.sep + ntitle + '.' + format)) {
|
||||
ntitle = title + ' - ' + (++k).toString();
|
||||
}
|
||||
export function ffmpegTimemarkToChunk(timemark: string): number {
|
||||
const timeVals: Array<string> = timemark.split(':');
|
||||
const hrs: number = parseInt(timeVals[0]);
|
||||
const mins: number = parseInt(timeVals[1]);
|
||||
const secs: number = parseInt(timeVals[2]);
|
||||
|
||||
return ntitle;
|
||||
}
|
||||
|
||||
export function ffmpegTimemarkToChunk(timemark: string) {
|
||||
const timeVals: string[] = timemark.split(':');
|
||||
const hrs = parseInt(timeVals[0]);
|
||||
const mins = parseInt(timeVals[1]);
|
||||
const secs = parseInt(timeVals[2]);
|
||||
const chunk = (hrs * 60) + mins + (secs / 60);
|
||||
|
||||
return chunk;
|
||||
return (hrs * 60) + mins + (secs / 60);
|
||||
}
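
// Worked example (timemarks invented): a "chunk" is roughly one minute of video, so
//   ffmpegTimemarkToChunk('01:30:30.00')  ->  (1 * 60) + 30 + (30 / 60)  =  90.5
//   ffmpegTimemarkToChunk('00:05:00.00')  ->  5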

106 src/VideoUtils.ts Normal file
@@ -0,0 +1,106 @@
import { ApiClient } from './ApiClient';
import { promptUser } from './CommandLineParser';
import { logger } from './Logger';
import { Video, Session } from './Types';

import { AxiosResponse } from 'axios';
import fs from 'fs';
import { parse } from 'iso8601-duration';
import path from 'path';
import sanitize from 'sanitize-filename';


function publishedDateToString(date: string): string {
    const dateJs: Date = new Date(date);
    const day: string = dateJs.getDate().toString().padStart(2, '0');
    const month: string = (dateJs.getMonth() + 1).toString(10).padStart(2, '0');

    return `${dateJs.getFullYear()}-${month}-${day}`;
}
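
// For instance (sample timestamp invented, result shown for a UTC-aligned local clock,
// since the conversion uses local time):
//   publishedDateToString('2020-04-03T09:05:00Z')  ->  '2020-04-03'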


function durationToTotalChunks(duration: string): number {
    const durationObj: any = parse(duration);
    const hrs: number = durationObj.hours ?? 0;
    const mins: number = durationObj.minutes ?? 0;
    const secs: number = Math.ceil(durationObj.seconds ?? 0);

    return (hrs * 60) + mins + (secs / 60);
}
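
// Worked example (duration invented): Stream reports ISO 8601 durations, so
//   parse('PT1H25M30S')                  ->  { hours: 1, minutes: 25, seconds: 30, ... }
//   durationToTotalChunks('PT1H25M30S')  ->  (1 * 60) + 25 + (30 / 60)  =  85.5
// which matches the "chunk ~= one minute" unit used by ffmpegTimemarkToChunk.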


export async function getVideoInfo(videoGuids: Array<string>, session: Session, subtitles?: boolean): Promise<Array<Video>> {
    let metadata: Array<Video> = [];
    let title: string;
    let date: string;
    let totalChunks: number;
    let playbackUrl: string;
    let posterImageUrl: string;
    let captionsUrl: string | undefined;

    const apiClient: ApiClient = ApiClient.getInstance(session);

    for (const GUID of videoGuids) {
        let response: AxiosResponse<any> | undefined = await apiClient.callApi('videos/' + GUID, 'get');

        title = sanitize(response?.data['name']);
        playbackUrl = response?.data['playbackUrls']
            .filter((item: { [x: string]: string; }) =>
                item['mimeType'] == 'application/vnd.apple.mpegurl')
            .map((item: { [x: string]: string }) => {
                return item['playbackUrl'];
            })[0];

        posterImageUrl = response?.data['posterImage']['medium']['url'];
        date = publishedDateToString(response?.data['publishedDate']);
        totalChunks = durationToTotalChunks(response?.data.media['duration']);

        if (subtitles) {
            let captions: AxiosResponse<any> | undefined = await apiClient.callApi(`videos/${GUID}/texttracks`, 'get');

            if (!captions?.data.value.length) {
                captionsUrl = undefined;
            }
            else if (captions?.data.value.length === 1) {
                logger.info(`Found subtitles for ${title}. \n`);
                captionsUrl = captions?.data.value.pop().url;
            }
            else {
                const index: number = promptUser(captions.data.value.map((item: { language: string; autoGenerated: string; }) => {
                    return `[${item.language}] autogenerated: ${item.autoGenerated}`;
                }));
                captionsUrl = captions.data.value[index].url;
            }
        }

        metadata.push({
            date: date,
            totalChunks: totalChunks,
            title: title,
            outPath: '',
            playbackUrl: playbackUrl,
            posterImageUrl: posterImageUrl,
            captionsUrl: captionsUrl
        });
    }

    return metadata;
}
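
// Roughly, each element pushed above ends up shaped like this (all values are placeholders):
//   {
//       date: '2020-05-01',
//       totalChunks: 85.5,
//       title: 'Some lecture',
//       outPath: '',                      // filled in later by createUniquePath()
//       playbackUrl: 'https://example.invalid/playlist.m3u8',
//       posterImageUrl: 'https://example.invalid/poster.jpg',
//       captionsUrl: undefined            // only set when subtitles were requested and found
//   }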


export function createUniquePath(videos: Array<Video>, outDirs: Array<string>, format: string, skip?: boolean): Array<Video> {

    videos.forEach((video: Video, index: number) => {
        let title = `${video.title} - ${video.date}`;
        let i = 0;

        while (!skip && fs.existsSync(path.join(outDirs[index], title + '.' + format))) {
            title = `${video.title} - ${video.date}_${++i}`;
        }

        video.outPath = path.join(outDirs[index], title + '.' + format);
    });

    return videos;
}
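
// Putting the two together, the caller is expected to do roughly this (it mirrors the call in the
// main download routine; variable names here are illustrative):
//   const videos: Array<Video> = createUniquePath(
//       await getVideoInfo(videoGUIDs, session, argv.closedCaptions),
//       outputDirectories, argv.format, argv.skip
//   );
//   videos[0].outPath  ->  e.g. 'videos/Some lecture - 2020-05-01.mkv'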

@@ -1,31 +1,32 @@
import {
|
||||
sleep, parseVideoUrls, checkRequirements, makeUniqueTitle, ffmpegTimemarkToChunk,
|
||||
makeOutputDirectories, getOutputDirectoriesList, checkOutDirsUrlsMismatch
|
||||
} from './Utils';
|
||||
import { getPuppeteerChromiumPath } from './PuppeteerHelper';
|
||||
import { setProcessEvents } from './Events';
|
||||
import { ERROR_CODE } from './Errors';
|
||||
import { TokenCache } from './TokenCache';
|
||||
import { getVideoMetadata } from './Metadata';
|
||||
import { Metadata, Session } from './Types';
|
||||
import { drawThumbnail } from './Thumbnail';
|
||||
import { argv } from './CommandLineParser';
|
||||
import { ERROR_CODE } from './Errors';
|
||||
import { setProcessEvents } from './Events';
|
||||
import { logger } from './Logger';
|
||||
import { getPuppeteerChromiumPath } from './PuppeteerHelper';
|
||||
import { drawThumbnail } from './Thumbnail';
|
||||
import { TokenCache, refreshSession } from './TokenCache';
|
||||
import { Video, Session } from './Types';
|
||||
import { checkRequirements, ffmpegTimemarkToChunk, parseInputFile, parseCLIinput} from './Utils';
|
||||
import { getVideoInfo, createUniquePath } from './VideoUtils';
|
||||
|
||||
import puppeteer from 'puppeteer';
|
||||
import isElevated from 'is-elevated';
|
||||
import colors from 'colors';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { URL } from 'url';
|
||||
import sanitize from 'sanitize-filename';
|
||||
import cliProgress from 'cli-progress';
|
||||
import fs from 'fs';
|
||||
import isElevated from 'is-elevated';
|
||||
import puppeteer from 'puppeteer';
|
||||
|
||||
|
||||
const { FFmpegCommand, FFmpegInput, FFmpegOutput } = require('@tedconf/fessonia')();
|
||||
const tokenCache = new TokenCache();
|
||||
const tokenCache: TokenCache = new TokenCache();
|
||||
export const chromeCacheFolder = '.chrome_data';
|
||||
|
||||
async function init() {
|
||||
|
||||
async function init(): Promise<void> {
|
||||
setProcessEvents(); // must be first!
|
||||
|
||||
if (argv.verbose) {
|
||||
logger.level = 'verbose';
|
||||
}
|
||||
|
||||
if (await isElevated()) {
|
||||
process.exit(ERROR_CODE.ELEVATED_SHELL);
|
||||
}
|
||||
|
@@ -33,53 +34,58 @@ async function init() {
|
|||
checkRequirements();
|
||||
|
||||
if (argv.username) {
|
||||
console.info('Username: %s', argv.username);
|
||||
logger.info(`Username: ${argv.username}`);
|
||||
}
|
||||
|
||||
if (argv.simulate) {
|
||||
console.info(colors.yellow('Simulate mode, there will be no video download.\n'));
|
||||
}
|
||||
|
||||
if (argv.verbose) {
|
||||
console.info('Video URLs:');
|
||||
console.info(argv.videoUrls);
|
||||
logger.warn('Simulate mode, there will be no video downloaded. \n');
|
||||
}
|
||||
}
|
||||
|
||||
async function DoInteractiveLogin(url: string, username?: string): Promise<Session> {
|
||||
const videoId = url.split('/').pop() ?? process.exit(ERROR_CODE.INVALID_VIDEO_ID);
|
||||
|
||||
console.log('Launching headless Chrome to perform the OpenID Connect dance...');
|
||||
const browser = await puppeteer.launch({
|
||||
async function DoInteractiveLogin(url: string, username?: string): Promise<Session> {
|
||||
const videoId: string = url.split('/').pop() ?? process.exit(ERROR_CODE.INVALID_VIDEO_GUID);
|
||||
|
||||
logger.info('Launching headless Chrome to perform the OpenID Connect dance...');
|
||||
|
||||
const browser: puppeteer.Browser = await puppeteer.launch({
|
||||
executablePath: getPuppeteerChromiumPath(),
|
||||
headless: false,
|
||||
userDataDir: (argv.keepLoginCookies) ? chromeCacheFolder : undefined,
|
||||
args: [
|
||||
'--disable-dev-shm-usage',
|
||||
'--fast-start',
|
||||
'--no-sandbox'
|
||||
]
|
||||
});
|
||||
const page = (await browser.pages())[0];
|
||||
console.log('Navigating to login page...');
|
||||
const page: puppeteer.Page = (await browser.pages())[0];
|
||||
|
||||
logger.info('Navigating to login page...');
|
||||
await page.goto(url, { waitUntil: 'load' });
|
||||
|
||||
if (username) {
|
||||
await page.waitForSelector('input[type="email"]');
|
||||
await page.keyboard.type(username);
|
||||
await page.click('input[type="submit"]');
|
||||
try {
|
||||
if (username) {
|
||||
await page.waitForSelector('input[type="email"]', {timeout: 3000});
|
||||
await page.keyboard.type(username);
|
||||
await page.click('input[type="submit"]');
|
||||
}
|
||||
else {
|
||||
/* If a username was not provided we let the user take actions that
|
||||
lead up to the video page. */
|
||||
}
|
||||
}
|
||||
else {
|
||||
// If a username was not provided we let the user take actions that
|
||||
// lead up to the video page.
|
||||
catch (e) {
|
||||
/* If there is no email input selector we aren't in the login module,
|
||||
we are probably using the cache to aid the login.
|
||||
It could finish the login on its own if the user said 'yes' when asked to
|
||||
remember the credentials or it could still prompt the user for a password */
|
||||
}
|
||||
|
||||
await browser.waitForTarget(target => target.url().includes(videoId), { timeout: 150000 });
|
||||
console.info('We are logged in.');
|
||||
|
||||
let session = null;
|
||||
let tries: number = 1;
|
||||
await browser.waitForTarget((target: puppeteer.Target) => target.url().includes(videoId), { timeout: 150000 });
|
||||
logger.info('We are logged in.');
|
||||
|
||||
let session: Session | null = null;
|
||||
let tries = 1;
|
||||
while (!session) {
|
||||
try {
|
||||
let sessionInfo: any;
|
||||
|
@@ -100,85 +106,55 @@ async function DoInteractiveLogin(url: string, username?: string): Promise<Sessi
|
|||
|
||||
session = null;
|
||||
tries++;
|
||||
await sleep(3000);
|
||||
await page.waitFor(3000);
|
||||
}
|
||||
}
|
||||
|
||||
tokenCache.Write(session);
|
||||
console.log('Wrote access token to token cache.');
|
||||
console.log("At this point Chromium's job is done, shutting it down...\n");
|
||||
logger.info('Wrote access token to token cache.');
|
||||
logger.info("At this point Chromium's job is done, shutting it down...\n");
|
||||
|
||||
await browser.close();
|
||||
// --- Ignore all this for now ---
|
||||
// --- hopefully we won't need it ----
|
||||
// await sleep(1000);
|
||||
// let banner = await page.evaluate(
|
||||
// () => {
|
||||
// let topbar = document.getElementsByTagName('body')[0];
|
||||
// topbar.innerHTML =
|
||||
// '<h1 style="color: red">DESTREAMER NEEDS THIS WINDOW ' +
|
||||
// 'TO DO SOME ACCESS TOKEN MAGIC. DO NOT CLOSE IT.</h1>';
|
||||
// });
|
||||
// --------------------------------
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
function extractVideoGuid(videoUrls: string[]): string[] {
|
||||
const videoGuids: string[] = [];
|
||||
let guid: string | undefined = '';
|
||||
|
||||
for (const url of videoUrls) {
|
||||
try {
|
||||
const urlObj = new URL(url);
|
||||
guid = urlObj.pathname.split('/').pop();
|
||||
}
|
||||
catch (e) {
|
||||
console.error(`Unrecognized URL format in ${url}: ${e.message}`);
|
||||
process.exit(ERROR_CODE.INVALID_VIDEO_GUID);
|
||||
}
|
||||
async function downloadVideo(videoGUIDs: Array<string>, outputDirectories: Array<string>, session: Session): Promise<void> {
|
||||
|
||||
if (guid) {
|
||||
videoGuids.push(guid);
|
||||
}
|
||||
}
|
||||
|
||||
if (argv.verbose) {
|
||||
console.info('Video GUIDs:');
|
||||
console.info(videoGuids);
|
||||
}
|
||||
|
||||
return videoGuids;
|
||||
}
|
||||
|
||||
async function downloadVideo(videoUrls: string[], outputDirectories: string[], session: Session) {
|
||||
const videoGuids = extractVideoGuid(videoUrls);
|
||||
|
||||
console.log('Fetching metadata...');
|
||||
|
||||
const metadata: Metadata[] = await getVideoMetadata(videoGuids, session);
|
||||
logger.info('Fetching videos info... \n');
|
||||
const videos: Array<Video> = createUniquePath (
|
||||
await getVideoInfo(videoGUIDs, session, argv.closedCaptions),
|
||||
outputDirectories, argv.format, argv.skip
|
||||
);
|
||||
|
||||
if (argv.simulate) {
|
||||
metadata.forEach(video => {
|
||||
console.log(
|
||||
colors.yellow('\n\nTitle: ') + colors.green(video.title) +
|
||||
colors.yellow('\nPublished Date: ') + colors.green(video.date) +
|
||||
colors.yellow('\nPlayback URL: ') + colors.green(video.playbackUrl)
|
||||
videos.forEach((video: Video) => {
|
||||
logger.info(
|
||||
'\nTitle: '.green + video.title +
|
||||
'\nOutPath: '.green + video.outPath +
|
||||
'\nPublished Date: '.green + video.date +
|
||||
'\nPlayback URL: '.green + video.playbackUrl +
|
||||
((video.captionsUrl) ? ('\nCC URL: '.green + video.captionsUrl) : '')
|
||||
);
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (argv.verbose) {
|
||||
console.log(outputDirectories);
|
||||
}
|
||||
for (const video of videos) {
|
||||
|
||||
const outDirsIdxInc = outputDirectories.length > 1 ? 1:0;
|
||||
if (argv.skip && fs.existsSync(video.outPath)) {
|
||||
logger.info(`File already exists, skipping: ${video.outPath} \n`);
|
||||
continue;
|
||||
}
|
||||
|
||||
for (let i=0, j=0, l=metadata.length; i<l; ++i, j+=outDirsIdxInc) {
|
||||
const video = metadata[i];
|
||||
const pbar = new cliProgress.SingleBar({
|
||||
if (argv.keepLoginCookies) {
|
||||
logger.info('Trying to refresh token...');
|
||||
session = await refreshSession();
|
||||
}
|
||||
|
||||
const pbar: cliProgress.SingleBar = new cliProgress.SingleBar({
|
||||
barCompleteChar: '\u2588',
|
||||
barIncompleteChar: '\u2591',
|
||||
format: 'progress [{bar}] {percentage}% {speed} {eta_formatted}',
|
||||
|
@@ -188,37 +164,40 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
|
|||
hideCursor: true,
|
||||
});
|
||||
|
||||
console.log(colors.yellow(`\nDownloading Video: ${video.title}\n`));
|
||||
logger.info(`\nDownloading Video: ${video.title} \n`);
|
||||
logger.verbose('Extra video info \n' +
    '\t Video m3u8 playlist URL: '.cyan + video.playbackUrl + '\n' +
    '\t Video thumbnail URL: '.cyan + video.posterImageUrl + '\n' +
    '\t Video subtitle URL (may not exist): '.cyan + video.captionsUrl + '\n' +
    '\t Video total chunks: '.cyan + video.totalChunks + '\n');
|
||||
|
||||
video.title = makeUniqueTitle(sanitize(video.title) + ' - ' + video.date, outputDirectories[j], argv.skip, argv.format);
|
||||
|
||||
console.info('Spawning ffmpeg with access token and HLS URL. This may take a few seconds...');
|
||||
logger.info('Spawning ffmpeg with access token and HLS URL. This may take a few seconds...\n\n');
|
||||
if (!process.stdout.columns) {
|
||||
console.info(colors.red('Unable to get number of columns from terminal.\n' +
|
||||
'This happens sometimes in Cygwin/MSYS.\n' +
|
||||
'No progress bar can be rendered, however the download process should not be affected.\n\n' +
|
||||
'Please use PowerShell or cmd.exe to run destreamer on Windows.'));
|
||||
logger.warn(
|
||||
'Unable to get number of columns from terminal.\n' +
|
||||
'This happens sometimes in Cygwin/MSYS.\n' +
|
||||
'No progress bar can be rendered, however the download process should not be affected.\n\n' +
|
||||
'Please use PowerShell or cmd.exe to run destreamer on Windows.'
|
||||
);
|
||||
}
|
||||
|
||||
const headers = 'Authorization: Bearer ' + session.AccessToken;
|
||||
const headers: string = 'Authorization: Bearer ' + session.AccessToken;
|
||||
|
||||
// Very experimental inline thumbnail rendering
|
||||
if (!argv.noExperiments) {
|
||||
await drawThumbnail(video.posterImage, session);
|
||||
await drawThumbnail(video.posterImageUrl, session);
|
||||
}
|
||||
|
||||
const outputPath = outputDirectories[j] + path.sep + video.title + '.' + argv.format;
|
||||
const ffmpegInpt = new FFmpegInput(video.playbackUrl, new Map([
|
||||
const ffmpegInpt: any = new FFmpegInput(video.playbackUrl, new Map([
|
||||
['headers', headers]
|
||||
]));
|
||||
const ffmpegOutput = new FFmpegOutput(outputPath, new Map([
|
||||
const ffmpegOutput: any = new FFmpegOutput(video.outPath, new Map([
|
||||
argv.acodec === 'none' ? ['an', null] : ['c:a', argv.acodec],
|
||||
argv.vcodec === 'none' ? ['vn', null] : ['c:v', argv.vcodec],
|
||||
['n', null]
|
||||
]));
|
||||
const ffmpegCmd = new FFmpegCommand();
|
||||
const ffmpegCmd: any = new FFmpegCommand();
|
||||
|
||||
const cleanupFn = (): void => {
|
||||
const cleanupFn: () => void = () => {
|
||||
pbar.stop();
|
||||
|
||||
if (argv.noCleanup) {
|
||||
|
@@ -226,10 +205,10 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
|
|||
}
|
||||
|
||||
try {
|
||||
fs.unlinkSync(outputPath);
|
||||
fs.unlinkSync(video.outPath);
|
||||
}
|
||||
catch (e) {
|
||||
// Future handling of an error maybe
|
||||
// Future handling of an error (maybe)
|
||||
}
|
||||
};
|
||||
|
||||
|
@@ -240,9 +219,16 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
|
|||
// prepare ffmpeg command line
|
||||
ffmpegCmd.addInput(ffmpegInpt);
|
||||
ffmpegCmd.addOutput(ffmpegOutput);
|
||||
if (argv.closedCaptions && video.captionsUrl) {
|
||||
const captionsInpt: any = new FFmpegInput(video.captionsUrl, new Map([
|
||||
['headers', headers]
|
||||
]));
|
||||
|
||||
ffmpegCmd.on('update', (data: any) => {
|
||||
const currentChunks = ffmpegTimemarkToChunk(data.out_time);
|
||||
ffmpegCmd.addInput(captionsInpt);
|
||||
}
|
||||
|
||||
ffmpegCmd.on('update', async (data: any) => {
|
||||
const currentChunks: number = ffmpegTimemarkToChunk(data.out_time);
|
||||
|
||||
pbar.update(currentChunks, {
|
||||
speed: data.bitrate
|
||||
|
@@ -259,22 +245,15 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
|
|||
// let the magic begin...
|
||||
await new Promise((resolve: any) => {
|
||||
ffmpegCmd.on('error', (error: any) => {
|
||||
if (argv.skip && error.message.includes('exists') && error.message.includes(outputPath)) {
|
||||
pbar.update(video.totalChunks); // set progress bar to 100%
|
||||
console.log(colors.yellow(`\nFile already exists, skipping: ${outputPath}`));
|
||||
resolve();
|
||||
}
|
||||
else {
|
||||
cleanupFn();
|
||||
cleanupFn();
|
||||
|
||||
console.log(`\nffmpeg returned an error: ${error.message}`);
|
||||
process.exit(ERROR_CODE.UNK_FFMPEG_ERROR);
|
||||
}
|
||||
logger.error(`FFmpeg returned an error: ${error.message}`);
|
||||
process.exit(ERROR_CODE.UNK_FFMPEG_ERROR);
|
||||
});
|
||||
|
||||
ffmpegCmd.on('success', () => {
|
||||
pbar.update(video.totalChunks); // set progress bar to 100%
|
||||
console.log(colors.green(`\nDownload finished: ${outputPath}`));
|
||||
logger.info(`\nDownload finished: ${video.outPath} \n`);
|
||||
resolve();
|
||||
});
|
||||
|
||||
|
@@ -285,19 +264,36 @@ async function downloadVideo(videoUrls: string[], outputDirectories: string[], s
|
|||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
|
||||
async function main(): Promise<void> {
|
||||
await init(); // must be first
|
||||
|
||||
const outDirs: string[] = getOutputDirectoriesList(argv.outputDirectory as string);
|
||||
const videoUrls: string[] = parseVideoUrls(argv.videoUrls);
|
||||
let session: Session;
|
||||
session = tokenCache.Read() ?? await DoInteractiveLogin('https://web.microsoftstream.com/', argv.username);
|
||||
|
||||
checkOutDirsUrlsMismatch(outDirs, videoUrls);
|
||||
makeOutputDirectories(outDirs); // create all dirs now to prevent ffmpeg panic
|
||||
logger.verbose('Session and API info \n' +
|
||||
'\t API Gateway URL: '.cyan + session.ApiGatewayUri + '\n' +
|
||||
'\t API Gateway version: '.cyan + session.ApiGatewayVersion + '\n');
|
||||
|
||||
session = tokenCache.Read() ?? await DoInteractiveLogin(videoUrls[0], argv.username);
|
||||
let videoGUIDs: Array<string>;
|
||||
let outDirs: Array<string>;
|
||||
|
||||
downloadVideo(videoUrls, outDirs, session);
|
||||
if (argv.videoUrls) {
|
||||
logger.info('Parsing video/group urls');
|
||||
[videoGUIDs, outDirs] = await parseCLIinput(argv.videoUrls as Array<string>, argv.outputDirectory, session);
|
||||
}
|
||||
else {
|
||||
logger.info('Parsing input file');
|
||||
[videoGUIDs, outDirs] = await parseInputFile(argv.inputFile!, argv.outputDirectory, session);
|
||||
}
|
||||
|
||||
logger.verbose('List of GUIDs and corresponding output directory \n' +
|
||||
videoGUIDs.map((guid: string, i: number) =>
|
||||
`\thttps://web.microsoftstream.com/video/${guid} => ${outDirs[i]} \n`).join(''));
|
||||
|
||||
|
||||
downloadVideo(videoGUIDs, outDirs, session);
|
||||
}
|
||||
|
||||
|
||||
main();
|
||||

89 test/test.ts
@@ -1,59 +1,76 @@
import { parseVideoUrls } from '../src/Utils';
|
||||
import { parseInputFile } from '../src/Utils';
|
||||
import puppeteer from 'puppeteer';
|
||||
import assert from 'assert';
|
||||
import tmp from 'tmp';
|
||||
import fs from 'fs';
|
||||
import { Session } from './Types';
|
||||
|
||||
let browser: any;
|
||||
let page: any;
|
||||
|
||||
describe('Puppeteer', () => {
|
||||
it('should grab GitHub page title', async () => {
|
||||
browser = await puppeteer.launch({
|
||||
const browser = await puppeteer.launch({
|
||||
headless: true,
|
||||
args: ['--disable-dev-shm-usage', '--fast-start', '--no-sandbox']
|
||||
});
|
||||
page = await browser.newPage();
|
||||
await page.goto("https://github.com/", { waitUntil: 'load' });
|
||||
|
||||
const page = await browser.newPage();
|
||||
await page.goto('https://github.com/', { waitUntil: 'load' });
|
||||
|
||||
let pageTitle = await page.title();
|
||||
assert.equal(true, pageTitle.includes('GitHub'));
|
||||
|
||||
await browser.close();
|
||||
}).timeout(25000); // yeah, this may take a while...
|
||||
}).timeout(30000); // yeah, this may take a while...
|
||||
});
|
||||
|
||||
describe('Destreamer', () => {
|
||||
it('should parse and sanitize URL list from file', () => {
|
||||
const testIn: string[] = [
|
||||
"https://web.microsoftstream.com/video/xxxxxxxx-zzzz-hhhh-rrrr-dddddddddddd",
|
||||
"https://web.microsoftstream.com/video/xxxxxxxx-zzzz-hhhh-rrrr-dddddddddddd?",
|
||||
"https://web.microsoftstream.com/video/xxxxxxxx-zzzz-hhhh-rrrr-dddddddddddd&",
|
||||
"",
|
||||
"https://web.microsoftstream.com/video/xxxxxxxx-zzzz-hhhh-rrrr-dddddddddddd?a=b&c",
|
||||
"https://web.microsoftstream.com/video/xxxxxxxx-zzzz-hhhh-rrrr-dddddddddddd?a",
|
||||
"https://web.microsoftstream.com/video/xxxxxxxx-zzzz-hhhh-rrrr-dddddddddd",
|
||||
"https://web.microsoftstream.com/video/xxxxxx-zzzz-hhhh-rrrr-dddddddddddd",
|
||||
""
|
||||
|
||||
describe('Destreamer parsing', () => {
|
||||
it('Input file to arrays of URLs and DIRs', async () => {
|
||||
const testSession: Session = {
|
||||
AccessToken: '',
|
||||
ApiGatewayUri: '',
|
||||
ApiGatewayVersion: ''
|
||||
};
|
||||
const testIn: Array<string> = [
|
||||
'https://web.microsoftstream.com/video/xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx',
|
||||
'https://web.microsoftstream.com/video/xxxxxxxx-bbbb-xxxx-xxxx-xxxxxxxxxxxx?',
|
||||
' -dir = "luca"',
|
||||
'https://web.microsoftstream.com/video/xxxxxxxx-cccc-xxxx-xxxx-xxxxxxxxxxxx&',
|
||||
'',
|
||||
'https://web.microsoftstream.com/video/xxxxxxxx-dddd-xxxx-xxxx-xxxxxxxxxxxx?a=b&c',
|
||||
'https://web.microsoftstream.com/video/xxxxxxxx-eeee-xxxx-xxxx-xxxxxxxxxxxx?a',
|
||||
' -dir =\'checking/justToSee\'',
|
||||
'https://web.microsoftstream.com/video/xxxxxxxx-ffff-xxxx-xxxx-dddddddddd',
|
||||
'https://web.microsoftstream.com/video/xxxxxx-gggg-xxxx-xxxx-xxxxxxxxxxxx',
|
||||
''
|
||||
];
|
||||
const expectedOut: string[] = [
|
||||
"https://web.microsoftstream.com/video/xxxxxxxx-zzzz-hhhh-rrrr-dddddddddddd",
|
||||
"https://web.microsoftstream.com/video/xxxxxxxx-zzzz-hhhh-rrrr-dddddddddddd",
|
||||
"https://web.microsoftstream.com/video/xxxxxxxx-zzzz-hhhh-rrrr-dddddddddddd",
|
||||
"https://web.microsoftstream.com/video/xxxxxxxx-zzzz-hhhh-rrrr-dddddddddddd"
|
||||
const expectedGUIDsOut: Array<string> = [
|
||||
'xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx',
|
||||
'xxxxxxxx-bbbb-xxxx-xxxx-xxxxxxxxxxxx',
|
||||
'xxxxxxxx-cccc-xxxx-xxxx-xxxxxxxxxxxx',
|
||||
'xxxxxxxx-dddd-xxxx-xxxx-xxxxxxxxxxxx',
|
||||
'xxxxxxxx-eeee-xxxx-xxxx-xxxxxxxxxxxx'
|
||||
];
|
||||
const expectedDirOut: Array<string> = [
|
||||
'videos',
|
||||
'luca',
|
||||
'videos',
|
||||
'videos',
|
||||
'videos'
|
||||
];
|
||||
const tmpFile = tmp.fileSync({ postfix: '.txt' });
|
||||
let testOut: string[];
|
||||
|
||||
fs.writeFileSync(tmpFile.fd, testIn.join('\r\n'));
|
||||
|
||||
testOut = parseVideoUrls([tmpFile.name])!;
|
||||
if (testOut.length !== expectedOut.length)
|
||||
assert.strictEqual(testOut, expectedOut, "URL list not sanitized");
|
||||
|
||||
for (let i=0, l=testOut.length; i<l; ++i) {
|
||||
if (testOut[i] !== expectedOut[i])
|
||||
assert.strictEqual(testOut[i], expectedOut[i], "URL not sanitized");
|
||||
const [testUrlOut , testDirOut]: Array<Array<string>> = await parseInputFile(tmpFile.name, 'videos', testSession);
|
||||
if (testUrlOut.length !== expectedGUIDsOut.length) {
|
||||
throw "Expected url list and test list don't have the same number of elements".red;
|
||||
}
|
||||
|
||||
assert.ok("sanitizeUrls ok");
|
||||
else if (testDirOut.length !== expectedDirOut.length) {
|
||||
throw "Expected dir list and test list don't have the same number of elements".red;
|
||||
}
|
||||
assert.deepStrictEqual(testUrlOut, expectedGUIDsOut,
    'Error in parsing the URLs, mismatch between test and expected'.red);
|
||||
assert.deepStrictEqual(testDirOut, expectedDirOut,
    'Error in parsing the DIRs, mismatch between test and expected'.red);
|
||||
assert.ok('Parsing of input file ok');
|
||||
});
|
||||
});