diff --git a/.eslintrc.json b/.eslintrc.json index dcb7e1f..f9c9e41 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,12 +1,10 @@ +// NOTE: if you have trouble with a rule not working install eslint as global +// then use `eslint --print-config out.txt` and check the output for problems { "env": { "es6": true, "node": true }, - "extends": [ - "eslint:recommended", - "plugin:@typescript-eslint/eslint-recommended" - ], "globals": { "Atomics": "readonly", "SharedArrayBuffer": "readonly" @@ -19,6 +17,10 @@ "plugins": [ "@typescript-eslint" ], + "extends": [ + "eslint:recommended", + "plugin:@typescript-eslint/eslint-recommended" + ], "rules": { "semi": [2, "always"], "no-unused-vars": "off", @@ -29,6 +31,14 @@ { "blankLine": "always", "prev": "*", "next": "return" } ], "brace-style": [2, "stroustrup", { "allowSingleLine": false }], - "curly": ["error", "all"] + "curly": ["error", "all"], + "@typescript-eslint/consistent-type-definitions": ["error", "type"], + "@typescript-eslint/explicit-function-return-type": "error", + "@typescript-eslint/explicit-member-accessibility": "error", + "@typescript-eslint/array-type": ["error", {"default": "generic"}], + + // these two rules are conflicting, I need some help solving this. I'll disable one for now + // "@typescript-eslint/typedef": ["error", { "variableDeclaration": true }], + "@typescript-eslint/no-inferrable-types": "error" } -} \ No newline at end of file +} diff --git a/.gitignore b/.gitignore index d020c14..5beb38f 100644 --- a/.gitignore +++ b/.gitignore @@ -3,7 +3,9 @@ *.log *.js *.zip + +.chrome_data node_modules videos release -build \ No newline at end of file +build diff --git a/README.md b/README.md index e2b8e4f..a2282a9 100644 --- a/README.md +++ b/README.md @@ -69,44 +69,44 @@ $ npm run build $ ./destreamer.sh Options: - --help Show help [boolean] - --version Show version number [boolean] - --videoUrls, -i List of video urls [array] - --videoUrlsFile, -f Path to txt file containing the urls [string] - --username, -u [string] - --outputDirectory, -o The directory where destreamer will save your - downloads [default: videos] [string] - --outputDirectories, -O Path to a txt file containing one output directory - per video [string] - --noExperiments, -x Do not attempt to render video thumbnails in the - console [boolean] [default: false] - --simulate, -s Disable video download and print metadata information - to the console [boolean] [default: false] - --verbose, -v Print additional information to the console (use this - before opening an issue on GitHub) - [boolean] [default: false] - --noCleanup, --nc Don't delete the downloaded video file when an FFmpeg - error occurs [boolean] [default: false] - --vcodec Re-encode video track. Specify FFmpeg codec (e.g. - libx265) or set to "none" to disable video. - [string] [default: "copy"] - --acodec Re-encode audio track. Specify FFmpeg codec (e.g. - libopus) or set to "none" to disable audio. - [string] [default: "copy"] - --format Output container format (mkv, mp4, mov, anything that - FFmpeg supports) [string] [default: "mkv"] - --skip Skip download if file already exists - [boolean] [default: false] + --help Show help [boolean] + --version Show version number [boolean] + --username, -u The username used to log into Microsoft Stream (enabling this will fill in the email field for + you) [string] + --videoUrls, -i List of video urls [array] + --inputFile, -f Path to text file containing URLs and optionally outDirs. See the README for more on outDirs. 
+                                                                                                          [string]
+  --outputDirectory, -o   The directory where destreamer will save your downloads        [string] [default: "videos"]
+  --keepLoginCookies, -k  Let Chromium cache identity provider cookies so you can use "Remember me" during login
+                                                                                          [boolean] [default: false]
+  --noExperiments, -x     Do not attempt to render video thumbnails in the console        [boolean] [default: false]
+  --simulate, -s          Disable video download and print metadata information to the console [boolean] [default: false]
+  --verbose, -v           Print additional information to the console (use this before opening an issue on GitHub)
+                                                                                          [boolean] [default: false]
+  --closedCaptions, --cc  Check if closed captions are available and let the user choose which one to download (will not
+                          ask if only one is available)                                   [boolean] [default: false]
+  --noCleanup, --nc       Do not delete the downloaded video file when an FFmpeg error occurs [boolean] [default: false]
+  --vcodec                Re-encode video track. Specify FFmpeg codec (e.g. libx265) or set to "none" to disable video.
+                                                                                           [string] [default: "copy"]
+  --acodec                Re-encode audio track. Specify FFmpeg codec (e.g. libopus) or set to "none" to disable audio.
+                                                                                           [string] [default: "copy"]
+  --format                Output container format (mkv, mp4, mov, anything that FFmpeg supports)
+                                                                                            [string] [default: "mkv"]
+  --skip                  Skip download if file already exists                            [boolean] [default: false]
 ```
 
-We default to `.mkv` for the output container. If you prefer something else (like `mp4`), pass `--format mp4`.
+- Passing `--username` is optional. It's there to make logging in faster (the username field will be populated automatically on the login form).
+
+- You can use an absolute path for `-o` (output directory), for example `/mnt/videos`.
+
+- We default to `.mkv` for the output container. If you prefer something else (like `mp4`), pass `--format mp4`.
 
 Download a video -
 ```sh
 $ ./destreamer.sh -i "https://web.microsoftstream.com/video/VIDEO-1"
 ```
 
-Download a video and re-encode with HEVC (libx265):
+Download a video and re-encode with HEVC (libx265) -
 ```sh
 $ ./destreamer.sh -i "https://web.microsoftstream.com/video/VIDEO-1" --vcodec libx265
 ```
@@ -131,12 +131,19 @@ Download many videos but read URLs from a file -
 ```sh
 $ ./destreamer.sh -f list.txt
 ```
-
+### Input file
 You can create a `.txt` file containing your video URLs, one video per line. The text file can have any name, followed by the `.txt` extension.
+Additionally, you can have destreamer download each video in the input list to a separate directory.
+These optional lines must start with whitespace and the directory path must be wrapped in quotes.
 
-Passing `--username` is optional. It's there to make logging in faster (the username field will be populated automatically on the login form).
+Usage -
+```
+https://web.microsoftstream.com/video/xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx
+    -dir="videos/lessons/week1"
+https://web.microsoftstream.com/video/xxxxxxxx-aaaa-xxxx-xxxx-xxxxxxxxxxxx
+    -dir="videos/lessons/week2"
+```
 
-You can use an absolute path for `-o` (output directory), for example `/mnt/videos`.
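+For example, to download every video listed in `list.txt`, keep the login cookies so "Remember me" works, check for closed captions and switch the container to mp4, you could combine the options above -
+```sh
+$ ./destreamer.sh -f list.txt -k --cc --format mp4
+```
+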
## Expected output diff --git a/package-lock.json b/package-lock.json index 4660018..cc04080 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "destreamer", - "version": "2.0.0", + "version": "2.1.0", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -38,6 +38,16 @@ "regenerator-runtime": "^0.13.4" } }, + "@dabh/diagnostics": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.2.tgz", + "integrity": "sha512-+A1YivoVDNNVCdfozHSR8v/jyuuLTMXwjWuxPFlFlUapXoGc+Gj9mDlTDDfrwl7rXCl2tNZ0kE8sIBO6YOn96Q==", + "requires": { + "colorspace": "1.1.x", + "enabled": "2.0.x", + "kuler": "^2.0.0" + } + }, "@jimp/bmp": { "version": "0.10.3", "resolved": "https://registry.npmjs.org/@jimp/bmp/-/bmp-0.10.3.tgz", @@ -449,6 +459,12 @@ "@types/node": "*" } }, + "@types/readline-sync": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/@types/readline-sync/-/readline-sync-1.4.3.tgz", + "integrity": "sha512-YP9NVli96E+qQLAF2db+VjnAUEeZcFVg4YnMgr8kpDUFwQBnj31rPLOVHmazbKQhaIkJ9cMHsZhpKdzUeL0KTg==", + "dev": true + }, "@types/tmp": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.1.0.tgz", @@ -646,6 +662,11 @@ "integrity": "sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg==", "dev": true }, + "async": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz", + "integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw==" + }, "async-limiter": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz", @@ -840,6 +861,30 @@ "wrap-ansi": "^6.2.0" } }, + "color": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/color/-/color-3.0.0.tgz", + "integrity": "sha512-jCpd5+s0s0t7p3pHQKpnJ0TpQKKdleP71LWcA0aqiljpiuAkOSUFN/dyH8ZwF0hRmFlrIuRhufds1QyEP9EB+w==", + "requires": { + "color-convert": "^1.9.1", + "color-string": "^1.5.2" + }, + "dependencies": { + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + } + } + }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -853,11 +898,29 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, + "color-string": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.3.tgz", + "integrity": "sha512-dC2C5qeWoYkxki5UAXapdjqO672AM4vZuPGRQfO8b5HKuKGBbKWpITyDYN7TOFKvRW7kOgAn3746clDBMDJyQw==", + "requires": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, "colors": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==" }, + "colorspace": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.2.tgz", + "integrity": 
"sha512-vt+OoIP2d76xLhjwbBaucYlNSpPsrJWPlBTtwCpQKIu6/CSMutyzX93O/Do0qzpH3YoHEes8YEFXyZ797rEhzQ==", + "requires": { + "color": "3.0.x", + "text-hex": "1.0.x" + } + }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", @@ -949,6 +1012,11 @@ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, + "enabled": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz", + "integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==" + }, "end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -1251,6 +1319,11 @@ "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", "dev": true }, + "fast-safe-stringify": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.7.tgz", + "integrity": "sha512-Utm6CdzT+6xsDk2m8S6uL8VHxNwI6Jub+e9NYTcAms28T84pTa25GJQV9j0CY0N1rM8hK4x6grpF2BQf+2qwVA==" + }, "fd-slicer": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", @@ -1259,6 +1332,11 @@ "pend": "~1.2.0" } }, + "fecha": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.0.tgz", + "integrity": "sha512-aN3pcx/DSmtyoovUudctc8+6Hl4T+hI9GBBHLjA76jdZl7+b1sgh5g4k+u/GL3dTy1/pnYzKp69FpJ0OicE3Wg==" + }, "figures": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", @@ -1345,6 +1423,11 @@ "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==", "dev": true }, + "fn.name": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz", + "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==" + }, "follow-redirects": { "version": "1.5.10", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.5.10.tgz", @@ -1593,6 +1676,11 @@ "execa": "^1.0.0" } }, + "is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" + }, "is-binary-path": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", @@ -1768,6 +1856,11 @@ "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-2.2.0.tgz", "integrity": "sha1-fYa9VmefWM5qhHBKZX3TkruoGnk=" }, + "kuler": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz", + "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==" + }, "levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", @@ -1823,6 +1916,25 @@ "chalk": "^2.4.2" } }, + "logform": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/logform/-/logform-2.2.0.tgz", + "integrity": "sha512-N0qPlqfypFx7UHNn4B3lzS/b0uLqt2hmuoa+PpuXNYgozdJYAyauF5Ky0BWVjrxDlMWiT3qN4zPq3vVAfZy7Yg==", + "requires": { + "colors": "^1.2.1", + "fast-safe-stringify": "^2.0.4", + "fecha": "^4.2.0", + "ms": "^2.1.1", + "triple-beam": "^1.3.0" + }, + "dependencies": { + "ms": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, "mime": { "version": "2.4.5", "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.5.tgz", @@ -2202,6 +2314,14 @@ "wrappy": "1" } }, + "one-time": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz", + "integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==", + "requires": { + "fn.name": "1.x.x" + } + }, "onetime": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", @@ -2452,6 +2572,11 @@ "picomatch": "^2.0.4" } }, + "readline-sync": { + "version": "1.4.10", + "resolved": "https://registry.npmjs.org/readline-sync/-/readline-sync-1.4.10.tgz", + "integrity": "sha512-gNva8/6UAe8QYepIQH/jQ2qn91Qj0B9sYjMBBs3QOB8F2CXcKgLxQaJRP76sWVRQt+QU+8fAkCbCvjjMFu7Ycw==" + }, "regenerator-runtime": { "version": "0.13.5", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.5.tgz", @@ -2567,6 +2692,14 @@ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" }, + "simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", + "requires": { + "is-arrayish": "^0.3.1" + } + }, "slice-ansi": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", @@ -2616,6 +2749,11 @@ "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", "dev": true }, + "stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=" + }, "string-width": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", @@ -2799,6 +2937,11 @@ } } }, + "text-hex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", + "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==" + }, "text-table": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", @@ -2839,6 +2982,11 @@ "is-number": "^7.0.0" } }, + "triple-beam": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz", + "integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==" + }, "truncate-utf8-bytes": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz", @@ -2975,6 +3123,48 @@ } } }, + "winston": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.3.2.tgz", + "integrity": "sha512-vTOrUZlyQPS8VpCcQ1JT8BumDAUe4awCHZ9nmGgO7LqkV4atj0dKa5suA7Trf7QKtBszE2yUs9d8744Kz9j4jQ==", + "requires": { + "@dabh/diagnostics": "^2.0.2", + "async": "^3.1.0", + "is-stream": "^2.0.0", + "logform": "^2.2.0", + "one-time": "^1.0.0", + "readable-stream": "^3.4.0", + "stack-trace": "0.0.x", + "triple-beam": "^1.3.0", + "winston-transport": "^4.4.0" + }, + "dependencies": { + "is-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", + "integrity": 
"sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==" + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, + "winston-transport": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.4.0.tgz", + "integrity": "sha512-Lc7/p3GtqtqPBYYtS6KCN3c77/2QCev51DvcJKbkFPQNoj1sinkGwLGFDxkXY9J6p9+EPnYs+D90uwbnaiURTw==", + "requires": { + "readable-stream": "^2.3.7", + "triple-beam": "^1.2.0" + } + }, "word-wrap": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", diff --git a/package.json b/package.json index f85aa79..754d31a 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "type": "git", "url": "git://github.com/snobu/destreamer.git" }, - "version": "2.0.0", + "version": "2.1.0", "description": "Save Microsoft Stream videos for offline enjoyment.", "main": "build/src/destreamer.js", "bin": "build/src/destreamer.js", @@ -19,6 +19,7 @@ "devDependencies": { "@types/mocha": "^7.0.2", "@types/puppeteer": "^1.20.4", + "@types/readline-sync": "^1.4.3", "@types/tmp": "^0.1.0", "@types/yargs": "^15.0.3", "@typescript-eslint/eslint-plugin": "^2.25.0", @@ -39,9 +40,11 @@ "iso8601-duration": "^1.2.0", "jwt-decode": "^2.2.0", "puppeteer": "2.1.1", + "readline-sync": "^1.4.10", "sanitize-filename": "^1.6.3", "terminal-image": "^1.0.1", "typescript": "^3.8.3", + "winston": "^3.3.2", "yargs": "^15.0.3" } } diff --git a/src/ApiClient.ts b/src/ApiClient.ts index b0ac073..3d1585f 100644 --- a/src/ApiClient.ts +++ b/src/ApiClient.ts @@ -1,6 +1,9 @@ +import { logger } from './Logger'; +import { Session } from './Types'; + import axios, { AxiosRequestConfig, AxiosResponse, AxiosInstance, AxiosError } from 'axios'; import axiosRetry, { isNetworkOrIdempotentRequestError } from 'axios-retry'; -import { Session } from './Types'; + export class ApiClient { private static instance: ApiClient; @@ -11,26 +14,30 @@ export class ApiClient { this.session = session; this.axiosInstance = axios.create({ baseURL: session?.ApiGatewayUri, - timeout: 7000, + // timeout: 7000, headers: { 'User-Agent': 'destreamer/2.0 (Hammer of Dawn)' } }); + axiosRetry(this.axiosInstance, { + // The following option is not working. + // We should open an issue on the relative GitHub shouldResetTimeout: true, retries: 6, - retryDelay: (retryCount) => { + retryDelay: (retryCount: number) => { return retryCount * 2000; }, retryCondition: (err: AxiosError) => { - const retryCodes = [429, 500, 502, 503]; + const retryCodes: Array = [429, 500, 502, 503]; if (isNetworkOrIdempotentRequestError(err)) { - console.warn(`${err}. Retrying request...`); + logger.warn(`${err}. Retrying request...`); return true; - } - console.warn(`Got HTTP ${err?.response?.status}. Retrying request...`); - const condition = retryCodes.includes(err?.response?.status ?? 0); + } + logger.warn(`Got HTTP code ${err?.response?.status ?? undefined}. Retrying request...`); - return condition; + const shouldRetry: boolean = retryCodes.includes(err?.response?.status ?? 
0); + + return shouldRetry; } }); } @@ -52,7 +59,7 @@ export class ApiClient { method: AxiosRequestConfig['method'] = 'get', payload?: any): Promise { - const delimiter = path.split('?').length === 1 ? '?' : '&'; + const delimiter: '?' | '&' = path.split('?').length === 1 ? '?' : '&'; const headers: object = { 'Authorization': 'Bearer ' + this.session?.AccessToken @@ -74,7 +81,7 @@ export class ApiClient { method: AxiosRequestConfig['method'] = 'get', payload?: any, responseType: AxiosRequestConfig['responseType'] = 'json'): Promise { - + const headers: object = { 'Authorization': 'Bearer ' + this.session?.AccessToken }; @@ -87,5 +94,4 @@ export class ApiClient { responseType: responseType }); } - -} \ No newline at end of file +} diff --git a/src/CommandLineParser.ts b/src/CommandLineParser.ts index 0179092..55c36db 100644 --- a/src/CommandLineParser.ts +++ b/src/CommandLineParser.ts @@ -1,37 +1,43 @@ -import { CLI_ERROR } from './Errors'; +import { CLI_ERROR, ERROR_CODE } from './Errors'; +import { checkOutDir } from './Utils'; +import { logger } from './Logger'; -import yargs from 'yargs'; -import colors from 'colors'; import fs from 'fs'; +import readlineSync from 'readline-sync'; +import yargs from 'yargs'; -export const argv = yargs.options({ + +export const argv: any = yargs.options({ + username: { + alias: 'u', + type: 'string', + describe: 'The username used to log into Microsoft Stream (enabling this will fill in the email field for you)', + demandOption: false + }, videoUrls: { alias: 'i', describe: 'List of video urls', type: 'array', demandOption: false }, - videoUrlsFile: { + inputFile: { alias: 'f', - describe: 'Path to txt file containing the urls', - type: 'string', - demandOption: false - }, - username: { - alias: 'u', + describe: 'Path to text file containing URLs and optionally outDirs. See the README for more on outDirs.', type: 'string', demandOption: false }, outputDirectory: { alias: 'o', - describe: 'The directory where destreamer will save your downloads [default: videos]', + describe: 'The directory where destreamer will save your downloads', type: 'string', + default: 'videos', demandOption: false }, - outputDirectories: { - alias: 'O', - describe: 'Path to a txt file containing one output directory per video', - type: 'string', + keepLoginCookies: { + alias: 'k', + describe: 'Let Chromium cache identity provider cookies so you can use "Remember me" during login', + type: 'boolean', + default: false, demandOption: false }, noExperiments: { @@ -55,6 +61,13 @@ export const argv = yargs.options({ default: false, demandOption: false }, + closedCaptions: { + alias: 'cc', + describe: 'Check if closed captions are aviable and let the user choose which one to download (will not ask if only one aviable)', + type: 'boolean', + default: false, + demandOption: false + }, noCleanup: { alias: 'nc', describe: 'Do not delete the downloaded video file when an FFmpeg error occurs', @@ -87,147 +100,74 @@ export const argv = yargs.options({ demandOption: false } }) -/** - * Do our own argv magic before destreamer starts. - * ORDER IS IMPORTANT! - * Do not mess with this. 
- */ -.check(() => isShowHelpRequest()) -.check(argv => checkRequiredArgument(argv)) -.check(argv => checkVideoUrlsArgConflict(argv)) -.check(argv => checkOutputDirArgConflict(argv)) -.check(argv => checkVideoUrlsInput(argv)) -.check(argv => windowsFileExtensionBadBehaviorFix(argv)) -.check(argv => mergeVideoUrlsArguments(argv)) -.check(argv => mergeOutputDirArguments(argv)) +.wrap(120) +.check(() => noArguments()) +.check((argv: any) => inputConflicts(argv.videoUrls, argv.inputFile)) +.check((argv: any) => { + if (checkOutDir(argv.outputDirectory)) { + return true; + } + else { + logger.error(CLI_ERROR.INVALID_OUTDIR); + + throw new Error(' '); + } +}) .argv; -function hasNoArgs() { - return process.argv.length === 2; -} -function isShowHelpRequest() { - if (hasNoArgs()) { - throw new Error(CLI_ERROR.GRACEFULLY_STOP); +function noArguments(): boolean { + // if only 2 args no other args (0: node path, 1: js script path) + if (process.argv.length === 2) { + logger.error(CLI_ERROR.MISSING_INPUT_ARG, {fatal: true}); + + // so that the output stays clear + throw new Error(' '); } return true; } -function checkRequiredArgument(argv: any) { - if (hasNoArgs()) { - return true; + +function inputConflicts(videoUrls: Array | undefined, + inputFile: string | undefined): boolean { + // check if both inputs are declared + if ((videoUrls !== undefined) && (inputFile !== undefined)) { + logger.error(CLI_ERROR.INPUT_ARG_CONFLICT); + + throw new Error(' '); } + // check if no input is declared or if they are declared but empty + else if (!(videoUrls || inputFile) || (videoUrls?.length === 0) || (inputFile?.length === 0)) { + logger.error(CLI_ERROR.MISSING_INPUT_ARG); - if (!argv.videoUrls && !argv.videoUrlsFile) { - throw new Error(colors.red(CLI_ERROR.MISSING_REQUIRED_ARG)); + throw new Error(' '); } + else if (inputFile) { + // check if inputFile doesn't end in '.txt' + if (inputFile.substring(inputFile.length - 4) !== '.txt') { + logger.error(CLI_ERROR.INPUTFILE_WRONG_EXTENSION); - return true; -} - -function checkVideoUrlsArgConflict(argv: any) { - if (hasNoArgs()) { - return true; - } - - if (argv.videoUrls && argv.videoUrlsFile) { - throw new Error(colors.red(CLI_ERROR.VIDEOURLS_ARG_CONFLICT)); - } - - return true; -} - -function checkOutputDirArgConflict(argv: any) { - if (hasNoArgs()) { - return true; - } - - if (argv.outputDirectory && argv.outputDirectories) { - throw new Error(colors.red(CLI_ERROR.OUTPUTDIR_ARG_CONFLICT)); - } - - return true; -} - -function checkVideoUrlsInput(argv: any) { - if (hasNoArgs() || !argv.videoUrls) { - return true; - } - - if (!argv.videoUrls.length) { - throw new Error(colors.red(CLI_ERROR.MISSING_REQUIRED_ARG)); - } - - const t = argv.videoUrls[0] as string; - if (t.substring(t.length-4) === '.txt') { - throw new Error(colors.red(CLI_ERROR.FILE_INPUT_VIDEOURLS_ARG)); - } - - return true; -} - -/** - * Users see 2 separate options, but we don't really care - * cause both options have no difference in code. - * - * Optimize and make this transparent to destreamer - */ -function mergeVideoUrlsArguments(argv: any) { - if (!argv.videoUrlsFile) { - return true; - } - - argv.videoUrls = [argv.videoUrlsFile]; // noone will notice ;) - - // these are not valid anymore - delete argv.videoUrlsFile; - delete argv.F; - - return true; -} - -/** - * Users see 2 separate options, but we don't really care - * cause both options have no difference in code. 
- * - * Optimize and make this transparent to destreamer - */ -function mergeOutputDirArguments(argv: any) { - if (!argv.outputDirectories && argv.outputDirectory) { - return true; - } - - if (!argv.outputDirectory && !argv.outputDirectories) { - argv.outputDirectory = 'videos'; // default out dir - } - else if (argv.outputDirectories) { - argv.outputDirectory = argv.outputDirectories; - } - - if (argv.outputDirectories) { - // these are not valid anymore - delete argv.outputDirectories; - delete argv.O; - } - - return true; -} - -// yeah this is for windows, but lets check everyone, who knows... -function windowsFileExtensionBadBehaviorFix(argv: any) { - if (hasNoArgs() || !argv.videoUrlsFile || !argv.outputDirectories) { - return true; - } - - if (!fs.existsSync(argv.videoUrlsFile)) { - if (fs.existsSync(argv.videoUrlsFile + '.txt')) { - argv.videoUrlsFile += '.txt'; + throw new Error(' '); } - else { - throw new Error(colors.red(CLI_ERROR.INPUT_URLS_FILE_NOT_FOUND)); + // check if the inputFile exists + else if (!fs.existsSync(inputFile)) { + logger.error(CLI_ERROR.INPUTFILE_NOT_FOUND); + + throw new Error(' '); } } return true; } + + +export function promptUser(choices: Array): number { + let index: number = readlineSync.keyInSelect(choices, 'Which resolution/format do you prefer?'); + + if (index === -1) { + process.exit(ERROR_CODE.CANCELLED_USER_INPUT); + } + + return index; +} diff --git a/src/Errors.ts b/src/Errors.ts index 8298758..dc8d28b 100644 --- a/src/Errors.ts +++ b/src/Errors.ts @@ -1,64 +1,47 @@ -interface IError { - [key: number]: string -} - export const enum ERROR_CODE { - NO_ERROR, UNHANDLED_ERROR, - MISSING_FFMPEG, ELEVATED_SHELL, - INVALID_OUTPUT_DIR, - INVALID_INPUT_URLS, - OUTDIRS_URLS_MISMATCH, - INVALID_VIDEO_ID, - INVALID_VIDEO_GUID, + CANCELLED_USER_INPUT, + MISSING_FFMPEG, UNK_FFMPEG_ERROR, + INVALID_VIDEO_GUID, NO_SESSION_INFO } -// TODO: create better errors descriptions -export const Error: IError = { - [ERROR_CODE.NO_ERROR]: 'Clean exit with code 0', +export const errors: {[key: number]: string} = { [ERROR_CODE.UNHANDLED_ERROR]: 'Unhandled error!\n' + 'Timeout or fatal error, please check your downloads directory and try again', [ERROR_CODE.ELEVATED_SHELL]: 'Destreamer cannot run in an elevated (Administrator/root) shell.\n' + 'Please run in a regular, non-elevated window.', - [ERROR_CODE.INVALID_OUTPUT_DIR]: 'Unable to create output directory', + [ERROR_CODE.CANCELLED_USER_INPUT]: 'Input was cancelled by user', [ERROR_CODE.MISSING_FFMPEG]: 'FFmpeg is missing!\n' + 'Destreamer requires a fairly recent release of FFmpeg to download videos', [ERROR_CODE.UNK_FFMPEG_ERROR]: 'Unknown FFmpeg error', - [ERROR_CODE.INVALID_INPUT_URLS]: 'No valid URL from input', - - [ERROR_CODE.OUTDIRS_URLS_MISMATCH]: 'Output directories and URLs mismatch!\n' + - 'You must input the same number of URLs and output directories', - - [ERROR_CODE.INVALID_VIDEO_ID]: 'Unable to get video ID from URL', - [ERROR_CODE.INVALID_VIDEO_GUID]: 'Unable to get video GUID from URL', [ERROR_CODE.NO_SESSION_INFO]: 'Could not evaluate sessionInfo on the page' }; + export const enum CLI_ERROR { - GRACEFULLY_STOP = ' ', // gracefully stop execution, yargs way + MISSING_INPUT_ARG = 'You must specify a URLs source. \n' + + 'Valid options are -i for one or more URLs separated by space or -f for input file. 
\n', - MISSING_REQUIRED_ARG = 'You must specify a URLs source.\n' + - 'Valid options are -i for one or more URLs separated by space or -f for URLs from file.', + INPUT_ARG_CONFLICT = 'Too many URLs sources specified! \n' + + 'Please specify a single source, either -i or -f \n', - VIDEOURLS_ARG_CONFLICT = 'Too many URLs sources specified!\n' + - 'Please specify a single source, either -i or -f (URLs from file)', + INPUTFILE_WRONG_EXTENSION = 'The specified inputFile has the wrong extension \n' + + 'Please make sure to use path/to/filename.txt when useing the -f option \n', - OUTPUTDIR_ARG_CONFLICT = 'Too many output arguments specified!\n' + - 'Please specify a single output argument, either -o or --outputDirectories.', + INPUTFILE_NOT_FOUND = 'The specified inputFile does not exists \n'+ + 'Please check the filename and the path you provided \n', - FILE_INPUT_VIDEOURLS_ARG = 'Wrong input for option --videoUrls.\n' + - 'To read URLs from file, use --videoUrlsFile option.', - - INPUT_URLS_FILE_NOT_FOUND = 'Input URL list file not found.' -} \ No newline at end of file + INVALID_OUTDIR = 'Could not create the default/specified output directory \n' + + 'Please check directory and permissions and try again. \n' +} diff --git a/src/Events.ts b/src/Events.ts index 2a11b5e..aacc980 100644 --- a/src/Events.ts +++ b/src/Events.ts @@ -1,6 +1,6 @@ -import { Error, ERROR_CODE } from './Errors'; +import { errors, ERROR_CODE } from './Errors'; +import { logger } from './Logger'; -import colors from 'colors'; /** * This file contains global destreamer process events @@ -9,20 +9,25 @@ import colors from 'colors'; * * @note function is required for non-packaged destreamer, so we can't do better */ -export function setProcessEvents() { +export function setProcessEvents(): void { // set exit event first so that we can always print cute errors - process.on('exit', (code) => { - if (code == 0) { + process.on('exit', (code: number) => { + if (code === 0) { return; } - const msg = code in Error ? `\n\n${Error[code]} \n` : `\n\nUnknown error: exit code ${code} \n`; + const msg: string = (code in errors) ? `${errors[code]} \n` : `Unknown error: exit code ${code} \n`; - console.error(colors.bgRed(msg)); + logger.error({ message: msg, fatal: true }); }); - process.on('unhandledRejection', (reason) => { - console.error(colors.red(reason as string)); + process.on('unhandledRejection', (reason: {} | null | undefined) => { + if (reason instanceof Error) { + logger.error({ message: (reason as Error) }); + process.exit(ERROR_CODE.UNHANDLED_ERROR); + } + + logger.error({ message: (reason as string) }); process.exit(ERROR_CODE.UNHANDLED_ERROR); }); -} \ No newline at end of file +} diff --git a/src/Logger.ts b/src/Logger.ts new file mode 100644 index 0000000..7380ae6 --- /dev/null +++ b/src/Logger.ts @@ -0,0 +1,40 @@ +import colors from 'colors'; +import winston from 'winston'; + + +export const logger: winston.Logger = winston.createLogger({ + level: 'info', + transports: [ + new winston.transports.Console({ + format: winston.format.combine( + winston.format.errors({ stack: true }), + winston.format.timestamp({ format: 'YYYY-MM-DD hh:mm:ss' }), + winston.format.printf( + (item: winston.Logform.TransformableInfo) => customPrint(item) + ) + ) + }) + ] +}); + + +function customPrint (info: winston.Logform.TransformableInfo): string { + if (info.level === 'error') { + if (info.fatal) { + return colors.red('\n\n[FATAL ERROR] ') + (info.stack ?? info.message); + } + + return colors.red('\n[ERROR] ') + (info.stack ?? 
info.message) + '\n'; + } + else if (info.level === 'warn') { + return colors.yellow('\n[WARNING] ') + info.message; + } + else if (info.level === 'info') { + return info.message; + } + else if (info.level === 'verbose') { + return colors.cyan('\n[VERBOSE] ') + info.message; + } + + return `${info.level}: ${info.message} - ${info.timestamp}`; +} diff --git a/src/Metadata.ts b/src/Metadata.ts deleted file mode 100644 index bd52736..0000000 --- a/src/Metadata.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { Metadata, Session } from './Types'; -import { forEachAsync } from './Utils'; -import { ApiClient } from './ApiClient'; - -import { parse } from 'iso8601-duration'; - - -function publishedDateToString(date: string) { - const dateJs = new Date(date); - const day = dateJs.getDate().toString().padStart(2, '0'); - const month = (dateJs.getMonth() + 1).toString(10).padStart(2, '0'); - const publishedDate = day + '-' + month + '-' + dateJs.getFullYear(); - - return publishedDate; -} - -function durationToTotalChunks(duration: string) { - const durationObj = parse(duration); - const hrs = durationObj['hours'] ?? 0; - const mins = durationObj['minutes'] ?? 0; - const secs = Math.ceil(durationObj['seconds'] ?? 0); - - return (hrs * 60) + mins + (secs / 60); -} - -export async function getVideoMetadata(videoGuids: string[], session: Session): Promise { - let metadata: Metadata[] = []; - let title: string; - let date: string; - let totalChunks: number; - let playbackUrl: string; - let posterImage: string; - - const apiClient = ApiClient.getInstance(session); - - await forEachAsync(videoGuids, async (guid: string) => { - let response = await apiClient.callApi('videos/' + guid, 'get'); - - title = response?.data['name']; - playbackUrl = response?.data['playbackUrls'] - .filter((item: { [x: string]: string; }) => - item['mimeType'] == 'application/vnd.apple.mpegurl') - .map((item: { [x: string]: string }) => { - return item['playbackUrl']; - })[0]; - - posterImage = response?.data['posterImage']['medium']['url']; - date = publishedDateToString(response?.data['publishedDate']); - totalChunks = durationToTotalChunks(response?.data.media['duration']); - - metadata.push({ - date: date, - totalChunks: totalChunks, - title: title, - playbackUrl: playbackUrl, - posterImage: posterImage - }); - }); - - return metadata; -} diff --git a/src/PuppeteerHelper.ts b/src/PuppeteerHelper.ts index 209acf0..9411f0c 100644 --- a/src/PuppeteerHelper.ts +++ b/src/PuppeteerHelper.ts @@ -1,19 +1,21 @@ import path from 'path'; import puppeteer from 'puppeteer'; + // Thanks pkg-puppeteer [ cleaned up version :) ] -export function getPuppeteerChromiumPath() { - const isPkg = __filename.includes('snapshot'); - const macOS_Linux_rex = /^.*?\/node_modules\/puppeteer\/\.local-chromium/; - const win32_rex = /^.*?\\node_modules\\puppeteer\\\.local-chromium/; - const replaceRegex = process.platform === 'win32' ? win32_rex : macOS_Linux_rex; +export function getPuppeteerChromiumPath(): string { + const isPkg: boolean = __filename.includes('snapshot'); + + const replaceRegex: RegExp = (process.platform === 'win32') ? 
+ new RegExp(/^.*?\\node_modules\\puppeteer\\\.local-chromium/) : + new RegExp(/^.*?\/node_modules\/puppeteer\/\.local-chromium/); if (!isPkg) { return puppeteer.executablePath(); } - const browserPath = puppeteer.executablePath() + const browserPath: string = puppeteer.executablePath() .replace(replaceRegex, path.join(path.dirname(process.execPath), 'chromium')); return browserPath; -} \ No newline at end of file +} diff --git a/src/Thumbnail.ts b/src/Thumbnail.ts index 6aeaccb..aea3940 100644 --- a/src/Thumbnail.ts +++ b/src/Thumbnail.ts @@ -1,10 +1,15 @@ import { ApiClient } from './ApiClient'; import { Session } from './Types'; + import terminalImage from 'terminal-image'; +import { AxiosResponse } from 'axios'; export async function drawThumbnail(posterImage: string, session: Session): Promise { - const apiClient = ApiClient.getInstance(session); - let thumbnail = await apiClient.callUrl(posterImage, 'get', null, 'arraybuffer'); - console.log(await terminalImage.buffer(thumbnail?.data, { width: 70 } )); + const apiClient: ApiClient = ApiClient.getInstance(session); + + let thumbnail: Buffer = await apiClient.callUrl(posterImage, 'get', null, 'arraybuffer') + .then((response: AxiosResponse | undefined) => response?.data); + + console.log(await terminalImage.buffer(thumbnail, { width: 70 } )); } diff --git a/src/TokenCache.ts b/src/TokenCache.ts index a565df3..020ffab 100644 --- a/src/TokenCache.ts +++ b/src/TokenCache.ts @@ -1,56 +1,104 @@ -import * as fs from 'fs'; +import { chromeCacheFolder } from './destreamer'; +import { ERROR_CODE } from './Errors'; +import { logger } from './Logger'; +import { getPuppeteerChromiumPath } from './PuppeteerHelper'; import { Session } from './Types'; -import { bgGreen, bgYellow, green } from 'colors'; + +import fs from 'fs'; import jwtDecode from 'jwt-decode'; +import puppeteer from 'puppeteer'; + export class TokenCache { - private tokenCacheFile: string = '.token_cache'; + private tokenCacheFile = '.token_cache'; public Read(): Session | null { - let j = null; if (!fs.existsSync(this.tokenCacheFile)) { - console.warn(bgYellow.black(`${this.tokenCacheFile} not found.\n`)); + logger.warn(`${this.tokenCacheFile} not found. \n`); return null; } - let f = fs.readFileSync(this.tokenCacheFile, 'utf8'); - j = JSON.parse(f); - interface Jwt { + let session: Session = JSON.parse(fs.readFileSync(this.tokenCacheFile, 'utf8')); + + type Jwt = { [key: string]: any } + const decodedJwt: Jwt = jwtDecode(session.AccessToken); - const decodedJwt: Jwt = jwtDecode(j.AccessToken); + let now: number = Math.floor(Date.now() / 1000); + let exp: number = decodedJwt['exp']; + let timeLeft: number = exp - now; - let now = Math.floor(Date.now() / 1000); - let exp = decodedJwt['exp']; - let timeLeft = exp - now; - - let timeLeftInMinutes = Math.floor(timeLeft / 60); if (timeLeft < 120) { - console.warn(bgYellow.black('\nAccess token has expired.')); + logger.warn('Access token has expired! 
\n'); return null; } - console.info(bgGreen.black(`\nAccess token still good for ${timeLeftInMinutes} minutes.\n`)); - - let session: Session = { - AccessToken: j.AccessToken, - ApiGatewayUri: j.ApiGatewayUri, - ApiGatewayVersion: j.ApiGatewayVersion - }; + logger.info(`Access token still good for ${Math.floor(timeLeft / 60)} minutes.\n`.green); return session; } public Write(session: Session): void { - let s = JSON.stringify(session, null, 4); + let s: string = JSON.stringify(session, null, 4); fs.writeFile('.token_cache', s, (err: any) => { if (err) { - return console.error(err); + return logger.error(err); } - console.info(green('Fresh access token dropped into .token_cache')); + logger.info('Fresh access token dropped into .token_cachen \n'.green); }); } -} \ No newline at end of file +} + + +export async function refreshSession(): Promise { + const url = 'https://web.microsoftstream.com'; + + const browser: puppeteer.Browser = await puppeteer.launch({ + executablePath: getPuppeteerChromiumPath(), + headless: false, // NEVER TRUE OR IT DOES NOT WORK + userDataDir: chromeCacheFolder, + args: [ + '--disable-dev-shm-usage', + '--fast-start', + '--no-sandbox' + ] + }); + + const page: puppeteer.Page = (await browser.pages())[0]; + await page.goto(url, { waitUntil: 'load' }); + + await browser.waitForTarget((target: puppeteer.Target) => target.url().includes(url), { timeout: 30000 }); + + let session: Session | null = null; + let tries = 1; + + while (!session) { + try { + let sessionInfo: any; + session = await page.evaluate( + () => { + return { + AccessToken: sessionInfo.AccessToken, + ApiGatewayUri: sessionInfo.ApiGatewayUri, + ApiGatewayVersion: sessionInfo.ApiGatewayVersion + }; + } + ); + } + catch (error) { + if (tries > 5) { + process.exit(ERROR_CODE.NO_SESSION_INFO); + } + + session = null; + tries++; + await page.waitFor(3000); + } + } + browser.close(); + + return session; +} diff --git a/src/Types.ts b/src/Types.ts index 5cd6793..e75a536 100644 --- a/src/Types.ts +++ b/src/Types.ts @@ -4,10 +4,13 @@ export type Session = { ApiGatewayVersion: string; } -export type Metadata = { + +export type Video = { date: string; - totalChunks: number; // Abstraction of FFmpeg timemark title: string; + outPath: string; + totalChunks: number; // Abstraction of FFmpeg timemark playbackUrl: string; - posterImage: string; -} \ No newline at end of file + posterImageUrl: string; + captionsUrl?: string +} diff --git a/src/Utils.ts b/src/Utils.ts index 6af12d3..e50d326 100644 --- a/src/Utils.ts +++ b/src/Utils.ts @@ -1,152 +1,193 @@ +import { ApiClient } from './ApiClient'; import { ERROR_CODE } from './Errors'; +import { logger } from './Logger'; +import { Session } from './Types'; +import { AxiosResponse } from 'axios'; import { execSync } from 'child_process'; -import colors from 'colors'; import fs from 'fs'; -import path from 'path'; -function sanitizeUrls(urls: string[]) { - // eslint-disable-next-line - const rex = new RegExp(/(?:https:\/\/)?.*\/video\/[a-z0-9]{8}-(?:[a-z0-9]{4}\-){3}[a-z0-9]{12}$/, 'i'); - const sanitized: string[] = []; - for (let i = 0, l = urls.length; i < l; ++i) { - let url = urls[i].split('?')[0]; +async function extractGuids(url: string, client: ApiClient): Promise | null> { - if (!rex.test(url)) { - if (url !== '') { - console.warn(colors.yellow('Invalid URL at line ' + (i + 1) + ', skip..')); - } + const videoRegex = new RegExp(/https:\/\/.*\/video\/(\w{8}-(?:\w{4}-){3}\w{12})/); + const groupRegex = new RegExp(/https:\/\/.*\/group\/(\w{8}-(?:\w{4}-){3}\w{12})/); + 
const videoMatch: RegExpExecArray | null = videoRegex.exec(url); + const groupMatch: RegExpExecArray | null = groupRegex.exec(url); + + if (videoMatch) { + return [videoMatch[1]]; + } + else if (groupMatch) { + const videoNumber: number = await client.callApi(`groups/${groupMatch[1]}`, 'get') + .then((response: AxiosResponse | undefined) => response?.data.metrics.videos); + + let result: Array = await client.callApi(`groups/${groupMatch[1]}/videos?$top=${videoNumber}&$orderby=publishedDate asc`, 'get') + .then((response: AxiosResponse | undefined) => response?.data.value.map((item: any) => item.id)); + + return result; + } + + return null; +} + + +/** + * Parse the list of url given by the user via console input. + * They can either be video urls or group urls, in which case the guids + * will be added from oldest to newest. + * + * @param {Array} urlList list of link to parse + * @param {string} defaultOutDir the directry used to save the videos + * @param {Session} session used to call the API to get the GUIDs from group links + * + * @returns Array of 2 elements, 1st one being the GUIDs array, 2nd one the output directories array + */ +export async function parseCLIinput(urlList: Array, defaultOutDir: string, + session: Session): Promise>> { + + const apiClient: ApiClient = ApiClient.getInstance(session); + let guidList: Array = []; + + for (const url of urlList) { + const guids: Array | null = await extractGuids(url, apiClient); + + if (guids) { + guidList.push(...guids); + } + else { + logger.warn(`Invalid url '${url}', skipping..`); + } + } + + const outDirList: Array = Array(guidList.length).fill(defaultOutDir); + + return [guidList, outDirList]; +} + + +/** + * Parse the input text file. + * The urls in the file can either be video urls or group urls, in which case the guids + * will be added from oldest to newest. 
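+ * Option lines are indented below the URL they refer to, for example `-dir="videos/lessons/week1"`
+ * (with the current parseOption() regex the value must be wrapped in quotes); any other
+ * non-empty line is treated as a video or group URL.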
+ * + * @param {string} inputFile path to the text file + * @param {string} defaultOutDir the default/fallback directory used to save the videos + * @param {Session} session used to call the API to get the GUIDs from group links + * + * @returns Array of 2 elements, 1st one being the GUIDs array, 2nd one the output directories array + */ +export async function parseInputFile(inputFile: string, defaultOutDir: string, + session: Session): Promise>> { + // rawContent is a list of each line of the file + const rawContent: Array = fs.readFileSync(inputFile).toString() + .split(/\r?\n/); + const apiClient: ApiClient = ApiClient.getInstance(session); + + let guidList: Array = []; + let outDirList: Array = []; + // if the last line was an url set this + let foundUrl = false; + + for (let i = 0; i < rawContent.length; i++) { + const line: string = rawContent[i]; + + // filter out lines with no content + if (!line.match(/\S/)) { + logger.warn(`Line ${i + 1} is empty, skipping..`); continue; } + // parse if line is option + else if (line.includes('-dir')) { + if (foundUrl) { + let outDir: string | null = parseOption('-dir', line); - if (url.substring(0, 8) !== 'https://') { - url = 'https://' + url; - } + if (outDir && checkOutDir(outDir)) { + outDirList.push(...Array(guidList.length - outDirList.length) + .fill(outDir)); + } + else { + outDirList.push(...Array(guidList.length - outDirList.length) + .fill(defaultOutDir)); + } - sanitized.push(url); - } - - if (!sanitized.length) { - process.exit(ERROR_CODE.INVALID_INPUT_URLS); - } - - return sanitized; -} - -function sanitizeOutDirsList(dirsList: string[]) { - const sanitized: string[] = []; - - dirsList.forEach(dir => { - if (dir !== '') { - sanitized.push(dir); - } - }); - - return sanitized; -} - -function readFileToArray(path: string) { - return fs.readFileSync(path).toString('utf-8').split(/[\r\n]/); -} - -export async function forEachAsync(array: any, callback: any) { - for (let i = 0, l = array.length; i < l; ++i) { - await callback(array[i], i, array); - } -} - -export function parseVideoUrls(videoUrls: any) { - let input = videoUrls[0] as string; - const isPath = input.substring(input.length - 4) === '.txt'; - let urls: string[]; - - if (isPath) { - urls = readFileToArray(input); - } - else { - urls = videoUrls as string[]; - } - - return sanitizeUrls(urls); -} - -export function getOutputDirectoriesList(outDirArg: string) { - const isList = outDirArg.substring(outDirArg.length - 4) === '.txt'; - let dirsList: string[]; - - if (isList) { - dirsList = sanitizeOutDirsList(readFileToArray(outDirArg)); - } - else { - dirsList = [outDirArg]; - } - - return dirsList; -} - -export function makeOutputDirectories(dirsList: string[]) { - dirsList.forEach(dir => { - if (!fs.existsSync(dir)) { - console.info(colors.yellow('Creating output directory:')); - console.info(colors.green(dir) + '\n'); - - try { - fs.mkdirSync(dir, { recursive: true }); + foundUrl = false; + continue; } - catch (e) { - process.exit(ERROR_CODE.INVALID_OUTPUT_DIR); + else { + logger.warn(`Found options without preceding url at line ${i + 1}, skipping..`); + continue; } } - }); -} -export function checkOutDirsUrlsMismatch(dirsList: string[], urlsList: string[]) { - const dirsListL = dirsList.length; - const urlsListL = urlsList.length; + /* now line is not empty nor an option line. 
+ If foundUrl is still true last line didn't have a directory option + so we stil need to add the default outDir to outDirList to */ + if (foundUrl) { + outDirList.push(...Array(guidList.length - outDirList.length) + .fill(defaultOutDir)); + } - // single out dir, treat this as the chosen one for all - if (dirsListL == 1) { - return; + const guids: Array | null = await extractGuids(line, apiClient); + + if (guids) { + guidList.push(...guids); + foundUrl = true; + } + else { + logger.warn(`Invalid url at line ${i + 1}, skipping..`); + } } - else if (dirsListL != urlsListL) { - process.exit(ERROR_CODE.OUTDIRS_URLS_MISMATCH); + + return [guidList, outDirList]; +} + + +// This leaves us the option to add more options (badum tss) _Luca +function parseOption(optionSyntax: string, item: string): string | null { + const match: RegExpMatchArray | null = item.match( + RegExp(`^\\s*${optionSyntax}\\s?=\\s?['"](.*)['"]`) + ); + + return match ? match[1] : null; +} + + +export function checkOutDir(directory: string): boolean { + if (!fs.existsSync(directory)) { + try { + fs.mkdirSync(directory); + logger.info('\nCreated directory: '.yellow + directory); + } + catch (e) { + logger.warn('Cannot create directory: '+ directory + + '\nFalling back to default directory..'); + + return false; + } } + + return true; } -export function sleep(ms: number) { - return new Promise(resolve => setTimeout(resolve, ms)); -} -export function checkRequirements() { +export function checkRequirements(): void { try { - const ffmpegVer = execSync('ffmpeg -version').toString().split('\n')[0]; - console.info(colors.green(`Using ${ffmpegVer}\n`)); - + const ffmpegVer: string = execSync('ffmpeg -version').toString().split('\n')[0]; + logger.info(`Using ${ffmpegVer}\n`); } catch (e) { process.exit(ERROR_CODE.MISSING_FFMPEG); } } -export function makeUniqueTitle(title: string, outDir: string, skip?: boolean, format?: string) { - let ntitle = title; - let k = 0; - while (!skip && fs.existsSync(outDir + path.sep + ntitle + '.' 
+ format)) { - ntitle = title + ' - ' + (++k).toString(); - } +export function ffmpegTimemarkToChunk(timemark: string): number { + const timeVals: Array = timemark.split(':'); + const hrs: number = parseInt(timeVals[0]); + const mins: number = parseInt(timeVals[1]); + const secs: number = parseInt(timeVals[2]); - return ntitle; -} - -export function ffmpegTimemarkToChunk(timemark: string) { - const timeVals: string[] = timemark.split(':'); - const hrs = parseInt(timeVals[0]); - const mins = parseInt(timeVals[1]); - const secs = parseInt(timeVals[2]); - const chunk = (hrs * 60) + mins + (secs / 60); - - return chunk; + return (hrs * 60) + mins + (secs / 60); } diff --git a/src/VideoUtils.ts b/src/VideoUtils.ts new file mode 100644 index 0000000..e2b0040 --- /dev/null +++ b/src/VideoUtils.ts @@ -0,0 +1,106 @@ +import { ApiClient } from './ApiClient'; +import { promptUser } from './CommandLineParser'; +import { logger } from './Logger'; +import { Video, Session } from './Types'; + +import { AxiosResponse } from 'axios'; +import fs from 'fs'; +import { parse } from 'iso8601-duration'; +import path from 'path'; +import sanitize from 'sanitize-filename'; + + +function publishedDateToString(date: string): string { + const dateJs: Date = new Date(date); + const day: string = dateJs.getDate().toString().padStart(2, '0'); + const month: string = (dateJs.getMonth() + 1).toString(10).padStart(2, '0'); + + return `${dateJs.getFullYear()}-${month}-${day}`; +} + + +function durationToTotalChunks(duration: string): number { + const durationObj: any = parse(duration); + const hrs: number = durationObj.hours ?? 0; + const mins: number = durationObj.minutes ?? 0; + const secs: number = Math.ceil(durationObj.seconds ?? 0); + + return (hrs * 60) + mins + (secs / 60); +} + + +export async function getVideoInfo(videoGuids: Array, session: Session, subtitles?: boolean): Promise> { + let metadata: Array