mirror of
https://github.com/Tzahi12345/YoutubeDL-Material.git
synced 2026-03-08 04:20:08 +03:00
Compare commits
1 Commits
codespaces
...
youtube-su
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5c28f8dd48 |
@@ -1,39 +0,0 @@
|
|||||||
// README at: https://github.com/devcontainers/templates/tree/main/src/javascript-node
|
|
||||||
{
|
|
||||||
"name": "Node.js",
|
|
||||||
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
|
|
||||||
"image": "mcr.microsoft.com/devcontainers/javascript-node:0-18-bullseye",
|
|
||||||
"features": {
|
|
||||||
"ghcr.io/devcontainers-contrib/features/jshint:2": {},
|
|
||||||
"ghcr.io/devcontainers-contrib/features/angular-cli:2": {},
|
|
||||||
"ghcr.io/devcontainers/features/github-cli:1": {}
|
|
||||||
},
|
|
||||||
|
|
||||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
|
||||||
"forwardPorts": [4200, 17442],
|
|
||||||
|
|
||||||
// Use 'postCreateCommand' to run commands after the container is created.
|
|
||||||
"postCreateCommand": "npm install && cd backend && npm install",
|
|
||||||
|
|
||||||
// Configure tool-specific properties.
|
|
||||||
"customizations": {
|
|
||||||
// Configure properties specific to VS Code.
|
|
||||||
"vscode": {
|
|
||||||
// Add the IDs of extensions you want installed when the container is created.
|
|
||||||
"extensions": [
|
|
||||||
"ms-python.python",
|
|
||||||
"Angular.ng-template",
|
|
||||||
"dbaeumer.vscode-eslint",
|
|
||||||
"waderyan.gitblame",
|
|
||||||
"42Crunch.vscode-openapi",
|
|
||||||
"christian-kohler.npm-intellisense",
|
|
||||||
"redhat.vscode-yaml",
|
|
||||||
"hbenl.vscode-mocha-test-adapter",
|
|
||||||
"DavidAnson.vscode-markdownlint"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
|
|
||||||
// "remoteUser": "root"
|
|
||||||
}
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
.git
|
|
||||||
db
|
|
||||||
appdata
|
|
||||||
audio
|
|
||||||
video
|
|
||||||
subscriptions
|
|
||||||
users
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
{
|
|
||||||
"env": {
|
|
||||||
"browser": true,
|
|
||||||
"es2021": true
|
|
||||||
},
|
|
||||||
"extends": [
|
|
||||||
"eslint:recommended",
|
|
||||||
"plugin:@typescript-eslint/recommended"
|
|
||||||
],
|
|
||||||
"parser": "@typescript-eslint/parser",
|
|
||||||
"parserOptions": {
|
|
||||||
"ecmaVersion": 12,
|
|
||||||
"sourceType": "module"
|
|
||||||
},
|
|
||||||
"plugins": [
|
|
||||||
"@typescript-eslint"
|
|
||||||
],
|
|
||||||
"rules": {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
38
.github/ISSUE_TEMPLATE/bug_report.md
vendored
38
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,38 +0,0 @@
|
|||||||
---
|
|
||||||
name: Bug report
|
|
||||||
about: Create a report to help us improve
|
|
||||||
title: "[BUG]"
|
|
||||||
labels: bug
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Describe the bug**
|
|
||||||
A clear and concise description of what the bug is.
|
|
||||||
|
|
||||||
**To Reproduce**
|
|
||||||
Steps to reproduce the behavior:
|
|
||||||
1. Go to '...'
|
|
||||||
2. Click on '....'
|
|
||||||
3. Scroll down to '....'
|
|
||||||
4. See error
|
|
||||||
|
|
||||||
**Expected behavior**
|
|
||||||
A clear and concise description of what you expected to happen.
|
|
||||||
|
|
||||||
**Screenshots**
|
|
||||||
If applicable, add screenshots to help explain your problem.
|
|
||||||
|
|
||||||
**Environment**
|
|
||||||
- YoutubeDL-Material version
|
|
||||||
- Docker tag: <tag> (optional)
|
|
||||||
|
|
||||||
Ideally you'd copy the info as presented on the "About" dialogue
|
|
||||||
in YoutubeDL-Material.
|
|
||||||
(for that, click on the three dots on the top right and then
|
|
||||||
check "installation details". On later versions of YoutubeDL-
|
|
||||||
Material you will find pretty much all the crucial information
|
|
||||||
here that we need in most cases!)
|
|
||||||
|
|
||||||
**Additional context**
|
|
||||||
Add any other context about the problem here. For example, a YouTube link.
|
|
||||||
17
.github/ISSUE_TEMPLATE/feature_request.md
vendored
17
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -1,17 +0,0 @@
|
|||||||
---
|
|
||||||
name: Feature request
|
|
||||||
about: Suggest an idea for this project
|
|
||||||
title: "[FEATURE]"
|
|
||||||
labels: enhancement
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Is your feature request related to a problem? Please describe.**
|
|
||||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
|
||||||
|
|
||||||
**Describe the solution you'd like**
|
|
||||||
A clear and concise description of what you want to happen.
|
|
||||||
|
|
||||||
**Additional context**
|
|
||||||
Add any other context or screenshots about the feature request here.
|
|
||||||
18
.github/dependabot.yaml
vendored
18
.github/dependabot.yaml
vendored
@@ -1,18 +0,0 @@
|
|||||||
version: 2
|
|
||||||
updates:
|
|
||||||
- package-ecosystem: "docker"
|
|
||||||
directory: "/"
|
|
||||||
schedule:
|
|
||||||
interval: "daily"
|
|
||||||
- package-ecosystem: "github-actions"
|
|
||||||
directory: "/.github/workflows"
|
|
||||||
schedule:
|
|
||||||
interval: "daily"
|
|
||||||
- package-ecosystem: "npm"
|
|
||||||
directory: "/"
|
|
||||||
schedule:
|
|
||||||
interval: "daily"
|
|
||||||
- package-ecosystem: "npm"
|
|
||||||
directory: "/backend/"
|
|
||||||
schedule:
|
|
||||||
interval: "daily"
|
|
||||||
41
.github/workflows/build.yml
vendored
41
.github/workflows/build.yml
vendored
@@ -13,33 +13,17 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: checkout code
|
- name: checkout code
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v2
|
||||||
- name: setup node
|
- name: setup node
|
||||||
uses: actions/setup-node@v3
|
uses: actions/setup-node@v1
|
||||||
with:
|
|
||||||
node-version: '16'
|
|
||||||
cache: 'npm'
|
|
||||||
- name: install dependencies
|
- name: install dependencies
|
||||||
run: |
|
run: |
|
||||||
npm install
|
npm install
|
||||||
cd backend
|
cd backend
|
||||||
npm install
|
npm install
|
||||||
sudo npm install -g @angular/cli
|
sudo npm install -g @angular/cli
|
||||||
- name: Set hash
|
|
||||||
id: vars
|
|
||||||
run: echo "::set-output name=sha_short::$(git rev-parse --short HEAD)"
|
|
||||||
- name: Get current date
|
|
||||||
id: date
|
|
||||||
run: echo "::set-output name=date::$(date +'%Y-%m-%d')"
|
|
||||||
- name: create-json
|
|
||||||
id: create-json
|
|
||||||
uses: jsdaniell/create-json@v1.2.2
|
|
||||||
with:
|
|
||||||
name: "version.json"
|
|
||||||
json: '{"type": "autobuild", "tag": "N/A", "commit": "${{ steps.vars.outputs.sha_short }}", "date": "${{ steps.date.outputs.date }}"}'
|
|
||||||
dir: 'backend/'
|
|
||||||
- name: build
|
- name: build
|
||||||
run: npm run build
|
run: ng build --prod
|
||||||
- name: prepare artifact upload
|
- name: prepare artifact upload
|
||||||
shell: pwsh
|
shell: pwsh
|
||||||
run: |
|
run: |
|
||||||
@@ -51,11 +35,11 @@ jobs:
|
|||||||
Copy-Item -Path ./backend/public -Recurse -Destination ./build/youtubedl-material
|
Copy-Item -Path ./backend/public -Recurse -Destination ./build/youtubedl-material
|
||||||
Copy-Item -Path ./backend/subscriptions -Recurse -Destination ./build/youtubedl-material
|
Copy-Item -Path ./backend/subscriptions -Recurse -Destination ./build/youtubedl-material
|
||||||
Copy-Item -Path ./backend/video -Recurse -Destination ./build/youtubedl-material
|
Copy-Item -Path ./backend/video -Recurse -Destination ./build/youtubedl-material
|
||||||
New-Item -Path ./build/youtubedl-material -Name users -ItemType Directory
|
New-Item -Path ./build/youtubedl-material -Name users
|
||||||
Copy-Item -Path ./backend/*.js -Destination ./build/youtubedl-material
|
Copy-Item -Path ./backend/*.js -Destination ./build/youtubedl-material
|
||||||
Copy-Item -Path ./backend/*.json -Destination ./build/youtubedl-material
|
Copy-Item -Path ./backend/*.json -Destination ./build/youtubedl-material
|
||||||
- name: upload build artifact
|
- name: upload build artifact
|
||||||
uses: actions/upload-artifact@v3
|
uses: actions/upload-artifact@v1
|
||||||
with:
|
with:
|
||||||
name: youtubedl-material
|
name: youtubedl-material
|
||||||
path: build
|
path: build
|
||||||
@@ -65,7 +49,7 @@ jobs:
|
|||||||
if: contains(github.ref, '/tags/v')
|
if: contains(github.ref, '/tags/v')
|
||||||
steps:
|
steps:
|
||||||
- name: checkout code
|
- name: checkout code
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v2
|
||||||
- name: create release
|
- name: create release
|
||||||
id: create_release
|
id: create_release
|
||||||
uses: actions/create-release@v1
|
uses: actions/create-release@v1
|
||||||
@@ -81,24 +65,21 @@ jobs:
|
|||||||
draft: true
|
draft: true
|
||||||
prerelease: false
|
prerelease: false
|
||||||
- name: download build artifact
|
- name: download build artifact
|
||||||
uses: actions/download-artifact@v3
|
uses: actions/download-artifact@v1
|
||||||
with:
|
with:
|
||||||
name: youtubedl-material
|
name: youtubedl-material
|
||||||
path: ${{runner.temp}}/youtubedl-material
|
path: ${{runner.temp}}/youtubedl-material
|
||||||
- name: extract tag name
|
|
||||||
id: tag_name
|
|
||||||
run: echo ::set-output name=tag_name::${GITHUB_REF#refs/tags/}
|
|
||||||
- name: prepare release asset
|
- name: prepare release asset
|
||||||
shell: pwsh
|
shell: pwsh
|
||||||
run: Compress-Archive -Path ${{runner.temp}}/youtubedl-material -DestinationPath youtubedl-material-${{ steps.tag_name.outputs.tag_name }}.zip
|
run: Compress-Archive -Path ${{runner.temp}}/youtubedl-material -DestinationPath youtubedl-material-${{ github.ref }}.zip
|
||||||
- name: upload release asset
|
- name: upload build asset
|
||||||
uses: actions/upload-release-asset@v1
|
uses: actions/upload-release-asset@v1
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
with:
|
with:
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||||
asset_path: ./youtubedl-material-${{ steps.tag_name.outputs.tag_name }}.zip
|
asset_path: ./youtubedl-material-${{ github.ref }}.zip
|
||||||
asset_name: youtubedl-material-${{ steps.tag_name.outputs.tag_name }}.zip
|
asset_name: youtubedl-material-${{ github.ref }}.zip
|
||||||
asset_content_type: application/zip
|
asset_content_type: application/zip
|
||||||
- name: upload docker-compose asset
|
- name: upload docker-compose asset
|
||||||
uses: actions/upload-release-asset@v1
|
uses: actions/upload-release-asset@v1
|
||||||
|
|||||||
38
.github/workflows/close-issue-if-noresponse.yml
vendored
38
.github/workflows/close-issue-if-noresponse.yml
vendored
@@ -1,38 +0,0 @@
|
|||||||
name: No Response
|
|
||||||
|
|
||||||
# Both `issue_comment` and `scheduled` event types are required for this Action
|
|
||||||
# to work properly.
|
|
||||||
on:
|
|
||||||
issue_comment:
|
|
||||||
types: [created]
|
|
||||||
schedule:
|
|
||||||
# Schedule for five minutes after the hour, every hour
|
|
||||||
- cron: '5 * * * *'
|
|
||||||
|
|
||||||
# By specifying the access of one of the scopes, all of those that are not
|
|
||||||
# specified are set to 'none'.
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
noResponse:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: ${{ github.repository == 'Tzahi12345/YoutubeDL-Material' }}
|
|
||||||
steps:
|
|
||||||
- uses: lee-dohm/no-response@v0.5.0
|
|
||||||
with:
|
|
||||||
token: ${{ github.token }}
|
|
||||||
# Comment to post when closing an Issue for lack of response. Set to `false` to disable
|
|
||||||
closeComment: >
|
|
||||||
This issue has been automatically closed because there has been no response
|
|
||||||
to our request for more information from the original author. With only the
|
|
||||||
information that is currently in the issue, we don't have enough information
|
|
||||||
to take action. Please reach out if you have or find the answers we need so
|
|
||||||
that we can investigate further. We will re-open this issue if you provide us
|
|
||||||
with the requested information with a comment under this issue.
|
|
||||||
Thank you for your understanding and for trying to help make this application
|
|
||||||
a better one!
|
|
||||||
# Number of days of inactivity before an issue is closed for lack of response.
|
|
||||||
daysUntilClose: 21
|
|
||||||
# Label requiring a response.
|
|
||||||
responseRequiredLabel: "💬 response-needed"
|
|
||||||
8
.github/workflows/codeql-analysis.yml
vendored
8
.github/workflows/codeql-analysis.yml
vendored
@@ -30,7 +30,7 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v2
|
||||||
with:
|
with:
|
||||||
# We must fetch at least the immediate parents so that if this is
|
# We must fetch at least the immediate parents so that if this is
|
||||||
# a pull request then we can checkout the head.
|
# a pull request then we can checkout the head.
|
||||||
@@ -43,7 +43,7 @@ jobs:
|
|||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v2
|
uses: github/codeql-action/init@v1
|
||||||
with:
|
with:
|
||||||
languages: ${{ matrix.language }}
|
languages: ${{ matrix.language }}
|
||||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
@@ -54,7 +54,7 @@ jobs:
|
|||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||||
# If this step fails, then you should remove it and run the build manually (see below)
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
- name: Autobuild
|
- name: Autobuild
|
||||||
uses: github/codeql-action/autobuild@v2
|
uses: github/codeql-action/autobuild@v1
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
# 📚 https://git.io/JvXDl
|
# 📚 https://git.io/JvXDl
|
||||||
@@ -68,4 +68,4 @@ jobs:
|
|||||||
# make release
|
# make release
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v2
|
uses: github/codeql-action/analyze@v1
|
||||||
|
|||||||
38
.github/workflows/docker-pr.yml
vendored
38
.github/workflows/docker-pr.yml
vendored
@@ -1,38 +0,0 @@
|
|||||||
name: docker-pr
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
branches: [master]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-and-push:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: checkout code
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
- name: Set hash
|
|
||||||
id: vars
|
|
||||||
run: echo "::set-output name=sha_short::$(git rev-parse --short HEAD)"
|
|
||||||
- name: Get current date
|
|
||||||
id: date
|
|
||||||
run: echo "::set-output name=date::$(date +'%Y-%m-%d')"
|
|
||||||
- name: create-json
|
|
||||||
id: create-json
|
|
||||||
uses: jsdaniell/create-json@v1.2.2
|
|
||||||
with:
|
|
||||||
name: "version.json"
|
|
||||||
json: '{"type": "docker", "tag": "nightly", "commit": "${{ steps.vars.outputs.sha_short }}", "date": "${{ steps.date.outputs.date }}"}'
|
|
||||||
dir: 'backend/'
|
|
||||||
- name: setup platform emulator
|
|
||||||
uses: docker/setup-qemu-action@v2
|
|
||||||
- name: setup multi-arch docker build
|
|
||||||
uses: docker/setup-buildx-action@v2
|
|
||||||
- name: build & push images
|
|
||||||
uses: docker/build-push-action@v4
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
file: ./Dockerfile
|
|
||||||
platforms: linux/amd64,linux/arm/v7,linux/arm64/v8
|
|
||||||
#platforms: linux/amd64
|
|
||||||
push: false
|
|
||||||
tags: tzahi12345/youtubedl-material:nightly-pr
|
|
||||||
86
.github/workflows/docker-release.yml
vendored
86
.github/workflows/docker-release.yml
vendored
@@ -1,86 +0,0 @@
|
|||||||
name: docker-release
|
|
||||||
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
tags:
|
|
||||||
description: 'Docker tags'
|
|
||||||
required: true
|
|
||||||
release:
|
|
||||||
types: [published]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-and-push:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: checkout code
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Set hash
|
|
||||||
id: vars
|
|
||||||
run: echo "::set-output name=sha_short::$(git rev-parse --short HEAD)"
|
|
||||||
|
|
||||||
- name: Get current date
|
|
||||||
id: date
|
|
||||||
run: echo "::set-output name=date::$(date +'%Y-%m-%d')"
|
|
||||||
|
|
||||||
- name: create-json
|
|
||||||
id: create-json
|
|
||||||
uses: jsdaniell/create-json@v1.2.2
|
|
||||||
with:
|
|
||||||
name: "version.json"
|
|
||||||
json: '{"type": "docker", "tag": "latest", "commit": "${{ steps.vars.outputs.sha_short }}", "date": "${{ steps.date.outputs.date }}"}'
|
|
||||||
dir: 'backend/'
|
|
||||||
|
|
||||||
- name: Set image tag
|
|
||||||
id: tags
|
|
||||||
run: |
|
|
||||||
if [ "${{ github.event.inputs.tags }}" != "" ]; then
|
|
||||||
echo "::set-output name=tags::${{ github.event.inputs.tags }}"
|
|
||||||
elif [ ${{ github.event.action }} == "release" ]; then
|
|
||||||
echo "::set-output name=tags::${{ github.event.release.tag_name }}"
|
|
||||||
else
|
|
||||||
echo "Unknown workflow trigger: ${{ github.event.action }}! Cannot determine default tag."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
- name: Generate Docker image metadata
|
|
||||||
id: docker-meta
|
|
||||||
uses: docker/metadata-action@v4
|
|
||||||
with:
|
|
||||||
images: |
|
|
||||||
${{ secrets.DOCKERHUB_USERNAME }}/${{ secrets.DOCKERHUB_REPO }}
|
|
||||||
ghcr.io/${{ github.repository_owner }}/${{ secrets.DOCKERHUB_REPO }}
|
|
||||||
tags: |
|
|
||||||
type=raw,value=${{ steps.tags.outputs.tags }}
|
|
||||||
type=raw,value=latest
|
|
||||||
|
|
||||||
- name: setup platform emulator
|
|
||||||
uses: docker/setup-qemu-action@v2
|
|
||||||
|
|
||||||
- name: setup multi-arch docker build
|
|
||||||
uses: docker/setup-buildx-action@v2
|
|
||||||
|
|
||||||
- name: Login to DockerHub
|
|
||||||
uses: docker/login-action@v2
|
|
||||||
with:
|
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Login to GitHub Container Registry
|
|
||||||
uses: docker/login-action@v2
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.repository_owner }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: build & push images
|
|
||||||
uses: docker/build-push-action@v4
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
file: ./Dockerfile
|
|
||||||
platforms: linux/amd64,linux/arm64/v8
|
|
||||||
push: true
|
|
||||||
tags: ${{ steps.docker-meta.outputs.tags }}
|
|
||||||
labels: ${{ steps.docker-meta.outputs.labels }}
|
|
||||||
71
.github/workflows/docker.yml
vendored
71
.github/workflows/docker.yml
vendored
@@ -3,84 +3,27 @@ name: docker
|
|||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
branches: [master]
|
branches: [master]
|
||||||
paths-ignore:
|
|
||||||
- '.github/**'
|
|
||||||
- '.vscode/**'
|
|
||||||
- 'chrome-extension/**'
|
|
||||||
- 'releases/**'
|
|
||||||
- '**/**.md'
|
|
||||||
- '**.crx'
|
|
||||||
- '**.pem'
|
|
||||||
- '.dockerignore'
|
|
||||||
- '.gitignore'
|
|
||||||
schedule:
|
|
||||||
- cron: '34 4 * * 2'
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-and-push:
|
build-and-push:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: checkout code
|
- name: checkout code
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v2
|
||||||
|
|
||||||
- name: Set hash
|
|
||||||
id: vars
|
|
||||||
run: echo "::set-output name=sha_short::$(git rev-parse --short HEAD)"
|
|
||||||
|
|
||||||
- name: Get current date
|
|
||||||
id: date
|
|
||||||
run: echo "::set-output name=date::$(date +'%Y-%m-%d')"
|
|
||||||
|
|
||||||
- name: create-json
|
|
||||||
id: create-json
|
|
||||||
uses: jsdaniell/create-json@v1.2.2
|
|
||||||
with:
|
|
||||||
name: "version.json"
|
|
||||||
json: '{"type": "docker", "tag": "${{secrets.DOCKERHUB_MASTER_TAG}}", "commit": "${{ steps.vars.outputs.sha_short }}", "date": "${{ steps.date.outputs.date }}"}'
|
|
||||||
dir: 'backend/'
|
|
||||||
|
|
||||||
- name: setup platform emulator
|
- name: setup platform emulator
|
||||||
uses: docker/setup-qemu-action@v2
|
uses: docker/setup-qemu-action@v1
|
||||||
|
|
||||||
- name: setup multi-arch docker build
|
- name: setup multi-arch docker build
|
||||||
uses: docker/setup-buildx-action@v2
|
uses: docker/setup-buildx-action@v1
|
||||||
|
|
||||||
- name: Generate Docker image metadata
|
|
||||||
id: docker-meta
|
|
||||||
uses: docker/metadata-action@v4
|
|
||||||
# Defaults:
|
|
||||||
# DOCKERHUB_USERNAME : tzahi12345
|
|
||||||
# DOCKERHUB_REPO : youtubedl-material
|
|
||||||
# DOCKERHUB_MASTER_TAG: nightly
|
|
||||||
with:
|
|
||||||
images: |
|
|
||||||
${{ secrets.DOCKERHUB_USERNAME }}/${{ secrets.DOCKERHUB_REPO }}
|
|
||||||
ghcr.io/${{ github.repository_owner }}/${{ secrets.DOCKERHUB_REPO }}
|
|
||||||
tags: |
|
|
||||||
type=raw,${{secrets.DOCKERHUB_MASTER_TAG}}-{{ date 'YYYY-MM-DD' }}
|
|
||||||
type=raw,${{secrets.DOCKERHUB_MASTER_TAG}}
|
|
||||||
type=sha,prefix=sha-,format=short
|
|
||||||
|
|
||||||
- name: Login to DockerHub
|
- name: Login to DockerHub
|
||||||
uses: docker/login-action@v2
|
uses: docker/login-action@v1
|
||||||
with:
|
with:
|
||||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||||
|
|
||||||
- name: Login to GitHub Container Registry
|
|
||||||
uses: docker/login-action@v2
|
|
||||||
with:
|
|
||||||
registry: ghcr.io
|
|
||||||
username: ${{ github.repository_owner }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: build & push images
|
- name: build & push images
|
||||||
uses: docker/build-push-action@v4
|
uses: docker/build-push-action@v2
|
||||||
with:
|
with:
|
||||||
context: .
|
context: .
|
||||||
file: ./Dockerfile
|
file: ./Dockerfile
|
||||||
platforms: linux/amd64,linux/arm64/v8,linux/arm/v7
|
platforms: linux/amd64,linux/arm,linux/arm64/v8
|
||||||
push: true
|
push: true
|
||||||
tags: ${{ steps.docker-meta.outputs.tags }}
|
tags: tzahi12345/youtubedl-material:nightly
|
||||||
labels: ${{ steps.docker-meta.outputs.labels }}
|
|
||||||
|
|||||||
40
.github/workflows/mocha.yml
vendored
40
.github/workflows/mocha.yml
vendored
@@ -1,40 +0,0 @@
|
|||||||
name: Tests
|
|
||||||
'on':
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- master
|
|
||||||
pull_request:
|
|
||||||
types:
|
|
||||||
- opened
|
|
||||||
- synchronize
|
|
||||||
- reopened
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test:
|
|
||||||
name: 'Backend - mocha'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
matrix:
|
|
||||||
node:
|
|
||||||
- 16
|
|
||||||
steps:
|
|
||||||
- uses: actions/setup-node@v3
|
|
||||||
with:
|
|
||||||
node-version: '${{ matrix.node }}'
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: 'Cache node_modules'
|
|
||||||
uses: actions/cache@v3
|
|
||||||
with:
|
|
||||||
path: ~/.npm
|
|
||||||
key: ${{ runner.os }}-node-v${{ matrix.node }}-${{ hashFiles('**/package-lock.json') }}
|
|
||||||
restore-keys: |
|
|
||||||
${{ runner.os }}-node-v${{ matrix.node }}-
|
|
||||||
working-directory: ./backend
|
|
||||||
- uses: FedericoCarboni/setup-ffmpeg@v2
|
|
||||||
id: setup-ffmpeg
|
|
||||||
- name: Install Dependencies
|
|
||||||
run: npm install
|
|
||||||
working-directory: ./backend
|
|
||||||
- name: Run All Node.js Tests
|
|
||||||
run: npm run test
|
|
||||||
working-directory: ./backend
|
|
||||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -25,7 +25,6 @@
|
|||||||
!.vscode/extensions.json
|
!.vscode/extensions.json
|
||||||
|
|
||||||
# misc
|
# misc
|
||||||
/.angular/cache
|
|
||||||
/.sass-cache
|
/.sass-cache
|
||||||
/connect.lock
|
/connect.lock
|
||||||
/coverage
|
/coverage
|
||||||
@@ -67,12 +66,3 @@ backend/appdata/users.json
|
|||||||
backend/users/*
|
backend/users/*
|
||||||
backend/appdata/cookies.txt
|
backend/appdata/cookies.txt
|
||||||
backend/public
|
backend/public
|
||||||
src/assets/i18n/*.json
|
|
||||||
|
|
||||||
# User Files
|
|
||||||
db/
|
|
||||||
appdata/
|
|
||||||
audio/
|
|
||||||
video/
|
|
||||||
subscriptions/
|
|
||||||
users/
|
|
||||||
11
.vscode/extensions.json
vendored
11
.vscode/extensions.json
vendored
@@ -1,11 +0,0 @@
|
|||||||
{
|
|
||||||
"recommendations": [
|
|
||||||
"angular.ng-template",
|
|
||||||
"dbaeumer.vscode-eslint",
|
|
||||||
"waderyan.gitblame",
|
|
||||||
"42crunch.vscode-openapi",
|
|
||||||
"redhat.vscode-yaml",
|
|
||||||
"christian-kohler.npm-intellisense",
|
|
||||||
"hbenl.vscode-mocha-test-adapter"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
14
.vscode/launch.json
vendored
14
.vscode/launch.json
vendored
@@ -4,20 +4,6 @@
|
|||||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||||
"version": "0.2.0",
|
"version": "0.2.0",
|
||||||
"configurations": [
|
"configurations": [
|
||||||
{
|
|
||||||
"name": "Dev: Debug Backend",
|
|
||||||
"request": "launch",
|
|
||||||
"runtimeArgs": [
|
|
||||||
"run-script",
|
|
||||||
"debug"
|
|
||||||
],
|
|
||||||
"runtimeExecutable": "npm",
|
|
||||||
"skipFiles": [
|
|
||||||
"<node_internals>/**"
|
|
||||||
],
|
|
||||||
"type": "node",
|
|
||||||
"cwd": "${workspaceFolder}/backend"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"type": "node",
|
"type": "node",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
|
|||||||
8
.vscode/settings.json
vendored
8
.vscode/settings.json
vendored
@@ -1,8 +0,0 @@
|
|||||||
{
|
|
||||||
"mochaExplorer.files": "backend/test/**/*.js",
|
|
||||||
"mochaExplorer.cwd": "backend",
|
|
||||||
"mochaExplorer.globImplementation": "vscode",
|
|
||||||
"mochaExplorer.env": {
|
|
||||||
// "YTDL_MODE": "debug"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
60
.vscode/tasks.json
vendored
60
.vscode/tasks.json
vendored
@@ -1,60 +0,0 @@
|
|||||||
{
|
|
||||||
"version": "2.0.0",
|
|
||||||
"windows": {
|
|
||||||
"options": {
|
|
||||||
"shell": {
|
|
||||||
"executable": "cmd.exe",
|
|
||||||
"args": [
|
|
||||||
"/d", "/c"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"tasks": [
|
|
||||||
{
|
|
||||||
"type": "npm",
|
|
||||||
"script": "start",
|
|
||||||
"problemMatcher": [],
|
|
||||||
"label": "Dev: start frontend",
|
|
||||||
"detail": "ng serve",
|
|
||||||
"presentation": {
|
|
||||||
"echo": true,
|
|
||||||
"reveal": "always",
|
|
||||||
"focus": true,
|
|
||||||
"panel": "shared",
|
|
||||||
"showReuseMessage": true,
|
|
||||||
"clear": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"label": "Dev: start backend",
|
|
||||||
"type": "shell",
|
|
||||||
"command": "node app.js",
|
|
||||||
"options": {
|
|
||||||
"cwd": "./backend",
|
|
||||||
"env": {
|
|
||||||
"YTDL_MODE": "debug"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"presentation": {
|
|
||||||
"echo": true,
|
|
||||||
"reveal": "always",
|
|
||||||
"focus": true,
|
|
||||||
"panel": "shared",
|
|
||||||
"showReuseMessage": true,
|
|
||||||
"clear": false
|
|
||||||
},
|
|
||||||
"problemMatcher": [],
|
|
||||||
"dependsOn": ["Dev: post-build"]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"label": "Dev: post-build",
|
|
||||||
"type": "shell",
|
|
||||||
"command": "node src/postbuild.mjs"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"label": "Dev: run all",
|
|
||||||
"dependsOn": ["Dev: start backend", "Dev: start frontend"]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -1,38 +0,0 @@
|
|||||||
<h1>Development</h1>
|
|
||||||
|
|
||||||
- [First time...](#first-time)
|
|
||||||
- [Setup](#setup)
|
|
||||||
- [Startup](#startup)
|
|
||||||
- [Debugging the backend (VSC)](#debugging-the-backend-vsc)
|
|
||||||
- [Deploy changes](#deploy-changes)
|
|
||||||
- [Frontend](#frontend)
|
|
||||||
- [Backend](#backend)
|
|
||||||
|
|
||||||
# First time...
|
|
||||||
|
|
||||||
## Setup
|
|
||||||
Checkout the repository and navigate to the `youtubedl-material` directory.
|
|
||||||
```bash
|
|
||||||
vim ./src/assets/default.json # Edit settings for your local environment. This config file is just the dev config file, if YTDL_MODE is not set to "debug", then ./backend/appdata/default.json will be used
|
|
||||||
npm -g install pm2 # Install pm2
|
|
||||||
npm install # Install dependencies for the frontend
|
|
||||||
cd ./backend
|
|
||||||
npm install # Install dependencies for the backend
|
|
||||||
cd ..
|
|
||||||
npm run build # Build the frontend
|
|
||||||
```
|
|
||||||
This step have to be done only once.
|
|
||||||
|
|
||||||
## Startup
|
|
||||||
Navigate to the `youtubedl-material/backend` directory and run `npm start`.
|
|
||||||
|
|
||||||
# Debugging the backend (VSC)
|
|
||||||
Open the `youtubedl-material` directory in Visual Studio Code and run the launch configuration `Dev: Debug Backend`.
|
|
||||||
|
|
||||||
# Deploy changes
|
|
||||||
|
|
||||||
## Frontend
|
|
||||||
Navigate to the `youtubedl-material` directory and run `npm run build`. Restart the backend.
|
|
||||||
|
|
||||||
## Backend
|
|
||||||
Simply restart the backend.
|
|
||||||
114
Dockerfile
114
Dockerfile
@@ -1,97 +1,43 @@
|
|||||||
# Fetching our utils
|
FROM alpine:3.12 as frontend
|
||||||
FROM ubuntu:22.04 AS utils
|
|
||||||
ENV DEBIAN_FRONTEND=noninteractive
|
|
||||||
# Use script due local build compability
|
|
||||||
COPY docker-utils/*.sh .
|
|
||||||
RUN chmod +x *.sh
|
|
||||||
RUN sh ./ffmpeg-fetch.sh
|
|
||||||
RUN sh ./fetch-twitchdownloader.sh
|
|
||||||
|
|
||||||
|
RUN apk add --no-cache \
|
||||||
|
npm
|
||||||
|
|
||||||
# Create our Ubuntu 22.04 with node 16.14.2 (that specific version is required as per: https://stackoverflow.com/a/72855258/8088021)
|
|
||||||
# Go to 20.04
|
|
||||||
FROM ubuntu:22.04 AS base
|
|
||||||
ARG TARGETPLATFORM
|
|
||||||
ARG DEBIAN_FRONTEND=noninteractive
|
|
||||||
ENV UID=1000
|
|
||||||
ENV GID=1000
|
|
||||||
ENV USER=youtube
|
|
||||||
ENV NO_UPDATE_NOTIFIER=true
|
|
||||||
ENV PM2_HOME=/app/pm2
|
|
||||||
ENV ALLOW_CONFIG_MUTATIONS=true
|
|
||||||
ENV npm_config_cache=/app/.npm
|
|
||||||
|
|
||||||
# Use NVM to get specific node version
|
|
||||||
ENV NODE_VERSION=16.14.2
|
|
||||||
RUN groupadd -g $GID $USER && useradd --system -m -g $USER --uid $UID $USER && \
|
|
||||||
apt update && \
|
|
||||||
apt install -y --no-install-recommends curl ca-certificates tzdata libicu70 libatomic1 && \
|
|
||||||
apt clean && \
|
|
||||||
rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
RUN mkdir /usr/local/nvm
|
|
||||||
ENV PATH="/usr/local/nvm/versions/node/v${NODE_VERSION}/bin/:${PATH}"
|
|
||||||
ENV NVM_DIR=/usr/local/nvm
|
|
||||||
RUN curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.5/install.sh | bash
|
|
||||||
RUN . "$NVM_DIR/nvm.sh" && nvm install ${NODE_VERSION}
|
|
||||||
RUN . "$NVM_DIR/nvm.sh" && nvm use v${NODE_VERSION}
|
|
||||||
RUN . "$NVM_DIR/nvm.sh" && nvm alias default v${NODE_VERSION}
|
|
||||||
|
|
||||||
# Build frontend
|
|
||||||
ARG BUILDPLATFORM
|
|
||||||
FROM --platform=${BUILDPLATFORM} node:16 as frontend
|
|
||||||
RUN npm install -g @angular/cli
|
RUN npm install -g @angular/cli
|
||||||
|
|
||||||
WORKDIR /build
|
WORKDIR /build
|
||||||
COPY [ "package.json", "package-lock.json", "angular.json", "tsconfig.json", "/build/" ]
|
COPY [ "package.json", "package-lock.json", "/build/" ]
|
||||||
|
RUN npm install
|
||||||
|
|
||||||
|
COPY [ "angular.json", "tsconfig.json", "/build/" ]
|
||||||
COPY [ "src/", "/build/src/" ]
|
COPY [ "src/", "/build/src/" ]
|
||||||
RUN npm install && \
|
RUN ng build --prod
|
||||||
npm run build && \
|
|
||||||
ls -al /build/backend/public
|
|
||||||
RUN npm uninstall -g @angular/cli
|
|
||||||
RUN rm -rf node_modules
|
|
||||||
|
|
||||||
|
#--------------#
|
||||||
|
|
||||||
|
FROM alpine:3.12
|
||||||
|
|
||||||
|
ENV UID=1000 \
|
||||||
|
GID=1000 \
|
||||||
|
USER=youtube
|
||||||
|
|
||||||
|
RUN addgroup -S $USER -g $GID && adduser -D -S $USER -G $USER -u $UID
|
||||||
|
|
||||||
|
RUN apk add --no-cache \
|
||||||
|
ffmpeg \
|
||||||
|
npm \
|
||||||
|
python2 \
|
||||||
|
su-exec \
|
||||||
|
&& apk add --no-cache --repository http://dl-cdn.alpinelinux.org/alpine/edge/testing/ \
|
||||||
|
atomicparsley
|
||||||
|
|
||||||
# Install backend deps
|
|
||||||
FROM base as backend
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY [ "backend/","/app/" ]
|
COPY --chown=$UID:$GID [ "backend/package.json", "backend/package-lock.json", "/app/" ]
|
||||||
RUN npm config set strict-ssl false && \
|
RUN npm install && chown -R $UID:$GID ./
|
||||||
npm install --prod && \
|
|
||||||
ls -al
|
|
||||||
|
|
||||||
#FROM base as python
|
|
||||||
# armv7 need build from source
|
|
||||||
#WORKDIR /app
|
|
||||||
#COPY docker-utils/GetTwitchDownloader.py .
|
|
||||||
#RUN apt update && \
|
|
||||||
# apt install -y --no-install-recommends python3-minimal python-is-python3 python3-pip python3-dev build-essential libffi-dev && \
|
|
||||||
# apt clean && \
|
|
||||||
# rm -rf /var/lib/apt/lists/*
|
|
||||||
#RUN pip install PyGithub requests
|
|
||||||
#RUN python GetTwitchDownloader.py
|
|
||||||
|
|
||||||
# Final image
|
|
||||||
FROM base
|
|
||||||
RUN npm install -g pm2 && \
|
|
||||||
apt update && \
|
|
||||||
apt install -y --no-install-recommends gosu python3-minimal python-is-python3 python3-pip atomicparsley build-essential && \
|
|
||||||
pip install pycryptodomex && \
|
|
||||||
apt remove -y --purge build-essential && \
|
|
||||||
apt autoremove -y --purge && \
|
|
||||||
apt clean && \
|
|
||||||
rm -rf /var/lib/apt/lists/*
|
|
||||||
WORKDIR /app
|
|
||||||
# User 1000 already exist from base image
|
|
||||||
COPY --chown=$UID:$GID --from=utils [ "/usr/local/bin/ffmpeg", "/usr/local/bin/ffmpeg" ]
|
|
||||||
COPY --chown=$UID:$GID --from=utils [ "/usr/local/bin/ffprobe", "/usr/local/bin/ffprobe" ]
|
|
||||||
COPY --chown=$UID:$GID --from=utils [ "/usr/local/bin/TwitchDownloaderCLI", "/usr/local/bin/TwitchDownloaderCLI"]
|
|
||||||
COPY --chown=$UID:$GID --from=backend ["/app/","/app/"]
|
|
||||||
COPY --chown=$UID:$GID --from=frontend [ "/build/backend/public/", "/app/public/" ]
|
COPY --chown=$UID:$GID --from=frontend [ "/build/backend/public/", "/app/public/" ]
|
||||||
#COPY --chown=$UID:$GID --from=python ["/app/TwitchDownloaderCLI","/usr/local/bin/TwitchDownloaderCLI"]
|
COPY --chown=$UID:$GID [ "/backend/", "/app/" ]
|
||||||
RUN chmod +x /app/fix-scripts/*.sh
|
|
||||||
# Add some persistence data
|
|
||||||
#VOLUME ["/app/appdata"]
|
|
||||||
|
|
||||||
EXPOSE 17442
|
EXPOSE 17442
|
||||||
ENTRYPOINT [ "/app/entrypoint.sh" ]
|
ENTRYPOINT [ "/app/entrypoint.sh" ]
|
||||||
CMD [ "npm","start" ]
|
CMD [ "node", "app.js" ]
|
||||||
|
|||||||
@@ -1,2 +0,0 @@
|
|||||||
FROM tzahi12345/youtubedl-material:latest
|
|
||||||
CMD [ "npm", "start" ]
|
|
||||||
2915
Public API v1.yaml
2915
Public API v1.yaml
File diff suppressed because it is too large
Load Diff
70
README.md
70
README.md
@@ -6,50 +6,37 @@
|
|||||||
[](https://github.com/Tzahi12345/YoutubeDL-Material/issues)
|
[](https://github.com/Tzahi12345/YoutubeDL-Material/issues)
|
||||||
[](https://github.com/Tzahi12345/YoutubeDL-Material/blob/master/LICENSE.md)
|
[](https://github.com/Tzahi12345/YoutubeDL-Material/blob/master/LICENSE.md)
|
||||||
|
|
||||||
YoutubeDL-Material is a Material Design frontend for [youtube-dl](https://rg3.github.io/youtube-dl/). It's coded using [Angular 15](https://angular.io/) for the frontend, and [Node.js](https://nodejs.org/) on the backend.
|
YoutubeDL-Material is a Material Design frontend for [youtube-dl](https://rg3.github.io/youtube-dl/). It's coded using [Angular 9](https://angular.io/) for the frontend, and [Node.js](https://nodejs.org/) on the backend.
|
||||||
|
|
||||||
Now with [Docker](#Docker) support!
|
Now with [Docker](#Docker) support!
|
||||||
|
|
||||||
<hr>
|
|
||||||
|
|
||||||
## Getting Started
|
## Getting Started
|
||||||
|
|
||||||
Check out the prerequisites, and go to the [installation](#Installing) section. Easy as pie!
|
Check out the prerequisites, and go to the installation section. Easy as pie!
|
||||||
|
|
||||||
Here's an image of what it'll look like once you're done:
|
Here's an image of what it'll look like once you're done:
|
||||||
|
|
||||||
<img src="https://i.imgur.com/C6vFGbL.png" width="800">
|

|
||||||
|
|
||||||
|
With optional file management enabled (default):
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
Dark mode:
|
Dark mode:
|
||||||
|
|
||||||
<img src="https://i.imgur.com/vOtvH5w.png" width="800">
|

|
||||||
|
|
||||||
### Prerequisites
|
### Prerequisites
|
||||||
|
|
||||||
NOTE: If you would like to use Docker, you can skip down to the [Docker](#Docker) section for a setup guide.
|
NOTE: If you would like to use Docker, you can skip down to the [Docker](#Docker) section for a setup guide.
|
||||||
|
|
||||||
Required dependencies:
|
Debian/Ubuntu:
|
||||||
|
|
||||||
* Node.js 16
|
|
||||||
* Python
|
|
||||||
|
|
||||||
Optional dependencies:
|
|
||||||
|
|
||||||
* AtomicParsley (for embedding thumbnails, package name `atomicparsley`)
|
|
||||||
* [Twitch Downloader CLI](https://github.com/lay295/TwitchDownloader) (for downloading Twitch VOD chats)
|
|
||||||
|
|
||||||
<details>
|
|
||||||
<summary>Debian/Ubuntu</summary>
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
curl -fsSL https://deb.nodesource.com/setup_16.x | sudo -E bash -
|
sudo apt-get install nodejs youtube-dl ffmpeg
|
||||||
sudo apt-get install nodejs youtube-dl ffmpeg unzip python npm
|
|
||||||
```
|
```
|
||||||
|
|
||||||
</details>
|
CentOS 7:
|
||||||
|
|
||||||
<details>
|
|
||||||
<summary>CentOS 7</summary>
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
sudo yum install epel-release
|
sudo yum install epel-release
|
||||||
@@ -57,16 +44,15 @@ sudo yum localinstall --nogpgcheck https://download1.rpmfusion.org/free/el/rpmfu
|
|||||||
sudo yum install centos-release-scl-rh
|
sudo yum install centos-release-scl-rh
|
||||||
sudo yum install rh-nodejs12
|
sudo yum install rh-nodejs12
|
||||||
scl enable rh-nodejs12 bash
|
scl enable rh-nodejs12 bash
|
||||||
curl -fsSL https://rpm.nodesource.com/setup_16.x | sudo bash -
|
|
||||||
sudo yum install nodejs youtube-dl ffmpeg ffmpeg-devel
|
sudo yum install nodejs youtube-dl ffmpeg ffmpeg-devel
|
||||||
```
|
```
|
||||||
|
|
||||||
</details>
|
Optional dependencies:
|
||||||
|
|
||||||
|
* AtomicParsley (for embedding thumbnails, package name `atomicparsley`)
|
||||||
|
|
||||||
### Installing
|
### Installing
|
||||||
|
|
||||||
If you are using Docker, skip to the [Docker](#Docker) section. Otherwise, continue:
|
|
||||||
|
|
||||||
1. First, download the [latest release](https://github.com/Tzahi12345/YoutubeDL-Material/releases/latest)!
|
1. First, download the [latest release](https://github.com/Tzahi12345/YoutubeDL-Material/releases/latest)!
|
||||||
|
|
||||||
2. Drag the `youtubedl-material` directory to an easily accessible directory. Navigate to the `appdata` folder and edit the `default.json` file.
|
2. Drag the `youtubedl-material` directory to an easily accessible directory. Navigate to the `appdata` folder and edit the `default.json` file.
|
||||||
@@ -85,9 +71,7 @@ If you'd like to install YoutubeDL-Material, go to the Installation section. If
|
|||||||
|
|
||||||
To deploy, simply clone the repository, and go into the `youtubedl-material` directory. Type `npm install` and all the dependencies will install. Then type `cd backend` and again type `npm install` to install the dependencies for the backend.
|
To deploy, simply clone the repository, and go into the `youtubedl-material` directory. Type `npm install` and all the dependencies will install. Then type `cd backend` and again type `npm install` to install the dependencies for the backend.
|
||||||
|
|
||||||
Once you do that, you're almost up and running. All you need to do is edit the configuration in `youtubedl-material/appdata`, go back into the `youtubedl-material` directory, and type `npm run build`. This will build the app, and put the output files in the `youtubedl-material/backend/public` folder.
|
Once you do that, you're almost up and running. All you need to do is edit the configuration in `youtubedl-material/appdata`, go back into the `youtubedl-material` directory, and type `ng build --prod`. This will build the app, and put the output files in the `youtubedl-material/backend/public` folder.
|
||||||
|
|
||||||
Lastly, type `npm -g install pm2` to install pm2 globally.
|
|
||||||
|
|
||||||
The frontend is now complete. The backend is much easier. Just go into the `backend` folder, and type `npm start`.
|
The frontend is now complete. The backend is much easier. Just go into the `backend` folder, and type `npm start`.
|
||||||
|
|
||||||
@@ -97,10 +81,6 @@ Alternatively, you can port forward the port specified in the config (defaults t
|
|||||||
|
|
||||||
## Docker
|
## Docker
|
||||||
|
|
||||||
### Host-specific instructions
|
|
||||||
|
|
||||||
If you're on a Synology NAS, unRAID, Raspberry Pi 4 or any other possible special case you can check if there's known issues or instructions both in the issue tracker and in the [Wiki!](https://github.com/Tzahi12345/YoutubeDL-Material/wiki#environment-specific-guideshelp)
|
|
||||||
|
|
||||||
### Setup
|
### Setup
|
||||||
|
|
||||||
If you are looking to setup YoutubeDL-Material with Docker, this section is for you. And you're in luck! Docker setup is quite simple.
|
If you are looking to setup YoutubeDL-Material with Docker, this section is for you. And you're in luck! Docker setup is quite simple.
|
||||||
@@ -110,6 +90,8 @@ If you are looking to setup YoutubeDL-Material with Docker, this section is for
|
|||||||
3. Run `docker-compose up` to start it up. If successful, it should say "HTTP(S): Started on port 17443" or something similar. This tells you the *container-internal* port of the application. Please check your `docker-compose.yml` file for the *external* port. If you downloaded the file as described above, it defaults to **8998**.
|
3. Run `docker-compose up` to start it up. If successful, it should say "HTTP(S): Started on port 17443" or something similar. This tells you the *container-internal* port of the application. Please check your `docker-compose.yml` file for the *external* port. If you downloaded the file as described above, it defaults to **8998**.
|
||||||
4. Make sure you can connect to the specified URL + *external* port, and if so, you are done!
|
4. Make sure you can connect to the specified URL + *external* port, and if so, you are done!
|
||||||
|
|
||||||
|
NOTE: It is currently recommended that you use the `nightly` tag on Docker. To do so, simply update the docker-compose.yml `image` field so that it points to `tzahi12345/youtubedl-material:nightly`.
|
||||||
|
|
||||||
### Custom UID/GID
|
### Custom UID/GID
|
||||||
|
|
||||||
By default, the Docker container runs as non-root with UID=1000 and GID=1000. To set this to your own UID/GID, simply update the `environment` section in your `docker-compose.yml` like so:
|
By default, the Docker container runs as non-root with UID=1000 and GID=1000. To set this to your own UID/GID, simply update the `environment` section in your `docker-compose.yml` like so:
|
||||||
@@ -120,12 +102,6 @@ environment:
|
|||||||
GID: YOUR_GID
|
GID: YOUR_GID
|
||||||
```
|
```
|
||||||
|
|
||||||
## MongoDB
|
|
||||||
|
|
||||||
For much better scaling with large datasets please run your YoutubeDL-Material instance with MongoDB backend rather than the json file-based default. It will fix a lot of performance problems (especially with datasets in the tens of thousands videos/audios)!
|
|
||||||
|
|
||||||
[Tutorial](https://github.com/Tzahi12345/YoutubeDL-Material/wiki/Setting-a-MongoDB-backend-to-use-as-database-provider-for-YTDL-M).
|
|
||||||
|
|
||||||
## API
|
## API
|
||||||
|
|
||||||
[API Docs](https://youtubedl-material.stoplight.io/docs/youtubedl-material/Public%20API%20v1.yaml)
|
[API Docs](https://youtubedl-material.stoplight.io/docs/youtubedl-material/Public%20API%20v1.yaml)
|
||||||
@@ -134,12 +110,6 @@ To get started, go to the settings menu and enable the public API from the *Extr
|
|||||||
|
|
||||||
Once you have enabled the API and have the key, you can start sending requests by adding the query param `apiKey=API_KEY`. Replace `API_KEY` with your actual API key, and you should be good to go! Nearly all of the backend should be at your disposal. View available endpoints in the link above.
|
Once you have enabled the API and have the key, you can start sending requests by adding the query param `apiKey=API_KEY`. Replace `API_KEY` with your actual API key, and you should be good to go! Nearly all of the backend should be at your disposal. View available endpoints in the link above.
|
||||||
|
|
||||||
## iOS Shortcut
|
|
||||||
|
|
||||||
If you are using iOS, try YoutubeDL-Material more conveniently with a Shortcut. With this Shorcut, you can easily start downloading YouTube video with just two taps! (Or maybe three?)
|
|
||||||
|
|
||||||
You can download Shortcut [here.](https://routinehub.co/shortcut/10283/)
|
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
If you're interested in contributing, first: awesome! Second, please refer to the guidelines/setup information located in the [Contributing](https://github.com/Tzahi12345/YoutubeDL-Material/wiki/Contributing) wiki page, it's a helpful way to get you on your feet and coding away.
|
If you're interested in contributing, first: awesome! Second, please refer to the guidelines/setup information located in the [Contributing](https://github.com/Tzahi12345/YoutubeDL-Material/wiki/Contributing) wiki page, it's a helpful way to get you on your feet and coding away.
|
||||||
@@ -158,16 +128,12 @@ Official translators:
|
|||||||
* German - UnlimitedCookies
|
* German - UnlimitedCookies
|
||||||
* Chinese - TyRoyal
|
* Chinese - TyRoyal
|
||||||
|
|
||||||
See also the list of [contributors](https://github.com/Tzahi12345/YoutubeDL-Material/graphs/contributors) who participated in this project.
|
See also the list of [contributors](https://github.com/your/project/contributors) who participated in this project.
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
This project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details
|
This project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details
|
||||||
|
|
||||||
## Legal Disclaimer
|
|
||||||
|
|
||||||
This project is in no way affiliated with Google LLC, Alphabet Inc. or YouTube (or their subsidiaries) nor endorsed by them.
|
|
||||||
|
|
||||||
## Acknowledgments
|
## Acknowledgments
|
||||||
|
|
||||||
* youtube-dl
|
* youtube-dl
|
||||||
|
|||||||
21
SECURITY.md
21
SECURITY.md
@@ -1,21 +0,0 @@
|
|||||||
# Security Policy
|
|
||||||
|
|
||||||
## Supported Versions
|
|
||||||
|
|
||||||
If you would like to see the latest updates, use the `nightly` tag on Docker.
|
|
||||||
|
|
||||||
If you'd like to stick with more stable releases, use the `latest` tag on Docker or download the [latest release here](https://github.com/Tzahi12345/YoutubeDL-Material/releases/latest).
|
|
||||||
|
|
||||||
| Version | Supported |
|
|
||||||
| -------------------- | ------------------ |
|
|
||||||
| 4.3 Docker Nightlies | :white_check_mark: |
|
|
||||||
| 4.3 Release | :white_check_mark: |
|
|
||||||
| 4.2 Release | :x: |
|
|
||||||
| < 4.2 | :x: |
|
|
||||||
|
|
||||||
## Reporting a Vulnerability
|
|
||||||
|
|
||||||
Please file an issue in our GitHub's repo, because this app
|
|
||||||
isn't meant to be safe to run as public instance yet, but rather as a LAN facing app.
|
|
||||||
|
|
||||||
We welcome PRs and help in general in making YTDL-M more secure, but it's not a priority as of now.
|
|
||||||
62
angular.json
62
angular.json
@@ -17,6 +17,7 @@
|
|||||||
"build": {
|
"build": {
|
||||||
"builder": "@angular-devkit/build-angular:browser",
|
"builder": "@angular-devkit/build-angular:browser",
|
||||||
"options": {
|
"options": {
|
||||||
|
"aot": true,
|
||||||
"outputPath": "backend/public",
|
"outputPath": "backend/public",
|
||||||
"index": "src/index.html",
|
"index": "src/index.html",
|
||||||
"main": "src/main.ts",
|
"main": "src/main.ts",
|
||||||
@@ -30,20 +31,9 @@
|
|||||||
"src/backend"
|
"src/backend"
|
||||||
],
|
],
|
||||||
"styles": [
|
"styles": [
|
||||||
"src/styles.scss",
|
"src/styles.scss"
|
||||||
"src/bootstrap.min.css"
|
|
||||||
],
|
],
|
||||||
"scripts": [],
|
"scripts": []
|
||||||
"vendorChunk": true,
|
|
||||||
"extractLicenses": false,
|
|
||||||
"buildOptimizer": false,
|
|
||||||
"sourceMap": true,
|
|
||||||
"optimization": false,
|
|
||||||
"namedChunks": true,
|
|
||||||
"allowedCommonJsDependencies": [
|
|
||||||
"rxjs",
|
|
||||||
"crypto-js"
|
|
||||||
]
|
|
||||||
},
|
},
|
||||||
"configurations": {
|
"configurations": {
|
||||||
"production": {
|
"production": {
|
||||||
@@ -55,7 +45,10 @@
|
|||||||
],
|
],
|
||||||
"optimization": true,
|
"optimization": true,
|
||||||
"outputHashing": "all",
|
"outputHashing": "all",
|
||||||
|
"sourceMap": false,
|
||||||
|
"extractCss": true,
|
||||||
"namedChunks": false,
|
"namedChunks": false,
|
||||||
|
"aot": true,
|
||||||
"extractLicenses": true,
|
"extractLicenses": true,
|
||||||
"vendorChunk": false,
|
"vendorChunk": false,
|
||||||
"buildOptimizer": true,
|
"buildOptimizer": true,
|
||||||
@@ -66,19 +59,10 @@
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"codespaces": {
|
|
||||||
"fileReplacements": [
|
|
||||||
{
|
|
||||||
"replace": "src/environments/environment.ts",
|
|
||||||
"with": "src/environments/environment.codespaces.ts"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"es": {
|
"es": {
|
||||||
"localize": ["es"]
|
"localize": ["es"]
|
||||||
}
|
}
|
||||||
},
|
}
|
||||||
"defaultConfiguration": ""
|
|
||||||
},
|
},
|
||||||
"serve": {
|
"serve": {
|
||||||
"builder": "@angular-devkit/build-angular:dev-server",
|
"builder": "@angular-devkit/build-angular:dev-server",
|
||||||
@@ -91,9 +75,6 @@
|
|||||||
},
|
},
|
||||||
"es": {
|
"es": {
|
||||||
"browserTarget": "youtube-dl-material:build:es"
|
"browserTarget": "youtube-dl-material:build:es"
|
||||||
},
|
|
||||||
"codespaces": {
|
|
||||||
"browserTarget": "youtube-dl-material:build:codespaces"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -130,8 +111,7 @@
|
|||||||
"src/backend"
|
"src/backend"
|
||||||
],
|
],
|
||||||
"styles": [
|
"styles": [
|
||||||
"src/styles.scss",
|
"src/styles.scss"
|
||||||
"src/bootstrap.min.css"
|
|
||||||
],
|
],
|
||||||
"scripts": []
|
"scripts": []
|
||||||
},
|
},
|
||||||
@@ -164,8 +144,7 @@
|
|||||||
"tsConfig": "src/tsconfig.spec.json",
|
"tsConfig": "src/tsconfig.spec.json",
|
||||||
"scripts": [],
|
"scripts": [],
|
||||||
"styles": [
|
"styles": [
|
||||||
"src/styles.scss",
|
"src/styles.scss"
|
||||||
"src/bootstrap.min.css"
|
|
||||||
],
|
],
|
||||||
"assets": [
|
"assets": [
|
||||||
"src/assets",
|
"src/assets",
|
||||||
@@ -175,6 +154,16 @@
|
|||||||
"src/backend"
|
"src/backend"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"lint": {
|
||||||
|
"builder": "@angular-devkit/build-angular:tslint",
|
||||||
|
"options": {
|
||||||
|
"tsConfig": [
|
||||||
|
"src/tsconfig.app.json",
|
||||||
|
"src/tsconfig.spec.json"
|
||||||
|
],
|
||||||
|
"exclude": []
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -189,10 +178,20 @@
|
|||||||
"protractorConfig": "./protractor.conf.js",
|
"protractorConfig": "./protractor.conf.js",
|
||||||
"devServerTarget": "youtube-dl-material:serve"
|
"devServerTarget": "youtube-dl-material:serve"
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"lint": {
|
||||||
|
"builder": "@angular-devkit/build-angular:tslint",
|
||||||
|
"options": {
|
||||||
|
"tsConfig": [
|
||||||
|
"e2e/tsconfig.e2e.json"
|
||||||
|
],
|
||||||
|
"exclude": []
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"defaultProject": "youtube-dl-material",
|
||||||
"schematics": {
|
"schematics": {
|
||||||
"@schematics/angular:component": {
|
"@schematics/angular:component": {
|
||||||
"prefix": "app",
|
"prefix": "app",
|
||||||
@@ -201,8 +200,5 @@
|
|||||||
"@schematics/angular:directive": {
|
"@schematics/angular:directive": {
|
||||||
"prefix": "app"
|
"prefix": "app"
|
||||||
}
|
}
|
||||||
},
|
|
||||||
"cli": {
|
|
||||||
"analytics": false
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
1
app.json
1
app.json
@@ -2,7 +2,6 @@
|
|||||||
"name": "YoutubeDL-Material",
|
"name": "YoutubeDL-Material",
|
||||||
"description": "An open-source and self-hosted YouTube downloader based on Google's Material Design specifications.",
|
"description": "An open-source and self-hosted YouTube downloader based on Google's Material Design specifications.",
|
||||||
"repository": "https://github.com/Tzahi12345/YoutubeDL-Material",
|
"repository": "https://github.com/Tzahi12345/YoutubeDL-Material",
|
||||||
"stack": "container",
|
|
||||||
"logo": "https://i.imgur.com/GPzvPiU.png",
|
"logo": "https://i.imgur.com/GPzvPiU.png",
|
||||||
"keywords": ["youtube-dl", "youtubedl-material", "nodejs"]
|
"keywords": ["youtube-dl", "youtubedl-material", "nodejs"]
|
||||||
}
|
}
|
||||||
49
armhf.Dockerfile
Normal file
49
armhf.Dockerfile
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
FROM alpine:3.12 as frontend
|
||||||
|
|
||||||
|
RUN apk add --no-cache \
|
||||||
|
npm \
|
||||||
|
curl
|
||||||
|
|
||||||
|
RUN npm install -g @angular/cli
|
||||||
|
|
||||||
|
WORKDIR /build
|
||||||
|
|
||||||
|
RUN curl -L https://github.com/balena-io/qemu/releases/download/v3.0.0%2Bresin/qemu-3.0.0+resin-arm.tar.gz | tar zxvf - -C . && mv qemu-3.0.0+resin-arm/qemu-arm-static .
|
||||||
|
|
||||||
|
COPY [ "package.json", "package-lock.json", "/build/" ]
|
||||||
|
RUN npm install
|
||||||
|
|
||||||
|
COPY [ "angular.json", "tsconfig.json", "/build/" ]
|
||||||
|
COPY [ "src/", "/build/src/" ]
|
||||||
|
RUN ng build --prod
|
||||||
|
|
||||||
|
#--------------#
|
||||||
|
|
||||||
|
FROM arm32v7/alpine:3.12
|
||||||
|
|
||||||
|
COPY --from=frontend /build/qemu-arm-static /usr/bin
|
||||||
|
|
||||||
|
ENV UID=1000 \
|
||||||
|
GID=1000 \
|
||||||
|
USER=youtube
|
||||||
|
|
||||||
|
RUN addgroup -S $USER -g $GID && adduser -D -S $USER -G $USER -u $UID
|
||||||
|
|
||||||
|
RUN apk add --no-cache \
|
||||||
|
ffmpeg \
|
||||||
|
npm \
|
||||||
|
python2 \
|
||||||
|
su-exec \
|
||||||
|
&& apk add --no-cache --repository http://dl-cdn.alpinelinux.org/alpine/edge/testing/ \
|
||||||
|
atomicparsley
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
COPY --chown=$UID:$GID [ "backend/package.json", "backend/package-lock.json", "/app/" ]
|
||||||
|
RUN npm install && chown -R $UID:$GID ./
|
||||||
|
|
||||||
|
COPY --chown=$UID:$GID --from=frontend [ "/build/backend/public/", "/app/public/" ]
|
||||||
|
COPY --chown=$UID:$GID [ "/backend/", "/app/" ]
|
||||||
|
|
||||||
|
EXPOSE 17442
|
||||||
|
ENTRYPOINT [ "/app/entrypoint.sh" ]
|
||||||
|
CMD [ "node", "app.js" ]
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
{
|
|
||||||
"env": {
|
|
||||||
"node": true,
|
|
||||||
"es2021": true
|
|
||||||
},
|
|
||||||
"extends": [
|
|
||||||
"eslint:recommended"
|
|
||||||
],
|
|
||||||
"parser": "esprima",
|
|
||||||
"parserOptions": {
|
|
||||||
"ecmaVersion": 12,
|
|
||||||
"sourceType": "module"
|
|
||||||
},
|
|
||||||
"plugins": [],
|
|
||||||
"rules": {
|
|
||||||
},
|
|
||||||
"root": true
|
|
||||||
}
|
|
||||||
2859
backend/app.js
2859
backend/app.js
File diff suppressed because it is too large
Load Diff
@@ -12,47 +12,21 @@
|
|||||||
"custom_args": "",
|
"custom_args": "",
|
||||||
"safe_download_override": false,
|
"safe_download_override": false,
|
||||||
"include_thumbnail": true,
|
"include_thumbnail": true,
|
||||||
"include_metadata": true,
|
"include_metadata": true
|
||||||
"max_concurrent_downloads": 5,
|
|
||||||
"download_rate_limit": ""
|
|
||||||
},
|
},
|
||||||
"Extra": {
|
"Extra": {
|
||||||
"title_top": "YoutubeDL-Material",
|
"title_top": "YoutubeDL-Material",
|
||||||
"file_manager_enabled": true,
|
"file_manager_enabled": true,
|
||||||
"allow_quality_select": true,
|
"allow_quality_select": true,
|
||||||
"download_only_mode": false,
|
"download_only_mode": false,
|
||||||
"allow_autoplay": true,
|
"allow_multi_download_mode": true,
|
||||||
"enable_downloads_manager": true,
|
"enable_downloads_manager": true
|
||||||
"allow_playlist_categorization": true,
|
|
||||||
"force_autoplay": false,
|
|
||||||
"enable_notifications": true,
|
|
||||||
"enable_all_notifications": true,
|
|
||||||
"allowed_notification_types": [],
|
|
||||||
"enable_rss_feed": false
|
|
||||||
},
|
},
|
||||||
"API": {
|
"API": {
|
||||||
"use_API_key": false,
|
"use_API_key": false,
|
||||||
"API_key": "",
|
"API_key": "",
|
||||||
"use_youtube_API": false,
|
"use_youtube_API": false,
|
||||||
"youtube_API_key": "",
|
"youtube_API_key": ""
|
||||||
"use_twitch_API": false,
|
|
||||||
"twitch_client_ID": "",
|
|
||||||
"twitch_client_secret": "",
|
|
||||||
"twitch_auto_download_chat": false,
|
|
||||||
"use_sponsorblock_API": false,
|
|
||||||
"generate_NFO_files": false,
|
|
||||||
"use_ntfy_API": false,
|
|
||||||
"ntfy_topic_URL": "",
|
|
||||||
"use_gotify_API": false,
|
|
||||||
"gotify_server_URL": "",
|
|
||||||
"gotify_app_token": "",
|
|
||||||
"use_telegram_API": false,
|
|
||||||
"telegram_bot_token": "",
|
|
||||||
"telegram_chat_id": "",
|
|
||||||
"telegram_webhook_proxy": "",
|
|
||||||
"webhook_URL": "",
|
|
||||||
"discord_webhook_URL": "",
|
|
||||||
"slack_webhook_URL": ""
|
|
||||||
},
|
},
|
||||||
"Themes": {
|
"Themes": {
|
||||||
"default_theme": "default",
|
"default_theme": "default",
|
||||||
@@ -61,8 +35,7 @@
|
|||||||
"Subscriptions": {
|
"Subscriptions": {
|
||||||
"allow_subscriptions": true,
|
"allow_subscriptions": true,
|
||||||
"subscriptions_base_path": "subscriptions/",
|
"subscriptions_base_path": "subscriptions/",
|
||||||
"subscriptions_check_interval": "300",
|
"subscriptions_check_interval": "300"
|
||||||
"redownload_fresh_uploads": false
|
|
||||||
},
|
},
|
||||||
"Users": {
|
"Users": {
|
||||||
"base_path": "users/",
|
"base_path": "users/",
|
||||||
@@ -76,12 +49,8 @@
|
|||||||
"searchFilter": "(uid={{username}})"
|
"searchFilter": "(uid={{username}})"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"Database": {
|
|
||||||
"use_local_db": true,
|
|
||||||
"mongodb_connection_string": "mongodb://127.0.0.1:27017/?compressors=zlib"
|
|
||||||
},
|
|
||||||
"Advanced": {
|
"Advanced": {
|
||||||
"default_downloader": "yt-dlp",
|
"default_downloader": "youtube-dl",
|
||||||
"use_default_downloading_agent": true,
|
"use_default_downloading_agent": true,
|
||||||
"custom_downloading_agent": "",
|
"custom_downloading_agent": "",
|
||||||
"multi_user_mode": false,
|
"multi_user_mode": false,
|
||||||
|
|||||||
@@ -1,91 +0,0 @@
|
|||||||
const path = require('path');
|
|
||||||
const fs = require('fs-extra');
|
|
||||||
const { v4: uuid } = require('uuid');
|
|
||||||
|
|
||||||
const db_api = require('./db');
|
|
||||||
|
|
||||||
exports.generateArchive = async (type = null, user_uid = null, sub_id = null) => {
|
|
||||||
const filter = {user_uid: user_uid, sub_id: sub_id};
|
|
||||||
if (type) filter['type'] = type;
|
|
||||||
const archive_items = await db_api.getRecords('archives', filter);
|
|
||||||
const archive_item_lines = archive_items.map(archive_item => `${archive_item['extractor']} ${archive_item['id']}`);
|
|
||||||
return archive_item_lines.join('\n');
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.addToArchive = async (extractor, id, type, title, user_uid = null, sub_id = null) => {
|
|
||||||
const archive_item = createArchiveItem(extractor, id, type, title, user_uid, sub_id);
|
|
||||||
const success = await db_api.insertRecordIntoTable('archives', archive_item, {extractor: extractor, id: id, type: type});
|
|
||||||
return success;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.removeFromArchive = async (extractor, id, type, user_uid = null, sub_id = null) => {
|
|
||||||
const success = await db_api.removeAllRecords('archives', {extractor: extractor, id: id, type: type, user_uid: user_uid, sub_id: sub_id});
|
|
||||||
return success;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.existsInArchive = async (extractor, id, type, user_uid, sub_id) => {
|
|
||||||
const archive_item = await db_api.getRecord('archives', {extractor: extractor, id: id, type: type, user_uid: user_uid, sub_id: sub_id});
|
|
||||||
return !!archive_item;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.importArchiveFile = async (archive_text, type, user_uid = null, sub_id = null) => {
|
|
||||||
let archive_import_count = 0;
|
|
||||||
const lines = archive_text.split('\n');
|
|
||||||
for (let line of lines) {
|
|
||||||
const archive_line_parts = line.trim().split(' ');
|
|
||||||
// should just be the extractor and the video ID
|
|
||||||
if (archive_line_parts.length !== 2) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const extractor = archive_line_parts[0];
|
|
||||||
const id = archive_line_parts[1];
|
|
||||||
if (!extractor || !id) continue;
|
|
||||||
|
|
||||||
// we can't do a bulk write because we need to avoid duplicate archive items existing in db
|
|
||||||
|
|
||||||
const archive_item = createArchiveItem(extractor, id, type, null, user_uid, sub_id);
|
|
||||||
await db_api.insertRecordIntoTable('archives', archive_item, {extractor: extractor, id: id, type: type, sub_id: sub_id, user_uid: user_uid});
|
|
||||||
archive_import_count++;
|
|
||||||
}
|
|
||||||
return archive_import_count;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.importArchives = async () => {
|
|
||||||
const imported_archives = [];
|
|
||||||
const dirs_to_check = await db_api.getFileDirectoriesAndDBs();
|
|
||||||
|
|
||||||
// run through check list and check each file to see if it's missing from the db
|
|
||||||
for (let i = 0; i < dirs_to_check.length; i++) {
|
|
||||||
const dir_to_check = dirs_to_check[i];
|
|
||||||
if (!dir_to_check['archive_path']) continue;
|
|
||||||
|
|
||||||
const files_to_import = [
|
|
||||||
path.join(dir_to_check['archive_path'], `archive_${dir_to_check['type']}.txt`),
|
|
||||||
path.join(dir_to_check['archive_path'], `blacklist_${dir_to_check['type']}.txt`)
|
|
||||||
]
|
|
||||||
|
|
||||||
for (const file_to_import of files_to_import) {
|
|
||||||
const file_exists = await fs.pathExists(file_to_import);
|
|
||||||
if (!file_exists) continue;
|
|
||||||
|
|
||||||
const archive_text = await fs.readFile(file_to_import, 'utf8');
|
|
||||||
await exports.importArchiveFile(archive_text, dir_to_check.type, dir_to_check.user_uid, dir_to_check.sub_id);
|
|
||||||
imported_archives.push(file_to_import);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return imported_archives;
|
|
||||||
}
|
|
||||||
|
|
||||||
const createArchiveItem = (extractor, id, type, title = null, user_uid = null, sub_id = null) => {
|
|
||||||
return {
|
|
||||||
extractor: extractor,
|
|
||||||
id: id,
|
|
||||||
type: type,
|
|
||||||
title: title,
|
|
||||||
user_uid: user_uid ? user_uid : null,
|
|
||||||
sub_id: sub_id ? sub_id : null,
|
|
||||||
timestamp: Date.now() / 1000,
|
|
||||||
uid: uuid()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,13 +1,12 @@
|
|||||||
const config_api = require('../config');
|
|
||||||
const CONSTS = require('../consts');
|
|
||||||
const logger = require('../logger');
|
|
||||||
const db_api = require('../db');
|
|
||||||
|
|
||||||
const jwt = require('jsonwebtoken');
|
|
||||||
const { v4: uuid } = require('uuid');
|
|
||||||
const bcrypt = require('bcryptjs');
|
|
||||||
const fs = require('fs-extra');
|
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
|
const config_api = require('../config');
|
||||||
|
const consts = require('../consts');
|
||||||
|
var subscriptions_api = require('../subscriptions')
|
||||||
|
const fs = require('fs-extra');
|
||||||
|
var jwt = require('jsonwebtoken');
|
||||||
|
const { uuid } = require('uuidv4');
|
||||||
|
var bcrypt = require('bcryptjs');
|
||||||
|
|
||||||
|
|
||||||
var LocalStrategy = require('passport-local').Strategy;
|
var LocalStrategy = require('passport-local').Strategy;
|
||||||
var LdapStrategy = require('passport-ldapauth');
|
var LdapStrategy = require('passport-ldapauth');
|
||||||
@@ -15,47 +14,40 @@ var JwtStrategy = require('passport-jwt').Strategy,
|
|||||||
ExtractJwt = require('passport-jwt').ExtractJwt;
|
ExtractJwt = require('passport-jwt').ExtractJwt;
|
||||||
|
|
||||||
// other required vars
|
// other required vars
|
||||||
|
let logger = null;
|
||||||
|
var users_db = null;
|
||||||
let SERVER_SECRET = null;
|
let SERVER_SECRET = null;
|
||||||
let JWT_EXPIRATION = null;
|
let JWT_EXPIRATION = null;
|
||||||
let opts = null;
|
let opts = null;
|
||||||
let saltRounds = 10;
|
let saltRounds = null;
|
||||||
|
|
||||||
|
exports.initialize = function(input_users_db, input_logger) {
|
||||||
|
setLogger(input_logger)
|
||||||
|
setDB(input_users_db);
|
||||||
|
|
||||||
exports.initialize = function () {
|
|
||||||
/*************************
|
/*************************
|
||||||
* Authentication module
|
* Authentication module
|
||||||
************************/
|
************************/
|
||||||
|
saltRounds = 10;
|
||||||
|
|
||||||
if (db_api.database_initialized) {
|
|
||||||
setupRoles();
|
|
||||||
} else {
|
|
||||||
db_api.database_initialized_bs.subscribe(init => {
|
|
||||||
if (init) setupRoles();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sometimes this value is not properly typed: https://github.com/Tzahi12345/YoutubeDL-Material/issues/813
|
|
||||||
JWT_EXPIRATION = config_api.getConfigItem('ytdl_jwt_expiration');
|
JWT_EXPIRATION = config_api.getConfigItem('ytdl_jwt_expiration');
|
||||||
if (!(+JWT_EXPIRATION)) {
|
|
||||||
logger.warn(`JWT expiration value improperly set to ${JWT_EXPIRATION}, auto setting to 1 day.`);
|
|
||||||
JWT_EXPIRATION = 86400;
|
|
||||||
} else {
|
|
||||||
JWT_EXPIRATION = +JWT_EXPIRATION;
|
|
||||||
}
|
|
||||||
|
|
||||||
SERVER_SECRET = null;
|
SERVER_SECRET = null;
|
||||||
if (db_api.users_db.get('jwt_secret').value()) {
|
if (users_db.get('jwt_secret').value()) {
|
||||||
SERVER_SECRET = db_api.users_db.get('jwt_secret').value();
|
SERVER_SECRET = users_db.get('jwt_secret').value();
|
||||||
} else {
|
} else {
|
||||||
SERVER_SECRET = uuid();
|
SERVER_SECRET = uuid();
|
||||||
db_api.users_db.set('jwt_secret', SERVER_SECRET).write();
|
users_db.set('jwt_secret', SERVER_SECRET).write();
|
||||||
}
|
}
|
||||||
|
|
||||||
opts = {}
|
opts = {}
|
||||||
opts.jwtFromRequest = ExtractJwt.fromUrlQueryParameter('jwt');
|
opts.jwtFromRequest = ExtractJwt.fromUrlQueryParameter('jwt');
|
||||||
opts.secretOrKey = SERVER_SECRET;
|
opts.secretOrKey = SERVER_SECRET;
|
||||||
|
/*opts.issuer = 'example.com';
|
||||||
|
opts.audience = 'example.com';*/
|
||||||
|
|
||||||
exports.passport.use(new JwtStrategy(opts, async function(jwt_payload, done) {
|
exports.passport.use(new JwtStrategy(opts, function(jwt_payload, done) {
|
||||||
const user = await db_api.getRecord('users', {uid: jwt_payload.user});
|
const user = users_db.get('users').find({uid: jwt_payload.user}).value();
|
||||||
if (user) {
|
if (user) {
|
||||||
return done(null, user);
|
return done(null, user);
|
||||||
} else {
|
} else {
|
||||||
@@ -65,32 +57,12 @@ exports.initialize = function () {
|
|||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
const setupRoles = async () => {
|
function setLogger(input_logger) {
|
||||||
const required_roles = {
|
logger = input_logger;
|
||||||
admin: {
|
}
|
||||||
permissions: CONSTS.AVAILABLE_PERMISSIONS
|
|
||||||
},
|
|
||||||
user: {
|
|
||||||
permissions: [
|
|
||||||
'filemanager',
|
|
||||||
'subscriptions',
|
|
||||||
'sharing'
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const role_keys = Object.keys(required_roles);
|
function setDB(input_users_db) {
|
||||||
for (let i = 0; i < role_keys.length; i++) {
|
users_db = input_users_db;
|
||||||
const role_key = role_keys[i];
|
|
||||||
const role_in_db = await db_api.getRecord('roles', {key: role_key});
|
|
||||||
if (!role_in_db) {
|
|
||||||
// insert task metadata into table if missing
|
|
||||||
await db_api.insertRecordIntoTable('roles', {
|
|
||||||
key: role_key,
|
|
||||||
permissions: required_roles[role_key]['permissions']
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.passport = require('passport');
|
exports.passport = require('passport');
|
||||||
@@ -106,41 +78,49 @@ exports.passport.deserializeUser(function(user, done) {
|
|||||||
/***************************************
|
/***************************************
|
||||||
* Register user with hashed password
|
* Register user with hashed password
|
||||||
**************************************/
|
**************************************/
|
||||||
|
exports.registerUser = function(req, res) {
|
||||||
|
var userid = req.body.userid;
|
||||||
|
var username = req.body.username;
|
||||||
|
var plaintextPassword = req.body.password;
|
||||||
|
|
||||||
exports.registerUser = async (userid, username, plaintextPassword) => {
|
if (userid !== 'admin' && !config_api.getConfigItem('ytdl_allow_registration') && !req.isAuthenticated() && (!req.user || !exports.userHasPermission(req.user.uid, 'settings'))) {
|
||||||
const hash = await bcrypt.hash(plaintextPassword, saltRounds);
|
res.sendStatus(409);
|
||||||
const new_user = generateUserObject(userid, username, hash);
|
logger.error(`Registration failed for user ${userid}. Registration is disabled.`);
|
||||||
// check if user exists
|
return;
|
||||||
if (await db_api.getRecord('users', {uid: userid})) {
|
|
||||||
// user id is taken!
|
|
||||||
logger.error('Registration failed: UID is already taken!');
|
|
||||||
return null;
|
|
||||||
} else if (await db_api.getRecord('users', {name: username})) {
|
|
||||||
// user name is taken!
|
|
||||||
logger.error('Registration failed: User name is already taken!');
|
|
||||||
return null;
|
|
||||||
} else {
|
|
||||||
// add to db
|
|
||||||
await db_api.insertRecordIntoTable('users', new_user);
|
|
||||||
logger.verbose(`New user created: ${new_user.name}`);
|
|
||||||
return new_user;
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
exports.deleteUser = async (uid) => {
|
bcrypt.hash(plaintextPassword, saltRounds)
|
||||||
let success = false;
|
.then(function(hash) {
|
||||||
let usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
|
let new_user = generateUserObject(userid, username, hash);
|
||||||
const user_folder = path.join(__dirname, usersFileFolder, uid);
|
// check if user exists
|
||||||
const user_db_obj = await db_api.getRecord('users', {uid: uid});
|
if (users_db.get('users').find({uid: userid}).value()) {
|
||||||
if (user_db_obj) {
|
// user id is taken!
|
||||||
// user exists, let's delete
|
logger.error('Registration failed: UID is already taken!');
|
||||||
await fs.remove(user_folder);
|
res.status(409).send('UID is already taken!');
|
||||||
await db_api.removeRecord('users', {uid: uid});
|
} else if (users_db.get('users').find({name: username}).value()) {
|
||||||
success = true;
|
// user name is taken!
|
||||||
} else {
|
logger.error('Registration failed: User name is already taken!');
|
||||||
logger.error(`Could not find user with uid ${uid}`);
|
res.status(409).send('User name is already taken!');
|
||||||
}
|
} else {
|
||||||
return success;
|
// add to db
|
||||||
|
users_db.get('users').push(new_user).write();
|
||||||
|
logger.verbose(`New user created: ${new_user.name}`);
|
||||||
|
res.send({
|
||||||
|
user: new_user
|
||||||
|
});
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.then(function(result) {
|
||||||
|
|
||||||
|
})
|
||||||
|
.catch(function(err) {
|
||||||
|
logger.error(err);
|
||||||
|
if( err.code == 'ER_DUP_ENTRY' ) {
|
||||||
|
res.status(409).send('UserId already taken');
|
||||||
|
} else {
|
||||||
|
res.sendStatus(409);
|
||||||
|
}
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/***************************************
|
/***************************************
|
||||||
@@ -156,22 +136,16 @@ exports.deleteUser = async (uid) => {
|
|||||||
************************************************/
|
************************************************/
|
||||||
|
|
||||||
|
|
||||||
exports.login = async (username, password) => {
|
|
||||||
// even if we're using LDAP, we still want users to be able to login using internal credentials
|
|
||||||
const user = await db_api.getRecord('users', {name: username});
|
|
||||||
if (!user) {
|
|
||||||
if (config_api.getConfigItem('ytdl_auth_method') === 'internal') logger.error(`User ${username} not found`);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (user.auth_method && user.auth_method !== 'internal') { return false }
|
|
||||||
return await bcrypt.compare(password, user.passhash) ? user : false;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.passport.use(new LocalStrategy({
|
exports.passport.use(new LocalStrategy({
|
||||||
usernameField: 'username',
|
usernameField: 'username',
|
||||||
passwordField: 'password'},
|
passwordField: 'password'},
|
||||||
async function(username, password, done) {
|
async function(username, password, done) {
|
||||||
return done(null, await exports.login(username, password));
|
const user = users_db.get('users').find({name: username}).value();
|
||||||
|
if (!user) { logger.error(`User ${username} not found`); return done(null, false); }
|
||||||
|
if (user.auth_method && user.auth_method !== 'internal') { return done(null, false); }
|
||||||
|
if (user) {
|
||||||
|
return done(null, (await bcrypt.compare(password, user.passhash)) ? user : false);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
));
|
));
|
||||||
|
|
||||||
@@ -182,17 +156,17 @@ var getLDAPConfiguration = function(req, callback) {
|
|||||||
};
|
};
|
||||||
|
|
||||||
exports.passport.use(new LdapStrategy(getLDAPConfiguration,
|
exports.passport.use(new LdapStrategy(getLDAPConfiguration,
|
||||||
async function(user, done) {
|
function(user, done) {
|
||||||
// check if ldap auth is enabled
|
// check if ldap auth is enabled
|
||||||
const ldap_enabled = config_api.getConfigItem('ytdl_auth_method') === 'ldap';
|
const ldap_enabled = config_api.getConfigItem('ytdl_auth_method') === 'ldap';
|
||||||
if (!ldap_enabled) return done(null, false);
|
if (!ldap_enabled) return done(null, false);
|
||||||
|
|
||||||
const user_uid = user.uid;
|
const user_uid = user.uid;
|
||||||
let db_user = await db_api.getRecord('users', {uid: user_uid});
|
let db_user = users_db.get('users').find({uid: user_uid}).value();
|
||||||
if (!db_user) {
|
if (!db_user) {
|
||||||
// generate DB user
|
// generate DB user
|
||||||
let new_user = generateUserObject(user_uid, user_uid, null, 'ldap');
|
let new_user = generateUserObject(user_uid, user_uid, null, 'ldap');
|
||||||
await db_api.insertRecordIntoTable('users', new_user);
|
users_db.get('users').push(new_user).write();
|
||||||
db_user = new_user;
|
db_user = new_user;
|
||||||
logger.verbose(`Generated new user ${user_uid} using LDAP`);
|
logger.verbose(`Generated new user ${user_uid} using LDAP`);
|
||||||
}
|
}
|
||||||
@@ -216,12 +190,12 @@ exports.generateJWT = function(req, res, next) {
|
|||||||
next();
|
next();
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.returnAuthResponse = async function(req, res) {
|
exports.returnAuthResponse = function(req, res) {
|
||||||
res.status(200).json({
|
res.status(200).json({
|
||||||
user: req.user,
|
user: req.user,
|
||||||
token: req.token,
|
token: req.token,
|
||||||
permissions: await exports.userPermissions(req.user.uid),
|
permissions: exports.userPermissions(req.user.uid),
|
||||||
available_permissions: CONSTS.AVAILABLE_PERMISSIONS
|
available_permissions: consts['AVAILABLE_PERMISSIONS']
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -233,7 +207,7 @@ exports.returnAuthResponse = async function(req, res) {
|
|||||||
* It also passes the user object to the next
|
* It also passes the user object to the next
|
||||||
* middleware through res.locals
|
* middleware through res.locals
|
||||||
**************************************/
|
**************************************/
|
||||||
exports.ensureAuthenticatedElseError = (req, res, next) => {
|
exports.ensureAuthenticatedElseError = function(req, res, next) {
|
||||||
var token = getToken(req.query);
|
var token = getToken(req.query);
|
||||||
if( token ) {
|
if( token ) {
|
||||||
try {
|
try {
|
||||||
@@ -251,10 +225,10 @@ exports.ensureAuthenticatedElseError = (req, res, next) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// change password
|
// change password
|
||||||
exports.changeUserPassword = async (user_uid, new_pass) => {
|
exports.changeUserPassword = async function(user_uid, new_pass) {
|
||||||
try {
|
try {
|
||||||
const hash = await bcrypt.hash(new_pass, saltRounds);
|
const hash = await bcrypt.hash(new_pass, saltRounds);
|
||||||
await db_api.updateRecord('users', {uid: user_uid}, {passhash: hash});
|
users_db.get('users').find({uid: user_uid}).assign({passhash: hash}).write();
|
||||||
return true;
|
return true;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
return false;
|
return false;
|
||||||
@@ -262,15 +236,16 @@ exports.changeUserPassword = async (user_uid, new_pass) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// change user permissions
|
// change user permissions
|
||||||
exports.changeUserPermissions = async (user_uid, permission, new_value) => {
|
exports.changeUserPermissions = function(user_uid, permission, new_value) {
|
||||||
try {
|
try {
|
||||||
await db_api.pullFromRecordsArray('users', {uid: user_uid}, 'permissions', permission);
|
const user_db_obj = users_db.get('users').find({uid: user_uid});
|
||||||
await db_api.pullFromRecordsArray('users', {uid: user_uid}, 'permission_overrides', permission);
|
user_db_obj.get('permissions').pull(permission).write();
|
||||||
|
user_db_obj.get('permission_overrides').pull(permission).write();
|
||||||
if (new_value === 'yes') {
|
if (new_value === 'yes') {
|
||||||
await db_api.pushToRecordsArray('users', {uid: user_uid}, 'permissions', permission);
|
user_db_obj.get('permissions').push(permission).write();
|
||||||
await db_api.pushToRecordsArray('users', {uid: user_uid}, 'permission_overrides', permission);
|
user_db_obj.get('permission_overrides').push(permission).write();
|
||||||
} else if (new_value === 'no') {
|
} else if (new_value === 'no') {
|
||||||
await db_api.pushToRecordsArray('users', {uid: user_uid}, 'permission_overrides', permission);
|
user_db_obj.get('permission_overrides').push(permission).write();
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
@@ -280,11 +255,12 @@ exports.changeUserPermissions = async (user_uid, permission, new_value) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// change role permissions
|
// change role permissions
|
||||||
exports.changeRolePermissions = async (role, permission, new_value) => {
|
exports.changeRolePermissions = function(role, permission, new_value) {
|
||||||
try {
|
try {
|
||||||
await db_api.pullFromRecordsArray('roles', {key: role}, 'permissions', permission);
|
const role_db_obj = users_db.get('roles').get(role);
|
||||||
|
role_db_obj.get('permissions').pull(permission).write();
|
||||||
if (new_value === 'yes') {
|
if (new_value === 'yes') {
|
||||||
await db_api.pushToRecordsArray('roles', {key: role}, 'permissions', permission);
|
role_db_obj.get('permissions').push(permission).write();
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
@@ -293,19 +269,30 @@ exports.changeRolePermissions = async (role, permission, new_value) => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.adminExists = async function() {
|
exports.adminExists = function() {
|
||||||
return !!(await db_api.getRecord('users', {uid: 'admin'}));
|
return !!users_db.get('users').find({uid: 'admin'}).value();
|
||||||
}
|
}
|
||||||
|
|
||||||
// video stuff
|
// video stuff
|
||||||
|
|
||||||
exports.getUserVideos = async function(user_uid, type) {
|
exports.getUserVideos = function(user_uid, type) {
|
||||||
const files = await db_api.getRecords('files', {user_uid: user_uid});
|
const user = users_db.get('users').find({uid: user_uid}).value();
|
||||||
return type ? files.filter(file => file.isAudio === (type === 'audio')) : files;
|
return user['files'][type];
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getUserVideo = async function(user_uid, file_uid, requireSharing = false) {
|
exports.getUserVideo = function(user_uid, file_uid, type, requireSharing = false) {
|
||||||
let file = await db_api.getRecord('files', {uid: file_uid});
|
let file = null;
|
||||||
|
if (!type) {
|
||||||
|
file = users_db.get('users').find({uid: user_uid}).get(`files.audio`).find({uid: file_uid}).value();
|
||||||
|
if (!file) {
|
||||||
|
file = users_db.get('users').find({uid: user_uid}).get(`files.video`).find({uid: file_uid}).value();
|
||||||
|
if (file) type = 'video';
|
||||||
|
} else {
|
||||||
|
type = 'audio';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!file && type) file = users_db.get('users').find({uid: user_uid}).get(`files.${type}`).find({uid: file_uid}).value();
|
||||||
|
|
||||||
// prevent unauthorized users from accessing the file info
|
// prevent unauthorized users from accessing the file info
|
||||||
if (file && !file['sharingEnabled'] && requireSharing) file = null;
|
if (file && !file['sharingEnabled'] && requireSharing) file = null;
|
||||||
@@ -313,17 +300,38 @@ exports.getUserVideo = async function(user_uid, file_uid, requireSharing = false
|
|||||||
return file;
|
return file;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.removePlaylist = async function(user_uid, playlistID) {
|
exports.addPlaylist = function(user_uid, new_playlist, type) {
|
||||||
await db_api.removeRecord('playlist', {playlistID: playlistID});
|
users_db.get('users').find({uid: user_uid}).get(`playlists.${type}`).push(new_playlist).write();
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getUserPlaylists = async function(user_uid) {
|
exports.updatePlaylistFiles = function(user_uid, playlistID, new_filenames, type) {
|
||||||
return await db_api.getRecords('playlists', {user_uid: user_uid});
|
users_db.get('users').find({uid: user_uid}).get(`playlists.${type}`).find({id: playlistID}).assign({fileNames: new_filenames});
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getUserPlaylist = async function(user_uid, playlistID, requireSharing = false) {
|
exports.removePlaylist = function(user_uid, playlistID, type) {
|
||||||
let playlist = await db_api.getRecord('playlists', {id: playlistID});
|
users_db.get('users').find({uid: user_uid}).get(`playlists.${type}`).remove({id: playlistID}).write();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.getUserPlaylists = function(user_uid, type) {
|
||||||
|
const user = users_db.get('users').find({uid: user_uid}).value();
|
||||||
|
return user['playlists'][type];
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.getUserPlaylist = function(user_uid, playlistID, type, requireSharing = false) {
|
||||||
|
let playlist = null;
|
||||||
|
if (!type) {
|
||||||
|
playlist = users_db.get('users').find({uid: user_uid}).get(`playlists.audio`).find({id: playlistID}).value();
|
||||||
|
if (!playlist) {
|
||||||
|
playlist = users_db.get('users').find({uid: user_uid}).get(`playlists.video`).find({id: playlistID}).value();
|
||||||
|
if (playlist) type = 'video';
|
||||||
|
} else {
|
||||||
|
type = 'audio';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!playlist) playlist = users_db.get('users').find({uid: user_uid}).get(`playlists.${type}`).find({id: playlistID}).value();
|
||||||
|
|
||||||
// prevent unauthorized users from accessing the file info
|
// prevent unauthorized users from accessing the file info
|
||||||
if (requireSharing && !playlist['sharingEnabled']) playlist = null;
|
if (requireSharing && !playlist['sharingEnabled']) playlist = null;
|
||||||
@@ -331,22 +339,108 @@ exports.getUserPlaylist = async function(user_uid, playlistID, requireSharing =
|
|||||||
return playlist;
|
return playlist;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.changeSharingMode = async function(user_uid, file_uid, is_playlist, enabled) {
|
exports.registerUserFile = function(user_uid, file_object, type) {
|
||||||
|
users_db.get('users').find({uid: user_uid}).get(`files.${type}`)
|
||||||
|
.remove({
|
||||||
|
path: file_object['path']
|
||||||
|
}).write();
|
||||||
|
|
||||||
|
users_db.get('users').find({uid: user_uid}).get(`files.${type}`)
|
||||||
|
.push(file_object)
|
||||||
|
.write();
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.deleteUserFile = async function(user_uid, file_uid, type, blacklistMode = false) {
|
||||||
let success = false;
|
let success = false;
|
||||||
is_playlist ? await db_api.updateRecord(`playlists`, {id: file_uid}, {sharingEnabled: enabled}) : await db_api.updateRecord(`files`, {uid: file_uid}, {sharingEnabled: enabled});
|
const file_obj = users_db.get('users').find({uid: user_uid}).get(`files.${type}`).find({uid: file_uid}).value();
|
||||||
success = true;
|
if (file_obj) {
|
||||||
|
const usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
|
||||||
|
const ext = type === 'audio' ? '.mp3' : '.mp4';
|
||||||
|
|
||||||
|
// close descriptors
|
||||||
|
if (config_api.descriptors[file_obj.id]) {
|
||||||
|
try {
|
||||||
|
for (let i = 0; i < config_api.descriptors[file_obj.id].length; i++) {
|
||||||
|
config_api.descriptors[file_obj.id][i].destroy();
|
||||||
|
}
|
||||||
|
} catch(e) {
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const full_path = path.join(usersFileFolder, user_uid, type, file_obj.id + ext);
|
||||||
|
users_db.get('users').find({uid: user_uid}).get(`files.${type}`)
|
||||||
|
.remove({
|
||||||
|
uid: file_uid
|
||||||
|
}).write();
|
||||||
|
if (await fs.pathExists(full_path)) {
|
||||||
|
// remove json and file
|
||||||
|
const json_path = path.join(usersFileFolder, user_uid, type, file_obj.id + '.info.json');
|
||||||
|
const alternate_json_path = path.join(usersFileFolder, user_uid, type, file_obj.id + ext + '.info.json');
|
||||||
|
let youtube_id = null;
|
||||||
|
if (await fs.pathExists(json_path)) {
|
||||||
|
youtube_id = await fs.readJSON(json_path).id;
|
||||||
|
await fs.unlink(json_path);
|
||||||
|
} else if (await fs.pathExists(alternate_json_path)) {
|
||||||
|
youtube_id = await fs.readJSON(alternate_json_path).id;
|
||||||
|
await fs.unlink(alternate_json_path);
|
||||||
|
}
|
||||||
|
|
||||||
|
await fs.unlink(full_path);
|
||||||
|
|
||||||
|
// do archive stuff
|
||||||
|
|
||||||
|
let useYoutubeDLArchive = config_api.getConfigItem('ytdl_use_youtubedl_archive');
|
||||||
|
if (useYoutubeDLArchive) {
|
||||||
|
const archive_path = path.join(usersFileFolder, user_uid, 'archives', `archive_${type}.txt`);
|
||||||
|
|
||||||
|
// use subscriptions API to remove video from the archive file, and write it to the blacklist
|
||||||
|
if (await fs.pathExists(archive_path)) {
|
||||||
|
const line = youtube_id ? await subscriptions_api.removeIDFromArchive(archive_path, youtube_id) : null;
|
||||||
|
if (blacklistMode && line) {
|
||||||
|
let blacklistPath = path.join(usersFileFolder, user_uid, 'archives', `blacklist_${type}.txt`);
|
||||||
|
// adds newline to the beginning of the line
|
||||||
|
line = '\n' + line;
|
||||||
|
await fs.appendFile(blacklistPath, line);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.info(`Could not find archive file for ${type} files. Creating...`);
|
||||||
|
await fs.ensureFile(archive_path);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
success = true;
|
||||||
|
} else {
|
||||||
|
success = false;
|
||||||
|
logger.warn(`User file ${file_uid} does not exist!`);
|
||||||
|
}
|
||||||
|
|
||||||
return success;
|
return success;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.userHasPermission = async function(user_uid, permission) {
|
exports.changeSharingMode = function(user_uid, file_uid, type, is_playlist, enabled) {
|
||||||
|
let success = false;
|
||||||
|
const user_db_obj = users_db.get('users').find({uid: user_uid});
|
||||||
|
if (user_db_obj.value()) {
|
||||||
|
const file_db_obj = is_playlist ? user_db_obj.get(`playlists.${type}`).find({id: file_uid}) : user_db_obj.get(`files.${type}`).find({uid: file_uid});
|
||||||
|
if (file_db_obj.value()) {
|
||||||
|
success = true;
|
||||||
|
file_db_obj.assign({sharingEnabled: enabled}).write();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const user_obj = await db_api.getRecord('users', ({uid: user_uid}));
|
return success;
|
||||||
|
}
|
||||||
|
|
||||||
|
exports.userHasPermission = function(user_uid, permission) {
|
||||||
|
const user_obj = users_db.get('users').find({uid: user_uid}).value();
|
||||||
const role = user_obj['role'];
|
const role = user_obj['role'];
|
||||||
if (!role) {
|
if (!role) {
|
||||||
// role doesn't exist
|
// role doesn't exist
|
||||||
logger.error('Invalid role ' + role);
|
logger.error('Invalid role ' + role);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
const role_permissions = (users_db.get('roles').value())['permissions'];
|
||||||
|
|
||||||
const user_has_explicit_permission = user_obj['permissions'].includes(permission);
|
const user_has_explicit_permission = user_obj['permissions'].includes(permission);
|
||||||
const permission_in_overrides = user_obj['permission_overrides'].includes(permission);
|
const permission_in_overrides = user_obj['permission_overrides'].includes(permission);
|
||||||
@@ -361,8 +455,7 @@ exports.userHasPermission = async function(user_uid, permission) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// no overrides, let's check if the role has the permission
|
// no overrides, let's check if the role has the permission
|
||||||
const role_has_permission = await exports.roleHasPermissions(role, permission);
|
if (role_permissions.includes(permission)) {
|
||||||
if (role_has_permission) {
|
|
||||||
return true;
|
return true;
|
||||||
} else {
|
} else {
|
||||||
logger.verbose(`User ${user_uid} failed to get permission ${permission}`);
|
logger.verbose(`User ${user_uid} failed to get permission ${permission}`);
|
||||||
@@ -370,30 +463,19 @@ exports.userHasPermission = async function(user_uid, permission) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.roleHasPermissions = async function(role, permission) {
|
exports.userPermissions = function(user_uid) {
|
||||||
const role_obj = await db_api.getRecord('roles', {key: role})
|
|
||||||
if (!role) {
|
|
||||||
logger.error(`Role ${role} does not exist!`);
|
|
||||||
}
|
|
||||||
const role_permissions = role_obj['permissions'];
|
|
||||||
if (role_permissions && role_permissions.includes(permission)) return true;
|
|
||||||
else return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.userPermissions = async function(user_uid) {
|
|
||||||
let user_permissions = [];
|
let user_permissions = [];
|
||||||
const user_obj = await db_api.getRecord('users', ({uid: user_uid}));
|
const user_obj = users_db.get('users').find({uid: user_uid}).value();
|
||||||
const role = user_obj['role'];
|
const role = user_obj['role'];
|
||||||
if (!role) {
|
if (!role) {
|
||||||
// role doesn't exist
|
// role doesn't exist
|
||||||
logger.error('Invalid role ' + role);
|
logger.error('Invalid role ' + role);
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
const role_obj = await db_api.getRecord('roles', {key: role});
|
const role_permissions = users_db.get('roles').get(role).get('permissions').value()
|
||||||
const role_permissions = role_obj['permissions'];
|
|
||||||
|
|
||||||
for (let i = 0; i < CONSTS.AVAILABLE_PERMISSIONS.length; i++) {
|
for (let i = 0; i < consts['AVAILABLE_PERMISSIONS'].length; i++) {
|
||||||
let permission = CONSTS.AVAILABLE_PERMISSIONS[i];
|
let permission = consts['AVAILABLE_PERMISSIONS'][i];
|
||||||
|
|
||||||
const user_has_explicit_permission = user_obj['permissions'].includes(permission);
|
const user_has_explicit_permission = user_obj['permissions'].includes(permission);
|
||||||
const permission_in_overrides = user_obj['permission_overrides'].includes(permission);
|
const permission_in_overrides = user_obj['permission_overrides'].includes(permission);
|
||||||
@@ -436,8 +518,14 @@ function generateUserObject(userid, username, hash, auth_method = 'internal') {
|
|||||||
name: username,
|
name: username,
|
||||||
uid: userid,
|
uid: userid,
|
||||||
passhash: auth_method === 'internal' ? hash : null,
|
passhash: auth_method === 'internal' ? hash : null,
|
||||||
files: [],
|
files: {
|
||||||
playlists: [],
|
audio: [],
|
||||||
|
video: []
|
||||||
|
},
|
||||||
|
playlists: {
|
||||||
|
audio: [],
|
||||||
|
video: []
|
||||||
|
},
|
||||||
subscriptions: [],
|
subscriptions: [],
|
||||||
created: Date.now(),
|
created: Date.now(),
|
||||||
role: userid === 'admin' && auth_method === 'internal' ? 'admin' : 'user',
|
role: userid === 'admin' && auth_method === 'internal' ? 'admin' : 'user',
|
||||||
|
|||||||
@@ -1,6 +1,18 @@
|
|||||||
const utils = require('./utils');
|
const config_api = require('./config');
|
||||||
const logger = require('./logger');
|
|
||||||
const db_api = require('./db');
|
var logger = null;
|
||||||
|
var db = null;
|
||||||
|
var users_db = null;
|
||||||
|
var db_api = null;
|
||||||
|
|
||||||
|
function setDB(input_db, input_users_db, input_db_api) { db = input_db; users_db = input_users_db; db_api = input_db_api }
|
||||||
|
function setLogger(input_logger) { logger = input_logger; }
|
||||||
|
|
||||||
|
function initialize(input_db, input_users_db, input_logger, input_db_api) {
|
||||||
|
setDB(input_db, input_users_db, input_db_api);
|
||||||
|
setLogger(input_logger);
|
||||||
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
|
||||||
Categories:
|
Categories:
|
||||||
@@ -21,57 +33,35 @@ Rules:
|
|||||||
|
|
||||||
*/
|
*/
|
||||||
|
|
||||||
async function categorize(file_jsons) {
|
async function categorize(file_json) {
|
||||||
// to make the logic easier, let's assume the file metadata is an array
|
|
||||||
if (!Array.isArray(file_jsons)) file_jsons = [file_jsons];
|
|
||||||
|
|
||||||
let selected_category = null;
|
let selected_category = null;
|
||||||
const categories = await getCategories();
|
const categories = getCategories();
|
||||||
if (!categories) {
|
if (!categories) {
|
||||||
logger.warn('Categories could not be found.');
|
logger.warn('Categories could not be found. Initializing categories...');
|
||||||
|
db.assign({categories: []}).write();
|
||||||
return null;
|
return null;
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
for (const file_json of file_jsons) {
|
for (let i = 0; i < categories.length; i++) {
|
||||||
for (const category of categories) {
|
const category = categories[i];
|
||||||
const rules = category['rules'];
|
const rules = category['rules'];
|
||||||
|
|
||||||
// if rules for current category apply, then that is the selected category
|
// if rules for current category apply, then that is the selected category
|
||||||
if (applyCategoryRules(file_json, rules, category['name'])) {
|
if (applyCategoryRules(file_json, rules, category['name'])) {
|
||||||
selected_category = category;
|
selected_category = category;
|
||||||
logger.verbose(`Selected category ${category['name']} for ${file_json['webpage_url']}`);
|
logger.verbose(`Selected category ${category['name']} for ${file_json['webpage_url']}`);
|
||||||
return selected_category;
|
return selected_category;
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return selected_category;
|
return selected_category;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getCategories() {
|
function getCategories() {
|
||||||
const categories = await db_api.getRecords('categories');
|
const categories = db.get('categories').value();
|
||||||
return categories ? categories : null;
|
return categories ? categories : null;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getCategoriesAsPlaylists() {
|
|
||||||
const categories_as_playlists = [];
|
|
||||||
const available_categories = await getCategories();
|
|
||||||
if (available_categories) {
|
|
||||||
for (let category of available_categories) {
|
|
||||||
const files_that_match = await db_api.getRecords('files', {'category.uid': category['uid']});
|
|
||||||
if (files_that_match && files_that_match.length > 0) {
|
|
||||||
category['thumbnailURL'] = files_that_match[0].thumbnailURL;
|
|
||||||
category['thumbnailPath'] = files_that_match[0].thumbnailPath;
|
|
||||||
category['duration'] = files_that_match.reduce((a, b) => a + utils.durationStringToNumber(b.duration), 0);
|
|
||||||
category['id'] = category['uid'];
|
|
||||||
category['auto'] = true;
|
|
||||||
categories_as_playlists.push(category);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return categories_as_playlists;
|
|
||||||
}
|
|
||||||
|
|
||||||
function applyCategoryRules(file_json, rules, category_name) {
|
function applyCategoryRules(file_json, rules, category_name) {
|
||||||
let rules_apply = false;
|
let rules_apply = false;
|
||||||
for (let i = 0; i < rules.length; i++) {
|
for (let i = 0; i < rules.length; i++) {
|
||||||
@@ -82,10 +72,10 @@ function applyCategoryRules(file_json, rules, category_name) {
|
|||||||
|
|
||||||
switch (rule['comparator']) {
|
switch (rule['comparator']) {
|
||||||
case 'includes':
|
case 'includes':
|
||||||
rule_applies = file_json[rule['property']].toLowerCase().includes(rule['value'].toLowerCase());
|
rule_applies = file_json[rule['property']].includes(rule['value']);
|
||||||
break;
|
break;
|
||||||
case 'not_includes':
|
case 'not_includes':
|
||||||
rule_applies = !(file_json[rule['property']].toLowerCase().includes(rule['value'].toLowerCase()));
|
rule_applies = !(file_json[rule['property']].includes(rule['value']));
|
||||||
break;
|
break;
|
||||||
case 'equals':
|
case 'equals':
|
||||||
rule_applies = file_json[rule['property']] === rule['value'];
|
rule_applies = file_json[rule['property']] === rule['value'];
|
||||||
@@ -111,24 +101,23 @@ function applyCategoryRules(file_json, rules, category_name) {
|
|||||||
return rules_apply;
|
return rules_apply;
|
||||||
}
|
}
|
||||||
|
|
||||||
// async function addTagToVideo(tag, video, user_uid) {
|
async function addTagToVideo(tag, video, user_uid) {
|
||||||
// // TODO: Implement
|
// TODO: Implement
|
||||||
// }
|
}
|
||||||
|
|
||||||
// async function removeTagFromVideo(tag, video, user_uid) {
|
async function removeTagFromVideo(tag, video, user_uid) {
|
||||||
// // TODO: Implement
|
// TODO: Implement
|
||||||
// }
|
}
|
||||||
|
|
||||||
// // adds tag to list of existing tags (used for tag suggestions)
|
// adds tag to list of existing tags (used for tag suggestions)
|
||||||
// async function addTagToExistingTags(tag) {
|
async function addTagToExistingTags(tag) {
|
||||||
// const existing_tags = db.get('tags').value();
|
const existing_tags = db.get('tags').value();
|
||||||
// if (!existing_tags.includes(tag)) {
|
if (!existing_tags.includes(tag)) {
|
||||||
// db.get('tags').push(tag).write();
|
db.get('tags').push(tag).write();
|
||||||
// }
|
}
|
||||||
// }
|
}
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
|
initialize: initialize,
|
||||||
categorize: categorize,
|
categorize: categorize,
|
||||||
getCategories: getCategories,
|
|
||||||
getCategoriesAsPlaylists: getCategoriesAsPlaylists
|
|
||||||
}
|
}
|
||||||
@@ -1,26 +1,24 @@
|
|||||||
const logger = require('./logger');
|
|
||||||
|
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const { BehaviorSubject } = require('rxjs');
|
|
||||||
|
|
||||||
exports.CONFIG_ITEMS = require('./consts.js')['CONFIG_ITEMS'];
|
|
||||||
exports.descriptors = {}; // to get rid of file locks when needed, TODO: move to youtube-dl.js
|
|
||||||
|
|
||||||
|
let CONFIG_ITEMS = require('./consts.js')['CONFIG_ITEMS'];
|
||||||
const debugMode = process.env.YTDL_MODE === 'debug';
|
const debugMode = process.env.YTDL_MODE === 'debug';
|
||||||
|
|
||||||
let configPath = debugMode ? '../src/assets/default.json' : 'appdata/default.json';
|
let configPath = debugMode ? '../src/assets/default.json' : 'appdata/default.json';
|
||||||
exports.config_updated = new BehaviorSubject();
|
|
||||||
|
|
||||||
exports.initialize = () => {
|
var logger = null;
|
||||||
|
function setLogger(input_logger) { logger = input_logger; }
|
||||||
|
|
||||||
|
function initialize(input_logger) {
|
||||||
|
setLogger(input_logger);
|
||||||
ensureConfigFileExists();
|
ensureConfigFileExists();
|
||||||
ensureConfigItemsExist();
|
ensureConfigItemsExist();
|
||||||
}
|
}
|
||||||
|
|
||||||
function ensureConfigItemsExist() {
|
function ensureConfigItemsExist() {
|
||||||
const config_keys = Object.keys(exports.CONFIG_ITEMS);
|
const config_keys = Object.keys(CONFIG_ITEMS);
|
||||||
for (let i = 0; i < config_keys.length; i++) {
|
for (let i = 0; i < config_keys.length; i++) {
|
||||||
const config_key = config_keys[i];
|
const config_key = config_keys[i];
|
||||||
exports.getConfigItem(config_key);
|
getConfigItem(config_key);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -61,17 +59,17 @@ function getElementNameInConfig(path) {
|
|||||||
/**
|
/**
|
||||||
* Check if config exists. If not, write default config to config path
|
* Check if config exists. If not, write default config to config path
|
||||||
*/
|
*/
|
||||||
exports.configExistsCheck = () => {
|
function configExistsCheck() {
|
||||||
let exists = fs.existsSync(configPath);
|
let exists = fs.existsSync(configPath);
|
||||||
if (!exists) {
|
if (!exists) {
|
||||||
exports.setConfigFile(DEFAULT_CONFIG);
|
setConfigFile(DEFAULT_CONFIG);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Gets config file and returns as a json
|
* Gets config file and returns as a json
|
||||||
*/
|
*/
|
||||||
exports.getConfigFile = () => {
|
function getConfigFile() {
|
||||||
try {
|
try {
|
||||||
let raw_data = fs.readFileSync(configPath);
|
let raw_data = fs.readFileSync(configPath);
|
||||||
let parsed_data = JSON.parse(raw_data);
|
let parsed_data = JSON.parse(raw_data);
|
||||||
@@ -82,40 +80,35 @@ exports.getConfigFile = () => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.setConfigFile = (config) => {
|
function setConfigFile(config) {
|
||||||
try {
|
try {
|
||||||
const old_config = exports.getConfigFile();
|
|
||||||
fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
|
fs.writeFileSync(configPath, JSON.stringify(config, null, 2));
|
||||||
const changes = exports.findChangedConfigItems(old_config, config);
|
|
||||||
if (changes.length > 0) {
|
|
||||||
for (const change of changes) exports.config_updated.next(change);
|
|
||||||
}
|
|
||||||
return true;
|
return true;
|
||||||
} catch(e) {
|
} catch(e) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getConfigItem = (key) => {
|
function getConfigItem(key) {
|
||||||
let config_json = exports.getConfigFile();
|
let config_json = getConfigFile();
|
||||||
if (!exports.CONFIG_ITEMS[key]) {
|
if (!CONFIG_ITEMS[key]) {
|
||||||
logger.error(`Config item with key '${key}' is not recognized.`);
|
logger.error(`Config item with key '${key}' is not recognized.`);
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
let path = exports.CONFIG_ITEMS[key]['path'];
|
let path = CONFIG_ITEMS[key]['path'];
|
||||||
const val = Object.byString(config_json, path);
|
const val = Object.byString(config_json, path);
|
||||||
if (val === undefined && Object.byString(DEFAULT_CONFIG, path) !== undefined) {
|
if (val === undefined && Object.byString(DEFAULT_CONFIG, path)) {
|
||||||
logger.warn(`Cannot find config with key '${key}'. Creating one with the default value...`);
|
logger.warn(`Cannot find config with key '${key}'. Creating one with the default value...`);
|
||||||
exports.setConfigItem(key, Object.byString(DEFAULT_CONFIG, path));
|
setConfigItem(key, Object.byString(DEFAULT_CONFIG, path));
|
||||||
return Object.byString(DEFAULT_CONFIG, path);
|
return Object.byString(DEFAULT_CONFIG, path);
|
||||||
}
|
}
|
||||||
return Object.byString(config_json, path);
|
return Object.byString(config_json, path);
|
||||||
}
|
};
|
||||||
|
|
||||||
exports.setConfigItem = (key, value) => {
|
function setConfigItem(key, value) {
|
||||||
let success = false;
|
let success = false;
|
||||||
let config_json = exports.getConfigFile();
|
let config_json = getConfigFile();
|
||||||
let path = exports.CONFIG_ITEMS[key]['path'];
|
let path = CONFIG_ITEMS[key]['path'];
|
||||||
let element_name = getElementNameInConfig(path);
|
let element_name = getElementNameInConfig(path);
|
||||||
let parent_path = getParentPath(path);
|
let parent_path = getParentPath(path);
|
||||||
let parent_object = Object.byString(config_json, parent_path);
|
let parent_object = Object.byString(config_json, parent_path);
|
||||||
@@ -127,18 +120,20 @@ exports.setConfigItem = (key, value) => {
|
|||||||
parent_parent_object[parent_parent_single_key] = {};
|
parent_parent_object[parent_parent_single_key] = {};
|
||||||
parent_object = Object.byString(config_json, parent_path);
|
parent_object = Object.byString(config_json, parent_path);
|
||||||
}
|
}
|
||||||
if (value === 'false') value = false;
|
|
||||||
if (value === 'true') value = true;
|
|
||||||
parent_object[element_name] = value;
|
|
||||||
|
|
||||||
success = exports.setConfigFile(config_json);
|
if (value === 'false' || value === 'true') {
|
||||||
|
parent_object[element_name] = (value === 'true');
|
||||||
|
} else {
|
||||||
|
parent_object[element_name] = value;
|
||||||
|
}
|
||||||
|
success = setConfigFile(config_json);
|
||||||
|
|
||||||
return success;
|
return success;
|
||||||
}
|
};
|
||||||
|
|
||||||
exports.setConfigItems = (items) => {
|
function setConfigItems(items) {
|
||||||
let success = false;
|
let success = false;
|
||||||
let config_json = exports.getConfigFile();
|
let config_json = getConfigFile();
|
||||||
for (let i = 0; i < items.length; i++) {
|
for (let i = 0; i < items.length; i++) {
|
||||||
let key = items[i].key;
|
let key = items[i].key;
|
||||||
let value = items[i].value;
|
let value = items[i].value;
|
||||||
@@ -148,7 +143,7 @@ exports.setConfigItems = (items) => {
|
|||||||
value = (value === 'true');
|
value = (value === 'true');
|
||||||
}
|
}
|
||||||
|
|
||||||
let item_path = exports.CONFIG_ITEMS[key]['path'];
|
let item_path = CONFIG_ITEMS[key]['path'];
|
||||||
let item_parent_path = getParentPath(item_path);
|
let item_parent_path = getParentPath(item_path);
|
||||||
let item_element_name = getElementNameInConfig(item_path);
|
let item_element_name = getElementNameInConfig(item_path);
|
||||||
|
|
||||||
@@ -156,44 +151,31 @@ exports.setConfigItems = (items) => {
|
|||||||
item_parent_object[item_element_name] = value;
|
item_parent_object[item_element_name] = value;
|
||||||
}
|
}
|
||||||
|
|
||||||
success = exports.setConfigFile(config_json);
|
success = setConfigFile(config_json);
|
||||||
return success;
|
return success;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.globalArgsRequiresSafeDownload = () => {
|
function globalArgsRequiresSafeDownload() {
|
||||||
const globalArgs = exports.getConfigItem('ytdl_custom_args').split(',,');
|
const globalArgs = getConfigItem('ytdl_custom_args').split(',,');
|
||||||
const argsThatRequireSafeDownload = ['--write-sub', '--write-srt', '--proxy'];
|
const argsThatRequireSafeDownload = ['--write-sub', '--write-srt', '--proxy'];
|
||||||
const failedArgs = globalArgs.filter(arg => argsThatRequireSafeDownload.includes(arg));
|
const failedArgs = globalArgs.filter(arg => argsThatRequireSafeDownload.includes(arg));
|
||||||
return failedArgs && failedArgs.length > 0;
|
return failedArgs && failedArgs.length > 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.findChangedConfigItems = (old_config, new_config, path = '', changedConfigItems = [], depth = 0) => {
|
module.exports = {
|
||||||
if (typeof old_config === 'object' && typeof new_config === 'object' && depth < 3) {
|
getConfigItem: getConfigItem,
|
||||||
for (const key in old_config) {
|
setConfigItem: setConfigItem,
|
||||||
if (Object.prototype.hasOwnProperty.call(new_config, key)) {
|
setConfigItems: setConfigItems,
|
||||||
exports.findChangedConfigItems(old_config[key], new_config[key], `${path}${path ? '.' : ''}${key}`, changedConfigItems, depth + 1);
|
getConfigFile: getConfigFile,
|
||||||
}
|
setConfigFile: setConfigFile,
|
||||||
}
|
configExistsCheck: configExistsCheck,
|
||||||
} else {
|
CONFIG_ITEMS: CONFIG_ITEMS,
|
||||||
if (JSON.stringify(old_config) !== JSON.stringify(new_config)) {
|
initialize: initialize,
|
||||||
const key = getConfigItemKeyByPath(path);
|
descriptors: {},
|
||||||
changedConfigItems.push({
|
globalArgsRequiresSafeDownload: globalArgsRequiresSafeDownload
|
||||||
key: key ? key : path.split('.')[path.split('.').length - 1], // return key in CONFIG_ITEMS or the object key
|
|
||||||
old_value: JSON.parse(JSON.stringify(old_config)),
|
|
||||||
new_value: JSON.parse(JSON.stringify(new_config))
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return changedConfigItems;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function getConfigItemKeyByPath(path) {
|
DEFAULT_CONFIG = {
|
||||||
const found_item = Object.values(exports.CONFIG_ITEMS).find(item => item.path === path);
|
|
||||||
if (found_item) return found_item['key'];
|
|
||||||
else return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const DEFAULT_CONFIG = {
|
|
||||||
"YoutubeDLMaterial": {
|
"YoutubeDLMaterial": {
|
||||||
"Host": {
|
"Host": {
|
||||||
"url": "http://example.com",
|
"url": "http://example.com",
|
||||||
@@ -205,44 +187,23 @@ const DEFAULT_CONFIG = {
|
|||||||
"default_file_output": "",
|
"default_file_output": "",
|
||||||
"use_youtubedl_archive": false,
|
"use_youtubedl_archive": false,
|
||||||
"custom_args": "",
|
"custom_args": "",
|
||||||
|
"safe_download_override": false,
|
||||||
"include_thumbnail": true,
|
"include_thumbnail": true,
|
||||||
"include_metadata": true,
|
"include_metadata": true
|
||||||
"max_concurrent_downloads": 5,
|
|
||||||
"download_rate_limit": ""
|
|
||||||
},
|
},
|
||||||
"Extra": {
|
"Extra": {
|
||||||
"title_top": "YoutubeDL-Material",
|
"title_top": "YoutubeDL-Material",
|
||||||
"file_manager_enabled": true,
|
"file_manager_enabled": true,
|
||||||
"allow_quality_select": true,
|
"allow_quality_select": true,
|
||||||
"download_only_mode": false,
|
"download_only_mode": false,
|
||||||
"force_autoplay": false,
|
"allow_multi_download_mode": true,
|
||||||
"enable_downloads_manager": true,
|
"enable_downloads_manager": true
|
||||||
"allow_playlist_categorization": true,
|
|
||||||
"enable_notifications": true,
|
|
||||||
"enable_all_notifications": true,
|
|
||||||
"allowed_notification_types": [],
|
|
||||||
"enable_rss_feed": false,
|
|
||||||
},
|
},
|
||||||
"API": {
|
"API": {
|
||||||
"use_API_key": false,
|
"use_API_key": false,
|
||||||
"API_key": "",
|
"API_key": "",
|
||||||
"use_youtube_API": false,
|
"use_youtube_API": false,
|
||||||
"youtube_API_key": "",
|
"youtube_API_key": ""
|
||||||
"twitch_auto_download_chat": false,
|
|
||||||
"use_sponsorblock_API": false,
|
|
||||||
"generate_NFO_files": false,
|
|
||||||
"use_ntfy_API": false,
|
|
||||||
"ntfy_topic_URL": "",
|
|
||||||
"use_gotify_API": false,
|
|
||||||
"gotify_server_URL": "",
|
|
||||||
"gotify_app_token": "",
|
|
||||||
"use_telegram_API": false,
|
|
||||||
"telegram_bot_token": "",
|
|
||||||
"telegram_chat_id": "",
|
|
||||||
"telegram_webhook_proxy": "",
|
|
||||||
"webhook_URL": "",
|
|
||||||
"discord_webhook_URL": "",
|
|
||||||
"slack_webhook_URL": "",
|
|
||||||
},
|
},
|
||||||
"Themes": {
|
"Themes": {
|
||||||
"default_theme": "default",
|
"default_theme": "default",
|
||||||
@@ -251,8 +212,7 @@ const DEFAULT_CONFIG = {
|
|||||||
"Subscriptions": {
|
"Subscriptions": {
|
||||||
"allow_subscriptions": true,
|
"allow_subscriptions": true,
|
||||||
"subscriptions_base_path": "subscriptions/",
|
"subscriptions_base_path": "subscriptions/",
|
||||||
"subscriptions_check_interval": "86400",
|
"subscriptions_check_interval": "300"
|
||||||
"redownload_fresh_uploads": false
|
|
||||||
},
|
},
|
||||||
"Users": {
|
"Users": {
|
||||||
"base_path": "users/",
|
"base_path": "users/",
|
||||||
@@ -266,12 +226,8 @@ const DEFAULT_CONFIG = {
|
|||||||
"searchFilter": "(uid={{username}})"
|
"searchFilter": "(uid={{username}})"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"Database": {
|
|
||||||
"use_local_db": true,
|
|
||||||
"mongodb_connection_string": "mongodb://127.0.0.1:27017/?compressors=zlib"
|
|
||||||
},
|
|
||||||
"Advanced": {
|
"Advanced": {
|
||||||
"default_downloader": "yt-dlp",
|
"default_downloader": "youtube-dl",
|
||||||
"use_default_downloading_agent": true,
|
"use_default_downloading_agent": true,
|
||||||
"custom_downloading_agent": "",
|
"custom_downloading_agent": "",
|
||||||
"multi_user_mode": false,
|
"multi_user_mode": false,
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
exports.CONFIG_ITEMS = {
|
let CONFIG_ITEMS = {
|
||||||
// Host
|
// Host
|
||||||
'ytdl_url': {
|
'ytdl_url': {
|
||||||
'key': 'ytdl_url',
|
'key': 'ytdl_url',
|
||||||
@@ -30,6 +30,10 @@ exports.CONFIG_ITEMS = {
|
|||||||
'key': 'ytdl_custom_args',
|
'key': 'ytdl_custom_args',
|
||||||
'path': 'YoutubeDLMaterial.Downloader.custom_args'
|
'path': 'YoutubeDLMaterial.Downloader.custom_args'
|
||||||
},
|
},
|
||||||
|
'ytdl_safe_download_override': {
|
||||||
|
'key': 'ytdl_safe_download_override',
|
||||||
|
'path': 'YoutubeDLMaterial.Downloader.safe_download_override'
|
||||||
|
},
|
||||||
'ytdl_include_thumbnail': {
|
'ytdl_include_thumbnail': {
|
||||||
'key': 'ytdl_include_thumbnail',
|
'key': 'ytdl_include_thumbnail',
|
||||||
'path': 'YoutubeDLMaterial.Downloader.include_thumbnail'
|
'path': 'YoutubeDLMaterial.Downloader.include_thumbnail'
|
||||||
@@ -38,14 +42,6 @@ exports.CONFIG_ITEMS = {
|
|||||||
'key': 'ytdl_include_metadata',
|
'key': 'ytdl_include_metadata',
|
||||||
'path': 'YoutubeDLMaterial.Downloader.include_metadata'
|
'path': 'YoutubeDLMaterial.Downloader.include_metadata'
|
||||||
},
|
},
|
||||||
'ytdl_max_concurrent_downloads': {
|
|
||||||
'key': 'ytdl_max_concurrent_downloads',
|
|
||||||
'path': 'YoutubeDLMaterial.Downloader.max_concurrent_downloads'
|
|
||||||
},
|
|
||||||
'ytdl_download_rate_limit': {
|
|
||||||
'key': 'ytdl_download_rate_limit',
|
|
||||||
'path': 'YoutubeDLMaterial.Downloader.download_rate_limit'
|
|
||||||
},
|
|
||||||
|
|
||||||
// Extra
|
// Extra
|
||||||
'ytdl_title_top': {
|
'ytdl_title_top': {
|
||||||
@@ -64,34 +60,14 @@ exports.CONFIG_ITEMS = {
|
|||||||
'key': 'ytdl_download_only_mode',
|
'key': 'ytdl_download_only_mode',
|
||||||
'path': 'YoutubeDLMaterial.Extra.download_only_mode'
|
'path': 'YoutubeDLMaterial.Extra.download_only_mode'
|
||||||
},
|
},
|
||||||
'ytdl_force_autoplay': {
|
'ytdl_allow_multi_download_mode': {
|
||||||
'key': 'ytdl_force_autoplay',
|
'key': 'ytdl_allow_multi_download_mode',
|
||||||
'path': 'YoutubeDLMaterial.Extra.force_autoplay'
|
'path': 'YoutubeDLMaterial.Extra.allow_multi_download_mode'
|
||||||
},
|
},
|
||||||
'ytdl_enable_downloads_manager': {
|
'ytdl_enable_downloads_manager': {
|
||||||
'key': 'ytdl_enable_downloads_manager',
|
'key': 'ytdl_enable_downloads_manager',
|
||||||
'path': 'YoutubeDLMaterial.Extra.enable_downloads_manager'
|
'path': 'YoutubeDLMaterial.Extra.enable_downloads_manager'
|
||||||
},
|
},
|
||||||
'ytdl_allow_playlist_categorization': {
|
|
||||||
'key': 'ytdl_allow_playlist_categorization',
|
|
||||||
'path': 'YoutubeDLMaterial.Extra.allow_playlist_categorization'
|
|
||||||
},
|
|
||||||
'ytdl_enable_notifications': {
|
|
||||||
'key': 'ytdl_enable_notifications',
|
|
||||||
'path': 'YoutubeDLMaterial.Extra.enable_notifications'
|
|
||||||
},
|
|
||||||
'ytdl_enable_all_notifications': {
|
|
||||||
'key': 'ytdl_enable_all_notifications',
|
|
||||||
'path': 'YoutubeDLMaterial.Extra.enable_all_notifications'
|
|
||||||
},
|
|
||||||
'ytdl_allowed_notification_types': {
|
|
||||||
'key': 'ytdl_allowed_notification_types',
|
|
||||||
'path': 'YoutubeDLMaterial.Extra.allowed_notification_types'
|
|
||||||
},
|
|
||||||
'ytdl_enable_rss_feed': {
|
|
||||||
'key': 'ytdl_enable_rss_feed',
|
|
||||||
'path': 'YoutubeDLMaterial.Extra.enable_rss_feed'
|
|
||||||
},
|
|
||||||
|
|
||||||
// API
|
// API
|
||||||
'ytdl_use_api_key': {
|
'ytdl_use_api_key': {
|
||||||
@@ -110,67 +86,6 @@ exports.CONFIG_ITEMS = {
|
|||||||
'key': 'ytdl_youtube_api_key',
|
'key': 'ytdl_youtube_api_key',
|
||||||
'path': 'YoutubeDLMaterial.API.youtube_API_key'
|
'path': 'YoutubeDLMaterial.API.youtube_API_key'
|
||||||
},
|
},
|
||||||
'ytdl_twitch_auto_download_chat': {
|
|
||||||
'key': 'ytdl_twitch_auto_download_chat',
|
|
||||||
'path': 'YoutubeDLMaterial.API.twitch_auto_download_chat'
|
|
||||||
},
|
|
||||||
'ytdl_use_sponsorblock_api': {
|
|
||||||
'key': 'ytdl_use_sponsorblock_api',
|
|
||||||
'path': 'YoutubeDLMaterial.API.use_sponsorblock_API'
|
|
||||||
},
|
|
||||||
'ytdl_generate_nfo_files': {
|
|
||||||
'key': 'ytdl_generate_nfo_files',
|
|
||||||
'path': 'YoutubeDLMaterial.API.generate_NFO_files'
|
|
||||||
},
|
|
||||||
'ytdl_use_ntfy_API': {
|
|
||||||
'key': 'ytdl_use_ntfy_API',
|
|
||||||
'path': 'YoutubeDLMaterial.API.use_ntfy_API'
|
|
||||||
},
|
|
||||||
'ytdl_ntfy_topic_url': {
|
|
||||||
'key': 'ytdl_ntfy_topic_url',
|
|
||||||
'path': 'YoutubeDLMaterial.API.ntfy_topic_URL'
|
|
||||||
},
|
|
||||||
'ytdl_use_gotify_API': {
|
|
||||||
'key': 'ytdl_use_gotify_API',
|
|
||||||
'path': 'YoutubeDLMaterial.API.use_gotify_API'
|
|
||||||
},
|
|
||||||
'ytdl_gotify_server_url': {
|
|
||||||
'key': 'ytdl_gotify_server_url',
|
|
||||||
'path': 'YoutubeDLMaterial.API.gotify_server_URL'
|
|
||||||
},
|
|
||||||
'ytdl_gotify_app_token': {
|
|
||||||
'key': 'ytdl_gotify_app_token',
|
|
||||||
'path': 'YoutubeDLMaterial.API.gotify_app_token'
|
|
||||||
},
|
|
||||||
'ytdl_use_telegram_API': {
|
|
||||||
'key': 'ytdl_use_telegram_API',
|
|
||||||
'path': 'YoutubeDLMaterial.API.use_telegram_API'
|
|
||||||
},
|
|
||||||
'ytdl_telegram_bot_token': {
|
|
||||||
'key': 'ytdl_telegram_bot_token',
|
|
||||||
'path': 'YoutubeDLMaterial.API.telegram_bot_token'
|
|
||||||
},
|
|
||||||
'ytdl_telegram_chat_id': {
|
|
||||||
'key': 'ytdl_telegram_chat_id',
|
|
||||||
'path': 'YoutubeDLMaterial.API.telegram_chat_id'
|
|
||||||
},
|
|
||||||
'ytdl_telegram_webhook_proxy': {
|
|
||||||
'key': 'ytdl_telegram_webhook_proxy',
|
|
||||||
'path': 'YoutubeDLMaterial.API.telegram_webhook_proxy'
|
|
||||||
},
|
|
||||||
'ytdl_webhook_url': {
|
|
||||||
'key': 'ytdl_webhook_url',
|
|
||||||
'path': 'YoutubeDLMaterial.API.webhook_URL'
|
|
||||||
},
|
|
||||||
'ytdl_discord_webhook_url': {
|
|
||||||
'key': 'ytdl_discord_webhook_url',
|
|
||||||
'path': 'YoutubeDLMaterial.API.discord_webhook_URL'
|
|
||||||
},
|
|
||||||
'ytdl_slack_webhook_url': {
|
|
||||||
'key': 'ytdl_slack_webhook_url',
|
|
||||||
'path': 'YoutubeDLMaterial.API.slack_webhook_URL'
|
|
||||||
},
|
|
||||||
|
|
||||||
|
|
||||||
// Themes
|
// Themes
|
||||||
'ytdl_default_theme': {
|
'ytdl_default_theme': {
|
||||||
@@ -195,9 +110,9 @@ exports.CONFIG_ITEMS = {
|
|||||||
'key': 'ytdl_subscriptions_check_interval',
|
'key': 'ytdl_subscriptions_check_interval',
|
||||||
'path': 'YoutubeDLMaterial.Subscriptions.subscriptions_check_interval'
|
'path': 'YoutubeDLMaterial.Subscriptions.subscriptions_check_interval'
|
||||||
},
|
},
|
||||||
'ytdl_subscriptions_redownload_fresh_uploads': {
|
'ytdl_subscriptions_check_interval': {
|
||||||
'key': 'ytdl_subscriptions_redownload_fresh_uploads',
|
'key': 'ytdl_subscriptions_check_interval',
|
||||||
'path': 'YoutubeDLMaterial.Subscriptions.redownload_fresh_uploads'
|
'path': 'YoutubeDLMaterial.Subscriptions.subscriptions_check_interval'
|
||||||
},
|
},
|
||||||
|
|
||||||
// Users
|
// Users
|
||||||
@@ -218,16 +133,6 @@ exports.CONFIG_ITEMS = {
|
|||||||
'path': 'YoutubeDLMaterial.Users.ldap_config'
|
'path': 'YoutubeDLMaterial.Users.ldap_config'
|
||||||
},
|
},
|
||||||
|
|
||||||
// Database
|
|
||||||
'ytdl_use_local_db': {
|
|
||||||
'key': 'ytdl_use_local_db',
|
|
||||||
'path': 'YoutubeDLMaterial.Database.use_local_db'
|
|
||||||
},
|
|
||||||
'ytdl_mongodb_connection_string': {
|
|
||||||
'key': 'ytdl_mongodb_connection_string',
|
|
||||||
'path': 'YoutubeDLMaterial.Database.mongodb_connection_string'
|
|
||||||
},
|
|
||||||
|
|
||||||
// Advanced
|
// Advanced
|
||||||
'ytdl_default_downloader': {
|
'ytdl_default_downloader': {
|
||||||
'key': 'ytdl_default_downloader',
|
'key': 'ytdl_default_downloader',
|
||||||
@@ -263,100 +168,17 @@ exports.CONFIG_ITEMS = {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
exports.AVAILABLE_PERMISSIONS = [
|
AVAILABLE_PERMISSIONS = [
|
||||||
'filemanager',
|
'filemanager',
|
||||||
'settings',
|
'settings',
|
||||||
'subscriptions',
|
'subscriptions',
|
||||||
'sharing',
|
'sharing',
|
||||||
'advanced_download',
|
'advanced_download',
|
||||||
'downloads_manager',
|
'downloads_manager'
|
||||||
'tasks_manager'
|
|
||||||
];
|
];
|
||||||
|
|
||||||
exports.DETAILS_BIN_PATH = 'appdata/youtube-dl.json'
|
module.exports = {
|
||||||
exports.OUTDATED_YOUTUBEDL_VERSION = "2020.00.00";
|
CONFIG_ITEMS: CONFIG_ITEMS,
|
||||||
|
AVAILABLE_PERMISSIONS: AVAILABLE_PERMISSIONS,
|
||||||
// args that have a value after it (e.g. -o <output> or -f <format>)
|
CURRENT_VERSION: 'v4.1'
|
||||||
const YTDL_ARGS_WITH_VALUES = [
|
}
|
||||||
'--default-search',
|
|
||||||
'--config-location',
|
|
||||||
'--proxy',
|
|
||||||
'--socket-timeout',
|
|
||||||
'--source-address',
|
|
||||||
'--geo-verification-proxy',
|
|
||||||
'--geo-bypass-country',
|
|
||||||
'--geo-bypass-ip-block',
|
|
||||||
'--playlist-start',
|
|
||||||
'--playlist-end',
|
|
||||||
'--playlist-items',
|
|
||||||
'--match-title',
|
|
||||||
'--reject-title',
|
|
||||||
'--max-downloads',
|
|
||||||
'--min-filesize',
|
|
||||||
'--max-filesize',
|
|
||||||
'--date',
|
|
||||||
'--datebefore',
|
|
||||||
'--dateafter',
|
|
||||||
'--min-views',
|
|
||||||
'--max-views',
|
|
||||||
'--match-filter',
|
|
||||||
'--age-limit',
|
|
||||||
'--download-archive',
|
|
||||||
'-r',
|
|
||||||
'--limit-rate',
|
|
||||||
'-R',
|
|
||||||
'--retries',
|
|
||||||
'--fragment-retries',
|
|
||||||
'--buffer-size',
|
|
||||||
'--http-chunk-size',
|
|
||||||
'--external-downloader',
|
|
||||||
'--external-downloader-args',
|
|
||||||
'-a',
|
|
||||||
'--batch-file',
|
|
||||||
'-o',
|
|
||||||
'--output',
|
|
||||||
'--output-na-placeholder',
|
|
||||||
'--autonumber-start',
|
|
||||||
'--load-info-json',
|
|
||||||
'--cookies',
|
|
||||||
'--cache-dir',
|
|
||||||
'--encoding',
|
|
||||||
'--user-agent',
|
|
||||||
'--referer',
|
|
||||||
'--add-header',
|
|
||||||
'--sleep-interval',
|
|
||||||
'--max-sleep-interval',
|
|
||||||
'-f',
|
|
||||||
'--format',
|
|
||||||
'--merge-output-format',
|
|
||||||
'--sub-format',
|
|
||||||
'--sub-lang',
|
|
||||||
'-u',
|
|
||||||
'--username',
|
|
||||||
'-p',
|
|
||||||
'--password',
|
|
||||||
'-2',
|
|
||||||
'--twofactor',
|
|
||||||
'--video-password',
|
|
||||||
'--ap-mso',
|
|
||||||
'--ap-username',
|
|
||||||
'--ap-password',
|
|
||||||
'--audio-format',
|
|
||||||
'--audio-quality',
|
|
||||||
'--recode-video',
|
|
||||||
'--postprocessor-args',
|
|
||||||
'--metadata-from-title',
|
|
||||||
'--fixup',
|
|
||||||
'--ffmpeg-location',
|
|
||||||
'--exec',
|
|
||||||
'--convert-subs'
|
|
||||||
];
|
|
||||||
|
|
||||||
exports.SUBSCRIPTION_BACKUP_PATH = 'subscription_backup.json'
|
|
||||||
|
|
||||||
// we're using a Set here for performance
|
|
||||||
exports.YTDL_ARGS_WITH_VALUES = new Set(YTDL_ARGS_WITH_VALUES);
|
|
||||||
|
|
||||||
exports.ICON_URL = 'https://i.imgur.com/IKOlr0N.png';
|
|
||||||
|
|
||||||
exports.CURRENT_VERSION = 'v4.3.2';
|
|
||||||
|
|||||||
910
backend/db.js
910
backend/db.js
@@ -1,179 +1,121 @@
|
|||||||
const fs = require('fs-extra')
|
var fs = require('fs-extra')
|
||||||
const path = require('path')
|
var path = require('path')
|
||||||
const { MongoClient } = require("mongodb");
|
var utils = require('./utils')
|
||||||
const _ = require('lodash');
|
const { uuid } = require('uuidv4');
|
||||||
|
|
||||||
const config_api = require('./config');
|
const config_api = require('./config');
|
||||||
const utils = require('./utils')
|
|
||||||
const logger = require('./logger');
|
|
||||||
|
|
||||||
const low = require('lowdb')
|
var logger = null;
|
||||||
const FileSync = require('lowdb/adapters/FileSync');
|
var db = null;
|
||||||
const { BehaviorSubject } = require('rxjs');
|
var users_db = null;
|
||||||
|
function setDB(input_db, input_users_db) { db = input_db; users_db = input_users_db }
|
||||||
|
function setLogger(input_logger) { logger = input_logger; }
|
||||||
|
|
||||||
let local_db = null;
|
function initialize(input_db, input_users_db, input_logger) {
|
||||||
let database = null;
|
|
||||||
exports.database_initialized = false;
|
|
||||||
exports.database_initialized_bs = new BehaviorSubject(false);
|
|
||||||
|
|
||||||
const tables = {
|
|
||||||
files: {
|
|
||||||
name: 'files',
|
|
||||||
primary_key: 'uid',
|
|
||||||
text_search: {
|
|
||||||
title: 'text',
|
|
||||||
uploader: 'text',
|
|
||||||
uid: 'text'
|
|
||||||
}
|
|
||||||
},
|
|
||||||
playlists: {
|
|
||||||
name: 'playlists',
|
|
||||||
primary_key: 'id'
|
|
||||||
},
|
|
||||||
categories: {
|
|
||||||
name: 'categories',
|
|
||||||
primary_key: 'uid'
|
|
||||||
},
|
|
||||||
subscriptions: {
|
|
||||||
name: 'subscriptions',
|
|
||||||
primary_key: 'id'
|
|
||||||
},
|
|
||||||
downloads: {
|
|
||||||
name: 'downloads'
|
|
||||||
},
|
|
||||||
users: {
|
|
||||||
name: 'users',
|
|
||||||
primary_key: 'uid'
|
|
||||||
},
|
|
||||||
roles: {
|
|
||||||
name: 'roles',
|
|
||||||
primary_key: 'key'
|
|
||||||
},
|
|
||||||
download_queue: {
|
|
||||||
name: 'download_queue',
|
|
||||||
primary_key: 'uid'
|
|
||||||
},
|
|
||||||
tasks: {
|
|
||||||
name: 'tasks',
|
|
||||||
primary_key: 'key'
|
|
||||||
},
|
|
||||||
notifications: {
|
|
||||||
name: 'notifications',
|
|
||||||
primary_key: 'uid'
|
|
||||||
},
|
|
||||||
archives: {
|
|
||||||
name: 'archives'
|
|
||||||
},
|
|
||||||
test: {
|
|
||||||
name: 'test'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const tables_list = Object.keys(tables);
|
|
||||||
|
|
||||||
let using_local_db = null;
|
|
||||||
|
|
||||||
function setDB(input_db, input_users_db) {
|
|
||||||
db = input_db; users_db = input_users_db;
|
|
||||||
exports.db = input_db;
|
|
||||||
exports.users_db = input_users_db
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.initialize = (input_db, input_users_db, db_name = 'local_db.json') => {
|
|
||||||
setDB(input_db, input_users_db);
|
setDB(input_db, input_users_db);
|
||||||
|
setLogger(input_logger);
|
||||||
// must be done here to prevent getConfigItem from being called before init
|
|
||||||
using_local_db = config_api.getConfigItem('ytdl_use_local_db');
|
|
||||||
|
|
||||||
const local_adapter = new FileSync(`./appdata/${db_name}`);
|
|
||||||
local_db = low(local_adapter);
|
|
||||||
|
|
||||||
const local_db_defaults = {}
|
|
||||||
tables_list.forEach(table => {local_db_defaults[table] = []});
|
|
||||||
local_db.defaults(local_db_defaults).write();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.connectToDB = async (retries = 5, no_fallback = false, custom_connection_string = null) => {
|
function registerFileDB(file_path, type, multiUserMode = null, sub = null, customPath = null) {
|
||||||
const success = await exports._connectToDB(custom_connection_string);
|
let db_path = null;
|
||||||
if (success) return true;
|
const file_id = file_path.substring(0, file_path.length-4);
|
||||||
|
const file_object = generateFileObject(file_id, type, customPath || multiUserMode && multiUserMode.file_path, sub);
|
||||||
|
if (!file_object) {
|
||||||
|
logger.error(`Could not find associated JSON file for ${type} file ${file_id}`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
if (retries) {
|
utils.fixVideoMetadataPerms(file_id, type, multiUserMode && multiUserMode.file_path);
|
||||||
logger.warn(`MongoDB connection failed! Retrying ${retries} times...`);
|
|
||||||
const retry_delay_ms = 2000;
|
|
||||||
for (let i = 0; i < retries; i++) {
|
|
||||||
const retry_succeeded = await exports._connectToDB();
|
|
||||||
if (retry_succeeded) {
|
|
||||||
logger.info(`Successfully connected to DB after ${i+1} attempt(s)`);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (i !== retries - 1) {
|
// add thumbnail path
|
||||||
logger.warn(`Retry ${i+1} failed, waiting ${retry_delay_ms}ms before trying again.`);
|
file_object['thumbnailPath'] = utils.getDownloadedThumbnail(file_id, type, customPath || multiUserMode && multiUserMode.file_path);
|
||||||
await utils.wait(retry_delay_ms);
|
|
||||||
} else {
|
if (!sub) {
|
||||||
logger.warn(`Retry ${i+1} failed.`);
|
if (multiUserMode) {
|
||||||
}
|
const user_uid = multiUserMode.user;
|
||||||
|
db_path = users_db.get('users').find({uid: user_uid}).get(`files.${type}`);
|
||||||
|
} else {
|
||||||
|
db_path = db.get(`files.${type}`)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (multiUserMode) {
|
||||||
|
const user_uid = multiUserMode.user;
|
||||||
|
db_path = users_db.get('users').find({uid: user_uid}).get('subscriptions').find({id: sub.id}).get('videos');
|
||||||
|
} else {
|
||||||
|
db_path = db.get('subscriptions').find({id: sub.id}).get('videos');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (no_fallback) {
|
const file_uid = registerFileDBManual(db_path, file_object);
|
||||||
logger.error('Failed to connect to MongoDB. Verify your connection string is valid.');
|
|
||||||
return;
|
// remove metadata JSON if needed
|
||||||
|
if (!config_api.getConfigItem('ytdl_include_metadata')) {
|
||||||
|
utils.deleteJSONFile(file_id, type, multiUserMode && multiUserMode.file_path)
|
||||||
}
|
}
|
||||||
using_local_db = true;
|
|
||||||
config_api.setConfigItem('ytdl_use_local_db', true);
|
return file_uid;
|
||||||
logger.error('Failed to connect to MongoDB, using Local DB as a fallback. Make sure your MongoDB instance is accessible, or set Local DB as a default through the config.');
|
}
|
||||||
|
|
||||||
|
function registerFileDBManual(db_path, file_object) {
|
||||||
|
// add additional info
|
||||||
|
file_object['uid'] = uuid();
|
||||||
|
file_object['registered'] = Date.now();
|
||||||
|
path_object = path.parse(file_object['path']);
|
||||||
|
file_object['path'] = path.format(path_object);
|
||||||
|
|
||||||
|
// remove duplicate(s)
|
||||||
|
db_path.remove({path: file_object['path']}).write();
|
||||||
|
|
||||||
|
// add new file to db
|
||||||
|
db_path.push(file_object).write();
|
||||||
|
return file_object['uid'];
|
||||||
|
}
|
||||||
|
|
||||||
|
function generateFileObject(id, type, customPath = null, sub = null) {
|
||||||
|
if (!customPath && sub) {
|
||||||
|
customPath = getAppendedBasePathSub(sub, config_api.getConfigItem('ytdl_subscriptions_base_path'));
|
||||||
|
}
|
||||||
|
var jsonobj = (type === 'audio') ? utils.getJSONMp3(id, customPath, true) : utils.getJSONMp4(id, customPath, true);
|
||||||
|
if (!jsonobj) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const ext = (type === 'audio') ? '.mp3' : '.mp4'
|
||||||
|
const file_path = utils.getTrueFileName(jsonobj['_filename'], type); // path.join(type === 'audio' ? audioFolderPath : videoFolderPath, id + ext);
|
||||||
|
// console.
|
||||||
|
var stats = fs.statSync(path.join(__dirname, file_path));
|
||||||
|
|
||||||
|
var title = jsonobj.title;
|
||||||
|
var url = jsonobj.webpage_url;
|
||||||
|
var uploader = jsonobj.uploader;
|
||||||
|
var upload_date = jsonobj.upload_date;
|
||||||
|
upload_date = upload_date ? `${upload_date.substring(0, 4)}-${upload_date.substring(4, 6)}-${upload_date.substring(6, 8)}` : 'N/A';
|
||||||
|
|
||||||
|
var size = stats.size;
|
||||||
|
|
||||||
|
var thumbnail = jsonobj.thumbnail;
|
||||||
|
var duration = jsonobj.duration;
|
||||||
|
var isaudio = type === 'audio';
|
||||||
|
var file_obj = new utils.File(id, title, thumbnail, isaudio, duration, url, uploader, size, file_path, upload_date);
|
||||||
|
return file_obj;
|
||||||
|
}
|
||||||
|
|
||||||
|
function updatePlaylist(playlist, user_uid) {
|
||||||
|
let playlistID = playlist.id;
|
||||||
|
let type = playlist.type;
|
||||||
|
let db_loc = null;
|
||||||
|
if (user_uid) {
|
||||||
|
db_loc = users_db.get('users').find({uid: user_uid}).get(`playlists.${type}`).find({id: playlistID});
|
||||||
|
} else {
|
||||||
|
db_loc = db.get(`playlists.${type}`).find({id: playlistID});
|
||||||
|
}
|
||||||
|
db_loc.assign(playlist).write();
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports._connectToDB = async (custom_connection_string = null) => {
|
function getAppendedBasePathSub(sub, base_path) {
|
||||||
const uri = !custom_connection_string ? config_api.getConfigItem('ytdl_mongodb_connection_string') : custom_connection_string; // "mongodb://127.0.0.1:27017/?compressors=zlib&gssapiServiceName=mongodb";
|
return path.join(base_path, (sub.isPlaylist ? 'playlists/' : 'channels/'), sub.name);
|
||||||
const client = new MongoClient(uri, {
|
|
||||||
useNewUrlParser: true,
|
|
||||||
useUnifiedTopology: true,
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
await client.connect();
|
|
||||||
database = client.db('ytdl_material');
|
|
||||||
|
|
||||||
// avoid doing anything else if it's just a test
|
|
||||||
if (custom_connection_string) return true;
|
|
||||||
|
|
||||||
const existing_collections = (await database.listCollections({}, { nameOnly: true }).toArray()).map(collection => collection.name);
|
|
||||||
|
|
||||||
const missing_tables = tables_list.filter(table => !(existing_collections.includes(table)));
|
|
||||||
missing_tables.forEach(async table => {
|
|
||||||
await database.createCollection(table);
|
|
||||||
});
|
|
||||||
|
|
||||||
tables_list.forEach(async table => {
|
|
||||||
const primary_key = tables[table]['primary_key'];
|
|
||||||
if (primary_key) {
|
|
||||||
await database.collection(table).createIndex({[primary_key]: 1}, { unique: true });
|
|
||||||
}
|
|
||||||
const text_search = tables[table]['text_search'];
|
|
||||||
if (text_search) {
|
|
||||||
await database.collection(table).createIndex(text_search);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
using_local_db = false; // needs to happen for tests (in normal operation using_local_db is guaranteed false)
|
|
||||||
return true;
|
|
||||||
} catch(err) {
|
|
||||||
logger.error(err);
|
|
||||||
return false;
|
|
||||||
} finally {
|
|
||||||
// Ensures that the client will close when you finish/error
|
|
||||||
// await client.close();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.setVideoProperty = async (file_uid, assignment_obj) => {
|
async function importUnregisteredFiles() {
|
||||||
// TODO: check if video exists, throw error if not
|
|
||||||
await exports.updateRecord('files', {uid: file_uid}, assignment_obj);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getFileDirectoriesAndDBs = async () => {
|
|
||||||
let dirs_to_check = [];
|
let dirs_to_check = [];
|
||||||
let subscriptions_to_check = [];
|
let subscriptions_to_check = [];
|
||||||
const subscriptions_base_path = config_api.getConfigItem('ytdl_subscriptions_base_path'); // only for single-user mode
|
const subscriptions_base_path = config_api.getConfigItem('ytdl_subscriptions_base_path'); // only for single-user mode
|
||||||
@@ -181,50 +123,48 @@ exports.getFileDirectoriesAndDBs = async () => {
|
|||||||
const usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
|
const usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
|
||||||
const subscriptions_enabled = config_api.getConfigItem('ytdl_allow_subscriptions');
|
const subscriptions_enabled = config_api.getConfigItem('ytdl_allow_subscriptions');
|
||||||
if (multi_user_mode) {
|
if (multi_user_mode) {
|
||||||
const users = await exports.getRecords('users');
|
let users = users_db.get('users').value();
|
||||||
for (let i = 0; i < users.length; i++) {
|
for (let i = 0; i < users.length; i++) {
|
||||||
const user = users[i];
|
const user = users[i];
|
||||||
|
|
||||||
|
if (subscriptions_enabled) subscriptions_to_check = subscriptions_to_check.concat(users[i]['subscriptions']);
|
||||||
|
|
||||||
// add user's audio dir to check list
|
// add user's audio dir to check list
|
||||||
dirs_to_check.push({
|
dirs_to_check.push({
|
||||||
basePath: path.join(usersFileFolder, user.uid, 'audio'),
|
basePath: path.join(usersFileFolder, user.uid, 'audio'),
|
||||||
user_uid: user.uid,
|
dbPath: users_db.get('users').find({uid: user.uid}).get('files.audio'),
|
||||||
type: 'audio',
|
type: 'audio'
|
||||||
archive_path: utils.getArchiveFolder('audio', user.uid)
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// add user's video dir to check list
|
// add user's video dir to check list
|
||||||
dirs_to_check.push({
|
dirs_to_check.push({
|
||||||
basePath: path.join(usersFileFolder, user.uid, 'video'),
|
basePath: path.join(usersFileFolder, user.uid, 'video'),
|
||||||
user_uid: user.uid,
|
dbPath: users_db.get('users').find({uid: user.uid}).get('files.video'),
|
||||||
type: 'video',
|
type: 'video'
|
||||||
archive_path: utils.getArchiveFolder('video', user.uid)
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
const audioFolderPath = config_api.getConfigItem('ytdl_audio_folder_path');
|
const audioFolderPath = config_api.getConfigItem('ytdl_audio_folder_path');
|
||||||
const videoFolderPath = config_api.getConfigItem('ytdl_video_folder_path');
|
const videoFolderPath = config_api.getConfigItem('ytdl_video_folder_path');
|
||||||
|
const subscriptions = db.get('subscriptions').value();
|
||||||
|
|
||||||
|
if (subscriptions_enabled && subscriptions) subscriptions_to_check = subscriptions_to_check.concat(subscriptions);
|
||||||
|
|
||||||
// add audio dir to check list
|
// add audio dir to check list
|
||||||
dirs_to_check.push({
|
dirs_to_check.push({
|
||||||
basePath: audioFolderPath,
|
basePath: audioFolderPath,
|
||||||
type: 'audio',
|
dbPath: db.get('files.audio'),
|
||||||
archive_path: utils.getArchiveFolder('audio')
|
type: 'audio'
|
||||||
});
|
});
|
||||||
|
|
||||||
// add video dir to check list
|
// add video dir to check list
|
||||||
dirs_to_check.push({
|
dirs_to_check.push({
|
||||||
basePath: videoFolderPath,
|
basePath: videoFolderPath,
|
||||||
type: 'video',
|
dbPath: db.get('files.video'),
|
||||||
archive_path: utils.getArchiveFolder('video')
|
type: 'video'
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
if (subscriptions_enabled) {
|
|
||||||
const subscriptions = await exports.getRecords('subscriptions');
|
|
||||||
subscriptions_to_check = subscriptions_to_check.concat(subscriptions);
|
|
||||||
}
|
|
||||||
|
|
||||||
// add subscriptions to check list
|
// add subscriptions to check list
|
||||||
for (let i = 0; i < subscriptions_to_check.length; i++) {
|
for (let i = 0; i < subscriptions_to_check.length; i++) {
|
||||||
let subscription_to_check = subscriptions_to_check[i];
|
let subscription_to_check = subscriptions_to_check[i];
|
||||||
@@ -233,615 +173,35 @@ exports.getFileDirectoriesAndDBs = async () => {
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
dirs_to_check.push({
|
dirs_to_check.push({
|
||||||
basePath: subscription_to_check.user_uid ? path.join(usersFileFolder, subscription_to_check.user_uid, 'subscriptions', subscription_to_check.isPlaylist ? 'playlists/' : 'channels/', subscription_to_check.name)
|
basePath: multi_user_mode ? path.join(usersFileFolder, subscription_to_check.user_uid, 'subscriptions', subscription_to_check.isPlaylist ? 'playlists/' : 'channels/', subscription_to_check.name)
|
||||||
: path.join(subscriptions_base_path, subscription_to_check.isPlaylist ? 'playlists/' : 'channels/', subscription_to_check.name),
|
: path.join(subscriptions_base_path, subscription_to_check.isPlaylist ? 'playlists/' : 'channels/', subscription_to_check.name),
|
||||||
user_uid: subscription_to_check.user_uid,
|
dbPath: multi_user_mode ? users_db.get('users').find({uid: subscription_to_check.user_uid}).get('subscriptions').find({id: subscription_to_check.id}).get('videos')
|
||||||
type: subscription_to_check.type,
|
: db.get('subscriptions').find({id: subscription_to_check.id}).get('videos'),
|
||||||
sub_id: subscription_to_check['id'],
|
type: subscription_to_check.type
|
||||||
archive_path: utils.getArchiveFolder(subscription_to_check.type, subscription_to_check.user_uid, subscription_to_check)
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
return dirs_to_check;
|
// run through check list and check each file to see if it's missing from the db
|
||||||
}
|
for (const dir_to_check of dirs_to_check) {
|
||||||
|
// recursively get all files in dir's path
|
||||||
|
const files = await utils.getDownloadedFilesByType(dir_to_check.basePath, dir_to_check.type);
|
||||||
|
|
||||||
// Basic DB functions
|
files.forEach(file => {
|
||||||
|
// check if file exists in db, if not add it
|
||||||
// Create
|
const file_is_registered = !!(dir_to_check.dbPath.find({id: file.id}).value())
|
||||||
|
if (!file_is_registered) {
|
||||||
exports.insertRecordIntoTable = async (table, doc, replaceFilter = null) => {
|
// add additional info
|
||||||
// local db override
|
registerFileDBManual(dir_to_check.dbPath, file);
|
||||||
if (using_local_db) {
|
logger.verbose(`Added discovered file to the database: ${file.id}`);
|
||||||
if (replaceFilter) local_db.get(table).remove((doc) => _.isMatch(doc, replaceFilter)).write();
|
|
||||||
local_db.get(table).push(doc).write();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (replaceFilter) {
|
|
||||||
const output = await database.collection(table).bulkWrite([
|
|
||||||
{
|
|
||||||
deleteMany: {
|
|
||||||
filter: replaceFilter
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
insertOne: {
|
|
||||||
document: doc
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
]);
|
|
||||||
logger.debug(`Inserted doc into ${table} with filter: ${JSON.stringify(replaceFilter)}`);
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
}
|
|
||||||
|
|
||||||
const output = await database.collection(table).insertOne(doc);
|
|
||||||
logger.debug(`Inserted doc into ${table}`);
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.insertRecordsIntoTable = async (table, docs, ignore_errors = false) => {
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
const records_limit = 30000;
|
|
||||||
if (docs.length < records_limit) {
|
|
||||||
local_db.get(table).push(...docs).write();
|
|
||||||
} else {
|
|
||||||
for (let i = 0; i < docs.length; i+=records_limit) {
|
|
||||||
const records_to_push = docs.slice(i, i+records_limit > docs.length ? docs.length : i+records_limit)
|
|
||||||
local_db.get(table).push(...records_to_push).write();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
const output = await database.collection(table).insertMany(docs, {ordered: !ignore_errors});
|
|
||||||
logger.debug(`Inserted ${output.insertedCount} docs into ${table}`);
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.bulkInsertRecordsIntoTable = async (table, docs) => {
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
return await exports.insertRecordsIntoTable(table, docs);
|
|
||||||
}
|
|
||||||
|
|
||||||
// not a necessary function as insertRecords does the same thing but gives us more control on batch size if needed
|
|
||||||
const table_collection = database.collection(table);
|
|
||||||
|
|
||||||
let bulk = table_collection.initializeOrderedBulkOp(); // Initialize the Ordered Batch
|
|
||||||
|
|
||||||
for (let i = 0; i < docs.length; i++) {
|
|
||||||
bulk.insert(docs[i]);
|
|
||||||
}
|
|
||||||
|
|
||||||
const output = await bulk.execute();
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read
|
|
||||||
|
|
||||||
exports.getRecord = async (table, filter_obj) => {
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
return exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').value();
|
|
||||||
}
|
|
||||||
|
|
||||||
return await database.collection(table).findOne(filter_obj);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getRecords = async (table, filter_obj = null, return_count = false, sort = null, range = null) => {
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
let cursor = filter_obj ? exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'filter').value() : local_db.get(table).value();
|
|
||||||
if (sort) {
|
|
||||||
cursor = cursor.sort((a, b) => (a[sort['by']] > b[sort['by']] ? sort['order'] : sort['order']*-1));
|
|
||||||
}
|
|
||||||
if (range) {
|
|
||||||
cursor = cursor.slice(range[0], range[1]);
|
|
||||||
}
|
|
||||||
return !return_count ? cursor : cursor.length;
|
|
||||||
}
|
|
||||||
|
|
||||||
const cursor = filter_obj ? database.collection(table).find(filter_obj) : database.collection(table).find();
|
|
||||||
if (sort) {
|
|
||||||
cursor.sort({[sort['by']]: sort['order']});
|
|
||||||
}
|
|
||||||
if (range) {
|
|
||||||
cursor.skip(range[0]).limit(range[1] - range[0]);
|
|
||||||
}
|
|
||||||
|
|
||||||
return !return_count ? await cursor.toArray() : await cursor.count();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update
|
|
||||||
|
|
||||||
exports.updateRecord = async (table, filter_obj, update_obj, nested_mode = false) => {
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
if (nested_mode) {
|
|
||||||
// if object is nested we need to handle it differently
|
|
||||||
update_obj = utils.convertFlatObjectToNestedObject(update_obj);
|
|
||||||
exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').merge(update_obj).write();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').assign(update_obj).write();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
// sometimes _id will be in the update obj, this breaks mongodb
|
|
||||||
if (update_obj['_id']) delete update_obj['_id'];
|
|
||||||
const output = await database.collection(table).updateOne(filter_obj, {$set: update_obj});
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.updateRecords = async (table, filter_obj, update_obj) => {
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'filter').each((record) => {
|
|
||||||
const props_to_update = Object.keys(update_obj);
|
|
||||||
for (let i = 0; i < props_to_update.length; i++) {
|
|
||||||
const prop_to_update = props_to_update[i];
|
|
||||||
const prop_value = update_obj[prop_to_update];
|
|
||||||
record[prop_to_update] = prop_value;
|
|
||||||
}
|
|
||||||
}).write();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const output = await database.collection(table).updateMany(filter_obj, {$set: update_obj});
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.removePropertyFromRecord = async (table, filter_obj, remove_obj) => {
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
const props_to_remove = Object.keys(remove_obj);
|
|
||||||
exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').unset(props_to_remove).write();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const output = await database.collection(table).updateOne(filter_obj, {$unset: remove_obj});
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.bulkUpdateRecordsByKey = async (table, key_label, update_obj) => {
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
local_db.get(table).each((record) => {
|
|
||||||
const item_id_to_update = record[key_label];
|
|
||||||
if (!update_obj[item_id_to_update]) return;
|
|
||||||
|
|
||||||
const props_to_update = Object.keys(update_obj[item_id_to_update]);
|
|
||||||
for (let i = 0; i < props_to_update.length; i++) {
|
|
||||||
const prop_to_update = props_to_update[i];
|
|
||||||
const prop_value = update_obj[item_id_to_update][prop_to_update];
|
|
||||||
record[prop_to_update] = prop_value;
|
|
||||||
}
|
|
||||||
}).write();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const table_collection = database.collection(table);
|
|
||||||
|
|
||||||
let bulk = table_collection.initializeOrderedBulkOp(); // Initialize the Ordered Batch
|
|
||||||
|
|
||||||
const item_ids_to_update = Object.keys(update_obj);
|
|
||||||
|
|
||||||
for (let i = 0; i < item_ids_to_update.length; i++) {
|
|
||||||
const item_id_to_update = item_ids_to_update[i];
|
|
||||||
bulk.find({[key_label]: item_id_to_update }).updateOne({
|
|
||||||
"$set": update_obj[item_id_to_update]
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
const output = await bulk.execute();
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.pushToRecordsArray = async (table, filter_obj, key, value) => {
|
module.exports = {
|
||||||
// local db override
|
initialize: initialize,
|
||||||
if (using_local_db) {
|
registerFileDB: registerFileDB,
|
||||||
exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').get(key).push(value).write();
|
updatePlaylist: updatePlaylist,
|
||||||
return true;
|
importUnregisteredFiles: importUnregisteredFiles
|
||||||
}
|
|
||||||
|
|
||||||
const output = await database.collection(table).updateOne(filter_obj, {$push: {[key]: value}});
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.pullFromRecordsArray = async (table, filter_obj, key, value) => {
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'find').get(key).pull(value).write();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const output = await database.collection(table).updateOne(filter_obj, {$pull: {[key]: value}});
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Delete
|
|
||||||
|
|
||||||
exports.removeRecord = async (table, filter_obj) => {
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'remove').write();
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const output = await database.collection(table).deleteOne(filter_obj);
|
|
||||||
return !!(output['result']['ok']);
|
|
||||||
}
|
|
||||||
|
|
||||||
// exports.removeRecordsByUIDBulk = async (table, uids) => {
|
|
||||||
// // local db override
|
|
||||||
// if (using_local_db) {
|
|
||||||
// exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'remove').write();
|
|
||||||
// return true;
|
|
||||||
// }
|
|
||||||
|
|
||||||
// const table_collection = database.collection(table);
|
|
||||||
|
|
||||||
// let bulk = table_collection.initializeOrderedBulkOp(); // Initialize the Ordered Batch
|
|
||||||
|
|
||||||
// const item_ids_to_remove =
|
|
||||||
|
|
||||||
// for (let i = 0; i < item_ids_to_update.length; i++) {
|
|
||||||
// const item_id_to_update = item_ids_to_update[i];
|
|
||||||
// bulk.find({[key_label]: item_id_to_update }).updateOne({
|
|
||||||
// "$set": update_obj[item_id_to_update]
|
|
||||||
// });
|
|
||||||
// }
|
|
||||||
|
|
||||||
// const output = await bulk.execute();
|
|
||||||
// return !!(output['result']['ok']);
|
|
||||||
// }
|
|
||||||
|
|
||||||
|
|
||||||
exports.findDuplicatesByKey = async (table, key) => {
|
|
||||||
let duplicates = [];
|
|
||||||
if (using_local_db) {
|
|
||||||
// this can probably be optimized
|
|
||||||
const all_records = await exports.getRecords(table);
|
|
||||||
const existing_records = {};
|
|
||||||
for (let i = 0; i < all_records.length; i++) {
|
|
||||||
const record = all_records[i];
|
|
||||||
const value = record[key];
|
|
||||||
|
|
||||||
if (existing_records[value]) {
|
|
||||||
duplicates.push(record);
|
|
||||||
}
|
|
||||||
|
|
||||||
existing_records[value] = true;
|
|
||||||
}
|
|
||||||
return duplicates;
|
|
||||||
}
|
|
||||||
|
|
||||||
const duplicated_values = await database.collection(table).aggregate([
|
|
||||||
{"$group" : { "_id": `$${key}`, "count": { "$sum": 1 } } },
|
|
||||||
{"$match": {"_id" :{ "$ne" : null } , "count" : {"$gt": 1} } },
|
|
||||||
{"$project": {[key] : "$_id", "_id" : 0} }
|
|
||||||
]).toArray();
|
|
||||||
|
|
||||||
for (let i = 0; i < duplicated_values.length; i++) {
|
|
||||||
const duplicated_value = duplicated_values[i];
|
|
||||||
const duplicated_records = await exports.getRecords(table, duplicated_value, false);
|
|
||||||
if (duplicated_records.length > 1) {
|
|
||||||
duplicates = duplicates.concat(duplicated_records.slice(1, duplicated_records.length));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return duplicates;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.removeAllRecords = async (table = null, filter_obj = null) => {
|
|
||||||
// local db override
|
|
||||||
const tables_to_remove = table ? [table] : tables_list;
|
|
||||||
logger.debug(`Removing all records from: ${tables_to_remove} with filter: ${JSON.stringify(filter_obj)}`)
|
|
||||||
if (using_local_db) {
|
|
||||||
for (let i = 0; i < tables_to_remove.length; i++) {
|
|
||||||
const table_to_remove = tables_to_remove[i];
|
|
||||||
if (filter_obj) exports.applyFilterLocalDB(local_db.get(table), filter_obj, 'remove').write();
|
|
||||||
else local_db.assign({[table_to_remove]: []}).write();
|
|
||||||
logger.debug(`Successfully removed records from ${table_to_remove}`);
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
let success = true;
|
|
||||||
for (let i = 0; i < tables_to_remove.length; i++) {
|
|
||||||
const table_to_remove = tables_to_remove[i];
|
|
||||||
|
|
||||||
const output = await database.collection(table_to_remove).deleteMany(filter_obj ? filter_obj : {});
|
|
||||||
logger.debug(`Successfully removed records from ${table_to_remove}`);
|
|
||||||
success &= !!(output['result']['ok']);
|
|
||||||
}
|
|
||||||
return success;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Stats
|
|
||||||
|
|
||||||
exports.getDBStats = async () => {
|
|
||||||
const stats_by_table = {};
|
|
||||||
for (let i = 0; i < tables_list.length; i++) {
|
|
||||||
const table = tables_list[i];
|
|
||||||
if (table === 'test') continue;
|
|
||||||
|
|
||||||
stats_by_table[table] = await getDBTableStats(table);
|
|
||||||
}
|
|
||||||
return {stats_by_table: stats_by_table, using_local_db: using_local_db};
|
|
||||||
}
|
|
||||||
|
|
||||||
const getDBTableStats = async (table) => {
|
|
||||||
const table_stats = {};
|
|
||||||
// local db override
|
|
||||||
if (using_local_db) {
|
|
||||||
table_stats['records_count'] = local_db.get(table).value().length;
|
|
||||||
} else {
|
|
||||||
const stats = await database.collection(table).stats();
|
|
||||||
table_stats['records_count'] = stats.count;
|
|
||||||
}
|
|
||||||
return table_stats;
|
|
||||||
}
|
|
||||||
|
|
||||||
// JSON to DB
|
|
||||||
|
|
||||||
exports.generateJSONTables = async (db_json, users_json) => {
|
|
||||||
// create records
|
|
||||||
let files = db_json['files'] || [];
|
|
||||||
let playlists = db_json['playlists'] || [];
|
|
||||||
let categories = db_json['categories'] || [];
|
|
||||||
let subscriptions = db_json['subscriptions'] || [];
|
|
||||||
|
|
||||||
const users = users_json['users'];
|
|
||||||
|
|
||||||
for (let i = 0; i < users.length; i++) {
|
|
||||||
const user = users[i];
|
|
||||||
|
|
||||||
if (user['files']) {
|
|
||||||
user['files'] = user['files'].map(file => ({ ...file, user_uid: user['uid'] }));
|
|
||||||
files = files.concat(user['files']);
|
|
||||||
}
|
|
||||||
if (user['playlists']) {
|
|
||||||
user['playlists'] = user['playlists'].map(playlist => ({ ...playlist, user_uid: user['uid'] }));
|
|
||||||
playlists = playlists.concat(user['playlists']);
|
|
||||||
}
|
|
||||||
if (user['categories']) {
|
|
||||||
user['categories'] = user['categories'].map(category => ({ ...category, user_uid: user['uid'] }));
|
|
||||||
categories = categories.concat(user['categories']);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (user['subscriptions']) {
|
|
||||||
user['subscriptions'] = user['subscriptions'].map(subscription => ({ ...subscription, user_uid: user['uid'] }));
|
|
||||||
subscriptions = subscriptions.concat(user['subscriptions']);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const tables_obj = {};
|
|
||||||
|
|
||||||
// TODO: use create*Records funcs to strip unnecessary properties
|
|
||||||
tables_obj.files = createFilesRecords(files, subscriptions);
|
|
||||||
tables_obj.playlists = playlists;
|
|
||||||
tables_obj.categories = categories;
|
|
||||||
tables_obj.subscriptions = createSubscriptionsRecords(subscriptions);
|
|
||||||
tables_obj.users = createUsersRecords(users);
|
|
||||||
tables_obj.roles = createRolesRecords(users_json['roles']);
|
|
||||||
tables_obj.downloads = createDownloadsRecords(db_json['downloads'])
|
|
||||||
|
|
||||||
return tables_obj;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.importJSONToDB = async (db_json, users_json) => {
|
|
||||||
await fs.writeFile(`appdata/db.json.${Date.now()/1000}.bak`, JSON.stringify(db_json, null, 2));
|
|
||||||
await fs.writeFile(`appdata/users_db.json.${Date.now()/1000}.bak`, JSON.stringify(users_json, null, 2));
|
|
||||||
|
|
||||||
await exports.removeAllRecords();
|
|
||||||
const tables_obj = await exports.generateJSONTables(db_json, users_json);
|
|
||||||
|
|
||||||
const table_keys = Object.keys(tables_obj);
|
|
||||||
|
|
||||||
let success = true;
|
|
||||||
for (let i = 0; i < table_keys.length; i++) {
|
|
||||||
const table_key = table_keys[i];
|
|
||||||
if (!tables_obj[table_key] || tables_obj[table_key].length === 0) continue;
|
|
||||||
success &= await exports.insertRecordsIntoTable(table_key, tables_obj[table_key], true);
|
|
||||||
}
|
|
||||||
|
|
||||||
return success;
|
|
||||||
}
|
|
||||||
|
|
||||||
const createFilesRecords = (files, subscriptions) => {
|
|
||||||
for (let i = 0; i < subscriptions.length; i++) {
|
|
||||||
const subscription = subscriptions[i];
|
|
||||||
if (!subscription['videos']) continue;
|
|
||||||
subscription['videos'] = subscription['videos'].map(file => ({ ...file, sub_id: subscription['id'], user_uid: subscription['user_uid'] ? subscription['user_uid'] : undefined}));
|
|
||||||
files = files.concat(subscriptions[i]['videos']);
|
|
||||||
}
|
|
||||||
|
|
||||||
return files;
|
|
||||||
}
|
|
||||||
|
|
||||||
const createPlaylistsRecords = async (playlists) => {
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
const createCategoriesRecords = async (categories) => {
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
const createSubscriptionsRecords = (subscriptions) => {
|
|
||||||
for (let i = 0; i < subscriptions.length; i++) {
|
|
||||||
delete subscriptions[i]['videos'];
|
|
||||||
}
|
|
||||||
|
|
||||||
return subscriptions;
|
|
||||||
}
|
|
||||||
|
|
||||||
const createUsersRecords = (users) => {
|
|
||||||
users.forEach(user => {
|
|
||||||
delete user['files'];
|
|
||||||
delete user['playlists'];
|
|
||||||
delete user['subscriptions'];
|
|
||||||
});
|
|
||||||
return users;
|
|
||||||
}
|
|
||||||
|
|
||||||
const createRolesRecords = (roles) => {
|
|
||||||
const new_roles = [];
|
|
||||||
Object.keys(roles).forEach(role_key => {
|
|
||||||
new_roles.push({
|
|
||||||
key: role_key,
|
|
||||||
...roles[role_key]
|
|
||||||
});
|
|
||||||
});
|
|
||||||
return new_roles;
|
|
||||||
}
|
|
||||||
|
|
||||||
const createDownloadsRecords = (downloads) => {
|
|
||||||
const new_downloads = [];
|
|
||||||
Object.keys(downloads).forEach(session_key => {
|
|
||||||
new_downloads.push({
|
|
||||||
key: session_key,
|
|
||||||
...downloads[session_key]
|
|
||||||
});
|
|
||||||
});
|
|
||||||
return new_downloads;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.backupDB = async () => {
|
|
||||||
const backup_dir = path.join('appdata', 'db_backup');
|
|
||||||
fs.ensureDirSync(backup_dir);
|
|
||||||
const backup_file_name = `${using_local_db ? 'local' : 'remote'}_db.json.${Date.now()/1000}.bak`;
|
|
||||||
const path_to_backups = path.join(backup_dir, backup_file_name);
|
|
||||||
|
|
||||||
logger.info(`Backing up ${using_local_db ? 'local' : 'remote'} DB to ${path_to_backups}`);
|
|
||||||
|
|
||||||
const table_to_records = {};
|
|
||||||
for (let i = 0; i < tables_list.length; i++) {
|
|
||||||
const table = tables_list[i];
|
|
||||||
table_to_records[table] = await exports.getRecords(table);
|
|
||||||
}
|
|
||||||
|
|
||||||
fs.writeJsonSync(path_to_backups, table_to_records);
|
|
||||||
|
|
||||||
return backup_file_name;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.restoreDB = async (file_name) => {
|
|
||||||
const path_to_backup = path.join('appdata', 'db_backup', file_name);
|
|
||||||
|
|
||||||
logger.debug('Reading database backup file.');
|
|
||||||
const table_to_records = fs.readJSONSync(path_to_backup);
|
|
||||||
|
|
||||||
if (!table_to_records) {
|
|
||||||
logger.error(`Failed to restore DB! Backup file '${path_to_backup}' could not be read.`);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.debug('Clearing database.');
|
|
||||||
await exports.removeAllRecords();
|
|
||||||
|
|
||||||
logger.debug('Database cleared! Beginning restore.');
|
|
||||||
let success = true;
|
|
||||||
for (let i = 0; i < tables_list.length; i++) {
|
|
||||||
const table = tables_list[i];
|
|
||||||
if (!table_to_records[table] || table_to_records[table].length === 0) continue;
|
|
||||||
success &= await exports.bulkInsertRecordsIntoTable(table, table_to_records[table]);
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.debug('Restore finished!');
|
|
||||||
|
|
||||||
return success;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.transferDB = async (local_to_remote) => {
|
|
||||||
const table_to_records = {};
|
|
||||||
for (let i = 0; i < tables_list.length; i++) {
|
|
||||||
const table = tables_list[i];
|
|
||||||
table_to_records[table] = await exports.getRecords(table);
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info('Backup up DB...');
|
|
||||||
await exports.backupDB(); // should backup always
|
|
||||||
|
|
||||||
using_local_db = !local_to_remote;
|
|
||||||
if (local_to_remote) {
|
|
||||||
const db_connected = await exports.connectToDB(5, true);
|
|
||||||
if (!db_connected) {
|
|
||||||
logger.error('Failed to transfer database - could not connect to MongoDB. Verify that your connection URL is valid.');
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
success = true;
|
|
||||||
|
|
||||||
logger.debug('Clearing new database before transfer...');
|
|
||||||
|
|
||||||
await exports.removeAllRecords();
|
|
||||||
|
|
||||||
logger.debug('Database cleared! Beginning transfer.');
|
|
||||||
|
|
||||||
for (let i = 0; i < tables_list.length; i++) {
|
|
||||||
const table = tables_list[i];
|
|
||||||
if (!table_to_records[table] || table_to_records[table].length === 0) continue;
|
|
||||||
success &= await exports.bulkInsertRecordsIntoTable(table, table_to_records[table]);
|
|
||||||
}
|
|
||||||
|
|
||||||
config_api.setConfigItem('ytdl_use_local_db', using_local_db);
|
|
||||||
|
|
||||||
logger.debug('Transfer finished!');
|
|
||||||
|
|
||||||
return success;
|
|
||||||
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
This function is necessary to emulate mongodb's ability to search for null or missing values.
|
|
||||||
A filter of null or undefined for a property will find docs that have that property missing, or have it
|
|
||||||
null or undefined. We want that same functionality for the local DB as well
|
|
||||||
|
|
||||||
error: {$ne: null}
|
|
||||||
^ ^
|
|
||||||
| |
|
|
||||||
filter_prop filter_prop_value
|
|
||||||
*/
|
|
||||||
exports.applyFilterLocalDB = (db_path, filter_obj, operation) => {
|
|
||||||
const filter_props = Object.keys(filter_obj);
|
|
||||||
const return_val = db_path[operation](record => {
|
|
||||||
if (!filter_props) return true;
|
|
||||||
let filtered = true;
|
|
||||||
for (let i = 0; i < filter_props.length; i++) {
|
|
||||||
const filter_prop = filter_props[i];
|
|
||||||
const filter_prop_value = filter_obj[filter_prop];
|
|
||||||
if (filter_prop_value === undefined || filter_prop_value === null) {
|
|
||||||
filtered &= record[filter_prop] === undefined || record[filter_prop] === null;
|
|
||||||
} else {
|
|
||||||
if (typeof filter_prop_value === 'object') {
|
|
||||||
if ('$regex' in filter_prop_value) {
|
|
||||||
filtered &= (record[filter_prop].search(new RegExp(filter_prop_value['$regex'], filter_prop_value['$options'])) !== -1);
|
|
||||||
} else if ('$ne' in filter_prop_value) {
|
|
||||||
filtered &= filter_prop in record && record[filter_prop] !== filter_prop_value['$ne'];
|
|
||||||
} else if ('$lt' in filter_prop_value) {
|
|
||||||
filtered &= filter_prop in record && record[filter_prop] < filter_prop_value['$lt'];
|
|
||||||
} else if ('$gt' in filter_prop_value) {
|
|
||||||
filtered &= filter_prop in record && record[filter_prop] > filter_prop_value['$gt'];
|
|
||||||
} else if ('$lte' in filter_prop_value) {
|
|
||||||
filtered &= filter_prop in record && record[filter_prop] <= filter_prop_value['$lt'];
|
|
||||||
} else if ('$gte' in filter_prop_value) {
|
|
||||||
filtered &= filter_prop in record && record[filter_prop] >= filter_prop_value['$gt'];
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// handle case of nested property check
|
|
||||||
if (filter_prop.includes('.'))
|
|
||||||
filtered &= utils.searchObjectByString(record, filter_prop) === filter_prop_value;
|
|
||||||
else
|
|
||||||
filtered &= record[filter_prop] === filter_prop_value;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return filtered;
|
|
||||||
});
|
|
||||||
return return_val;
|
|
||||||
}
|
|
||||||
|
|
||||||
// should only be used for tests
|
|
||||||
exports.setLocalDBMode = (mode) => {
|
|
||||||
using_local_db = mode;
|
|
||||||
}
|
}
|
||||||
@@ -1,644 +0,0 @@
|
|||||||
const fs = require('fs-extra');
|
|
||||||
const { v4: uuid } = require('uuid');
|
|
||||||
const path = require('path');
|
|
||||||
const NodeID3 = require('node-id3')
|
|
||||||
const Mutex = require('async-mutex').Mutex;
|
|
||||||
|
|
||||||
const logger = require('./logger');
|
|
||||||
const youtubedl_api = require('./youtube-dl');
|
|
||||||
const config_api = require('./config');
|
|
||||||
const twitch_api = require('./twitch');
|
|
||||||
const { create } = require('xmlbuilder2');
|
|
||||||
const categories_api = require('./categories');
|
|
||||||
const utils = require('./utils');
|
|
||||||
const db_api = require('./db');
|
|
||||||
const files_api = require('./files');
|
|
||||||
const notifications_api = require('./notifications');
|
|
||||||
const archive_api = require('./archive');
|
|
||||||
|
|
||||||
// Single mutex serializing download-queue mutations — used by
// createDownload, resumeDownload and checkDownloads below.
const mutex = new Mutex();
// When false, checkDownloads() short-circuits. Re-enabled whenever a
// download is created, resumed or restarted.
let should_check_downloads = true;

// Maps download uid -> spawned youtube-dl child process, so an active
// download can be killed on pause/cancel (see killActiveDownload).
const download_to_child_process = {};

// Start the download loop once the DB layer reports it is initialized;
// otherwise wait for the initialization subject to fire.
if (db_api.database_initialized) {
    exports.setupDownloads();
} else {
    db_api.database_initialized_bs.subscribe(init => {
        if (init) exports.setupDownloads();
    });
}
|
|
||||||
|
|
||||||
/*
|
|
||||||
|
|
||||||
This file handles all the downloading functionality.
|
|
||||||
|
|
||||||
To download a file, we go through 4 steps. Here they are with their respective index & function:
|
|
||||||
|
|
||||||
0: Create the download
|
|
||||||
- createDownload()
|
|
||||||
1: Get info for the download (we need this step for categories and archive functionality)
|
|
||||||
- collectInfo()
|
|
||||||
2: Download the file
|
|
||||||
- downloadQueuedFile()
|
|
||||||
3: Complete
|
|
||||||
- N/A
|
|
||||||
|
|
||||||
We use checkDownloads() to move downloads through the steps and call their respective functions.
|
|
||||||
|
|
||||||
*/
|
|
||||||
|
|
||||||
exports.createDownload = async (url, type, options, user_uid = null, sub_id = null, sub_name = null, prefetched_info = null, paused = false) => {
|
|
||||||
return await mutex.runExclusive(async () => {
|
|
||||||
const download = {
|
|
||||||
url: url,
|
|
||||||
type: type,
|
|
||||||
title: '',
|
|
||||||
user_uid: user_uid,
|
|
||||||
sub_id: sub_id,
|
|
||||||
sub_name: sub_name,
|
|
||||||
prefetched_info: prefetched_info,
|
|
||||||
options: options,
|
|
||||||
uid: uuid(),
|
|
||||||
step_index: 0,
|
|
||||||
paused: paused,
|
|
||||||
running: false,
|
|
||||||
finished_step: true,
|
|
||||||
error: null,
|
|
||||||
percent_complete: null,
|
|
||||||
finished: false,
|
|
||||||
timestamp_start: Date.now()
|
|
||||||
};
|
|
||||||
await db_api.insertRecordIntoTable('download_queue', download);
|
|
||||||
|
|
||||||
should_check_downloads = true;
|
|
||||||
return download;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.pauseDownload = async (download_uid) => {
|
|
||||||
const download = await db_api.getRecord('download_queue', {uid: download_uid});
|
|
||||||
if (download['paused']) {
|
|
||||||
logger.warn(`Download ${download_uid} is already paused!`);
|
|
||||||
return false;
|
|
||||||
} else if (download['finished']) {
|
|
||||||
logger.info(`Download ${download_uid} could not be paused before completing.`);
|
|
||||||
return false;
|
|
||||||
} else {
|
|
||||||
logger.info(`Pausing download ${download_uid}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
killActiveDownload(download);
|
|
||||||
return await db_api.updateRecord('download_queue', {uid: download_uid}, {paused: true, running: false});
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.resumeDownload = async (download_uid) => {
|
|
||||||
return await mutex.runExclusive(async () => {
|
|
||||||
const download = await db_api.getRecord('download_queue', {uid: download_uid});
|
|
||||||
if (!download['paused']) {
|
|
||||||
logger.warn(`Download ${download_uid} is not paused!`);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const success = db_api.updateRecord('download_queue', {uid: download_uid}, {paused: false});
|
|
||||||
should_check_downloads = true;
|
|
||||||
return success;
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.restartDownload = async (download_uid) => {
|
|
||||||
const download = await db_api.getRecord('download_queue', {uid: download_uid});
|
|
||||||
await exports.clearDownload(download_uid);
|
|
||||||
const new_download = await exports.createDownload(download['url'], download['type'], download['options'], download['user_uid']);
|
|
||||||
|
|
||||||
should_check_downloads = true;
|
|
||||||
return new_download;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.cancelDownload = async (download_uid) => {
|
|
||||||
const download = await db_api.getRecord('download_queue', {uid: download_uid});
|
|
||||||
if (download['cancelled']) {
|
|
||||||
logger.warn(`Download ${download_uid} is already cancelled!`);
|
|
||||||
return false;
|
|
||||||
} else if (download['finished']) {
|
|
||||||
logger.info(`Download ${download_uid} could not be cancelled before completing.`);
|
|
||||||
return false;
|
|
||||||
} else {
|
|
||||||
logger.info(`Cancelling download ${download_uid}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
killActiveDownload(download);
|
|
||||||
await handleDownloadError(download_uid, 'Cancelled', 'cancelled');
|
|
||||||
return await db_api.updateRecord('download_queue', {uid: download_uid}, {cancelled: true});
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.clearDownload = async (download_uid) => {
|
|
||||||
return await db_api.removeRecord('download_queue', {uid: download_uid});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Marks a queue record as failed (at most once): sends an error notification
// and writes error/finished state. Silently skips missing uids, missing
// records, and records that already carry an error.
async function handleDownloadError(download_uid, error_message, error_type = null) {
    if (!download_uid) return;
    const errored_download = await db_api.getRecord('download_queue', {uid: download_uid});
    if (!errored_download || errored_download['error']) return;
    notifications_api.sendDownloadErrorNotification(errored_download, errored_download['user_uid'], error_message, error_type);
    await db_api.updateRecord('download_queue', {uid: errored_download['uid']}, {error: error_message, finished: true, running: false, error_type});
}
|
|
||||||
|
|
||||||
// Startup entry point (invoked from module init once the DB is ready):
// repairs downloads left mid-flight by a previous process, then polls the
// queue every second.
exports.setupDownloads = async () => {
    await fixDownloadState();
    setInterval(checkDownloads, 1000);
}
|
|
||||||
|
|
||||||
// Crash recovery: any download that is neither finished nor errored was
// interrupted mid-step by a previous process. Pause it, mark its step as
// finished, and rewind one step (when possible) so it can be resumed safely.
async function fixDownloadState() {
    const downloads = await db_api.getRecords('download_queue');
    downloads.sort((a, b) => a.timestamp_start - b.timestamp_start);
    const interrupted_downloads = downloads.filter(download => !download['finished'] && !download['error']);
    for (const interrupted_download of interrupted_downloads) {
        const update_obj = {finished_step: true, paused: true, running: false};
        if (interrupted_download['step_index'] > 0) {
            update_obj['step_index'] = interrupted_download['step_index'] - 1;
        }
        await db_api.updateRecord('download_queue', {uid: interrupted_download['uid']}, update_obj);
    }
}
|
|
||||||
|
|
||||||
// Polled every second (see setupDownloads): advances queued downloads
// through their steps (0 -> collectInfo, 1 -> downloadQueuedFile), honoring
// the configured concurrency limit.
async function checkDownloads() {
    if (!should_check_downloads) return;

    const downloads = await db_api.getRecords('download_queue');
    // Oldest first, so downloads are advanced in creation order.
    downloads.sort((download1, download2) => download1.timestamp_start - download2.timestamp_start);

    await mutex.runExclusive(async () => {
        // avoid checking downloads unnecessarily, but double check that should_check_downloads is still true
        const running_downloads = downloads.filter(download => !download['paused'] && !download['finished']);
        if (running_downloads.length === 0) {
            should_check_downloads = false;
            logger.verbose('Disabling checking downloads as none are available.');
        }
        // NOTE(review): this return only exits the mutex callback — the outer
        // function continues below even when checking was just disabled (the
        // waiting_downloads loop is then a no-op). Confirm intentional.
        return;
    });

    let running_downloads_count = downloads.filter(download => download['running']).length;
    // Downloads eligible to advance: not paused, previous step complete,
    // not finished overall.
    const waiting_downloads = downloads.filter(download => !download['paused'] && download['finished_step'] && !download['finished']);
    for (let i = 0; i < waiting_downloads.length; i++) {
        const waiting_download = waiting_downloads[i];
        const max_concurrent_downloads = config_api.getConfigItem('ytdl_max_concurrent_downloads');
        // A negative limit stops all downloads; otherwise stop once the
        // running count reaches the limit.
        if (max_concurrent_downloads < 0 || running_downloads_count >= max_concurrent_downloads) break;

        if (waiting_download['finished_step'] && !waiting_download['finished']) {
            if (waiting_download['sub_id']) {
                // Fail downloads whose parent subscription was deleted.
                const sub_missing = !(await db_api.getRecord('subscriptions', {id: waiting_download['sub_id']}));
                if (sub_missing) {
                    // NOTE(review): fire-and-forget — the error write is not
                    // awaited before continuing; confirm intentional.
                    handleDownloadError(waiting_download['uid'], `Download failed as subscription with id '${waiting_download['sub_id']}' is missing!`, 'sub_id_missing');
                    continue;
                }
            }
            // move to next step (both calls are fire-and-forget; the step
            // functions mark running/finished_step in the DB themselves)
            running_downloads_count++;
            if (waiting_download['step_index'] === 0) {
                exports.collectInfo(waiting_download['uid']);
            } else if (waiting_download['step_index'] === 1) {
                exports.downloadQueuedFile(waiting_download['uid']);
            }
        }
    }
}
|
|
||||||
|
|
||||||
// Kills the spawned youtube-dl process for a download that is in the active
// downloading step (step_index 2) and drops it from the uid -> process map.
// No-op for downloads in any other step or with no tracked process.
function killActiveDownload(download) {
    const uid = download['uid'];
    const active_process = download_to_child_process[uid];
    if (download['step_index'] === 2 && active_process) {
        youtubedl_api.killYoutubeDLProcess(active_process);
        delete download_to_child_process[uid];
    }
}
|
|
||||||
|
|
||||||
exports.collectInfo = async (download_uid) => {
|
|
||||||
const download = await db_api.getRecord('download_queue', {uid: download_uid});
|
|
||||||
if (download['paused']) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
logger.verbose(`Collecting info for download ${download_uid}`);
|
|
||||||
await db_api.updateRecord('download_queue', {uid: download_uid}, {step_index: 1, finished_step: false, running: true});
|
|
||||||
|
|
||||||
const url = download['url'];
|
|
||||||
const type = download['type'];
|
|
||||||
const options = download['options'];
|
|
||||||
|
|
||||||
if (download['user_uid'] && !options.customFileFolderPath) {
|
|
||||||
let usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
|
|
||||||
const user_path = path.join(usersFileFolder, download['user_uid'], type);
|
|
||||||
options.customFileFolderPath = user_path + path.sep;
|
|
||||||
}
|
|
||||||
|
|
||||||
let args = await exports.generateArgs(url, type, options, download['user_uid']);
|
|
||||||
|
|
||||||
// get video info prior to download
|
|
||||||
let info = download['prefetched_info'] ? download['prefetched_info'] : await exports.getVideoInfoByURL(url, args, download_uid);
|
|
||||||
|
|
||||||
if (!info || info.length === 0) {
|
|
||||||
// info failed, error presumably already recorded
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// in subscriptions we don't care if archive mode is enabled, but we already removed archived videos from subs by this point
|
|
||||||
const useYoutubeDLArchive = config_api.getConfigItem('ytdl_use_youtubedl_archive');
|
|
||||||
if (useYoutubeDLArchive && !options.ignoreArchive && info.length === 1) {
|
|
||||||
const info_obj = info[0];
|
|
||||||
const exists_in_archive = await archive_api.existsInArchive(info['extractor'], info_obj['id'], type, download['user_uid'], download['sub_id']);
|
|
||||||
if (exists_in_archive) {
|
|
||||||
const error = `File '${info_obj['title']}' already exists in archive! Disable the archive or override to continue downloading.`;
|
|
||||||
logger.warn(error);
|
|
||||||
if (download_uid) {
|
|
||||||
await handleDownloadError(download_uid, error, 'exists_in_archive');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let category = null;
|
|
||||||
|
|
||||||
// check if it fits into a category. If so, then get info again using new args
|
|
||||||
if (info.length === 1 || config_api.getConfigItem('ytdl_allow_playlist_categorization')) category = await categories_api.categorize(info);
|
|
||||||
|
|
||||||
// set custom output if the category has one and re-retrieve info so the download manager has the right file name
|
|
||||||
if (category && category['custom_output']) {
|
|
||||||
options.customOutput = category['custom_output'];
|
|
||||||
options.noRelativePath = true;
|
|
||||||
args = await exports.generateArgs(url, type, options, download['user_uid']);
|
|
||||||
info = await exports.getVideoInfoByURL(url, args, download_uid);
|
|
||||||
}
|
|
||||||
|
|
||||||
const stripped_category = category ? {name: category['name'], uid: category['uid']} : null;
|
|
||||||
|
|
||||||
// setup info required to calculate download progress
|
|
||||||
|
|
||||||
const expected_file_size = utils.getExpectedFileSize(info);
|
|
||||||
|
|
||||||
const files_to_check_for_progress = [];
|
|
||||||
|
|
||||||
// store info in download for future use
|
|
||||||
for (let info_obj of info) files_to_check_for_progress.push(utils.removeFileExtension(info_obj['_filename']));
|
|
||||||
|
|
||||||
const title = info.length > 1 ? info[0]['playlist_title'] || info[0]['playlist'] : info[0]['title'];
|
|
||||||
await db_api.updateRecord('download_queue', {uid: download_uid}, {args: args,
|
|
||||||
finished_step: true,
|
|
||||||
running: false,
|
|
||||||
options: options,
|
|
||||||
files_to_check_for_progress: files_to_check_for_progress,
|
|
||||||
expected_file_size: expected_file_size,
|
|
||||||
title: title,
|
|
||||||
category: stripped_category,
|
|
||||||
prefetched_info: null
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Step 2 of the download pipeline: runs youtube-dl with the args prepared by
// collectInfo, then post-processes each produced file (twitch chat, webm
// rename workaround, ID3 tags, NFO, cropping), registers it in the DB and
// archive, and marks the queue record finished. Resolves with the new file
// uids, or false on error.
exports.downloadQueuedFile = async(download_uid, customDownloadHandler = null) => {
    const download = await db_api.getRecord('download_queue', {uid: download_uid});
    if (download['paused']) {
        return;
    }
    logger.verbose(`Downloading ${download_uid}`);
    return new Promise(async resolve => {
        const audioFolderPath = config_api.getConfigItem('ytdl_audio_folder_path');
        const videoFolderPath = config_api.getConfigItem('ytdl_video_folder_path');
        const usersFolderPath = config_api.getConfigItem('ytdl_users_base_path');
        await db_api.updateRecord('download_queue', {uid: download_uid}, {step_index: 2, finished_step: false, running: true});

        const url = download['url'];
        const type = download['type'];
        const options = download['options'];
        const args = download['args'];
        const category = download['category'];
        // Output folder precedence: explicit custom path > per-user folder >
        // global audio/video folder.
        let fileFolderPath = type === 'audio' ? audioFolderPath : videoFolderPath;
        if (options.customFileFolderPath) {
            fileFolderPath = options.customFileFolderPath;
        } else if (download['user_uid']) {
            fileFolderPath = path.join(usersFolderPath, download['user_uid'], type);
        }
        fs.ensureDirSync(fileFolderPath);

        const start_time = Date.now();

        // Poll progress once per second while the child process runs.
        // NOTE(review): checkDownloadPercent is defined elsewhere in this
        // file (not visible here).
        const download_checker = setInterval(() => checkDownloadPercent(download['uid']), 1000);
        const file_objs = [];
        // download file
        let {child_process, callback} = await youtubedl_api.runYoutubeDL(url, args, customDownloadHandler);
        // Track the process so pause/cancel can kill it.
        if (child_process) download_to_child_process[download['uid']] = child_process;
        const {parsed_output, err} = await callback;
        clearInterval(download_checker);
        let end_time = Date.now();
        let difference = (end_time - start_time)/1000;
        logger.debug(`${type === 'audio' ? 'Audio' : 'Video'} download delay: ${difference} seconds.`);
        if (!parsed_output) {
            // No output at all: either the user paused (not an error), or the
            // process failed — record the raw error text.
            const errored_download = await db_api.getRecord('download_queue', {uid: download_uid});
            if (errored_download && errored_download['paused']) return;
            logger.error(err.toString());
            await handleDownloadError(download_uid, err.toString(), 'unknown_error');
            resolve(false);
            return;
        } else if (parsed_output) {
            if (parsed_output.length === 0 || parsed_output[0].length === 0) {
                // ERROR!
                const error_message = `No output received for video download, check if it exists in your archive.`;
                await handleDownloadError(download_uid, error_message, 'no_output');
                logger.warn(error_message);
                resolve(false);
                return;
            }

            // One JSON object per downloaded file (playlists yield several).
            for (const output_json of parsed_output) {
                if (!output_json) {
                    continue;
                }

                // get filepath with no extension
                const filepath_no_extension = utils.removeFileExtension(output_json['_filename']);

                // Final extension is fixed by the pipeline: mp3 for audio,
                // mp4 for video (see generateArgs).
                const ext = type === 'audio' ? '.mp3' : '.mp4';
                var full_file_path = filepath_no_extension + ext;
                var file_name = filepath_no_extension.substring(fileFolderPath.length, filepath_no_extension.length);

                // Auto-download twitch VOD chat alongside the video when enabled.
                if (type === 'video' && url.includes('twitch.tv/videos/') && url.split('twitch.tv/videos/').length > 1
                    && config_api.getConfigItem('ytdl_twitch_auto_download_chat')) {
                    let vodId = url.split('twitch.tv/videos/')[1];
                    vodId = vodId.split('?')[0];
                    twitch_api.downloadTwitchChatByVODID(vodId, file_name, type, download['user_uid']);
                }

                // renames file if necessary due to bug
                if (!fs.existsSync(output_json['_filename']) && fs.existsSync(output_json['_filename'] + '.webm')) {
                    try {
                        fs.renameSync(output_json['_filename'] + '.webm', output_json['_filename']);
                        logger.info('Renamed ' + file_name + '.webm to ' + file_name);
                    } catch(e) {
                        logger.error(`Failed to rename file ${output_json['_filename']} to its appropriate extension.`);
                    }
                }

                // Write ID3 title/artist tags to the mp3 (artist falls back
                // to the uploader).
                if (type === 'audio') {
                    let tags = {
                        title: output_json['title'],
                        artist: output_json['artist'] ? output_json['artist'] : output_json['uploader']
                    }
                    let success = NodeID3.write(tags, utils.removeFileExtension(output_json['_filename']) + '.mp3');
                    if (!success) logger.error('Failed to apply ID3 tag to audio file ' + output_json['_filename']);
                }

                // Optional Kodi/Jellyfin-style metadata sidecar.
                if (config_api.getConfigItem('ytdl_generate_nfo_files')) {
                    exports.generateNFOFile(output_json, `${filepath_no_extension}.nfo`);
                }

                if (options.cropFileSettings) {
                    await utils.cropFile(full_file_path, options.cropFileSettings.cropFileStart, options.cropFileSettings.cropFileEnd, ext);
                }

                // registers file in DB
                const file_obj = await files_api.registerFileDB(full_file_path, type, download['user_uid'], category, download['sub_id'] ? download['sub_id'] : null, options.cropFileSettings);

                await archive_api.addToArchive(output_json['extractor'], output_json['id'], type, output_json['title'], download['user_uid'], download['sub_id']);

                notifications_api.sendDownloadNotification(file_obj, download['user_uid']);

                file_objs.push(file_obj);
            }

            // The "container" persisted on the record: a playlist for multi-
            // file downloads, the single file object otherwise.
            let container = null;

            if (file_objs.length > 1) {
                // create playlist
                container = await files_api.createPlaylist(download['title'], file_objs.map(file_obj => file_obj.uid), download['user_uid']);
            } else if (file_objs.length === 1) {
                container = file_objs[0];
            } else {
                const error_message = 'Downloaded file failed to result in metadata object.';
                logger.error(error_message);
                await handleDownloadError(download_uid, error_message, 'no_metadata');
            }

            // Step 3 = complete.
            const file_uids = file_objs.map(file_obj => file_obj.uid);
            await db_api.updateRecord('download_queue', {uid: download_uid}, {finished_step: true, finished: true, running: false, step_index: 3, percent_complete: 100, file_uids: file_uids, container: container});
            resolve(file_uids);
        }
    });
}
|
|
||||||
|
|
||||||
// helper functions
|
|
||||||
|
|
||||||
// Builds the youtube-dl/yt-dlp argument list for a download from global
// config plus per-download options. Precedence: options.customArgs replaces
// everything (only the final compatibility filter still applies); otherwise
// quality, output template, auth, cookies, downloader agent, thumbnails,
// global args, additional args, rate limit and yt-dlp tweaks are layered in
// order. Returns the args array.
exports.generateArgs = async (url, type, options, user_uid = null, simulated = false) => {
    const default_downloader = config_api.getConfigItem('ytdl_default_downloader');

    if (!simulated && (default_downloader === 'youtube-dl' || default_downloader === 'youtube-dlc')) {
        logger.warn('It is recommended you use yt-dlp! To prevent failed downloads, change the downloader in your settings menu to yt-dlp and restart your instance.')
    }

    const audioFolderPath = config_api.getConfigItem('ytdl_audio_folder_path');
    const videoFolderPath = config_api.getConfigItem('ytdl_video_folder_path');
    const usersFolderPath = config_api.getConfigItem('ytdl_users_base_path');

    // Output filename template; defaults to the video title.
    const videopath = config_api.getConfigItem('ytdl_default_file_output') ? config_api.getConfigItem('ytdl_default_file_output') : '%(title)s';
    const globalArgs = config_api.getConfigItem('ytdl_custom_args');
    const useCookies = config_api.getConfigItem('ytdl_use_cookies');
    const is_audio = type === 'audio';

    // TODO: fix — for per-user downloads this joins the configured default
    // folder path underneath the user directory.
    let fileFolderPath = type === 'audio' ? audioFolderPath : videoFolderPath;
    if (options.customFileFolderPath) {
        fileFolderPath = options.customFileFolderPath;
    } else if (user_uid) {
        fileFolderPath = path.join(usersFolderPath, user_uid, fileFolderPath);
    }

    // NOTE(review): redundant — the same assignment is already made in the
    // if-branch above; confirm it can be removed.
    if (options.customFileFolderPath) fileFolderPath = options.customFileFolderPath;

    const customArgs = options.customArgs;
    let customOutput = options.customOutput;
    const customQualityConfiguration = options.customQualityConfiguration;

    // video-specific args
    const selectedHeight = options.selectedHeight;
    const maxHeight = options.maxHeight;
    const heightParam = selectedHeight || maxHeight;

    // audio-specific args
    const maxBitrate = options.maxBitrate;

    const youtubeUsername = options.youtubeUsername;
    const youtubePassword = options.youtubePassword;

    let downloadConfig = null;
    // Default quality selection: best audio stream for audio, merged
    // best video+audio (mp4) for video.
    let qualityPath = (is_audio && !options.skip_audio_args) ? ['-f', 'bestaudio'] : ['-f', 'bestvideo+bestaudio', '--merge-output-format', 'mp4'];
    const is_youtube = url.includes('youtu');
    if (!is_audio && !is_youtube) {
        // tiktok videos fail when using the default format
        qualityPath = null;
    }

    if (customArgs) {
        // Custom args are ',,'-delimited and replace the generated config.
        downloadConfig = customArgs.split(',,');
    } else {
        if (customQualityConfiguration) {
            qualityPath = ['-f', customQualityConfiguration, '--merge-output-format', 'mp4'];
        } else if (heightParam && heightParam !== '' && !is_audio) {
            // yt-dlp supports sort-by-resolution (-S res:N); other
            // downloaders fall back to a format filter (max height uses '<=').
            const heightFilter = (maxHeight && default_downloader === 'yt-dlp') ? ['-S', `res:${heightParam}`] : ['-f', `best[height${maxHeight ? '<' : ''}=${heightParam}]+bestaudio`]
            qualityPath = [...heightFilter, '--merge-output-format', 'mp4'];
        } else if (is_audio) {
            // '0' is the best VBR audio quality setting.
            qualityPath = ['--audio-quality', maxBitrate ? maxBitrate : '0']
        }

        if (customOutput) {
            customOutput = options.noRelativePath ? customOutput : path.join(fileFolderPath, customOutput);
            downloadConfig = ['-o', `${customOutput}.%(ext)s`, '--write-info-json', '--print-json'];
        } else {
            downloadConfig = ['-o', path.join(fileFolderPath, videopath + (is_audio ? '.%(ext)s' : '.mp4')), '--write-info-json', '--print-json'];
        }

        if (qualityPath) downloadConfig.push(...qualityPath);

        // Extract audio and convert to mp3.
        if (is_audio && !options.skip_audio_args) {
            downloadConfig.push('-x');
            downloadConfig.push('--audio-format', 'mp3');
        }

        if (youtubeUsername && youtubePassword) {
            downloadConfig.push('--username', youtubeUsername, '--password', youtubePassword);
        }

        // Pass the uploaded cookies file when enabled and present.
        if (useCookies) {
            if (await fs.pathExists(path.join(__dirname, 'appdata', 'cookies.txt'))) {
                downloadConfig.push('--cookies', path.join('appdata', 'cookies.txt'));
            } else {
                logger.warn('Cookies file could not be found. You can either upload one, or disable \'use cookies\' in the Advanced tab in the settings.');
            }
        }

        const useDefaultDownloadingAgent = config_api.getConfigItem('ytdl_use_default_downloading_agent');
        const customDownloadingAgent = config_api.getConfigItem('ytdl_custom_downloading_agent');
        if (!useDefaultDownloadingAgent && customDownloadingAgent) {
            downloadConfig.splice(0, 0, '--external-downloader', customDownloadingAgent);
        }

        if (config_api.getConfigItem('ytdl_include_thumbnail')) {
            downloadConfig.push('--write-thumbnail');
        }

        if (globalArgs && globalArgs !== '') {
            // adds global args
            if (downloadConfig.indexOf('-o') !== -1 && globalArgs.split(',,').indexOf('-o') !== -1) {
                // if global args has an output, replace the original output with that of global args
                const original_output_index = downloadConfig.indexOf('-o');
                downloadConfig.splice(original_output_index, 2);
            }
            downloadConfig = downloadConfig.concat(globalArgs.split(',,'));
        }

        if (options.additionalArgs && options.additionalArgs !== '') {
            downloadConfig = utils.injectArgs(downloadConfig, options.additionalArgs.split(',,'));
        }

        // Apply the configured rate limit unless one is already present.
        const rate_limit = config_api.getConfigItem('ytdl_download_rate_limit');
        if (rate_limit && downloadConfig.indexOf('-r') === -1 && downloadConfig.indexOf('--limit-rate') === -1) {
            downloadConfig.push('-r', rate_limit);
        }

        if (default_downloader === 'yt-dlp') {
            downloadConfig = utils.filterArgs(downloadConfig, ['--print-json']);

            // in yt-dlp -j --no-simulate is preferable
            downloadConfig.push('--no-clean-info-json', '-j', '--no-simulate');
        }

    }

    // filter out incompatible args
    // NOTE(review): bare filterArgs (not utils.filterArgs) — defined
    // elsewhere in this file, outside the visible chunk.
    downloadConfig = filterArgs(downloadConfig, is_audio);

    if (!simulated) logger.verbose(`${default_downloader} args being used: ${downloadConfig.join(',')}`);
    return downloadConfig;
}
|
|
||||||
|
|
||||||
exports.getVideoInfoByURL = async (url, args = [], download_uid = null) => {
|
|
||||||
// remove bad args
|
|
||||||
const temp_args = utils.filterArgs(args, ['--no-simulate']);
|
|
||||||
const new_args = [...temp_args];
|
|
||||||
|
|
||||||
const archiveArgIndex = new_args.indexOf('--download-archive');
|
|
||||||
if (archiveArgIndex !== -1) {
|
|
||||||
new_args.splice(archiveArgIndex, 2);
|
|
||||||
}
|
|
||||||
|
|
||||||
new_args.push('--dump-json');
|
|
||||||
|
|
||||||
let {callback} = await youtubedl_api.runYoutubeDL(url, new_args);
|
|
||||||
const {parsed_output, err} = await callback;
|
|
||||||
if (!parsed_output || parsed_output.length === 0) {
|
|
||||||
let error_message = `Error while retrieving info on video with URL ${url} with the following message: ${err}`;
|
|
||||||
if (err.stderr) error_message += `\n\n${err.stderr}`;
|
|
||||||
logger.error(error_message);
|
|
||||||
if (download_uid) {
|
|
||||||
await handleDownloadError(download_uid, error_message, 'info_retrieve_failed');
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
return parsed_output;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Strips downloader args that do not apply to the requested media type:
 * audio downloads drop video-only flags, video downloads drop audio-only flags.
 */
function filterArgs(args, isAudio) {
    const incompatible_args = isAudio
        ? ['--add-metadata', '--embed-subs', '--xattrs']    // video-only flags
        : ['-x', '--extract-audio', '--embed-thumbnail'];   // audio-only flags
    return utils.filterArgs(args, incompatible_args);
}
|
|
||||||
|
|
||||||
/**
 * Estimates download progress for a queued download and persists it as percent_complete.
 *
 * This is more of an art than a science, we're just selecting files that start with the file name,
 * thus capturing the parts being downloaded in files named like so: '<video title>.<format>.<ext>.part'.
 *
 * Any file that starts with <video title> will be counted as part of the "bytes downloaded", which will
 * be divided by the "total expected bytes."
 *
 * Fixes vs. the original: the async fs.readdir callback was never awaited (the function
 * returned before any work happened), its error argument was ignored, and the percentage
 * was written once per directory with a partial sum. Directories are now scanned
 * synchronously and the record is updated exactly once with the full total.
 */
async function checkDownloadPercent(download_uid) {
    const download = await db_api.getRecord('download_queue', {uid: download_uid});
    if (!download) return;
    const files_to_check_for_progress = download['files_to_check_for_progress'];
    const resulting_file_size = download['expected_file_size'];

    // without an expected size there is nothing to divide by
    if (!resulting_file_size) return;

    let sum_size = 0;
    for (let i = 0; i < files_to_check_for_progress.length; i++) {
        const file_to_check_for_progress = files_to_check_for_progress[i];
        const dir = path.dirname(file_to_check_for_progress);
        if (!fs.existsSync(dir)) continue;
        let dir_entries = [];
        try {
            dir_entries = fs.readdirSync(dir);
        } catch (e) {
            // directory vanished or is unreadable — skip it rather than abort the estimate
            continue;
        }
        for (let j = 0; j < dir_entries.length; j++) {
            const file = dir_entries[j];
            if (!file.includes(path.basename(file_to_check_for_progress))) continue;
            try {
                const file_stats = fs.statSync(path.join(dir, file));
                if (file_stats && file_stats.size) {
                    sum_size += file_stats.size;
                }
            } catch (e) { /* part files can disappear mid-scan; best-effort */ }
        }
    }

    const percent_complete = (sum_size / resulting_file_size * 100).toFixed(2);
    await db_api.updateRecord('download_queue', {uid: download_uid}, {percent_complete: percent_complete});
}
|
|
||||||
|
|
||||||
exports.generateNFOFile = (info, output_path) => {
|
|
||||||
const nfo_obj = {
|
|
||||||
episodedetails: {
|
|
||||||
title: info['fulltitle'],
|
|
||||||
episode: info['playlist_index'] ? info['playlist_index'] : undefined,
|
|
||||||
premiered: utils.formatDateString(info['upload_date']),
|
|
||||||
plot: `${info['uploader_url']}\n${info['description']}\n${info['playlist_title'] ? info['playlist_title'] : ''}`,
|
|
||||||
director: info['artist'] ? info['artist'] : info['uploader']
|
|
||||||
}
|
|
||||||
};
|
|
||||||
const doc = create(nfo_obj);
|
|
||||||
const xml = doc.end({ prettyPrint: true });
|
|
||||||
fs.writeFileSync(output_path, xml);
|
|
||||||
}
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
apps : [{
|
|
||||||
name : "YoutubeDL-Material",
|
|
||||||
script : "./app.js",
|
|
||||||
watch : "placeholder",
|
|
||||||
watch_delay: 5000
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
@@ -1,7 +1,17 @@
|
|||||||
#!/bin/sh
set -eu

CMD="node app.js"

# if the first arg starts with "-" pass it to program
if [ "${1#-}" != "$1" ]; then
    set -- "$CMD" "$@"
fi

# chown current working directory to current user
echo "[entrypoint] setup permission, this may take a while"
if [ "$*" = "$CMD" ] && [ "$(id -u)" = "0" ]; then
    # fix: place options before operands (option-after-operand is non-portable),
    # and drop -R: find already walks the whole tree, so recursing per match is redundant
    find . \! -user "$UID" -exec chown "$UID:$GID" '{}' + || echo "WARNING! Could not change directory ownership. If you manage permissions externally this is fine, otherwise you may experience issues when downloading or deleting videos."
    # re-exec this entrypoint as the unprivileged user
    exec su-exec "$UID:$GID" "$0" "$@"
fi

exec "$@"
|
||||||
|
|||||||
350
backend/files.js
350
backend/files.js
@@ -1,350 +0,0 @@
|
|||||||
const fs = require('fs-extra')
|
|
||||||
const path = require('path')
|
|
||||||
const { v4: uuid } = require('uuid');
|
|
||||||
|
|
||||||
const config_api = require('./config');
|
|
||||||
const db_api = require('./db');
|
|
||||||
const archive_api = require('./archive');
|
|
||||||
const utils = require('./utils')
|
|
||||||
const logger = require('./logger');
|
|
||||||
|
|
||||||
exports.registerFileDB = async (file_path, type, user_uid = null, category = null, sub_id = null, cropFileSettings = null, file_object = null) => {
|
|
||||||
if (!file_object) file_object = generateFileObject(file_path, type);
|
|
||||||
if (!file_object) {
|
|
||||||
logger.error(`Could not find associated JSON file for ${type} file ${file_path}`);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
utils.fixVideoMetadataPerms(file_path, type);
|
|
||||||
|
|
||||||
// add thumbnail path
|
|
||||||
file_object['thumbnailPath'] = utils.getDownloadedThumbnail(file_path);
|
|
||||||
|
|
||||||
// if category exists, only include essential info
|
|
||||||
if (category) file_object['category'] = {name: category['name'], uid: category['uid']};
|
|
||||||
|
|
||||||
// modify duration
|
|
||||||
if (cropFileSettings) {
|
|
||||||
file_object['duration'] = (cropFileSettings.cropFileEnd || file_object.duration) - cropFileSettings.cropFileStart;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (user_uid) file_object['user_uid'] = user_uid;
|
|
||||||
if (sub_id) file_object['sub_id'] = sub_id;
|
|
||||||
|
|
||||||
const file_obj = await registerFileDBManual(file_object);
|
|
||||||
|
|
||||||
// remove metadata JSON if needed
|
|
||||||
if (!config_api.getConfigItem('ytdl_include_metadata')) {
|
|
||||||
utils.deleteJSONFile(file_path, type)
|
|
||||||
}
|
|
||||||
|
|
||||||
return file_obj;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Assigns a fresh UID and registration timestamp, normalizes the stored path,
// and upserts the record into the 'files' table (keyed on path).
async function registerFileDBManual(file_object) {
    file_object['uid'] = uuid();
    file_object['registered'] = Date.now();
    // parse/format round-trip normalizes the path separators for stable DB keys
    file_object['path'] = path.format(path.parse(file_object['path']));

    await db_api.insertRecordIntoTable('files', file_object, {path: file_object['path']})

    return file_object;
}
|
|
||||||
|
|
||||||
/**
 * Builds a File object for a downloaded media file from its sidecar info JSON.
 *
 * @param {string} file_path Path used to locate the info JSON.
 * @param {string} type      'audio' or 'video'.
 * @returns {object|null}    File object, or null when the JSON or the media file is unusable.
 */
function generateFileObject(file_path, type) {
    const jsonobj = utils.getJSON(file_path, type);
    if (!jsonobj) {
        return null;
    } else if (!jsonobj['_filename']) {
        logger.error(`Failed to get filename from info JSON! File ${jsonobj['title']} could not be added.`);
        return null;
    }
    const true_file_path = utils.getTrueFileName(jsonobj['_filename'], type);

    // robustness fix: statSync throws when the media file named by the JSON is
    // missing/renamed; fail soft like the other error paths instead of crashing
    let stats = null;
    try {
        stats = fs.statSync(true_file_path);
    } catch (e) {
        logger.error(`Failed to stat media file ${true_file_path}, it may be missing. File could not be added.`);
        return null;
    }

    const file_id = utils.removeFileExtension(path.basename(file_path));
    const title = jsonobj.title;
    const url = jsonobj.webpage_url;
    const uploader = jsonobj.uploader;
    const upload_date = utils.formatDateString(jsonobj.upload_date);

    const size = stats.size;

    const thumbnail = jsonobj.thumbnail;
    const duration = jsonobj.duration;
    const isaudio = type === 'audio';
    const description = jsonobj.description;
    const file_obj = new utils.File(file_id, title, thumbnail, isaudio, duration, url, uploader, size, true_file_path, upload_date, description, jsonobj.view_count, jsonobj.height, jsonobj.abr);
    return file_obj;
}
|
|
||||||
|
|
||||||
exports.importUnregisteredFiles = async () => {
|
|
||||||
const imported_files = [];
|
|
||||||
const dirs_to_check = await db_api.getFileDirectoriesAndDBs();
|
|
||||||
|
|
||||||
// run through check list and check each file to see if it's missing from the db
|
|
||||||
for (let i = 0; i < dirs_to_check.length; i++) {
|
|
||||||
const dir_to_check = dirs_to_check[i];
|
|
||||||
// recursively get all files in dir's path
|
|
||||||
const files = await utils.getDownloadedFilesByType(dir_to_check.basePath, dir_to_check.type);
|
|
||||||
|
|
||||||
for (let j = 0; j < files.length; j++) {
|
|
||||||
const file = files[j];
|
|
||||||
|
|
||||||
// check if file exists in db, if not add it
|
|
||||||
const files_with_same_url = await db_api.getRecords('files', {url: file.url, sub_id: dir_to_check.sub_id});
|
|
||||||
const file_is_registered = !!(files_with_same_url.find(file_with_same_url => path.resolve(file_with_same_url.path) === path.resolve(file.path)));
|
|
||||||
if (!file_is_registered) {
|
|
||||||
// add additional info
|
|
||||||
const file_obj = await exports.registerFileDB(file['path'], dir_to_check.type, dir_to_check.user_uid, null, dir_to_check.sub_id, null);
|
|
||||||
if (file_obj) {
|
|
||||||
imported_files.push(file_obj['uid']);
|
|
||||||
logger.verbose(`Added discovered file to the database: ${file.id}`);
|
|
||||||
} else {
|
|
||||||
logger.error(`Failed to import ${file['path']} automatically.`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return imported_files;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.addMetadataPropertyToDB = async (property_key) => {
|
|
||||||
try {
|
|
||||||
const dirs_to_check = await db_api.getFileDirectoriesAndDBs();
|
|
||||||
const update_obj = {};
|
|
||||||
for (let i = 0; i < dirs_to_check.length; i++) {
|
|
||||||
const dir_to_check = dirs_to_check[i];
|
|
||||||
|
|
||||||
// recursively get all files in dir's path
|
|
||||||
const files = await utils.getDownloadedFilesByType(dir_to_check.basePath, dir_to_check.type, true);
|
|
||||||
for (let j = 0; j < files.length; j++) {
|
|
||||||
const file = files[j];
|
|
||||||
if (file[property_key]) {
|
|
||||||
update_obj[file.uid] = {[property_key]: file[property_key]};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return await db_api.bulkUpdateRecordsByKey('files', 'uid', update_obj);
|
|
||||||
} catch(err) {
|
|
||||||
logger.error(err);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.createPlaylist = async (playlist_name, uids, user_uid = null) => {
|
|
||||||
const first_video = await exports.getVideo(uids[0]);
|
|
||||||
const thumbnailToUse = first_video['thumbnailURL'];
|
|
||||||
|
|
||||||
let new_playlist = {
|
|
||||||
name: playlist_name,
|
|
||||||
uids: uids,
|
|
||||||
id: uuid(),
|
|
||||||
thumbnailURL: thumbnailToUse,
|
|
||||||
registered: Date.now(),
|
|
||||||
randomize_order: false
|
|
||||||
};
|
|
||||||
|
|
||||||
new_playlist.user_uid = user_uid ? user_uid : undefined;
|
|
||||||
|
|
||||||
await db_api.insertRecordIntoTable('playlists', new_playlist);
|
|
||||||
|
|
||||||
const duration = await exports.calculatePlaylistDuration(new_playlist);
|
|
||||||
await db_api.updateRecord('playlists', {id: new_playlist.id}, {duration: duration});
|
|
||||||
|
|
||||||
return new_playlist;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getPlaylist = async (playlist_id, user_uid = null, require_sharing = false) => {
|
|
||||||
let playlist = await db_api.getRecord('playlists', {id: playlist_id});
|
|
||||||
|
|
||||||
if (!playlist) {
|
|
||||||
playlist = await db_api.getRecord('categories', {uid: playlist_id});
|
|
||||||
if (playlist) {
|
|
||||||
const uids = (await db_api.getRecords('files', {'category.uid': playlist_id})).map(file => file.uid);
|
|
||||||
playlist['uids'] = uids;
|
|
||||||
playlist['auto'] = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// converts playlists to new UID-based schema
|
|
||||||
if (playlist && playlist['fileNames'] && !playlist['uids']) {
|
|
||||||
playlist['uids'] = [];
|
|
||||||
logger.verbose(`Converting playlist ${playlist['name']} to new UID-based schema.`);
|
|
||||||
for (let i = 0; i < playlist['fileNames'].length; i++) {
|
|
||||||
const fileName = playlist['fileNames'][i];
|
|
||||||
const uid = await exports.getVideoUIDByID(fileName, user_uid);
|
|
||||||
if (uid) playlist['uids'].push(uid);
|
|
||||||
else logger.warn(`Failed to convert file with name ${fileName} to its UID while converting playlist ${playlist['name']} to the new UID-based schema. The original file is likely missing/deleted and it will be skipped.`);
|
|
||||||
}
|
|
||||||
exports.updatePlaylist(playlist, user_uid);
|
|
||||||
}
|
|
||||||
|
|
||||||
// prevent unauthorized users from accessing the file info
|
|
||||||
if (require_sharing && !playlist['sharingEnabled']) return null;
|
|
||||||
|
|
||||||
return playlist;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.updatePlaylist = async (playlist) => {
|
|
||||||
let playlistID = playlist.id;
|
|
||||||
|
|
||||||
const duration = await exports.calculatePlaylistDuration(playlist);
|
|
||||||
playlist.duration = duration;
|
|
||||||
|
|
||||||
return await db_api.updateRecord('playlists', {id: playlistID}, playlist);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.setPlaylistProperty = async (playlist_id, assignment_obj, user_uid = null) => {
|
|
||||||
let success = await db_api.updateRecord('playlists', {id: playlist_id}, assignment_obj);
|
|
||||||
|
|
||||||
if (!success) {
|
|
||||||
success = await db_api.updateRecord('categories', {uid: playlist_id}, assignment_obj);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!success) {
|
|
||||||
logger.error(`Could not find playlist or category with ID ${playlist_id}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return success;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.calculatePlaylistDuration = async (playlist, playlist_file_objs = null) => {
|
|
||||||
if (!playlist_file_objs) {
|
|
||||||
playlist_file_objs = [];
|
|
||||||
for (let i = 0; i < playlist['uids'].length; i++) {
|
|
||||||
const uid = playlist['uids'][i];
|
|
||||||
const file_obj = await exports.getVideo(uid);
|
|
||||||
if (file_obj) playlist_file_objs.push(file_obj);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return playlist_file_objs.reduce((a, b) => a + utils.durationStringToNumber(b.duration), 0);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Deletes a file's media, sidecar info JSON, and thumbnail from disk and removes
 * its DB record. Archive bookkeeping: normally the video's archive entry is
 * removed (so it can be re-downloaded); with blacklistMode the entry is instead
 * ADDED to the archive so it will never be re-downloaded.
 *
 * @param {string} uid            File record UID.
 * @param {boolean} blacklistMode Add to archive instead of removing from it.
 * @returns {boolean} true when the media and JSON are gone (or the media never existed).
 */
exports.deleteFile = async (uid, blacklistMode = false) => {
    const file_obj = await exports.getVideo(uid);
    const type = file_obj.isAudio ? 'audio' : 'video';
    const folderPath = path.dirname(file_obj.path);
    const name = file_obj.id;
    const filePathNoExtension = utils.removeFileExtension(file_obj.path);

    // candidate sidecar paths: "<file>.<ext>.info.json" and "<file>.info.json",
    // and both webp/jpg thumbnails
    var jsonPath = `${file_obj.path}.info.json`;
    var altJSONPath = `${filePathNoExtension}.info.json`;
    var thumbnailPath = `${filePathNoExtension}.webp`;
    var altThumbnailPath = `${filePathNoExtension}.jpg`;

    // NOTE(review): only the JSON paths are resolved against __dirname; the thumbnail
    // and media paths are used as-is — confirm file_obj.path is relative to the backend dir
    jsonPath = path.join(__dirname, jsonPath);
    altJSONPath = path.join(__dirname, altJSONPath);

    let jsonExists = await fs.pathExists(jsonPath);
    let thumbnailExists = await fs.pathExists(thumbnailPath);

    // fall back to the alternate JSON naming scheme
    if (!jsonExists) {
        if (await fs.pathExists(altJSONPath)) {
            jsonExists = true;
            jsonPath = altJSONPath;
        }
    }

    // fall back to the jpg thumbnail
    if (!thumbnailExists) {
        if (await fs.pathExists(altThumbnailPath)) {
            thumbnailExists = true;
            thumbnailPath = altThumbnailPath;
        }
    }

    let fileExists = await fs.pathExists(file_obj.path);

    // destroy any open stream descriptors on this file before unlinking
    // (best-effort: failures here are intentionally ignored)
    if (config_api.descriptors[uid]) {
        try {
            for (let i = 0; i < config_api.descriptors[uid].length; i++) {
                config_api.descriptors[uid][i].destroy();
            }
        } catch(e) {

        }
    }

    let useYoutubeDLArchive = config_api.getConfigItem('ytdl_use_youtubedl_archive');
    if (useYoutubeDLArchive || file_obj.sub_id) {
        // get id/extractor from JSON

        const info_json = await (type === 'audio' ? utils.getJSONMp3(name, folderPath) : utils.getJSONMp4(name, folderPath));
        let retrievedID = null;
        let retrievedExtractor = null;
        if (info_json) {
            retrievedID = info_json['id'];
            retrievedExtractor = info_json['extractor'];
        }

        // Remove file ID from the archive file, and write it to the blacklist (if enabled)
        if (!blacklistMode) {
            await archive_api.removeFromArchive(retrievedExtractor, retrievedID, type, file_obj.user_uid, file_obj.sub_id)
        } else {
            // only add once — avoid duplicate archive entries
            const exists_in_archive = await archive_api.existsInArchive(retrievedExtractor, retrievedID, type, file_obj.user_uid, file_obj.sub_id);
            if (!exists_in_archive) {
                await archive_api.addToArchive(retrievedExtractor, retrievedID, type, file_obj.title, file_obj.user_uid, file_obj.sub_id);
            }
        }
    }

    if (jsonExists) await fs.unlink(jsonPath);
    if (thumbnailExists) await fs.unlink(thumbnailPath);

    // DB record is removed regardless of whether the on-disk deletes succeed
    await db_api.removeRecord('files', {uid: uid});

    if (fileExists) {
        await fs.unlink(file_obj.path);
        // success only if both the JSON and the media file are really gone
        if (await fs.pathExists(jsonPath) || await fs.pathExists(file_obj.path)) {
            return false;
        } else {
            return true;
        }
    } else {
        // TODO: tell user that the file didn't exist
        return true;
    }
}
|
|
||||||
|
|
||||||
// Video ID is basically just the file name without the base path and file extension - this method helps us get away from that
|
|
||||||
exports.getVideoUIDByID = async (file_id, uuid = null) => {
|
|
||||||
const file_obj = await db_api.getRecord('files', {id: file_id});
|
|
||||||
return file_obj ? file_obj['uid'] : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getVideo = async (file_uid) => {
|
|
||||||
return await db_api.getRecord('files', {uid: file_uid});
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getAllFiles = async (sort, range, text_search, file_type_filter, favorite_filter, sub_id, uuid) => {
|
|
||||||
const filter_obj = {user_uid: uuid};
|
|
||||||
const regex = true;
|
|
||||||
if (text_search) {
|
|
||||||
if (regex) {
|
|
||||||
filter_obj['title'] = {$regex: `.*${text_search}.*`, $options: 'i'};
|
|
||||||
} else {
|
|
||||||
filter_obj['$text'] = { $search: utils.createEdgeNGrams(text_search) };
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (favorite_filter) {
|
|
||||||
filter_obj['favorite'] = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (sub_id) {
|
|
||||||
filter_obj['sub_id'] = sub_id;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (file_type_filter === 'audio_only') filter_obj['isAudio'] = true;
|
|
||||||
else if (file_type_filter === 'video_only') filter_obj['isAudio'] = false;
|
|
||||||
|
|
||||||
const files = JSON.parse(JSON.stringify(await db_api.getRecords('files', filter_obj, false, sort, range, text_search)));
|
|
||||||
const file_count = await db_api.getRecords('files', filter_obj, true);
|
|
||||||
|
|
||||||
return {files, file_count};
|
|
||||||
}
|
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
#!/bin/bash

# INTERACTIVE PERMISSIONS FIX SCRIPT FOR YTDL-M
# Date: 2022-05-03

# If you want to run this script on a bare-metal installation instead of within Docker
# make sure that the paths configured below match your paths! (it's wise to use the full paths)
# USAGE: within your container's bash shell:
# ./fix-scripts/<name of fix-script>

# User defines / Docker env defaults
PATH_SUBS=/app/subscriptions
PATH_AUDIO=/app/audio
PATH_VIDS=/app/video

clear -x
printf '\n'   # fix: bash's builtin echo does not interpret "\n" without -e
printf '%*s\n' "${COLUMNS:-$(tput cols)}" '' | tr ' ' - # horizontal line
echo "Welcome to the INTERACTIVE PERMISSIONS FIX SCRIPT FOR YTDL-M."
echo "This script will set YTDL-M's download paths' owner to ${USER} (${UID}:${GID})"
echo "and permissions to 644 for files and 755 for directories."
printf '%*s\n' "${COLUMNS:-$(tput cols)}" '' | tr ' ' - # horizontal line
printf '\n'

# check whether dirs exist
i=0
[ -d $PATH_SUBS ] && i=$((i+1)) && echo "✔ (${i}/3) Found Subscriptions directory at ${PATH_SUBS}"
[ -d $PATH_AUDIO ] && i=$((i+1)) && echo "✔ (${i}/3) Found Audio directory at ${PATH_AUDIO}"
[ -d $PATH_VIDS ] && i=$((i+1)) && echo "✔ (${i}/3) Found Video directory at ${PATH_VIDS}"

# Ask to proceed or cancel, exit on missing paths
case $i in
    0)
        printf "\nCouldn't find any download path to fix permissions for! \nPlease edit this script to configure!\n"
        exit 2;;
    3)
        printf '\nFound all download paths to fix permissions for. \nProceed? (Y/N)\n';;
    *)
        printf "\nOnly found ${i} out of 3 download paths! Something about this script's config must be wrong. \nProceed anyways? (Y/N)\n";;
esac
old_stty_cfg=$(stty -g)
stty raw -echo ; answer=$(head -c 1) ; stty $old_stty_cfg # Careful playing with stty
if echo "$answer" | grep -iq "^y" ;then
    printf '\n Running jobs now... (this may take a while)\n'

    # fix_perms <dir>: chown to $UID:$GID, then 644 on files and 755 on directories.
    # FIX: the original 'chmod 644 -R' also stripped the execute bit from directories,
    # which makes them untraversable and would have broken access to the downloads.
    fix_perms () {
        chown -R "$UID:$GID" "$1" && echo "✔ Set owner of $1 to ${USER}."
        find "$1" -type f -exec chmod 644 {} + && echo "✔ Set file permissions of $1 to 644."
        find "$1" -type d -exec chmod 755 {} + && echo "✔ Set directory permissions of $1 to 755."
    }

    [ -d $PATH_SUBS ] && fix_perms $PATH_SUBS
    [ -d $PATH_AUDIO ] && fix_perms $PATH_AUDIO
    [ -d $PATH_VIDS ] && fix_perms $PATH_VIDS

    printf '\n✔ Done.\n'
    printf '\n If you noticed file access errors those MAY be due to currently running downloads.\n'
    echo " Feel free to re-run this script, however download parts should have correct file permissions anyhow. :)"
    exit
else
    printf '\nOkay, bye.\n'
fi
|
|
||||||
@@ -1,142 +0,0 @@
|
|||||||
#!/bin/bash

# INTERACTIVE ARCHIVE-DUPE-ENTRY FIX SCRIPT FOR YTDL-M
# Date: 2022-05-09

# If you want to run this script on a bare-metal installation instead of within Docker
# make sure that the paths configured below match your paths! (it's wise to use the full paths)
# USAGE: within your container's bash shell:
# ./fix-scripts/<name of fix-script>

# User defines (NO TRAILING SLASHES) / Docker env defaults
PATH_SUBSARCHIVE=/app/subscriptions/archives
PATH_ONEOFFARCHIVE=/app/appdata/archives

# Backup paths (substitute with your personal preference if you like)
PATH_SUBSARCHIVEBKP=$PATH_SUBSARCHIVE-BKP-$(date +%Y%m%d%H%M%S)
PATH_ONEOFFARCHIVEBKP=$PATH_ONEOFFARCHIVE-BKP-$(date +%Y%m%d%H%M%S)


# Define Colors for TUI
yellow=$(tput setaf 3)
normal=$(tput sgr0)

tput civis # hide the cursor

clear -x
printf "\n"
printf '%*s\n' "${COLUMNS:-$(tput cols)}" '' | tr ' ' - # horizontal line
printf "Welcome to the INTERACTIVE ARCHIVE-DUPE-ENTRY FIX SCRIPT FOR YTDL-M."
printf "\nThis script will cycle through the archive files in the folders mentioned"
printf "\nbelow and remove within each archive the dupe entries. (compact them)"
printf "\nDuring some older builds of YTDL-M the archives could receive dupe"
printf "\nentries and blow up in size, sometimes causing conflicts with download management."
printf '\n%*s' "${COLUMNS:-$(tput cols)}" '' | tr ' ' - # horizontal line
printf "\n"

# check whether dirs exist
i=0
[ -d $PATH_SUBSARCHIVE ] && i=$((i+1)) && printf "\n✔ (${i}/2) Found Subscriptions archive directory at ${PATH_SUBSARCHIVE}"
[ -d $PATH_ONEOFFARCHIVE ] && i=$((i+1)) && printf "\n✔ (${i}/2) Found one-off archive directory at ${PATH_ONEOFFARCHIVE}"

# Ask to proceed or cancel, exit on missing paths
case $i in
    0)
        printf "\n\n Couldn't find any archive location path! \n\nPlease edit this script to configure!"
        tput cnorm
        exit 2;;
    2)
        printf "\n\n Found all archive locations. \n\nProceed? (Y/N)";;
    *)
        printf "\n\n Only found ${i} out of 2 archive locations! Something about this script's config must be wrong. \n\nProceed anyways? (Y/N)";;
esac
old_stty_cfg=$(stty -g)
stty raw -echo ; answer=$(head -c 1) ; stty $old_stty_cfg # Careful playing with stty
if echo "$answer" | grep -iq "^y" ;then
    printf "\n\nRunning jobs now... (this may take a while)\n"

    printf "\nBacking up directories...\n"

    # spinner glyphs for the progress indicator
    chars="⣾⣽⣻⢿⡿⣟⣯⣷"
    cp -R $PATH_SUBSARCHIVE $PATH_SUBSARCHIVEBKP &
    PID=$!
    i=1
    echo -n ' '
    while [ -d /proc/$PID ]
    do
        printf "${yellow}\b${chars:i++%${#chars}:1}${normal}"
        sleep 0.15
    done
    [ -d $PATH_SUBSARCHIVEBKP ] && printf "\r✔ Backed up ${PATH_SUBSARCHIVE} to ${PATH_SUBSARCHIVEBKP} ($(du -sh $PATH_SUBSARCHIVEBKP | cut -f1))\n"

    cp -R $PATH_ONEOFFARCHIVE $PATH_ONEOFFARCHIVEBKP &
    PID2=$!
    i=1
    echo -n ' '
    while [ -d /proc/$PID2 ]
    do
        printf "${yellow}\b${chars:i++%${#chars}:1}${normal}"
        sleep 0.1
    done
    [ -d $PATH_ONEOFFARCHIVEBKP ] && printf "\r✔ Backed up ${PATH_ONEOFFARCHIVE} to ${PATH_ONEOFFARCHIVEBKP} ($(du -sh $PATH_ONEOFFARCHIVEBKP | cut -f1))\n"


    printf "\nCompacting files...\n"

    tmpfile=$(mktemp) &&

    [ -d $PATH_SUBSARCHIVE ] &&
    find $PATH_SUBSARCHIVE -name '*.txt' -print0 | while read -d $'\0' file # Set delimiter to null because we want to catch all possible filenames (WE CANNOT CHANGE IFS HERE) - https://stackoverflow.com/a/15931055
    do
        cp "$file" "$tmpfile"
        { awk '!x[$0]++' "$tmpfile" > "$file"; } & # https://unix.stackexchange.com/questions/159695/how-does-awk-a0-work
        PID3=$!
        i=1
        echo -n ''
        while [ -d /proc/$PID3 ]
        do
            printf "${yellow}\b${chars:i++%${#chars}:1}${normal}"
            sleep 0.1
        done
        BEFORE=$(wc -l < $tmpfile)
        AFTER=$(wc -l < $file)
        if [[ "$AFTER" -ne "$BEFORE" ]]; then
            printf "\b✔ Compacted down to ${AFTER} lines from ${BEFORE}: ${file}\n"
        else
            printf "\bℹ No action needed for file: ${file}\n"
        fi
    done

    [ -d $PATH_ONEOFFARCHIVE ] &&
    find $PATH_ONEOFFARCHIVE -name '*.txt' -print0 | while read -d $'\0' file
    do
        # FIX: 'cp' was backgrounded here and never waited on, racing the awk that
        # reads the temp copy (and the BEFORE line count). Copy synchronously and
        # background only the awk, exactly like the subscriptions loop above.
        cp "$file" "$tmpfile"
        { awk '!x[$0]++' "$tmpfile" > "$file"; } &
        PID4=$!
        i=1
        echo -n ''
        while [ -d /proc/$PID4 ]
        do
            printf "${yellow}\b${chars:i++%${#chars}:1}${normal}"
            sleep 0.1
        done
        BEFORE=$(wc -l < $tmpfile)
        AFTER=$(wc -l < $file)
        if [ "$BEFORE" -ne "$AFTER" ]; then
            printf "\b✔ Compacted down to ${AFTER} lines from ${BEFORE}: ${file}\n"
        else
            printf "\bℹ No action ran for file: ${file}\n"
        fi
    done
    tput cnorm # show the cursor
    rm "$tmpfile"

    printf "\n\n✔ Done."
    printf "\nℹ Please keep in mind that you may still want to"
    printf "\n run corruption checks against your archives!\n\n"
    exit
else
    tput cnorm
    printf "\nOkay, bye.\n\n"
    exit
fi
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
const winston = require('winston');

// Debug logging is toggled via the YTDL_MODE environment variable.
const debugMode = process.env.YTDL_MODE === 'debug';

// "<timestamp> LEVEL: message"
const defaultFormat = winston.format.printf(
    ({ level, message, label, timestamp }) => `${timestamp} ${level.toUpperCase()}: ${message}`
);

const logger = winston.createLogger({
    level: 'info',
    format: winston.format.combine(winston.format.timestamp(), defaultFormat),
    defaultMeta: {},
    transports: [
        //
        // - Write to all logs with level `info` and below to `combined.log`
        // - Write all logs error (and below) to `error.log`.
        //
        new winston.transports.File({ filename: 'appdata/logs/error.log', level: 'error' }),
        new winston.transports.File({ filename: 'appdata/logs/combined.log' }),
        // console mirrors the logs; verbosity depends on debug mode
        new winston.transports.Console({level: debugMode ? 'debug' : 'info', name: 'console'})
    ]
});

module.exports = logger;
|
|
||||||
@@ -1,293 +0,0 @@
|
|||||||
const db_api = require('./db');
|
|
||||||
const config_api = require('./config');
|
|
||||||
const logger = require('./logger');
|
|
||||||
const utils = require('./utils');
|
|
||||||
const consts = require('./consts');
|
|
||||||
|
|
||||||
const { v4: uuid } = require('uuid');
|
|
||||||
|
|
||||||
const fetch = require('node-fetch');
|
|
||||||
const { gotify } = require("gotify");
|
|
||||||
const TelegramBotAPI = require('node-telegram-bot-api');
|
|
||||||
let telegram_bot = null;
|
|
||||||
const REST = require('@discordjs/rest').REST;
|
|
||||||
const API = require('@discordjs/core').API;
|
|
||||||
const EmbedBuilder = require('@discordjs/builders').EmbedBuilder;
|
|
||||||
|
|
||||||
const NOTIFICATION_TYPE_TO_TITLE = {
|
|
||||||
task_finished: 'Task finished',
|
|
||||||
download_complete: 'Download complete',
|
|
||||||
download_error: 'Download error'
|
|
||||||
}
|
|
||||||
|
|
||||||
const NOTIFICATION_TYPE_TO_BODY = {
|
|
||||||
task_finished: (notification) => notification['data']['task_title'],
|
|
||||||
download_complete: (notification) => {return `${notification['data']['file_title']}\nOriginal URL: ${notification['data']['original_url']}`},
|
|
||||||
download_error: (notification) => {return `Error: ${notification['data']['download_error_message']}\nError code: ${notification['data']['download_error_type']}\n\nOriginal URL: ${notification['data']['download_url']}`}
|
|
||||||
}
|
|
||||||
|
|
||||||
const NOTIFICATION_TYPE_TO_URL = {
|
|
||||||
task_finished: () => {return `${utils.getBaseURL()}/#/tasks`},
|
|
||||||
download_complete: (notification) => {return `${utils.getBaseURL()}/#/player;uid=${notification['data']['file_uid']}`},
|
|
||||||
download_error: () => {return `${utils.getBaseURL()}/#/downloads`},
|
|
||||||
}
|
|
||||||
|
|
||||||
const NOTIFICATION_TYPE_TO_THUMBNAIL = {
|
|
||||||
task_finished: () => null,
|
|
||||||
download_complete: (notification) => notification['data']['file_thumbnail'],
|
|
||||||
download_error: () => null
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.sendNotification = async (notification) => {
|
|
||||||
// info necessary if we are using 3rd party APIs
|
|
||||||
const type = notification['type'];
|
|
||||||
|
|
||||||
const data = {
|
|
||||||
title: NOTIFICATION_TYPE_TO_TITLE[type],
|
|
||||||
body: NOTIFICATION_TYPE_TO_BODY[type](notification),
|
|
||||||
type: type,
|
|
||||||
url: NOTIFICATION_TYPE_TO_URL[type](notification),
|
|
||||||
thumbnail: NOTIFICATION_TYPE_TO_THUMBNAIL[type](notification)
|
|
||||||
}
|
|
||||||
|
|
||||||
if (config_api.getConfigItem('ytdl_use_ntfy_API') && config_api.getConfigItem('ytdl_ntfy_topic_url')) {
|
|
||||||
sendNtfyNotification(data);
|
|
||||||
}
|
|
||||||
if (config_api.getConfigItem('ytdl_use_gotify_API') && config_api.getConfigItem('ytdl_gotify_server_url') && config_api.getConfigItem('ytdl_gotify_app_token')) {
|
|
||||||
sendGotifyNotification(data);
|
|
||||||
}
|
|
||||||
if (config_api.getConfigItem('ytdl_use_telegram_API') && config_api.getConfigItem('ytdl_telegram_bot_token') && config_api.getConfigItem('ytdl_telegram_chat_id')) {
|
|
||||||
exports.sendTelegramNotification(data);
|
|
||||||
}
|
|
||||||
if (config_api.getConfigItem('ytdl_webhook_url')) {
|
|
||||||
sendGenericNotification(data);
|
|
||||||
}
|
|
||||||
if (config_api.getConfigItem('ytdl_discord_webhook_url')) {
|
|
||||||
sendDiscordNotification(data);
|
|
||||||
}
|
|
||||||
if (config_api.getConfigItem('ytdl_slack_webhook_url')) {
|
|
||||||
sendSlackNotification(data);
|
|
||||||
}
|
|
||||||
|
|
||||||
await db_api.insertRecordIntoTable('notifications', notification);
|
|
||||||
return notification;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.sendTaskNotification = async (task_obj, confirmed) => {
|
|
||||||
if (!notificationEnabled('task_finished')) return;
|
|
||||||
// workaround for tasks which are user_uid agnostic
|
|
||||||
const user_uid = config_api.getConfigItem('ytdl_multi_user_mode') ? 'admin' : null;
|
|
||||||
await db_api.removeAllRecords('notifications', {"data.task_key": task_obj.key});
|
|
||||||
const data = {task_key: task_obj.key, task_title: task_obj.title, confirmed: confirmed};
|
|
||||||
const notification = exports.createNotification('task_finished', ['view_tasks'], data, user_uid);
|
|
||||||
return await exports.sendNotification(notification);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.sendDownloadNotification = async (file, user_uid) => {
|
|
||||||
if (!notificationEnabled('download_complete')) return;
|
|
||||||
const data = {file_uid: file.uid, file_title: file.title, file_thumbnail: file.thumbnailURL, original_url: file.url};
|
|
||||||
const notification = exports.createNotification('download_complete', ['play'], data, user_uid);
|
|
||||||
return await exports.sendNotification(notification);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.sendDownloadErrorNotification = async (download, user_uid, error_message, error_type = null) => {
|
|
||||||
if (!notificationEnabled('download_error')) return;
|
|
||||||
const data = {download_uid: download.uid, download_url: download.url, download_error_message: error_message, download_error_type: error_type};
|
|
||||||
const notification = exports.createNotification('download_error', ['view_download_error', 'retry_download'], data, user_uid);
|
|
||||||
return await exports.sendNotification(notification);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.createNotification = (type, actions, data, user_uid) => {
|
|
||||||
const notification = {
|
|
||||||
type: type,
|
|
||||||
actions: actions,
|
|
||||||
data: data,
|
|
||||||
user_uid: user_uid,
|
|
||||||
uid: uuid(),
|
|
||||||
read: false,
|
|
||||||
timestamp: Date.now()/1000
|
|
||||||
}
|
|
||||||
return notification;
|
|
||||||
}
|
|
||||||
|
|
||||||
function notificationEnabled(type) {
|
|
||||||
return config_api.getConfigItem('ytdl_enable_notifications') && (config_api.getConfigItem('ytdl_enable_all_notifications') || config_api.getConfigItem('ytdl_allowed_notification_types').includes(type));
|
|
||||||
}
|
|
||||||
|
|
||||||
// ntfy
|
|
||||||
|
|
||||||
function sendNtfyNotification({body, title, type, url, thumbnail}) {
|
|
||||||
logger.verbose('Sending notification to ntfy');
|
|
||||||
fetch(config_api.getConfigItem('ytdl_ntfy_topic_url'), {
|
|
||||||
method: 'POST',
|
|
||||||
body: body,
|
|
||||||
headers: {
|
|
||||||
'Title': title,
|
|
||||||
'Tags': type,
|
|
||||||
'Click': url,
|
|
||||||
'Attach': thumbnail
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Gotify
|
|
||||||
|
|
||||||
async function sendGotifyNotification({body, title, type, url, thumbnail}) {
|
|
||||||
logger.verbose('Sending notification to gotify');
|
|
||||||
await gotify({
|
|
||||||
server: config_api.getConfigItem('ytdl_gotify_server_url'),
|
|
||||||
app: config_api.getConfigItem('ytdl_gotify_app_token'),
|
|
||||||
title: title,
|
|
||||||
message: body,
|
|
||||||
tag: type,
|
|
||||||
priority: 5, // Keeping default from docs, may want to change this,
|
|
||||||
extras: {
|
|
||||||
"client::notification": {
|
|
||||||
click: { url: url },
|
|
||||||
bigImageUrl: thumbnail
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Telegram
|
|
||||||
|
|
||||||
setupTelegramBot();
|
|
||||||
config_api.config_updated.subscribe(change => {
|
|
||||||
const use_telegram_api = config_api.getConfigItem('ytdl_use_telegram_API');
|
|
||||||
const bot_token = config_api.getConfigItem('ytdl_telegram_bot_token');
|
|
||||||
if (!use_telegram_api || !bot_token) return;
|
|
||||||
if (!change) return;
|
|
||||||
if (change['key'] === 'ytdl_use_telegram_API' || change['key'] === 'ytdl_telegram_bot_token' || change['key'] === 'ytdl_telegram_webhook_proxy') {
|
|
||||||
logger.debug('Telegram bot setting up');
|
|
||||||
setupTelegramBot();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
async function setupTelegramBot() {
|
|
||||||
const use_telegram_api = config_api.getConfigItem('ytdl_use_telegram_API');
|
|
||||||
const bot_token = config_api.getConfigItem('ytdl_telegram_bot_token');
|
|
||||||
if (!use_telegram_api || !bot_token) return;
|
|
||||||
|
|
||||||
telegram_bot = new TelegramBotAPI(bot_token);
|
|
||||||
const webhook_proxy = config_api.getConfigItem('ytdl_telegram_webhook_proxy');
|
|
||||||
const webhook_url = webhook_proxy ? webhook_proxy : `${utils.getBaseURL()}/api/telegramRequest`;
|
|
||||||
telegram_bot.setWebHook(webhook_url);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.sendTelegramNotification = async ({body, title, type, url, thumbnail}) => {
|
|
||||||
if (!telegram_bot){
|
|
||||||
logger.error('Telegram bot not found!');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const chat_id = config_api.getConfigItem('ytdl_telegram_chat_id');
|
|
||||||
if (!chat_id){
|
|
||||||
logger.error('Telegram chat ID required!');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.verbose('Sending notification to Telegram');
|
|
||||||
if (thumbnail) await telegram_bot.sendPhoto(chat_id, thumbnail);
|
|
||||||
telegram_bot.sendMessage(chat_id, `<b>${title}</b>\n\n${body}\n<a href="${url}">${url}</a>`, {parse_mode: 'HTML'});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Discord
|
|
||||||
|
|
||||||
async function sendDiscordNotification({body, title, type, url, thumbnail}) {
|
|
||||||
const discord_webhook_url = config_api.getConfigItem('ytdl_discord_webhook_url');
|
|
||||||
const url_split = discord_webhook_url.split('webhooks/');
|
|
||||||
const [webhook_id, webhook_token] = url_split[1].split('/');
|
|
||||||
const rest = new REST({ version: '10' });
|
|
||||||
const api = new API(rest);
|
|
||||||
const embed = new EmbedBuilder()
|
|
||||||
.setTitle(title)
|
|
||||||
.setColor(0x00FFFF)
|
|
||||||
.setURL(url)
|
|
||||||
.setDescription(`ID: ${type}`);
|
|
||||||
if (thumbnail) embed.setThumbnail(thumbnail);
|
|
||||||
if (type === 'download_error') embed.setColor(0xFC2003);
|
|
||||||
|
|
||||||
const result = await api.webhooks.execute(webhook_id, webhook_token, {
|
|
||||||
content: body,
|
|
||||||
username: 'YoutubeDL-Material',
|
|
||||||
avatar_url: consts.ICON_URL,
|
|
||||||
embeds: [embed],
|
|
||||||
});
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Slack
|
|
||||||
|
|
||||||
function sendSlackNotification({body, title, type, url, thumbnail}) {
|
|
||||||
const slack_webhook_url = config_api.getConfigItem('ytdl_slack_webhook_url');
|
|
||||||
logger.verbose(`Sending slack notification to ${slack_webhook_url}`);
|
|
||||||
const data = {
|
|
||||||
blocks: [
|
|
||||||
{
|
|
||||||
type: "section",
|
|
||||||
text: {
|
|
||||||
type: "mrkdwn",
|
|
||||||
text: `*${title}*`
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
type: "section",
|
|
||||||
text: {
|
|
||||||
type: "plain_text",
|
|
||||||
text: body
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
// add thumbnail if exists
|
|
||||||
if (thumbnail) {
|
|
||||||
data['blocks'].push({
|
|
||||||
type: "image",
|
|
||||||
image_url: thumbnail,
|
|
||||||
alt_text: "notification_thumbnail"
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
data['blocks'].push(
|
|
||||||
{
|
|
||||||
type: "section",
|
|
||||||
text: {
|
|
||||||
type: "mrkdwn",
|
|
||||||
text: `<${url}|${url}>`
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
type: "context",
|
|
||||||
elements: [
|
|
||||||
{
|
|
||||||
type: "mrkdwn",
|
|
||||||
text: `*ID:* ${type}`
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
fetch(slack_webhook_url, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
"Content-Type": "application/json"
|
|
||||||
},
|
|
||||||
body: JSON.stringify(data),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generic
|
|
||||||
|
|
||||||
function sendGenericNotification(data) {
|
|
||||||
const webhook_url = config_api.getConfigItem('ytdl_webhook_url');
|
|
||||||
logger.verbose(`Sending generic notification to ${webhook_url}`);
|
|
||||||
fetch(webhook_url, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
"Content-Type": "application/json"
|
|
||||||
},
|
|
||||||
body: JSON.stringify(data),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
5957
backend/package-lock.json
generated
5957
backend/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -4,9 +4,18 @@
|
|||||||
"description": "backend for YoutubeDL-Material",
|
"description": "backend for YoutubeDL-Material",
|
||||||
"main": "index.js",
|
"main": "index.js",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "mocha test --exit -s 1000",
|
"test": "echo \"Error: no test specified\" && exit 1",
|
||||||
"start": "pm2-runtime --raw pm2.config.js",
|
"start": "nodemon -q app.js"
|
||||||
"debug": "set YTDL_MODE=debug && node app.js"
|
},
|
||||||
|
"nodemonConfig": {
|
||||||
|
"ignore": [
|
||||||
|
"*.js",
|
||||||
|
"appdata/*",
|
||||||
|
"public/*"
|
||||||
|
],
|
||||||
|
"watch": [
|
||||||
|
"restart.json"
|
||||||
|
]
|
||||||
},
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
@@ -17,55 +26,38 @@
|
|||||||
"bugs": {
|
"bugs": {
|
||||||
"url": ""
|
"url": ""
|
||||||
},
|
},
|
||||||
"engines": {
|
|
||||||
"node": "^16",
|
|
||||||
"npm": "6.14.4"
|
|
||||||
},
|
|
||||||
"homepage": "",
|
"homepage": "",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@discordjs/builders": "^1.6.1",
|
"archiver": "^3.1.1",
|
||||||
"@discordjs/core": "^0.5.2",
|
"async": "^3.1.0",
|
||||||
"archiver": "^5.3.1",
|
|
||||||
"async": "^3.2.3",
|
|
||||||
"async-mutex": "^0.4.0",
|
|
||||||
"axios": "^0.21.2",
|
|
||||||
"bcryptjs": "^2.4.0",
|
"bcryptjs": "^2.4.0",
|
||||||
"command-exists": "^1.2.9",
|
|
||||||
"compression": "^1.7.4",
|
"compression": "^1.7.4",
|
||||||
"config": "^3.2.3",
|
"config": "^3.2.3",
|
||||||
"execa": "^5.1.1",
|
"exe": "^1.0.2",
|
||||||
"express": "^4.18.2",
|
"express": "^4.17.1",
|
||||||
"express-session": "^1.17.3",
|
|
||||||
"feed": "^4.2.2",
|
|
||||||
"fluent-ffmpeg": "^2.1.2",
|
"fluent-ffmpeg": "^2.1.2",
|
||||||
"fs-extra": "^9.0.0",
|
"fs-extra": "^9.0.0",
|
||||||
"gotify": "^1.1.0",
|
"glob": "^7.1.6",
|
||||||
"jsonwebtoken": "^9.0.0",
|
"jsonwebtoken": "^8.5.1",
|
||||||
"lodash": "^4.17.21",
|
|
||||||
"lowdb": "^1.0.0",
|
"lowdb": "^1.0.0",
|
||||||
"md5": "^2.2.1",
|
"md5": "^2.2.1",
|
||||||
"mocha": "^9.2.2",
|
"merge-files": "^0.1.2",
|
||||||
"moment": "^2.29.4",
|
"multer": "^1.4.2",
|
||||||
"mongodb": "^3.6.9",
|
"node-fetch": "^2.6.1",
|
||||||
"multer": "1.4.5-lts.1",
|
"node-id3": "^0.1.14",
|
||||||
"node-fetch": "^2.6.7",
|
"nodemon": "^2.0.2",
|
||||||
"node-id3": "^0.2.6",
|
"passport": "^0.4.1",
|
||||||
"node-schedule": "^2.1.0",
|
|
||||||
"node-telegram-bot-api": "^0.61.0",
|
|
||||||
"passport": "^0.6.0",
|
|
||||||
"passport-http": "^0.3.0",
|
"passport-http": "^0.3.0",
|
||||||
"passport-jwt": "^4.0.1",
|
"passport-jwt": "^4.0.0",
|
||||||
"passport-ldapauth": "^3.0.1",
|
"passport-ldapauth": "^2.1.4",
|
||||||
"passport-local": "^1.0.0",
|
"passport-local": "^1.0.0",
|
||||||
"progress": "^2.0.3",
|
"progress": "^2.0.3",
|
||||||
"ps-node": "^0.1.6",
|
"ps-node": "^0.1.6",
|
||||||
"read-last-lines": "^1.7.2",
|
"read-last-lines": "^1.7.2",
|
||||||
"rxjs": "^7.3.0",
|
|
||||||
"shortid": "^2.2.15",
|
"shortid": "^2.2.15",
|
||||||
"tree-kill": "^1.2.2",
|
|
||||||
"unzipper": "^0.10.10",
|
"unzipper": "^0.10.10",
|
||||||
"uuid": "^9.0.1",
|
"uuidv4": "^6.0.6",
|
||||||
"winston": "^3.7.2",
|
"winston": "^3.2.1",
|
||||||
"xmlbuilder2": "^3.0.2"
|
"youtube-dl": "^3.0.2"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,9 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
apps : [{
|
|
||||||
name : "YoutubeDL-Material",
|
|
||||||
script : "./app.js",
|
|
||||||
watch : "placeholder",
|
|
||||||
out_file: "/dev/null",
|
|
||||||
error_file: "/dev/null"
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
@@ -1,29 +1,44 @@
|
|||||||
const fs = require('fs-extra');
|
const FileSync = require('lowdb/adapters/FileSync')
|
||||||
const path = require('path');
|
|
||||||
|
|
||||||
const youtubedl_api = require('./youtube-dl');
|
var fs = require('fs-extra');
|
||||||
|
const { uuid } = require('uuidv4');
|
||||||
|
var path = require('path');
|
||||||
|
|
||||||
|
var youtubedl = require('youtube-dl');
|
||||||
const config_api = require('./config');
|
const config_api = require('./config');
|
||||||
const archive_api = require('./archive');
|
var utils = require('./utils')
|
||||||
const utils = require('./utils');
|
|
||||||
const logger = require('./logger');
|
|
||||||
const CONSTS = require('./consts');
|
|
||||||
|
|
||||||
const debugMode = process.env.YTDL_MODE === 'debug';
|
const debugMode = process.env.YTDL_MODE === 'debug';
|
||||||
|
|
||||||
const db_api = require('./db');
|
var logger = null;
|
||||||
const downloader_api = require('./downloader');
|
var db = null;
|
||||||
|
var users_db = null;
|
||||||
|
var db_api = null;
|
||||||
|
|
||||||
exports.subscribe = async (sub, user_uid = null, skip_get_info = false) => {
|
function setDB(input_db, input_users_db, input_db_api) { db = input_db; users_db = input_users_db; db_api = input_db_api }
|
||||||
|
function setLogger(input_logger) { logger = input_logger; }
|
||||||
|
|
||||||
|
function initialize(input_db, input_users_db, input_logger, input_db_api) {
|
||||||
|
setDB(input_db, input_users_db, input_db_api);
|
||||||
|
setLogger(input_logger);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function subscribe(sub, user_uid = null) {
|
||||||
const result_obj = {
|
const result_obj = {
|
||||||
success: false,
|
success: false,
|
||||||
error: ''
|
error: ''
|
||||||
};
|
};
|
||||||
return new Promise(async resolve => {
|
return new Promise(async resolve => {
|
||||||
// sub should just have url and name. here we will get isPlaylist and path
|
// sub should just have url and name. here we will get isPlaylist and path
|
||||||
sub.isPlaylist = sub.isPlaylist || sub.url.includes('playlist');
|
sub.isPlaylist = sub.url.includes('playlist');
|
||||||
sub.videos = [];
|
sub.videos = [];
|
||||||
|
|
||||||
let url_exists = !!(await db_api.getRecord('subscriptions', {url: sub.url, user_uid: user_uid}));
|
let url_exists = false;
|
||||||
|
|
||||||
|
if (user_uid)
|
||||||
|
url_exists = !!users_db.get('users').find({uid: user_uid}).get('subscriptions').find({url: sub.url}).value()
|
||||||
|
else
|
||||||
|
url_exists = !!db.get('subscriptions').find({url: sub.url}).value();
|
||||||
|
|
||||||
if (!sub.name && url_exists) {
|
if (!sub.name && url_exists) {
|
||||||
logger.error(`Sub with the same URL "${sub.url}" already exists -- please provide a custom name for this new subscription.`);
|
logger.error(`Sub with the same URL "${sub.url}" already exists -- please provide a custom name for this new subscription.`);
|
||||||
@@ -32,17 +47,23 @@ exports.subscribe = async (sub, user_uid = null, skip_get_info = false) => {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
sub['user_uid'] = user_uid ? user_uid : undefined;
|
// add sub to db
|
||||||
await db_api.insertRecordIntoTable('subscriptions', JSON.parse(JSON.stringify(sub)));
|
let sub_db = null;
|
||||||
|
if (user_uid) {
|
||||||
let success = skip_get_info ? true : await getSubscriptionInfo(sub);
|
users_db.get('users').find({uid: user_uid}).get('subscriptions').push(sub).write();
|
||||||
exports.writeSubscriptionMetadata(sub);
|
sub_db = users_db.get('users').find({uid: user_uid}).get('subscriptions').find({id: sub.id});
|
||||||
|
} else {
|
||||||
|
db.get('subscriptions').push(sub).write();
|
||||||
|
sub_db = db.get('subscriptions').find({id: sub.id});
|
||||||
|
}
|
||||||
|
let success = await getSubscriptionInfo(sub, user_uid);
|
||||||
|
|
||||||
if (success) {
|
if (success) {
|
||||||
if (!sub.paused) exports.getVideosForSub(sub.id);
|
sub = sub_db.value();
|
||||||
|
getVideosForSub(sub, user_uid);
|
||||||
} else {
|
} else {
|
||||||
logger.error('Subscribe: Failed to get subscription info. Subscribe failed.')
|
logger.error('Subscribe: Failed to get subscription info. Subscribe failed.')
|
||||||
}
|
};
|
||||||
|
|
||||||
result_obj.success = success;
|
result_obj.success = success;
|
||||||
result_obj.sub = sub;
|
result_obj.sub = sub;
|
||||||
@@ -51,7 +72,13 @@ exports.subscribe = async (sub, user_uid = null, skip_get_info = false) => {
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getSubscriptionInfo(sub) {
|
async function getSubscriptionInfo(sub, user_uid = null) {
|
||||||
|
let basePath = null;
|
||||||
|
if (user_uid)
|
||||||
|
basePath = path.join(config_api.getConfigItem('ytdl_users_base_path'), user_uid, 'subscriptions');
|
||||||
|
else
|
||||||
|
basePath = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
||||||
|
|
||||||
// get videos
|
// get videos
|
||||||
let downloadConfig = ['--dump-json', '--playlist-end', '1'];
|
let downloadConfig = ['--dump-json', '--playlist-end', '1'];
|
||||||
let useCookies = config_api.getConfigItem('ytdl_use_cookies');
|
let useCookies = config_api.getConfigItem('ytdl_use_cookies');
|
||||||
@@ -63,66 +90,85 @@ async function getSubscriptionInfo(sub) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let {callback} = await youtubedl_api.runYoutubeDL(sub.url, downloadConfig);
|
return new Promise(resolve => {
|
||||||
const {parsed_output, err} = await callback;
|
youtubedl.exec(sub.url, downloadConfig, {}, function(err, output) {
|
||||||
if (err) {
|
if (debugMode) {
|
||||||
logger.error(err.stderr);
|
logger.info('Subscribe: got info for subscription ' + sub.id);
|
||||||
return false;
|
|
||||||
}
|
|
||||||
logger.verbose('Subscribe: got info for subscription ' + sub.id);
|
|
||||||
for (const output_json of parsed_output) {
|
|
||||||
if (!output_json) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!sub.name) {
|
|
||||||
if (sub.isPlaylist) {
|
|
||||||
sub.name = output_json.playlist_title ? output_json.playlist_title : output_json.playlist;
|
|
||||||
} else {
|
|
||||||
sub.name = output_json.uploader;
|
|
||||||
}
|
}
|
||||||
// if it's now valid, update
|
if (err) {
|
||||||
if (sub.name) {
|
logger.error(err.stderr);
|
||||||
let sub_name = sub.name;
|
resolve(false);
|
||||||
const sub_name_exists = await db_api.getRecord('subscriptions', {name: sub.name, isPlaylist: sub.isPlaylist, user_uid: sub.user_uid});
|
} else if (output) {
|
||||||
if (sub_name_exists) sub_name += ` - ${sub.id}`;
|
if (output.length === 0 || (output.length === 1 && output[0] === '')) {
|
||||||
await db_api.updateRecord('subscriptions', {id: sub.id}, {name: sub_name});
|
logger.verbose('Could not get info for ' + sub.id);
|
||||||
|
resolve(false);
|
||||||
|
}
|
||||||
|
for (let i = 0; i < output.length; i++) {
|
||||||
|
let output_json = null;
|
||||||
|
try {
|
||||||
|
output_json = JSON.parse(output[i]);
|
||||||
|
} catch(e) {
|
||||||
|
output_json = null;
|
||||||
|
}
|
||||||
|
if (!output_json) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (!sub.name) {
|
||||||
|
if (sub.isPlaylist) {
|
||||||
|
sub.name = output_json.playlist_title ? output_json.playlist_title : output_json.playlist;
|
||||||
|
} else {
|
||||||
|
sub.name = output_json.uploader;
|
||||||
|
}
|
||||||
|
// if it's now valid, update
|
||||||
|
if (sub.name) {
|
||||||
|
if (user_uid)
|
||||||
|
users_db.get('users').find({uid: user_uid}).get('subscriptions').find({id: sub.id}).assign({name: sub.name}).write();
|
||||||
|
else
|
||||||
|
db.get('subscriptions').find({id: sub.id}).assign({name: sub.name}).write();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const useArchive = config_api.getConfigItem('ytdl_use_youtubedl_archive');
|
||||||
|
if (useArchive && !sub.archive) {
|
||||||
|
// must create the archive
|
||||||
|
const archive_dir = path.join(__dirname, basePath, 'archives', sub.name);
|
||||||
|
const archive_path = path.join(archive_dir, 'archive.txt');
|
||||||
|
|
||||||
|
// creates archive directory and text file if it doesn't exist
|
||||||
|
fs.ensureDirSync(archive_dir);
|
||||||
|
fs.ensureFileSync(archive_path);
|
||||||
|
|
||||||
|
// updates subscription
|
||||||
|
sub.archive = archive_dir;
|
||||||
|
if (user_uid)
|
||||||
|
users_db.get('users').find({uid: user_uid}).get('subscriptions').find({id: sub.id}).assign({archive: archive_dir}).write();
|
||||||
|
else
|
||||||
|
db.get('subscriptions').find({id: sub.id}).assign({archive: archive_dir}).write();
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: get even more info
|
||||||
|
|
||||||
|
resolve(true);
|
||||||
|
}
|
||||||
|
resolve(false);
|
||||||
}
|
}
|
||||||
}
|
});
|
||||||
|
});
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
return false;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.unsubscribe = async (sub_id, deleteMode, user_uid = null) => {
|
async function unsubscribe(sub, deleteMode, user_uid = null) {
|
||||||
const sub = await exports.getSubscription(sub_id);
|
|
||||||
let basePath = null;
|
let basePath = null;
|
||||||
if (user_uid)
|
if (user_uid)
|
||||||
basePath = path.join(config_api.getConfigItem('ytdl_users_base_path'), user_uid, 'subscriptions');
|
basePath = path.join(config_api.getConfigItem('ytdl_users_base_path'), user_uid, 'subscriptions');
|
||||||
else
|
else
|
||||||
basePath = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
basePath = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
||||||
|
let result_obj = { success: false, error: '' };
|
||||||
|
|
||||||
let id = sub.id;
|
let id = sub.id;
|
||||||
|
if (user_uid)
|
||||||
const sub_files = await db_api.getRecords('files', {sub_id: id});
|
users_db.get('users').find({uid: user_uid}).get('subscriptions').remove({id: id}).write();
|
||||||
for (let i = 0; i < sub_files.length; i++) {
|
else
|
||||||
const sub_file = sub_files[i];
|
db.get('subscriptions').remove({id: id}).write();
|
||||||
if (config_api.descriptors[sub_file['uid']]) {
|
|
||||||
try {
|
|
||||||
for (let i = 0; i < config_api.descriptors[sub_file['uid']].length; i++) {
|
|
||||||
config_api.descriptors[sub_file['uid']][i].destroy();
|
|
||||||
}
|
|
||||||
} catch(e) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
await killSubDownloads(sub_id, true);
|
|
||||||
await db_api.removeRecord('subscriptions', {id: id});
|
|
||||||
await db_api.removeAllRecords('files', {sub_id: id});
|
|
||||||
|
|
||||||
// failed subs have no name, on unsubscribe they shouldn't error
|
// failed subs have no name, on unsubscribe they shouldn't error
|
||||||
if (!sub.name) {
|
if (!sub.name) {
|
||||||
@@ -131,34 +177,40 @@ exports.unsubscribe = async (sub_id, deleteMode, user_uid = null) => {
|
|||||||
|
|
||||||
const appendedBasePath = getAppendedBasePath(sub, basePath);
|
const appendedBasePath = getAppendedBasePath(sub, basePath);
|
||||||
if (deleteMode && (await fs.pathExists(appendedBasePath))) {
|
if (deleteMode && (await fs.pathExists(appendedBasePath))) {
|
||||||
|
if (sub.archive && (await fs.pathExists(sub.archive))) {
|
||||||
|
const archive_file_path = path.join(sub.archive, 'archive.txt');
|
||||||
|
// deletes archive if it exists
|
||||||
|
if (await fs.pathExists(archive_file_path)) {
|
||||||
|
await fs.unlink(archive_file_path);
|
||||||
|
}
|
||||||
|
await fs.rmdir(sub.archive);
|
||||||
|
}
|
||||||
await fs.remove(appendedBasePath);
|
await fs.remove(appendedBasePath);
|
||||||
}
|
}
|
||||||
|
|
||||||
await db_api.removeAllRecords('archives', {sub_id: sub.id});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.deleteSubscriptionFile = async (sub, file, deleteForever, file_uid = null, user_uid = null) => {
|
async function deleteSubscriptionFile(sub, file, deleteForever, file_uid = null, user_uid = null) {
|
||||||
if (typeof sub === 'string') {
|
|
||||||
// TODO: fix bad workaround where sub is a sub_id
|
|
||||||
sub = await db_api.getRecord('subscriptions', {sub_id: sub});
|
|
||||||
}
|
|
||||||
// TODO: combine this with deletefile
|
|
||||||
let basePath = null;
|
let basePath = null;
|
||||||
basePath = user_uid ? path.join(config_api.getConfigItem('ytdl_users_base_path'), user_uid, 'subscriptions')
|
let sub_db = null;
|
||||||
: config_api.getConfigItem('ytdl_subscriptions_base_path');
|
if (user_uid) {
|
||||||
|
basePath = path.join(config_api.getConfigItem('ytdl_users_base_path'), user_uid, 'subscriptions');
|
||||||
|
sub_db = users_db.get('users').find({uid: user_uid}).get('subscriptions').find({id: sub.id});
|
||||||
|
} else {
|
||||||
|
basePath = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
||||||
|
sub_db = db.get('subscriptions').find({id: sub.id});
|
||||||
|
}
|
||||||
|
const useArchive = config_api.getConfigItem('ytdl_use_youtubedl_archive');
|
||||||
const appendedBasePath = getAppendedBasePath(sub, basePath);
|
const appendedBasePath = getAppendedBasePath(sub, basePath);
|
||||||
const name = file;
|
const name = file;
|
||||||
let retrievedID = null;
|
let retrievedID = null;
|
||||||
let retrievedExtractor = null;
|
sub_db.get('videos').remove({uid: file_uid}).write();
|
||||||
|
|
||||||
await db_api.removeRecord('files', {uid: file_uid});
|
|
||||||
|
|
||||||
let filePath = appendedBasePath;
|
let filePath = appendedBasePath;
|
||||||
const ext = (sub.type && sub.type === 'audio') ? '.mp3' : '.mp4'
|
const ext = (sub.type && sub.type === 'audio') ? '.mp3' : '.mp4'
|
||||||
var jsonPath = path.join(__dirname,filePath,name+'.info.json');
|
var jsonPath = path.join(__dirname,filePath,name+'.info.json');
|
||||||
var videoFilePath = path.join(__dirname,filePath,name+ext);
|
var videoFilePath = path.join(__dirname,filePath,name+ext);
|
||||||
var imageFilePath = path.join(__dirname,filePath,name+'.jpg');
|
var imageFilePath = path.join(__dirname,filePath,name+'.jpg');
|
||||||
var altImageFilePath = path.join(__dirname,filePath,name+'.webp');
|
var altImageFilePath = path.join(__dirname,filePath,name+'.jpg');
|
||||||
|
|
||||||
const [jsonExists, videoFileExists, imageFileExists, altImageFileExists] = await Promise.all([
|
const [jsonExists, videoFileExists, imageFileExists, altImageFileExists] = await Promise.all([
|
||||||
fs.pathExists(jsonPath),
|
fs.pathExists(jsonPath),
|
||||||
@@ -168,9 +220,7 @@ exports.deleteSubscriptionFile = async (sub, file, deleteForever, file_uid = nul
|
|||||||
]);
|
]);
|
||||||
|
|
||||||
if (jsonExists) {
|
if (jsonExists) {
|
||||||
const info_json = fs.readJSONSync(jsonPath);
|
retrievedID = JSON.parse(await fs.readFile(jsonPath, 'utf8'))['id'];
|
||||||
retrievedID = info_json['id'];
|
|
||||||
retrievedExtractor = info_json['extractor'];
|
|
||||||
await fs.unlink(jsonPath);
|
await fs.unlink(jsonPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -188,14 +238,12 @@ exports.deleteSubscriptionFile = async (sub, file, deleteForever, file_uid = nul
|
|||||||
return false;
|
return false;
|
||||||
} else {
|
} else {
|
||||||
// check if the user wants the video to be redownloaded (deleteForever === false)
|
// check if the user wants the video to be redownloaded (deleteForever === false)
|
||||||
if (deleteForever) {
|
if (!deleteForever && useArchive && sub.archive && retrievedID) {
|
||||||
// ensure video is in the archives
|
const archive_path = path.join(sub.archive, 'archive.txt')
|
||||||
const exists_in_archive = await archive_api.existsInArchive(retrievedExtractor, retrievedID, sub.type, user_uid, sub.id);
|
// if archive exists, remove line with video ID
|
||||||
if (!exists_in_archive) {
|
if (await fs.pathExists(archive_path)) {
|
||||||
await archive_api.addToArchive(retrievedExtractor, retrievedID, sub.type, file.title, user_uid, sub.id);
|
await removeIDFromArchive(archive_path, retrievedID);
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
await archive_api.removeFromArchive(retrievedExtractor, retrievedID, sub.type, user_uid, sub.id);
|
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
@@ -205,77 +253,17 @@ exports.deleteSubscriptionFile = async (sub, file, deleteForever, file_uid = nul
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let current_sub_index = 0; // To keep track of the current subscription
|
async function getVideosForSub(sub, user_uid = null) {
|
||||||
exports.watchSubscriptionsInterval = async () => {
|
if (!subExists(sub.id, user_uid)) {
|
||||||
const subscriptions_check_interval = config_api.getConfigItem('ytdl_subscriptions_check_interval');
|
|
||||||
let parent_interval = setInterval(() => watchSubscriptions(), subscriptions_check_interval*1000);
|
|
||||||
watchSubscriptions();
|
|
||||||
config_api.config_updated.subscribe(change => {
|
|
||||||
if (!change) return;
|
|
||||||
if (change['key'] === 'ytdl_subscriptions_check_interval' || change['key'] === 'ytdl_multi_user_mode') {
|
|
||||||
current_sub_index = 0; // TODO: start after the last sub check
|
|
||||||
logger.verbose('Resetting sub check schedule due to config change');
|
|
||||||
clearInterval(parent_interval);
|
|
||||||
const new_interval = config_api.getConfigItem('ytdl_subscriptions_check_interval');
|
|
||||||
parent_interval = setInterval(() => watchSubscriptions(), new_interval*1000);
|
|
||||||
watchSubscriptions();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function watchSubscriptions() {
|
|
||||||
const subscription_ids = await getValidSubscriptionsToCheck();
|
|
||||||
if (subscription_ids.length === 0) {
|
|
||||||
logger.info('Skipping subscription check as no valid subscriptions exist.');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
checkSubscription(subscription_ids[current_sub_index]);
|
|
||||||
current_sub_index = (current_sub_index + 1) % subscription_ids.length;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function checkSubscription(sub_id) {
|
|
||||||
let sub = await exports.getSubscription(sub_id);
|
|
||||||
|
|
||||||
// don't check the sub if the last check for the same subscription has not completed
|
|
||||||
if (sub.downloading) {
|
|
||||||
logger.verbose(`Subscription: skipped checking ${sub.name} as it's downloading videos.`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!sub.name) {
|
|
||||||
logger.verbose(`Subscription: skipped check for subscription with uid ${sub.id} as name has not been retrieved yet.`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
await exports.getVideosForSub(sub.id);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function getValidSubscriptionsToCheck() {
|
|
||||||
const subscriptions = await exports.getAllSubscriptions();
|
|
||||||
|
|
||||||
if (!subscriptions) return;
|
|
||||||
|
|
||||||
// auto pause deprecated streamingOnly mode
|
|
||||||
const streaming_only_subs = subscriptions.filter(sub => sub.streamingOnly);
|
|
||||||
exports.updateSubscriptionPropertyMultiple(streaming_only_subs, {paused: true});
|
|
||||||
|
|
||||||
const valid_subscription_ids = subscriptions.filter(sub => !sub.paused && !sub.streamingOnly).map(sub => sub.id);
|
|
||||||
return valid_subscription_ids;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getVideosForSub = async (sub_id) => {
|
|
||||||
const sub = await exports.getSubscription(sub_id);
|
|
||||||
if (!sub || sub['downloading']) {
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
_getVideosForSub(sub);
|
// get sub_db
|
||||||
return true;
|
let sub_db = null;
|
||||||
}
|
if (user_uid)
|
||||||
|
sub_db = users_db.get('users').find({uid: user_uid}).get('subscriptions').find({id: sub.id});
|
||||||
async function _getVideosForSub(sub) {
|
else
|
||||||
const user_uid = sub['user_uid'];
|
sub_db = db.get('subscriptions').find({id: sub.id});
|
||||||
updateSubscriptionProperty(sub, {downloading: true}, user_uid);
|
|
||||||
|
|
||||||
// get basePath
|
// get basePath
|
||||||
let basePath = null;
|
let basePath = null;
|
||||||
@@ -284,99 +272,27 @@ async function _getVideosForSub(sub) {
|
|||||||
else
|
else
|
||||||
basePath = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
basePath = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
||||||
|
|
||||||
let appendedBasePath = getAppendedBasePath(sub, basePath);
|
const useArchive = config_api.getConfigItem('ytdl_use_youtubedl_archive');
|
||||||
fs.ensureDirSync(appendedBasePath);
|
|
||||||
|
|
||||||
const downloadConfig = await generateArgsForSubscription(sub, user_uid);
|
let appendedBasePath = null
|
||||||
|
appendedBasePath = getAppendedBasePath(sub, basePath);
|
||||||
|
|
||||||
// get videos
|
let multiUserMode = null;
|
||||||
logger.verbose(`Subscription: getting list of videos to download for ${sub.name} with args: ${downloadConfig.join(',')}`);
|
if (user_uid) {
|
||||||
|
multiUserMode = {
|
||||||
let {child_process, callback} = await youtubedl_api.runYoutubeDL(sub.url, downloadConfig);
|
user: user_uid,
|
||||||
updateSubscriptionProperty(sub, {child_process: child_process}, user_uid);
|
file_path: appendedBasePath
|
||||||
const {parsed_output, err} = await callback;
|
}
|
||||||
updateSubscriptionProperty(sub, {downloading: false, child_process: null}, user_uid);
|
|
||||||
if (!parsed_output) {
|
|
||||||
logger.error('Subscription check failed!');
|
|
||||||
if (err) logger.error(err);
|
|
||||||
return null;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// remove temporary archive file if it exists
|
const ext = (sub.type && sub.type === 'audio') ? '.mp3' : '.mp4'
|
||||||
const archive_path = path.join(appendedBasePath, 'archive.txt');
|
|
||||||
const archive_exists = await fs.pathExists(archive_path);
|
let fullOutput = `${appendedBasePath}/%(title)s.%(ext)s`;
|
||||||
if (archive_exists) {
|
if (sub.custom_output) {
|
||||||
await fs.unlink(archive_path);
|
fullOutput = `${appendedBasePath}/${sub.custom_output}.%(ext)s`;
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.verbose('Subscription: finished check for ' + sub.name);
|
let downloadConfig = ['-o', fullOutput, '-ciw', '--write-info-json', '--print-json'];
|
||||||
const files_to_download = await handleOutputJSON(parsed_output, sub, user_uid);
|
|
||||||
return files_to_download;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function handleOutputJSON(output_jsons, sub, user_uid) {
|
|
||||||
if (config_api.getConfigItem('ytdl_subscriptions_redownload_fresh_uploads')) {
|
|
||||||
await setFreshUploads(sub, user_uid);
|
|
||||||
checkVideosForFreshUploads(sub, user_uid);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (output_jsons.length === 0 || (output_jsons.length === 1 && output_jsons[0] === '')) {
|
|
||||||
logger.verbose('No additional videos to download for ' + sub.name);
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
const files_to_download = await getFilesToDownload(sub, output_jsons);
|
|
||||||
const base_download_options = exports.generateOptionsForSubscriptionDownload(sub, user_uid);
|
|
||||||
|
|
||||||
for (let j = 0; j < files_to_download.length; j++) {
|
|
||||||
const file_to_download = files_to_download[j];
|
|
||||||
file_to_download['formats'] = utils.stripPropertiesFromObject(file_to_download['formats'], ['format_id', 'filesize', 'filesize_approx']); // prevent download object from blowing up in size
|
|
||||||
await downloader_api.createDownload(file_to_download['webpage_url'], sub.type || 'video', base_download_options, user_uid, sub.id, sub.name, [file_to_download]);
|
|
||||||
}
|
|
||||||
|
|
||||||
return files_to_download;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.generateOptionsForSubscriptionDownload = (sub, user_uid) => {
|
|
||||||
let basePath = null;
|
|
||||||
if (user_uid)
|
|
||||||
basePath = path.join(config_api.getConfigItem('ytdl_users_base_path'), user_uid, 'subscriptions');
|
|
||||||
else
|
|
||||||
basePath = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
|
||||||
|
|
||||||
let default_output = config_api.getConfigItem('ytdl_default_file_output') ? config_api.getConfigItem('ytdl_default_file_output') : '%(title)s';
|
|
||||||
|
|
||||||
const base_download_options = {
|
|
||||||
maxHeight: sub.maxQuality && sub.maxQuality !== 'best' ? sub.maxQuality : null,
|
|
||||||
customFileFolderPath: getAppendedBasePath(sub, basePath),
|
|
||||||
customOutput: sub.custom_output ? `${sub.custom_output}` : `${default_output}`,
|
|
||||||
customArchivePath: path.join(basePath, 'archives', sub.name),
|
|
||||||
additionalArgs: sub.custom_args
|
|
||||||
}
|
|
||||||
|
|
||||||
return base_download_options;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function generateArgsForSubscription(sub, user_uid, redownload = false, desired_path = null) {
|
|
||||||
// get basePath
|
|
||||||
let basePath = null;
|
|
||||||
if (user_uid)
|
|
||||||
basePath = path.join(config_api.getConfigItem('ytdl_users_base_path'), user_uid, 'subscriptions');
|
|
||||||
else
|
|
||||||
basePath = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
|
||||||
|
|
||||||
let appendedBasePath = getAppendedBasePath(sub, basePath);
|
|
||||||
|
|
||||||
const file_output = config_api.getConfigItem('ytdl_default_file_output') ? config_api.getConfigItem('ytdl_default_file_output') : '%(title)s';
|
|
||||||
|
|
||||||
let fullOutput = `"${appendedBasePath}/${file_output}.%(ext)s"`;
|
|
||||||
if (desired_path) {
|
|
||||||
fullOutput = `"${desired_path}.%(ext)s"`;
|
|
||||||
} else if (sub.custom_output) {
|
|
||||||
fullOutput = `"${appendedBasePath}/${sub.custom_output}.%(ext)s"`;
|
|
||||||
}
|
|
||||||
|
|
||||||
let downloadConfig = ['--dump-json', '-o', fullOutput, !redownload ? '-ciw' : '-ci', '--write-info-json', '--print-json'];
|
|
||||||
|
|
||||||
let qualityPath = null;
|
let qualityPath = null;
|
||||||
if (sub.type && sub.type === 'audio') {
|
if (sub.type && sub.type === 'audio') {
|
||||||
@@ -390,18 +306,8 @@ async function generateArgsForSubscription(sub, user_uid, redownload = false, de
|
|||||||
|
|
||||||
downloadConfig.push(...qualityPath)
|
downloadConfig.push(...qualityPath)
|
||||||
|
|
||||||
// skip videos that are in the archive. otherwise sub download can be permanently slow (vs. just the first time)
|
|
||||||
const archive_text = await archive_api.generateArchive(sub.type, sub.user_uid, sub.id);
|
|
||||||
const archive_count = archive_text.split('\n').length - 1;
|
|
||||||
if (archive_count > 0) {
|
|
||||||
logger.verbose(`Generating temporary archive file for subscription ${sub.name} with ${archive_count} entries.`)
|
|
||||||
const archive_path = path.join(appendedBasePath, 'archive.txt');
|
|
||||||
await fs.writeFile(archive_path, archive_text);
|
|
||||||
downloadConfig.push('--download-archive', archive_path);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (sub.custom_args) {
|
if (sub.custom_args) {
|
||||||
const customArgsArray = sub.custom_args.split(',,');
|
customArgsArray = sub.custom_args.split(',,');
|
||||||
if (customArgsArray.indexOf('-f') !== -1) {
|
if (customArgsArray.indexOf('-f') !== -1) {
|
||||||
// if custom args has a custom quality, replce the original quality with that of custom args
|
// if custom args has a custom quality, replce the original quality with that of custom args
|
||||||
const original_output_index = downloadConfig.indexOf('-f');
|
const original_output_index = downloadConfig.indexOf('-f');
|
||||||
@@ -410,7 +316,23 @@ async function generateArgsForSubscription(sub, user_uid, redownload = false, de
|
|||||||
downloadConfig.push(...customArgsArray);
|
downloadConfig.push(...customArgsArray);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (sub.timerange && !redownload) {
|
let archive_dir = null;
|
||||||
|
let archive_path = null;
|
||||||
|
|
||||||
|
if (useArchive) {
|
||||||
|
if (sub.archive) {
|
||||||
|
archive_dir = sub.archive;
|
||||||
|
archive_path = path.join(archive_dir, 'archive.txt')
|
||||||
|
}
|
||||||
|
downloadConfig.push('--download-archive', archive_path);
|
||||||
|
}
|
||||||
|
|
||||||
|
// if streaming only mode, just get the list of videos
|
||||||
|
if (sub.streamingOnly) {
|
||||||
|
downloadConfig = ['-f', 'best', '--dump-json'];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (sub.timerange) {
|
||||||
downloadConfig.push('--dateafter', sub.timerange);
|
downloadConfig.push('--dateafter', sub.timerange);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -427,172 +349,159 @@ async function generateArgsForSubscription(sub, user_uid, redownload = false, de
|
|||||||
downloadConfig.push('--write-thumbnail');
|
downloadConfig.push('--write-thumbnail');
|
||||||
}
|
}
|
||||||
|
|
||||||
const rate_limit = config_api.getConfigItem('ytdl_download_rate_limit');
|
// get videos
|
||||||
if (rate_limit && downloadConfig.indexOf('-r') === -1 && downloadConfig.indexOf('--limit-rate') === -1) {
|
logger.verbose('Subscription: getting videos for subscription ' + sub.name);
|
||||||
downloadConfig.push('-r', rate_limit);
|
|
||||||
}
|
|
||||||
|
|
||||||
const default_downloader = config_api.getConfigItem('ytdl_default_downloader');
|
return new Promise(resolve => {
|
||||||
if (default_downloader === 'yt-dlp') {
|
youtubedl.exec(sub.url, downloadConfig, {}, function(err, output) {
|
||||||
downloadConfig.push('--no-clean-info-json');
|
logger.verbose('Subscription: finished check for ' + sub.name);
|
||||||
}
|
if (err && !output) {
|
||||||
|
logger.error(err.stderr ? err.stderr : err.message);
|
||||||
|
if (err.stderr.includes('This video is unavailable')) {
|
||||||
|
logger.info('An error was encountered with at least one video, backup method will be used.')
|
||||||
|
try {
|
||||||
|
const outputs = err.stdout.split(/\r\n|\r|\n/);
|
||||||
|
for (let i = 0; i < outputs.length; i++) {
|
||||||
|
const output = JSON.parse(outputs[i]);
|
||||||
|
handleOutputJSON(sub, sub_db, output, i === 0, multiUserMode)
|
||||||
|
if (err.stderr.includes(output['id']) && archive_path) {
|
||||||
|
// we found a video that errored! add it to the archive to prevent future errors
|
||||||
|
fs.appendFileSync(archive_path, output['id']);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch(e) {
|
||||||
|
logger.error('Backup method failed. See error below:');
|
||||||
|
logger.error(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
resolve(false);
|
||||||
|
} else if (output) {
|
||||||
|
if (output.length === 0 || (output.length === 1 && output[0] === '')) {
|
||||||
|
logger.verbose('No additional videos to download for ' + sub.name);
|
||||||
|
resolve(true);
|
||||||
|
}
|
||||||
|
for (let i = 0; i < output.length; i++) {
|
||||||
|
let output_json = null;
|
||||||
|
try {
|
||||||
|
output_json = JSON.parse(output[i]);
|
||||||
|
} catch(e) {
|
||||||
|
output_json = null;
|
||||||
|
}
|
||||||
|
if (!output_json) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
downloadConfig = utils.filterArgs(downloadConfig, ['--write-comments']);
|
const reset_videos = i === 0;
|
||||||
|
handleOutputJSON(sub, sub_db, output_json, multiUserMode, reset_videos);
|
||||||
|
|
||||||
return downloadConfig;
|
// TODO: Potentially store downloaded files in db?
|
||||||
}
|
|
||||||
|
|
||||||
async function getFilesToDownload(sub, output_jsons) {
|
}
|
||||||
const files_to_download = [];
|
resolve(true);
|
||||||
for (let i = 0; i < output_jsons.length; i++) {
|
|
||||||
const output_json = output_jsons[i];
|
|
||||||
const file_missing = !(await db_api.getRecord('files', {sub_id: sub.id, url: output_json['webpage_url']})) && !(await db_api.getRecord('download_queue', {sub_id: sub.id, url: output_json['webpage_url'], error: null, finished: false}));
|
|
||||||
if (file_missing) {
|
|
||||||
const file_with_path_exists = await db_api.getRecord('files', {sub_id: sub.id, path: output_json['_filename']});
|
|
||||||
if (file_with_path_exists) {
|
|
||||||
// or maybe just overwrite???
|
|
||||||
logger.info(`Skipping adding file ${output_json['_filename']} for subscription ${sub.name} as a file with that path already exists.`)
|
|
||||||
continue;
|
|
||||||
}
|
}
|
||||||
const exists_in_archive = await archive_api.existsInArchive(output_json['extractor'], output_json['id'], sub.type, sub.user_uid, sub.id);
|
});
|
||||||
if (exists_in_archive) continue;
|
}, err => {
|
||||||
|
logger.error(err);
|
||||||
files_to_download.push(output_json);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return files_to_download;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.cancelCheckSubscription = async (sub_id) => {
|
|
||||||
const sub = await exports.getSubscription(sub_id);
|
|
||||||
if (!sub['downloading'] && !sub['child_process']) {
|
|
||||||
logger.error('Failed to cancel subscription check, verify that it is still running!');
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// if check is ongoing
|
|
||||||
if (sub['child_process']) {
|
|
||||||
const child_process = sub['child_process'];
|
|
||||||
youtubedl_api.killYoutubeDLProcess(child_process);
|
|
||||||
}
|
|
||||||
|
|
||||||
// cancel activate video downloads
|
|
||||||
await killSubDownloads(sub_id);
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function killSubDownloads(sub_id, remove_downloads = false) {
|
|
||||||
const sub_downloads = await db_api.getRecords('download_queue', {sub_id: sub_id});
|
|
||||||
for (const sub_download of sub_downloads) {
|
|
||||||
if (sub_download['running'])
|
|
||||||
await downloader_api.cancelDownload(sub_download['uid']);
|
|
||||||
if (remove_downloads)
|
|
||||||
await db_api.removeRecord('download_queue', {uid: sub_download['uid']});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getSubscriptions = async (user_uid = null) => {
|
|
||||||
// TODO: fix issue where the downloading property may not match getSubscription()
|
|
||||||
return await db_api.getRecords('subscriptions', {user_uid: user_uid});
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getAllSubscriptions = async () => {
|
|
||||||
const all_subs = await db_api.getRecords('subscriptions');
|
|
||||||
const multiUserMode = config_api.getConfigItem('ytdl_multi_user_mode');
|
|
||||||
return all_subs.filter(sub => !!(sub.user_uid) === !!multiUserMode);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getSubscription = async (subID) => {
|
|
||||||
// stringify and parse because we may override the 'downloading' property
|
|
||||||
const sub = JSON.parse(JSON.stringify(await db_api.getRecord('subscriptions', {id: subID})));
|
|
||||||
// now with the download_queue, we may need to override 'downloading'
|
|
||||||
const current_downloads = await db_api.getRecords('download_queue', {running: true, sub_id: subID}, true);
|
|
||||||
if (!sub['downloading']) sub['downloading'] = current_downloads > 0;
|
|
||||||
return sub;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getSubscriptionByName = async (subName, user_uid = null) => {
|
|
||||||
return await db_api.getRecord('subscriptions', {name: subName, user_uid: user_uid});
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.updateSubscription = async (sub) => {
|
|
||||||
await db_api.updateRecord('subscriptions', {id: sub.id}, sub);
|
|
||||||
exports.writeSubscriptionMetadata(sub);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.updateSubscriptionPropertyMultiple = async (subs, assignment_obj) => {
|
|
||||||
subs.forEach(async sub => {
|
|
||||||
await updateSubscriptionProperty(sub, assignment_obj);
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
async function updateSubscriptionProperty(sub, assignment_obj) {
|
function handleOutputJSON(sub, sub_db, output_json, multiUserMode = null, reset_videos = false) {
|
||||||
// TODO: combine with updateSubscription
|
if (sub.streamingOnly) {
|
||||||
await db_api.updateRecord('subscriptions', {id: sub.id}, assignment_obj);
|
if (reset_videos) {
|
||||||
|
sub_db.assign({videos: []}).write();
|
||||||
|
}
|
||||||
|
|
||||||
|
// remove unnecessary info
|
||||||
|
output_json.formats = null;
|
||||||
|
|
||||||
|
// add to db
|
||||||
|
sub_db.get('videos').push(output_json).write();
|
||||||
|
} else {
|
||||||
|
db_api.registerFileDB(path.basename(output_json['_filename']), sub.type, multiUserMode, sub);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function getAllSubscriptions(user_uid = null) {
|
||||||
|
if (user_uid)
|
||||||
|
return users_db.get('users').find({uid: user_uid}).get('subscriptions').value();
|
||||||
|
else
|
||||||
|
return db.get('subscriptions').value();
|
||||||
|
}
|
||||||
|
|
||||||
|
function getSubscription(subID, user_uid = null) {
|
||||||
|
if (user_uid)
|
||||||
|
return users_db.get('users').find({uid: user_uid}).get('subscriptions').find({id: subID}).value();
|
||||||
|
else
|
||||||
|
return db.get('subscriptions').find({id: subID}).value();
|
||||||
|
}
|
||||||
|
|
||||||
|
function getSubscriptionByName(subName, user_uid = null) {
|
||||||
|
if (user_uid)
|
||||||
|
return users_db.get('users').find({uid: user_uid}).get('subscriptions').find({name: subName}).value();
|
||||||
|
else
|
||||||
|
return db.get('subscriptions').find({name: subName}).value();
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateSubscription(sub, user_uid = null) {
|
||||||
|
if (user_uid) {
|
||||||
|
users_db.get('users').find({uid: user_uid}).get('subscriptions').find({id: sub.id}).assign(sub).write();
|
||||||
|
} else {
|
||||||
|
db.get('subscriptions').find({id: sub.id}).assign(sub).write();
|
||||||
|
}
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.writeSubscriptionMetadata = (sub) => {
|
function subExists(subID, user_uid = null) {
|
||||||
let basePath = sub.user_uid ? path.join(config_api.getConfigItem('ytdl_users_base_path'), sub.user_uid, 'subscriptions')
|
if (user_uid)
|
||||||
: config_api.getConfigItem('ytdl_subscriptions_base_path');
|
return !!users_db.get('users').find({uid: user_uid}).get('subscriptions').find({id: subID}).value();
|
||||||
const appendedBasePath = getAppendedBasePath(sub, basePath);
|
else
|
||||||
const metadata_path = path.join(appendedBasePath, CONSTS.SUBSCRIPTION_BACKUP_PATH);
|
return !!db.get('subscriptions').find({id: subID}).value();
|
||||||
|
|
||||||
fs.ensureDirSync(appendedBasePath);
|
|
||||||
fs.writeJSONSync(metadata_path, sub);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function setFreshUploads(sub) {
|
|
||||||
const sub_files = await db_api.getRecords('files', {sub_id: sub.id});
|
|
||||||
if (!sub_files) return;
|
|
||||||
const current_date = new Date().toISOString().split('T')[0].replace(/-/g, '');
|
|
||||||
sub_files.forEach(async file => {
|
|
||||||
if (current_date === file['upload_date'].replace(/-/g, '')) {
|
|
||||||
// set upload as fresh
|
|
||||||
const file_uid = file['uid'];
|
|
||||||
await db_api.setVideoProperty(file_uid, {'fresh_upload': true});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function checkVideosForFreshUploads(sub, user_uid) {
|
|
||||||
const sub_files = await db_api.getRecords('files', {sub_id: sub.id});
|
|
||||||
const current_date = new Date().toISOString().split('T')[0].replace(/-/g, '');
|
|
||||||
sub_files.forEach(async file => {
|
|
||||||
if (file['fresh_upload'] && current_date > file['upload_date'].replace(/-/g, '')) {
|
|
||||||
await checkVideoIfBetterExists(file, sub, user_uid)
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function checkVideoIfBetterExists(file_obj, sub, user_uid) {
|
|
||||||
const new_path = file_obj['path'].substring(0, file_obj['path'].length - 4);
|
|
||||||
const downloadConfig = await generateArgsForSubscription(sub, user_uid, true, new_path);
|
|
||||||
logger.verbose(`Checking if a better version of the fresh upload ${file_obj['id']} exists.`);
|
|
||||||
// simulate a download to verify that a better version exists
|
|
||||||
|
|
||||||
const info = await downloader_api.getVideoInfoByURL(file_obj['url'], downloadConfig);
|
|
||||||
if (info && info.length === 1) {
|
|
||||||
const metric_to_compare = sub.type === 'audio' ? 'abr' : 'height';
|
|
||||||
if (info[metric_to_compare] > file_obj[metric_to_compare]) {
|
|
||||||
// download new video as the simulated one is better
|
|
||||||
let {callback} = await youtubedl_api.runYoutubeDL(sub.url, downloadConfig);
|
|
||||||
const {parsed_output, err} = await callback;
|
|
||||||
if (err) {
|
|
||||||
logger.verbose(`Failed to download better version of video ${file_obj['id']}`);
|
|
||||||
} else if (parsed_output) {
|
|
||||||
logger.verbose(`Successfully upgraded video ${file_obj['id']}'s ${metric_to_compare} from ${file_obj[metric_to_compare]} to ${info[metric_to_compare]}`);
|
|
||||||
await db_api.setVideoProperty(file_obj['uid'], {[metric_to_compare]: info[metric_to_compare]});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
await db_api.setVideoProperty(file_obj['uid'], {'fresh_upload': false});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// helper functions
|
// helper functions
|
||||||
|
|
||||||
function getAppendedBasePath(sub, base_path) {
|
function getAppendedBasePath(sub, base_path) {
|
||||||
|
|
||||||
return path.join(base_path, (sub.isPlaylist ? 'playlists/' : 'channels/'), sub.name);
|
return path.join(base_path, (sub.isPlaylist ? 'playlists/' : 'channels/'), sub.name);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function removeIDFromArchive(archive_path, id) {
|
||||||
|
let data = await fs.readFile(archive_path, {encoding: 'utf-8'});
|
||||||
|
if (!data) {
|
||||||
|
logger.error('Archive could not be found.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let dataArray = data.split('\n'); // convert file data in an array
|
||||||
|
const searchKeyword = id; // we are looking for a line, contains, key word id in the file
|
||||||
|
let lastIndex = -1; // let say, we have not found the keyword
|
||||||
|
|
||||||
|
for (let index=0; index<dataArray.length; index++) {
|
||||||
|
if (dataArray[index].includes(searchKeyword)) { // check if a line contains the id keyword
|
||||||
|
lastIndex = index; // found a line includes a id keyword
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const line = dataArray.splice(lastIndex, 1); // remove the keyword id from the data Array
|
||||||
|
|
||||||
|
// UPDATE FILE WITH NEW DATA
|
||||||
|
const updatedData = dataArray.join('\n');
|
||||||
|
await fs.writeFile(archive_path, updatedData);
|
||||||
|
if (line) return line;
|
||||||
|
if (err) throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getSubscription : getSubscription,
|
||||||
|
getSubscriptionByName : getSubscriptionByName,
|
||||||
|
getAllSubscriptions : getAllSubscriptions,
|
||||||
|
updateSubscription : updateSubscription,
|
||||||
|
subscribe : subscribe,
|
||||||
|
unsubscribe : unsubscribe,
|
||||||
|
deleteSubscriptionFile : deleteSubscriptionFile,
|
||||||
|
getVideosForSub : getVideosForSub,
|
||||||
|
removeIDFromArchive : removeIDFromArchive,
|
||||||
|
setLogger : setLogger,
|
||||||
|
initialize : initialize
|
||||||
|
}
|
||||||
|
|||||||
343
backend/tasks.js
343
backend/tasks.js
@@ -1,343 +0,0 @@
|
|||||||
const db_api = require('./db');
|
|
||||||
const notifications_api = require('./notifications');
|
|
||||||
const youtubedl_api = require('./youtube-dl');
|
|
||||||
const archive_api = require('./archive');
|
|
||||||
const files_api = require('./files');
|
|
||||||
const subscriptions_api = require('./subscriptions');
|
|
||||||
const config_api = require('./config');
|
|
||||||
const auth_api = require('./authentication/auth');
|
|
||||||
const utils = require('./utils');
|
|
||||||
const logger = require('./logger');
|
|
||||||
const CONSTS = require('./consts');
|
|
||||||
|
|
||||||
const fs = require('fs-extra');
|
|
||||||
const path = require('path');
|
|
||||||
const scheduler = require('node-schedule');
|
|
||||||
|
|
||||||
const TASKS = {
|
|
||||||
backup_local_db: {
|
|
||||||
run: db_api.backupDB,
|
|
||||||
title: 'Backup DB',
|
|
||||||
job: null
|
|
||||||
},
|
|
||||||
missing_files_check: {
|
|
||||||
run: checkForMissingFiles,
|
|
||||||
confirm: deleteMissingFiles,
|
|
||||||
title: 'Missing files check',
|
|
||||||
job: null
|
|
||||||
},
|
|
||||||
missing_db_records: {
|
|
||||||
run: files_api.importUnregisteredFiles,
|
|
||||||
title: 'Import missing DB records',
|
|
||||||
job: null
|
|
||||||
},
|
|
||||||
duplicate_files_check: {
|
|
||||||
run: checkForDuplicateFiles,
|
|
||||||
confirm: removeDuplicates,
|
|
||||||
title: 'Find duplicate files in DB',
|
|
||||||
job: null
|
|
||||||
},
|
|
||||||
youtubedl_update_check: {
|
|
||||||
run: youtubedl_api.checkForYoutubeDLUpdate,
|
|
||||||
confirm: youtubedl_api.updateYoutubeDL,
|
|
||||||
title: 'Update youtube-dl',
|
|
||||||
job: null
|
|
||||||
},
|
|
||||||
delete_old_files: {
|
|
||||||
run: checkForAutoDeleteFiles,
|
|
||||||
confirm: autoDeleteFiles,
|
|
||||||
title: 'Delete old files',
|
|
||||||
job: null
|
|
||||||
},
|
|
||||||
import_legacy_archives: {
|
|
||||||
run: archive_api.importArchives,
|
|
||||||
title: 'Import legacy archives',
|
|
||||||
job: null
|
|
||||||
},
|
|
||||||
rebuild_database: {
|
|
||||||
run: rebuildDB,
|
|
||||||
title: 'Rebuild database',
|
|
||||||
job: null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const defaultOptions = {
|
|
||||||
all: {
|
|
||||||
auto_confirm: false
|
|
||||||
},
|
|
||||||
delete_old_files: {
|
|
||||||
blacklist_files: false,
|
|
||||||
blacklist_subscription_files: false,
|
|
||||||
threshold_days: ''
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function scheduleJob(task_key, schedule) {
|
|
||||||
// schedule has to be converted from our format to one node-schedule can consume
|
|
||||||
let converted_schedule = null;
|
|
||||||
if (schedule['type'] === 'timestamp') {
|
|
||||||
converted_schedule = new Date(schedule['data']['timestamp']);
|
|
||||||
} else if (schedule['type'] === 'recurring') {
|
|
||||||
const dayOfWeek = schedule['data']['dayOfWeek'] != null ? schedule['data']['dayOfWeek'] : null;
|
|
||||||
const hour = schedule['data']['hour'] != null ? schedule['data']['hour'] : null;
|
|
||||||
const minute = schedule['data']['minute'] != null ? schedule['data']['minute'] : null;
|
|
||||||
converted_schedule = new scheduler.RecurrenceRule(null, null, null, dayOfWeek, hour, minute, undefined, schedule['data']['tz'] ? schedule['data']['tz'] : undefined);
|
|
||||||
} else {
|
|
||||||
logger.error(`Failed to schedule job '${task_key}' as the type '${schedule['type']}' is invalid.`)
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
return scheduler.scheduleJob(converted_schedule, async () => {
|
|
||||||
const task_state = await db_api.getRecord('tasks', {key: task_key});
|
|
||||||
if (task_state['running'] || task_state['confirming']) {
|
|
||||||
logger.verbose(`Skipping running task ${task_state['key']} as it is already in progress.`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// remove schedule if it's a one-time task
|
|
||||||
if (task_state['schedule']['type'] !== 'recurring') await db_api.updateRecord('tasks', {key: task_key}, {schedule: null});
|
|
||||||
// we're just "running" the task, any confirmation should be user-initiated
|
|
||||||
exports.executeRun(task_key);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (db_api.database_initialized) {
|
|
||||||
exports.setupTasks();
|
|
||||||
} else {
|
|
||||||
db_api.database_initialized_bs.subscribe(init => {
|
|
||||||
if (init) exports.setupTasks();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.setupTasks = async () => {
|
|
||||||
const tasks_keys = Object.keys(TASKS);
|
|
||||||
for (let i = 0; i < tasks_keys.length; i++) {
|
|
||||||
const task_key = tasks_keys[i];
|
|
||||||
const mergedDefaultOptions = Object.assign({}, defaultOptions['all'], defaultOptions[task_key] || {});
|
|
||||||
const task_in_db = await db_api.getRecord('tasks', {key: task_key});
|
|
||||||
if (!task_in_db) {
|
|
||||||
// insert task metadata into table if missing, eventually move title to UI
|
|
||||||
await db_api.insertRecordIntoTable('tasks', {
|
|
||||||
key: task_key,
|
|
||||||
title: TASKS[task_key]['title'],
|
|
||||||
last_ran: null,
|
|
||||||
last_confirmed: null,
|
|
||||||
running: false,
|
|
||||||
confirming: false,
|
|
||||||
data: null,
|
|
||||||
error: null,
|
|
||||||
schedule: null,
|
|
||||||
options: Object.assign({}, defaultOptions['all'], defaultOptions[task_key] || {})
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
// verify all options exist in task
|
|
||||||
for (const key of Object.keys(mergedDefaultOptions)) {
|
|
||||||
const option_key = `options.${key}`;
|
|
||||||
// Remove any potential mangled option keys (#861)
|
|
||||||
await db_api.removePropertyFromRecord('tasks', {key: task_key}, {[option_key]: true});
|
|
||||||
if (!(task_in_db.options && task_in_db.options.hasOwnProperty(key))) {
|
|
||||||
await db_api.updateRecord('tasks', {key: task_key}, {[option_key]: mergedDefaultOptions[key]}, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// reset task if necessary
|
|
||||||
await db_api.updateRecord('tasks', {key: task_key}, {running: false, confirming: false});
|
|
||||||
|
|
||||||
// schedule task and save job
|
|
||||||
if (task_in_db['schedule']) {
|
|
||||||
// prevent timestamp schedules from being set to the past
|
|
||||||
if (task_in_db['schedule']['type'] === 'timestamp' && task_in_db['schedule']['data']['timestamp'] < Date.now()) {
|
|
||||||
await db_api.updateRecord('tasks', {key: task_key}, {schedule: null});
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
TASKS[task_key]['job'] = scheduleJob(task_key, task_in_db['schedule']);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.executeTask = async (task_key) => {
|
|
||||||
if (!TASKS[task_key]) {
|
|
||||||
logger.error(`Task ${task_key} does not exist!`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
logger.verbose(`Executing task ${task_key}`);
|
|
||||||
await exports.executeRun(task_key);
|
|
||||||
if (!TASKS[task_key]['confirm']) return;
|
|
||||||
await exports.executeConfirm(task_key);
|
|
||||||
logger.verbose(`Finished executing ${task_key}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.executeRun = async (task_key) => {
|
|
||||||
logger.verbose(`Running task ${task_key}`);
|
|
||||||
await db_api.updateRecord('tasks', {key: task_key}, {error: null})
|
|
||||||
// don't set running to true when backup up DB as it will be stick "running" if restored
|
|
||||||
if (task_key !== 'backup_local_db') await db_api.updateRecord('tasks', {key: task_key}, {running: true});
|
|
||||||
const data = await TASKS[task_key].run();
|
|
||||||
await db_api.updateRecord('tasks', {key: task_key}, {data: TASKS[task_key]['confirm'] ? data : null, last_ran: Date.now()/1000, running: false});
|
|
||||||
logger.verbose(`Finished running task ${task_key}`);
|
|
||||||
const task_obj = await db_api.getRecord('tasks', {key: task_key});
|
|
||||||
await notifications_api.sendTaskNotification(task_obj, false);
|
|
||||||
|
|
||||||
if (task_obj['options'] && task_obj['options']['auto_confirm']) {
|
|
||||||
exports.executeConfirm(task_key);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.executeConfirm = async (task_key) => {
|
|
||||||
logger.verbose(`Confirming task ${task_key}`);
|
|
||||||
await db_api.updateRecord('tasks', {key: task_key}, {error: null})
|
|
||||||
if (!TASKS[task_key]['confirm']) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
await db_api.updateRecord('tasks', {key: task_key}, {confirming: true});
|
|
||||||
const task_obj = await db_api.getRecord('tasks', {key: task_key});
|
|
||||||
const data = task_obj['data'];
|
|
||||||
await TASKS[task_key].confirm(data);
|
|
||||||
await db_api.updateRecord('tasks', {key: task_key}, {confirming: false, last_confirmed: Date.now()/1000, data: null});
|
|
||||||
logger.verbose(`Finished confirming task ${task_key}`);
|
|
||||||
await notifications_api.sendTaskNotification(task_obj, false);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.updateTaskSchedule = async (task_key, schedule) => {
|
|
||||||
logger.verbose(`Updating schedule for task ${task_key}`);
|
|
||||||
await db_api.updateRecord('tasks', {key: task_key}, {schedule: schedule});
|
|
||||||
if (TASKS[task_key]['job']) {
|
|
||||||
TASKS[task_key]['job'].cancel();
|
|
||||||
TASKS[task_key]['job'] = null;
|
|
||||||
}
|
|
||||||
if (schedule) {
|
|
||||||
TASKS[task_key]['job'] = scheduleJob(task_key, schedule);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// missing files check
|
|
||||||
|
|
||||||
async function checkForMissingFiles() {
|
|
||||||
const missing_files = [];
|
|
||||||
const all_files = await db_api.getRecords('files');
|
|
||||||
for (let i = 0; i < all_files.length; i++) {
|
|
||||||
const file_to_check = all_files[i];
|
|
||||||
const file_exists = fs.existsSync(file_to_check['path']);
|
|
||||||
if (!file_exists) missing_files.push(file_to_check['uid']);
|
|
||||||
}
|
|
||||||
return {uids: missing_files};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function deleteMissingFiles(data) {
|
|
||||||
const uids = data['uids'];
|
|
||||||
for (let i = 0; i < uids.length; i++) {
|
|
||||||
const uid = uids[i];
|
|
||||||
await db_api.removeRecord('files', {uid: uid});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// duplicate files check
|
|
||||||
|
|
||||||
async function checkForDuplicateFiles() {
|
|
||||||
const duplicate_files = await db_api.findDuplicatesByKey('files', 'path');
|
|
||||||
const duplicate_uids = duplicate_files.map(duplicate_file => duplicate_file['uid']);
|
|
||||||
if (duplicate_uids && duplicate_uids.length > 0) {
|
|
||||||
return {uids: duplicate_uids};
|
|
||||||
}
|
|
||||||
return {uids: []};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function removeDuplicates(data) {
|
|
||||||
for (let i = 0; i < data['uids'].length; i++) {
|
|
||||||
await db_api.removeRecord('files', {uid: data['uids'][i]});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// auto delete files
|
|
||||||
|
|
||||||
async function checkForAutoDeleteFiles() {
|
|
||||||
const task_obj = await db_api.getRecord('tasks', {key: 'delete_old_files'});
|
|
||||||
if (!task_obj['options'] || !task_obj['options']['threshold_days']) {
|
|
||||||
const error_message = 'Failed to do delete check because no limit was set!';
|
|
||||||
logger.error(error_message);
|
|
||||||
await db_api.updateRecord('tasks', {key: 'delete_old_files'}, {error: error_message})
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
const delete_older_than_timestamp = Date.now() - task_obj['options']['threshold_days']*86400*1000;
|
|
||||||
const files = (await db_api.getRecords('files', {registered: {$lt: delete_older_than_timestamp}}))
|
|
||||||
const files_to_remove = files.map(file => {return {uid: file.uid, sub_id: file.sub_id}});
|
|
||||||
return {files_to_remove: files_to_remove};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function autoDeleteFiles(data) {
|
|
||||||
const task_obj = await db_api.getRecord('tasks', {key: 'delete_old_files'});
|
|
||||||
if (data['files_to_remove']) {
|
|
||||||
logger.info(`Removing ${data['files_to_remove'].length} old files!`);
|
|
||||||
for (let i = 0; i < data['files_to_remove'].length; i++) {
|
|
||||||
const file_to_remove = data['files_to_remove'][i];
|
|
||||||
await files_api.deleteFile(file_to_remove['uid'], task_obj['options']['blacklist_files'] || (file_to_remove['sub_id'] && file_to_remove['blacklist_subscription_files']));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function rebuildDB() {
|
|
||||||
await db_api.backupDB();
|
|
||||||
let subs_to_add = await guessSubscriptions(false);
|
|
||||||
subs_to_add = subs_to_add.concat(await guessSubscriptions(true));
|
|
||||||
const users_to_add = await guessUsers();
|
|
||||||
for (const user_to_add of users_to_add) {
|
|
||||||
const usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
|
|
||||||
|
|
||||||
const user_exists = await db_api.getRecord('users', {uid: user_to_add});
|
|
||||||
if (!user_exists) {
|
|
||||||
await auth_api.registerUser(user_to_add, user_to_add, 'password');
|
|
||||||
logger.info(`Regenerated user ${user_to_add}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const user_channel_subs = await guessSubscriptions(false, path.join(usersFileFolder, user_to_add), user_to_add);
|
|
||||||
const user_playlist_subs = await guessSubscriptions(true, path.join(usersFileFolder, user_to_add), user_to_add);
|
|
||||||
subs_to_add = subs_to_add.concat(user_channel_subs, user_playlist_subs);
|
|
||||||
}
|
|
||||||
|
|
||||||
for (const sub_to_add of subs_to_add) {
|
|
||||||
const sub_exists = !!(await subscriptions_api.getSubscriptionByName(sub_to_add['name'], sub_to_add['user_uid']));
|
|
||||||
// TODO: we shouldn't be creating this here
|
|
||||||
const new_sub = Object.assign({}, sub_to_add, {paused: true});
|
|
||||||
if (!sub_exists) {
|
|
||||||
await subscriptions_api.subscribe(new_sub, sub_to_add['user_uid'], true);
|
|
||||||
logger.info(`Regenerated subscription ${sub_to_add['name']}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.info(`Importing unregistered files`);
|
|
||||||
await files_api.importUnregisteredFiles();
|
|
||||||
}
|
|
||||||
|
|
||||||
const guessUsers = async () => {
|
|
||||||
const usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
|
|
||||||
const userPaths = await utils.getDirectoriesInDirectory(usersFileFolder);
|
|
||||||
return userPaths.map(userPath => path.basename(userPath));
|
|
||||||
}
|
|
||||||
|
|
||||||
const guessSubscriptions = async (isPlaylist, basePath = null) => {
|
|
||||||
const guessed_subs = [];
|
|
||||||
const subscriptionsFileFolder = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
|
||||||
|
|
||||||
const subsSubPath = basePath ? path.join(basePath, 'subscriptions') : subscriptionsFileFolder;
|
|
||||||
const subsPath = path.join(subsSubPath, isPlaylist ? 'playlists' : 'channels');
|
|
||||||
|
|
||||||
const subs = await utils.getDirectoriesInDirectory(subsPath);
|
|
||||||
for (const subPath of subs) {
|
|
||||||
const sub_backup_path = path.join(subPath, CONSTS.SUBSCRIPTION_BACKUP_PATH);
|
|
||||||
if (!fs.existsSync(sub_backup_path)) continue;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const sub_backup = fs.readJSONSync(sub_backup_path)
|
|
||||||
delete sub_backup['_id'];
|
|
||||||
guessed_subs.push(sub_backup);
|
|
||||||
} catch(err) {
|
|
||||||
logger.warn(`Failed to reimport subscription in path ${subPath}`)
|
|
||||||
logger.warn(err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return guessed_subs;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.TASKS = TASKS;
|
|
||||||
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
@@ -1,123 +0,0 @@
|
|||||||
const config_api = require('./config');
|
|
||||||
const logger = require('./logger');
|
|
||||||
|
|
||||||
const moment = require('moment');
|
|
||||||
const fs = require('fs-extra')
|
|
||||||
const path = require('path');
|
|
||||||
const { promisify } = require('util');
|
|
||||||
const child_process = require('child_process');
|
|
||||||
const commandExistsSync = require('command-exists').sync;
|
|
||||||
|
|
||||||
async function getCommentsForVOD(vodId) {
|
|
||||||
const exec = promisify(child_process.exec);
|
|
||||||
|
|
||||||
// Reject invalid params to prevent command injection attack
|
|
||||||
if (!vodId.match(/^[0-9a-z]+$/)) {
|
|
||||||
logger.error('VOD ID must be purely alphanumeric. Twitch chat download failed!');
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const is_windows = process.platform === 'win32';
|
|
||||||
const cliExt = is_windows ? '.exe' : ''
|
|
||||||
const cliPath = `TwitchDownloaderCLI${cliExt}`
|
|
||||||
|
|
||||||
if (!commandExistsSync(cliPath)) {
|
|
||||||
logger.error(`${cliPath} does not exist. Twitch chat download failed! Get it here: https://github.com/lay295/TwitchDownloader`);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await exec(`${cliPath} chatdownload -u ${vodId} -o appdata/${vodId}.json`, {stdio:[0,1,2]});
|
|
||||||
|
|
||||||
if (result['stderr']) {
|
|
||||||
logger.error(`Failed to download twitch comments for ${vodId}`);
|
|
||||||
logger.error(result['stderr']);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const temp_chat_path = path.join('appdata', `${vodId}.json`);
|
|
||||||
|
|
||||||
const raw_json = fs.readJSONSync(temp_chat_path);
|
|
||||||
const new_json = raw_json.comments.map(comment_obj => {
|
|
||||||
return {
|
|
||||||
timestamp: comment_obj.content_offset_seconds,
|
|
||||||
timestamp_str: convertTimestamp(comment_obj.content_offset_seconds),
|
|
||||||
name: comment_obj.commenter.name,
|
|
||||||
message: comment_obj.message.body,
|
|
||||||
user_color: comment_obj.message.user_color
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
fs.unlinkSync(temp_chat_path);
|
|
||||||
|
|
||||||
return new_json;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function getTwitchChatByFileID(id, type, user_uid, uuid, sub) {
|
|
||||||
const usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
|
|
||||||
const subscriptionsFileFolder = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
|
||||||
let file_path = null;
|
|
||||||
|
|
||||||
if (user_uid) {
|
|
||||||
if (sub) {
|
|
||||||
file_path = path.join(usersFileFolder, user_uid, 'subscriptions', sub.isPlaylist ? 'playlists' : 'channels', sub.name, `${id}.twitch_chat.json`);
|
|
||||||
} else {
|
|
||||||
file_path = path.join(usersFileFolder, user_uid, type, `${id}.twitch_chat.json`);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (sub) {
|
|
||||||
file_path = path.join(subscriptionsFileFolder, sub.isPlaylist ? 'playlists' : 'channels', sub.name, `${id}.twitch_chat.json`);
|
|
||||||
} else {
|
|
||||||
const typeFolder = config_api.getConfigItem(`ytdl_${type}_folder_path`);
|
|
||||||
file_path = path.join(typeFolder, `${id}.twitch_chat.json`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var chat_file = null;
|
|
||||||
if (fs.existsSync(file_path)) {
|
|
||||||
chat_file = fs.readJSONSync(file_path);
|
|
||||||
}
|
|
||||||
|
|
||||||
return chat_file;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function downloadTwitchChatByVODID(vodId, id, type, user_uid, sub, customFileFolderPath = null) {
|
|
||||||
const usersFileFolder = config_api.getConfigItem('ytdl_users_base_path');
|
|
||||||
const subscriptionsFileFolder = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
|
||||||
const chat = await getCommentsForVOD(vodId);
|
|
||||||
|
|
||||||
// save file if needed params are included
|
|
||||||
let file_path = null;
|
|
||||||
if (customFileFolderPath) {
|
|
||||||
file_path = path.join(customFileFolderPath, `${id}.twitch_chat.json`)
|
|
||||||
} else if (user_uid) {
|
|
||||||
if (sub) {
|
|
||||||
file_path = path.join(usersFileFolder, user_uid, 'subscriptions', sub.isPlaylist ? 'playlists' : 'channels', sub.name, `${id}.twitch_chat.json`);
|
|
||||||
} else {
|
|
||||||
file_path = path.join(usersFileFolder, user_uid, type, `${id}.twitch_chat.json`);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (sub) {
|
|
||||||
file_path = path.join(subscriptionsFileFolder, sub.isPlaylist ? 'playlists' : 'channels', sub.name, `${id}.twitch_chat.json`);
|
|
||||||
} else {
|
|
||||||
file_path = path.join(type, `${id}.twitch_chat.json`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (chat) fs.writeJSONSync(file_path, chat);
|
|
||||||
|
|
||||||
return chat;
|
|
||||||
}
|
|
||||||
|
|
||||||
const convertTimestamp = (timestamp) => moment.duration(timestamp, 'seconds')
|
|
||||||
.toISOString()
|
|
||||||
.replace(/P.*?T(?:(\d+?)H)?(?:(\d+?)M)?(?:(\d+).*?S)?/,
|
|
||||||
(_, ...ms) => {
|
|
||||||
const seg = v => v ? v.padStart(2, '0') : '00';
|
|
||||||
return `${seg(ms[0])}:${seg(ms[1])}:${seg(ms[2])}`;
|
|
||||||
});
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
getCommentsForVOD: getCommentsForVOD,
|
|
||||||
getTwitchChatByFileID: getTwitchChatByFileID,
|
|
||||||
downloadTwitchChatByVODID: downloadTwitchChatByVODID
|
|
||||||
}
|
|
||||||
501
backend/utils.js
501
backend/utils.js
@@ -1,19 +1,11 @@
|
|||||||
const fs = require('fs-extra');
|
var fs = require('fs-extra')
|
||||||
const path = require('path');
|
var path = require('path')
|
||||||
const ffmpeg = require('fluent-ffmpeg');
|
|
||||||
const archiver = require('archiver');
|
|
||||||
const fetch = require('node-fetch');
|
|
||||||
const ProgressBar = require('progress');
|
|
||||||
const winston = require('winston');
|
|
||||||
|
|
||||||
const config_api = require('./config');
|
const config_api = require('./config');
|
||||||
const logger = require('./logger');
|
|
||||||
const CONSTS = require('./consts');
|
|
||||||
|
|
||||||
const is_windows = process.platform === 'win32';
|
const is_windows = process.platform === 'win32';
|
||||||
|
|
||||||
// replaces .webm with appropriate extension
|
// replaces .webm with appropriate extension
|
||||||
exports.getTrueFileName = (unfixed_path, type, force_ext = null) => {
|
function getTrueFileName(unfixed_path, type) {
|
||||||
let fixed_path = unfixed_path;
|
let fixed_path = unfixed_path;
|
||||||
|
|
||||||
const new_ext = (type === 'audio' ? 'mp3' : 'mp4');
|
const new_ext = (type === 'audio' ? 'mp3' : 'mp4');
|
||||||
@@ -22,19 +14,19 @@ exports.getTrueFileName = (unfixed_path, type, force_ext = null) => {
|
|||||||
|
|
||||||
|
|
||||||
if (old_ext !== new_ext) {
|
if (old_ext !== new_ext) {
|
||||||
unfixed_parts[unfixed_parts.length-1] = force_ext || new_ext;
|
unfixed_parts[unfixed_parts.length-1] = new_ext;
|
||||||
fixed_path = unfixed_parts.join('.');
|
fixed_path = unfixed_parts.join('.');
|
||||||
}
|
}
|
||||||
return fixed_path;
|
return fixed_path;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getDownloadedFilesByType = async (basePath, type, full_metadata = false) => {
|
async function getDownloadedFilesByType(basePath, type) {
|
||||||
// return empty array if the path doesn't exist
|
// return empty array if the path doesn't exist
|
||||||
if (!(await fs.pathExists(basePath))) return [];
|
if (!(await fs.pathExists(basePath))) return [];
|
||||||
|
|
||||||
let files = [];
|
let files = [];
|
||||||
const ext = type === 'audio' ? 'mp3' : 'mp4';
|
const ext = type === 'audio' ? 'mp3' : 'mp4';
|
||||||
var located_files = await exports.recFindByExt(basePath, ext);
|
var located_files = await recFindByExt(basePath, ext);
|
||||||
for (let i = 0; i < located_files.length; i++) {
|
for (let i = 0; i < located_files.length; i++) {
|
||||||
let file = located_files[i];
|
let file = located_files[i];
|
||||||
var file_path = file.substring(basePath.includes('\\') ? basePath.length+1 : basePath.length, file.length);
|
var file_path = file.substring(basePath.includes('\\') ? basePath.length+1 : basePath.length, file.length);
|
||||||
@@ -42,61 +34,26 @@ exports.getDownloadedFilesByType = async (basePath, type, full_metadata = false)
|
|||||||
var stats = await fs.stat(file);
|
var stats = await fs.stat(file);
|
||||||
|
|
||||||
var id = file_path.substring(0, file_path.length-4);
|
var id = file_path.substring(0, file_path.length-4);
|
||||||
var jsonobj = await exports.getJSONByType(type, id, basePath);
|
var jsonobj = await getJSONByType(type, id, basePath);
|
||||||
if (!jsonobj) continue;
|
if (!jsonobj) continue;
|
||||||
if (full_metadata) {
|
var title = jsonobj.title;
|
||||||
jsonobj['id'] = id;
|
var url = jsonobj.webpage_url;
|
||||||
files.push(jsonobj);
|
var uploader = jsonobj.uploader;
|
||||||
continue;
|
var upload_date = jsonobj.upload_date;
|
||||||
}
|
upload_date = upload_date ? `${upload_date.substring(0, 4)}-${upload_date.substring(4, 6)}-${upload_date.substring(6, 8)}` : null;
|
||||||
var upload_date = exports.formatDateString(jsonobj.upload_date);
|
var thumbnail = jsonobj.thumbnail;
|
||||||
|
var duration = jsonobj.duration;
|
||||||
|
|
||||||
|
var size = stats.size;
|
||||||
|
|
||||||
var isaudio = type === 'audio';
|
var isaudio = type === 'audio';
|
||||||
var file_obj = new exports.File(id, jsonobj.title, jsonobj.thumbnail, isaudio, jsonobj.duration, jsonobj.webpage_url, jsonobj.uploader,
|
var file_obj = new File(id, title, thumbnail, isaudio, duration, url, uploader, size, file, upload_date);
|
||||||
stats.size, file, upload_date, jsonobj.description, jsonobj.view_count, jsonobj.height, jsonobj.abr);
|
|
||||||
files.push(file_obj);
|
files.push(file_obj);
|
||||||
}
|
}
|
||||||
return files;
|
return files;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.createContainerZipFile = async (file_name, container_file_objs) => {
|
function getJSONMp4(name, customPath, openReadPerms = false) {
|
||||||
const container_files_to_download = [];
|
|
||||||
for (let i = 0; i < container_file_objs.length; i++) {
|
|
||||||
const container_file_obj = container_file_objs[i];
|
|
||||||
container_files_to_download.push(container_file_obj.path);
|
|
||||||
}
|
|
||||||
return await exports.createZipFile(path.join('appdata', file_name + '.zip'), container_files_to_download);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.createZipFile = async (zip_file_path, file_paths) => {
|
|
||||||
let output = fs.createWriteStream(zip_file_path);
|
|
||||||
|
|
||||||
var archive = archiver('zip', {
|
|
||||||
gzip: true,
|
|
||||||
zlib: { level: 9 } // Sets the compression level.
|
|
||||||
});
|
|
||||||
|
|
||||||
archive.on('error', function(err) {
|
|
||||||
logger.error(err);
|
|
||||||
throw err;
|
|
||||||
});
|
|
||||||
|
|
||||||
// pipe archive data to the output file
|
|
||||||
archive.pipe(output);
|
|
||||||
|
|
||||||
for (let file_path of file_paths) {
|
|
||||||
const file_name = path.parse(file_path).base;
|
|
||||||
archive.file(file_path, {name: file_name})
|
|
||||||
}
|
|
||||||
|
|
||||||
await archive.finalize();
|
|
||||||
|
|
||||||
// wait a tiny bit for the zip to reload in fs
|
|
||||||
await exports.wait(100);
|
|
||||||
return zip_file_path;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getJSONMp4 = (name, customPath, openReadPerms = false) => {
|
|
||||||
var obj = null; // output
|
var obj = null; // output
|
||||||
if (!customPath) customPath = config_api.getConfigItem('ytdl_video_folder_path');
|
if (!customPath) customPath = config_api.getConfigItem('ytdl_video_folder_path');
|
||||||
var jsonPath = path.join(customPath, name + ".info.json");
|
var jsonPath = path.join(customPath, name + ".info.json");
|
||||||
@@ -111,7 +68,7 @@ exports.getJSONMp4 = (name, customPath, openReadPerms = false) => {
|
|||||||
return obj;
|
return obj;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getJSONMp3 = (name, customPath, openReadPerms = false) => {
|
function getJSONMp3(name, customPath, openReadPerms = false) {
|
||||||
var obj = null;
|
var obj = null;
|
||||||
if (!customPath) customPath = config_api.getConfigItem('ytdl_audio_folder_path');
|
if (!customPath) customPath = config_api.getConfigItem('ytdl_audio_folder_path');
|
||||||
var jsonPath = path.join(customPath, name + ".info.json");
|
var jsonPath = path.join(customPath, name + ".info.json");
|
||||||
@@ -128,31 +85,16 @@ exports.getJSONMp3 = (name, customPath, openReadPerms = false) => {
|
|||||||
return obj;
|
return obj;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getJSON = (file_path, type) => {
|
function getJSONByType(type, name, customPath, openReadPerms = false) {
|
||||||
const ext = type === 'audio' ? '.mp3' : '.mp4';
|
return type === 'audio' ? getJSONMp3(name, customPath, openReadPerms) : getJSONMp4(name, customPath, openReadPerms)
|
||||||
let obj = null;
|
|
||||||
var jsonPath = exports.removeFileExtension(file_path) + '.info.json';
|
|
||||||
var alternateJsonPath = exports.removeFileExtension(file_path) + `${ext}.info.json`;
|
|
||||||
if (fs.existsSync(jsonPath))
|
|
||||||
{
|
|
||||||
obj = JSON.parse(fs.readFileSync(jsonPath, 'utf8'));
|
|
||||||
} else if (fs.existsSync(alternateJsonPath)) {
|
|
||||||
obj = JSON.parse(fs.readFileSync(alternateJsonPath, 'utf8'));
|
|
||||||
}
|
|
||||||
else obj = 0;
|
|
||||||
return obj;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getJSONByType = (type, name, customPath, openReadPerms = false) => {
|
function getDownloadedThumbnail(name, type, customPath = null) {
|
||||||
return type === 'audio' ? exports.getJSONMp3(name, customPath, openReadPerms) : exports.getJSONMp4(name, customPath, openReadPerms)
|
if (!customPath) customPath = type === 'audio' ? config_api.getConfigItem('ytdl_audio_folder_path') : config_api.getConfigItem('ytdl_video_folder_path');
|
||||||
}
|
|
||||||
|
|
||||||
exports.getDownloadedThumbnail = (file_path) => {
|
let jpgPath = path.join(customPath, name + '.jpg');
|
||||||
const file_path_no_extension = exports.removeFileExtension(file_path);
|
let webpPath = path.join(customPath, name + '.webp');
|
||||||
|
let pngPath = path.join(customPath, name + '.png');
|
||||||
let jpgPath = file_path_no_extension + '.jpg';
|
|
||||||
let webpPath = file_path_no_extension + '.webp';
|
|
||||||
let pngPath = file_path_no_extension + '.png';
|
|
||||||
|
|
||||||
if (fs.existsSync(jpgPath))
|
if (fs.existsSync(jpgPath))
|
||||||
return jpgPath;
|
return jpgPath;
|
||||||
@@ -164,43 +106,39 @@ exports.getDownloadedThumbnail = (file_path) => {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getExpectedFileSize = (input_info_jsons) => {
|
function getExpectedFileSize(info_json) {
|
||||||
// treat single videos as arrays to have the file sizes checked/added to. makes the code cleaner
|
if (info_json['filesize']) {
|
||||||
const info_jsons = Array.isArray(input_info_jsons) ? input_info_jsons : [input_info_jsons];
|
return info_json['filesize'];
|
||||||
|
}
|
||||||
|
|
||||||
|
const formats = info_json['format_id'].split('+');
|
||||||
let expected_filesize = 0;
|
let expected_filesize = 0;
|
||||||
info_jsons.forEach(info_json => {
|
formats.forEach(format_id => {
|
||||||
const formats = info_json['format_id'].split('+');
|
if (!info_json.formats) return expected_filesize;
|
||||||
let individual_expected_filesize = 0;
|
info_json.formats.forEach(available_format => {
|
||||||
formats.forEach(format_id => {
|
if (available_format.format_id === format_id && available_format.filesize) {
|
||||||
if (info_json.formats !== undefined) {
|
expected_filesize += available_format.filesize;
|
||||||
info_json.formats.forEach(available_format => {
|
|
||||||
if (available_format.format_id === format_id && (available_format.filesize || available_format.filesize_approx)) {
|
|
||||||
individual_expected_filesize += (available_format.filesize ? available_format.filesize : available_format.filesize_approx);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
expected_filesize += individual_expected_filesize;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
return expected_filesize;
|
return expected_filesize;
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.fixVideoMetadataPerms = (file_path, type) => {
|
function fixVideoMetadataPerms(name, type, customPath = null) {
|
||||||
if (is_windows) return;
|
if (is_windows) return;
|
||||||
|
if (!customPath) customPath = type === 'audio' ? config_api.getConfigItem('ytdl_audio_folder_path')
|
||||||
|
: config_api.getConfigItem('ytdl_video_folder_path');
|
||||||
|
|
||||||
const ext = type === 'audio' ? '.mp3' : '.mp4';
|
const ext = type === 'audio' ? '.mp3' : '.mp4';
|
||||||
|
|
||||||
const file_path_no_extension = exports.removeFileExtension(file_path);
|
|
||||||
|
|
||||||
const files_to_fix = [
|
const files_to_fix = [
|
||||||
// JSONs
|
// JSONs
|
||||||
file_path_no_extension + '.info.json',
|
path.join(customPath, name + '.info.json'),
|
||||||
file_path_no_extension + ext + '.info.json',
|
path.join(customPath, name + ext + '.info.json'),
|
||||||
// Thumbnails
|
// Thumbnails
|
||||||
file_path_no_extension + '.webp',
|
path.join(customPath, name + '.webp'),
|
||||||
file_path_no_extension + '.jpg'
|
path.join(customPath, name + '.jpg')
|
||||||
];
|
];
|
||||||
|
|
||||||
for (const file of files_to_fix) {
|
for (const file of files_to_fix) {
|
||||||
@@ -209,39 +147,21 @@ exports.fixVideoMetadataPerms = (file_path, type) => {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.deleteJSONFile = (file_path, type) => {
|
function deleteJSONFile(name, type, customPath = null) {
|
||||||
|
if (!customPath) customPath = type === 'audio' ? config_api.getConfigItem('ytdl_audio_folder_path')
|
||||||
|
: config_api.getConfigItem('ytdl_video_folder_path');
|
||||||
|
|
||||||
const ext = type === 'audio' ? '.mp3' : '.mp4';
|
const ext = type === 'audio' ? '.mp3' : '.mp4';
|
||||||
|
let json_path = path.join(customPath, name + '.info.json');
|
||||||
const file_path_no_extension = exports.removeFileExtension(file_path);
|
let alternate_json_path = path.join(customPath, name + ext + '.info.json');
|
||||||
|
|
||||||
let json_path = file_path_no_extension + '.info.json';
|
|
||||||
let alternate_json_path = file_path_no_extension + ext + '.info.json';
|
|
||||||
|
|
||||||
if (fs.existsSync(json_path)) fs.unlinkSync(json_path);
|
if (fs.existsSync(json_path)) fs.unlinkSync(json_path);
|
||||||
if (fs.existsSync(alternate_json_path)) fs.unlinkSync(alternate_json_path);
|
if (fs.existsSync(alternate_json_path)) fs.unlinkSync(alternate_json_path);
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.durationStringToNumber = (dur_str) => {
|
|
||||||
if (typeof dur_str === 'number') return dur_str;
|
|
||||||
let num_sum = 0;
|
|
||||||
const dur_str_parts = dur_str.split(':');
|
|
||||||
for (let i = dur_str_parts.length-1; i >= 0; i--) {
|
|
||||||
num_sum += parseInt(dur_str_parts[i])*(60**(dur_str_parts.length-1-i));
|
|
||||||
}
|
|
||||||
return num_sum;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getMatchingCategoryFiles = (category, files) => {
|
async function recFindByExt(base,ext,files,result)
|
||||||
return files && files.filter(file => file.category && file.category.uid === category.uid);
|
{
|
||||||
}
|
|
||||||
|
|
||||||
exports.addUIDsToCategory = (category, files) => {
|
|
||||||
const files_that_match = exports.getMatchingCategoryFiles(category, files);
|
|
||||||
category['uids'] = files_that_match.map(file => file.uid);
|
|
||||||
return files_that_match;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.recFindByExt = async (base, ext, files, result, recursive = true) => {
|
|
||||||
files = files || (await fs.readdir(base))
|
files = files || (await fs.readdir(base))
|
||||||
result = result || []
|
result = result || []
|
||||||
|
|
||||||
@@ -249,8 +169,7 @@ exports.recFindByExt = async (base, ext, files, result, recursive = true) => {
|
|||||||
var newbase = path.join(base,file)
|
var newbase = path.join(base,file)
|
||||||
if ( (await fs.stat(newbase)).isDirectory() )
|
if ( (await fs.stat(newbase)).isDirectory() )
|
||||||
{
|
{
|
||||||
if (!recursive) continue;
|
result = await recFindByExt(newbase,ext,await fs.readdir(newbase),result)
|
||||||
result = await exports.recFindByExt(newbase,ext,await fs.readdir(newbase),result)
|
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@@ -263,305 +182,9 @@ exports.recFindByExt = async (base, ext, files, result, recursive = true) => {
|
|||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.removeFileExtension = (filename) => {
|
|
||||||
const filename_parts = filename.split('.');
|
|
||||||
filename_parts.splice(filename_parts.length - 1);
|
|
||||||
return filename_parts.join('.');
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.formatDateString = (date_string) => {
|
|
||||||
return date_string ? `${date_string.substring(0, 4)}-${date_string.substring(4, 6)}-${date_string.substring(6, 8)}` : 'N/A';
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.createEdgeNGrams = (str) => {
|
|
||||||
if (str && str.length > 3) {
|
|
||||||
const minGram = 3
|
|
||||||
const maxGram = str.length
|
|
||||||
|
|
||||||
return str.split(" ").reduce((ngrams, token) => {
|
|
||||||
if (token.length > minGram) {
|
|
||||||
for (let i = minGram; i <= maxGram && i <= token.length; ++i) {
|
|
||||||
ngrams = [...ngrams, token.substr(0, i)]
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
ngrams = [...ngrams, token]
|
|
||||||
}
|
|
||||||
return ngrams
|
|
||||||
}, []).join(" ")
|
|
||||||
}
|
|
||||||
|
|
||||||
return str
|
|
||||||
}
|
|
||||||
|
|
||||||
// ffmpeg helper functions
|
|
||||||
|
|
||||||
exports.cropFile = async (file_path, start, end, ext) => {
|
|
||||||
return new Promise(resolve => {
|
|
||||||
const temp_file_path = `${file_path}.cropped${ext}`;
|
|
||||||
let base_ffmpeg_call = ffmpeg(file_path);
|
|
||||||
if (start) {
|
|
||||||
base_ffmpeg_call = base_ffmpeg_call.seekOutput(start);
|
|
||||||
}
|
|
||||||
if (end) {
|
|
||||||
base_ffmpeg_call = base_ffmpeg_call.duration(end - start);
|
|
||||||
}
|
|
||||||
base_ffmpeg_call
|
|
||||||
.on('end', () => {
|
|
||||||
logger.verbose(`Cropping for '${file_path}' complete.`);
|
|
||||||
fs.unlinkSync(file_path);
|
|
||||||
fs.moveSync(temp_file_path, file_path);
|
|
||||||
resolve(true);
|
|
||||||
})
|
|
||||||
.on('error', (err) => {
|
|
||||||
logger.error(`Failed to crop ${file_path}.`);
|
|
||||||
logger.error(err);
|
|
||||||
resolve(false);
|
|
||||||
}).save(temp_file_path);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* setTimeout, but its a promise.
|
|
||||||
* @param {number} ms
|
|
||||||
*/
|
|
||||||
exports.wait = async (ms) => {
|
|
||||||
await new Promise(resolve => {
|
|
||||||
setTimeout(resolve, ms);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.checkExistsWithTimeout = async (filePath, timeout) => {
|
|
||||||
return new Promise(function (resolve, reject) {
|
|
||||||
|
|
||||||
var timer = setTimeout(function () {
|
|
||||||
if (watcher) watcher.close();
|
|
||||||
reject(new Error('File did not exists and was not created during the timeout.'));
|
|
||||||
}, timeout);
|
|
||||||
|
|
||||||
fs.access(filePath, fs.constants.R_OK, function (err) {
|
|
||||||
if (!err) {
|
|
||||||
clearTimeout(timer);
|
|
||||||
if (watcher) watcher.close();
|
|
||||||
resolve(true);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
var dir = path.dirname(filePath);
|
|
||||||
var basename = path.basename(filePath);
|
|
||||||
var watcher = fs.watch(dir, function (eventType, filename) {
|
|
||||||
if (eventType === 'rename' && filename === basename) {
|
|
||||||
clearTimeout(timer);
|
|
||||||
if (watcher) watcher.close();
|
|
||||||
resolve(true);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// helper function to download file using fetch
|
|
||||||
exports.fetchFile = async (url, path, file_label) => {
|
|
||||||
var len = null;
|
|
||||||
const res = await fetch(url);
|
|
||||||
|
|
||||||
len = parseInt(res.headers.get("Content-Length"), 10);
|
|
||||||
|
|
||||||
var bar = new ProgressBar(` Downloading ${file_label} [:bar] :percent :etas`, {
|
|
||||||
complete: '=',
|
|
||||||
incomplete: ' ',
|
|
||||||
width: 20,
|
|
||||||
total: len
|
|
||||||
});
|
|
||||||
const fileStream = fs.createWriteStream(path);
|
|
||||||
await new Promise((resolve, reject) => {
|
|
||||||
res.body.pipe(fileStream);
|
|
||||||
res.body.on("error", (err) => {
|
|
||||||
reject(err);
|
|
||||||
});
|
|
||||||
res.body.on('data', function (chunk) {
|
|
||||||
bar.tick(chunk.length);
|
|
||||||
});
|
|
||||||
fileStream.on("finish", function() {
|
|
||||||
resolve();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.restartServer = async (is_update = false) => {
|
|
||||||
logger.info(`${is_update ? 'Update complete! ' : ''}Restarting server...`);
|
|
||||||
|
|
||||||
// the following line restarts the server through pm2
|
|
||||||
fs.writeFileSync(`restart${is_update ? '_update' : '_general'}.json`, 'internal use only');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
// adds or replaces args according to the following rules:
|
|
||||||
// - if it already exists and has value, then replace both arg and value
|
|
||||||
// - if already exists and doesn't have value, ignore
|
|
||||||
// - if it doesn't exist and has value, add both arg and value
|
|
||||||
// - if it doesn't exist and doesn't have value, add arg
|
|
||||||
exports.injectArgs = (original_args, new_args) => {
|
|
||||||
const updated_args = original_args.slice();
|
|
||||||
try {
|
|
||||||
for (let i = 0; i < new_args.length; i++) {
|
|
||||||
const new_arg = new_args[i];
|
|
||||||
if (!new_arg.startsWith('-') && !new_arg.startsWith('--') && i > 0 && original_args.includes(new_args[i - 1])) continue;
|
|
||||||
|
|
||||||
if (CONSTS.YTDL_ARGS_WITH_VALUES.has(new_arg)) {
|
|
||||||
if (original_args.includes(new_arg)) {
|
|
||||||
const original_index = original_args.indexOf(new_arg);
|
|
||||||
updated_args.splice(original_index, 2);
|
|
||||||
}
|
|
||||||
|
|
||||||
updated_args.push(new_arg, new_args[i + 1]);
|
|
||||||
i++; // we need to skip the arg value on the next loop
|
|
||||||
} else {
|
|
||||||
if (!original_args.includes(new_arg)) {
|
|
||||||
updated_args.push(new_arg);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
logger.warn(err);
|
|
||||||
logger.warn(`Failed to inject args (${new_args}) into (${original_args})`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return updated_args;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.filterArgs = (args, args_to_remove) => {
|
|
||||||
return args.filter(x => !args_to_remove.includes(x));
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.searchObjectByString = (o, s) => {
|
|
||||||
s = s.replace(/\[(\w+)\]/g, '.$1'); // convert indexes to properties
|
|
||||||
s = s.replace(/^\./, ''); // strip a leading dot
|
|
||||||
var a = s.split('.');
|
|
||||||
for (var i = 0, n = a.length; i < n; ++i) {
|
|
||||||
var k = a[i];
|
|
||||||
if (k in o) {
|
|
||||||
o = o[k];
|
|
||||||
} else {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return o;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.stripPropertiesFromObject = (obj, properties, whitelist = false) => {
|
|
||||||
if (!whitelist) {
|
|
||||||
const new_obj = JSON.parse(JSON.stringify(obj));
|
|
||||||
for (let field of properties) {
|
|
||||||
delete new_obj[field];
|
|
||||||
}
|
|
||||||
return new_obj;
|
|
||||||
}
|
|
||||||
|
|
||||||
const new_obj = {};
|
|
||||||
for (let field of properties) {
|
|
||||||
new_obj[field] = obj[field];
|
|
||||||
}
|
|
||||||
return new_obj;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getArchiveFolder = (type, user_uid = null, sub = null) => {
|
|
||||||
const usersFolderPath = config_api.getConfigItem('ytdl_users_base_path');
|
|
||||||
const subsFolderPath = config_api.getConfigItem('ytdl_subscriptions_base_path');
|
|
||||||
|
|
||||||
if (user_uid) {
|
|
||||||
if (sub) {
|
|
||||||
return path.join(usersFolderPath, user_uid, 'subscriptions', 'archives', sub.name);
|
|
||||||
} else {
|
|
||||||
return path.join(usersFolderPath, user_uid, type, 'archives');
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
if (sub) {
|
|
||||||
return path.join(subsFolderPath, 'archives', sub.name);
|
|
||||||
} else {
|
|
||||||
return path.join('appdata', 'archives');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getBaseURL = () => {
|
|
||||||
return `${config_api.getConfigItem('ytdl_url')}:${config_api.getConfigItem('ytdl_port')}`
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.updateLoggerLevel = (new_logger_level) => {
|
|
||||||
const possible_levels = ['error', 'warn', 'info', 'verbose', 'debug'];
|
|
||||||
if (!possible_levels.includes(new_logger_level)) {
|
|
||||||
logger.error(`${new_logger_level} is not a valid logger level! Choose one of the following: ${possible_levels.join(', ')}.`)
|
|
||||||
new_logger_level = 'info';
|
|
||||||
}
|
|
||||||
logger.level = new_logger_level;
|
|
||||||
winston.loggers.get('console').level = new_logger_level;
|
|
||||||
logger.transports[2].level = new_logger_level;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.convertFlatObjectToNestedObject = (obj) => {
|
|
||||||
const result = {};
|
|
||||||
for (const key in obj) {
|
|
||||||
const nestedKeys = key.split('.');
|
|
||||||
let currentObj = result;
|
|
||||||
for (let i = 0; i < nestedKeys.length; i++) {
|
|
||||||
if (i === nestedKeys.length - 1) {
|
|
||||||
currentObj[nestedKeys[i]] = obj[key];
|
|
||||||
} else {
|
|
||||||
currentObj[nestedKeys[i]] = currentObj[nestedKeys[i]] || {};
|
|
||||||
currentObj = currentObj[nestedKeys[i]];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getDirectoriesInDirectory = async (basePath) => {
|
|
||||||
try {
|
|
||||||
const files = await fs.readdir(basePath, { withFileTypes: true });
|
|
||||||
return files
|
|
||||||
.filter((file) => file.isDirectory())
|
|
||||||
.map((file) => path.join(basePath, file.name));
|
|
||||||
} catch (err) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.parseOutputJSON = (output, err) => {
|
|
||||||
let split_output = [];
|
|
||||||
// const output_jsons = [];
|
|
||||||
if (err && !output) {
|
|
||||||
if (!err.stderr.includes('This video is unavailable') && !err.stderr.includes('Private video')) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
logger.info('An error was encountered with at least one video, backup method will be used.')
|
|
||||||
try {
|
|
||||||
split_output = err.stdout.split(/\r\n|\r|\n/);
|
|
||||||
} catch (e) {
|
|
||||||
logger.error('Backup method failed. See error below:');
|
|
||||||
logger.error(e);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
} else if (output.length === 0 || (output.length === 1 && output[0].length === 0)) {
|
|
||||||
// output is '' or ['']
|
|
||||||
return [];
|
|
||||||
} else {
|
|
||||||
for (const output_item of output) {
|
|
||||||
// we have to do this because sometimes there will be leading characters before the actual json
|
|
||||||
const start_idx = output_item.indexOf('{"');
|
|
||||||
const clean_output = output_item.slice(start_idx, output_item.length);
|
|
||||||
split_output.push(clean_output);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
return split_output.map(split_output_str => JSON.parse(split_output_str));
|
|
||||||
} catch(e) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// objects
|
// objects
|
||||||
|
|
||||||
function File(id, title, thumbnailURL, isAudio, duration, url, uploader, size, path, upload_date, description, view_count, height, abr) {
|
function File(id, title, thumbnailURL, isAudio, duration, url, uploader, size, path, upload_date) {
|
||||||
this.id = id;
|
this.id = id;
|
||||||
this.title = title;
|
this.title = title;
|
||||||
this.thumbnailURL = thumbnailURL;
|
this.thumbnailURL = thumbnailURL;
|
||||||
@@ -572,11 +195,17 @@ function File(id, title, thumbnailURL, isAudio, duration, url, uploader, size, p
|
|||||||
this.size = size;
|
this.size = size;
|
||||||
this.path = path;
|
this.path = path;
|
||||||
this.upload_date = upload_date;
|
this.upload_date = upload_date;
|
||||||
this.description = description;
|
|
||||||
this.view_count = view_count;
|
|
||||||
this.height = height;
|
|
||||||
this.abr = abr;
|
|
||||||
this.favorite = false;
|
|
||||||
}
|
}
|
||||||
exports.File = File;
|
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
getJSONMp3: getJSONMp3,
|
||||||
|
getJSONMp4: getJSONMp4,
|
||||||
|
getTrueFileName: getTrueFileName,
|
||||||
|
getDownloadedThumbnail: getDownloadedThumbnail,
|
||||||
|
getExpectedFileSize: getExpectedFileSize,
|
||||||
|
fixVideoMetadataPerms: fixVideoMetadataPerms,
|
||||||
|
deleteJSONFile: deleteJSONFile,
|
||||||
|
getDownloadedFilesByType: getDownloadedFilesByType,
|
||||||
|
recFindByExt: recFindByExt,
|
||||||
|
File: File
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,159 +0,0 @@
|
|||||||
const fs = require('fs-extra');
|
|
||||||
const fetch = require('node-fetch');
|
|
||||||
const path = require('path');
|
|
||||||
const execa = require('execa');
|
|
||||||
const kill = require('tree-kill');
|
|
||||||
|
|
||||||
const logger = require('./logger');
|
|
||||||
const utils = require('./utils');
|
|
||||||
const CONSTS = require('./consts');
|
|
||||||
const config_api = require('./config.js');
|
|
||||||
|
|
||||||
const is_windows = process.platform === 'win32';
|
|
||||||
|
|
||||||
exports.youtubedl_forks = {
|
|
||||||
'youtube-dl': {
|
|
||||||
'download_url': 'https://github.com/ytdl-org/youtube-dl/releases/latest/download/youtube-dl',
|
|
||||||
'tags_url': 'https://api.github.com/repos/ytdl-org/youtube-dl/tags'
|
|
||||||
},
|
|
||||||
'youtube-dlc': {
|
|
||||||
'download_url': 'https://github.com/blackjack4494/yt-dlc/releases/latest/download/youtube-dlc',
|
|
||||||
'tags_url': 'https://api.github.com/repos/blackjack4494/yt-dlc/tags'
|
|
||||||
},
|
|
||||||
'yt-dlp': {
|
|
||||||
'download_url': 'https://github.com/yt-dlp/yt-dlp/releases/latest/download/yt-dlp',
|
|
||||||
'tags_url': 'https://api.github.com/repos/yt-dlp/yt-dlp/tags'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.runYoutubeDL = async (url, args, customDownloadHandler = null) => {
|
|
||||||
const output_file_path = getYoutubeDLPath();
|
|
||||||
if (!fs.existsSync(output_file_path)) await exports.checkForYoutubeDLUpdate();
|
|
||||||
let callback = null;
|
|
||||||
let child_process = null;
|
|
||||||
if (customDownloadHandler) {
|
|
||||||
callback = runYoutubeDLCustom(url, args, customDownloadHandler);
|
|
||||||
} else {
|
|
||||||
({callback, child_process} = await runYoutubeDLProcess(url, args));
|
|
||||||
}
|
|
||||||
|
|
||||||
return {child_process, callback};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Run youtube-dl directly (not cancellable)
|
|
||||||
const runYoutubeDLCustom = async (url, args, customDownloadHandler) => {
|
|
||||||
const downloadHandler = customDownloadHandler;
|
|
||||||
return new Promise(resolve => {
|
|
||||||
downloadHandler(url, args, {maxBuffer: Infinity}, async function(err, output) {
|
|
||||||
const parsed_output = utils.parseOutputJSON(output, err);
|
|
||||||
resolve({parsed_output, err});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Run youtube-dl in a subprocess (cancellable)
|
|
||||||
const runYoutubeDLProcess = async (url, args, youtubedl_fork = config_api.getConfigItem('ytdl_default_downloader')) => {
|
|
||||||
const youtubedl_path = getYoutubeDLPath(youtubedl_fork);
|
|
||||||
const binary_exists = fs.existsSync(youtubedl_path);
|
|
||||||
if (!binary_exists) {
|
|
||||||
const err = `Could not find path for ${youtubedl_fork} at ${youtubedl_path}`;
|
|
||||||
logger.error(err);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const child_process = execa(getYoutubeDLPath(youtubedl_fork), [url, ...args], {maxBuffer: Infinity});
|
|
||||||
const callback = new Promise(async resolve => {
|
|
||||||
try {
|
|
||||||
const {stdout, stderr} = await child_process;
|
|
||||||
const parsed_output = utils.parseOutputJSON(stdout.trim().split(/\r?\n/), stderr);
|
|
||||||
resolve({parsed_output, err: stderr});
|
|
||||||
} catch (e) {
|
|
||||||
resolve({parsed_output: null, err: e})
|
|
||||||
}
|
|
||||||
});
|
|
||||||
return {child_process, callback}
|
|
||||||
}
|
|
||||||
|
|
||||||
function getYoutubeDLPath(youtubedl_fork = config_api.getConfigItem('ytdl_default_downloader')) {
|
|
||||||
const binary_file_name = youtubedl_fork + (is_windows ? '.exe' : '');
|
|
||||||
const binary_path = path.join('appdata', 'bin', binary_file_name);
|
|
||||||
return binary_path;
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.killYoutubeDLProcess = async (child_process) => {
|
|
||||||
kill(child_process.pid, 'SIGKILL');
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.checkForYoutubeDLUpdate = async () => {
|
|
||||||
const selected_fork = config_api.getConfigItem('ytdl_default_downloader');
|
|
||||||
const output_file_path = getYoutubeDLPath();
|
|
||||||
// get current version
|
|
||||||
let current_app_details_exists = fs.existsSync(CONSTS.DETAILS_BIN_PATH);
|
|
||||||
if (!current_app_details_exists[selected_fork]) {
|
|
||||||
logger.warn(`Failed to get youtube-dl binary details at location '${CONSTS.DETAILS_BIN_PATH}'. Generating file...`);
|
|
||||||
updateDetailsJSON(CONSTS.OUTDATED_YOUTUBEDL_VERSION, selected_fork, output_file_path);
|
|
||||||
}
|
|
||||||
const current_app_details = JSON.parse(fs.readFileSync(CONSTS.DETAILS_BIN_PATH));
|
|
||||||
const current_version = current_app_details[selected_fork]['version'];
|
|
||||||
const current_fork = current_app_details[selected_fork]['downloader'];
|
|
||||||
|
|
||||||
const latest_version = await exports.getLatestUpdateVersion(selected_fork);
|
|
||||||
// if the binary does not exist, or default_downloader doesn't match existing fork, or if the fork has been updated, redownload
|
|
||||||
// TODO: don't redownload if fork already exists
|
|
||||||
if (!fs.existsSync(output_file_path) || current_fork !== selected_fork || !current_version || current_version !== latest_version) {
|
|
||||||
logger.warn(`Updating ${selected_fork} binary to '${output_file_path}', downloading...`);
|
|
||||||
await exports.updateYoutubeDL(latest_version);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.updateYoutubeDL = async (latest_update_version, custom_output_path = null) => {
|
|
||||||
await fs.ensureDir(path.join('appdata', 'bin'));
|
|
||||||
const default_downloader = config_api.getConfigItem('ytdl_default_downloader');
|
|
||||||
await downloadLatestYoutubeDLBinaryGeneric(default_downloader, latest_update_version, custom_output_path);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function downloadLatestYoutubeDLBinaryGeneric(youtubedl_fork, new_version, custom_output_path = null) {
|
|
||||||
const file_ext = is_windows ? '.exe' : '';
|
|
||||||
|
|
||||||
// build the URL
|
|
||||||
const download_url = `${exports.youtubedl_forks[youtubedl_fork]['download_url']}${file_ext}`;
|
|
||||||
const output_path = custom_output_path || getYoutubeDLPath(youtubedl_fork);
|
|
||||||
|
|
||||||
await utils.fetchFile(download_url, output_path, `${youtubedl_fork} ${new_version}`);
|
|
||||||
fs.chmod(output_path, 0o777);
|
|
||||||
|
|
||||||
updateDetailsJSON(new_version, youtubedl_fork, output_path);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.getLatestUpdateVersion = async (youtubedl_fork) => {
|
|
||||||
const tags_url = exports.youtubedl_forks[youtubedl_fork]['tags_url'];
|
|
||||||
return new Promise(resolve => {
|
|
||||||
fetch(tags_url, {method: 'Get'})
|
|
||||||
.then(async res => res.json())
|
|
||||||
.then(async (json) => {
|
|
||||||
if (!json || !json[0]) {
|
|
||||||
logger.error(`Failed to check ${youtubedl_fork} version for an update.`)
|
|
||||||
resolve(null);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const latest_update_version = json[0]['name'];
|
|
||||||
resolve(latest_update_version);
|
|
||||||
})
|
|
||||||
.catch(err => {
|
|
||||||
logger.error(`Failed to check ${youtubedl_fork} version for an update.`)
|
|
||||||
logger.error(err);
|
|
||||||
resolve(null);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function updateDetailsJSON(new_version, fork, output_path) {
|
|
||||||
const file_ext = is_windows ? '.exe' : '';
|
|
||||||
const details_json = fs.existsSync(CONSTS.DETAILS_BIN_PATH) ? fs.readJSONSync(CONSTS.DETAILS_BIN_PATH) : {};
|
|
||||||
if (!details_json[fork]) details_json[fork] = {};
|
|
||||||
const fork_json = details_json[fork];
|
|
||||||
fork_json['version'] = new_version;
|
|
||||||
fork_json['downloader'] = fork;
|
|
||||||
fork_json['path'] = output_path; // unused
|
|
||||||
fork_json['exec'] = fork + file_ext; // unused
|
|
||||||
fs.writeJSONSync(CONSTS.DETAILS_BIN_PATH, details_json);
|
|
||||||
}
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
# Patterns to ignore when building packages.
|
|
||||||
# This supports shell glob matching, relative path matching, and
|
|
||||||
# negation (prefixed with !). Only one pattern per line.
|
|
||||||
.DS_Store
|
|
||||||
# Common VCS dirs
|
|
||||||
.git/
|
|
||||||
.gitignore
|
|
||||||
.bzr/
|
|
||||||
.bzrignore
|
|
||||||
.hg/
|
|
||||||
.hgignore
|
|
||||||
.svn/
|
|
||||||
# Common backup files
|
|
||||||
*.swp
|
|
||||||
*.bak
|
|
||||||
*.tmp
|
|
||||||
*.orig
|
|
||||||
*~
|
|
||||||
# Various IDEs
|
|
||||||
.project
|
|
||||||
.idea/
|
|
||||||
*.tmproj
|
|
||||||
.vscode/
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
apiVersion: v2
|
|
||||||
name: youtubedl-material
|
|
||||||
description: A Helm chart for https://github.com/Tzahi12345/YoutubeDL-Material
|
|
||||||
|
|
||||||
# A chart can be either an 'application' or a 'library' chart.
|
|
||||||
#
|
|
||||||
# Application charts are a collection of templates that can be packaged into versioned archives
|
|
||||||
# to be deployed.
|
|
||||||
#
|
|
||||||
# Library charts provide useful utilities or functions for the chart developer. They're included as
|
|
||||||
# a dependency of application charts to inject those utilities and functions into the rendering
|
|
||||||
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
|
|
||||||
type: application
|
|
||||||
|
|
||||||
# This is the chart version. This version number should be incremented each time you make changes
|
|
||||||
# to the chart and its templates, including the app version.
|
|
||||||
# Versions are expected to follow Semantic Versioning (https://semver.org/)
|
|
||||||
version: 0.2.0
|
|
||||||
|
|
||||||
# This is the version number of the application being deployed. This version number should be
|
|
||||||
# incremented each time you make changes to the application. Versions are not expected to
|
|
||||||
# follow Semantic Versioning. They should reflect the version the application is using.
|
|
||||||
# It is recommended to use it with quotes.
|
|
||||||
appVersion: "4.3.2"
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
1. Get the application URL by running these commands:
|
|
||||||
{{- if .Values.ingress.enabled }}
|
|
||||||
{{- range $host := .Values.ingress.hosts }}
|
|
||||||
{{- range .paths }}
|
|
||||||
http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
{{- else if contains "NodePort" .Values.service.type }}
|
|
||||||
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "youtubedl-material.fullname" . }})
|
|
||||||
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
|
|
||||||
echo http://$NODE_IP:$NODE_PORT
|
|
||||||
{{- else if contains "LoadBalancer" .Values.service.type }}
|
|
||||||
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
|
|
||||||
You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "youtubedl-material.fullname" . }}'
|
|
||||||
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "youtubedl-material.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
|
|
||||||
echo http://$SERVICE_IP:{{ .Values.service.port }}
|
|
||||||
{{- else if contains "ClusterIP" .Values.service.type }}
|
|
||||||
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "youtubedl-material.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
|
|
||||||
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
|
|
||||||
echo "Visit http://127.0.0.1:8080 to use your application"
|
|
||||||
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
|
|
||||||
{{- end }}
|
|
||||||
@@ -1,62 +0,0 @@
|
|||||||
{{/*
|
|
||||||
Expand the name of the chart.
|
|
||||||
*/}}
|
|
||||||
{{- define "youtubedl-material.name" -}}
|
|
||||||
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
|
|
||||||
{{- end }}
|
|
||||||
|
|
||||||
{{/*
|
|
||||||
Create a default fully qualified app name.
|
|
||||||
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
|
|
||||||
If release name contains chart name it will be used as a full name.
|
|
||||||
*/}}
|
|
||||||
{{- define "youtubedl-material.fullname" -}}
|
|
||||||
{{- if .Values.fullnameOverride }}
|
|
||||||
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
|
|
||||||
{{- else }}
|
|
||||||
{{- $name := default .Chart.Name .Values.nameOverride }}
|
|
||||||
{{- if contains $name .Release.Name }}
|
|
||||||
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
|
|
||||||
{{- else }}
|
|
||||||
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
|
|
||||||
{{/*
|
|
||||||
Create chart name and version as used by the chart label.
|
|
||||||
*/}}
|
|
||||||
{{- define "youtubedl-material.chart" -}}
|
|
||||||
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
|
|
||||||
{{- end }}
|
|
||||||
|
|
||||||
{{/*
|
|
||||||
Common labels
|
|
||||||
*/}}
|
|
||||||
{{- define "youtubedl-material.labels" -}}
|
|
||||||
helm.sh/chart: {{ include "youtubedl-material.chart" . }}
|
|
||||||
{{ include "youtubedl-material.selectorLabels" . }}
|
|
||||||
{{- if .Chart.AppVersion }}
|
|
||||||
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
|
|
||||||
{{- end }}
|
|
||||||
app.kubernetes.io/managed-by: {{ .Release.Service }}
|
|
||||||
{{- end }}
|
|
||||||
|
|
||||||
{{/*
|
|
||||||
Selector labels
|
|
||||||
*/}}
|
|
||||||
{{- define "youtubedl-material.selectorLabels" -}}
|
|
||||||
app.kubernetes.io/name: {{ include "youtubedl-material.name" . }}
|
|
||||||
app.kubernetes.io/instance: {{ .Release.Name }}
|
|
||||||
{{- end }}
|
|
||||||
|
|
||||||
{{/*
|
|
||||||
Create the name of the service account to use
|
|
||||||
*/}}
|
|
||||||
{{- define "youtubedl-material.serviceAccountName" -}}
|
|
||||||
{{- if .Values.serviceAccount.create }}
|
|
||||||
{{- default (include "youtubedl-material.fullname" .) .Values.serviceAccount.name }}
|
|
||||||
{{- else }}
|
|
||||||
{{- default "default" .Values.serviceAccount.name }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
{{- if and .Values.persistence.appdata.enabled (not .Values.persistence.appdata.existingClaim) }}
|
|
||||||
kind: PersistentVolumeClaim
|
|
||||||
apiVersion: v1
|
|
||||||
metadata:
|
|
||||||
name: {{ template "youtubedl-material.fullname" . }}-appdata
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.labels" . | nindent 4 }}
|
|
||||||
spec:
|
|
||||||
accessModes:
|
|
||||||
- {{ .Values.persistence.appdata.accessMode | quote }}
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
storage: {{ .Values.persistence.appdata.size | quote }}
|
|
||||||
{{- if .Values.persistence.appdata.storageClass }}
|
|
||||||
{{- if (eq "-" .Values.persistence.appdata.storageClass) }}
|
|
||||||
storageClassName: ""
|
|
||||||
{{- else }}
|
|
||||||
storageClassName: "{{ .Values.persistence.appdata.storageClass }}"
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end -}}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
{{- if and .Values.persistence.audio.enabled (not .Values.persistence.audio.existingClaim) }}
|
|
||||||
kind: PersistentVolumeClaim
|
|
||||||
apiVersion: v1
|
|
||||||
metadata:
|
|
||||||
name: {{ template "youtubedl-material.fullname" . }}-audio
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.labels" . | nindent 4 }}
|
|
||||||
spec:
|
|
||||||
accessModes:
|
|
||||||
- {{ .Values.persistence.audio.accessMode | quote }}
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
storage: {{ .Values.persistence.audio.size | quote }}
|
|
||||||
{{- if .Values.persistence.audio.storageClass }}
|
|
||||||
{{- if (eq "-" .Values.persistence.audio.storageClass) }}
|
|
||||||
storageClassName: ""
|
|
||||||
{{- else }}
|
|
||||||
storageClassName: "{{ .Values.persistence.audio.storageClass }}"
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end -}}
|
|
||||||
@@ -1,121 +0,0 @@
|
|||||||
apiVersion: apps/v1
|
|
||||||
kind: Deployment
|
|
||||||
metadata:
|
|
||||||
name: {{ include "youtubedl-material.fullname" . }}
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.labels" . | nindent 4 }}
|
|
||||||
spec:
|
|
||||||
replicas: 1
|
|
||||||
selector:
|
|
||||||
matchLabels:
|
|
||||||
{{- include "youtubedl-material.selectorLabels" . | nindent 6 }}
|
|
||||||
template:
|
|
||||||
metadata:
|
|
||||||
{{- with .Values.podAnnotations }}
|
|
||||||
annotations:
|
|
||||||
{{- toYaml . | nindent 8 }}
|
|
||||||
{{- end }}
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.selectorLabels" . | nindent 8 }}
|
|
||||||
spec:
|
|
||||||
{{- with .Values.imagePullSecrets }}
|
|
||||||
imagePullSecrets:
|
|
||||||
{{- toYaml . | nindent 8 }}
|
|
||||||
{{- end }}
|
|
||||||
serviceAccountName: {{ include "youtubedl-material.serviceAccountName" . }}
|
|
||||||
securityContext:
|
|
||||||
{{- toYaml .Values.podSecurityContext | nindent 8 }}
|
|
||||||
containers:
|
|
||||||
- name: {{ .Chart.Name }}
|
|
||||||
securityContext:
|
|
||||||
{{- toYaml .Values.securityContext | nindent 12 }}
|
|
||||||
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
|
|
||||||
imagePullPolicy: {{ .Values.image.pullPolicy }}
|
|
||||||
ports:
|
|
||||||
- name: http
|
|
||||||
containerPort: 17442
|
|
||||||
protocol: TCP
|
|
||||||
livenessProbe:
|
|
||||||
httpGet:
|
|
||||||
path: /
|
|
||||||
port: http
|
|
||||||
readinessProbe:
|
|
||||||
httpGet:
|
|
||||||
path: /
|
|
||||||
port: http
|
|
||||||
resources:
|
|
||||||
{{- toYaml .Values.resources | nindent 12 }}
|
|
||||||
volumeMounts:
|
|
||||||
- mountPath: /app/appdata
|
|
||||||
name: appdata
|
|
||||||
{{- if .Values.persistence.appdata.subPath }}
|
|
||||||
subPath: {{ .Values.persistence.appdata.subPath }}
|
|
||||||
{{- end }}
|
|
||||||
- mountPath: /app/audio
|
|
||||||
name: audio
|
|
||||||
{{- if .Values.persistence.audio.subPath }}
|
|
||||||
subPath: {{ .Values.persistence.audio.subPath }}
|
|
||||||
{{- end }}
|
|
||||||
- mountPath: /app/video
|
|
||||||
name: video
|
|
||||||
{{- if .Values.persistence.video.subPath }}
|
|
||||||
subPath: {{ .Values.persistence.video.subPath }}
|
|
||||||
{{- end }}
|
|
||||||
- mountPath: /app/subscriptions
|
|
||||||
name: subscriptions
|
|
||||||
{{- if .Values.persistence.subscriptions.subPath }}
|
|
||||||
subPath: {{ .Values.persistence.subscriptions.subPath }}
|
|
||||||
{{- end }}
|
|
||||||
- mountPath: /app/users
|
|
||||||
name: users
|
|
||||||
{{- if .Values.persistence.users.subPath }}
|
|
||||||
subPath: {{ .Values.persistence.users.subPath }}
|
|
||||||
{{- end }}
|
|
||||||
volumes:
|
|
||||||
- name: appdata
|
|
||||||
{{- if .Values.persistence.appdata.enabled}}
|
|
||||||
persistentVolumeClaim:
|
|
||||||
claimName: {{ if .Values.persistence.appdata.existingClaim }}{{ .Values.persistence.appdata.existingClaim }}{{- else }}{{ template "youtubedl-material.fullname" . }}-appdata{{- end }}
|
|
||||||
{{- else }}
|
|
||||||
emptyDir: {}
|
|
||||||
{{- end }}
|
|
||||||
- name: audio
|
|
||||||
{{- if .Values.persistence.audio.enabled}}
|
|
||||||
persistentVolumeClaim:
|
|
||||||
claimName: {{ if .Values.persistence.audio.existingClaim }}{{ .Values.persistence.audio.existingClaim }}{{- else }}{{ template "youtubedl-material.fullname" . }}-audio{{- end }}
|
|
||||||
{{- else }}
|
|
||||||
emptyDir: {}
|
|
||||||
{{- end }}
|
|
||||||
- name: subscriptions
|
|
||||||
{{- if .Values.persistence.subscriptions.enabled}}
|
|
||||||
persistentVolumeClaim:
|
|
||||||
claimName: {{ if .Values.persistence.subscriptions.existingClaim }}{{ .Values.persistence.subscriptions.existingClaim }}{{- else }}{{ template "youtubedl-material.fullname" . }}-subscriptions{{- end }}
|
|
||||||
{{- else }}
|
|
||||||
emptyDir: {}
|
|
||||||
{{- end }}
|
|
||||||
- name: users
|
|
||||||
{{- if .Values.persistence.users.enabled}}
|
|
||||||
persistentVolumeClaim:
|
|
||||||
claimName: {{ if .Values.persistence.users.existingClaim }}{{ .Values.persistence.users.existingClaim }}{{- else }}{{ template "youtubedl-material.fullname" . }}-users{{- end }}
|
|
||||||
{{- else }}
|
|
||||||
emptyDir: {}
|
|
||||||
{{- end }}
|
|
||||||
- name: video
|
|
||||||
{{- if .Values.persistence.video.enabled}}
|
|
||||||
persistentVolumeClaim:
|
|
||||||
claimName: {{ if .Values.persistence.video.existingClaim }}{{ .Values.persistence.video.existingClaim }}{{- else }}{{ template "youtubedl-material.fullname" . }}-video{{- end }}
|
|
||||||
{{- else }}
|
|
||||||
emptyDir: {}
|
|
||||||
{{- end }}
|
|
||||||
{{- with .Values.nodeSelector }}
|
|
||||||
nodeSelector:
|
|
||||||
{{- toYaml . | nindent 8 }}
|
|
||||||
{{- end }}
|
|
||||||
{{- with .Values.affinity }}
|
|
||||||
affinity:
|
|
||||||
{{- toYaml . | nindent 8 }}
|
|
||||||
{{- end }}
|
|
||||||
{{- with .Values.tolerations }}
|
|
||||||
tolerations:
|
|
||||||
{{- toYaml . | nindent 8 }}
|
|
||||||
{{- end }}
|
|
||||||
@@ -1,61 +0,0 @@
|
|||||||
{{- if .Values.ingress.enabled -}}
|
|
||||||
{{- $fullName := include "youtubedl-material.fullname" . -}}
|
|
||||||
{{- $svcPort := .Values.service.port -}}
|
|
||||||
{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
|
|
||||||
{{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }}
|
|
||||||
{{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}}
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}}
|
|
||||||
apiVersion: networking.k8s.io/v1
|
|
||||||
{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}}
|
|
||||||
apiVersion: networking.k8s.io/v1beta1
|
|
||||||
{{- else -}}
|
|
||||||
apiVersion: extensions/v1beta1
|
|
||||||
{{- end }}
|
|
||||||
kind: Ingress
|
|
||||||
metadata:
|
|
||||||
name: {{ $fullName }}
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.labels" . | nindent 4 }}
|
|
||||||
{{- with .Values.ingress.annotations }}
|
|
||||||
annotations:
|
|
||||||
{{- toYaml . | nindent 4 }}
|
|
||||||
{{- end }}
|
|
||||||
spec:
|
|
||||||
{{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }}
|
|
||||||
ingressClassName: {{ .Values.ingress.className }}
|
|
||||||
{{- end }}
|
|
||||||
{{- if .Values.ingress.tls }}
|
|
||||||
tls:
|
|
||||||
{{- range .Values.ingress.tls }}
|
|
||||||
- hosts:
|
|
||||||
{{- range .hosts }}
|
|
||||||
- {{ . | quote }}
|
|
||||||
{{- end }}
|
|
||||||
secretName: {{ .secretName }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
rules:
|
|
||||||
{{- range .Values.ingress.hosts }}
|
|
||||||
- host: {{ .host | quote }}
|
|
||||||
http:
|
|
||||||
paths:
|
|
||||||
{{- range .paths }}
|
|
||||||
- path: {{ .path }}
|
|
||||||
{{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }}
|
|
||||||
pathType: {{ .pathType }}
|
|
||||||
{{- end }}
|
|
||||||
backend:
|
|
||||||
{{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }}
|
|
||||||
service:
|
|
||||||
name: {{ $fullName }}
|
|
||||||
port:
|
|
||||||
number: {{ $svcPort }}
|
|
||||||
{{- else }}
|
|
||||||
serviceName: {{ $fullName }}
|
|
||||||
servicePort: {{ $svcPort }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
apiVersion: v1
|
|
||||||
kind: Service
|
|
||||||
metadata:
|
|
||||||
name: {{ include "youtubedl-material.fullname" . }}
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.labels" . | nindent 4 }}
|
|
||||||
spec:
|
|
||||||
type: {{ .Values.service.type }}
|
|
||||||
ports:
|
|
||||||
- port: {{ .Values.service.port }}
|
|
||||||
targetPort: http
|
|
||||||
protocol: TCP
|
|
||||||
name: http
|
|
||||||
selector:
|
|
||||||
{{- include "youtubedl-material.selectorLabels" . | nindent 4 }}
|
|
||||||
@@ -1,12 +0,0 @@
|
|||||||
{{- if .Values.serviceAccount.create -}}
|
|
||||||
apiVersion: v1
|
|
||||||
kind: ServiceAccount
|
|
||||||
metadata:
|
|
||||||
name: {{ include "youtubedl-material.serviceAccountName" . }}
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.labels" . | nindent 4 }}
|
|
||||||
{{- with .Values.serviceAccount.annotations }}
|
|
||||||
annotations:
|
|
||||||
{{- toYaml . | nindent 4 }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
{{- if and .Values.persistence.subscriptions.enabled (not .Values.persistence.subscriptions.existingClaim) }}
|
|
||||||
kind: PersistentVolumeClaim
|
|
||||||
apiVersion: v1
|
|
||||||
metadata:
|
|
||||||
name: {{ template "youtubedl-material.fullname" . }}-subscriptions
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.labels" . | nindent 4 }}
|
|
||||||
spec:
|
|
||||||
accessModes:
|
|
||||||
- {{ .Values.persistence.subscriptions.accessMode | quote }}
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
storage: {{ .Values.persistence.subscriptions.size | quote }}
|
|
||||||
{{- if .Values.persistence.subscriptions.storageClass }}
|
|
||||||
{{- if (eq "-" .Values.persistence.subscriptions.storageClass) }}
|
|
||||||
storageClassName: ""
|
|
||||||
{{- else }}
|
|
||||||
storageClassName: "{{ .Values.persistence.subscriptions.storageClass }}"
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end -}}
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
apiVersion: v1
|
|
||||||
kind: Pod
|
|
||||||
metadata:
|
|
||||||
name: "{{ include "youtubedl-material.fullname" . }}-test-connection"
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.labels" . | nindent 4 }}
|
|
||||||
annotations:
|
|
||||||
"helm.sh/hook": test
|
|
||||||
spec:
|
|
||||||
containers:
|
|
||||||
- name: wget
|
|
||||||
image: busybox
|
|
||||||
command: ['wget']
|
|
||||||
args: ['{{ include "youtubedl-material.fullname" . }}:{{ .Values.service.port }}']
|
|
||||||
restartPolicy: Never
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
{{- if and .Values.persistence.users.enabled (not .Values.persistence.users.existingClaim) }}
|
|
||||||
kind: PersistentVolumeClaim
|
|
||||||
apiVersion: v1
|
|
||||||
metadata:
|
|
||||||
name: {{ template "youtubedl-material.fullname" . }}-users
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.labels" . | nindent 4 }}
|
|
||||||
spec:
|
|
||||||
accessModes:
|
|
||||||
- {{ .Values.persistence.users.accessMode | quote }}
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
storage: {{ .Values.persistence.users.size | quote }}
|
|
||||||
{{- if .Values.persistence.users.storageClass }}
|
|
||||||
{{- if (eq "-" .Values.persistence.users.storageClass) }}
|
|
||||||
storageClassName: ""
|
|
||||||
{{- else }}
|
|
||||||
storageClassName: "{{ .Values.persistence.users.storageClass }}"
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end -}}
|
|
||||||
@@ -1,21 +0,0 @@
|
|||||||
{{- if and .Values.persistence.video.enabled (not .Values.persistence.video.existingClaim) }}
|
|
||||||
kind: PersistentVolumeClaim
|
|
||||||
apiVersion: v1
|
|
||||||
metadata:
|
|
||||||
name: {{ template "youtubedl-material.fullname" . }}-video
|
|
||||||
labels:
|
|
||||||
{{- include "youtubedl-material.labels" . | nindent 4 }}
|
|
||||||
spec:
|
|
||||||
accessModes:
|
|
||||||
- {{ .Values.persistence.video.accessMode | quote }}
|
|
||||||
resources:
|
|
||||||
requests:
|
|
||||||
storage: {{ .Values.persistence.video.size | quote }}
|
|
||||||
{{- if .Values.persistence.video.storageClass }}
|
|
||||||
{{- if (eq "-" .Values.persistence.video.storageClass) }}
|
|
||||||
storageClassName: ""
|
|
||||||
{{- else }}
|
|
||||||
storageClassName: "{{ .Values.persistence.video.storageClass }}"
|
|
||||||
{{- end }}
|
|
||||||
{{- end }}
|
|
||||||
{{- end -}}
|
|
||||||
@@ -1,153 +0,0 @@
|
|||||||
# Default values for youtubedl-material.
|
|
||||||
# This is a YAML-formatted file.
|
|
||||||
# Declare variables to be passed into your templates.
|
|
||||||
|
|
||||||
replicaCount: 1
|
|
||||||
|
|
||||||
image:
|
|
||||||
repository: tzahi12345/youtubedl-material
|
|
||||||
pullPolicy: IfNotPresent
|
|
||||||
# Overrides the image tag whose default is the chart appVersion.
|
|
||||||
tag: ""
|
|
||||||
|
|
||||||
imagePullSecrets: []
|
|
||||||
nameOverride: ""
|
|
||||||
fullnameOverride: ""
|
|
||||||
|
|
||||||
serviceAccount:
|
|
||||||
# Specifies whether a service account should be created
|
|
||||||
create: true
|
|
||||||
# Annotations to add to the service account
|
|
||||||
annotations: {}
|
|
||||||
# The name of the service account to use.
|
|
||||||
# If not set and create is true, a name is generated using the fullname template
|
|
||||||
name: ""
|
|
||||||
|
|
||||||
podAnnotations: {}
|
|
||||||
|
|
||||||
podSecurityContext: {}
|
|
||||||
# fsGroup: 2000
|
|
||||||
|
|
||||||
securityContext: {}
|
|
||||||
# capabilities:
|
|
||||||
# drop:
|
|
||||||
# - ALL
|
|
||||||
# readOnlyRootFilesystem: true
|
|
||||||
# runAsNonRoot: true
|
|
||||||
# runAsUser: 1000
|
|
||||||
|
|
||||||
service:
|
|
||||||
type: ClusterIP
|
|
||||||
port: 17442
|
|
||||||
|
|
||||||
ingress:
|
|
||||||
enabled: false
|
|
||||||
annotations: {}
|
|
||||||
# kubernetes.io/ingress.class: nginx
|
|
||||||
# kubernetes.io/tls-acme: "true"
|
|
||||||
hosts:
|
|
||||||
- host: chart-example.local
|
|
||||||
paths: []
|
|
||||||
tls: []
|
|
||||||
# - secretName: chart-example-tls
|
|
||||||
# hosts:
|
|
||||||
# - chart-example.local
|
|
||||||
|
|
||||||
resources: {}
|
|
||||||
# We usually recommend not to specify default resources and to leave this as a conscious
|
|
||||||
# choice for the user. This also increases chances charts run on environments with little
|
|
||||||
# resources, such as Minikube. If you do want to specify resources, uncomment the following
|
|
||||||
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
|
|
||||||
# limits:
|
|
||||||
# cpu: 100m
|
|
||||||
# memory: 128Mi
|
|
||||||
# requests:
|
|
||||||
# cpu: 100m
|
|
||||||
# memory: 128Mi
|
|
||||||
|
|
||||||
persistence:
|
|
||||||
appdata:
|
|
||||||
enabled: true
|
|
||||||
## If defined, storageClassName: <storageClass>
|
|
||||||
## If set to "-", storageClassName: "", which disables dynamic provisioning
|
|
||||||
## If undefined (the default) or set to null, no storageClassName spec is
|
|
||||||
## set, choosing the default provisioner. (gp2 on AWS, standard on
|
|
||||||
## GKE, AWS & OpenStack)
|
|
||||||
##
|
|
||||||
# storageClass: "-"
|
|
||||||
## If you want to reuse an existing claim, you can pass the name of the PVC using
|
|
||||||
## the existingClaim variable
|
|
||||||
# existingClaim: your-claim
|
|
||||||
# subPath: some-subpath
|
|
||||||
accessMode: ReadWriteOnce
|
|
||||||
size: 1Gi
|
|
||||||
audio:
|
|
||||||
enabled: true
|
|
||||||
## If defined, storageClassName: <storageClass>
|
|
||||||
## If set to "-", storageClassName: "", which disables dynamic provisioning
|
|
||||||
## If undefined (the default) or set to null, no storageClassName spec is
|
|
||||||
## set, choosing the default provisioner. (gp2 on AWS, standard on
|
|
||||||
## GKE, AWS & OpenStack)
|
|
||||||
##
|
|
||||||
# storageClass: "-"
|
|
||||||
##
|
|
||||||
## If you want to reuse an existing claim, you can pass the name of the PVC using
|
|
||||||
## the existingClaim variable
|
|
||||||
# existingClaim: your-claim
|
|
||||||
# subPath: some-subpath
|
|
||||||
accessMode: ReadWriteOnce
|
|
||||||
size: 50Gi
|
|
||||||
video:
|
|
||||||
enabled: true
|
|
||||||
## If defined, storageClassName: <storageClass>
|
|
||||||
## If set to "-", storageClassName: "", which disables dynamic provisioning
|
|
||||||
## If undefined (the default) or set to null, no storageClassName spec is
|
|
||||||
## set, choosing the default provisioner. (gp2 on AWS, standard on
|
|
||||||
## GKE, AWS & OpenStack)
|
|
||||||
##
|
|
||||||
# storageClass: "-"
|
|
||||||
##
|
|
||||||
## If you want to reuse an existing claim, you can pass the name of the PVC using
|
|
||||||
## the existingClaim variable
|
|
||||||
# existingClaim: your-claim
|
|
||||||
# subPath: some-subpath
|
|
||||||
accessMode: ReadWriteOnce
|
|
||||||
size: 50Gi
|
|
||||||
subscriptions:
|
|
||||||
enabled: true
|
|
||||||
## If defined, storageClassName: <storageClass>
|
|
||||||
## If set to "-", storageClassName: "", which disables dynamic provisioning
|
|
||||||
## If undefined (the default) or set to null, no storageClassName spec is
|
|
||||||
## set, choosing the default provisioner. (gp2 on AWS, standard on
|
|
||||||
## GKE, AWS & OpenStack)
|
|
||||||
##
|
|
||||||
# storageClass: "-"
|
|
||||||
##
|
|
||||||
## If you want to reuse an existing claim, you can pass the name of the PVC using
|
|
||||||
## the existingClaim variable
|
|
||||||
# existingClaim: your-claim
|
|
||||||
# subPath: some-subpath
|
|
||||||
accessMode: ReadWriteOnce
|
|
||||||
size: 50Gi
|
|
||||||
users:
|
|
||||||
enabled: true
|
|
||||||
## If defined, storageClassName: <storageClass>
|
|
||||||
## If set to "-", storageClassName: "", which disables dynamic provisioning
|
|
||||||
## If undefined (the default) or set to null, no storageClassName spec is
|
|
||||||
## set, choosing the default provisioner. (gp2 on AWS, standard on
|
|
||||||
## GKE, AWS & OpenStack)
|
|
||||||
##
|
|
||||||
# storageClass: "-"
|
|
||||||
##
|
|
||||||
## If you want to reuse an existing claim, you can pass the name of the PVC using
|
|
||||||
## the existingClaim variable
|
|
||||||
# existingClaim: your-claim
|
|
||||||
# subPath: some-subpath
|
|
||||||
accessMode: ReadWriteOnce
|
|
||||||
size: 50Gi
|
|
||||||
|
|
||||||
nodeSelector: {}
|
|
||||||
|
|
||||||
tolerations: []
|
|
||||||
|
|
||||||
affinity: {}
|
|
||||||
28
chrome-extension.pem
Normal file
28
chrome-extension.pem
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
-----BEGIN PRIVATE KEY-----
|
||||||
|
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDMX9Wk5SM5cIfY
|
||||||
|
6ReKX3ybY1rsbNbOzG8ceN7yyeXB0mor8pVsX1MOna2HewOyBuaaYNJRO4tJBxic
|
||||||
|
7a8zQErfgHL/i/QrVvVCpfJ7xKvq6zij5NYoqd/FBUwawqjeH5/voIcAp9z5Vmsr
|
||||||
|
kL0sxJUKy6b4IWNp3noU7Nvq2RwxnXQbKDhz8FrX6oQAnDC6gsG5a2OSPsaE4oqw
|
||||||
|
6nmonORJypmpP5hqyHY8ffXBT2lAxjHT7OnYbaCBe2TQP8+rH6rDBOhjVNtUJ089
|
||||||
|
ocTQL6LtQEPkcF4yKJmtcOwHl8OPGZs5l9i8xb4j9RuSPkm2lbzZX8sOsdGGoqJZ
|
||||||
|
q68nYhsHAgMBAAECggEAXmtKEzfPObq88B/kAcgSk+FngMHZzcmR7bgD3GwdSxnQ
|
||||||
|
dkRI9zvk7eQ35tcUwntAr4Lat6/ILjFqlBmVLxrdXHuF5Xz9jcZLYgKzz61xdYM9
|
||||||
|
dC6FKF0u5eGIIvbauGAo7jaeGFX1F3Zu5b4lP9kEOGwU1B7sxF0FzsQM5+dtCJgv
|
||||||
|
We/hWQeF+9gtoVnkCSS/Mq2p0UomXXHW0Bz4+HuHlTR9aiYbviYnotABiLUhZyzt
|
||||||
|
v5yUaktb9qniBfdLpRlq8cp06xYlTEA9gJpa4Pnok8OWUsbAiW6EiXUSaZ/cchVa
|
||||||
|
AnO8WWYvVOnnt6WHI3+QdFTnqVjE5TBX4N/7bVhHGQKBgQD0dtbFqp7vZK/jVYvE
|
||||||
|
z0WPdySOg2ZDmoSfk5ZlR1+Y9zWToHv0qu8zqoOjL8Ubxrh9fGlOow+cCVdkEuaa
|
||||||
|
jWC2AWetuRvW0Z5A3XMXr0/N/1fgOkTqtp3WNrUPjVJahEg3lN+90opgFoT8swSi
|
||||||
|
s1oxW0oLcVIlrjhGBXAPCfsAuQKBgQDWBLRhHsRAvGcK5wGuVnxVApTIyBOermsW
|
||||||
|
3bJt+7+UI+4sYrBAwkWdQG93IG0cQtn48TEPBgmR2fjRF5IFT9M4/u+QOeeByT7I
|
||||||
|
we7nVtHgSY5ByC9N0mjWbcmSg8fktz/LonjldNC4kWdOFb75fxGf8kOGS5rUaMA4
|
||||||
|
zHucfB6ZvwKBgQCPHJrysMXGY21MaqIeHzEboaX3ABl37hdBzAa5V6UxSVdGCydF
|
||||||
|
vmO2HVZey/JaJmWOoKyNaowSzq0oWqBBTg6VvhDR9JHFmoVId9uOvAS+FYN+Mt5x
|
||||||
|
gWK5KuGoLxVNBC+6yh6JY526TrSfsrU+Aj0Es+qO9FIg2PL8muZVB4S3kQKBgH/5
|
||||||
|
CDMaxpc/EQ5/2413wZjDllwI51J3USm3Hz6Mzp2ybnSz/lh60k2Zfg1polTH1Lb6
|
||||||
|
4i7tmUNRZ2sAARyUAuWN64n+VeRRhe1dqZFDZPQMh7fmEAMk0fOGaoXlrt2ghdEq
|
||||||
|
Mchi9Xun1nHmpu9hgBR4NNBU3RwuFuLfwvprbZDZAoGAWa62QJChE86xQGP1MrL2
|
||||||
|
SbIzw3cfeP5xdQ3MKldJiy5IkbMR7Z13WZ7FwvPTy0g/onLHD1rqlm1kUMsGRHpD
|
||||||
|
5vH06PNpKXQ6x8BYaRGtE6P39jLycO/X+WK/lYTrWo1bR+mGCebDh4B5XrwT3gI6
|
||||||
|
x4Gvz134pZCTyQCf5JCwbQs=
|
||||||
|
-----END PRIVATE KEY-----
|
||||||
20
chrome-extension/background.js
Normal file
20
chrome-extension/background.js
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
// background.js
|
||||||
|
|
||||||
|
// Called when the user clicks on the browser action.
|
||||||
|
chrome.browserAction.onClicked.addListener(function(tab) {
|
||||||
|
// get the frontend_url
|
||||||
|
chrome.storage.sync.get({
|
||||||
|
frontend_url: 'http://localhost',
|
||||||
|
audio_only: false
|
||||||
|
}, function(items) {
|
||||||
|
chrome.tabs.query({active: true, currentWindow: true}, function(tabs) {
|
||||||
|
var activeTab = tabs[0];
|
||||||
|
var url = activeTab.url;
|
||||||
|
if (url.includes('youtube.com')) {
|
||||||
|
var new_url = items.frontend_url + '/#/home;url=' + encodeURIComponent(url) + ';audioOnly=' + items.audio_only;
|
||||||
|
chrome.tabs.create({ url: new_url });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
});
|
||||||
1
chrome-extension/css/bootstrap.min.css
vendored
1
chrome-extension/css/bootstrap.min.css
vendored
File diff suppressed because one or more lines are too long
@@ -1,17 +1,17 @@
|
|||||||
{
|
{
|
||||||
"manifest_version": 2,
|
"manifest_version": 2,
|
||||||
"name": "YoutubeDL-Material",
|
"name": "YoutubeDL-Material",
|
||||||
"version": "0.4",
|
"version": "0.3",
|
||||||
"description": "The Official Firefox & Chrome Extension of YoutubeDL-Material, an open-source and self-hosted YouTube downloader.",
|
"description": "The Official Firefox & Chrome Extension of YoutubeDL-Material, an open-source and self-hosted YouTube downloader.",
|
||||||
|
"background": {
|
||||||
|
"scripts": ["background.js"]
|
||||||
|
},
|
||||||
"browser_action": {
|
"browser_action": {
|
||||||
"default_icon": "favicon.png",
|
"default_icon": "favicon.png"
|
||||||
"default_popup": "popup.html",
|
|
||||||
"default_title": "YoutubeDL-Material"
|
|
||||||
},
|
},
|
||||||
"permissions": [
|
"permissions": [
|
||||||
"tabs",
|
"tabs",
|
||||||
"storage",
|
"storage"
|
||||||
"contextMenus"
|
|
||||||
],
|
],
|
||||||
"options_ui": {
|
"options_ui": {
|
||||||
"page": "options.html",
|
"page": "options.html",
|
||||||
|
|||||||
@@ -1,35 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<!-- Scripts -->
|
|
||||||
<script src="js/jquery-3.4.1.min.js"></script>
|
|
||||||
<script src="js/popper.min.js"></script>
|
|
||||||
<script src="js/bootstrap.min.js"></script>
|
|
||||||
|
|
||||||
<!-- Cascading Style Sheets -->
|
|
||||||
<link href="css/bootstrap.min.css" rel="stylesheet" media="screen">
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
|
|
||||||
<div style="width: 400px; margin: 0 auto;">
|
|
||||||
<div style="margin: 10px;">
|
|
||||||
<div class="checkbox">
|
|
||||||
<label>
|
|
||||||
<input type="checkbox" id="audio_only">
|
|
||||||
Audio only
|
|
||||||
</label>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="input-group mb-3">
|
|
||||||
<input id="url_input" type="text" class="form-control" placeholder="URL" aria-label="URL" aria-describedby="basic-addon2">
|
|
||||||
<div class="input-group-append">
|
|
||||||
<button class="btn btn-outline-secondary" type="button" id="download">Download</button>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<script src="popup.js"></script>
|
|
||||||
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
function audioOnlyClicked() {
|
|
||||||
console.log('audio only clicked');
|
|
||||||
var audio_only = document.getElementById("audio_only").checked;
|
|
||||||
|
|
||||||
// save state
|
|
||||||
|
|
||||||
chrome.storage.sync.set({
|
|
||||||
audio_only: audio_only
|
|
||||||
}, function() {});
|
|
||||||
}
|
|
||||||
|
|
||||||
function downloadVideo() {
|
|
||||||
var input_url = document.getElementById("url_input").value
|
|
||||||
// get the frontend_url
|
|
||||||
chrome.storage.sync.get({
|
|
||||||
frontend_url: 'http://localhost',
|
|
||||||
audio_only: false
|
|
||||||
}, function(items) {
|
|
||||||
var download_url = items.frontend_url + '/#/home;url=' + encodeURIComponent(input_url) + ';audioOnly=' + items.audio_only;
|
|
||||||
chrome.tabs.create({ url: download_url });
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function loadInputs() {
|
|
||||||
// load audio-only input
|
|
||||||
chrome.storage.sync.get({
|
|
||||||
frontend_url: 'http://localhost',
|
|
||||||
audio_only: false
|
|
||||||
}, function(items) {
|
|
||||||
document.getElementById("audio_only").checked = items.audio_only;
|
|
||||||
});
|
|
||||||
|
|
||||||
// load url input
|
|
||||||
chrome.tabs.query({active: true, currentWindow: true}, function(tabs) {
|
|
||||||
var activeTab = tabs[0];
|
|
||||||
var current_url = activeTab.url;
|
|
||||||
console.log(current_url);
|
|
||||||
if (current_url && current_url.includes('youtube.com')) {
|
|
||||||
document.getElementById("url_input").value = current_url;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
document.getElementById('download').addEventListener('click',
|
|
||||||
downloadVideo);
|
|
||||||
|
|
||||||
document.getElementById('audio_only').addEventListener('click',
|
|
||||||
audioOnlyClicked);
|
|
||||||
|
|
||||||
document.addEventListener('DOMContentLoaded', loadInputs);
|
|
||||||
Binary file not shown.
Binary file not shown.
@@ -2,12 +2,8 @@ version: "2"
|
|||||||
services:
|
services:
|
||||||
ytdl_material:
|
ytdl_material:
|
||||||
environment:
|
environment:
|
||||||
ytdl_mongodb_connection_string: 'mongodb://ytdl-mongo-db:27017'
|
ALLOW_CONFIG_MUTATIONS: 'true'
|
||||||
ytdl_use_local_db: 'false'
|
|
||||||
write_ytdl_config: 'true'
|
|
||||||
restart: always
|
restart: always
|
||||||
depends_on:
|
|
||||||
- ytdl-mongo-db
|
|
||||||
volumes:
|
volumes:
|
||||||
- ./appdata:/app/appdata
|
- ./appdata:/app/appdata
|
||||||
- ./audio:/app/audio
|
- ./audio:/app/audio
|
||||||
@@ -17,12 +13,3 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- "8998:17442"
|
- "8998:17442"
|
||||||
image: tzahi12345/youtubedl-material:latest
|
image: tzahi12345/youtubedl-material:latest
|
||||||
ytdl-mongo-db:
|
|
||||||
# If you are using a Raspberry Pi, use mongo:4.4.18
|
|
||||||
image: mongo:4
|
|
||||||
logging:
|
|
||||||
driver: "none"
|
|
||||||
container_name: mongo-db
|
|
||||||
restart: always
|
|
||||||
volumes:
|
|
||||||
- ./db/:/data/db
|
|
||||||
|
|||||||
@@ -1,69 +0,0 @@
|
|||||||
import platform
|
|
||||||
import requests
|
|
||||||
import shutil
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from collections import OrderedDict
|
|
||||||
|
|
||||||
from github import Github
|
|
||||||
|
|
||||||
machine = platform.machine()
|
|
||||||
|
|
||||||
# https://stackoverflow.com/questions/45125516/possible-values-for-uname-m
|
|
||||||
MACHINES_TO_ZIP = OrderedDict([
|
|
||||||
("x86_64", "Linux-x64"),
|
|
||||||
("aarch64", "LinuxArm64"),
|
|
||||||
("armv8", "LinuxArm64"),
|
|
||||||
("arm", "LinuxArm"),
|
|
||||||
("AMD64", "Windows-x64")
|
|
||||||
])
|
|
||||||
|
|
||||||
def getZipName():
|
|
||||||
for possibleMachine, possibleZipName in MACHINES_TO_ZIP.items():
|
|
||||||
if possibleMachine in machine:
|
|
||||||
return possibleZipName
|
|
||||||
|
|
||||||
def getLatestFileInRepo(repo, search_string):
|
|
||||||
# Create an unauthenticated instance of the Github object
|
|
||||||
g = Github(os.environ.get('GH_TOKEN'))
|
|
||||||
|
|
||||||
# Replace with the repository owner and name
|
|
||||||
repo = g.get_repo(repo)
|
|
||||||
|
|
||||||
# Get all releases of the repository
|
|
||||||
releases = repo.get_releases()
|
|
||||||
|
|
||||||
# Loop through the releases in reverse order (from latest to oldest)
|
|
||||||
for release in list(releases):
|
|
||||||
# Get the release assets (files attached to the release)
|
|
||||||
assets = release.get_assets()
|
|
||||||
|
|
||||||
# Loop through the assets
|
|
||||||
for asset in assets:
|
|
||||||
if re.search(search_string, asset.name):
|
|
||||||
print(f'Downloading: {asset.name}')
|
|
||||||
response = requests.get(asset.browser_download_url)
|
|
||||||
with open(asset.name, 'wb') as f:
|
|
||||||
f.write(response.content)
|
|
||||||
print(f'Download complete: {asset.name}. Unzipping...')
|
|
||||||
shutil.unpack_archive(asset.name, './')
|
|
||||||
print(f'Unzipping complete!')
|
|
||||||
os.remove(asset.name)
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
continue
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
# If no matching release is found, print a message
|
|
||||||
print(f'No release found with {search_string}')
|
|
||||||
|
|
||||||
def getLatestCLIRelease():
|
|
||||||
zipName = getZipName()
|
|
||||||
if not zipName:
|
|
||||||
print(f"GetTwitchDownloader.py could not get valid path for '{machine}'. Exiting...")
|
|
||||||
sys.exit(1)
|
|
||||||
searchString = r'.*CLI.*' + zipName
|
|
||||||
getLatestFileInRepo("lay295/TwitchDownloader", searchString)
|
|
||||||
|
|
||||||
getLatestCLIRelease()
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
|
|
||||||
# THANK YOU TALULAH (https://github.com/nottalulah) for your help in figuring this out
|
|
||||||
# and also optimizing some code with this commit.
|
|
||||||
# xoxo :D
|
|
||||||
|
|
||||||
case $(uname -m) in
|
|
||||||
x86_64)
|
|
||||||
ARCH=Linux-x64;;
|
|
||||||
aarch64)
|
|
||||||
ARCH=LinuxArm64;;
|
|
||||||
armhf)
|
|
||||||
ARCH=LinuxArm;;
|
|
||||||
armv7)
|
|
||||||
ARCH=LinuxArm;;
|
|
||||||
armv7l)
|
|
||||||
ARCH=LinuxArm;;
|
|
||||||
*)
|
|
||||||
echo "Unsupported architecture: $(uname -m)"
|
|
||||||
exit 1
|
|
||||||
esac
|
|
||||||
|
|
||||||
echo "(INFO) Architecture detected: $ARCH"
|
|
||||||
echo "(1/5) READY - Install unzip"
|
|
||||||
apt-get update && apt-get -y install unzip curl jq libicu70
|
|
||||||
VERSION=$(curl --silent "https://api.github.com/repos/lay295/TwitchDownloader/releases" | jq -r --arg arch "$ARCH" '[.[] | select(.assets | length > 0) | select(.assets[].name | contains("CLI") and contains($arch))] | max_by(.published_at) | .tag_name')
|
|
||||||
echo "(2/5) DOWNLOAD - Acquire twitchdownloader"
|
|
||||||
curl -o twitchdownloader.zip \
|
|
||||||
--connect-timeout 5 \
|
|
||||||
--max-time 120 \
|
|
||||||
--retry 5 \
|
|
||||||
--retry-delay 0 \
|
|
||||||
--retry-max-time 40 \
|
|
||||||
-L "https://github.com/lay295/TwitchDownloader/releases/download/$VERSION/TwitchDownloaderCLI-$VERSION-$ARCH.zip"
|
|
||||||
unzip twitchdownloader.zip
|
|
||||||
chmod +x TwitchDownloaderCLI
|
|
||||||
echo "(3/5) Smoke test"
|
|
||||||
./TwitchDownloaderCLI --help
|
|
||||||
cp ./TwitchDownloaderCLI /usr/local/bin/TwitchDownloaderCLI
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
|
|
||||||
# THANK YOU TALULAH (https://github.com/nottalulah) for your help in figuring this out
|
|
||||||
# and also optimizing some code with this commit.
|
|
||||||
# xoxo :D
|
|
||||||
|
|
||||||
case $(uname -m) in
|
|
||||||
x86_64)
|
|
||||||
ARCH=amd64;;
|
|
||||||
aarch64)
|
|
||||||
ARCH=arm64;;
|
|
||||||
armhf)
|
|
||||||
ARCH=armhf;;
|
|
||||||
armv7)
|
|
||||||
ARCH=armel;;
|
|
||||||
armv7l)
|
|
||||||
ARCH=armel;;
|
|
||||||
*)
|
|
||||||
echo "Unsupported architecture: $(uname -m)"
|
|
||||||
exit 1
|
|
||||||
esac
|
|
||||||
|
|
||||||
echo "(INFO) Architecture detected: $ARCH"
|
|
||||||
echo "(1/5) READY - Acquire temp dependencies in ffmpeg obtain layer"
|
|
||||||
apt-get update && apt-get -y install curl xz-utils
|
|
||||||
echo "(2/5) DOWNLOAD - Acquire latest ffmpeg and ffprobe from John van Sickle's master-sourced builds in ffmpeg obtain layer"
|
|
||||||
curl -o ffmpeg.txz \
|
|
||||||
--connect-timeout 5 \
|
|
||||||
--max-time 120 \
|
|
||||||
--retry 5 \
|
|
||||||
--retry-delay 0 \
|
|
||||||
--retry-max-time 40 \
|
|
||||||
"https://johnvansickle.com/ffmpeg/old-releases/ffmpeg-5.1.1-${ARCH}-static.tar.xz"
|
|
||||||
mkdir /tmp/ffmpeg
|
|
||||||
tar xf ffmpeg.txz -C /tmp/ffmpeg
|
|
||||||
echo "(3/5) CLEANUP - Remove temp dependencies from ffmpeg obtain layer"
|
|
||||||
apt-get -y remove curl xz-utils
|
|
||||||
apt-get -y autoremove
|
|
||||||
echo "(4/5) PROVISION - Provide ffmpeg and ffprobe from ffmpeg obtain layer"
|
|
||||||
cp /tmp/ffmpeg/*/ffmpeg /usr/local/bin/ffmpeg
|
|
||||||
cp /tmp/ffmpeg/*/ffprobe /usr/local/bin/ffprobe
|
|
||||||
echo "(5/5) CLEANUP - Remove temporary downloads from ffmpeg obtain layer"
|
|
||||||
rm -rf /tmp/ffmpeg ffmpeg.txz
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
build:
|
|
||||||
docker:
|
|
||||||
web: Dockerfile.heroku
|
|
||||||
27887
package-lock.json
generated
27887
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
106
package.json
106
package.json
@@ -1,20 +1,16 @@
|
|||||||
{
|
{
|
||||||
"name": "youtube-dl-material",
|
"name": "youtube-dl-material",
|
||||||
"version": "4.3.2",
|
"version": "4.1.0",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"ng": "ng",
|
"ng": "ng",
|
||||||
"start": "ng serve",
|
"start": "ng serve",
|
||||||
"codespaces": "ng serve --configuration=codespaces",
|
"build": "ng build",
|
||||||
"build": "ng build --configuration production",
|
|
||||||
"prebuild": "node src/postbuild.mjs",
|
|
||||||
"heroku-postbuild": "npm install --prefix backend",
|
"heroku-postbuild": "npm install --prefix backend",
|
||||||
"test": "ng test",
|
"test": "ng test",
|
||||||
"lint": "ng lint",
|
"lint": "ng lint",
|
||||||
"e2e": "ng e2e",
|
"e2e": "ng e2e",
|
||||||
"electron": "ng build --base-href ./ && electron .",
|
"electron": "ng build --base-href ./ && electron ."
|
||||||
"generate": "openapi --input ./\"Public API v1.yaml\" --output ./src/api-types --exportCore false --exportServices false --exportModels true",
|
|
||||||
"i18n-source": "ng extract-i18n --output-path=src/assets/i18n --out-file=messages.en.xlf"
|
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "12.3.1",
|
"node": "12.3.1",
|
||||||
@@ -22,68 +18,56 @@
|
|||||||
},
|
},
|
||||||
"private": true,
|
"private": true,
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@angular-devkit/core": "^15.0.1",
|
"@angular-devkit/core": "^9.0.6",
|
||||||
"@angular/animations": "^15.0.1",
|
"@angular/animations": "^9.1.0",
|
||||||
"@angular/cdk": "^15.0.0",
|
"@angular/cdk": "^9.2.0",
|
||||||
"@angular/common": "^15.0.1",
|
"@angular/common": "^9.1.0",
|
||||||
"@angular/compiler": "^15.0.1",
|
"@angular/compiler": "^9.1.0",
|
||||||
"@angular/core": "^15.0.1",
|
"@angular/core": "^9.0.7",
|
||||||
"@angular/forms": "^15.0.1",
|
"@angular/forms": "^9.1.0",
|
||||||
"@angular/localize": "^15.0.1",
|
"@angular/localize": "^9.1.0",
|
||||||
"@angular/material": "^15.0.0",
|
"@angular/material": "^9.2.0",
|
||||||
"@angular/platform-browser": "^15.0.1",
|
"@angular/platform-browser": "^9.1.0",
|
||||||
"@angular/platform-browser-dynamic": "^15.0.1",
|
"@angular/platform-browser-dynamic": "^9.1.0",
|
||||||
"@angular/router": "^15.0.1",
|
"@angular/router": "^9.1.0",
|
||||||
"@fontsource/material-icons": "^4.5.4",
|
"@ngneat/content-loader": "^5.0.0",
|
||||||
"@ngneat/content-loader": "^7.0.0",
|
|
||||||
"@videogular/ngx-videogular": "^6.0.0",
|
|
||||||
"core-js": "^2.4.1",
|
"core-js": "^2.4.1",
|
||||||
"crypto-js": "^4.1.1",
|
|
||||||
"file-saver": "^2.0.2",
|
"file-saver": "^2.0.2",
|
||||||
"filesize": "^10.0.7",
|
"filesize": "^6.1.0",
|
||||||
"fs-extra": "^10.0.0",
|
"fingerprintjs2": "^2.1.0",
|
||||||
"material-icons": "^1.10.8",
|
|
||||||
"nan": "^2.14.1",
|
"nan": "^2.14.1",
|
||||||
"ngx-avatars": "^1.4.1",
|
"ng-lazyload-image": "^7.0.1",
|
||||||
"ngx-file-drop": "^15.0.0",
|
"ngx-avatar": "^4.0.0",
|
||||||
"rxjs": "^6.6.3",
|
"ngx-file-drop": "^9.0.1",
|
||||||
"rxjs-compat": "^6.6.7",
|
"ngx-videogular": "^9.0.1",
|
||||||
"tslib": "^2.0.0",
|
"rxjs": "^6.5.3",
|
||||||
"typescript": "~4.8.4",
|
"rxjs-compat": "^6.0.0-rc.0",
|
||||||
"xliff-to-json": "^1.0.4",
|
"tslib": "^1.10.0",
|
||||||
"zone.js": "~0.11.4"
|
"typescript": "~3.7.5",
|
||||||
|
"web-animations-js": "^2.3.2",
|
||||||
|
"zone.js": "~0.10.2"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@angular-devkit/build-angular": "^15.0.1",
|
"@angular-devkit/build-angular": "^0.901.0",
|
||||||
"@angular/cli": "^15.0.1",
|
"@angular/cli": "^9.0.7",
|
||||||
"@angular/compiler-cli": "^15.0.1",
|
"@angular/compiler-cli": "^9.0.7",
|
||||||
"@angular/language-service": "^15.0.1",
|
"@angular/language-service": "^9.0.7",
|
||||||
"@types/core-js": "^2.5.2",
|
"@types/core-js": "^2.5.2",
|
||||||
"@types/file-saver": "^2.0.1",
|
"@types/file-saver": "^2.0.1",
|
||||||
"@types/jasmine": "^4.3.1",
|
"@types/jasmine": "2.5.45",
|
||||||
"@types/node": "^12.11.1",
|
"@types/node": "^12.11.1",
|
||||||
"@typescript-eslint/eslint-plugin": "^4.29.0",
|
"codelyzer": "^5.1.2",
|
||||||
"@typescript-eslint/parser": "^4.29.0",
|
"electron": "^8.0.1",
|
||||||
"ajv": "^7.2.4",
|
"jasmine-core": "~2.6.2",
|
||||||
"codelyzer": "^6.0.0",
|
"jasmine-spec-reporter": "~4.1.0",
|
||||||
"eslint": "^7.32.0",
|
"karma": "~1.7.0",
|
||||||
"jasmine-core": "~3.6.0",
|
"karma-chrome-launcher": "~2.1.1",
|
||||||
"jasmine-spec-reporter": "~5.0.0",
|
|
||||||
"karma": "~6.4.2",
|
|
||||||
"karma-chrome-launcher": "~3.1.0",
|
|
||||||
"karma-cli": "~1.0.1",
|
"karma-cli": "~1.0.1",
|
||||||
"karma-coverage-istanbul-reporter": "~3.0.2",
|
"karma-coverage-istanbul-reporter": "^1.2.1",
|
||||||
"karma-jasmine": "~5.1.0",
|
"karma-jasmine": "~1.1.0",
|
||||||
"karma-jasmine-html-reporter": "^1.5.0",
|
"karma-jasmine-html-reporter": "^0.2.2",
|
||||||
"openapi-typescript-codegen": "^0.23.0",
|
"protractor": "~5.1.2",
|
||||||
"protractor": "~7.0.0",
|
|
||||||
"ts-node": "~3.0.4",
|
"ts-node": "~3.0.4",
|
||||||
"tslint": "~6.1.0"
|
"tslint": "~5.3.2"
|
||||||
},
|
}
|
||||||
"overrides": {
|
|
||||||
"ngx-avatars": {
|
|
||||||
"@angular/common": "15.0.1",
|
|
||||||
"@angular/core": "15.0.1"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
BIN
releases/youtubedl-material-latest.zip
Normal file
BIN
releases/youtubedl-material-latest.zip
Normal file
Binary file not shown.
@@ -1,11 +1,11 @@
|
|||||||
/* Coolors Exported Palette - coolors.co/e8aeb7-b8e1ff-a9fff7-94fbab-82aba1 */
|
/* Coolors Exported Palette - coolors.co/e8aeb7-b8e1ff-a9fff7-94fbab-82aba1 */
|
||||||
|
|
||||||
/* HSL */
|
/* HSL */
|
||||||
$color1: hsla(351, 56%, 80%, 1);
|
$color1: hsla(351%, 56%, 80%, 1);
|
||||||
$softblue: hsla(205, 100%, 86%, 1);
|
$softblue: hsla(205%, 100%, 86%, 1);
|
||||||
$color3: hsla(174, 100%, 83%, 1);
|
$color3: hsla(174%, 100%, 83%, 1);
|
||||||
$color4: hsla(133, 93%, 78%, 1);
|
$color4: hsla(133%, 93%, 78%, 1);
|
||||||
$color5: hsla(165, 20%, 59%, 1);
|
$color5: hsla(165%, 20%, 59%, 1);
|
||||||
|
|
||||||
/* RGB */
|
/* RGB */
|
||||||
$color1: rgba(232, 174, 183, 1);
|
$color1: rgba(232, 174, 183, 1);
|
||||||
|
|||||||
@@ -1,133 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
export type { AddFileToPlaylistRequest } from './models/AddFileToPlaylistRequest';
|
|
||||||
export type { Archive } from './models/Archive';
|
|
||||||
export type { BaseChangePermissionsRequest } from './models/BaseChangePermissionsRequest';
|
|
||||||
export type { binary } from './models/binary';
|
|
||||||
export type { body_19 } from './models/body_19';
|
|
||||||
export type { body_20 } from './models/body_20';
|
|
||||||
export type { Category } from './models/Category';
|
|
||||||
export { CategoryRule } from './models/CategoryRule';
|
|
||||||
export type { ChangeRolePermissionsRequest } from './models/ChangeRolePermissionsRequest';
|
|
||||||
export type { ChangeUserPermissionsRequest } from './models/ChangeUserPermissionsRequest';
|
|
||||||
export type { CheckConcurrentStreamRequest } from './models/CheckConcurrentStreamRequest';
|
|
||||||
export type { CheckConcurrentStreamResponse } from './models/CheckConcurrentStreamResponse';
|
|
||||||
export type { CheckSubscriptionRequest } from './models/CheckSubscriptionRequest';
|
|
||||||
export type { ClearDownloadsRequest } from './models/ClearDownloadsRequest';
|
|
||||||
export type { ConcurrentStream } from './models/ConcurrentStream';
|
|
||||||
export type { Config } from './models/Config';
|
|
||||||
export type { ConfigResponse } from './models/ConfigResponse';
|
|
||||||
export type { CreateCategoryRequest } from './models/CreateCategoryRequest';
|
|
||||||
export type { CreateCategoryResponse } from './models/CreateCategoryResponse';
|
|
||||||
export type { CreatePlaylistRequest } from './models/CreatePlaylistRequest';
|
|
||||||
export type { CreatePlaylistResponse } from './models/CreatePlaylistResponse';
|
|
||||||
export type { CropFileSettings } from './models/CropFileSettings';
|
|
||||||
export type { DatabaseFile } from './models/DatabaseFile';
|
|
||||||
export { DBBackup } from './models/DBBackup';
|
|
||||||
export type { DBInfoResponse } from './models/DBInfoResponse';
|
|
||||||
export type { DeleteAllFilesResponse } from './models/DeleteAllFilesResponse';
|
|
||||||
export type { DeleteArchiveItemsRequest } from './models/DeleteArchiveItemsRequest';
|
|
||||||
export type { DeleteCategoryRequest } from './models/DeleteCategoryRequest';
|
|
||||||
export type { DeleteMp3Mp4Request } from './models/DeleteMp3Mp4Request';
|
|
||||||
export type { DeleteNotificationRequest } from './models/DeleteNotificationRequest';
|
|
||||||
export type { DeletePlaylistRequest } from './models/DeletePlaylistRequest';
|
|
||||||
export type { DeleteSubscriptionFileRequest } from './models/DeleteSubscriptionFileRequest';
|
|
||||||
export type { DeleteUserRequest } from './models/DeleteUserRequest';
|
|
||||||
export type { Download } from './models/Download';
|
|
||||||
export type { DownloadArchiveRequest } from './models/DownloadArchiveRequest';
|
|
||||||
export type { DownloadFileRequest } from './models/DownloadFileRequest';
|
|
||||||
export type { DownloadRequest } from './models/DownloadRequest';
|
|
||||||
export type { DownloadResponse } from './models/DownloadResponse';
|
|
||||||
export type { DownloadTwitchChatByVODIDRequest } from './models/DownloadTwitchChatByVODIDRequest';
|
|
||||||
export type { DownloadTwitchChatByVODIDResponse } from './models/DownloadTwitchChatByVODIDResponse';
|
|
||||||
export type { DownloadVideosForSubscriptionRequest } from './models/DownloadVideosForSubscriptionRequest';
|
|
||||||
export { FileType } from './models/FileType';
|
|
||||||
export { FileTypeFilter } from './models/FileTypeFilter';
|
|
||||||
export type { GenerateArgsResponse } from './models/GenerateArgsResponse';
|
|
||||||
export type { GenerateNewApiKeyResponse } from './models/GenerateNewApiKeyResponse';
|
|
||||||
export type { GetAllCategoriesResponse } from './models/GetAllCategoriesResponse';
|
|
||||||
export type { GetAllDownloadsRequest } from './models/GetAllDownloadsRequest';
|
|
||||||
export type { GetAllDownloadsResponse } from './models/GetAllDownloadsResponse';
|
|
||||||
export type { GetAllFilesRequest } from './models/GetAllFilesRequest';
|
|
||||||
export type { GetAllFilesResponse } from './models/GetAllFilesResponse';
|
|
||||||
export type { GetAllSubscriptionsResponse } from './models/GetAllSubscriptionsResponse';
|
|
||||||
export type { GetAllTasksResponse } from './models/GetAllTasksResponse';
|
|
||||||
export type { GetArchivesRequest } from './models/GetArchivesRequest';
|
|
||||||
export type { GetArchivesResponse } from './models/GetArchivesResponse';
|
|
||||||
export type { GetDBBackupsResponse } from './models/GetDBBackupsResponse';
|
|
||||||
export type { GetDownloadRequest } from './models/GetDownloadRequest';
|
|
||||||
export type { GetDownloadResponse } from './models/GetDownloadResponse';
|
|
||||||
export type { GetFileFormatsRequest } from './models/GetFileFormatsRequest';
|
|
||||||
export type { GetFileFormatsResponse } from './models/GetFileFormatsResponse';
|
|
||||||
export type { GetFileRequest } from './models/GetFileRequest';
|
|
||||||
export type { GetFileResponse } from './models/GetFileResponse';
|
|
||||||
export type { GetFullTwitchChatRequest } from './models/GetFullTwitchChatRequest';
|
|
||||||
export type { GetFullTwitchChatResponse } from './models/GetFullTwitchChatResponse';
|
|
||||||
export type { GetLogsRequest } from './models/GetLogsRequest';
|
|
||||||
export type { GetLogsResponse } from './models/GetLogsResponse';
|
|
||||||
export type { GetMp3sResponse } from './models/GetMp3sResponse';
|
|
||||||
export type { GetMp4sResponse } from './models/GetMp4sResponse';
|
|
||||||
export type { GetNotificationsResponse } from './models/GetNotificationsResponse';
|
|
||||||
export type { GetPlaylistRequest } from './models/GetPlaylistRequest';
|
|
||||||
export type { GetPlaylistResponse } from './models/GetPlaylistResponse';
|
|
||||||
export type { GetPlaylistsRequest } from './models/GetPlaylistsRequest';
|
|
||||||
export type { GetPlaylistsResponse } from './models/GetPlaylistsResponse';
|
|
||||||
export type { GetRolesResponse } from './models/GetRolesResponse';
|
|
||||||
export type { GetSubscriptionRequest } from './models/GetSubscriptionRequest';
|
|
||||||
export type { GetSubscriptionResponse } from './models/GetSubscriptionResponse';
|
|
||||||
export type { GetTaskRequest } from './models/GetTaskRequest';
|
|
||||||
export type { GetTaskResponse } from './models/GetTaskResponse';
|
|
||||||
export type { GetUsersResponse } from './models/GetUsersResponse';
|
|
||||||
export type { ImportArchiveRequest } from './models/ImportArchiveRequest';
|
|
||||||
export type { IncrementViewCountRequest } from './models/IncrementViewCountRequest';
|
|
||||||
export type { inline_response_200_15 } from './models/inline_response_200_15';
|
|
||||||
export type { LoginRequest } from './models/LoginRequest';
|
|
||||||
export type { LoginResponse } from './models/LoginResponse';
|
|
||||||
export type { Notification } from './models/Notification';
|
|
||||||
export { NotificationAction } from './models/NotificationAction';
|
|
||||||
export { NotificationType } from './models/NotificationType';
|
|
||||||
export type { Playlist } from './models/Playlist';
|
|
||||||
export type { RegisterRequest } from './models/RegisterRequest';
|
|
||||||
export type { RegisterResponse } from './models/RegisterResponse';
|
|
||||||
export type { RestartDownloadResponse } from './models/RestartDownloadResponse';
|
|
||||||
export type { RestoreDBBackupRequest } from './models/RestoreDBBackupRequest';
|
|
||||||
export { Schedule } from './models/Schedule';
|
|
||||||
export type { SetConfigRequest } from './models/SetConfigRequest';
|
|
||||||
export type { SetNotificationsToReadRequest } from './models/SetNotificationsToReadRequest';
|
|
||||||
export type { SharingToggle } from './models/SharingToggle';
|
|
||||||
export type { Sort } from './models/Sort';
|
|
||||||
export type { SubscribeRequest } from './models/SubscribeRequest';
|
|
||||||
export type { SubscribeResponse } from './models/SubscribeResponse';
|
|
||||||
export type { Subscription } from './models/Subscription';
|
|
||||||
export type { SubscriptionRequestData } from './models/SubscriptionRequestData';
|
|
||||||
export type { SuccessObject } from './models/SuccessObject';
|
|
||||||
export type { TableInfo } from './models/TableInfo';
|
|
||||||
export type { Task } from './models/Task';
|
|
||||||
export { TaskType } from './models/TaskType';
|
|
||||||
export type { TestConnectionStringRequest } from './models/TestConnectionStringRequest';
|
|
||||||
export type { TestConnectionStringResponse } from './models/TestConnectionStringResponse';
|
|
||||||
export type { TransferDBRequest } from './models/TransferDBRequest';
|
|
||||||
export type { TransferDBResponse } from './models/TransferDBResponse';
|
|
||||||
export type { TwitchChatMessage } from './models/TwitchChatMessage';
|
|
||||||
export type { UnsubscribeRequest } from './models/UnsubscribeRequest';
|
|
||||||
export type { UnsubscribeResponse } from './models/UnsubscribeResponse';
|
|
||||||
export type { UpdateCategoriesRequest } from './models/UpdateCategoriesRequest';
|
|
||||||
export type { UpdateCategoryRequest } from './models/UpdateCategoryRequest';
|
|
||||||
export type { UpdateConcurrentStreamRequest } from './models/UpdateConcurrentStreamRequest';
|
|
||||||
export type { UpdateConcurrentStreamResponse } from './models/UpdateConcurrentStreamResponse';
|
|
||||||
export type { UpdateFileRequest } from './models/UpdateFileRequest';
|
|
||||||
export type { UpdatePlaylistRequest } from './models/UpdatePlaylistRequest';
|
|
||||||
export type { UpdaterStatus } from './models/UpdaterStatus';
|
|
||||||
export type { UpdateServerRequest } from './models/UpdateServerRequest';
|
|
||||||
export type { UpdateTaskDataRequest } from './models/UpdateTaskDataRequest';
|
|
||||||
export type { UpdateTaskOptionsRequest } from './models/UpdateTaskOptionsRequest';
|
|
||||||
export type { UpdateTaskScheduleRequest } from './models/UpdateTaskScheduleRequest';
|
|
||||||
export type { UpdateUserRequest } from './models/UpdateUserRequest';
|
|
||||||
export type { UploadCookiesRequest } from './models/UploadCookiesRequest';
|
|
||||||
export type { User } from './models/User';
|
|
||||||
export { UserPermission } from './models/UserPermission';
|
|
||||||
export type { Version } from './models/Version';
|
|
||||||
export type { VersionInfoResponse } from './models/VersionInfoResponse';
|
|
||||||
export { YesNo } from './models/YesNo';
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
export type AddFileToPlaylistRequest = {
|
|
||||||
file_uid: string;
|
|
||||||
playlist_id: string;
|
|
||||||
};
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
import type { FileType } from './FileType';
|
|
||||||
|
|
||||||
export type Archive = {
|
|
||||||
extractor: string;
|
|
||||||
id: string;
|
|
||||||
type: FileType;
|
|
||||||
title: string;
|
|
||||||
user_uid?: string;
|
|
||||||
sub_id?: string;
|
|
||||||
timestamp: number;
|
|
||||||
uid: string;
|
|
||||||
};
|
|
||||||
@@ -1,11 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
import type { UserPermission } from './UserPermission';
|
|
||||||
import type { YesNo } from './YesNo';
|
|
||||||
|
|
||||||
export type BaseChangePermissionsRequest = {
|
|
||||||
permission: UserPermission;
|
|
||||||
new_value: YesNo;
|
|
||||||
};
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
import type { CategoryRule } from './CategoryRule';
|
|
||||||
|
|
||||||
export type Category = {
|
|
||||||
name?: string;
|
|
||||||
uid?: string;
|
|
||||||
rules?: Array<CategoryRule>;
|
|
||||||
/**
|
|
||||||
* Overrides file output for downloaded files in category
|
|
||||||
*/
|
|
||||||
custom_output?: string;
|
|
||||||
};
|
|
||||||
@@ -1,25 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
export type CategoryRule = {
|
|
||||||
preceding_operator?: CategoryRule.preceding_operator;
|
|
||||||
comparator?: CategoryRule.comparator;
|
|
||||||
};
|
|
||||||
|
|
||||||
export namespace CategoryRule {
|
|
||||||
|
|
||||||
export enum preceding_operator {
|
|
||||||
OR = 'or',
|
|
||||||
AND = 'and',
|
|
||||||
}
|
|
||||||
|
|
||||||
export enum comparator {
|
|
||||||
INCLUDES = 'includes',
|
|
||||||
NOT_INCLUDES = 'not_includes',
|
|
||||||
EQUALS = 'equals',
|
|
||||||
NOT_EQUALS = 'not_equals',
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
}
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
import type { BaseChangePermissionsRequest } from './BaseChangePermissionsRequest';
|
|
||||||
|
|
||||||
export type ChangeRolePermissionsRequest = (BaseChangePermissionsRequest & {
|
|
||||||
role: string;
|
|
||||||
});
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
import type { BaseChangePermissionsRequest } from './BaseChangePermissionsRequest';
|
|
||||||
|
|
||||||
export type ChangeUserPermissionsRequest = (BaseChangePermissionsRequest & {
|
|
||||||
user_uid: string;
|
|
||||||
});
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
export type CheckConcurrentStreamRequest = {
|
|
||||||
/**
|
|
||||||
* UID of the concurrent stream
|
|
||||||
*/
|
|
||||||
uid: string;
|
|
||||||
};
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
import type { ConcurrentStream } from './ConcurrentStream';
|
|
||||||
|
|
||||||
export type CheckConcurrentStreamResponse = {
|
|
||||||
stream: ConcurrentStream;
|
|
||||||
};
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
/* istanbul ignore file */
|
|
||||||
/* tslint:disable */
|
|
||||||
/* eslint-disable */
|
|
||||||
|
|
||||||
export type CheckSubscriptionRequest = {
|
|
||||||
sub_id: string;
|
|
||||||
};
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user