Add basic docker config

This commit is contained in:
GHOSCHT 2024-03-03 20:04:13 +01:00
parent 1a16816996
commit e17fb2a821
Signed by: ghoscht
GPG key ID: 2C2C1C62A5388E82
30 changed files with 5600 additions and 22 deletions

7
.sops.yaml Normal file
View file

@ -0,0 +1,7 @@
# sops configuration: which age keys may decrypt which secret files.
keys:
  # age public key for host "franz".
  - &franz age1uauvjwfvg8u0zkn58ematurcptf43gz6vx44nwkq3xcnmwq95psqna9psw
creation_rules:
  # Secrets for franz are encrypted to franz's key only.
  - path_regex: secrets/franz.yaml$
    key_groups:
      - age:
          - *franz

View file

@ -62,7 +62,7 @@
},
"devshell": {
"inputs": {
"nixpkgs": "nixpkgs_6",
"nixpkgs": "nixpkgs_7",
"systems": "systems_5"
},
"locked": {
@ -342,7 +342,7 @@
},
"home-manager_2": {
"inputs": {
"nixpkgs": "nixpkgs_7"
"nixpkgs": "nixpkgs_8"
},
"locked": {
"lastModified": 1701071203,
@ -361,7 +361,7 @@
"hyprland": {
"inputs": {
"hyprland-protocols": "hyprland-protocols",
"nixpkgs": "nixpkgs_8",
"nixpkgs": "nixpkgs_9",
"systems": "systems_6",
"wlroots": "wlroots",
"xdph": "xdph"
@ -529,6 +529,22 @@
"type": "github"
}
},
"nixpkgs-stable_2": {
"locked": {
"lastModified": 1709428628,
"narHash": "sha256-//ZCCnpVai/ShtO2vPjh3AWgo8riXCaret6V9s7Hew4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "66d65cb00b82ffa04ee03347595aa20e41fe3555",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "release-23.11",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs-unstable": {
"locked": {
"lastModified": 1709237383,
@ -545,6 +561,22 @@
"type": "github"
}
},
"nixpkgs_10": {
"locked": {
"lastModified": 1701336116,
"narHash": "sha256-kEmpezCR/FpITc6yMbAh4WrOCiT2zg5pSjnKrq51h5Y=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "f5c27c6136db4d76c30e533c20517df6864c46ee",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1706098335,
@ -610,6 +642,22 @@
}
},
"nixpkgs_6": {
"locked": {
"lastModified": 1709356872,
"narHash": "sha256-mvxCirJbtkP0cZ6ABdwcgTk0u3bgLoIoEFIoYBvD6+4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "458b097d81f90275b3fdf03796f0563844926708",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"nixpkgs_7": {
"locked": {
"lastModified": 1677383253,
"narHash": "sha256-UfpzWfSxkfXHnb4boXZNaKsAcUrZT9Hw+tao1oZxd08=",
@ -625,7 +673,7 @@
"type": "github"
}
},
"nixpkgs_7": {
"nixpkgs_8": {
"locked": {
"lastModified": 1700794826,
"narHash": "sha256-RyJTnTNKhO0yqRpDISk03I/4A67/dp96YRxc86YOPgU=",
@ -641,7 +689,7 @@
"type": "github"
}
},
"nixpkgs_8": {
"nixpkgs_9": {
"locked": {
"lastModified": 1700612854,
"narHash": "sha256-yrQ8osMD+vDLGFX7pcwsY/Qr5PUd6OmDMYJZzZi0+zc=",
@ -657,22 +705,6 @@
"type": "github"
}
},
"nixpkgs_9": {
"locked": {
"lastModified": 1701336116,
"narHash": "sha256-kEmpezCR/FpITc6yMbAh4WrOCiT2zg5pSjnKrq51h5Y=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "f5c27c6136db4d76c30e533c20517df6864c46ee",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixpkgs-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"pre-commit": {
"inputs": {
"flake-compat": "flake-compat_2",
@ -708,9 +740,29 @@
"nix-colors": "nix-colors",
"nixpkgs": "nixpkgs_5",
"nixpkgs-unstable": "nixpkgs-unstable",
"sops-nix": "sops-nix",
"xremap": "xremap"
}
},
"sops-nix": {
"inputs": {
"nixpkgs": "nixpkgs_6",
"nixpkgs-stable": "nixpkgs-stable_2"
},
"locked": {
"lastModified": 1709434911,
"narHash": "sha256-UN47hQPM9ijwoz7cYq10xl19hvlSP/232+M5vZDOMs4=",
"owner": "Mic92",
"repo": "sops-nix",
"rev": "075df9d85ee70cfb53e598058045e1738f05e273",
"type": "github"
},
"original": {
"owner": "Mic92",
"repo": "sops-nix",
"type": "github"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
@ -895,7 +947,7 @@
"flake-parts": "flake-parts_2",
"home-manager": "home-manager_2",
"hyprland": "hyprland",
"nixpkgs": "nixpkgs_9",
"nixpkgs": "nixpkgs_10",
"xremap": "xremap_2"
},
"locked": {

View file

@ -36,6 +36,7 @@
xremap.url = "github:xremap/nix-flake";
flatpaks.url = "github:GermanBread/declarative-flatpak/stable";
heliox-cli.url = "git+https://git.ghoscht.com/heliox/cli/";
sops-nix.url = "github:Mic92/sops-nix";
};
outputs = {

View file

@ -11,4 +11,9 @@ in {
];
colorScheme = inputs.nix-colors.colorSchemes.catppuccin-mocha;
home.file.".docker" = {
source = ../rsc/docker/franz;
recursive = true;
};
}

View file

@ -21,6 +21,8 @@ in {
../common/optional/gnome-keyring.nix
../common/optional/docker.nix
../common/optional/vsftpd.nix
./sops.nix
./docker.nix
];
users.mutableUsers = true;

21
hosts/franz/docker.nix Normal file
View file

@ -0,0 +1,21 @@
{config, ...}: {
  # One-shot unit that ensures the external `traefik-net` bridge exists
  # before the compose stacks referencing it (as `external: true`) start.
  systemd.services.init-traefik-net-bridge-network = {
    description = "Create the network bridge traefik-net for the Docker stack.";
    # The script talks to the Docker daemon, so ordering after
    # network.target alone is not enough — require docker.service too.
    after = ["network.target" "docker.service"];
    requires = ["docker.service"];
    wantedBy = ["multi-user.target"];
    serviceConfig.Type = "oneshot";
    script = let
      dockercli = "${config.virtualisation.docker.package}/bin/docker";
    in ''
      # `|| true` keeps grep's non-zero exit (network absent) from
      # failing the whole unit.
      check=$(${dockercli} network ls | grep "traefik-net" || true)
      if [ -z "$check" ]; then
        ${dockercli} network create traefik-net
      else
        echo "traefik-net already exists in docker"
      fi
    '';
  };
}

70
hosts/franz/sops.nix Normal file
View file

@ -0,0 +1,70 @@
{
  pkgs,
  inputs,
  config,
  lib,
  ...
}: let
  vars = import ../../vars.nix;
in {
  imports = [
    inputs.sops-nix.nixosModules.sops
  ];
  # sops CLI for creating/editing the encrypted secret files.
  environment.systemPackages = with pkgs; [sops];
  sops.defaultSopsFile = ../../secrets/franz.yaml;
  sops.defaultSopsFormat = "yaml";
  # Decryption key lives in the user's home directory — TODO confirm it is
  # present before activation on a fresh install.
  sops.age.keyFile = "/home/${vars.user}/.config/sops/age/keys.txt";
  # Every secret is owned by the login user so docker-env-secrets (running
  # as that user, below) can read the decrypted files.
  sops.secrets = lib.genAttrs [
    "cloudflared/tunnel_token"
    "traefik/cloudflare_email"
    "traefik/cloudflare_api_key"
    "nextcloud/mysql_root_password"
    "nextcloud/mysql_password"
    "nextcloud/mysql_database"
    "nextcloud/mysql_user"
  ] (_: {owner = vars.user;});
  systemd.services.docker-env-secrets = {
    description = "Populate the .env files for the docker stack with values from SOPS";
    # ~/.docker is materialized by home-manager; write env files afterwards.
    after = ["home-manager-${vars.user}.service"];
    wantedBy = ["multi-user.target"];
    script = ''
      # Create target directories defensively in case home-manager has not
      # (yet) produced them — a failed `>` redirect would fail the unit.
      mkdir -p /home/${vars.user}/.docker/infrastructure /home/${vars.user}/.docker/nas
      echo "
      TUNNEL_TOKEN="$(cat ${config.sops.secrets."cloudflared/tunnel_token".path})"
      " > /home/${vars.user}/.docker/infrastructure/cloudflared.env
      echo "
      CLOUDFLARE_EMAIL="$(cat ${config.sops.secrets."traefik/cloudflare_email".path})"
      CLOUDFLARE_API_KEY="$(cat ${config.sops.secrets."traefik/cloudflare_api_key".path})"
      " > /home/${vars.user}/.docker/infrastructure/traefik.env
      echo "
      MYSQL_ROOT_PASSWORD="$(cat ${config.sops.secrets."nextcloud/mysql_root_password".path})"
      MYSQL_PASSWORD="$(cat ${config.sops.secrets."nextcloud/mysql_password".path})"
      MYSQL_DATABASE="$(cat ${config.sops.secrets."nextcloud/mysql_database".path})"
      MYSQL_USER="$(cat ${config.sops.secrets."nextcloud/mysql_user".path})"
      " > /home/${vars.user}/.docker/nas/nextcloud.env
    '';
    serviceConfig = {
      # Was the hard-coded literal "ghoscht"; use vars.user like the rest
      # of this module so the host stays portable.
      User = vars.user;
      WorkingDirectory = "/home/${vars.user}/.docker";
    };
  };
}

View file

@ -0,0 +1,67 @@
# Pi-hole + Unbound DNS stack.
version: '2'
services:
  pihole:
    container_name: pihole
    hostname: pihole
    image: pihole/pihole:latest
    volumes:
      - pihole_dnsmasq:/etc/dnsmasq.d
      - pihole_data:/etc/pihole
    restart: always
    environment:
      - IPv6=True
      - TZ=Europe/Berlin
      # Don't rebuild gravity on every container start.
      - SKIPGRAVITYONBOOT=1
      - VIRTUAL_HOST=pihole.ghoscht.com
      - FTL_CMD="no-daemon"
    ports:
      # Direct (non-Traefik) access to the admin UI.
      - 8420:80
      # Plain DNS for LAN clients.
      - "53:53/tcp"
      - "53:53/udp"
    cap_add:
      - NET_ADMIN
    networks:
      traefik_net:
      dns_net:
        # Fixed address so clients/upstream config can point at Pi-hole reliably.
        ipv4_address: 172.28.1.6
    # Container's own resolver bypasses itself to avoid a bootstrap loop.
    dns:
      - 1.1.1.1
    labels:
      - traefik.enable=true
      - traefik.http.routers.pihole.entrypoints=websecure
      - traefik.http.routers.pihole.rule=Host(`pihole.ghoscht.com`)
      - traefik.http.services.pihole.loadbalancer.server.port=80
      - traefik.docker.network=traefik-net
      - traefik.http.routers.pihole.tls=true
      - traefik.http.routers.pihole.tls.certresolver=lencrypt
  # Recursive resolver used by Pi-hole — presumably configured as upstream
  # at 172.28.1.5 inside pihole's settings; verify there.
  unbound:
    container_name: unbound
    image: mvance/unbound:latest
    volumes:
      - unbound_data:/opt/unbound/etc/unbound
    dns:
      - 1.1.1.1
    restart: always
    networks:
      traefik_net:
      dns_net:
        ipv4_address: 172.28.1.5
networks:
  # Shared reverse-proxy network, created outside this stack.
  traefik_net:
    name: traefik-net
    external: true
  dns_net:
    name: dns-net
    driver: bridge
    ipam:
      config:
        - subnet: 172.28.1.0/24
          ip_range: 172.28.1.5/30
          gateway: 172.28.1.1
volumes:
  pihole_dnsmasq:
    name: pihole_dnsmasq
  pihole_data:
    name: pihole_data
  unbound_data:
    name: unbound_data

View file

@ -0,0 +1,49 @@
# Tiny Tiny RSS + Postgres stack.
version: "3"
services:
  rss:
    image: wangqiru/ttrss:latest
    container_name: ttrss
    ports:
      # Direct (non-Traefik) access.
      - 181:80
    environment:
      - SELF_URL_PATH=http://192.168.178.43:181/
      - DB_PASS=ttrss # use the same password defined in `database.postgres`
      - PUID=1000
      - PGID=1000
    volumes:
      - feed-icons:/var/www/feed-icons/
    dns:
      - 1.1.1.1
    networks:
      traefik_net:
      database_only:
    restart: always
    labels:
      - traefik.enable=true
      - traefik.http.routers.ttrss.rule=Host(`rss.ghoscht.com`)
      - traefik.http.routers.ttrss.entrypoints=websecure
      - traefik.http.services.ttrss.loadbalancer.server.port=80
      - traefik.http.routers.ttrss.tls=true
      - traefik.http.routers.ttrss.tls.certresolver=lencrypt
  database.postgres:
    image: postgres:13-alpine
    container_name: ttrss-postgres
    environment:
      - POSTGRES_PASSWORD=ttrss # feel free to change the password
    volumes:
      - ~/postgres/data/:/var/lib/postgresql/data # persist postgres data to ~/postgres/data/ on the host
    networks:
      - database_only
    restart: always
volumes:
  feed-icons:
networks:
  # Communication between ttrss and its database only.
  # (The upstream example's unused `public_access`/`service_only` networks
  # were removed — no service here referenced them.)
  database_only:
    internal: true
  # Shared reverse-proxy network, created outside this stack.
  # (`driver` is ignored on external networks, so it was dropped.)
  traefik_net:
    name: traefik-net
    external: true

View file

@ -0,0 +1,59 @@
# Forgejo (Gitea fork) + Postgres stack.
version: "3"
services:
  server:
    image: codeberg.org/forgejo/forgejo:v1.21.5-0
    container_name: gitea
    environment:
      - USER_UID=1000
      - USER_GID=1000
      # NOTE(review): DB credentials are committed in plaintext here —
      # consider moving them to an env file populated from sops.
      - GITEA__database__DB_TYPE=postgres
      - GITEA__database__HOST=db:5432
      - GITEA__database__NAME=gitea
      - GITEA__database__USER=gitea
      - GITEA__database__PASSWD=gitea
      #- START_SSH_SERVER = true
      #- SSH_PORT = 2222
      #- SSH_DOMAIN = git.ghoscht.com
      #- ROOT_URL=https://git.ghoscht.com
    restart: always
    volumes:
      - gitea_data:/data
      - /etc/timezone:/etc/timezone:ro
      - /etc/localtime:/etc/localtime:ro
    ports:
      # Git-over-SSH, exposed directly (not proxied by Traefik).
      - "2222:22"
    networks:
      traefik_net:
      database_net:
    dns:
      - 1.1.1.1
    labels:
      - traefik.enable=true
      - traefik.http.routers.gitea.entrypoints=websecure
      - traefik.http.routers.gitea.rule=Host(`git.local.ghoscht.com`,`git.ghoscht.com`)
      - traefik.http.services.gitea.loadbalancer.server.port=3000
      - traefik.docker.network=traefik-net
      - traefik.http.routers.gitea.tls=true
      - traefik.http.routers.gitea.tls.certresolver=lencrypt
  db:
    image: postgres:15.3-bullseye
    container_name: gitea_db
    restart: always
    volumes:
      - gitea_db:/var/lib/postgresql/data
    environment:
      - POSTGRES_USER=gitea
      - POSTGRES_PASSWORD=gitea
      - POSTGRES_DB=gitea
    networks:
      database_net:
networks:
  # Shared reverse-proxy network, created outside this stack.
  traefik_net:
    name: traefik-net
    external: true
  # NOTE(review): unlike the ttrss stack, this db network is not
  # `internal: true` — confirm whether that is intentional.
  database_net:
volumes:
  gitea_data:
    name: gitea_data
  gitea_db:
    name: gitea_db

View file

@ -0,0 +1,126 @@
# Core infrastructure stack: reverse proxy, dashboard, monitoring,
# notifications, home automation and the Cloudflare tunnel.
version: '3'
services:
  traefik:
    image: traefik
    container_name: traefik
    restart: always
    ports:
      - "80:80"
      - "443:443"
      # Traefik API/dashboard, also reachable directly.
      - "6666:8080"
    volumes:
      - traefik_data:/etc/traefik
      # Read-only daemon socket for container discovery.
      - /var/run/docker.sock:/var/run/docker.sock:ro
    networks:
      traefik_net:
    labels:
      - traefik.enable=true
      - traefik.http.routers.dashboard.rule=Host(`traefik.ghoscht.com`)
      - traefik.http.routers.dashboard.entrypoints=websecure
      - traefik.http.services.dashboard.loadbalancer.server.port=8080
      - traefik.http.routers.dashboard.tls=true
      - traefik.http.routers.dashboard.tls.certresolver=lencrypt
    # Cloudflare credentials, generated by the docker-env-secrets service.
    env_file:
      - traefik.env
    dns:
      - 1.1.1.1
  homarr:
    container_name: homarr
    image: ghcr.io/ajnart/homarr:latest
    restart: always
    volumes:
      - homarr_data:/app/data/configs
      - homarr_icons:/app/public/imgs
    networks:
      traefik_net:
    labels:
      - traefik.enable=true
      - traefik.http.routers.homarr.entrypoints=websecure
      - traefik.http.routers.homarr.rule=Host(`dashboard.ghoscht.com`)
      - traefik.http.routers.homarr.tls=true
      - traefik.http.routers.homarr.tls.certresolver=lencrypt
    dns:
      - 1.1.1.1
  scrutiny:
    container_name: scrutiny
    image: ghcr.io/analogj/scrutiny:master-omnibus
    restart: always
    cap_add:
      # Needed for raw SMART access to the disks listed under `devices`.
      - SYS_RAWIO
    volumes:
      - /run/udev:/run/udev:ro
      - scrutiny_data:/opt/scrutiny/config
      - scrutiny_db:/opt/scrutiny/influxdb
    labels:
      - traefik.enable=true
      - traefik.http.routers.scrutiny.entrypoints=websecure
      - traefik.http.routers.scrutiny.rule=Host(`scrutiny.ghoscht.com`)
      - traefik.http.services.scrutiny.loadbalancer.server.port=8080
      - traefik.http.routers.scrutiny.tls=true
      - traefik.http.routers.scrutiny.tls.certresolver=lencrypt
    networks:
      traefik_net:
    devices:
      - "/dev/sda"
      - "/dev/sdb"
  ntfy:
    image: binwiederhier/ntfy
    container_name: ntfy
    # Was the only service without a restart policy; aligned with the rest.
    restart: always
    command:
      - serve
    environment:
      - TZ=UTC # optional: set desired timezone
    user: 1000:1000 # optional: replace with your own user/group or uid/gid
    volumes:
      - ./ntfy/server.yml:/etc/ntfy/server.yml
    labels:
      - traefik.enable=true
      - traefik.http.routers.ntfy.entrypoints=websecure
      - traefik.http.routers.ntfy.rule=Host(`ntfy.ghoscht.com`,`ntfy.local.ghoscht.com`)
      - traefik.http.routers.ntfy.tls=true
      - traefik.http.routers.ntfy.tls.certresolver=lencrypt
    networks:
      traefik_net:
  homeassistant:
    container_name: homeassistant
    image: "ghcr.io/home-assistant/home-assistant:stable"
    volumes:
      - /mnt/hdd/docker/home-assistant_data:/config
      - /etc/localtime:/etc/localtime:ro
      - /run/dbus:/run/dbus:ro
    restart: unless-stopped
    privileged: true
    labels:
      - traefik.enable=true
      - traefik.http.routers.homeassistant.entrypoints=websecure
      - traefik.http.routers.homeassistant.rule=Host(`home.ghoscht.com`,`home.local.ghoscht.com`)
      - traefik.http.routers.homeassistant.tls=true
      - traefik.http.routers.homeassistant.tls.certresolver=lencrypt
      - traefik.http.services.homeassistant.loadbalancer.server.port=8123
    networks:
      traefik_net:
  cloudflared:
    container_name: cloudflared
    image: cloudflare/cloudflared:latest
    restart: always
    command: tunnel --no-autoupdate --protocol http2 run
    # Tunnel token, generated by the docker-env-secrets service.
    env_file:
      - cloudflared.env
    networks:
      traefik_net:
networks:
  # Shared reverse-proxy network, created by the NixOS oneshot unit.
  # (`driver` is ignored on external networks, so it was dropped.)
  traefik_net:
    name: traefik-net
    external: true
volumes:
  traefik_data:
    name: traefik_data
  homarr_data:
    name: homarr_data
  homarr_icons:
    name: homarr_icons
  scrutiny_data:
    name: scrutiny_data
  scrutiny_db:
    name: scrutiny_db

View file

@ -0,0 +1,363 @@
# ntfy server config file
#
# Please refer to the documentation at https://ntfy.sh/docs/config/ for details.
# All options also support underscores (_) instead of dashes (-) to comply with the YAML spec.
# Public facing base URL of the service (e.g. https://ntfy.sh or https://ntfy.example.com)
#
# This setting is required for any of the following features:
# - attachments (to return a download URL)
# - e-mail sending (for the topic URL in the email footer)
# - iOS push notifications for self-hosted servers (to calculate the Firebase poll_request topic)
# - Matrix Push Gateway (to validate that the pushkey is correct)
#
base-url: https://ntfy.ghoscht.com
# Listen address for the HTTP & HTTPS web server. If "listen-https" is set, you must also
# set "key-file" and "cert-file". Format: [<ip>]:<port>, e.g. "1.2.3.4:8080".
#
# To listen on all interfaces, you may omit the IP address, e.g. ":443".
# To disable HTTP, set "listen-http" to "-".
#
# listen-http: ":80"
# listen-https:
# Listen on a Unix socket, e.g. /var/lib/ntfy/ntfy.sock
# This can be useful to avoid port issues on local systems, and to simplify permissions.
#
# listen-unix: <socket-path>
# listen-unix-mode: <linux permissions, e.g. 0700>
# Path to the private key & cert file for the HTTPS web server. Not used if "listen-https" is not set.
#
# key-file: <filename>
# cert-file: <filename>
# If set, also publish messages to a Firebase Cloud Messaging (FCM) topic for your app.
# This is optional and only required to save battery when using the Android app.
#
# firebase-key-file: <filename>
# If "cache-file" is set, messages are cached in a local SQLite database instead of only in-memory.
# This allows for service restarts without losing messages in support of the since= parameter.
#
# The "cache-duration" parameter defines the duration for which messages will be buffered
# before they are deleted. This is required to support the "since=..." and "poll=1" parameter.
# To disable the cache entirely (on-disk/in-memory), set "cache-duration" to 0.
# The cache file is created automatically, provided that the correct permissions are set.
#
# The "cache-startup-queries" parameter allows you to run commands when the database is initialized,
# e.g. to enable WAL mode (see https://phiresky.github.io/blog/2020/sqlite-performance-tuning/)).
# Example:
# cache-startup-queries: |
# pragma journal_mode = WAL;
# pragma synchronous = normal;
# pragma temp_store = memory;
# pragma busy_timeout = 15000;
# vacuum;
#
# The "cache-batch-size" and "cache-batch-timeout" parameter allow enabling async batch writing
# of messages. If set, messages will be queued and written to the database in batches of the given
# size, or after the given timeout. This is only required for high volume servers.
#
# Debian/RPM package users:
# Use /var/cache/ntfy/cache.db as cache file to avoid permission issues. The package
# creates this folder for you.
#
# Check your permissions:
# If you are running ntfy with systemd, make sure this cache file is owned by the
# ntfy user and group by running: chown ntfy.ntfy <filename>.
#
# cache-file: <filename>
# cache-duration: "12h"
# cache-startup-queries:
# cache-batch-size: 0
# cache-batch-timeout: "0ms"
# If set, access to the ntfy server and API can be controlled on a granular level using
# the 'ntfy user' and 'ntfy access' commands. See the --help pages for details, or check the docs.
#
# - auth-file is the SQLite user/access database; it is created automatically if it doesn't already exist
# - auth-default-access defines the default/fallback access if no access control entry is found; it can be
# set to "read-write" (default), "read-only", "write-only" or "deny-all".
# - auth-startup-queries allows you to run commands when the database is initialized, e.g. to enable
# WAL mode. This is similar to cache-startup-queries. See above for details.
#
# Debian/RPM package users:
# Use /var/lib/ntfy/user.db as user database to avoid permission issues. The package
# creates this folder for you.
#
# Check your permissions:
# If you are running ntfy with systemd, make sure this user database file is owned by the
# ntfy user and group by running: chown ntfy.ntfy <filename>.
#
# auth-file: <filename>
# auth-default-access: "read-write"
# auth-startup-queries:
# If set, the X-Forwarded-For header is used to determine the visitor IP address
# instead of the remote address of the connection.
#
# WARNING: If you are behind a proxy, you must set this, otherwise all visitors are rate limited
# as if they are one.
#
# behind-proxy: false
# If enabled, clients can attach files to notifications as attachments. Minimum settings to enable attachments
# are "attachment-cache-dir" and "base-url".
#
# - attachment-cache-dir is the cache directory for attached files
# - attachment-total-size-limit is the limit of the on-disk attachment cache directory (total size)
# - attachment-file-size-limit is the per-file attachment size limit (e.g. 300k, 2M, 100M)
# - attachment-expiry-duration is the duration after which uploaded attachments will be deleted (e.g. 3h, 20h)
#
# attachment-cache-dir:
# attachment-total-size-limit: "5G"
# attachment-file-size-limit: "15M"
# attachment-expiry-duration: "3h"
# If enabled, allow outgoing e-mail notifications via the 'X-Email' header. If this header is set,
# messages will additionally be sent out as e-mail using an external SMTP server.
#
# As of today, only SMTP servers with plain text auth (or no auth at all), and STARTLS are supported.
# Please also refer to the rate limiting settings below (visitor-email-limit-burst & visitor-email-limit-burst).
#
# - smtp-sender-addr is the hostname:port of the SMTP server
# - smtp-sender-from is the e-mail address of the sender
# - smtp-sender-user/smtp-sender-pass are the username and password of the SMTP user (leave blank for no auth)
#
# smtp-sender-addr:
# smtp-sender-from:
# smtp-sender-user:
# smtp-sender-pass:
# If enabled, ntfy will launch a lightweight SMTP server for incoming messages. Once configured, users can send
# emails to a topic e-mail address to publish messages to a topic.
#
# - smtp-server-listen defines the IP address and port the SMTP server will listen on, e.g. :25 or 1.2.3.4:25
# - smtp-server-domain is the e-mail domain, e.g. ntfy.sh
# - smtp-server-addr-prefix is an optional prefix for the e-mail addresses to prevent spam. If set to "ntfy-",
# for instance, only e-mails to ntfy-$topic@ntfy.sh will be accepted. If this is not set, all emails to
# $topic@ntfy.sh will be accepted (which may obviously be a spam problem).
#
# smtp-server-listen:
# smtp-server-domain:
# smtp-server-addr-prefix:
# Web Push support (background notifications for browsers)
#
# If enabled, allows ntfy to receive push notifications, even when the ntfy web app is closed. When enabled, users
# can enable background notifications in the web app. Once enabled, ntfy will forward published messages to the push
# endpoint, which will then forward it to the browser.
#
# You must configure web-push-public/private key, web-push-file, and web-push-email-address below to enable Web Push.
# Run "ntfy webpush keys" to generate the keys.
#
# - web-push-public-key is the generated VAPID public key, e.g. AA1234BBCCddvveekaabcdfqwertyuiopasdfghjklzxcvbnm1234567890
# - web-push-private-key is the generated VAPID private key, e.g. AA2BB1234567890abcdefzxcvbnm1234567890
# - web-push-file is a database file to keep track of browser subscription endpoints, e.g. `/var/cache/ntfy/webpush.db`
# - web-push-email-address is the admin email address send to the push provider, e.g. `sysadmin@example.com`
# - web-push-startup-queries is an optional list of queries to run on startup`
#
# web-push-public-key:
# web-push-private-key:
# web-push-file:
# web-push-email-address:
# web-push-startup-queries:
# If enabled, ntfy can perform voice calls via Twilio via the "X-Call" header.
#
# - twilio-account is the Twilio account SID, e.g. AC12345beefbeef67890beefbeef122586
# - twilio-auth-token is the Twilio auth token, e.g. affebeef258625862586258625862586
# - twilio-phone-number is the outgoing phone number you purchased, e.g. +18775132586
# - twilio-verify-service is the Twilio Verify service SID, e.g. VA12345beefbeef67890beefbeef122586
#
# twilio-account:
# twilio-auth-token:
# twilio-phone-number:
# twilio-verify-service:
# Interval in which keepalive messages are sent to the client. This is to prevent
# intermediaries closing the connection for inactivity.
#
# Note that the Android app has a hardcoded timeout at 77s, so it should be less than that.
#
# keepalive-interval: "45s"
# Interval in which the manager prunes old messages, deletes topics
# and prints the stats.
#
# manager-interval: "1m"
# Defines topic names that are not allowed, because they are otherwise used. There are a few default topics
# that cannot be used (e.g. app, account, settings, ...). To extend the default list, define them here.
#
# Example:
# disallowed-topics:
# - about
# - pricing
# - contact
#
# disallowed-topics:
# Defines the root path of the web app, or disables the web app entirely.
#
# Can be any simple path, e.g. "/", "/app", or "/ntfy". For backwards-compatibility reasons,
# the values "app" (maps to "/"), "home" (maps to "/app"), or "disable" (maps to "") to disable
# the web app entirely.
#
# web-root: /
# Various feature flags used to control the web app, and API access, mainly around user and
# account management.
#
# - enable-signup allows users to sign up via the web app, or API
# - enable-login allows users to log in via the web app, or API
# - enable-reservations allows users to reserve topics (if their tier allows it)
#
# enable-signup: false
# enable-login: false
# enable-reservations: false
# Server URL of a Firebase/APNS-connected ntfy server (likely "https://ntfy.sh").
#
# iOS users:
# If you use the iOS ntfy app, you MUST configure this to receive timely notifications. You'll like want this:
# upstream-base-url: "https://ntfy.sh"
#
# If set, all incoming messages will publish a "poll_request" message to the configured upstream server, containing
# the message ID of the original message, instructing the iOS app to poll this server for the actual message contents.
# This is to prevent the upstream server and Firebase/APNS from being able to read the message.
#
# - upstream-base-url is the base URL of the upstream server. Should be "https://ntfy.sh".
# - upstream-access-token is the token used to authenticate with the upstream server. This is only required
# if you exceed the upstream rate limits, or the uptream server requires authentication.
#
# upstream-base-url:
# upstream-access-token:
# Rate limiting: Total number of topics before the server rejects new topics.
#
# global-topic-limit: 15000
# Rate limiting: Number of subscriptions per visitor (IP address)
#
# visitor-subscription-limit: 30
# Rate limiting: Allowed GET/PUT/POST requests per second, per visitor:
# - visitor-request-limit-burst is the initial bucket of requests each visitor has
# - visitor-request-limit-replenish is the rate at which the bucket is refilled
# - visitor-request-limit-exempt-hosts is a comma-separated list of hostnames, IPs or CIDRs to be
# exempt from request rate limiting. Hostnames are resolved at the time the server is started.
# Example: "1.2.3.4,ntfy.example.com,8.7.6.0/24"
#
# visitor-request-limit-burst: 60
# visitor-request-limit-replenish: "5s"
# visitor-request-limit-exempt-hosts: ""
# Rate limiting: Hard daily limit of messages per visitor and day. The limit is reset
# every day at midnight UTC. If the limit is not set (or set to zero), the request
# limit (see above) governs the upper limit.
#
# visitor-message-daily-limit: 0
# Rate limiting: Allowed emails per visitor:
# - visitor-email-limit-burst is the initial bucket of emails each visitor has
# - visitor-email-limit-replenish is the rate at which the bucket is refilled
#
# visitor-email-limit-burst: 16
# visitor-email-limit-replenish: "1h"
# Rate limiting: Attachment size and bandwidth limits per visitor:
# - visitor-attachment-total-size-limit is the total storage limit used for attachments per visitor
# - visitor-attachment-daily-bandwidth-limit is the total daily attachment download/upload traffic limit per visitor
#
# visitor-attachment-total-size-limit: "100M"
# visitor-attachment-daily-bandwidth-limit: "500M"
# Rate limiting: Enable subscriber-based rate limiting (mostly used for UnifiedPush)
#
# If enabled, subscribers may opt to have published messages counted against their own rate limits, as opposed
# to the publisher's rate limits. This is especially useful to increase the amount of messages that high-volume
# publishers (e.g. Matrix/Mastodon servers) are allowed to send.
#
# Once enabled, a client may send a "Rate-Topics: <topic1>,<topic2>,..." header when subscribing to topics via
# HTTP stream, or websockets, thereby registering itself as the "rate visitor", i.e. the visitor whose rate limits
# to use when publishing on this topic. Note: Setting the rate visitor requires READ-WRITE permission on the topic.
#
# UnifiedPush only: If this setting is enabled, publishing to UnifiedPush topics will lead to a HTTP 507 response if
# no "rate visitor" has been previously registered. This is to avoid burning the publisher's "visitor-message-daily-limit".
#
# visitor-subscriber-rate-limiting: false
# Payments integration via Stripe
#
# - stripe-secret-key is the key used for the Stripe API communication. Setting this values
# enables payments in the ntfy web app (e.g. Upgrade dialog). See https://dashboard.stripe.com/apikeys.
# - stripe-webhook-key is the key required to validate the authenticity of incoming webhooks from Stripe.
# Webhooks are essential up keep the local database in sync with the payment provider. See https://dashboard.stripe.com/webhooks.
# - billing-contact is an email address or website displayed in the "Upgrade tier" dialog to let people reach
# out with billing questions. If unset, nothing will be displayed.
#
# stripe-secret-key:
# stripe-webhook-key:
# billing-contact:
# Metrics
#
# ntfy can expose Prometheus-style metrics via a /metrics endpoint, or on a dedicated listen IP/port.
# Metrics may be considered sensitive information, so before you enable them, be sure you know what you are
# doing, and/or secure access to the endpoint in your reverse proxy.
#
# - enable-metrics enables the /metrics endpoint for the default ntfy server (i.e. HTTP, HTTPS and/or Unix socket)
# - metrics-listen-http exposes the metrics endpoint via a dedicated [IP]:port. If set, this option implicitly
# enables metrics as well, e.g. "10.0.1.1:9090" or ":9090"
#
# enable-metrics: false
# metrics-listen-http:
# Profiling
#
# ntfy can expose Go's net/http/pprof endpoints to support profiling of the ntfy server. If enabled, ntfy will listen
# on a dedicated listen IP/port, which can be accessed via the web browser on http://<ip>:<port>/debug/pprof/.
# This can be helpful to expose bottlenecks, and visualize call flows. See https://pkg.go.dev/net/http/pprof for details.
#
# profile-listen-http:
# Logging options
#
# By default, ntfy logs to the console (stderr), with an "info" log level, and in a human-readable text format.
# ntfy supports five different log levels, can also write to a file, log as JSON, and even supports granular
# log level overrides for easier debugging. Some options (log-level and log-level-overrides) can be hot reloaded
# by calling "kill -HUP $pid" or "systemctl reload ntfy".
#
# - log-format defines the output format, can be "text" (default) or "json"
# - log-file is a filename to write logs to. If this is not set, ntfy logs to stderr.
# - log-level defines the default log level, can be one of "trace", "debug", "info" (default), "warn" or "error".
# Be aware that "debug" (and particularly "trace") can be VERY CHATTY. Only turn them on briefly for debugging purposes.
# - log-level-overrides lets you override the log level if certain fields match. This is incredibly powerful
# for debugging certain parts of the system (e.g. only the account management, or only a certain visitor).
# This is an array of strings in the format:
# - "field=value -> level" to match a value exactly, e.g. "tag=manager -> trace"
# - "field -> level" to match any value, e.g. "time_taken_ms -> debug"
# Warning: Using log-level-overrides has a performance penalty. Only use it for temporary debugging.
#
# Check your permissions:
# If you are running ntfy with systemd, make sure this log file is owned by the
# ntfy user and group by running: chown ntfy.ntfy <filename>.
#
# Example (good for production):
# log-level: info
# log-format: json
# log-file: /var/log/ntfy.log
#
# Example level overrides (for debugging, only use temporarily):
# log-level-overrides:
# - "tag=manager -> trace"
# - "visitor_ip=1.2.3.4 -> debug"
# - "time_taken_ms -> debug"
#
# log-level: info
# log-level-overrides:
# log-format: text
# log-file:

View file

@ -0,0 +1,57 @@
version: "3"
services:
server:
image: ollama/ollama
container_name: ollama
ports:
- 11434:11434
environment:
- USER_UID=1000
- USER_GID=1000
- OLLAMA_ORIGINS=http://192.168.178.43:*,https://llm.ghoscht.com
- OLLAMA_HOST=0.0.0.0
restart: always
volumes:
- ollama_data:/root/.ollama
networks:
traefik_net:
dns:
- 1.1.1.1
labels:
- traefik.enable=true
- traefik.http.routers.ollama.entrypoints=websecure
- traefik.http.routers.ollama.rule=Host(`ollama.ghoscht.com`)
- traefik.http.services.ollama.loadbalancer.server.port=11434
- traefik.docker.network=traefik-net
- traefik.http.routers.ollama.tls=true
- traefik.http.routers.ollama.tls.certresolver=lencrypt
- traefik.http.middlewares.cors.headers.customResponseHeaders.Access-Control-Allow-Origin=https://llm.ghoscht.com
- "traefik.http.middlewares.cors.headers.accesscontrolallowmethods=*"
- "traefik.http.middlewares.cors.headers.accesscontrolalloworiginlist=*"
- "traefik.http.middlewares.cors.headers.accesscontrolmaxage=100"
- "traefik.http.middlewares.cors.headers.addvaryheader=true"
- "traefik.http.middlewares.cors.headers.accesscontrolallowheaders=*"
- traefik.http.routers.ollama.middlewares=cors
  # Web frontend for Ollama; calls the API through the public HTTPS endpoint
  # configured above (PUBLIC_API_BASE_URL).
  webui:
    image: ollamawebui/ollama-webui
    container_name: ollama-webui
    restart: always
    environment:
      - PUBLIC_API_BASE_URL=https://ollama.ghoscht.com/api
    networks:
      traefik_net:
    labels:
      - traefik.enable=true
      - traefik.http.routers.ollama-webui.entrypoints=websecure
      - traefik.http.routers.ollama-webui.rule=Host(`llm.ghoscht.com`)
      - traefik.http.services.ollama-webui.loadbalancer.server.port=8080
      - traefik.docker.network=traefik-net
      - traefik.http.routers.ollama-webui.tls=true
      - traefik.http.routers.ollama-webui.tls.certresolver=lencrypt
# Joins the pre-existing shared Traefik network (created outside this file)
# instead of letting compose create a project-scoped one.
networks:
  traefik_net:
    name: traefik-net
    external: true
volumes:
  ollama_data:
    name: ollama_data

View file

@ -0,0 +1,96 @@
version: '2.3'
services:
postgres:
container_name: synapse_db
image: postgres:14
restart: unless-stopped
volumes:
- /mnt/hdd/docker/synapse_db:/var/lib/postgresql/data
# These will be used in homeserver.yaml later on
environment:
- POSTGRES_DB=synapse
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=EjZ5AWAZAme2YvSr8uoWMm7csmXGY3rq
networks:
db_net:
  # Synapse Matrix homeserver; reachable through Traefik on several hostnames.
  synapse:
    container_name: synapse
    image: matrixdotorg/synapse:latest
    restart: unless-stopped
    volumes:
      - /mnt/hdd/docker/matrix/synapse_data/:/data
    environment:
      UID: "1000"
      GID: "1000"
      TZ: "Europe/Berlin"
    labels:
      - traefik.enable=true
      - traefik.http.routers.synapse.entrypoints=websecure
      - traefik.http.routers.synapse.rule=Host(`synapse.ghoscht.com`, `localsynapse.ghoscht.com`,`synapse.local.ghoscht.com`)
      - traefik.docker.network=traefik-net
      - traefik.http.routers.synapse.tls=true
      - traefik.http.routers.synapse.tls.certresolver=lencrypt
    networks:
      net:
      db_net:
#    profiles:
#      - donotstart
  # Element web client served at chat.ghoscht.com.
  element:
    container_name: element
    image: vectorim/element-web:latest
    restart: unless-stopped
    volumes:
      - /mnt/hdd/docker/element_data/element-config.json:/app/config.json
    labels:
      - traefik.enable=true
      - traefik.http.routers.element.entrypoints=websecure
      - traefik.http.routers.element.rule=Host(`chat.ghoscht.com`)
      - traefik.docker.network=traefik-net
      - traefik.http.routers.element.tls=true
      - traefik.http.routers.element.tls.certresolver=lencrypt
    networks:
      net:
  # Redis instance on the database network for Synapse.
  # NOTE(review): confirm homeserver.yaml actually references this host.
  redis:
    container_name: synapse_cache
    image: "redis:latest"
    restart: "unless-stopped"
    networks:
      db_net:
  # nginx serving the matrix.ghoscht.com well-known/static files from
  # the mounted matrix.conf and /var/www content.
  nginx:
    container_name: matrix_nginx
    image: "nginx:latest"
    restart: "unless-stopped"
    volumes:
      - /mnt/hdd/docker/matrix/nginx_data/matrix.conf:/etc/nginx/conf.d/matrix.conf
      - /mnt/hdd/docker/matrix/nginx_data/www:/var/www/
    labels:
      - traefik.enable=true
      - traefik.http.routers.matrix-nginx.entrypoints=websecure
      - traefik.http.routers.matrix-nginx.rule=Host(`matrix.ghoscht.com`, `localmatrix.ghoscht.com`,`matrix.local.ghoscht.com`)
      - traefik.docker.network=traefik-net
      - traefik.http.routers.matrix-nginx.tls=true
      - traefik.http.routers.matrix-nginx.tls.certresolver=lencrypt
    networks:
      net:
      db_net:
  # Optional services kept for reference, currently disabled.
#  cloudflared:
#    container_name: cloudflared
#    image: cloudflare/cloudflared:latest
#    restart: always
#    command: tunnel --no-autoupdate run
#    env_file:
#      - cloudflared.env
#    networks:
#      net:
#      db_net:
#  matterbridge:
#    container_name: matterbridge
#    image: 42wim/matterbridge:stable
#    restart: unless-stopped
# "net" is the shared external Traefik network; db_net is compose-managed and
# keeps postgres/redis off the proxy network.
networks:
  net:
    name: traefik-net
    external: true
  db_net:

View file

@ -0,0 +1,404 @@
version: "3.5"
services:
  # Jellyfin media server — serves the TV/anime/movie libraries from the HDD.
  jellyfin:
    image: jellyfin/jellyfin:latest
    container_name: jellyfin
    restart: always
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
    volumes:
      - jellyfin_data:/config
      - jellyfin_cache:/cache
      - /mnt/hdd/data/media/tv:/tv
      - /mnt/hdd/data/media/anime:/anime
      - /mnt/hdd/data/media/movies:/movies
    labels:
      - traefik.enable=true
      - traefik.http.routers.jellyfin.entrypoints=websecure
      - traefik.http.routers.jellyfin.rule=Host(`jellyfin.ghoscht.com`)
      - traefik.http.services.jellyfin.loadbalancer.server.port=8096
      - traefik.http.services.jellyfin.loadbalancer.passHostHeader=true
      - traefik.http.routers.jellyfin.tls=true
      - traefik.http.routers.jellyfin.tls.certresolver=lencrypt
    networks:
      traefik_net:
    dns:
      - 1.1.1.1
    # 8096 is also published directly, bypassing Traefik (e.g. LAN clients).
    ports:
      - 8096:8096
  # Navidrome music streamer (Subsonic-compatible), library on the HDD.
  navidrome:
    image: deluan/navidrome:latest
    container_name: navidrome
    restart: always
    environment:
      - ND_SESSIONTIMEOUT=336h
    env_file:
      - navidrome_secrets.env
    volumes:
      - navidrome_data:/data
      - /mnt/hdd/data/media/music:/music
    labels:
      - traefik.enable=true
      - traefik.http.routers.navidrome.entrypoints=websecure
      - traefik.http.routers.navidrome.rule=Host(`navidrome.ghoscht.com`)
      - traefik.http.services.navidrome.loadbalancer.server.port=4533
      - traefik.http.routers.navidrome.tls=true
      - traefik.http.routers.navidrome.tls.certresolver=lencrypt
    networks:
      traefik_net:
    dns:
      - 1.1.1.1
  # Komga comic/book server; config bind-mounted instead of a named volume.
  komga:
    image: gotson/komga
    container_name: komga
    volumes:
      - /mnt/hdd/docker/komga:/config
      - /mnt/hdd/data/:/data
    # 25600 is also published directly, bypassing Traefik.
    ports:
      - 25600:25600
    user: "1000:1000"
    environment:
      - TZ=Europe/Berlin
    restart: unless-stopped
    labels:
      - traefik.enable=true
      - traefik.http.routers.komga.entrypoints=websecure
      - traefik.http.routers.komga.rule=Host(`komga.ghoscht.com`)
      - traefik.http.services.komga.loadbalancer.server.port=25600
      - traefik.http.routers.komga.tls=true
      - traefik.http.routers.komga.tls.certresolver=lencrypt
    networks:
      traefik_net:
    dns:
      - 1.1.1.1
  # The *arr stack below runs with network_mode: service:vpn, i.e. inside the
  # VPN container's network namespace — all their ports are reachable via the
  # "transmission" hostname and all traffic goes through the tunnel.
  prowlarr:
    image: linuxserver/prowlarr:latest
    container_name: prowlarr
    restart: always
    environment:
      - TZ=Europe/Berlin
      - PUID=1000
      - PGID=1000
    volumes:
      - prowlarr_data:/config
    labels:
      - traefik.enable=true
      - traefik.http.routers.prowlarr.entrypoints=websecure
      - traefik.http.routers.prowlarr.rule=Host(`prowlarr.ghoscht.com`)
      - traefik.http.services.prowlarr.loadbalancer.server.port=9696
      - traefik.docker.network=traefik-net
      - traefik.http.routers.prowlarr.tls=true
      - traefik.http.routers.prowlarr.tls.certresolver=lencrypt
    network_mode: service:vpn
    depends_on:
      vpn:
        condition: service_healthy
  # Sonarr (TV) — shares the HDD data mount for hardlink-friendly imports.
  sonarr:
    image: linuxserver/sonarr:latest
    container_name: sonarr
    restart: always
    environment:
      - TZ=Europe/Berlin
      - PUID=1000
      - PGID=1000
    volumes:
      - sonarr_data:/config
      - /mnt/hdd/data:/data
    labels:
      - traefik.enable=true
      - traefik.http.routers.sonarr.entrypoints=websecure
      - traefik.http.routers.sonarr.rule=Host(`sonarr.ghoscht.com`)
      - traefik.http.services.sonarr.loadbalancer.server.port=8989
      - traefik.docker.network=traefik-net
      - traefik.http.routers.sonarr.tls=true
      - traefik.http.routers.sonarr.tls.certresolver=lencrypt
    network_mode: service:vpn
    depends_on:
      vpn:
        condition: service_healthy
      prowlarr:
        condition: service_started
  # Radarr (movies).
  radarr:
    image: linuxserver/radarr:latest
    container_name: radarr
    restart: always
    environment:
      - TZ=Europe/Berlin
      - PUID=1000
      - PGID=1000
    volumes:
      - radarr_data:/config
      - /mnt/hdd/data:/data
    labels:
      - traefik.enable=true
      - traefik.http.routers.radarr.entrypoints=websecure
      - traefik.http.routers.radarr.rule=Host(`radarr.ghoscht.com`)
      - traefik.http.services.radarr.loadbalancer.server.port=7878
      - traefik.docker.network=traefik-net
      - traefik.http.routers.radarr.tls=true
      - traefik.http.routers.radarr.tls.certresolver=lencrypt
    network_mode: service:vpn
    depends_on:
      vpn:
        condition: service_healthy
      prowlarr:
        condition: service_started
  # Lidarr (music) — config bind-mounted; the two ./lidarr mounts hook in the
  # arr-scripts custom services and init scripts shipped next to this file.
  lidarr:
    image: linuxserver/lidarr:latest
    container_name: lidarr
    restart: always
    environment:
      - TZ=Europe/Berlin
      - PUID=1000
      - PGID=1000
    volumes:
      - /mnt/hdd/docker/media/lidarr_data:/config
      - /mnt/hdd/data:/data
      - ./lidarr/custom-services.d:/custom-services.d
      - ./lidarr/custom-cont-init.d:/custom-cont-init.d
    labels:
      - traefik.enable=true
      - traefik.http.routers.lidarr.entrypoints=websecure
      - traefik.http.routers.lidarr.rule=Host(`lidarr.ghoscht.com`)
      - traefik.http.services.lidarr.loadbalancer.server.port=8686
      - traefik.http.routers.lidarr.service=lidarr
      - traefik.docker.network=traefik-net
      - traefik.http.routers.lidarr.tls=true
      - traefik.http.routers.lidarr.tls.certresolver=lencrypt
    network_mode: service:vpn
    depends_on:
      vpn:
        condition: service_healthy
      prowlarr:
        condition: service_started
  # Bazarr subtitle manager — on the Traefik network directly (not behind the VPN).
  bazarr:
    image: hotio/bazarr:latest
    container_name: bazarr
    restart: always
    environment:
      - TZ=Europe/Berlin
      - PUID=1000
      - PGID=1000
    volumes:
      - bazarr_data:/config
      - /mnt/hdd/data:/data
    labels:
      - traefik.enable=true
      - traefik.http.routers.bazarr.entrypoints=websecure
      - traefik.http.routers.bazarr.rule=Host(`bazarr.ghoscht.com`)
      - traefik.http.services.bazarr.loadbalancer.server.port=6767
      - traefik.docker.network=traefik-net
      - traefik.http.routers.bazarr.tls=true
      - traefik.http.routers.bazarr.tls.certresolver=lencrypt
    networks:
      traefik_net:
    dns:
      - 1.1.1.1
  # Jellyseerr request frontend for Jellyfin.
  jellyseerr:
    container_name: jellyseerr
    image: fallenbagel/jellyseerr:latest
    restart: always
    environment:
      - TZ=Europe/Berlin
      - PUID=1000
      - PGID=1000
    volumes:
      - jellyseerr_data:/app/config
    labels:
      - traefik.enable=true
      - traefik.http.routers.jellyseerr.entrypoints=websecure
      - traefik.http.routers.jellyseerr.rule=Host(`jellyseerr.ghoscht.com`)
      - traefik.http.services.jellyseerr.loadbalancer.server.port=5055
      - traefik.docker.network=traefik-net
      - traefik.http.routers.jellyseerr.tls=true
      - traefik.http.routers.jellyseerr.tls.certresolver=lencrypt
    networks:
      traefik_net:
    depends_on:
      - jellyfin
    dns:
      - 1.1.1.1
  # Transmission behind a Windscribe OpenVPN tunnel. The *arr services above
  # join this container's network namespace (network_mode: service:vpn), so
  # they are addressed via the "transmission" container name and their
  # depends_on service_healthy gates rely on this image's built-in healthcheck.
  vpn:
    image: haugene/transmission-openvpn
    container_name: transmission
    restart: always
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/Berlin
      - OPENVPN_PROVIDER=WINDSCRIBE
      - OPENVPN_CONFIG=Vienna-Boltzmann-udp
      - OVPN_PROTOCOL=udp
      # Restart the tunnel when pings go unanswered instead of hanging.
      - OPENVPN_OPTS=--pull-filter ignore ping --ping 10 --ping-restart 120
      - LOCAL_NETWORK=192.168.0.0/16
      - TRANSMISSION_DOWNLOAD_DIR=/data/torrents
      - TRANSMISSION_INCOMPLETE_DIR=/data/torrents/incomplete
      - TRANSMISSION_WEB_UI=flood-for-transmission
    env_file:
      - transmission_secrets.env
    volumes:
      - transmission_data:/config
      - /mnt/hdd/data:/data
    labels:
      - traefik.enable=true
      - traefik.http.routers.transmission.entrypoints=websecure
      - traefik.http.routers.transmission.rule=Host(`transmission.ghoscht.com`)
      - traefik.http.services.transmission.loadbalancer.server.port=9091
      - traefik.docker.network=traefik-net
      - traefik.http.routers.transmission.tls=true
      - traefik.http.routers.transmission.tls.certresolver=lencrypt
    networks:
      traefik_net:
    ports:
      - 1080:1080 # socks proxy
    cap_add:
      - NET_ADMIN
    dns:
      - 1.1.1.1
koblas:
image: ynuwenhof/koblas:latest
container_name: socks5
restart: unless-stopped
environment:
RUST_LOG: debug
KOBLAS_LIMIT: 256
KOBLAS_NO_AUTHENTICATION: true
KOBLAS_ANONYMIZATION: true
network_mode: service:vpn
depends_on:
vpn:
condition: service_healthy
  # Unpackerr — watches the torrent paths and extracts archives for the *arr
  # apps. URLs use the "transmission" hostname because sonarr/radarr/lidarr
  # share the VPN container's network stack.
  # NOTE(review): the *arr API keys are committed in plaintext here — consider
  # moving them to an env_file like the other service secrets.
  unpackerr:
    image: golift/unpackerr
    container_name: unpackerr
    volumes:
      - /mnt/hdd/data:/data
    restart: always
    user: 1000:1000
    environment:
      - TZ=Europe/Berlin
      # General config
      - UN_DEBUG=false
      - UN_INTERVAL=2m
      - UN_START_DELAY=1m
      - UN_RETRY_DELAY=5m
      - UN_MAX_RETRIES=3
      - UN_PARALLEL=1
      - UN_FILE_MODE=0644
      - UN_DIR_MODE=0755
      # Sonarr Config
      - UN_SONARR_0_URL=http://transmission:8989
      - UN_SONARR_0_API_KEY=e0d0c7fcba7c40d082849ec899205225
      - UN_SONARR_0_PATHS_0=/data/torrents/tv
      - UN_SONARR_0_PROTOCOLS=torrent
      - UN_SONARR_0_TIMEOUT=10s
      - UN_SONARR_0_DELETE_ORIG=false
      - UN_SONARR_0_DELETE_DELAY=5m
      # Radarr Config
      - UN_RADARR_0_URL=http://transmission:7878
      - UN_RADARR_0_API_KEY=e54a37ae42df43bfa4d4bdbad7974d93
      - UN_RADARR_0_PATHS_0=/data/torrents/movies
      - UN_RADARR_0_PROTOCOLS=torrent
      - UN_RADARR_0_TIMEOUT=10s
      - UN_RADARR_0_DELETE_ORIG=false
      - UN_RADARR_0_DELETE_DELAY=5m
      # Lidarr Config
      - UN_LIDARR_0_URL=http://transmission:8686
      - UN_LIDARR_0_API_KEY=0acedbcf8d6243adb17417a10fdaf00a
      - UN_LIDARR_0_PATHS_0=/data/torrents/music
      - UN_LIDARR_0_PROTOCOLS=torrent
      - UN_LIDARR_0_TIMEOUT=10s
      - UN_LIDARR_0_DELETE_ORIG=false
      - UN_LIDARR_0_DELETE_DELAY=5m
    security_opt:
      - no-new-privileges:true
    networks:
      traefik_net:
    depends_on:
      - sonarr
      - radarr
      - lidarr
  # Deemix downloader — routed through the VPN container's network namespace.
  deemix:
    container_name: deemix
    image: finniedj/deemix
    restart: always
    environment:
      - PUID=1000
      - PGID=1000
      - UMASK_SET=022
    volumes:
      - deemix_data:/config
      - /mnt/hdd/data/deemix/music:/downloads
    labels:
      - traefik.enable=true
      - traefik.http.routers.deemix.entrypoints=websecure
      - traefik.http.routers.deemix.rule=Host(`deemix.ghoscht.com`)
      - traefik.http.services.deemix.loadbalancer.server.port=6595
      - traefik.docker.network=traefik-net
      - traefik.http.routers.deemix.tls=true
      - traefik.http.routers.deemix.tls.certresolver=lencrypt
    network_mode: service:vpn
    depends_on:
      vpn:
        condition: service_healthy
  # Autobrr release grabber — also behind the VPN, started after the *arrs.
  autobrr:
    container_name: autobrr
    image: ghcr.io/autobrr/autobrr:latest
    restart: always
    environment:
      - TZ=Europe/Berlin
    volumes:
      - autobrr_data:/config
    labels:
      - traefik.enable=true
      - traefik.http.routers.autobrr.entrypoints=websecure
      - traefik.http.routers.autobrr.rule=Host(`autobrr.ghoscht.com`)
      - traefik.http.services.autobrr.loadbalancer.server.port=7474
      - traefik.docker.network=traefik-net
      - traefik.http.routers.autobrr.tls=true
      - traefik.http.routers.autobrr.tls.certresolver=lencrypt
    network_mode: service:vpn
    depends_on:
      vpn:
        condition: service_healthy
      prowlarr:
        condition: service_started
      radarr:
        condition: service_started
      sonarr:
        condition: service_started
# Shared external Traefik network plus named config volumes for each service.
# NOTE(review): windscribe_data and rarbg_data are declared but not mounted by
# any service in this file — confirm they are still needed.
networks:
  traefik_net:
    name: traefik-net
    external: true
volumes:
  jellyfin_data:
    name: jellyfin_data
  windscribe_data:
    name: windscribe_data
  jellyfin_cache:
    name: jellyfin_cache
  transmission_data:
    name: transmission_data
  sonarr_data:
    name: sonarr_data
  prowlarr_data:
    name: prowlarr_data
  radarr_data:
    name: radarr_data
  bazarr_data:
    name: bazarr_data
  jellyseerr_data:
    name: jellyseerr_data
  lidarr_data:
    name: lidarr_data
  navidrome_data:
    name: navidrome_data
  deemix_data:
    name: deemix_data
  rarbg_data:
    name: rarbg_data
  autobrr_data:
    name: autobrr_data

View file

@ -0,0 +1,3 @@
#!/usr/bin/with-contenv bash
# Container init hook: bootstrap the arr-scripts extended setup for Lidarr.
# -fsSL: fail on HTTP errors (so an HTML error page is never piped into bash),
# stay silent except for real errors, and follow redirects.
curl -fsSL https://raw.githubusercontent.com/RandomNinjaAtk/arr-scripts/main/lidarr/setup.bash | bash
exit

View file

@ -0,0 +1,32 @@
#!/usr/bin/with-contenv bash
# ARLChecker — runs the bundled Python ARL-token check once, then sleeps for
# the configured interval. Presumably restarted by the container's service
# supervisor after the sleep — TODO confirm.
### Default values
scriptVersion="1.5"
scriptName="ARLChecker"
sleepInterval='24h'
### Import Settings
source /config/extended.conf
#### Import Functions
source /config/extended/functions
# Only relevant when Deezer is a download source; a pure-Tidal setup parks
# this service forever instead of exiting (so the supervisor won't respawn it).
if [ "$dlClientSource" == "tidal" ]; then
	log "Script is not enabled, enable by setting dlClientSource to \"deezer\" or \"both\" by modifying the \"/config/extended.conf\" config file..."
	log "Sleeping (infinity)"
	sleep infinity
fi
log "Starting ARL Token Check..."
# run py script
python /custom-services.d/python/ARLChecker.py -c
# If variable doesn't exist, or not set by user in extended.conf, fallback to 24h
# See issue #189
if [[ -v arlUpdateInterval ]] && [ "$arlUpdateInterval" != "" ]
then
	log "Found Interval in extended.conf"
	sleepInterval="$arlUpdateInterval"
else
	log "Interval Fallback"
fi
log "ARL Token Check Complete. Sleeping for ${sleepInterval}."
sleep ${sleepInterval}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,322 @@
#!/usr/bin/with-contenv bash
# AutoArtistAdder — periodically adds Deezer top/related (and Tidal related)
# artists to Lidarr. Config and the log/API helper functions come from the
# sourced files below.
scriptVersion="2.1"
scriptName="AutoArtistAdder"
### Import Settings
source /config/extended.conf
#### Import Functions
source /config/extended/functions
# Validate settings from extended.conf: park the service (sleep infinity) when
# no adder feature is enabled, then apply defaults for the run interval and
# the monitored flag.
verifyConfig () {
	if echo "$addDeezerTopArtists $addDeezerTopAlbumArtists $addDeezerTopTrackArtists $addRelatedArtists" | grep -i "true" | read; then
		sleep 0.01
	else
		log "Script is not enabled, enable by setting addDeezerTopArtists. addDeezerTopAlbumArtists, addDeezerTopTrackArtists or addRelatedArtists to \"true\" by modifying the \"/config/extended.conf\" config file..."
		log "Sleeping (infinity)"
		sleep infinity
	fi
	# Default interval between main-loop iterations.
	if [ -z "$autoArtistAdderInterval" ]; then
		autoArtistAdderInterval="12h"
	fi
	# Coerce autoArtistAdderMonitored to a strict "true"/"false" string.
	if [ -z "$autoArtistAdderMonitored" ]; then
		autoArtistAdderMonitored="true"
	elif [ "$autoArtistAdderMonitored" != "true" ]; then
		autoArtistAdderMonitored="false"
	fi
}
# Delay between consecutive Deezer/Tidal API calls (rate limiting).
sleepTimer=0.5

# Post a JSON event to the configured webhook; no-op when $webHook is unset.
# $1 = event name, $2 = message text.
NotifyWebhook () {
	if [ "$webHook" ]
	then
		content="$1: $2"
		# Fixed: the URL was previously written as "{$webHook}", which expands
		# to the target wrapped in literal braces and only worked by accident
		# through curl's URL globbing. Use plain variable expansion.
		curl -s -X POST "$webHook" -H 'Content-Type: application/json' -d '{"event":"'"$1"'", "message":"'"$2"'", "content":"'"$content"'"}'
	fi
}
# Each AddDeezerTop* helper fetches a Deezer chart ($1 = limit), fills the
# globals consumed by AddDeezerArtistToLidarr (getDeezerArtistsIds array,
# getDeezerArtistsIdsCount, description) and then delegates to it.

# Top chart artists.
AddDeezerTopArtists () {
	getDeezerArtistsIds=$(curl -s "https://api.deezer.com/chart/0/artists?limit=$1" | jq -r ".data[].id")
	getDeezerArtistsIdsCount=$(echo "$getDeezerArtistsIds" | wc -l)
	getDeezerArtistsIds=($(echo "$getDeezerArtistsIds"))
	sleep $sleepTimer
	description="Top Artists"
	AddDeezerArtistToLidarr
}

# Artists of the top chart albums.
AddDeezerTopAlbumArtists () {
	getDeezerArtistsIds=$(curl -s "https://api.deezer.com/chart/0/albums?limit=$1" | jq -r ".data[].artist.id")
	getDeezerArtistsIdsCount=$(echo "$getDeezerArtistsIds" | wc -l)
	getDeezerArtistsIds=($(echo "$getDeezerArtistsIds"))
	sleep $sleepTimer
	description="Top Album Artists"
	AddDeezerArtistToLidarr
}

# Artists of the top chart tracks.
AddDeezerTopTrackArtists () {
	getDeezerArtistsIds=$(curl -s "https://api.deezer.com/chart/0/tracks?limit=$1" | jq -r ".data[].artist.id")
	getDeezerArtistsIdsCount=$(echo "$getDeezerArtistsIds" | wc -l)
	getDeezerArtistsIds=($(echo "$getDeezerArtistsIds"))
	sleep $sleepTimer
	description="Top Track Artists"
	AddDeezerArtistToLidarr
}
# Add every Deezer artist id in the caller-provided globals to Lidarr, unless
# it is already linked, already present, or on the import-exclusion list.
# Reads globals: getDeezerArtistsIds (array), getDeezerArtistsIdsCount,
# description, arrUrl, arrApiKey, autoArtistAdderMonitored, lidarrSearchForMissing.
AddDeezerArtistToLidarr () {
	lidarrArtistsData="$(curl -s "$arrUrl/api/v1/artist?apikey=${arrApiKey}")"
	lidarrArtistIds="$(echo "${lidarrArtistsData}" | jq -r ".[].foreignArtistId")"
	deezerArtistsUrl=$(echo "${lidarrArtistsData}" | jq -r ".[].links | .[] | select(.name==\"deezer\") | .url")
	# Deezer ids already linked in Lidarr, extracted from the artists' Deezer URLs.
	deezerArtistIds="$(echo "$deezerArtistsUrl" | grep -o '[[:digit:]]*' | sort -u)"
	log "Finding $description..."
	log "$getDeezerArtistsIdsCount $description Found..."
	for id in ${!getDeezerArtistsIds[@]}; do
		currentprocess=$(( $id + 1 ))
		deezerArtistId="${getDeezerArtistsIds[$id]}"
		deezerArtistName="$(curl -s https://api.deezer.com/artist/$deezerArtistId | jq -r .name)"
		deezerArtistNameEncoded="$(jq -R -r @uri <<<"$deezerArtistName")"
		sleep $sleepTimer
		log "$currentprocess of $getDeezerArtistsIdsCount :: $deezerArtistName :: Searching Musicbrainz for Deezer artist id ($deezerArtistId)"
		# Skip ids whose Deezer link is already attached to a Lidarr artist.
		if echo "$deezerArtistIds" | grep "^${deezerArtistId}$" | read; then
			log "$currentprocess of $getDeezerArtistsIdsCount :: $deezerArtistName :: $deezerArtistId already in Lidarr..."
			continue
		fi
		# Resolve the Deezer artist to a Musicbrainz entry via Lidarr's search.
		lidarrArtistSearchData="$(curl -s "$arrUrl/api/v1/search?term=${deezerArtistNameEncoded}&apikey=${arrApiKey}")"
		lidarrArtistMatchedData=$(echo $lidarrArtistSearchData | jq -r ".[] | select(.artist) | select(.artist.links[].name==\"deezer\") | select(.artist.links[].url | contains (\"artist/$deezerArtistId\"))" 2>/dev/null)
		if [ ! -z "$lidarrArtistMatchedData" ]; then
			data="$lidarrArtistMatchedData"
			artistName="$(echo "$data" | jq -r ".artist.artistName" | head -n1)"
			foreignId="$(echo "$data" | jq -r ".foreignId" | head -n1)"
			importListExclusionData=$(curl -s "$arrUrl/api/v1/importlistexclusion" -H "X-Api-Key: $arrApiKey" | jq -r ".[].foreignId")
			if echo "$importListExclusionData" | grep "^${foreignId}$" | read; then
				log "$currentprocess of $getDeezerArtistsIdsCount :: $deezerArtistName :: ERROR :: Artist is on import exclusion block list, skipping...."
				continue
			fi
			# Use the first configured root folder / default profiles for the add.
			data=$(curl -s "$arrUrl/api/v1/rootFolder" -H "X-Api-Key: $arrApiKey" | jq -r ".[]")
			path="$(echo "$data" | jq -r ".path")"
			path=$(echo $path | cut -d' ' -f1)
			qualityProfileId="$(echo "$data" | jq -r ".defaultQualityProfileId")"
			qualityProfileId=$(echo $qualityProfileId | cut -d' ' -f1)
			metadataProfileId="$(echo "$data" | jq -r ".defaultMetadataProfileId")"
			metadataProfileId=$(echo $metadataProfileId | cut -d' ' -f1)
			data="{
				\"artistName\": \"$artistName\",
				\"foreignArtistId\": \"$foreignId\",
				\"qualityProfileId\": $qualityProfileId,
				\"metadataProfileId\": $metadataProfileId,
				\"monitored\":$autoArtistAdderMonitored,
				\"monitor\":\"all\",
				\"rootFolderPath\": \"$path\",
				\"addOptions\":{\"searchForMissingAlbums\":$lidarrSearchForMissing}
				}"
			if echo "$lidarrArtistIds" | grep "^${foreignId}$" | read; then
				log "$currentprocess of $getDeezerArtistsIdsCount :: $deezerArtistName :: Already in Lidarr ($foreignId), skipping..."
				continue
			fi
			log "$currentprocess of $getDeezerArtistsIdsCount :: $deezerArtistName :: Adding $artistName to Lidarr ($foreignId)..."
			LidarrTaskStatusCheck
			lidarrAddArtist=$(curl -s "$arrUrl/api/v1/artist" -X POST -H 'Content-Type: application/json' -H "X-Api-Key: $arrApiKey" --data-raw "$data")
		else
			log "$currentprocess of $getDeezerArtistsIdsCount :: $deezerArtistName :: Artist not found in Musicbrainz, please add \"https://deezer.com/artist/${deezerArtistId}\" to the correct artist on Musicbrainz"
			NotifyWebhook "ArtistError" "Artist not found in Musicbrainz, please add <https://deezer.com/artist/${deezerArtistId}> to the correct artist on Musicbrainz"
		fi
		LidarrTaskStatusCheck
	done
}
# For each monitored Lidarr artist with a Deezer link, fetch its related
# artists from Deezer and feed them to AddDeezerArtistToLidarr.
AddDeezerRelatedArtists () {
	log "Begin adding Lidarr related Artists from Deezer..."
	lidarrArtistsData="$(curl -s "$arrUrl/api/v1/artist?apikey=${arrApiKey}")"
	lidarrArtistTotal=$(echo "${lidarrArtistsData}"| jq -r '.[].sortName' | wc -l)
	lidarrArtistList=($(echo "${lidarrArtistsData}" | jq -r ".[].foreignArtistId"))
	lidarrArtistIds="$(echo "${lidarrArtistsData}" | jq -r ".[].foreignArtistId")"
	lidarrArtistLinkDeezerIds="$(echo "${lidarrArtistsData}" | jq -r ".[] | .links[] | select(.name==\"deezer\") | .url" | grep -o '[[:digit:]]*')"
	log "$lidarrArtistTotal Artists Found"
	deezerArtistsUrl=$(echo "${lidarrArtistsData}" | jq -r ".[].links | .[] | select(.name==\"deezer\") | .url")
	# NOTE(review): this deezerArtistIds value is overwritten per-artist inside
	# the loop below, so the assignment here appears redundant.
	deezerArtistIds="$(echo "$deezerArtistsUrl" | grep -o '[[:digit:]]*' | sort -u)"
	for id in ${!lidarrArtistList[@]}; do
		artistNumber=$(( $id + 1 ))
		musicbrainzId="${lidarrArtistList[$id]}"
		lidarrArtistData=$(echo "${lidarrArtistsData}" | jq -r ".[] | select(.foreignArtistId==\"${musicbrainzId}\")")
		lidarrArtistName="$(echo "${lidarrArtistData}" | jq -r " .artistName")"
		deezerArtistUrl=$(echo "${lidarrArtistData}" | jq -r ".links | .[] | select(.name==\"deezer\") | .url")
		deezerArtistIds=($(echo "$deezerArtistUrl" | grep -o '[[:digit:]]*' | sort -u))
		lidarrArtistMonitored=$(echo "${lidarrArtistData}" | jq -r ".monitored")
		# NOTE(review): $wantedAlbumListSource is not set anywhere in this
		# script, so it likely logs empty — confirm against the sourced helpers.
		log "$artistNumber of $lidarrArtistTotal :: $wantedAlbumListSource :: $lidarrArtistName :: Adding Related Artists..."
		if [ "$lidarrArtistMonitored" == "false" ]; then
			log "$artistNumber of $lidarrArtistTotal :: $wantedAlbumListSource :: $lidarrArtistName :: Artist is not monitored :: skipping..."
			continue
		fi
		for dId in ${!deezerArtistIds[@]}; do
			deezerArtistId="${deezerArtistIds[$dId]}"
			deezerRelatedArtistData=$(curl -sL --fail "https://api.deezer.com/artist/$deezerArtistId/related?limit=$numberOfRelatedArtistsToAddPerArtist"| jq -r ".data | sort_by(.nb_fan) | reverse | .[]")
			sleep $sleepTimer
			# Populate the globals consumed by AddDeezerArtistToLidarr.
			getDeezerArtistsIds=($(echo $deezerRelatedArtistData | jq -r .id))
			getDeezerArtistsIdsCount=$(echo $deezerRelatedArtistData | jq -r .id | wc -l)
			description="$lidarrArtistName Related Artists"
			AddDeezerArtistToLidarr
		done
	done
}
# Block until Lidarr reports no running commands, polling every 2 seconds.
# Logs a single "busy" notice the first time it has to wait.
LidarrTaskStatusCheck () {
	alerted=no
	while true; do
		taskCount=$(curl -s "$arrUrl/api/v1/command?apikey=${arrApiKey}" | jq -r '.[] | select(.status=="started") | .name' | wc -l)
		if [ "$taskCount" -lt "1" ]; then
			break
		fi
		if [ "$alerted" == "no" ]; then
			alerted=yes
			log "STATUS :: LIDARR BUSY :: Pausing/waiting for all active Lidarr tasks to end..."
		fi
		sleep 2
	done
}
# For each monitored Lidarr artist with a Tidal link, fetch related artists
# from Tidal's page API and hand them to AddTidalArtistToLidarr.
AddTidalRelatedArtists () {
	log "Begin adding Lidarr related Artists from Tidal..."
	lidarrArtistsData="$(curl -s "$arrUrl/api/v1/artist?apikey=${arrApiKey}")"
	lidarrArtistTotal=$(echo "${lidarrArtistsData}"| jq -r '.[].sortName' | wc -l)
	lidarrArtistList=($(echo "${lidarrArtistsData}" | jq -r ".[].foreignArtistId"))
	lidarrArtistIds="$(echo "${lidarrArtistsData}" | jq -r ".[].foreignArtistId")"
	# Tidal ids already linked in Lidarr; used for duplicate detection downstream.
	lidarrArtistLinkTidalIds="$(echo "${lidarrArtistsData}" | jq -r ".[] | .links[] | select(.name==\"tidal\") | .url" | grep -o '[[:digit:]]*' | sort -u)"
	log "$lidarrArtistTotal Artists Found"
	for id in ${!lidarrArtistList[@]}; do
		artistNumber=$(( $id + 1 ))
		musicbrainzId="${lidarrArtistList[$id]}"
		lidarrArtistData=$(echo "${lidarrArtistsData}" | jq -r ".[] | select(.foreignArtistId==\"${musicbrainzId}\")")
		lidarrArtistName="$(echo "${lidarrArtistData}" | jq -r " .artistName")"
		serviceArtistUrl=$(echo "${lidarrArtistData}" | jq -r ".links | .[] | select(.name==\"tidal\") | .url")
		serviceArtistIds=($(echo "$serviceArtistUrl" | grep -o '[[:digit:]]*' | sort -u))
		lidarrArtistMonitored=$(echo "${lidarrArtistData}" | jq -r ".monitored")
		log "$artistNumber of $lidarrArtistTotal :: $lidarrArtistName :: Adding Related Artists..."
		if [ "$lidarrArtistMonitored" == "false" ]; then
			log "$artistNumber of $lidarrArtistTotal :: $lidarrArtistName :: Artist is not monitored :: skipping..."
			continue
		fi
		for Id in ${!serviceArtistIds[@]}; do
			serviceArtistId="${serviceArtistIds[$Id]}"
			# Undocumented Tidal "related artists" page endpoint with a public token.
			serviceRelatedArtistData=$(curl -sL --fail "https://api.tidal.com/v1/pages/single-module-page/ae223310-a4c2-4568-a770-ffef70344441/4/b4b95795-778b-49c5-a34f-59aac055b662/1?artistId=$serviceArtistId&countryCode=$tidalCountryCode&deviceType=BROWSER" -H 'x-tidal-token: CzET4vdadNUFQ5JU' | jq -r .rows[].modules[].pagedList.items[])
			sleep $sleepTimer
			# Populate the globals consumed by AddTidalArtistToLidarr.
			serviceRelatedArtistsIds=($(echo $serviceRelatedArtistData | jq -r .id))
			serviceRelatedArtistsIdsCount=$(echo $serviceRelatedArtistData | jq -r .id | wc -l)
			log "$artistNumber of $lidarrArtistTotal :: $lidarrArtistName :: $serviceArtistId :: Found $serviceRelatedArtistsIdsCount Artists, adding $numberOfRelatedArtistsToAddPerArtist..."
			AddTidalArtistToLidarr
		done
	done
}
# Add up to $numberOfRelatedArtistsToAddPerArtist Tidal artists (from the
# globals set by AddTidalRelatedArtists) to Lidarr, skipping duplicates and
# import-exclusion entries. Mirrors AddDeezerArtistToLidarr for Tidal links.
AddTidalArtistToLidarr () {
	currentprocess=0
	for id in ${!serviceRelatedArtistsIds[@]}; do
		currentprocess=$(( $id + 1 ))
		# Cap the number of additions per source artist.
		if [ $currentprocess -gt $numberOfRelatedArtistsToAddPerArtist ]; then
			break
		fi
		serviceArtistId="${serviceRelatedArtistsIds[$id]}"
		serviceArtistName="$(echo "$serviceRelatedArtistData"| jq -r "select(.id==$serviceArtistId) | .name")"
		log "$artistNumber of $lidarrArtistTotal :: $lidarrArtistName :: $currentprocess of $numberOfRelatedArtistsToAddPerArtist :: $serviceArtistName :: Searching Musicbrainz for Tidal artist id ($serviceArtistId)"
		if echo "$lidarrArtistLinkTidalIds" | grep "^${serviceArtistId}$" | read; then
			log "$artistNumber of $lidarrArtistTotal :: $lidarrArtistName :: $currentprocess of $numberOfRelatedArtistsToAddPerArtist :: $serviceArtistName :: $serviceArtistId already in Lidarr..."
			continue
		fi
		# Resolve the Tidal artist to a Musicbrainz entry via Lidarr's search.
		serviceArtistNameEncoded="$(jq -R -r @uri <<<"$serviceArtistName")"
		lidarrArtistSearchData="$(curl -s "$arrUrl/api/v1/search?term=${serviceArtistNameEncoded}&apikey=${arrApiKey}")"
		lidarrArtistMatchedData=$(echo $lidarrArtistSearchData | jq -r ".[] | select(.artist) | select(.artist.links[].name==\"tidal\") | select(.artist.links[].url | contains (\"artist/$serviceArtistId\"))" 2>/dev/null)
		if [ ! -z "$lidarrArtistMatchedData" ]; then
			data="$lidarrArtistMatchedData"
			artistName="$(echo "$data" | jq -r ".artist.artistName" | head -n1)"
			foreignId="$(echo "$data" | jq -r ".foreignId" | head -n1)"
			importListExclusionData=$(curl -s "$arrUrl/api/v1/importlistexclusion" -H "X-Api-Key: $arrApiKey" | jq -r ".[].foreignId")
			if echo "$importListExclusionData" | grep "^${foreignId}$" | read; then
				log "$artistNumber of $lidarrArtistTotal :: $lidarrArtistName :: $currentprocess of $numberOfRelatedArtistsToAddPerArtist :: $serviceArtistName :: ERROR :: Artist is on import exclusion block list, skipping...."
				continue
			fi
			# Use the first configured root folder / default profiles for the add.
			data=$(curl -s "$arrUrl/api/v1/rootFolder" -H "X-Api-Key: $arrApiKey" | jq -r ".[]")
			path="$(echo "$data" | jq -r ".path")"
			path=$(echo $path | cut -d' ' -f1)
			qualityProfileId="$(echo "$data" | jq -r ".defaultQualityProfileId")"
			qualityProfileId=$(echo $qualityProfileId | cut -d' ' -f1)
			metadataProfileId="$(echo "$data" | jq -r ".defaultMetadataProfileId")"
			metadataProfileId=$(echo $metadataProfileId | cut -d' ' -f1)
			data="{
				\"artistName\": \"$artistName\",
				\"foreignArtistId\": \"$foreignId\",
				\"qualityProfileId\": $qualityProfileId,
				\"metadataProfileId\": $metadataProfileId,
				\"monitored\":$autoArtistAdderMonitored,
				\"monitor\":\"all\",
				\"rootFolderPath\": \"$path\",
				\"addOptions\":{\"searchForMissingAlbums\":$lidarrSearchForMissing}
				}"
			if echo "$lidarrArtistIds" | grep "^${foreignId}$" | read; then
				log "$artistNumber of $lidarrArtistTotal :: $lidarrArtistName :: $currentprocess of $numberOfRelatedArtistsToAddPerArtist :: $serviceArtistName :: Already in Lidarr ($foreignId), skipping..."
				continue
			fi
			log "$artistNumber of $lidarrArtistTotal :: $lidarrArtistName :: $currentprocess of $numberOfRelatedArtistsToAddPerArtist :: $serviceArtistName :: Adding $artistName to Lidarr ($foreignId)..."
			LidarrTaskStatusCheck
			lidarrAddArtist=$(curl -s "$arrUrl/api/v1/artist" -X POST -H 'Content-Type: application/json' -H "X-Api-Key: $arrApiKey" --data-raw "$data")
		else
			log "$artistNumber of $lidarrArtistTotal :: $lidarrArtistName :: $currentprocess of $numberOfRelatedArtistsToAddPerArtist :: $serviceArtistName :: ERROR :: Artist not found in Musicbrainz, please add \"https://listen.tidal.com/artist/${serviceArtistId}\" to the correct artist on Musicbrainz"
			NotifyWebhook "ArtistError" "Artist not found in Musicbrainz, please add <https://listen.tidal.com/artist/${serviceArtistId}> to the correct artist on Musicbrainz"
		fi
		LidarrTaskStatusCheck
	done
}
# Loop Script
# Main loop: re-read state, run each enabled adder, then sleep for the
# configured interval and repeat forever.
for (( ; ; )); do
	let i++   # iteration counter; NOTE(review): not read anywhere in this script
	logfileSetup
	log "Script starting..."
	verifyConfig
	getArrAppInfo
	verifyApiAccess
	# Default: ask Lidarr to search for missing albums when adding artists.
	if [ -z $lidarrSearchForMissing ]; then
		lidarrSearchForMissing=true
	fi
	if [ "$addDeezerTopArtists" == "true" ]; then
		AddDeezerTopArtists "$topLimit"
	fi
	if [ "$addDeezerTopAlbumArtists" == "true" ]; then
		AddDeezerTopAlbumArtists "$topLimit"
	fi
	if [ "$addDeezerTopTrackArtists" == "true" ]; then
		AddDeezerTopTrackArtists "$topLimit"
	fi
	if [ "$addRelatedArtists" == "true" ]; then
		AddDeezerRelatedArtists
		AddTidalRelatedArtists
	fi
	log "Script sleeping for $autoArtistAdderInterval..."
	sleep $autoArtistAdderInterval
done

# NOTE(review): unreachable — the loop above never terminates.
exit

View file

@ -0,0 +1,100 @@
#!/usr/bin/env bash
# AutoConfig: one-shot configuration of a Lidarr instance (media management,
# metadata, custom scripts, UI, profiles, naming) via its v1 API, then sleeps
# forever to keep the container service alive.
scriptVersion="3.2"
scriptName="AutoConfig"
### Import Settings
source /config/extended.conf
#### Import Functions
source /config/extended/functions
logfileSetup
if [ "$enableAutoConfig" != "true" ]; then
log "Script is not enabled, enable by setting enableAutoConfig to \"true\" by modifying the \"/config/extended.conf\" config file..."
log "Sleeping (infinity)"
sleep infinity
fi
getArrAppInfo
verifyApiAccess
# Register a /config/extended script as a Lidarr "Custom Script" notification,
# skipping it when a notification with the same name already exists.
# $1 = script file name (e.g. "PlexNotify.bash").
# Note: uses a local name other than "scriptName" to avoid shadowing the
# global used by log().
addCustomScript () {
local customScript="$1"
if curl -s "$arrUrl/api/v1/notification" -H "X-Api-Key: ${arrApiKey}" | jq -r .[].name | grep "$customScript" | read; then
log "$customScript Already added to Lidarr custom scripts"
return
fi
log "Adding $customScript to Lidarr custom scripts"
# Touch the filesystem endpoint first (mirrors the UI's path validation).
postSettingsToLidarr=$(curl -s "$arrUrl/api/v1/filesystem?path=%2Fconfig%2Fextended%2F${customScript}&allowFoldersWithoutTrailingSlashes=true&includeFiles=true" -H "X-Api-Key: ${arrApiKey}")
postSettingsToLidarr=$(curl -s "$arrUrl/api/v1/notification?" -X POST -H 'Content-Type: application/json' -H "X-Api-Key: ${arrApiKey}" --data-raw '{"onGrab":false,"onReleaseImport":true,"onUpgrade":true,"onRename":true,"onHealthIssue":false,"onDownloadFailure":false,"onImportFailure":false,"onTrackRetag":false,"onApplicationUpdate":false,"supportsOnGrab":true,"supportsOnReleaseImport":true,"supportsOnUpgrade":true,"supportsOnRename":true,"supportsOnHealthIssue":true,"includeHealthWarnings":false,"supportsOnDownloadFailure":false,"supportsOnImportFailure":false,"supportsOnTrackRetag":true,"supportsOnApplicationUpdate":true,"name":"'"$customScript"'","fields":[{"name":"path","value":"/config/extended/'"$customScript"'"},{"name":"arguments"}],"implementationName":"Custom Script","implementation":"CustomScript","configContract":"CustomScriptSettings","infoLink":"https://wiki.servarr.com/lidarr/supported#customscript","message":{"message":"Testing will execute the script with the EventType set to Test, ensure your script handles this correctly","type":"warning"},"tags":[]}')
}
# Each settings group below is applied when its toggle is "true" or unset
# (unset preserves the historical default of configuring everything).
if [ "$configureMediaManagement" == "true" ] || [ -z "$configureMediaManagement" ]; then
log "Configuring Lidarr Media Management Settings"
postSettingsToLidarr=$(curl -s "$arrUrl/api/v1/config/mediamanagement" -X PUT -H 'Content-Type: application/json' -H "X-Api-Key: ${arrApiKey}" --data-raw '{"autoUnmonitorPreviouslyDownloadedTracks":false,"recycleBin":"","recycleBinCleanupDays":7,"downloadPropersAndRepacks":"preferAndUpgrade","createEmptyArtistFolders":true,"deleteEmptyFolders":true,"fileDate":"albumReleaseDate","watchLibraryForChanges":false,"rescanAfterRefresh":"always","allowFingerprinting":"newFiles","setPermissionsLinux":false,"chmodFolder":"777","chownGroup":"","skipFreeSpaceCheckWhenImporting":false,"minimumFreeSpaceWhenImporting":100,"copyUsingHardlinks":true,"importExtraFiles":true,"extraFileExtensions":"jpg,png,lrc","id":1}')
fi
if [ "$configureMetadataConsumerSettings" == "true" ] || [ -z "$configureMetadataConsumerSettings" ]; then
log "Configuring Lidarr Metadata ConsumerSettings"
postSettingsToLidarr=$(curl -s "$arrUrl/api/v1/metadata/1?" -X PUT -H 'Content-Type: application/json' -H "X-Api-Key: ${arrApiKey}" --data-raw '{"enable":true,"name":"Kodi (XBMC) / Emby","fields":[{"name":"artistMetadata","value":true},{"name":"albumMetadata","value":true},{"name":"artistImages","value":true},{"name":"albumImages","value":true}],"implementationName":"Kodi (XBMC) / Emby","implementation":"XbmcMetadata","configContract":"XbmcMetadataSettings","infoLink":"https://wiki.servarr.com/lidarr/supported#xbmcmetadata","tags":[],"id":1}')
fi
if [ "$configureMetadataProviderSettings" == "true" ] || [ -z "$configureMetadataProviderSettings" ]; then
log "Configuring Lidarr Metadata Provider Settings"
postSettingsToLidarr=$(curl -s "$arrUrl/api/v1/config/metadataProvider" -X PUT -H 'Content-Type: application/json' -H "X-Api-Key: ${arrApiKey}" --data-raw '{"metadataSource":"","writeAudioTags":"newFiles","scrubAudioTags":false,"id":1}')
fi
if [ "$configureCustomScripts" == "true" ] || [ -z "$configureCustomScripts" ]; then
log "Configuring Lidarr Custom Scripts"
# Previously four copy-pasted blocks with identical payloads apart from the
# script name; deduplicated into the addCustomScript helper above.
addCustomScript "PlexNotify.bash"
addCustomScript "LyricExtractor.bash"
addCustomScript "ArtworkExtractor.bash"
addCustomScript "BeetsTagger.bash"
fi
if [ "$configureLidarrUiSettings" == "true" ] || [ -z "$configureLidarrUiSettings" ]; then
log "Configuring Lidarr UI Settings"
postSettingsToLidarr=$(curl -s "$arrUrl/api/v1/config/ui" -X PUT -H 'Content-Type: application/json' -H "X-Api-Key: ${arrApiKey}" --data-raw '{"firstDayOfWeek":0,"calendarWeekColumnHeader":"ddd M/D","shortDateFormat":"MMM D YYYY","longDateFormat":"dddd, MMMM D YYYY","timeFormat":"h(:mm)a","showRelativeDates":true,"enableColorImpairedMode":true,"uiLanguage":1,"expandAlbumByDefault":true,"expandSingleByDefault":true,"expandEPByDefault":true,"expandBroadcastByDefault":true,"expandOtherByDefault":true,"theme":"auto","id":1}')
fi
if [ "$configureMetadataProfileSettings" == "true" ] || [ -z "$configureMetadataProfileSettings" ]; then
log "Configuring Lidarr Standard Metadata Profile"
postSettingsToLidarr=$(curl -s "$arrUrl/api/v1/metadataprofile/1?" -X PUT -H 'Content-Type: application/json' -H "X-Api-Key: ${arrApiKey}" --data-raw '{"name":"Standard","primaryAlbumTypes":[{"albumType":{"id":2,"name":"Single"},"allowed":true},{"albumType":{"id":4,"name":"Other"},"allowed":true},{"albumType":{"id":1,"name":"EP"},"allowed":true},{"albumType":{"id":3,"name":"Broadcast"},"allowed":true},{"albumType":{"id":0,"name":"Album"},"allowed":true}],"secondaryAlbumTypes":[{"albumType":{"id":0,"name":"Studio"},"allowed":true},{"albumType":{"id":3,"name":"Spokenword"},"allowed":true},{"albumType":{"id":2,"name":"Soundtrack"},"allowed":true},{"albumType":{"id":7,"name":"Remix"},"allowed":true},{"albumType":{"id":9,"name":"Mixtape/Street"},"allowed":true},{"albumType":{"id":6,"name":"Live"},"allowed":false},{"albumType":{"id":4,"name":"Interview"},"allowed":false},{"albumType":{"id":8,"name":"DJ-mix"},"allowed":true},{"albumType":{"id":10,"name":"Demo"},"allowed":true},{"albumType":{"id":1,"name":"Compilation"},"allowed":true}],"releaseStatuses":[{"releaseStatus":{"id":3,"name":"Pseudo-Release"},"allowed":false},{"releaseStatus":{"id":1,"name":"Promotion"},"allowed":false},{"releaseStatus":{"id":0,"name":"Official"},"allowed":true},{"releaseStatus":{"id":2,"name":"Bootleg"},"allowed":false}],"id":1}')
fi
if [ "$configureTrackNamingSettings" == "true" ] || [ -z "$configureTrackNamingSettings" ]; then
log "Configuring Lidarr Track Naming Settings"
# This identical PUT was previously issued three times in a row; once is
# sufficient since the endpoint is an idempotent settings update.
postSettingsToLidarr=$(curl -s "$arrUrl/api/v1/config/naming" -X PUT -H 'Content-Type: application/json' -H "X-Api-Key: ${arrApiKey}" --data-raw '{"renameTracks":true,"replaceIllegalCharacters":true,"standardTrackFormat":"{Artist CleanName} - {Album Type} - {Release Year} - {Album CleanTitle}/{medium:00}{track:00} - {Track CleanTitle}","multiDiscTrackFormat":"{Artist CleanName} - {Album Type} - {Release Year} - {Album CleanTitle}/{medium:00}{track:00} - {Track CleanTitle}","artistFolderFormat":"{Artist CleanName}{ (Artist Disambiguation)}","includeArtistName":false,"includeAlbumTitle":false,"includeQuality":false,"replaceSpaces":false,"id":1}')
fi
# Keep the container service alive after the one-shot configuration.
sleep infinity
exit $?

View file

@ -0,0 +1,91 @@
#!/usr/bin/with-contenv bash
# QueueCleaner: periodically removes stuck or failed items from an Arr app's
# download queue (Sonarr/Radarr/Lidarr/Readarr), blocklisting them on removal.
scriptVersion="1.7"
scriptName="QueueCleaner"
#### Import Settings
source /config/extended.conf
#### Import Functions
# Shared helpers used below (log, logfileSetup, getArrAppInfo, verifyApiAccess, ...)
source /config/extended/functions
#### Create Log File
logfileSetup
#### Check Arr App
# Detect which Arr application this container runs and confirm API access
# before entering the main loop.
getArrAppInfo
verifyApiAccess
verifyConfig () {
# Re-read user settings on every loop iteration so edits to the config
# file take effect without a container restart.
source /config/extended.conf
# Halt forever unless the user explicitly enabled this script.
if [ "$enableQueueCleaner" != "true" ]; then
log "Script is not enabled, enable by setting enableQueueCleaner to \"true\" by modifying the \"/config/extended.conf\" config file..."
log "Sleeping (infinity)"
sleep infinity
fi
# Fall back to a 15 minute run interval when none is configured.
: "${queueCleanerScriptInterval:=15m}"
}
QueueCleanerProcess () {
# Remove stuck (completed-with-warning) and failed items from the Arr app's
# download queue. The app flavor is inferred from its default port; each
# flavor exposes a slightly different queue endpoint parameter.
# Sonarr
if [ "$arrPort" == "8989" ]; then
arrQueueData="$(curl -s "$arrUrl/api/v3/queue?page=1&pagesize=200&sortDirection=descending&sortKey=progress&includeUnknownSeriesItems=true&apikey=${arrApiKey}" | jq -r .records[])"
fi
# Radarr
if [ "$arrPort" == "7878" ]; then
arrQueueData="$(curl -s "$arrUrl/api/v3/queue?page=1&pagesize=200&sortDirection=descending&sortKey=progress&includeUnknownMovieItems=true&apikey=${arrApiKey}" | jq -r .records[])"
fi
# Lidarr
if [ "$arrPort" == "8686" ]; then
arrQueueData="$(curl -s "$arrUrl/api/v1/queue?page=1&pagesize=200&sortDirection=descending&sortKey=progress&includeUnknownArtistItems=true&apikey=${arrApiKey}" | jq -r .records[])"
fi
# Readarr
if [ "$arrPort" == "8787" ]; then
arrQueueData="$(curl -s "$arrUrl/api/v1/queue?page=1&pagesize=200&sortDirection=descending&sortKey=progress&includeUnknownAuthorItems=true&apikey=${arrApiKey}" | jq -r .records[])"
fi
# Items that downloaded but could not import (status completed + warning)...
arrQueueCompletedIds=$(echo "$arrQueueData" | jq -r 'select(.status=="completed") | select(.trackedDownloadStatus=="warning") | .id')
arrQueueIdsCompletedCount=$(echo "$arrQueueData" | jq -r 'select(.status=="completed") | select(.trackedDownloadStatus=="warning") | .id' | wc -l)
# ...and items that failed outright.
arrQueueFailedIds=$(echo "$arrQueueData" | jq -r 'select(.status=="failed") | .id')
arrQueueIdsFailedCount=$(echo "$arrQueueData" | jq -r 'select(.status=="failed") | .id' | wc -l)
arrQueuedIds=$(echo "$arrQueueCompletedIds"; echo "$arrQueueFailedIds")
arrQueueIdsCount=$(( $arrQueueIdsCompletedCount + $arrQueueIdsFailedCount ))
if [ $arrQueueIdsCount -eq 0 ]; then
log "No items in queue to clean up"
else
for queueId in $(echo $arrQueuedIds); do
arrQueueItemData="$(echo "$arrQueueData" | jq -r "select(.id==$queueId)")"
arrQueueItemTitle="$(echo "$arrQueueItemData" | jq -r .title)"
# Sonarr-only special case: a "TBA" episode title blocks auto-import, so
# trigger a series refresh (to pull the real title) instead of deleting.
if [ "$arrPort" == "8989" ]; then
arrEpisodeId="$(echo "$arrQueueItemData" | jq -r .episodeId)"
arrEpisodeData="$(curl -s "$arrUrl/api/v3/episode/$arrEpisodeId?apikey=${arrApiKey}")"
arrEpisodeTitle="$(echo "$arrEpisodeData" | jq -r .title)"
arrEpisodeSeriesId="$(echo "$arrEpisodeData" | jq -r .seriesId)"
if [ "$arrEpisodeTitle" == "TBA" ]; then
log "$queueId ($arrQueueItemTitle) :: ERROR :: Episode title is \"$arrEpisodeTitle\" and prevents auto-import, refreshing series..."
refreshSeries=$(curl -s "$arrUrl/api/$arrApiVersion/command" -X POST -H 'Content-Type: application/json' -H "X-Api-Key: $arrApiKey" --data-raw "{\"name\":\"RefreshSeries\",\"seriesId\":$arrEpisodeSeriesId}")
continue
fi
fi
# Delete from the queue, remove from the download client, and blocklist
# the release so it is not grabbed again.
log "$queueId ($arrQueueItemTitle) :: Removing Failed Queue Item from $arrName..."
deleteItem=$(curl -sX DELETE "$arrUrl/api/$arrApiVersion/queue/$queueId?removeFromClient=true&blocklist=true&apikey=${arrApiKey}")
done
fi
}
# Main daemon loop: re-validate config, run one cleanup pass, then sleep for
# the configured interval (default 15m) before repeating. Never returns.
for (( ; ; )); do
let i++
logfileSetup
verifyConfig
log "Starting..."
QueueCleanerProcess
log "Sleeping $queueCleanerScriptInterval..."
sleep $queueCleanerScriptInterval
done
# NOTE: never reached — the loop above runs forever.
exit

View file

@ -0,0 +1,515 @@
#!/usr/bin/with-contenv bash
# TidalVideoDownloader: downloads music videos from Tidal for artists in the
# Lidarr library, tags them with ffmpeg, and stores them under $videoPath.
scriptVersion="1.9"
scriptName="TidalVideoDownloader"
#### Import Settings
source /config/extended.conf
#### Import Functions
# Shared helpers used below (log, logfileSetup, NotifyWebhook, getArrAppInfo, ...)
source /config/extended/functions
verifyConfig () {
# Validate and derive all settings this script needs. On any fatal
# misconfiguration it logs the reason and sleeps forever so the container
# service stays up without busy-restarting.
videoContainer=mkv
if [ "$enableVideo" != "true" ]; then
log "Script is not enabled, enable by setting enableVideo to \"true\" by modifying the \"/config/extended.conf\" config file..."
log "Sleeping (infinity)"
sleep infinity
fi
# Default download working directory when none is configured.
if [ -z "$downloadPath" ]; then
downloadPath="/config/extended/downloads"
fi
videoDownloadPath="$downloadPath/tidal/videos"
# Default run interval when none is configured.
if [ -z "$videoScriptInterval" ]; then
videoScriptInterval="15m"
fi
# videoPath has no sane default — it is the user's video library root.
if [ -z "$videoPath" ]; then
log "ERROR: videoPath is not configured via the \"/config/extended.conf\" config file..."
log "Updated your \"/config/extended.conf\" file with the latest options, see: https://github.com/RandomNinjaAtk/arr-scripts/blob/main/lidarr/extended.conf"
log "Sleeping (infinity)"
sleep infinity
fi
# Tidal must be an enabled download source. Previously this was an inverted
# branch whose success path was a no-op "sleep 0.01"; check the failure
# case directly instead.
if [ "$dlClientSource" != "tidal" ] && [ "$dlClientSource" != "both" ]; then
log "ERROR: Tidal is not enabled, set dlClientSource setting to either \"both\" or \"tidal\"..."
log "Sleeping (infinity)"
sleep infinity
fi
}
TidalClientSetup () {
# Bootstrap tidal-dl: seed its config file if missing, point its output at
# the "incomplete" working directory, and force interactive authentication
# when no OAuth token exists yet.
log "TIDAL :: Verifying tidal-dl configuration"
if [ ! -f /config/xdg/.tidal-dl.json ]; then
log "TIDAL :: No default config found, importing default config \"tidal.json\""
if [ -f /config/extended/tidal-dl.json ]; then
cp /config/extended/tidal-dl.json /config/xdg/.tidal-dl.json
chmod 777 -R /config/xdg/
fi
fi
# Set tidal-dl's output directory (it persists this in its config).
tidal-dl -o "$videoDownloadPath"/incomplete 2>&1 | tee -a "/config/logs/$logFileName"
tidalQuality=HiFi
if [ ! -f /config/xdg/.tidal-dl.token.json ]; then
#log "TIDAL :: ERROR :: Downgrade tidal-dl for workaround..."
#pip3 install tidal-dl==2022.3.4.2 --no-cache-dir &>/dev/null
# No auth token: run tidal-dl interactively so the user can log in; the
# webhook alerts the user that manual action is needed.
log "TIDAL :: ERROR :: Loading client for required authentication, please authenticate, then exit the client..."
NotifyWebhook "FatalError" "TIDAL requires authentication, please authenticate now (check logs)"
tidal-dl 2>&1 | tee -a "/config/logs/$logFileName"
fi
# Start from an empty incomplete dir: create it, or purge any leftovers.
if [ ! -d "$videoDownloadPath/incomplete" ]; then
mkdir -p "$videoDownloadPath"/incomplete
chmod 777 "$videoDownloadPath"/incomplete
else
rm -rf "$videoDownloadPath"/incomplete/*
fi
#log "TIDAL :: Upgrade tidal-dl to newer version..."
#pip3 install tidal-dl==2022.07.06.1 --no-cache-dir &>/dev/null
}
TidaldlStatusCheck () {
# Block until no tidal-dl process remains in the process table, polling
# every 2 seconds; used to serialize access to the tidal-dl client.
running=yes
while ps aux | grep "tidal-dl" | grep -v "grep" | read; do
log "STATUS :: TIDAL-DL :: BUSY :: Pausing/waiting for all active tidal-dl tasks to end..."
sleep 2
done
running=no
}
TidalClientTest () {
# Verify tidal-dl can actually download by fetching a known test item
# (up to 3 attempts). On failure the auth token is deleted so the next run
# re-authenticates, and the whole script exits. Sets tidalClientTest to
# "success" or "failed".
log "TIDAL :: tidal-dl client setup verification..."
i=0
while [ $i -lt 3 ]; do
i=$(( $i + 1 ))
TidaldlStatusCheck
tidal-dl -q Normal -o "$videoDownloadPath"/incomplete -l "$tidalClientTestDownloadId" 2>&1 | tee -a "/config/logs/$logFileName"
# NOTE(review): success is detected via audio extensions (flac/opus/m4a/mp3)
# even though this script downloads videos — presumably the test ID is an
# audio track shared with the audio script; confirm.
downloadCount=$(find "$videoDownloadPath"/incomplete -type f -regex ".*/.*\.\(flac\|opus\|m4a\|mp3\)" | wc -l)
if [ $downloadCount -le 0 ]; then
continue
else
break
fi
done
tidalClientTest="unknown"
if [ $downloadCount -le 0 ]; then
# Remove the stale token so the next run triggers re-authentication.
if [ -f /config/xdg/.tidal-dl.token.json ]; then
rm /config/xdg/.tidal-dl.token.json
fi
log "TIDAL :: ERROR :: Download failed"
log "TIDAL :: ERROR :: You will need to re-authenticate on next script run..."
log "TIDAL :: ERROR :: Exiting..."
rm -rf "$videoDownloadPath"/incomplete/*
NotifyWebhook "Error" "TIDAL not authenticated but configured"
tidalClientTest="failed"
# Hard-stops the entire script, not just this function.
exit
else
rm -rf "$videoDownloadPath"/incomplete/*
log "TIDAL :: Successfully Verified"
tidalClientTest="success"
fi
}
AddFeaturedVideoArtists () {
# Add artists discovered in cached Tidal video credits to Lidarr. The cache
# (/config/extended/cache/tidal-videos/<tidalArtistId>, containing the artist
# name) is written during video processing; each entry is matched to a
# MusicBrainz id via Lidarr's search endpoint before being added.
if [ "$addFeaturedVideoArtists" != "true" ]; then
log "-----------------------------------------------------------------------------"
log "Add Featured Music Video Artists to Lidarr :: DISABLED"
log "-----------------------------------------------------------------------------"
return
fi
log "-----------------------------------------------------------------------------"
log "Add Featured Music Video Artists to Lidarr :: ENABLED"
log "-----------------------------------------------------------------------------"
lidarrArtistsData="$(curl -s "$arrUrl/api/v1/artist?apikey=${arrApiKey}" | jq -r ".[]")"
# Tidal links of artists already in Lidarr, used for de-duplication below.
artistTidalUrl=$(echo $lidarrArtistsData | jq -r '.links[] | select(.name=="tidal") | .url')
videoArtists=$(ls /config/extended/cache/tidal-videos/)
videoArtistsCount=$(ls /config/extended/cache/tidal-videos/ | wc -l)
if [ "$videoArtistsCount" == "0" ]; then
log "$videoArtistsCount Artists found for processing, skipping..."
return
fi
loopCount=0
for slug in $(echo $videoArtists); do
loopCount=$(( $loopCount + 1))
# Cache file name is the Tidal artist id; its content is the artist name.
artistName="$(cat /config/extended/cache/tidal-videos/$slug)"
if echo "$artistTidalUrl" | grep -i "tidal.com/artist/${slug}$" | read; then
log "$loopCount of $videoArtistsCount :: $artistName :: Already added to Lidarr, skipping..."
continue
fi
log "$loopCount of $videoArtistsCount :: $artistName :: Processing url :: https://tidal.com/artist/${slug}"
artistNameEncoded="$(jq -R -r @uri <<<"$artistName")"
lidarrArtistSearchData="$(curl -s "$arrUrl/api/v1/search?term=${artistNameEncoded}&apikey=${arrApiKey}")"
# Keep only search hits whose Tidal link matches this exact artist id.
lidarrArtistMatchedData=$(echo $lidarrArtistSearchData | jq -r ".[] | select(.artist) | select(.artist.links[].url | contains (\"tidal.com/artist/${slug}\"))" 2>/dev/null)
if [ ! -z "$lidarrArtistMatchedData" ]; then
data="$lidarrArtistMatchedData"
artistName="$(echo "$data" | jq -r ".artist.artistName")"
foreignId="$(echo "$data" | jq -r ".foreignId")"
else
log "$loopCount of $videoArtistsCount :: $artistName :: ERROR : Musicbrainz ID Not Found, skipping..."
continue
fi
# Use the root folder's path and default profiles for the new artist.
data=$(curl -s "$arrUrl/api/v1/rootFolder" -H "X-Api-Key: $arrApiKey" | jq -r ".[]")
path="$(echo "$data" | jq -r ".path")"
qualityProfileId="$(echo "$data" | jq -r ".defaultQualityProfileId")"
metadataProfileId="$(echo "$data" | jq -r ".defaultMetadataProfileId")"
data="{
\"artistName\": \"$artistName\",
\"foreignArtistId\": \"$foreignId\",
\"qualityProfileId\": $qualityProfileId,
\"metadataProfileId\": $metadataProfileId,
\"monitored\":true,
\"monitor\":\"all\",
\"rootFolderPath\": \"$path\",
\"addOptions\":{\"searchForMissingAlbums\":false}
}"
# NOTE(review): $lidarrArtistIds is not set in this function (it is
# populated in VideoProcess); if this runs first the check below compares
# against an empty list — confirm intended call order.
if echo "$lidarrArtistIds" | grep "^${foreignId}$" | read; then
log "$loopCount of $videoArtistsCount :: $artistName :: Already in Lidarr ($foreignId), skipping..."
continue
fi
log "$loopCount of $videoArtistsCount :: $artistName :: Adding $artistName to Lidarr ($foreignId)..."
LidarrTaskStatusCheck
lidarrAddArtist=$(curl -s "$arrUrl/api/v1/artist" -X POST -H 'Content-Type: application/json' -H "X-Api-Key: $arrApiKey" --data-raw "$data")
done
}
LidarrTaskStatusCheck () {
# Wait until Lidarr reports no running commands, polling every 2 seconds.
# The busy notice is logged only once per wait (tracked via "alerted").
alerted=no
while true; do
taskCount=$(curl -s "$arrUrl/api/v1/command?apikey=${arrApiKey}" | jq -r '.[] | select(.status=="started") | .name' | wc -l)
if [ "$taskCount" -lt "1" ]; then
break
fi
if [ "$alerted" = "no" ]; then
alerted=yes
log "STATUS :: LIDARR BUSY :: Pausing/waiting for all active Lidarr tasks to end..."
fi
sleep 2
done
}
VideoProcess () {
lidarrArtists=$(wget --timeout=0 -q -O - "$arrUrl/api/v1/artist?apikey=$arrApiKey" | jq -r .[])
lidarrArtistIds=$(echo $lidarrArtists | jq -r .id)
lidarrArtistCount=$(echo "$lidarrArtistIds" | wc -l)
processCount=0
for lidarrArtistId in $(echo $lidarrArtistIds); do
processCount=$(( $processCount + 1))
lidarrArtistData=$(wget --timeout=0 -q -O - "$arrUrl/api/v1/artist/$lidarrArtistId?apikey=$arrApiKey")
lidarrArtistName=$(echo $lidarrArtistData | jq -r .artistName)
lidarrArtistMusicbrainzId=$(echo $lidarrArtistData | jq -r .foreignArtistId)
lidarrArtistPath="$(echo "${lidarrArtistData}" | jq -r " .path")"
lidarrArtistFolder="$(basename "${lidarrArtistPath}")"
lidarrArtistFolderNoDisambig="$(echo "$lidarrArtistFolder" | sed "s/ (.*)$//g" | sed "s/\.$//g")" # Plex Sanitization, remove disambiguation
artistGenres=""
OLDIFS="$IFS"
IFS=$'\n'
artistGenres=($(echo $lidarrArtistData | jq -r ".genres[]"))
IFS="$OLDIFS"
if [ ! -z "$artistGenres" ]; then
for genre in ${!artistGenres[@]}; do
artistGenre="${artistGenres[$genre]}"
OUT=$OUT"$artistGenre / "
done
genre="${OUT%???}"
else
genre=""
fi
tidalArtistUrl=$(echo "${lidarrArtistData}" | jq -r ".links | .[] | select(.name==\"tidal\") | .url")
tidalArtistIds="$(echo "$tidalArtistUrl" | grep -o '[[:digit:]]*' | sort -u | head -n1)"
lidarrArtistTrackData=$(wget --timeout=0 -q -O - "$arrUrl/api/v1/track?artistId=$lidarrArtistId&apikey=${arrApiKey}" | jq -r .[].title)
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: Getting Tidal Video Data..."
tidalVideosData=$(curl -s "https://api.tidal.com/v1/artists/${tidalArtistIds}/videos?countryCode=${tidalCountryCode}&offset=0&limit=100" -H "x-tidal-token: CzET4vdadNUFQ5JU" | jq -r ".items | sort_by(.explicit) | reverse | .[]")
tidalVideoIds=$(echo $tidalVideosData | jq -r .id)
tidalVideoIdsCount=$(echo "$tidalVideoIds" | wc -l)
tidalVideoProcessNumber=0
for id in $(echo "$tidalVideoIds"); do
tidalVideoProcessNumber=$(( $tidalVideoProcessNumber + 1 ))
videoData=$(echo $tidalVideosData | jq -r "select(.id==$id)")
videoTitle=$(echo $videoData | jq -r .title)
videoTitleClean="$(echo "$videoTitle" | sed 's%/%-%g')"
videoTitleClean="$(echo "$videoTitleClean" | sed -e "s/[:alpha:][:digit:]._' -/ /g" -e "s/ */ /g" | sed 's/^[.]*//' | sed 's/[.]*$//g' | sed 's/^ *//g' | sed 's/ *$//g')"
videoExplicit=$(echo $videoData | jq -r .explicit)
videoUrl="https://tidal.com/browse/video/$id"
videoDate="$(echo "$videoData" | jq -r ".releaseDate")"
videoDate="${videoDate:0:10}"
videoYear="${videoDate:0:4}"
videoImageId="$(echo "$videoData" | jq -r ".imageId")"
videoImageIdFix="$(echo "$videoImageId" | sed "s/-/\//g")"
videoThumbnailUrl="https://resources.tidal.com/images/$videoImageIdFix/750x500.jpg"
videoSource="tidal"
videoArtists="$(echo "$videoData" | jq -r ".artists[]")"
videoArtistsIds="$(echo "$videoArtists" | jq -r ".id")"
videoType=""
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Processing..."
if echo "$videoTitle" | grep -i "official" | grep -i "video" | read; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Official Music Video Match Found!"
videoType="-video"
elif echo "$videoTitle" | grep -i "official" | grep -i "lyric" | read; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Official Lyric Video Match Found!"
videoType="-lyrics"
elif echo "$videoTitle" | grep -i "video" | grep -i "lyric" | read; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Official Lyric Video Match Found!"
videoType="-lyrics"
elif echo "$videoTitle" | grep -i "4k upgrade" | read; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: 4K Upgrade Found!"
videoType="-video"
elif echo "$videoTitle" | grep -i "\(.*live.*\)" | read; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Live Video Found!"
videoType="-live"
elif echo $lidarrArtistTrackData | grep -i "$videoTitle" | read; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Music Video Track Name Match Found!"
videoType="-video"
else
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: ERROR :: Unable to match!"
continue
fi
videoFileName="${videoTitleClean}${videoType}.mkv"
existingFileSize=""
existingFile=""
if [ -d "$videoPath/$lidarrArtistFolderNoDisambig" ]; then
existingFile="$(find "$videoPath/$lidarrArtistFolderNoDisambig" -type f -iname "${videoFileName}")"
existingFileNfo="$(find "$videoPath/$lidarrArtistFolderNoDisambig" -type f -iname "${videoTitleClean}${videoType}.nfo")"
existingFileJpg="$(find "$videoPath/$lidarrArtistFolderNoDisambig" -type f -iname "${videoTitleClean}${videoType}.jpg")"
fi
if [ -f "$existingFile" ]; then
existingFileSize=$(stat -c "%s" "$existingFile")
fi
if [ -f "/config/extended/logs/tidal-video/$id" ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Previously Downloaded"
if [ -f "$existingFile" ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Previously Downloaded, skipping..."
continue
else
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Previously Downloaded file missing, re-downloading..."
fi
fi
if [ ! -d "/config/extended/cache/tidal-videos" ]; then
mkdir -p "/config/extended/cache/tidal-videos"
chmod 777 "/config/extended/cache/tidal-videos"
fi
if [ ! -f "/config/extended/cache/tidal-videos/$tidalArtistIds" ]; then
echo -n "$lidarrArtistName" > "/config/extended/cache/tidal-videos/$tidalArtistIds"
fi
for videoArtistId in $(echo "$videoArtistsIds"); do
videoArtistData=$(echo "$videoArtists" | jq -r "select(.id==$videoArtistId)")
videoArtistName=$(echo "$videoArtistData" | jq -r .name)
videoArtistType=$(echo "$videoArtistData" | jq -r .type)
if [ ! -f "/config/extended/cache/tidal-videos/$videoArtistId" ]; then
echo -n "$videoArtistName" > "/config/extended/cache/tidal-videos/$videoArtistId"
fi
done
if [ ! -d "$videoDownloadPath/incomplete" ]; then
mkdir -p "$videoDownloadPath/incomplete"
fi
downloadFailed=false
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Downloading..."
tidal-dl -r P1080 -o "$videoDownloadPath/incomplete" -l "$videoUrl" 2>&1 | tee -a "/config/logs/$logFileName"
find "$videoDownloadPath/incomplete" -type f -exec mv "{}" "$videoDownloadPath/incomplete"/ \;
find "$videoDownloadPath/incomplete" -mindepth 1 -type d -exec rm -rf "{}" \; &>/dev/null
find "$videoDownloadPath/incomplete" -type f -regex ".*/.*\.\(mkv\|mp4\)" -print0 | while IFS= read -r -d '' video; do
file="${video}"
filenoext="${file%.*}"
filename="$(basename "$video")"
extension="${filename##*.}"
filenamenoext="${filename%.*}"
mv "$file" "$videoDownloadPath/$filename"
if [ -f "$videoDownloadPath/$filename" ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Download Complete!"
chmod 666 "$videoDownloadPath/$filename"
downloadFailed=false
else
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: ERROR :: Download failed!"
downloadFailed=true
break
fi
if [ "$videoDownloadPath/incomplete" ]; then
rm -rf "$videoDownloadPath/incomplete"
fi
if python3 /usr/local/sma/manual.py --config "/config/extended/sma.ini" -i "$videoDownloadPath/$filename" -nt; then
sleep 0.01
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Processed with SMA..."
rm /usr/local/sma/config/*log*
else
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: ERROR: SMA Processing Error"
rm "$videoDownloadPath/$filename"
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: INFO: deleted: $filename"
fi
if [ -f "$videoDownloadPath/${filenamenoext}.mkv" ]; then
curl -s "$videoThumbnailUrl" -o "$videoDownloadPath/poster.jpg"
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Tagging file"
ffmpeg -y \
-i "$videoDownloadPath/${filenamenoext}.mkv" \
-c copy \
-metadata TITLE="$videoTitle" \
-metadata DATE_RELEASE="$videoDate" \
-metadata DATE="$videoDate" \
-metadata YEAR="$videoYear" \
-metadata GENRE="$genre" \
-metadata ARTIST="$lidarrArtistName" \
-metadata ALBUMARTIST="$lidarrArtistName" \
-metadata ENCODED_BY="lidarr-extended" \
-attach "$videoDownloadPath/poster.jpg" -metadata:s:t mimetype=image/jpeg \
"$videoDownloadPath/$videoFileName" 2>&1 | tee -a "/config/logs/$logFileName"
chmod 666 "$videoDownloadPath/$videoFileName"
fi
if [ -f "$videoDownloadPath/$videoFileName" ]; then
if [ -f "$videoDownloadPath/${filenamenoext}.mkv" ]; then
rm "$videoDownloadPath/${filenamenoext}.mkv"
fi
fi
done
if [ "$downloadFailed" == "true" ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Skipping due to failed download..."
continue
fi
downloadedFileSize=$(stat -c "%s" "$videoDownloadPath/$videoFileName")
if [ -f "$existingFile" ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Logging completed download $id to: /config/extended/logs/tidal-video/$id"
touch /config/extended/logs/tidal-video/$id
chmod 666 "/config/extended/logs/tidal-video/$id"
if [ $downloadedFileSize -lt $existingFileSize ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Downloaded file is smaller than existing file ($downloadedFileSize -lt $existingFileSize), skipping..."
rm -rf "$videoDownloadPath"/*
continue
fi
if [ $downloadedFileSize == $existingFileSize ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Existing File is the same size as the download ($downloadedFileSize = $existingFileSize), skipping..."
rm -rf "$videoDownloadPath"/*
continue
fi
if [ $downloadedFileSize -gt $existingFileSize ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Downloaded File is bigger than existing file ($downloadedFileSize -gt $existingFileSize), removing existing file to import the new file..."
rm "$existingFile"
fi
fi
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Writing NFO"
nfo="$videoDownloadPath/${videoTitleClean}${videoType}.nfo"
if [ -f "$nfo" ]; then
rm "$nfo"
fi
echo "<musicvideo>" >> "$nfo"
echo " <title>${videoTitle}</title>" >> "$nfo"
echo " <userrating/>" >> "$nfo"
echo " <track/>" >> "$nfo"
echo " <studio/>" >> "$nfo"
if [ ! -z "$artistGenres" ]; then
for genre in ${!artistGenres[@]}; do
artistGenre="${artistGenres[$genre]}"
echo " <genre>$artistGenre</genre>" >> "$nfo"
done
fi
echo " <premiered/>" >> "$nfo"
echo " <year>$videoYear</year>" >> "$nfo"
for videoArtistId in $(echo "$videoArtistsIds"); do
videoArtistData=$(echo "$videoArtists" | jq -r "select(.id==$videoArtistId)")
videoArtistName=$(echo "$videoArtistData" | jq -r .name)
videoArtistType=$(echo "$videoArtistData" | jq -r .type)
echo " <artist>$videoArtistName</artist>" >> "$nfo"
done
echo " <albumArtistCredits>" >> "$nfo"
echo " <artist>$lidarrArtistName</artist>" >> "$nfo"
echo " <musicBrainzArtistID>$lidarrArtistMusicbrainzId</musicBrainzArtistID>" >> "$nfo"
echo " </albumArtistCredits>" >> "$nfo"
echo " <thumb>${videoTitleClean}${videoType}.jpg</thumb>" >> "$nfo"
echo " <source>tidal</source>" >> "$nfo"
echo "</musicvideo>" >> "$nfo"
tidy -w 2000 -i -m -xml "$nfo" &>/dev/null
chmod 666 "$nfo"
if [ -f "$videoDownloadPath/$videoFileName" ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Moving Download to final destination"
if [ ! -d "$videoPath/$lidarrArtistFolderNoDisambig" ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Creating Destination Directory \"$videoPath/$lidarrArtistFolderNoDisambig\""
mkdir -p "$videoPath/$lidarrArtistFolderNoDisambig"
chmod 777 "$videoPath/$lidarrArtistFolderNoDisambig"
fi
mv "$videoDownloadPath/$videoFileName" "$videoPath/$lidarrArtistFolderNoDisambig/${videoFileName}"
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Setting permissions"
chmod 666 "$videoPath/$lidarrArtistFolderNoDisambig/${videoFileName}"
if [ -f "$nfo" ]; then
if [ -f "$existingFileNfo" ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Deleting existing video nfo"
rm "$existingFileNfo"
fi
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Moving video nfo to final destination"
mv "$nfo" "$videoPath/$lidarrArtistFolderNoDisambig/${videoTitleClean}${videoType}.nfo"
chmod 666 "$videoPath/$lidarrArtistFolderNoDisambig/${videoTitleClean}${videoType}.nfo"
fi
if [ -f "$videoDownloadPath/poster.jpg" ]; then
if [ -f "$existingFileJpg" ]; then
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Deleting existing video jpg"
rm "$existingFileJpg"
fi
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Moving video poster to final destination"
mv "$videoDownloadPath/poster.jpg" "$videoPath/$lidarrArtistFolderNoDisambig/${videoTitleClean}${videoType}.jpg"
chmod 666 "$videoPath/$lidarrArtistFolderNoDisambig/${videoTitleClean}${videoType}.jpg"
fi
fi
if [ ! -d /config/extended/logs/tidal-video ]; then
mkdir -p /config/extended/logs/tidal-video
chmod 777 /config/extended/logs/tidal-video
fi
log "$processCount/$lidarrArtistCount :: $lidarrArtistName :: $tidalVideoProcessNumber/$tidalVideoIdsCount :: $videoTitle ($id) :: Logging completed download $id to: /config/extended/logs/tidal-video/$id"
touch /config/extended/logs/tidal-video/$id
chmod 666 "/config/extended/logs/tidal-video/$id"
done
done
}
# Main entry point: run the full video pipeline forever, sleeping between passes.
log "Starting Script...."
for (( ; ; )); do
# Pass counter; i is not read anywhere visible here — presumably used by log/debug helpers, TODO confirm
let i++
verifyConfig
getArrAppInfo
verifyApiAccess
TidalClientSetup
AddFeaturedVideoArtists
VideoProcess
log "Script sleeping for $videoScriptInterval..."
# Interval comes from extended.conf (verifyConfig supplies a default)
sleep $videoScriptInterval
done
# Unreachable: the loop above never exits
exit

View file

@ -0,0 +1,60 @@
#!/usr/bin/with-contenv bash
scriptVersion="1.3"
scriptName="UnmappedFilesCleaner"
#### Import Settings
source /config/extended.conf
#### Import Functions
source /config/extended/functions
# Validate configuration for the UnmappedFilesCleaner script.
# Sleeps forever when the feature flag is off; otherwise fills in the
# default polling interval if none was configured.
verifyConfig () {
  [ "$enableUnmappedFilesCleaner" == "true" ] || {
    log "Script is not enabled, enable by setting enableUnmappedFilesCleaner to \"true\" by modifying the \"/config/extended.conf\" config file..."
    log "Sleeping (infinity)"
    sleep infinity
  }
  # Default: check every 15 minutes.
  unmappedFolderCleanerScriptInterval="${unmappedFolderCleanerScriptInterval:-15m}"
}
# Purge "unmapped" track files reported by Lidarr: delete each file's
# directory from disk, then remove the trackFile record via the Lidarr API.
# Relies on globals $arrUrl and $arrApiKey (set by getArrAppInfo/verifyApiAccess).
UnmappedFilesCleanerProcess () {
  log "Finding UnmappedFiles to purge..."
  OLDIFS="$IFS"
  IFS=$'\n'
  # Query the API once and derive the id list from the same payload
  # (the original issued two identical requests; also fixes the "unampped" typos).
  unmappedFilesData="$(curl -s "$arrUrl/api/v1/trackFile?unmapped=true" -H 'Content-Type: application/json' -H "X-Api-Key: $arrApiKey" | jq -r .[])"
  unmappedFileIds="$(echo "$unmappedFilesData" | jq -r .id)"
  if [ -z "$unmappedFileIds" ]; then
    log "No unmapped files to process"
    # BUGFIX: restore IFS before returning; the original leaked IFS=$'\n'
    # into the rest of the (looping) script.
    IFS="$OLDIFS"
    return
  fi
  for id in $unmappedFileIds; do
    unmappedFilePath=$(echo "$unmappedFilesData" | jq -r ". | select(.id==$id)| .path")
    unmappedFileName=$(basename "$unmappedFilePath")
    unmappedFileDirectory=$(dirname "$unmappedFilePath")
    if [ -d "$unmappedFileDirectory" ]; then
      log "Deleting \"$unmappedFileDirectory\""
      rm -rf "$unmappedFileDirectory"
    fi
    log "Removing $unmappedFileName ($id) entry from lidarr..."
    # Response body is not needed; discard it instead of capturing an unused variable.
    curl -s "$arrUrl/api/v1/trackFile/$id" -X DELETE -H "X-Api-Key: $arrApiKey" > /dev/null
  done
  # BUGFIX: restore field splitting for the remainder of the script.
  IFS="$OLDIFS"
}
# Loop Script
# Main entry point: run the cleaner forever, sleeping between passes.
for (( ; ; )); do
# Pass counter; i is not read anywhere visible here — presumably used by log/debug helpers, TODO confirm
let i++
logfileSetup
log "Script starting..."
verifyConfig
getArrAppInfo
verifyApiAccess
UnmappedFilesCleanerProcess
log "Script sleeping for $unmappedFolderCleanerScriptInterval..."
# Interval comes from extended.conf (verifyConfig supplies the 15m default)
sleep $unmappedFolderCleanerScriptInterval
done
# Unreachable: the loop above never exits
exit

View file

@ -0,0 +1,727 @@
#!/usr/bin/with-contenv bash
scriptVersion="3.7"
scriptName="Video"
### Import Settings
source /config/extended.conf
#### Import Functions
source /config/extended/functions
# Validate configuration for the Video script.
# Fills optional settings with defaults, then hard-stops (sleeps forever)
# when the feature is disabled, IMVD processing is disabled, or the
# mandatory videoPath setting is missing.
verifyConfig () {
  videoContainer="${videoContainer:-mkv}"
  disableImvd="${disableImvd:-false}"
  if [ "$enableVideo" != "true" ]; then
    log "Script is not enabled, enable by setting enableVideo to \"true\" by modifying the \"/config/extended.conf\" config file..."
    log "Sleeping (infinity)"
    sleep infinity
  fi
  if [ "$disableImvd" != "false" ]; then
    log "Script is not enabled, enable by setting disableImvd to \"false\" by modifying the \"/config/extended.conf\" config file..."
    log "Sleeping (infinity)"
    sleep infinity
  fi
  downloadPath="${downloadPath:-/config/extended/downloads}"
  videoScriptInterval="${videoScriptInterval:-15m}"
  # videoPath has no sane default — it must be configured explicitly.
  if [ -z "$videoPath" ]; then
    log "ERROR: videoPath is not configured via the \"/config/extended.conf\" config file..."
    log "Updated your \"/config/extended.conf\" file with the latest options, see: https://github.com/RandomNinjaAtk/arr-scripts/blob/main/lidarr/extended.conf"
    log "Sleeping (infinity)"
    sleep infinity
  fi
}
# Print the startup banner and countdown, verify Lidarr API access,
# announce the effective settings, and locate the optional yt-dlp
# cookies file (sets $cookiesFile and $videoDownloadPath).
Configuration () {
  case "$dlClientSource" in
    tidal|both) sourcePreference=tidal ;;
  esac
  log "-----------------------------------------------------------------------------"
  log "|~) _ ._ _| _ ._ _ |\ |o._ o _ |~|_|_|"
  log "|~\(_|| |(_|(_)| | || \||| |_|(_||~| | |<"
  log " Presents: $scriptName ($scriptVersion)"
  log " May the beats be with you!"
  log "-----------------------------------------------------------------------------"
  log "Donate: https://github.com/sponsors/RandomNinjaAtk"
  log "Project: https://github.com/RandomNinjaAtk/arr-scripts"
  log "Support: https://github.com/RandomNinjaAtk/arr-scripts/discussions"
  log "-----------------------------------------------------------------------------"
  sleep 5
  log ""
  log "Lift off in..."; sleep 0.5
  # Countdown as a loop instead of five copy-pasted log/sleep lines.
  for tick in 5 4 3 2 1; do
    log "$tick"
    sleep 1
  done
  verifyApiAccess
  videoDownloadPath="$downloadPath/videos"
  log "CONFIG :: Download Location :: $videoDownloadPath"
  log "CONFIG :: Music Video Location :: $videoPath"
  log "CONFIG :: Subtitle Language set to: $youtubeSubtitleLanguage"
  log "CONFIG :: Video container set to format: $videoContainer"
  case "$videoContainer" in
    mkv) log "CONFIG :: yt-dlp format: $videoFormat" ;;
    mp4) log "CONFIG :: yt-dlp format: --format-sort ext:mp4:m4a --merge-output-format mp4" ;;
  esac
  if [ -n "$videoDownloadTag" ]; then
    log "CONFIG :: Video download tag set to: $videoDownloadTag"
  fi
  # yt-dlp cookies enable age-restricted / member content; optional.
  if [ -f "/config/cookies.txt" ]; then
    cookiesFile="/config/cookies.txt"
    log "CONFIG :: Cookies File Found! (/config/cookies.txt)"
  else
    log "CONFIG :: ERROR :: Cookies File Not Found!"
    log "CONFIG :: ERROR :: Add yt-dlp compatible cookies.txt to the following location: /config/cookies.txt"
    cookiesFile=""
  fi
  log "CONFIG :: Complete"
}
# Cache IMVDB data for one artist under /config/extended/cache/imvdb:
# the artist slug file plus one JSON record per video URL found on the
# artist's IMVDB page. Retries network failures; invalidates the artist's
# "complete" log when the cached count no longer matches the site.
# Reads globals: $artistImvdbSlug, $lidarrArtistName, $lidarrArtistMusicbrainzId,
# $processCount, $lidarrArtistIdsCount (set by the caller).
ImvdbCache () {
# Nothing to cache without an IMVDB slug for this artist.
if [ -z "$artistImvdbSlug" ]; then
return
fi
if [ ! -d "/config/extended/cache/imvdb" ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: Creating Cache Folder..."
mkdir -p "/config/extended/cache/imvdb"
chmod 777 "/config/extended/cache/imvdb"
fi
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: Caching Records..."
# Slug file maps slug -> artist name (no trailing newline by design: echo -n).
if [ ! -f /config/extended/cache/imvdb/$artistImvdbSlug ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: Recording Artist Slug into cache"
echo -n "$lidarrArtistName" > /config/extended/cache/imvdb/$artistImvdbSlug
fi
count=0
attemptError="false"
# Retry scraping the artist page until a video link appears, up to 10 attempts.
until false; do
count=$(( $count + 1 ))
# Scrape video URLs from the artist page: keep hrefs under .com/video/<slug>/,
# strip a trailing numeric path segment, de-duplicate.
artistImvdbVideoUrls=$(curl -s "https://imvdb.com/n/$artistImvdbSlug" --compressed -H 'User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/116.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'DNT: 1' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' | grep "$artistImvdbSlug" | grep -Eoi '<a [^>]+>' | grep -Eo 'href="[^\"]+"' | grep -Eo '(http|https)://[^"]+' | grep -i ".com/video/$artistImvdbSlug/" | sed "s%/[0-9]$%%g" | sort -u)
if echo "$artistImvdbVideoUrls" | grep -i "imvdb.com" | read; then
break
else
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ERROR :: Cannot connect to imvdb, retrying..."
sleep 0.5
fi
if [ $count == 10 ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${artistImvdbVideoUrlsCount} :: ERROR :: All attempts at connecting failed, skipping..."
attemptError="true"
break
fi
done
if [ "$attemptError" == "true" ]; then
return
fi
# Compare site video count with cached JSON count (files named <mbid>--<slug>.json).
artistImvdbVideoUrlsCount=$(echo "$artistImvdbVideoUrls" | wc -l)
cachedArtistImvdbVideoUrlsCount=$(ls /config/extended/cache/imvdb/$lidarrArtistMusicbrainzId--* 2>/dev/null | wc -l)
if [ "$artistImvdbVideoUrlsCount" == "$cachedArtistImvdbVideoUrlsCount" ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: Cache is already up-to-date ($artistImvdbVideoUrlsCount==$cachedArtistImvdbVideoUrlsCount), skipping..."
return
else
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: Cache needs updating (${artistImvdbVideoUrlsCount}!=${cachedArtistImvdbVideoUrlsCount})..."
# Stale cache invalidates the artist's completed marker so it gets re-processed.
if [ -f "/config/extended/logs/video/complete/$lidarrArtistMusicbrainzId" ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: Removing Artist completed log file to allow artist re-processing..."
rm "/config/extended/logs/video/complete/$lidarrArtistMusicbrainzId"
fi
fi
sleep 0.5
imvdbProcessCount=0
for imvdbVideoUrl in $(echo "$artistImvdbVideoUrls"); do
imvdbProcessCount=$(( $imvdbProcessCount + 1 ))
imvdbVideoUrlSlug=$(basename "$imvdbVideoUrl")
imvdbVideoData="/config/extended/cache/imvdb/$lidarrArtistMusicbrainzId--$imvdbVideoUrlSlug.json"
#echo "$imvdbVideoUrl :: $imvdbVideoUrlSlug :: $imvdbVideoId"
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${artistImvdbVideoUrlsCount} :: Caching video data..."
if [ -f "$imvdbVideoData" ]; then
if [ ! -s "$imvdbVideoData" ]; then # if empty, delete file
rm "$imvdbVideoData"
fi
fi
if [ -f "$imvdbVideoData" ]; then
if jq -e . >/dev/null 2>&1 <<<"$(cat "$imvdbVideoData")"; then # verify file is valid json
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${artistImvdbVideoUrlsCount} :: Video Data already downloaded"
continue
fi
fi
if [ ! -f "$imvdbVideoData" ]; then
count=0
# Retry download: the numeric video id is scraped from the page ("<p>ID:"),
# then the JSON API record is fetched. Invalid JSON deletes the file and retries.
until false; do
count=$(( $count + 1 ))
#echo "$count"
if [ ! -f "$imvdbVideoData" ]; then
imvdbVideoId=$(curl -s "$imvdbVideoUrl" --compressed -H 'User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/116.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'DNT: 1' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' | grep "<p>ID:" | grep -o "[[:digit:]]*")
imvdbVideoJsonUrl="https://imvdb.com/api/v1/video/$imvdbVideoId?include=sources,featured,credits"
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${artistImvdbVideoUrlsCount} :: Downloading Video data"
curl -s "$imvdbVideoJsonUrl" --compressed -H 'User-Agent: Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/116.0' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' -H 'Accept-Language: en-US,en;q=0.5' -H 'Accept-Encoding: gzip, deflate, br' -H 'DNT: 1' -H 'Connection: keep-alive' -H 'Upgrade-Insecure-Requests: 1' -H 'Sec-Fetch-Dest: document' -H 'Sec-Fetch-Mode: navigate' -H 'Sec-Fetch-Site: none' -H 'Sec-Fetch-User: ?1' -o "$imvdbVideoData"
sleep 0.5
fi
if [ -f "$imvdbVideoData" ]; then
if jq -e . >/dev/null 2>&1 <<<"$(cat "$imvdbVideoData")"; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${artistImvdbVideoUrlsCount} :: Download Complete"
break
else
rm "$imvdbVideoData"
# Give up after 2 invalid-JSON responses.
if [ $count = 2 ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${artistImvdbVideoUrlsCount} :: Download Failed, skipping..."
break
fi
fi
else
# Give up after 5 attempts that produced no file at all.
if [ $count = 5 ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${artistImvdbVideoUrlsCount} :: Download Failed, skipping..."
break
fi
fi
done
fi
done
}
# Download one YouTube video with yt-dlp into a fresh staging folder.
#   $1 = video URL, $2 = sanitized title, $3 = type suffix (e.g. "-video")
# Sets downloadFailed=false when the expected container file appears in
# $videoDownloadPath/incomplete, true otherwise.
DownloadVideo () {
  # Always start from an empty staging directory.
  rm -rf "$videoDownloadPath/incomplete"
  mkdir -p "$videoDownloadPath/incomplete"
  chmod 777 "$videoDownloadPath/incomplete"
  # Only YouTube links are handled by this downloader.
  if echo "$1" | grep -i "youtube" | read; then
    if [ "$videoContainer" = "mkv" ]; then
      dlArgs=(-f "$videoFormat" --no-video-multistreams)
      if [ -n "$cookiesFile" ]; then
        dlArgs+=(--cookies "$cookiesFile")
      fi
      dlArgs+=(-o "$videoDownloadPath/incomplete/${2}${3}" --embed-subs --sub-lang $youtubeSubtitleLanguage --merge-output-format mkv --remux-video mkv --no-mtime --geo-bypass)
      yt-dlp "${dlArgs[@]}" "$1"
      stagedFile="$videoDownloadPath/incomplete/${2}${3}.mkv"
    else
      dlArgs=(--format-sort ext:mp4:m4a --merge-output-format mp4 --no-video-multistreams)
      if [ -n "$cookiesFile" ]; then
        dlArgs+=(--cookies "$cookiesFile")
      fi
      dlArgs+=(-o "$videoDownloadPath/incomplete/${2}${3}" --embed-subs --sub-lang $youtubeSubtitleLanguage --no-mtime --geo-bypass)
      yt-dlp "${dlArgs[@]}" "$1"
      stagedFile="$videoDownloadPath/incomplete/${2}${3}.mp4"
    fi
    # Success is judged solely by the presence of the expected output file.
    if [ -f "$stagedFile" ]; then
      chmod 666 "$stagedFile"
      downloadFailed=false
    else
      downloadFailed=true
    fi
  fi
}
# Fetch a thumbnail image ($1 = URL) and stage it as ${2}${3}.jpg in the
# incomplete download folder with open permissions.
DownloadThumb () {
  thumbTarget="$videoDownloadPath/incomplete/${2}${3}.jpg"
  curl -s "$1" -o "$thumbTarget"
  chmod 666 "$thumbTarget"
}
# Run every staged mkv/mp4 in the incomplete folder through Sickbeard MP4
# Automator (manual.py). $2 = video title used in log lines.
# On SMA failure the offending file is deleted so it is not imported.
VideoProcessWithSMA () {
  find "$videoDownloadPath/incomplete" -type f -regex ".*/.*\.\(mkv\|mp4\)" -print0 | while IFS= read -r -d '' video; do
    count=$(($count+1))
    file="${video}"
    filenoext="${file%.*}"
    filename="$(basename "$video")"
    extension="${filename##*.}"
    filenamenoext="${filename%.*}"
    # mkv output uses the default SMA profile; anything else the mp4 profile.
    if [[ $filenoext.$videoContainer == *.mkv ]]; then
      smaConfig="/config/extended/sma.ini"
    else
      smaConfig="/config/extended/sma-mp4.ini"
    fi
    if python3 /usr/local/sma/manual.py --config "$smaConfig" -i "$file" -nt &>/dev/null; then
      sleep 0.01
      log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${imvdbArtistVideoCount} :: $2 :: Processed with SMA..."
      rm /usr/local/sma/config/*log*
    else
      log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${imvdbArtistVideoCount} :: $2 :: ERROR: SMA Processing Error"
      rm "$video"
      log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${imvdbArtistVideoCount} :: $2 :: INFO: deleted: $filename"
    fi
  done
}
# Tag every staged mkv/mp4 with ffmpeg metadata and embed the poster jpg.
#   $1 = clean video title, $2 = type suffix (e.g. "-video"), $3 = year/date
# Genres come from the cached Lidarr artist data in $lidarrArtistData.
# mkv: poster is attached as an attachment stream; mp4: poster is muxed in
# as an attached_pic video stream.
VideoTagProcess () {
  find "$videoDownloadPath/incomplete" -type f -regex ".*/.*\.\(mkv\|mp4\)" -print0 | while IFS= read -r -d '' video; do
    count=$(($count+1))
    file="${video}"
    filenoext="${file%.*}"
    artistGenres=""
    OLDIFS="$IFS"
    IFS=$'\n'
    artistGenres=($(echo $lidarrArtistData | jq -r ".genres[]"))
    IFS="$OLDIFS"
    if [ ! -z "$artistGenres" ]; then
      # BUGFIX: reset the accumulator for each file; previously OUT was never
      # cleared, so the GENRE tag grew with duplicated genres on every
      # subsequent file processed in this loop.
      OUT=""
      for genre in ${!artistGenres[@]}; do
        artistGenre="${artistGenres[$genre]}"
        OUT=$OUT"$artistGenre / "
      done
      genre="${OUT%???}"   # strip the trailing " / "
    else
      genre=""
    fi
    if [[ $filenoext.$videoContainer == *.mkv ]]; then
      # Tag in place: rename to -temp, remux with metadata, remove the temp file.
      mv "$filenoext.$videoContainer" "$filenoext-temp.$videoContainer"
      log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${imvdbArtistVideoCount} :: ${1}${2} $3 :: Tagging file"
      ffmpeg -y \
        -i "$filenoext-temp.$videoContainer" \
        -c copy \
        -metadata TITLE="${1}" \
        -metadata DATE_RELEASE="$3" \
        -metadata DATE="$3" \
        -metadata YEAR="$3" \
        -metadata GENRE="$genre" \
        -metadata ARTIST="$lidarrArtistName" \
        -metadata ALBUMARTIST="$lidarrArtistName" \
        -metadata ENCODED_BY="lidarr-extended" \
        -attach "$videoDownloadPath/incomplete/${1}${2}.jpg" -metadata:s:t mimetype=image/jpeg \
        "$filenoext.$videoContainer" &>/dev/null
      rm "$filenoext-temp.$videoContainer"
      chmod 666 "$filenoext.$videoContainer"
    else
      mv "$filenoext.$videoContainer" "$filenoext-temp.$videoContainer"
      log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${imvdbArtistVideoCount} :: ${1}${2} $3 :: Tagging file"
      ffmpeg -y \
        -i "$filenoext-temp.$videoContainer" \
        -i "$videoDownloadPath/incomplete/${1}${2}.jpg" \
        -map 1 \
        -map 0 \
        -c copy \
        -c:v:0 mjpeg \
        -disposition:0 attached_pic \
        -movflags faststart \
        -metadata TITLE="${1}" \
        -metadata ARTIST="$lidarrArtistName" \
        -metadata DATE="$3" \
        -metadata GENRE="$genre" \
        "$filenoext.$videoContainer" &>/dev/null
      rm "$filenoext-temp.$videoContainer"
      chmod 666 "$filenoext.$videoContainer"
    fi
  done
}
# Write a Kodi-style .nfo file next to the staged video.
# Positional parameters as used below:
#   $1 = file base name, $2 = type suffix, $3 = title, $4 = title suffix,
#   $5 = artist-credit source ("musicbrainz" or "imvdb"), $6 = year,
#   $8 = <source> tag value (e.g. youtube/tidal)
# NOTE(review): $7 is never referenced — presumably reserved or a leftover
# from an older signature; confirm against the callers before changing.
VideoNfoWriter () {
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${imvdbArtistVideoCount} :: ${3} :: Writing NFO"
nfo="$videoDownloadPath/incomplete/${1}${2}.nfo"
# Always rebuild from scratch.
if [ -f "$nfo" ]; then
rm "$nfo"
fi
echo "<musicvideo>" >> "$nfo"
echo "	<title>${3}${4}</title>" >> "$nfo"
echo "	<userrating/>" >> "$nfo"
echo "	<track/>" >> "$nfo"
echo "	<studio/>" >> "$nfo"
# Genres come from the cached Lidarr artist record.
artistGenres=""
OLDIFS="$IFS"
IFS=$'\n'
artistGenres=($(echo $lidarrArtistData | jq -r ".genres[]"))
IFS="$OLDIFS"
if [ ! -z "$artistGenres" ]; then
for genre in ${!artistGenres[@]}; do
artistGenre="${artistGenres[$genre]}"
echo "	<genre>$artistGenre</genre>" >> "$nfo"
done
fi
echo "	<premiered/>" >> "$nfo"
echo "	<year>$6</year>" >> "$nfo"
# Artist credits: either the MusicBrainz credit list ($musicbrainzVideoArtistCreditsNames,
# set by the caller) or the main artist plus IMVDB featured artists.
if [ "$5" = "musicbrainz" ]; then
OLDIFS="$IFS"
IFS=$'\n'
for artistName in $(echo "$musicbrainzVideoArtistCreditsNames"); do
echo "	<artist>$artistName</artist>" >> "$nfo"
done
IFS="$OLDIFS"
fi
if [ "$5" = "imvdb" ]; then
echo "	<artist>$lidarrArtistName</artist>" >> "$nfo"
# Featured artist names are looked up from the slug cache files.
for featuredArtistSlug in $(echo "$imvdbVideoFeaturedArtistsSlug"); do
if [ -f /config/extended/cache/imvdb/$featuredArtistSlug ]; then
featuredArtistName="$(cat /config/extended/cache/imvdb/$featuredArtistSlug)"
echo "	<artist>$featuredArtistName</artist>" >> "$nfo"
fi
done
fi
echo "	<albumArtistCredits>" >> "$nfo"
echo "		<artist>$lidarrArtistName</artist>" >> "$nfo"
echo "		<musicBrainzArtistID>$lidarrArtistMusicbrainzId</musicBrainzArtistID>" >> "$nfo"
echo "	</albumArtistCredits>" >> "$nfo"
echo "	<thumb>${1}${2}.jpg</thumb>" >> "$nfo"
echo "	<source>$8</source>" >> "$nfo"
echo "</musicvideo>" >> "$nfo"
# Pretty-print/validate the XML in place; errors are ignored.
tidy -w 2000 -i -m -xml "$nfo" &>/dev/null
chmod 666 "$nfo"
}
# Block until Lidarr reports no running ("started") commands, polling the
# command API every 2 seconds. Logs a single "busy" notice per wait.
LidarrTaskStatusCheck () {
  alerted=no
  while true; do
    taskCount=$(curl -s "$arrUrl/api/v1/command?apikey=${arrApiKey}" | jq -r '.[] | select(.status=="started") | .name' | wc -l)
    if [ "$taskCount" -lt "1" ]; then
      break
    fi
    if [ "$alerted" = "no" ]; then
      alerted=yes
      log "STATUS :: LIDARR BUSY :: Pausing/waiting for all active Lidarr tasks to end..."
    fi
    sleep 2
  done
}
# Add artists discovered via IMVDB featured-artist caching to Lidarr.
# For each cached slug file (non "--" names under the imvdb cache), search
# Lidarr for an artist whose links contain that IMVDB url and add it,
# monitored, to the first root folder. Gated by $addFeaturedVideoArtists.
AddFeaturedVideoArtists () {
if [ "$addFeaturedVideoArtists" != "true" ]; then
log "-----------------------------------------------------------------------------"
log "Add Featured Music Video Artists to Lidarr :: DISABLED"
log "-----------------------------------------------------------------------------"
return
fi
log "-----------------------------------------------------------------------------"
log "Add Featured Music Video Artists to Lidarr :: ENABLED"
log "-----------------------------------------------------------------------------"
# All existing artists' IMVDB links, concatenated across the whole library.
lidarrArtistsData="$(curl -s "$arrUrl/api/v1/artist?apikey=${arrApiKey}" | jq -r ".[]")"
artistImvdbUrl=$(echo $lidarrArtistsData | jq -r '.links[] | select(.name=="imvdb") | .url')
# Slug cache files without "--" are artist slugs (the "--" ones are video JSON).
videoArtists=$(ls /config/extended/cache/imvdb/ | grep -Ev ".*--.*")
videoArtistsCount=$(ls /config/extended/cache/imvdb/ | grep -Ev ".*--.*" | wc -l)
if [ "$videoArtistsCount" == "0" ]; then
log "$videoArtistsCount Artists found for processing, skipping..."
return
fi
loopCount=0
for slug in $(echo $videoArtists); do
loopCount=$(( $loopCount + 1))
artistName="$(cat /config/extended/cache/imvdb/$slug)"
# Anchored match on the slug avoids prefix collisions (e.g. "foo" vs "foobar").
if echo "$artistImvdbUrl" | grep -i "imvdb.com/n/${slug}$" | read; then
log "$loopCount of $videoArtistsCount :: $artistName :: Already added to Lidarr, skipping..."
continue
fi
log "$loopCount of $videoArtistsCount :: $artistName :: Processing url :: https://imvdb.com/n/$slug"
artistNameEncoded="$(jq -R -r @uri <<<"$artistName")"
lidarrArtistSearchData="$(curl -s "$arrUrl/api/v1/search?term=${artistNameEncoded}&apikey=${arrApiKey}")"
# Match a search result whose artist links contain this IMVDB slug.
lidarrArtistMatchedData=$(echo $lidarrArtistSearchData | jq -r ".[] | select(.artist) | select(.artist.links[].url | contains (\"imvdb.com/n/${slug}\"))" 2>/dev/null)
if [ ! -z "$lidarrArtistMatchedData" ]; then
data="$lidarrArtistMatchedData"
artistName="$(echo "$data" | jq -r ".artist.artistName")"
foreignId="$(echo "$data" | jq -r ".foreignId")"
else
log "$loopCount of $videoArtistsCount :: $artistName :: ERROR : Musicbrainz ID Not Found, skipping..."
continue
fi
# Root folder + default profiles; presumably a single root folder is
# configured — with several, these fields concatenate. TODO confirm.
data=$(curl -s "$arrUrl/api/v1/rootFolder" -H "X-Api-Key: $arrApiKey" | jq -r ".[]")
path="$(echo "$data" | jq -r ".path")"
qualityProfileId="$(echo "$data" | jq -r ".defaultQualityProfileId")"
metadataProfileId="$(echo "$data" | jq -r ".defaultMetadataProfileId")"
data="{
\"artistName\": \"$artistName\",
\"foreignArtistId\": \"$foreignId\",
\"qualityProfileId\": $qualityProfileId,
\"metadataProfileId\": $metadataProfileId,
\"monitored\":true,
\"monitor\":\"all\",
\"rootFolderPath\": \"$path\",
\"addOptions\":{\"searchForMissingAlbums\":false}
}"
# NOTE(review): $lidarrArtistIds is not set in this function — it appears to
# be populated elsewhere (e.g. by VideoProcess); verify it holds data when
# this runs, otherwise this duplicate check never matches.
if echo "$lidarrArtistIds" | grep "^${foreignId}$" | read; then
log "$loopCount of $videoArtistsCount :: $artistName :: Already in Lidarr ($foreignId), skipping..."
continue
fi
log "$loopCount of $videoArtistsCount :: $artistName :: Adding $artistName to Lidarr ($foreignId)..."
LidarrTaskStatusCheck
lidarrAddArtist=$(curl -s "$arrUrl/api/v1/artist" -X POST -H 'Content-Type: application/json' -H "X-Api-Key: $arrApiKey" --data-raw "$data")
done
}
# POST a JSON event notification to the configured webhook URL, if any.
#   $1 = event name, $2 = message
# No-op when $webHook is empty/unset.
NotifyWebhook () {
  if [ "$webHook" ]
  then
    content="$1: $2"
    # BUGFIX: was "{$webHook}" — the misplaced brace wrapped the whole URL in
    # {...}, which curl interprets as URL globbing rather than a literal URL.
    curl -X POST "$webHook" -H 'Content-Type: application/json' -d '{"event":"'"$1"'", "message":"'"$2"'", "content":"'"$content"'"}'
  fi
}
VideoProcess () {
Configuration
AddFeaturedVideoArtists
log "-----------------------------------------------------------------------------"
log "Finding Videos"
log "-----------------------------------------------------------------------------"
if [ -z "$videoDownloadTag" ]; then
lidarrArtists=$(wget --timeout=0 -q -O - "$arrUrl/api/v1/artist?apikey=$arrApiKey" | jq -r .[])
lidarrArtistIds=$(echo $lidarrArtists | jq -r .id)
else
lidarrArtists=$(curl -s "$arrUrl/api/v1/tag/detail" -H 'Content-Type: application/json' -H "X-Api-Key: $arrApiKey" | jq -r -M ".[] | select(.label == \"$videoDownloadTag\") | .artistIds")
lidarrArtistIds=$(echo $lidarrArtists | jq -r .[])
fi
lidarrArtistIdsCount=$(echo "$lidarrArtistIds" | wc -l)
processCount=0
for lidarrArtistId in $(echo $lidarrArtistIds); do
processCount=$(( $processCount + 1))
lidarrArtistData=$(wget --timeout=0 -q -O - "$arrUrl/api/v1/artist/$lidarrArtistId?apikey=$arrApiKey")
lidarrArtistName=$(echo $lidarrArtistData | jq -r .artistName)
lidarrArtistMusicbrainzId=$(echo $lidarrArtistData | jq -r .foreignArtistId)
if [ "$lidarrArtistName" == "Various Artists" ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: Skipping, not processed by design..."
continue
fi
lidarrArtistPath="$(echo "${lidarrArtistData}" | jq -r " .path")"
lidarrArtistFolder="$(basename "${lidarrArtistPath}")"
lidarrArtistFolderNoDisambig="$(echo "$lidarrArtistFolder" | sed "s/ (.*)$//g" | sed "s/\.$//g")" # Plex Sanitization, remove disambiguation
lidarrArtistNameSanitized="$(echo "$lidarrArtistFolderNoDisambig" | sed 's% (.*)$%%g')"
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: Checking for IMVDB Slug"
artistImvdbUrl=$(echo $lidarrArtistData | jq -r '.links[] | select(.name=="imvdb") | .url')
artistImvdbSlug=$(basename "$artistImvdbUrl")
if [ ! -z "$artistImvdbSlug" ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: Slug :: $artistImvdbSlug"
else
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ERROR :: Slug Not Found, skipping..."
continue
fi
if [ -d /config/extended/logs/video/complete ]; then
if [ -f "/config/extended/logs/video/complete/$lidarrArtistMusicbrainzId" ]; then
# Only update cache for artist if the completed log file is older than 7 days...
if [[ $(find "/config/extended/logs/video/complete/$lidarrArtistMusicbrainzId" -mtime +7 -print) ]]; then
ImvdbCache
fi
else
ImvdbCache
fi
else
# Always run cache process if completed log folder does not exist
ImvdbCache
fi
if [ -d /config/extended/logs/video/complete ]; then
# If completed log file found for artist, end processing and skip...
if [ -f "/config/extended/logs/video/complete/$lidarrArtistMusicbrainzId" ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: Music Videos previously downloaded, skipping..."
continue
fi
fi
if [ -z "$artistImvdbSlug" ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: No IMVDB artist link found, skipping..."
# Create log of missing IMVDB url...
if [ ! -d "/config/extended/logs/video/imvdb-link-missing" ]; then
mkdir -p "/config/extended/logs/video/imvdb-link-missing"
chmod 777 "/config/extended/logs/video"
chmod 777 "/config/extended/logs/video/imvdb-link-missing"
fi
if [ -d "/config/extended/logs/video/imvdb-link-missing" ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: Logging missing IMVDB artist in folder: /config/extended/logs/video/imvdb-link-missing"
touch "/config/extended/logs/video/imvdb-link-missing/${lidarrArtistFolderNoDisambig}--mbid-${lidarrArtistMusicbrainzId}"
fi
else
# Remove missing IMVDB log file, now that it is found...
if [ -f "/config/extended/logs/video/imvdb-link-missing/${lidarrArtistFolderNoDisambig}--mbid-${lidarrArtistMusicbrainzId}" ]; then
rm "/config/extended/logs/video/imvdb-link-missing/${lidarrArtistFolderNoDisambig}--mbid-${lidarrArtistMusicbrainzId}"
fi
imvdbArtistVideoCount=$(ls /config/extended/cache/imvdb/$lidarrArtistMusicbrainzId--*.json 2>/dev/null | wc -l)
if [ $imvdbArtistVideoCount = 0 ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: No videos found, skipping..."
else
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: Processing $imvdbArtistVideoCount Videos!"
find /config/extended/cache/imvdb -type f -empty -delete # delete empty files
imvdbProcessCount=0
for imvdbVideoData in $(ls /config/extended/cache/imvdb/$lidarrArtistMusicbrainzId--*.json); do
imvdbProcessCount=$(( $imvdbProcessCount + 1 ))
imvdbVideoTitle="$(cat "$imvdbVideoData" | jq -r .song_title)"
videoTitleClean="$(echo "$imvdbVideoTitle" | sed 's%/%-%g')"
videoTitleClean="$(echo "$videoTitleClean" | sed -e "s/[:alpha:][:digit:]._' -/ /g" -e "s/ */ /g" | sed 's/^[.]*//' | sed 's/[.]*$//g' | sed 's/^ *//g' | sed 's/ *$//g')"
imvdbVideoYear=""
imvdbVideoYear="$(cat "$imvdbVideoData" | jq -r .year)"
imvdbVideoImage="$(cat "$imvdbVideoData" | jq -r .image.o)"
imvdbVideoArtistsSlug="$(cat "$imvdbVideoData" | jq -r .artists[].slug)"
echo "$lidarrArtistName" > /config/extended/cache/imvdb/$imvdbVideoArtistsSlug
imvdbVideoFeaturedArtistsSlug="$(cat "$imvdbVideoData" | jq -r .featured_artists[].slug)"
imvdbVideoYoutubeId="$(cat "$imvdbVideoData" | jq -r ".sources[] | select(.is_primary==true) | select(.source==\"youtube\") | .source_data")"
#"/config/extended/cache/musicbrainz/$lidarrArtistId--$lidarrArtistMusicbrainzId--recordings.json"
#echo "$imvdbVideoTitle :: $imvdbVideoYear :: $imvdbVideoYoutubeId :: $imvdbVideoArtistsSlug"
if [ -z "$imvdbVideoYoutubeId" ]; then
continue
fi
videoDownloadUrl="https://www.youtube.com/watch?v=$imvdbVideoYoutubeId"
plexVideoType="-video"
if [ -d "$videoPath/$lidarrArtistFolderNoDisambig" ]; then
if [ -f "$videoPath/$lidarrArtistFolderNoDisambig/${videoTitleClean}${plexVideoType}.nfo" ]; then
if cat "$videoPath/$lidarrArtistFolderNoDisambig/${videoTitleClean}${plexVideoType}.nfo" | grep "source" | read; then
sleep 0
else
sed -i '$d' "$videoPath/$lidarrArtistFolderNoDisambig/${videoTitleClean}${plexVideoType}.nfo"
echo " <source>youtube</source>" >> "$videoPath/$lidarrArtistFolderNoDisambig/${videoTitleClean}${plexVideoType}.nfo"
echo "</musicvideo>" >> "$videoPath/$lidarrArtistFolderNoDisambig/${videoTitleClean}${plexVideoType}.nfo"
tidy -w 2000 -i -m -xml "$videoPath/$lidarrArtistFolderNoDisambig/${videoTitleClean}${plexVideoType}.nfo" &>/dev/null
fi
fi
if [[ -n $(find "$videoPath/$lidarrArtistFolderNoDisambig" -maxdepth 1 -iname "${videoTitleClean}${plexVideoType}.mkv") ]] || [[ -n $(find "$videoPath/$lidarrArtistFolderNoDisambig" -maxdepth 1 -iname "${videoTitleClean}${plexVideoType}.mp4") ]]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${imvdbArtistVideoCount} :: ${imvdbVideoTitle} :: Previously Downloaded, skipping..."
continue
fi
fi
if [ ! -z "$imvdbVideoFeaturedArtistsSlug" ]; then
for featuredArtistSlug in $(echo "$imvdbVideoFeaturedArtistsSlug"); do
if [ -f /config/extended/cache/imvdb/$featuredArtistSlug ]; then
featuredArtistName="$(cat /config/extended/cache/imvdb/$featuredArtistSlug)"
fi
find /config/extended/cache/imvdb -type f -empty -delete # delete empty files
if [ -z "$featuredArtistName" ]; then
continue
fi
done
fi
if [ ! -z "$cookiesFile" ]; then
videoData="$(yt-dlp --cookies "$cookiesFile" -j "$videoDownloadUrl")"
else
videoData="$(yt-dlp -j "$videoDownloadUrl")"
fi
videoThumbnail="$imvdbVideoImage"
if [ -z "$imvdbVideoYear" ]; then
videoUploadDate="$(echo "$videoData" | jq -r .upload_date)"
videoYear="${videoUploadDate:0:4}"
else
videoYear="$imvdbVideoYear"
fi
videoSource="youtube"
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${imvdbArtistVideoCount} :: ${imvdbVideoTitle} :: $videoDownloadUrl..."
DownloadVideo "$videoDownloadUrl" "$videoTitleClean" "$plexVideoType" "IMVDB"
if [ "$downloadFailed" = "true" ]; then
log "${processCount}/${lidarrArtistIdsCount} :: $lidarrArtistName :: IMVDB :: ${imvdbProcessCount}/${imvdbArtistVideoCount} :: ${imvdbVideoTitle} :: Download failed, skipping..."
continue
fi
DownloadThumb "$imvdbVideoImage" "$videoTitleClean" "$plexVideoType" "IMVDB"
VideoProcessWithSMA "IMVDB" "$imvdbVideoTitle"
VideoTagProcess "$videoTitleClean" "$plexVideoType" "$videoYear" "IMVDB"
VideoNfoWriter "$videoTitleClean" "$plexVideoType" "$imvdbVideoTitle" "" "imvdb" "$videoYear" "IMVDB" "$videoSource"
if [ ! -d "$videoPath/$lidarrArtistFolderNoDisambig" ]; then
mkdir -p "$videoPath/$lidarrArtistFolderNoDisambig"
chmod 777 "$videoPath/$lidarrArtistFolderNoDisambig"
fi
mv $videoDownloadPath/incomplete/* "$videoPath/$lidarrArtistFolderNoDisambig"/
done
fi
fi
if [ ! -d /config/extended/logs/video ]; then
mkdir -p /config/extended/logs/video
chmod 777 /config/extended/logs/video
fi
if [ ! -d /config/extended/logs/video/complete ]; then
mkdir -p /config/extended/logs/video/complete
chmod 777 /config/extended/logs/video/complete
fi
touch "/config/extended/logs/video/complete/$lidarrArtistMusicbrainzId"
# Import Artist.nfo file
if [ -d "$lidarrArtistPath" ]; then
if [ -d "$videoPath/$lidarrArtistFolderNoDisambig" ]; then
if [ -f "$lidarrArtistPath/artist.nfo" ]; then
if [ ! -f "$videoPath/$lidarrArtistFolderNoDisambig/artist.nfo" ]; then
log "${processCount}/${lidarrArtistIdsCount} :: Copying Artist NFO to music-video artist directory"
cp "$lidarrArtistPath/artist.nfo" "$videoPath/$lidarrArtistFolderNoDisambig/artist.nfo"
chmod 666 "$videoPath/$lidarrArtistFolderNoDisambig/artist.nfo"
fi
fi
fi
fi
done
}
log "Starting Script...."
# Main service loop: run the helper functions defined earlier in this script
# once per pass, then sleep for $videoScriptInterval before the next pass.
# Loops forever until the process/container is stopped externally.
for (( ; ; )); do
	let i++	# pass counter
	logfileSetup
	verifyConfig
	getArrAppInfo
	verifyApiAccess
	VideoProcess
	log "Script sleeping for $videoScriptInterval..."
	sleep $videoScriptInterval
done
# Unreachable: the loop above never terminates normally.
exit

View file

@ -0,0 +1,389 @@
import re
from pathlib import Path
from dataclasses import dataclass
from requests import Session
from argparse import ArgumentParser
from sys import argv, stdout
from colorama import Fore, init
from telegram import Update
from telegram.ext import ApplicationBuilder, ContextTypes, CommandHandler
import logging
import os
from datetime import datetime
# Pull script version from bash script. will likely change this to a var passthrough
# NOTE(review): if no line of the bash script matches 'scriptVersion', VERSION is
# never bound and the f-string in the logging format below raises NameError —
# confirm the wrapper script always defines scriptVersion.
with open("/custom-services.d/ARLChecker", "r") as r:
    for line in r:
        if 'scriptVersion' in line:
            VERSION = re.search(r'"([A-Za-z0-9_\./\\-]*)"', line)[0].replace('"','')
# Get current log file: the newest file in /config/logs whose name contains 'ARLChecker'
path = '/config/logs'
latest_file = max([os.path.join(path, f) for f in os.listdir(path) if 'ARLChecker' in f],key=os.path.getctime)
# Logging setup: mirror every log line to stdout and append it to the current log file
logging.basicConfig(
    format=f'%(asctime)s :: ARLChecker :: {VERSION} :: %(levelname)s :: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    level=logging.INFO,
    handlers=[
        logging.StreamHandler(stdout),
        logging.FileHandler(latest_file, mode="a")
    ]
)
logger = logging.getLogger(__name__)
# Initialize colorama (autoreset returns the terminal color after each print)
init(autoreset=True)
# Web agent used to access Deezer
USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:83.0) Gecko/20100101 Firefox/110.0'
@dataclass
class Plan:
    """Subscription-plan details for a Deezer account, as filled in by
    DeezerPlatformProvider.login() from the getUserData response."""
    name: str      # offer name (OFFER_NAME)
    expires: str   # expiry date; login() always fills in 'Unknown'
    active: bool   # login() hard-codes True on successful login
    download: bool  # login() hard-codes True on successful login
    lossless: bool  # web_sound_quality['lossless'] from the API response
    # NOTE(review): login() stores EXPLICIT_CONTENT_LEVEL here, which looks like
    # a level value rather than a bool — confirm against the Deezer response.
    explicit: bool
@dataclass
class Account:
    """A validated Deezer account: identity, ARL token, country and plan."""
    # NOTE(review): login() passes its `username` argument (a string, '' in
    # check_token) into this int-annotated field — confirm intended type.
    id: int
    token: str     # the ARL session token that authenticated this account
    country: str   # COUNTRY from the getUserData response
    plan: Plan     # subscription details (see Plan)
class AuthError(Exception):
    """Raised when Deezer rejects the ARL token (getUserData USER_ID == 0)."""
    pass
class ParseError(Exception):
    """Raised when the Deezer response body cannot be parsed as JSON."""
    pass
class ServiceError(Exception):
    """Raised when the Deezer API returns a non-empty 'error' object."""
    pass
class DeezerPlatformProvider:
    """Thin client for Deezer's private gw-light API, used only to validate an
    ARL session cookie via the 'deezer.getUserData' method."""
    NAME = 'Deezer'
    BASE_URL = 'http://www.deezer.com'
    API_PATH = '/ajax/gw-light.php'
    # Fixed form payload for the getUserData call.
    SESSION_DATA = {
        'api_token': 'null',
        'api_version': '1.0',
        'input': '3',
        'method': 'deezer.getUserData'
    }

    def __init__(self):
        super().__init__()
        # One shared HTTP session carrying a browser-like User-Agent.
        self.session = Session()
        self.session.headers.update({'User-Agent': USER_AGENT})

    def login(self, username, secret):
        """Validate the ARL cookie `secret` against Deezer.

        Returns an Account populated from the getUserData response.
        Raises:
            ConnectionError: network/HTTP failure on the POST.
            ParseError: response body is not valid JSON.
            ServiceError: API response carries a non-empty 'error' object.
            AuthError: ARL rejected/expired (USER_ID == 0).
        """
        try:
            res = self.session.post(
                self.BASE_URL + self.API_PATH,
                cookies={'arl': secret},
                data=self.SESSION_DATA
            )
            res.raise_for_status()
        except Exception as error:
            logger.error(Fore.RED + 'Could not connect! Service down, API changed, wrong credentials or code-related issue.' + Fore.LIGHTWHITE_EX)
            raise ConnectionError()
        # Drop the ARL cookie so it is not reused by later requests.
        self.session.cookies.clear()
        try:
            res = res.json()
        except Exception as error:
            logger.error(Fore.RED + "Could not parse JSON response from DEEZER!" + Fore.LIGHTWHITE_EX)
            raise ParseError()
        if 'error' in res and res['error']:
            logger.error(Fore.RED + "Deezer returned the following error:{}".format(res["error"]) + Fore.LIGHTWHITE_EX)
            raise ServiceError()
        res = res['results']
        # USER_ID 0 means the session is anonymous: the ARL was not accepted.
        if res['USER']['USER_ID'] == 0:
            logger.error(Fore.RED+"ARL Token Expired. Update the token in extended.conf"+Fore.LIGHTWHITE_EX)
            raise AuthError()
        return Account(username, secret, res['COUNTRY'], Plan(
            res['OFFER_NAME'],
            'Unknown',  # expiry date is not derived from this endpoint
            True,       # active / download: hard-coded once login succeeds
            True,
            res['USER']['OPTIONS']['web_sound_quality']['lossless'],
            res['USER']['EXPLICIT_CONTENT_LEVEL']
        ))
class LidarrExtendedAPI:
    """Reads and rewrites lidarr-extended's extended.conf: validates the
    configured Deezer ARL token, persists a replacement token, and optionally
    drives a Telegram bot for interactive token updates."""

    # sets new token to extended.conf
    def __init__(self, new_arl_token):
        """Locate extended.conf relative to the working directory and parse it.

        new_arl_token: candidate replacement ARL token (may be None when only
        checking the current token).
        """
        workingDir = Path(os.getcwd())
        print(workingDir)
        #self.parentDir = str(workingDir.parents[1])
        # NOTE(review): assumes the script runs four directory levels below the
        # root containing /config — confirm against the container layout.
        self.parentDir = str(workingDir.parents[3])
        print(self.parentDir)
        self.extendedConfDir = self.parentDir + '/config/extended.conf'
        self.newARLToken = new_arl_token
        self.arlToken = None                 # current token as read from extended.conf (includes quotes)
        self.arlLineText = None              # full text of the arlToken= line
        self.arlLineIndex = None             # index of that line within fileText
        self.fileText = None                 # all lines of extended.conf
        self.enable_telegram_bot = False     # telegramBotEnable flag from extended.conf
        self.telegram_bot_running = False    # guards against re-launching the bot
        self.telegram_bot_token = None
        self.telegram_user_chat_id = None
        self.telegramBotEnableLineText = None
        self.telegramBotEnableLineIndex = None
        self.bot = None                      # TelegramBotControl instance, once started
        self.parse_extended_conf()

    def parse_extended_conf(self):
        """Parse extended.conf: require Deezer as an active download source,
        capture the arlToken= line, and read the Telegram bot settings.

        Exits the process when the file is missing, the token line is absent,
        Deezer is disabled, or the bot is enabled without token/chat ID.
        """
        deezer_active = False
        self.arlToken = None
        arl_token_match = None
        re_search_pattern = r'"([^"]*)"'
        try: # Try to open extended.conf and read all text into a var.
            with open(self.extendedConfDir, 'r', encoding='utf-8') as file:
                self.fileText = file.readlines()
                file.close()
        except:
            logger.error(f"Could not find {self.extendedConfDir}")
            exit(1)
        # Ensure Deezer is enabled and ARL token is populated
        for line in self.fileText:
            if 'dlClientSource="deezer"' in line or 'dlClientSource="both"' in line:
                deezer_active = True
            if 'arlToken=' in line:
                self.arlLineText = line
                self.arlLineIndex = self.fileText.index(self.arlLineText)
                arl_token_match = re.search(re_search_pattern, line)
                break
        # ARL Token wrong flag error handling.
        if arl_token_match is None:
            logger.error("ARL Token not found in extended.conf. Exiting")
            exit(1)
        elif deezer_active is False:
            logger.error("Deezer not set as an active downloader in extended.conf. Exiting")
            file.close()
            exit(1)
        # Full regex match including the surrounding quotes, e.g. '"token"'.
        self.arlToken = arl_token_match[0]
        logger.info('ARL Found in extended.conf')
        for line in self.fileText:
            if 'telegramBotEnable=' in line:
                self.telegramBotEnableLineText = line
                self.telegramBotEnableLineIndex = self.fileText.index(self.telegramBotEnableLineText)
                # NOTE(review): substring test — ''.lower() in 'true' and
                # 'tru' in 'true' are both True, so malformed values can enable
                # the bot; an equality check may be intended.
                self.enable_telegram_bot = re.search(re_search_pattern, line)[0].replace('"', '').lower() in 'true'
            if 'telegramBotToken=' in line:
                self.telegram_bot_token = re.search(re_search_pattern, line)[0].replace('"', '')
            if 'telegramUserChatID=' in line:
                self.telegram_user_chat_id = re.search(re_search_pattern, line)[0].replace('"', '')
        if self.enable_telegram_bot:
            logger.info('Telegram bot is enabled.')
            if self.telegram_bot_token is None or self.telegram_user_chat_id is None:
                logger.error('Telegram bot token or user chat ID not set in extended.conf. Exiting')
                exit(1)
        else:
            logger.info('Telegram bot is disabled. Set the flag in extended.conf to enable.')

    # Uses DeezerPlatformProvider to check if the token is valid
    def check_token(self, token=None):
        """Validate an ARL token against Deezer and log the plan details.

        Returns True when valid, False when `token` is None or when invoked
        from the running bot on failure. On failure it reports EXPIRED,
        optionally launches the Telegram bot, then exits with status 420.
        """
        logger.info('Checking ARL Token Validity...')
        if token == '""':
            # extended.conf contains an empty quoted value: nothing to check.
            logger.info(Fore.YELLOW+"No ARL Token set in Extended.conf"+Fore.LIGHTWHITE_EX)
            self.report_status("NOT SET")
            exit(0)
        if token is None:
            print('Invalid ARL Token Entry')
            return False
        try:
            deezer_check = DeezerPlatformProvider()
            # Strip the quotes kept by parse_extended_conf before logging in.
            account = deezer_check.login('', token.replace('"',''))
            if account.plan:
                logger.info(Fore.GREEN + f'Deezer Account Found.'+ Fore.LIGHTWHITE_EX)
                logger.info('-------------------------------')
                logger.info(f'Plan: {account.plan.name}')
                logger.info(f'Expiration: {account.plan.expires}')
                logger.info(f'Active: {Fore.GREEN+"Y" if account.plan.active else "N"}'+Fore.LIGHTWHITE_EX)
                logger.info(f'Download: {Fore.GREEN+"Y" if account.plan.download else Fore.RED+"N"}'+Fore.LIGHTWHITE_EX)
                logger.info(f'Lossless: {Fore.GREEN+"Y" if account.plan.lossless else Fore.RED+"N"}'+Fore.LIGHTWHITE_EX)
                logger.info(f'Explicit: {Fore.GREEN+"Y" if account.plan.explicit else Fore.RED+"N"}'+Fore.LIGHTWHITE_EX)
                logger.info('-------------------------------')
                self.report_status('VALID')
                return True
        except Exception as e:
            print(e)
            self.report_status('EXPIRED')
            if self.telegram_bot_running:
                # Called from the bot itself: just report the failure back.
                return False
            if self.enable_telegram_bot:
                logger.info('Starting Telegram bot...Check Telegram and follow instructions.')
                self.telegram_bot_running = True
                self.start_telegram_bot()
            exit(420)

    def set_new_token(self): # Re-writes extended.conf with previously read-in text, replacing w/ new ARL
        """Substitute newARLToken for arlToken on the captured arlToken= line
        and write the whole file back to extended.conf."""
        self.fileText[self.arlLineIndex] = self.arlLineText.replace(self.arlToken, self.newARLToken)
        with open(self.extendedConfDir, 'w', encoding='utf-8') as file:
            file.writelines(self.fileText)
            file.close()
        logger.info("New ARL token written to extended.conf")

    # After new token is set, clean up notfound and failed downloads to bypass the default 30 day wait
    def clear_not_found(self):
        """Delete every file under the notfound and failed-deezer log folders."""
        paths = [self.parentDir + '/config/extended/logs/notfound',self.parentDir+'/config/extended/logs/downloaded/failed/deezer']
        for path in paths:
            for file in os.listdir(path):
                file_to_delete = os.path.join(path,file)
                os.remove(file_to_delete)

    def report_status(self, status):
        """Overwrite ARLStatus.txt with a timestamped one-line status
        ('VALID' / 'EXPIRED' / 'NOT SET')."""
        f = open("/custom-services.d/python/ARLStatus.txt", "w")
        now = datetime.strftime(datetime.now(),"%b-%d-%Y at %H:%M:%S")
        f.write(f"{now}: ARL Token is {status}.{' Please update arlToken in extended.conf' if status=='EXPIRED' else ''}")
        f.close()

    def start_telegram_bot(self):
        """Construct TelegramBotControl (blocks while the bot polls)."""
        self.bot = TelegramBotControl(self,self.telegram_bot_token,self.telegram_user_chat_id)

    def disable_telegram_bot(self):
        """Case-insensitively replace 'true' with 'false' on the
        telegramBotEnable line and rewrite extended.conf."""
        compiled = re.compile(re.escape('true'), re.IGNORECASE)
        self.fileText[self.telegramBotEnableLineIndex] = compiled.sub('false', self.telegramBotEnableLineText)
        with open(self.extendedConfDir, 'w', encoding='utf-8') as file:
            file.writelines(self.fileText)
            file.close()
        logger.info("Telegram Bot Disabled.")
class TelegramBotControl:
    """Telegram bot session: announces the expired ARL token to the configured
    chat and handles /set_token, /cancel and /disable until stopped."""

    def __init__(self, parent,telegram_bot_token,telegram_user_chat_id):
        async def send_expired_token_notification(application):
            # post_init hook: runs once polling starts; warns the configured chat.
            await application.bot.sendMessage(chat_id=self.telegram_chat_id,text='---\U0001F6A8WARNING\U0001F6A8-----\nARL TOKEN EXPIRED\n Update Token by running "/set_token <TOKEN>"\n You can find a new ARL at:\nhttps://rentry.org/firehawk52#deezer-arls\n\n\n Other Commands:\n/cancel - Cancel this session\n/disable - Disable Telegram Bot',disable_web_page_preview=True)

        # TODO: Get Chat ID/ test on new bot
        self.parent = parent  # owning LidarrExtendedAPI instance
        self.telegram_bot_token = telegram_bot_token
        self.telegram_chat_id = telegram_user_chat_id
        # start bot control
        self.application = ApplicationBuilder().token(self.telegram_bot_token).post_init(send_expired_token_notification).build()
        token_handler = CommandHandler('set_token', self.set_token)
        cancel_handler = CommandHandler('cancel', self.cancel)
        disable_handler = CommandHandler('disable', self.disable_bot)
        self.application.add_handler(token_handler)
        self.application.add_handler(cancel_handler)
        self.application.add_handler(disable_handler)
        # Blocks here until one of the handlers stops the application.
        self.application.run_polling(allowed_updates=Update.ALL_TYPES)

    async def disable_bot(self, update, context: ContextTypes.DEFAULT_TYPE):
        """/disable — flip telegramBotEnable to false in extended.conf and stop."""
        self.parent.disable_telegram_bot()
        await update.message.reply_text('Disabled Telegram Bot. \U0001F614\nIf you would like to re-enable,\nset telegramBotEnable to true\nin extended.conf')
        self.application.stop_running()

    async def cancel(self, update, context: ContextTypes.DEFAULT_TYPE):
        """/cancel — stop the bot without changing the (still expired) token."""
        await update.message.reply_text('Canceling...ARLToken is still expired.')
        try:
            self.application.stop_running()
        except Exception:
            pass

    async def set_token(self, update, context: ContextTypes.DEFAULT_TYPE):
        """/set_token <TOKEN> — validate the supplied token; if valid, write it
        to extended.conf, re-parse the config and re-verify."""
        try:
            new_token = update.message.text.split('/set_token ')[1]
            if new_token == '':
                raise Exception
        except:
            await update.message.reply_text('Invalid Entry... please try again.')
            return
        print(new_token)
        logger.info("Testing ARL Token Validity...")
        token_validity = self.parent.check_token(new_token)
        if token_validity:
            await context.bot.send_message(chat_id=update.effective_chat.id, text="ARL valid, applying...")
            self.parent.newARLToken = '"'+new_token+'"'
            self.parent.set_new_token()
            self.parent.arlToken = self.parent.newARLToken
            # TODO Fix this garbage - move functionality out of telegram stuff
            await context.bot.send_message(chat_id=update.effective_chat.id, text="Checking configuration")
            # reparse extended.conf
            self.parent.parse_extended_conf()
            token_validity = self.parent.check_token(self.parent.arlToken)
            if token_validity:
                await context.bot.send_message(chat_id=update.effective_chat.id, text="ARL Updated! \U0001F44D")
                try:
                    # NOTE(review): stop_running() is awaited here but called
                    # synchronously in cancel()/disable_bot() — confirm against
                    # the python-telegram-bot API; one of the two looks wrong.
                    await self.application.stop_running()
                except Exception:
                    pass
        else:# If Token invalid
            await update.message.reply_text(text="Token expired or inactive. try another token.")
            return
def main(arlToken = None):
    """CLI entry point.

    -c/--check validates the ARL token currently configured in extended.conf;
    -n/--new TOKEN writes a replacement token into extended.conf.

    arlToken: forwarded to LidarrExtendedAPI as the candidate new token.
    """
    parser = ArgumentParser(prog='Account Checker', description='Check if Deezer ARL Token is valid')
    parser.add_argument('-c', '--check', help='Check if current ARL Token is active/valid',required=False, default=False, action='store_true')
    parser.add_argument('-n', '--new', help='Set new ARL Token',type = str, required=False, default=False)
    if not argv[1:]:
        # No CLI arguments at all: show usage and stop.
        parser.print_help()
        parser.exit()
    args = parser.parse_args()
    arlToken_instance = LidarrExtendedAPI(arlToken)
    if args.check is True:
        if arlToken_instance.arlToken == '':
            print("ARL Token not set. re-run with -n flag")
            exit(1)
        try:
            arlToken_instance.check_token(arlToken_instance.arlToken)
        except Exception as e:
            # Map known Telegram API failures to actionable hints.
            if 'Chat not found' in str(e):
                logger.error(Fore.RED + "Chat not found. Check your chat ID in extended.conf, or start a chat with your bot."+Fore.LIGHTWHITE_EX)
            elif 'The token' in str(e):
                logger.error(Fore.RED + "Check your Bot Token in extended.conf."+Fore.LIGHTWHITE_EX)
            else:
                print(e)
            exit(1)
    # Bug fix: compare against the argparse default (False) instead of relying
    # on truthiness — an explicitly empty token (-n "") is falsy, which made
    # the dedicated empty-token error branch below unreachable and silently
    # printed the help text instead.
    elif args.new is not False:
        if args.new == '':
            print("Please pass new ARL token as an argument")
            exit(96)
        arlToken_instance.newARLToken = '"'+args.new+'"'
        arlToken_instance.set_new_token()
    else:
        parser.print_help()
# Direct invocation: run the CLI. 'FAKETOKEN' only seeds the arlToken
# parameter; main() overwrites newARLToken with -n/--new before any write,
# so this placeholder is never persisted.
if __name__ == '__main__':
    main('FAKETOKEN')

View file

@ -0,0 +1 @@
Mar-03-2024 at 10:25:14: ARL Token is EXPIRED. Please update arlToken in extended.conf

View file

@ -0,0 +1,63 @@
# Compose stack: Samba file share plus Nextcloud with a MariaDB backend,
# exposed through an external Traefik reverse-proxy network.
version: '2'
services:
  samba:
    container_name: samba
    image: dperson/samba
    volumes:
      - /mnt/hdd/nas:/mount
    environment:
      - USERID=1000
      - GROUPID=1000
    ports:
      - "139:139"
      - "445:445"
    restart: always
    # -s defines the share; per dperson/samba its fields are
    # name;path;browseable;readonly;guest — confirm flags match intent.
    command: >
      -s "public;/mount;yes;no;yes" -p
    networks:
      net:
  nextcloud-db:
    image: mariadb:10.5
    container_name: nextcloud-db
    restart: always
    # Transaction-isolation/binlog flags as used in Nextcloud's official
    # MariaDB compose example.
    command: --transaction-isolation=READ-COMMITTED --binlog-format=ROW
    volumes:
      - /mnt/hdd/docker/nextcloud_db:/var/lib/mysql
    networks:
      nas_net:
    env_file:
      - nextcloud.env
  nextcloud:
    image: nextcloud:latest
    container_name: nextcloud
    restart: always
    volumes:
      - /mnt/hdd/docker/nextcloud_data:/var/www/html
    env_file:
      - nextcloud.env
    environment:
      # Hostname of the DB service above.
      - MYSQL_HOST=nextcloud-db
    labels:
      # Route nextcloud.ghoscht.com through Traefik's TLS entrypoint with a
      # Let's Encrypt ('lencrypt') certificate.
      - traefik.enable=true
      - traefik.http.routers.nextcloud.entrypoints=websecure
      - traefik.http.routers.nextcloud.rule=Host(`nextcloud.ghoscht.com`)
      - traefik.docker.network=traefik-net
      - traefik.http.routers.nextcloud.tls=true
      - traefik.http.routers.nextcloud.tls.certresolver=lencrypt
    networks:
      nas_net:
      net:
    dns:
      - 1.1.1.1
networks:
  net:
    name: traefik-net
    external: true
  nas_net:
    name: nas-net
volumes:
  # NOTE(review): these named volumes are declared but no service above mounts
  # them — both services use bind mounts under /mnt/hdd/docker instead.
  # Confirm whether they are still needed.
  nextcloud_data:
    name: nextcloud_data
  nextcloud_db:
    name: nextcloud_db

View file

@ -0,0 +1,22 @@
# Vaultwarden (Bitwarden-compatible server) behind the external Traefik proxy.
version: '3'
services:
  vaultwarden:
    image: vaultwarden/server:latest
    container_name: vaultwarden
    restart: always
    environment:
      # NOTE(review): the router below serves this host over TLS
      # (websecure + certresolver), but DOMAIN uses http:// — confirm whether
      # this should be https://vaultwarden.ghoscht.com.
      DOMAIN: "http://vaultwarden.ghoscht.com"
    volumes:
      - /mnt/hdd/docker/vaultwarden_data/:/data
    labels:
      # Route vaultwarden.ghoscht.com through Traefik with a Let's Encrypt cert.
      - traefik.enable=true
      - traefik.http.routers.vaultwarden.entrypoints=websecure
      - traefik.http.routers.vaultwarden.rule=Host(`vaultwarden.ghoscht.com`)
      - traefik.http.routers.vaultwarden.tls=true
      - traefik.http.routers.vaultwarden.tls.certresolver=lencrypt
    networks:
      traefik-net:
networks:
  traefik-net:
    name: traefik-net
    external: true

View file

@ -0,0 +1,19 @@
# Utility container that idles forever so the external named volumes can be
# mounted and inspected/copied from one place.
version: "3.5"
services:
  volman:
    image: ubuntu
    container_name: volman
    command: sleep infinity
    dns:
      - 1.1.1.1
    volumes:
      - gitea_db:/gitea_db
      # NOTE(review): the alias 'gitea_data' resolves to the external volume
      # named 'navidrome_data' (see below) and mounts at /navidrome_data —
      # the gitea/navidrome naming looks swapped; confirm intent.
      - gitea_data:/navidrome_data
      - /mnt/hdd/docker:/docker
volumes:
  gitea_data:
    name: navidrome_data
    external: true
  gitea_db:
    # NOTE(review): alias 'gitea_db' points at the external volume named
    # 'gitea_data' — verify this mapping is intentional.
    name: gitea_data
    external: true

30
secrets/franz.yaml Normal file
View file

@ -0,0 +1,30 @@
cloudflared:
tunnel_token: ENC[AES256_GCM,data:KEnrTkTCuicpUg51AHrAj08aexQKyPdS42QexuOeK/OeQ4/px3Xrz/95XYztEjdF5eg4c0GNnJidJ2nx7UlGYq+Wp8NINZtrOWB3Vm3pq/4pjdfyX7sMTCvrYE23/pT6kAC1KH/hkhFnauCeqgOlqBDe+I3kM0lVBzIakmSfnHNWJ3PzM9kFpRSD/EprzYyUJoFW7bKY3TlngheQhXc+v0rCMXj/EsZZQRS0L3sGkvbK/xA3PKKsBA==,iv:Xsx/CwGmkr5FoL8zOsfD6ZwhHq8qLgpKEihiAg1iCsI=,tag:mewbduDjTYsAR/f+4h3y4w==,type:str]
traefik:
cloudflare_email: ENC[AES256_GCM,data:MXd2rbFmRiQFb+N4d5Ncm0FxYg==,iv:bwVm5+j+zvdw4XecSnBIVWwmvaEkwQtI8J3XQpq/lOc=,tag:7ptLXgQ9pxkuWquPkYKgCA==,type:str]
cloudflare_api_key: ENC[AES256_GCM,data:S4iozYRQSK9Gd1UWiV1MqZE8vCTZ7aSU83SH83n17VoJFuQbSA==,iv:CEqAUMW9SUrS6ndo9meiY4DQFwuivWOJMzWi5UHXFqI=,tag:4V7S107Lr5qyh4UyNSVsjw==,type:str]
nextcloud:
mysql_root_password: ENC[AES256_GCM,data:bCghTvvQ8eR76g1tTbtOE/MB8UcnVUsn5ooQ9+tKdB8=,iv:tmopYWAIVHNVcYYOWJy2uedP38nM5WR5nzD7pjD9w0Y=,tag:E8VIkOu2bWHxq94w7YyC2Q==,type:str]
mysql_password: ENC[AES256_GCM,data:g+xf2rbj1HMMF0vLoXHlvrX2ct9/OXCystt42cdkodk=,iv:6Q7JAWR8WMmSKo21k+zmqGcSEnpTOoO38G66UMHc5qM=,tag:LQHHAH69EFk0v5LVBznjzA==,type:str]
mysql_database: ENC[AES256_GCM,data:2OP4bt4Tq09q,iv:l6k5lW0PsfciPv3uhVjxrILZ7hNGKQNPtF2QSmtlym0=,tag:HXYilVMhngdeMP1qQWDGBg==,type:str]
mysql_user: ENC[AES256_GCM,data:AixE7ec9SjO4,iv:cnxCAt+MAr0BXixkqH77JC5kjb7p1vKZlD5hkemtKvE=,tag:/m8TBXht6RuB5QE4MFRUBA==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age:
- recipient: age1uauvjwfvg8u0zkn58ematurcptf43gz6vx44nwkq3xcnmwq95psqna9psw
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBxSUV4ZlIzM0xOc3VsV0lN
TlpWMWswdEI2QWxvcklkeWpRcTg4T0V5eUI4Ck1FSGZ2K2NqcEExRUEzQlpoZFVi
eTNLV0R2UzFsWmIwNWpmUnBVUVRFUk0KLS0tIHJJc2dtdkJmQzF5OWN0eDIycGJw
VUUxcEhvYi8zeXlCUUViUTl0eWdhcU0KXOfbnDc+zc8lnBcyEAV5EiJSjcSU6AgI
EfeRw8qVqwChrYn1agslcNnDbE0WQsOCBuA6cE4V3kRofp9HU949ig==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2024-03-03T16:00:04Z"
mac: ENC[AES256_GCM,data:TvA9zrLlN6AIxYFOuoVIpo/mhzymxhMqq+iyExy0vXUUI94D2yNs8lexPko3HxJBp7FisNpLkIaNxNKkr0qno39ZTwDWcws86fTW9dSpB1uhQP/A8hrjUirjOxX0hqk+vI1Uh4Ungwrc/5itz+1NmrYYJCM62KGv73RDYKEUqzE=,iv:I4N0L6Dp4YJC5QrHfToQb65v6KSa/V0e/88CUzM3Pms=,tag:RZjgAiuCf6UmNE4s8A3dvg==,type:str]
pgp: []
unencrypted_suffix: _unencrypted
version: 3.8.1