diff --git a/.env.sample b/.env.sample new file mode 100644 index 0000000..709cf84 --- /dev/null +++ b/.env.sample @@ -0,0 +1,32 @@ +# .env +# Environment Configuration StarrScripts Sample 2024-04 +# Rename this file to .env and fill in the values accordingly. +# Xseed +## Download Client Names +TORRENT_CLIENT_NAME="" # Example: "Qbit" +USENET_CLIENT_NAME="" # Example: "SABnzbd" +## Cross Seed API configuration +XSEED_HOST="" # Example: "crossseed" +XSEED_PORT="" # Example: "2468" +## API Key for Cross Seed, if applicable +XSEED_APIKEY="" # Example: "your-api-key" +## Path to store the script's database of prior searches +LOG_FILE="" # Example: "/config/xseed_db.log" +# ZFS Destory +VERBOSE=0 +MAX_FREQ=2 +MAX_HOURLY=2 +MAX_DAILY=1 +MAX_WEEKLY=0 +MAX_MONTHLY=0 +# Jdupes +JDUPES_OUTPUT_LOG="" # Example: "/.config/jdupes.log" +JDUPES_SOURCE_DIR="" # Example: "/mnt/data/media/" +JDUPES_DESTINATION_DIR="" # Example: "/mnt/data/torrents/" +JDUPES_HASH_DB="" # Example: "/.config/jdupes_hashdb" +# Qbittorrent Manage +QBIT_MANAGE_LOCK_FILE_PATH="" # Example: "/var/lock/qbm-qbit.lock" +QBIT_MANAGE_PATH="" # Example: "/opt/qbit-manage" +QBIT_MANAGE_VENV_PATH="" # Example: "/opt/qbit-manage/.venv" +QBIT_MANAGE_CONFIG_PATH="" # Example: "/opt/qbit-manage/config.yml" +QBIT_MANAGE_OPTIONS="" # Example: "-cs -re -cu -tu -ru -sl -r" diff --git a/.gitignore b/.gitignore index dfcfd56..5f74ef5 100644 --- a/.gitignore +++ b/.gitignore @@ -348,3 +348,6 @@ MigrationBackup/ # Ionide (cross platform F# VS Code tools) working folder .ionide/ + +# Ignore .env +.env diff --git a/dupe.sh b/dupe.sh index 8aabb03..6213409 100755 --- a/dupe.sh +++ b/dupe.sh @@ -1,15 +1,29 @@ #!/bin/bash -jdupes_command="/usr/bin/jdupes" -exclude_dirs="-X nostr:.RecycleBin -X nostr:.trash" -include_ext="-X onlyext:mp4,mkv,avi" -output_log="/.config/jdupes.log" -source_dir="/mnt/data/media/" -destination_dir="/mnt/data/torrents/" -hash_db="/.config/jdupes_hashdb" +# Load environment variables from .env file +# 
Load environment variables from .env file if it exists +if [ -f ".env" ]; then + # shellcheck source=.env + source ".env" +fi +# Variables +JDUPES_OUTPUT_LOG=${JDUPES_OUTPUT_LOG:-"/var/log/jdupes.log"} +JDUPES_SOURCE_DIR=${JDUPES_SOURCE_DIR:-"/mnt/data/media/"} +JDUPES_DESTINATION_DIR=${JDUPES_DESTINATION_DIR:-"/mnt/data/torrents/"} +JDUPES_HASH_DB=${JDUPES_HASH_DB:-"/var/lib/jdupes_hashdb"} +## Secret Variables +JDUPES_COMMAND=${JDUPES_COMMAND:-"/usr/bin/jdupes"} +JDUPES_EXCLUDE_DIRS=${JDUPES_EXCLUDE_DIRS:-"-X nostr:.RecycleBin -X nostr:.trash"} +JDUPES_INCLUDE_EXT=${JDUPES_INCLUDE_EXT:-"mp4,mkv,avi"} +# Logging the start of the operation timestamp=$(date +"%Y-%m-%d %H:%M:%S") -echo "[$timestamp] Duplicate search started for $source_dir and $destination_dir." >> "$output_log" -$jdupes_command $exclude_dirs $include_ext -L -r -Z -y "$hash_db" "$source_dir" "$destination_dir" >> "$output_log" +echo "[$timestamp] Duplicate search started for $JDUPES_SOURCE_DIR and $JDUPES_DESTINATION_DIR." >>"$JDUPES_OUTPUT_LOG" +echo "command is" +# Running jdupes with the loaded environment variables +echo $JDUPES_COMMAND "$JDUPES_EXCLUDE_DIRS" "$JDUPES_INCLUDE_EXT" -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" +$JDUPES_COMMAND $JDUPES_EXCLUDE_DIRS -X onlyext:$JDUPES_INCLUDE_EXT -L -r -Z -y "$JDUPES_HASH_DB" "$JDUPES_SOURCE_DIR" "$JDUPES_DESTINATION_DIR" >>"$JDUPES_OUTPUT_LOG" + +# Logging the completion of the operation timestamp=$(date +"%Y-%m-%d %H:%M:%S") -echo "[$timestamp] Duplicate search completed for $source_dir and $destination_dir." >> "$output_log" +echo "[$timestamp] Duplicate search completed for $JDUPES_SOURCE_DIR and $JDUPES_DESTINATION_DIR." 
>>"$JDUPES_OUTPUT_LOG" diff --git a/notifiarr-branch-builder.sh b/notifiarr-branch-builder.sh index 114e6a1..3d80da5 100755 --- a/notifiarr-branch-builder.sh +++ b/notifiarr-branch-builder.sh @@ -1,61 +1,43 @@ #!/bin/bash + +# Extend the PATH to include the go binary directory export PATH=$PATH:/usr/local/go/bin -# Function to display an error message and exit +# Function to display error messages and exit with status 1 handle_error() { - echo "Error: $1" + echo "Error: $1" >&2 exit 1 } -# Display usage information +# Function to display usage information display_help() { echo "Usage: $0 [options]" echo "Options:" - echo " -h Display this help message" - echo " --repo-url URL Set the repository URL (default: https://github.com/Notifiarr/notifiarr.git)" - echo " --repo-dir DIR Set the repository directory (default: /home/bakerboy448/notifiarr)" - echo " --bin-path PATH Set the binary path (default: /usr/bin/notifiarr)" - echo " --branch BRANCH Set the branch (default: master)" - echo " --reinstall-apt Reinstall Notifiarr using apt without prompting." + echo " -h, --help Display this help message" + echo " --repo-url URL Set the repository URL (default: https://github.com/Notifiarr/notifiarr.git)" + echo " --repo-dir DIR Set the repository directory (default: /opt/notifiarr-repo)" + echo " --bin-path PATH Set the binary path (default: /usr/bin/notifiarr)" + echo " --branch BRANCH Set the branch (default: master)" + echo " --reinstall-apt Reinstall Notifiarr using apt without prompting." exit 0 } -#TODO Fix this later -# Check if Golang is installed, install if not -#if ! command -v go &>/dev/null; then -# read -p "Golang is not installed. Do you want to install it? 
[Y/n] " go_install_choice -# if [[ "$go_install_choice" == [Yy]* ]]; then -# # Download Go tarball to /tmp directory -# if curl -o /tmp/go1.21.3.linux-amd64.tar.gz https://go.dev/dl/go1.21.3.linux-amd64.tar.gz; then -# # Remove any existing Go installation, extract Go, update PATH, and check Go version -# sudo rm -rf /usr/local/go && \ -# sudo tar -C /usr/local -xzf /tmp/go1.21.3.linux-amd64.tar.gz && \ -# echo "export PATH=\$PATH:/usr/local/go/bin" >> ~/.bashrc && \ -# source ~/.bashrc && \ -# go version && \ -# rm /tmp/go1.21.3.linux-amd64.tar.gz -# else -# echo "Failed to download Golang." -# exit 1 -# fi -# else -# echo "Golang is required for this script. Exiting." -# exit 1 -# fi -#fi - -# Check if Make is installed, install if not -if ! command -v make &>/dev/null; then - read -p "Make is not installed. Do you want to install it? [Y/n] " make_install_choice - if [[ "$make_install_choice" == [Yy]* ]]; then - sudo apt update && sudo apt install -y make || handle_error "Failed to install Make." - else - echo "Make is required for this script. Exiting." - exit 1 +# Function to check and prompt for installation of a required tool +ensure_tool_installed() { + local tool=$1 + local install_cmd=$2 + if ! command -v "$tool" &>/dev/null; then + read -p "$tool is not installed. Do you want to install it? [Y/n] " response + if [[ "$response" =~ ^[Yy] ]]; then + eval "$install_cmd" || handle_error "Failed to install $tool." + else + echo "$tool is required for this script. Exiting." + exit 1 + fi fi -fi +} -# Default parameter values +# Default parameters repo_url="https://github.com/Notifiarr/notifiarr.git" repo_dir="/opt/notifiarr-repo" bin_path="/usr/bin/notifiarr" @@ -65,7 +47,7 @@ apt_reinstall=false # Parse command line options while [[ $# -gt 0 ]]; do case "$1" in - -h | --help) + -h|--help) display_help ;; --repo-url) @@ -86,8 +68,7 @@ while [[ $# -gt 0 ]]; do ;; --reinstall-apt) apt_reinstall=true - ;; - + ;; *) echo "Invalid option: $1. Use -h for help." 
exit 1 @@ -96,81 +77,62 @@ while [[ $# -gt 0 ]]; do shift done -# Check if user wants to reinstall using apt -if [[ $apt_reinstall == true || ( $apt_reinstall == false && $(read -p "Do you want to reinstall Notifiarr using apt? [Y/n] " apt_choice; echo "$apt_choice") == [Yy]* ) ]]; then +# Ensure required tools are installed +ensure_tool_installed "make" "sudo apt update && sudo apt install -y make" +# Reinstallation condition handling +reinstall_notifiarr() { sudo apt update && sudo apt install --reinstall notifiarr || handle_error "Failed to reinstall Notifiarr using apt." - exit 0 -fi +} + +[[ $apt_reinstall == true ]] && reinstall_notifiarr -# Clone the repo if it doesn't exist, else fetch the latest +# Repository management if [[ ! -d "$repo_dir" ]]; then git clone "$repo_url" "$repo_dir" || handle_error "Failed to clone repository." else git -C "$repo_dir" fetch --all --prune || handle_error "Failed to fetch updates from remote." fi -# Get the current branch +# Branch handling and updating current_branch=$(git -C "$repo_dir" rev-parse --abbrev-ref HEAD) -echo "Current branch is: $current_branch" -read -p "Do you want to use the current branch? [Y/n] " choice - -if [[ "$choice" != [Yy]* ]]; then - # List all available branches - branches=$(git -C "$repo_dir" branch -r | sed 's/origin\///' | sed 's/* //') +read -p "Do you want to use the current branch ($current_branch)? [Y/n] " choice +if [[ "$choice" =~ ^[Nn] ]]; then + branches=$(git -C "$repo_dir" branch -r | sed 's/origin\///;s/* //') echo "Available branches:" echo "$branches" - while true; do read -p "Enter the branch name you want to use: " branch if [[ $branches =~ $branch ]]; then + git -C "$repo_dir" checkout "$branch" || handle_error "Failed to checkout branch $branch." break else echo "Invalid choice. Please select a valid branch." fi done - - # Checkout the selected branch - git -C "$repo_dir" checkout "$branch" || handle_error "Failed to checkout branch $branch." 
-else - branch=$current_branch fi -# Pull latest changes from the selected branch git -C "$repo_dir" pull || handle_error "Failed to pull latest changes." - -# Compile the code (assuming the repository requires a 'make' step) make --directory="$repo_dir" || handle_error "Failed to compile." +# Service management echo "Stopping notifiarr..." sudo systemctl stop notifiarr -# Move the binaries if [[ -f "$bin_path" ]]; then sudo mv "$bin_path" "$repo_dir".old && echo "Old binary moved to $repo_dir.old" fi sudo mv "$repo_dir/notifiarr" "$bin_path" && echo "New binary moved to $bin_path" -# Change owner of the compiled binary -sudo chown "root:root" "$bin_path" +sudo chown root:root "$bin_path" -# Start the service again +echo "Starting Notifiarr..." sudo systemctl start notifiarr -# Check if the service started successfully -if [[ $? -eq 0 ]]; then - echo "Notifiarr service started successfully" - - # Check the status of the service - sudo systemctl is-active --quiet notifiarr - if [[ $? -eq 0 ]]; then - echo "Notifiarr service is currently running" - else - echo "Notifiarr service is not running" - fi +if sudo systemctl is-active --quiet notifiarr; then +    echo "Notifiarr service started and is currently running" else - echo "Failed to start Notifiarr service" +    handle_error "Failed to start Notifiarr service" fi -# Exit the script exit 0 diff --git a/omegabrr_upgrade.sh b/omegabrr_upgrade.sh index 0881239..87f9827 100755 --- a/omegabrr_upgrade.sh +++ b/omegabrr_upgrade.sh @@ -1,29 +1,40 @@ #!/bin/bash +# Define service name as a variable +service_name="omegabrr@bakerboy448" + +# Function to handle errors and exit +handle_error() { + echo "Error: $1" >&2 + exit 1 +} + # Get the old version of omegabrr old_version=$(omegabrr version) # Fetch the URL of the latest release for linux_x86_64 -dlurl=$(curl -s https://api.github.com/repos/autobrr/omegabrr/releases/latest | grep -E 'browser_download_url.*linux_x86_64' | cut -d\" -f4) +dlurl=$(curl -s 
https://api.github.com/repos/autobrr/omegabrr/releases/latest | \ + grep -E 'browser_download_url.*linux_x86_64' | cut -d\" -f4) -# Download the latest release -if [ -n "$dlurl" ]; then - wget "$dlurl" - # Extract the downloaded archive - sudo tar -xzf omegabrr*.tar.gz - # Move omegabrr to /usr/bin - sudo mv omegabrr /usr/bin/omegabrr - # Clean up downloaded files - rm omegabrr*.tar.gz - echo "Omegabrr Updated" -else - echo "Failed to fetch download URL. Exiting..." - exit 1 +# Validate the download URL +if [ -z "$dlurl" ]; then + handle_error "Failed to fetch download URL." fi +# Download the latest release +wget "$dlurl" -O omegabrr_latest.tar.gz || handle_error "Failed to download the latest version." + +# Extract the downloaded archive +sudo tar -xzf omegabrr_latest.tar.gz -C /usr/bin/ || handle_error "Failed to extract files." + +# Clean up downloaded files +rm omegabrr_latest.tar.gz + # Display old and new versions -echo "Old Version: $old_version" -echo "New Version: $(omegabrr version)" +new_version=$(omegabrr version) +echo "Omegabrr updated from $old_version to $new_version" + +# Restart the specified service +sudo systemctl restart $service_name || handle_error "Failed to restart the service $service_name." -# Restart the omegabrr service (assuming sysrestart command exists) -sysrestart omegabrr@bakerboy448 +echo "Update and restart successful!" diff --git a/qbm-qbit.sh b/qbm-qbit.sh index 5ac6e98..951ffb6 100755 --- a/qbm-qbit.sh +++ b/qbm-qbit.sh @@ -1,21 +1,48 @@ #!/bin/bash -LOCK=/var/lock/qbm-qbit.lock -PATH_QBM=/opt/QbitManage + +# Check if lockfile command exists +if ! command -v lockfile &>/dev/null; then + echo "Error: lockfile command not found. Please install the procmail package." 
>&2 + exit 1 +fi + +# Load environment variables from .env file if it exists +if [ -f ".env" ]; then + source ".env" +fi + +# Use environment variables with descriptive default values +QBQBM_LOCK=${QBIT_MANAGE_LOCK_FILE_PATH:-/var/lock/qbm-qbit.lock} +QBQBM_PATH_QBM=${QBIT_MANAGE_PATH:-/opt/qbit-manage} +QBQBM_VENV_PATH=${QBIT_MANAGE_VENV_PATH:-/opt/qbit-manage/.venv} +QBQBM_CONFIG_PATH=${QBIT_MANAGE_CONFIG_PATH:-/opt/qbit-manage/config.yml} +QBQBM_QBIT_OPTIONS=${QBIT_MANAGE_OPTIONS:-"-cs -re -cu -tu -ru -sl -r"} +QBQBM_SLEEP_TIME=600 +QBQBM_LOCK_TIME=3600 + +# Function to remove the lock file remove_lock() { - rm -f "$LOCK" + rm -f "$QBQBM_LOCK" } + +# Function to handle detection of another running instance another_instance() { - echo "There is another instance running, exiting" + echo "There is another instance running, exiting." exit 1 } -lockfile -r 0 -l 3600 "$LOCK" || another_instance + +echo "Acquiring Lock" +# Acquire a lock to prevent concurrent execution, with a timeout and lease time +lockfile -r 0 -l "$QBQBM_LOCK_TIME" "$QBQBM_LOCK" || another_instance + +# Ensure the lock is removed when the script exits trap remove_lock EXIT -sleep 600 -# -cs = cross-seed -# -re = recheck -# -cu = cat-update -# -tu = tag-update -# -ru = remove unregistered -# Do not remove orphaned torrents as imports may be in-progress -# -sl = share limits -/opt/.venv/qbm-venv/bin/python "$PATH_QBM"/qbit_manage.py -cs -re -cu -tu -ru -sl -r --config-file /.config/QbitMngr/config.yml + +echo "sleeping for $QBQBM_SLEEP_TIME" +# Pause the script to wait for any pending operations (i.e. 
 Starr Imports) + +sleep $QBQBM_SLEEP_TIME + +# Execute qbit_manage with configurable options +echo "Executing Command" +"$QBQBM_VENV_PATH"/bin/python "$QBQBM_PATH_QBM"/qbit_manage.py $QBQBM_QBIT_OPTIONS --config-file "$QBQBM_CONFIG_PATH" diff --git a/xseed.sh b/xseed.sh index fe4fe63..848ced8 100755 --- a/xseed.sh +++ b/xseed.sh @@ -1,137 +1,135 @@ #!/bin/bash -# Configure variables to fit your setup -# Assumes download clients have the same name across all Starrs using this script. -# See https://www.cross-seed.org/docs/basics/faq-troubleshooting#searching-media-libraries-vs-torrent-data-data-based-searching -# For how to configure Cross Seed with Starr Data Matching -# Download Client Names in Starr -torrentclientname="Qbit" -usenetclientname="SABnzbd" -# Cross seed host (ip or container name) and port information -xseed_host="crossseed" -xseed_port="2468" -# Set a path to store this script's database of prior searched -# This is mounted to /config in the containers -log_file="/config/xseed_db.log" -# Optional; Set to "" to ignore -xseed_apikey="" - -# Determine app and set variables -if [ -n "$radarr_eventtype" ]; then - app="radarr" - # shellcheck disable=SC2154 - clientID="$radarr_download_client" - # shellcheck disable=SC2154 - downloadID="$radarr_download_id" - # shellcheck disable=SC2154 - filePath="$radarr_moviefile_path" - # shellcheck disable=SC2154 - eventType="$radarr_eventtype" -elif [ -n "$sonarr_eventtype" ]; then - app="sonarr" - # shellcheck disable=SC2154 - clientID="$sonarr_download_client" - # shellcheck disable=SC2154 - downloadID="$sonarr_download_id" - # shellcheck disable=SC2154 - filePath="$sonarr_episodefile_path" - # shellcheck disable=SC2154 - folderPath="$sonarr_episodefile_sourcefolder" - # shellcheck disable=SC2154 - eventType="$sonarr_eventtype" -elif [ -n "$Lidarr_EventType" ]; then - app="lidarr" - # shellcheck disable=SC2154 - clientID="$Lidarr_Download_Client" - # shellcheck disable=SC2154 - filePath="$Lidarr_Artist_Path" - # shellcheck 
disable=SC2154 - downloadID="$Lidarr_Download_Id" - # shellcheck disable=SC2154 - eventType="$Lidarr_EventType" -elif [ -n "$Readarr_EventType" ]; then - app="readarr" - # shellcheck disable=SC2154 - clientID="$Readarr_Download_Client" - # shellcheck disable=SC2154 - filePath="$Readarr_Author_Path" - # shellcheck disable=SC2154 - downloadID="$Readarr_Download_Id" - # shellcheck disable=SC2154 - eventType="$Readarr_EventType" -else - echo "|WARN| Unknown Event Type. Failing." - exit 1 +# Load environment variables from .env file if it exists +if [ -f ".env" ]; then + # shellcheck source=.env + source ".env" fi -echo "$app detected with event type $eventType" -# Function to send request to cross-seed +# Use environment variables with descriptive default values +TORRENT_CLIENT_NAME=${TORRENT_CLIENT_NAME:-Qbit} +USENET_CLIENT_NAME=${USENET_CLIENT_NAME:-SABnzbd} +XSEED_HOST=${XSEED_HOST:-crossseed} +XSEED_PORT=${XSEED_PORT:-8080} +LOG_FILE=${LOG_FILE:-/var/log/xseed.log} + +# Function to send a request to Cross Seed API cross_seed_request() { local endpoint="$1" local data="$2" - if [ -n "$xseed_apikey" ]; then - curl --silent --output /dev/null --write-out "%{http_code}" -X POST "http://$xseed_host:$xseed_port/api/$endpoint" -H "X-Api-Key: $xseed_apikey" --data-urlencode "$data" - else - curl --silent --output /dev/null --write-out "%{http_code}" -X POST "http://$xseed_host:$xseed_port/api/$endpoint" --data-urlencode "$data" + local headers=(-X POST "http://$XSEED_HOST:$XSEED_PORT/api/$endpoint" --data-urlencode "$data") + if [ -n "$xseed_apikey" ]; then + headers+=(-H "X-Api-Key: $xseed_apikey") fi + response=$(curl --silent --output /dev/null --write-out "%{http_code}" "${headers[@]}") + echo "$response" } -# Create the log file if it doesn't exist -[ ! -f "$log_file" ] && touch "$log_file" - -# Check if the downloadID exists in the log file -unique_id="${downloadID}-${clientID}" -# if id is blank (i.e. 
manual import skip) -if [ -z "$unique_id" ]; then - echo "UniqueDownloadID $unique_id is blanking. Ignoring." - exit 0 -fi -# If unique_id is not blank, then proceed with checking the id -grep -qF "$unique_id" "$log_file" && echo "UniqueDownloadID $unique_id has already been processed. Skipping..." && exit 0 +# Detect application and set environment +detect_application() { + app="unknown" + if [ -n "$radarr_eventtype" ]; then + app="radarr" + clientID="$radarr_download_client" + downloadID="$radarr_download_id" + filePath="$radarr_moviefile_path" + eventType="$radarr_eventtype" + elif [ -n "$sonarr_eventtype" ]; then + app="sonarr" + clientID="$sonarr_download_client" + downloadID="$sonarr_download_id" + filePath="$sonarr_episodefile_path" + folderPath="$sonarr_episodefile_sourcefolder" + eventType="$sonarr_eventtype" + elif [ -n "$lidarr_eventtype" ]; then + app="lidarr" + clientID="$lidarr_download_client" + filePath="$lidarr_artist_path" + downloadID="$lidarr_download_id" + eventType="$lidarr_eventtype" + elif [ -n "$readarr_eventtype" ]; then + app="readarr" + clientID="$readarr_download_client" + filePath="$readarr_author_path" + downloadID="$readarr_download_id" + eventType="$readarr_eventtype" + fi + [ "$app" == "unknown" ] && { + echo "Unknown application type detected. Exiting." + exit 1 + } +} -# Handle Unknown Event Type -[ -z "$eventType" ] && echo "|WARN| Unknown Event Type. Failing." && exit 1 +# Validate the process +validate_process() { + [ ! -f "$LOG_FILE" ] && touch "$LOG_FILE" + unique_id="${downloadID}-${clientID}" -# Handle Test Event -[ "$eventType" == "Test" ] && echo "Test passed for $app. DownloadClient: $clientID, DownloadId: $downloadID and FilePath: $filePath" && exit 0 + [ -z "$unique_id" ] && return + grep -qF "$unique_id" "$LOG_FILE" && { + echo "Download ID $unique_id already processed. Exiting." + exit 0 + } -# Ensure we have necessary details -[ -z "$downloadID" ] && echo "DownloadID is empty from $app. Skipping cross-seed search. 
DownloadClient: $clientID and DownloadId: $downloadID" && exit 0 -[ -z "$filePath" ] && echo "FilePath is empty from $app. Skipping cross-seed search. DownloadClient: $clientID and FilePath: $filePath" && exit 0 + [ -z "$eventType" ] && { + echo "No event type specified. Exiting." + exit 1 + } + [ "$eventType" == "Test" ] && { + echo "Test event detected. Exiting." + exit 0 + } + [ -z "$filePath" ] && [ -z "$downloadID" ] && { + echo "Essential parameters missing. Exiting." + exit 1 + } -# Handle client based operations -case "$clientID" in - "$torrentclientname") - echo "Client $torrentclientname triggered id search for DownloadId $downloadID with FilePath $filePath and FolderPath $folderPath" - xseed_resp=$(cross_seed_request "webhook" "infoHash=$downloadID") - - if [ "$xseed_resp" != "204" ]; then - echo "Client $torrentclientname triggered data search for DownloadId $downloadID using FilePath $filePath with FolderPath $folderPath" - sleep 15 - xseed_resp=$(cross_seed_request "webhook" "path=$filePath") + if [ -z "$downloadID" ] || [ -z "$filePath" ]; then + echo "Download ID is missing. Checking if file path works for data/path based cross-seeding." + if [ -z "$filePath" ]; then + echo "File path is missing. Exiting." + exit 1 fi + fi + + [ -z "$filePath" ] && [ -z "$downloadID" ] && { + echo "Essential parameters missing. Exiting." + exit 1 + } +} + +# Main logic for handling operations +handle_operations() { + detect_application + validate_process + + case "$clientID" in + "$TORRENT_CLIENT_NAME") + echo "Processing torrent client operations..." 
+ [ -n "$downloadID" ] && { xseed_resp=$(cross_seed_request "webhook" "infoHash=$downloadID"); } + [ "$xseed_resp" != "204" ] && sleep 15 && xseed_resp=$(cross_seed_request "webhook" "path=$filePath") ;; - "$usenetclientname") - if [[ "$folderPath" =~ S[0-9]{1,2}(?!\.E[0-9]{1,2}) ]]; then - echo "Client $usenetclientname skipped search for FolderPath $folderPath due to being a SeasonPack for Usenet" + "$USENET_CLIENT_NAME") + [[ "$folderPath" =~ S[0-9]{1,2}(?!\.E[0-9]{1,2}) ]] && { + echo "Skipping season pack search." exit 0 - else - echo "Client $usenetclientname triggered data search for DownloadId $downloadID using FilePath $filePath with FolderPath $folderPath" - xseed_resp=$(cross_seed_request "webhook" "path=$filePath") - fi + } + echo "Processing Usenet client operations..." + xseed_resp=$(cross_seed_request "webhook" "path=$filePath") ;; *) - echo "|WARN| Client $clientID does not match configured Clients of $torrentclientname or $usenetclientname. Skipping..." - exit 0 + echo "Unrecognized client $clientID. Exiting." + exit 1 ;; -esac + esac + echo "Cross-seed API response: $xseed_resp" + if [ "$xseed_resp" == "204" ]; then + echo "$unique_id" >>"$LOG_FILE" + echo "Process completed successfully." + else + echo "Process failed with API response: $xseed_resp" + exit 1 + fi +} -# Handle Cross Seed Response -if [ "$xseed_resp" == "204" ]; then - echo "Success. 
Cross-seed search triggered by $app for DownloadClient: $clientID, DownloadId: $downloadID and FilePath: $filePath with FolderPath $folderPath" - echo "$unique_id" >> "$log_file" - exit 0 -else - echo "|WARN| Cross-seed webhook failed - HTTP Code $xseed_resp from $app for DownloadClient: $clientID, DownloadId: $downloadID and FilePath: $filePath with FolderPath $folderPath" - exit 1 -fi +handle_operations diff --git a/zfsburn.sh b/zfsburn.sh index 1e4718e..807e3c4 100755 --- a/zfsburn.sh +++ b/zfsburn.sh @@ -1,12 +1,19 @@ #!/bin/bash -# Constants -VERBOSE=0 # Set this to 1 for trace-level logging, 0 for informational logging -MAX_FREQ=2 -MAX_HOURLY=2 -MAX_DAILY=1 -MAX_WEEKLY=0 -MAX_MONTHLY=0 +# Load .env file +set -o allexport +if [ -f ".env" ]; then + # shellcheck source=.env + source ".env" +fi +set +o allexport + +VERBOSE=${VERBOSE:-1} +MAX_FREQ=${MAX_FREQ:-4} +MAX_HOURLY=${MAX_HOURLY:-2} +MAX_DAILY=${MAX_DAILY:-7} +MAX_WEEKLY=${MAX_WEEKLY:-4} +MAX_MONTHLY=${MAX_MONTHLY:-3} # Logging function based on verbosity level log() { @@ -22,16 +29,15 @@ bytes_to_human_readable() { local bytes=$1 local units=('B' 'KB' 'MB' 'GB' 'TB' 'PB' 'EB' 'ZB' 'YB') local unit=0 - - while (( bytes > 1024 )); do - (( bytes /= 1024 )) - (( unit++ )) + + while ((bytes > 1024)); do + ((bytes /= 1024)) + ((unit++)) done - + echo "${bytes} ${units[unit]}" } - # Function to retrieve snapshot counts for a specific snapshot type get_snapshot_count() { local snapshot_type="$1"