initial commit

This commit is contained in:
2025-08-13 22:51:13 -03:00
commit 68377fc083
13 changed files with 35400 additions and 0 deletions

3
backup.log Normal file
View File

@@ -0,0 +1,3 @@
20240308_030001_services_backup.tar.gz - backup complete
20240309_030001_services_backup.tar.gz - backup complete
20240310_030001_services_backup.tar.gz - backup complete

25
backup/backup_arr.sh Normal file
View File

@@ -0,0 +1,25 @@
#!/bin/bash
# Back up the *arr stack's home directory to the backup mount.
# Stops all running containers, tars the home directory (minus caches,
# logs and media), rsyncs the archive to $backup_path, restarts the
# containers, and appends one line per run to ${log_path}backup.log.
set -uo pipefail

# Source and destination paths. Trailing slashes are significant:
# file names are built by plain concatenation below.
source_path="$HOME/"
backup_path=/mnt/server_backup/arr/
timestamp=$(date "+%Y%m%d_%H%M%S")
log_path="$HOME/scripts/"

# Stop containers. Only call `docker stop` when something is running:
# it exits non-zero with an empty argument list.
running=$(docker ps -q)
if [ -n "$running" ]; then
  # shellcheck disable=SC2086 -- IDs are whitespace-free; splitting is intended
  docker stop $running
fi

# Create a tar archive excluding cache, media and hidden files.
# Exclude patterns are quoted so the shell cannot glob-expand them first.
file_name="${timestamp}_arr_backup.tar.gz"
tar -czf "${source_path}${file_name}" \
  --exclude='*/cache' --exclude='*/blobs' --exclude='*/generated' \
  --exclude='*/MediaCover' --exclude='*/logs' --exclude='*/Cache' \
  --exclude='*/Logs' --exclude='*/Metadata' --exclude='*/Crash Reports' \
  --exclude='*/Media' --exclude="$HOME/.*" "$source_path"

# Copy the archive to the destination, then drop the local copy.
rsync -av --progress "${source_path}${file_name}" "$backup_path"
rm -- "${source_path}${file_name}"

# Restart containers (all of them, including previously-stopped ones,
# matching the original `docker start $(docker ps -a -q)` behaviour).
# shellcheck disable=SC2046
docker start $(docker ps -a -q)

echo "${file_name} - backup complete" >> "${log_path}backup.log"

31
backup/backup_services.sh Normal file
View File

@@ -0,0 +1,31 @@
#!/bin/bash
# Back up the services home directory to /mnt/server_backup/services/.
# Stops running containers, creates a tar.gz of the home directory,
# rsyncs it to the backup mount, restarts the containers, and logs each
# step to ${source_path}backup.log.
set -uo pipefail

# Source and destination paths. Trailing slashes matter: file names are
# built by plain concatenation below.
source_path=/home/ccalifice/
backup_path=/mnt/server_backup/services/
timestamp=$(date "+%Y%m%d_%H%M%S")

# Stop containers. `docker stop` errors on an empty argument list, so
# only call it when something is actually running.
echo "stopping containers" >> "${source_path}backup.log"
running=$(docker ps -q)
if [ -n "$running" ]; then
  # shellcheck disable=SC2086 -- IDs are whitespace-free; splitting is intended
  docker stop $running
fi

# Create a tar archive, excluding caches, logs and bulky library data.
# Exclude patterns are quoted so the shell cannot glob-expand them first.
file_name="${timestamp}_services_backup.tar.gz"
echo "Creating archive ${source_path}${file_name}" >> "${source_path}backup.log"
tar -czf "${source_path}${file_name}" \
  --exclude='*/cache' --exclude='*/logs' --exclude='*/Cache' \
  --exclude='*/Logs' --exclude='*/photoprism/storage' \
  --exclude='*/model-cache' --exclude='*/immich-app/library' \
  "$source_path"

# Copy the archive to the backup mount, then drop the local copy.
rsync -av --progress "${source_path}${file_name}" "$backup_path"
rm -- "${source_path}${file_name}"

# Restart all containers (including ones that were already stopped,
# matching the original behaviour).
echo "starting containers" >> "${source_path}backup.log"
# shellcheck disable=SC2046
docker start $(docker ps -a -q)

echo "${file_name} - backup complete" >> "${source_path}backup.log"

2
backup/exclude_file.txt Normal file
View File

@@ -0,0 +1,2 @@
#exclude mounted partitions
/mnt

4
backup/include_file.txt Normal file
View File

@@ -0,0 +1,4 @@
#include the following paths in backup
/home
/etc
/var/lib/docker/volumes

76
backup/restic_backup.sh Normal file
View File

@@ -0,0 +1,76 @@
#!/bin/bash
# Restic backup driver.
#
# Usage: restic_backup.sh <user>
#
# Brings down every enabled docker-compose project under /home/<user>,
# runs a restic backup of the paths listed in include_file.txt (minus
# exclude_file.txt), brings the projects back up, then prunes old
# snapshots according to the retention policy. All output is appended
# to restic_log.log.
set -uo pipefail

user="${1:?usage: $0 <user>}"

# Paths used throughout the run.
include_file=/home/$user/scripts/backup/include_file.txt
exclude_file=/home/$user/scripts/backup/exclude_file.txt
timestamp=$(date "+%Y-%m-%d_%H:%M:%S")
log_file=/home/$user/scripts/backup/restic_log.log
restic_repo="/mnt/restic"

echo "${timestamp} Starting backup" >> "$log_file"
echo "User: ${user}" >> "$log_file"

# FIXME(security): the repository password used to be hardcoded here and
# is present in version-control history -- rotate it. Prefer exporting
# RESTIC_PASSWORD (or using RESTIC_PASSWORD_FILE) from the caller's
# environment; the literal below is only a backward-compatible fallback.
export RESTIC_PASSWORD="${RESTIC_PASSWORD:-asdf1234}"

#######################################
# Run a docker compose subcommand in every enabled project directory.
# Arguments: $1 - human verb for the log line ("down"/"up");
#            remaining args - the `docker compose` subcommand to run.
# Directories ending in "DISABLED" (and non-directories) are skipped.
#######################################
compose_each() {
  local verb=$1 dir
  shift
  for dir in /home/"$user"/*; do
    dir=${dir%/}
    if [[ -d "$dir" && ! "$dir" =~ DISABLED$ ]]; then
      if [[ -f "$dir/docker-compose.yml" ]]; then
        echo "Bringing $verb Docker Compose in $dir" >> "$log_file"
        (cd "$dir" && docker compose "$@")
      else
        echo "No docker-compose.yml found in $dir, skipping." >> "$log_file"
      fi
    else
      echo "Project is disabled. Skipping $dir" >> "$log_file"
    fi
  done
}

# Stop containers.
echo "${timestamp} Stopping containers" >> "$log_file"
compose_each down down

# Run restic backup. Build the command as an array so no word-splitting
# surprises can occur when it is executed.
restic_cmd=(restic -r "$restic_repo" backup
  --files-from "$include_file" --exclude-file="$exclude_file")
echo "${timestamp} Starting restic backup. Command: ${restic_cmd[*]}" >> "$log_file"
"${restic_cmd[@]}" >> "$log_file"

# Restart containers.
echo "Restarting containers" >> "$log_file"
compose_each up up -d

echo "${timestamp} Backup complete." >> "$log_file"

# Remove old backups with the retention policy.
forget_cmd=(restic -r "$restic_repo" forget
  --keep-daily 2 --keep-weekly 1 --keep-monthly 1 --keep-yearly 1 --prune)
echo "${timestamp} Removing old backups. Command: ${forget_cmd[*]}" >> "$log_file"
"${forget_cmd[@]}" >> "$log_file"

echo "${timestamp} Backup script done" >> "$log_file"

35051
backup/restic_log.log Normal file

File diff suppressed because it is too large. Load diff

42
backup/restore_script.py Normal file
View File

@@ -0,0 +1,42 @@
import tarfile
import shutil
import os
import glob
def extract_tar_gz(source_file, destination_dir, extract_path):
    """Extract members of *source_file* whose names start with *extract_path*
    into *destination_dir*.

    Parameters:
        source_file: path to a .tar.gz archive.
        destination_dir: directory to extract into (created if missing).
        extract_path: archive-internal name prefix selecting members.

    Raises:
        ValueError: if a selected member would resolve outside
            destination_dir (path traversal via "../" or absolute names).
    """
    # Ensure destination directory exists.
    os.makedirs(destination_dir, exist_ok=True)
    dest_root = os.path.realpath(destination_dir)
    with tarfile.open(source_file, "r:gz") as tar:
        for member in tar.getmembers():
            if not member.name.startswith(extract_path):
                continue
            # Guard against path traversal: a crafted archive must not be
            # able to write outside destination_dir.
            target = os.path.realpath(os.path.join(dest_root, member.name))
            if target != dest_root and not target.startswith(dest_root + os.sep):
                raise ValueError(f"unsafe member path: {member.name}")
            tar.extract(member, destination_dir)
def main():
    """Find staged service-backup archives and restore the home
    directory tree from each one in turn."""
    # Where backup archives are staged for restore.
    source_pattern = "/mnt/server_backup/restore/*.tar.gz"
    # Where the extracted tree is written.
    destination_dir = "/home/ccalifice/"
    # Archive-internal prefix to pull out.
    # NOTE(review): "ccalifce" differs from the destination user
    # "ccalifice" -- looks like a typo; kept as-is to preserve behaviour.
    extract_path = "home/ccalifce/"

    archives = glob.glob(source_pattern)
    if not archives:
        print("No matching tar.gz files found.")
        return

    for source_file in archives:
        try:
            extract_tar_gz(source_file, destination_dir, extract_path)
        except Exception as e:
            print(f"Error extracting {source_file}: {e}")
        else:
            print(f"Extraction successful for {source_file}.")


if __name__ == "__main__":
    main()

25
backup_services.sh Normal file
View File

@@ -0,0 +1,25 @@
#!/bin/bash
# Back up the services home directory to /mnt/server_backup/.
# Stops running containers, creates a tar.gz of the home directory,
# rsyncs it to the backup mount, restarts the containers, and logs one
# line per run to the scripts directory under the home.
set -uo pipefail

# Source and destination paths. Trailing/leading slashes matter: names
# are built by concatenation below (the double slash produced by
# ${source_path}${log_path} is harmless to the filesystem).
source_path=/home/ccalifice/
backup_path=/mnt/server_backup/
timestamp=$(date "+%Y%m%d_%H%M%S")
log_path=/scripts/

# Stop containers. `docker stop` errors on an empty argument list, so
# only call it when something is actually running.
running=$(docker ps -q)
if [ -n "$running" ]; then
  # shellcheck disable=SC2086 -- IDs are whitespace-free; splitting is intended
  docker stop $running
fi

# Create a tar archive, excluding caches, logs and bulky library data.
# Exclude patterns are quoted so the shell cannot glob-expand them first.
file_name="${timestamp}_services_backup.tar.gz"
tar -czf "${source_path}${file_name}" \
  --exclude='*/cache' --exclude='*/logs' --exclude='*/Cache' \
  --exclude='*/Logs' --exclude='*/library' --exclude='*/sidecar' \
  --exclude='*/model-cache' "$source_path"

# Copy the archive to the backup mount, then drop the local copy.
rsync -av --progress "${source_path}${file_name}" "$backup_path"
rm -- "${source_path}${file_name}"

# Restart all containers (including previously-stopped ones, matching
# the original behaviour).
# shellcheck disable=SC2046
docker start $(docker ps -a -q)

echo "${file_name} - backup complete" >> "${source_path}${log_path}backup.log"

26
move-into-folders.py Normal file
View File

@@ -0,0 +1,26 @@
import os
import shutil
import datetime

# Source and destination directories (identical here: files are sorted
# into dated subfolders in place).
source_dir = 'C:/Users/chris/Pictures/Python-test'
dest_dir = 'C:/Users/chris/Pictures/Python-test'


def organize_by_date(source_dir, dest_dir):
    """Move every regular file in *source_dir* into a YYYY-MM-DD
    subdirectory of *dest_dir* named after the file's timestamp.

    Note: os.path.getmtime is the *modification* time -- true creation
    time is not portably available, so mtime is used as a stand-in
    (the original comments said "creation time" but already read mtime).
    """
    for name in os.listdir(source_dir):
        path = os.path.join(source_dir, name)
        # Skip subdirectories: otherwise a re-run would try to move the
        # date folders created by a previous run into themselves.
        if not os.path.isfile(path):
            continue
        stamp = datetime.datetime.fromtimestamp(os.path.getmtime(path))
        # Folder named after the year, month and day, zero-padded.
        new_dir = os.path.join(
            dest_dir, f'{stamp.year}-{stamp.month:02d}-{stamp.day:02d}')
        os.makedirs(new_dir, exist_ok=True)
        shutil.move(path, os.path.join(new_dir, name))


if __name__ == "__main__":
    organize_by_date(source_dir, dest_dir)

42
restore_script.py Normal file
View File

@@ -0,0 +1,42 @@
import tarfile
import shutil
import os
import glob
def extract_tar_gz(source_file, destination_dir, extract_path):
    """Extract members of *source_file* whose names start with *extract_path*
    into *destination_dir*.

    Parameters:
        source_file: path to a .tar.gz archive.
        destination_dir: directory to extract into (created if missing).
        extract_path: archive-internal name prefix selecting members.

    Raises:
        ValueError: if a selected member would resolve outside
            destination_dir (path traversal via "../" or absolute names).
    """
    # Ensure destination directory exists.
    os.makedirs(destination_dir, exist_ok=True)
    dest_root = os.path.realpath(destination_dir)
    with tarfile.open(source_file, "r:gz") as tar:
        for member in tar.getmembers():
            if not member.name.startswith(extract_path):
                continue
            # Guard against path traversal: a crafted archive must not be
            # able to write outside destination_dir.
            target = os.path.realpath(os.path.join(dest_root, member.name))
            if target != dest_root and not target.startswith(dest_root + os.sep):
                raise ValueError(f"unsafe member path: {member.name}")
            tar.extract(member, destination_dir)
def main():
    """Find staged service-backup archives and restore the home
    directory tree from each one in turn."""
    # Where backup archives are staged for restore.
    source_pattern = "/mnt/server_backup/restore/*.tar.gz"
    # Where the extracted tree is written.
    destination_dir = "/home/ccalifice/"
    # Archive-internal prefix to pull out.
    # NOTE(review): "ccalifce" differs from the destination user
    # "ccalifice" -- looks like a typo; kept as-is to preserve behaviour.
    extract_path = "home/ccalifce/"

    archives = glob.glob(source_pattern)
    if not archives:
        print("No matching tar.gz files found.")
        return

    for source_file in archives:
        try:
            extract_tar_gz(source_file, destination_dir, extract_path)
        except Exception as e:
            print(f"Error extracting {source_file}: {e}")
        else:
            print(f"Extraction successful for {source_file}.")


if __name__ == "__main__":
    main()

18
stop_all_containers.sh Normal file
View File

@@ -0,0 +1,18 @@
#!/bin/bash
# Walk every project directory under the user's home and bring down any
# Docker Compose stack found there. Entries whose name ends in
# "DISABLED" (and non-directories) are reported and skipped, as is any
# directory without a docker-compose.yml.
for project in /home/ccalifice/*; do
  project=${project%/}  # strip a trailing slash, if any
  if [[ ! -d "$project" || "$project" =~ DISABLED$ ]]; then
    echo "Project is disabled. Skipping $project"
    continue
  fi
  if [[ ! -f "$project/docker-compose.yml" ]]; then
    echo "No docker-compose.yml found in $project, skipping."
    continue
  fi
  echo "Bringing down Docker Compose in $project"
  (cd "$project" && docker compose down)
done

55
updateCloudflateDNS.py Normal file
View File

@@ -0,0 +1,55 @@
import requests
import json
import os

# Update every Cloudflare "A" record in the zone to point at this
# host's current public IP (dynamic-DNS style).

# Determine the public IPv4 address. Use HTTPS so the returned address
# cannot be tampered with in transit.
ip = requests.get("https://api.ipify.org")
ip.raise_for_status()
print(ip.text)

# FIXME(security): these credentials were previously hardcoded in this
# file and are present in version-control history -- rotate them.
# They are now read from the environment; the old literals remain only
# as a backward-compatible fallback until rotation.
zoneId = os.environ.get("CLOUDFLARE_ZONE_ID", "5f5b0bf2ebe816578be8c5553c98be6d")
apiKey = os.environ.get("CLOUDFLARE_API_KEY", "58ec6020b9886fd8e30275b55ddc8b3c79bea")
apiEmail = os.environ.get("CLOUDFLARE_API_EMAIL", "christoph.califice@hotmail.com")

url_getEntries = f"https://api.cloudflare.com/client/v4/zones/{zoneId}/dns_records"
url_updateEntry = f"https://api.cloudflare.com/client/v4/zones/{zoneId}/dns_records/"
reqHeaders = {
    # Legacy API-key auth; a scoped token via
    # "Authorization": "Bearer <token>" would be preferable.
    "X-Auth-Email": apiEmail,
    "X-Auth-Key": apiKey,
}

req = requests.get(url_getEntries, headers=reqHeaders)
req.raise_for_status()  # fail loudly instead of iterating an error body
entriesJson = json.loads(req.text)

for entry in entriesJson["result"]:
    if entry["type"] != "A":
        continue
    record_id = entry["id"]  # avoid shadowing the builtin id()
    data = {
        "content": ip.text,
        "name": entry["name"],
        # ttl=1 means "automatic" in the Cloudflare API, and the field is
        # documented as a number -- send an int, not the string "1".
        "ttl": 1,
    }
    print(data)
    resp = requests.patch(url_updateEntry + record_id, json=data, headers=reqHeaders)
    print(resp.text)