#!/usr/bin/env python3
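"""Fetch the latest Citrix client RPMs from a Gitea release and publish them
to a local dnf repository.

Overview of the flow implemented below:

1. Run `git pull` so the bundled .repo file and GPG key are up to date.
2. Query the Gitea API for the latest release of
   REYHER-Systemtechnik-Public/VDI-Linux.
3. If that release is newer than the one recorded in
   /temp/packages/last_release.json, download ctxusb.rpm and ICAClient.rpm
   using multi-threaded ranged requests, verifying SHA256 when a digest is
   available.
4. Copy the RPMs to /var/local/citrix-repo, rebuild the repository metadata
   with createrepo_c, install the .repo file and GPG key, and refresh the
   dnf/PackageKit caches.

The script writes to /etc/yum.repos.d and /var/local, so it will typically
need to run with root privileges.
"""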

import datetime
import hashlib
import json
import logging
import os
import shutil
import subprocess
import threading
import time
import urllib.error
import urllib.request
from concurrent.futures import ThreadPoolExecutor

from tqdm import tqdm

logging.basicConfig(level=logging.INFO, format='%(asctime)s - Thread %(threadName)s - %(message)s')
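
# Note: `git pull` below (and the relative file copies near the end) assume the
# script is run from inside the cloned repository, since no explicit cwd is set.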

# Update the local repository via git pull before proceeding
try:
    subprocess.run(['git', 'pull'], check=True, capture_output=True)
    logging.info("Updated repo via git pull")
except subprocess.CalledProcessError as e:
    logging.warning(f"Git pull failed (non-zero exit): {e}")
except FileNotFoundError:
    logging.warning("Git not found; skipping pull")
except Exception as e:
    logging.warning(f"Unexpected error during git pull: {e}")

api_base = "https://nachos386.lcube-server.de/api/v1"
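
# The Gitea "latest release" payload consumed below is assumed to follow the
# usual shape; only the fields this script actually reads are sketched here
# (values are illustrative):
#
#   {
#     "tag_name": "v1.2.3",
#     "published_at": "2025-01-01T12:00:00+01:00",
#     "assets": [
#       {"name": "ICAClient.rpm", "browser_download_url": "https://.../ICAClient.rpm"},
#       {"name": "ctxusb.rpm",    "browser_download_url": "https://.../ctxusb.rpm"}
#     ]
#   }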


def fetch_release_info(repo_owner, repo_name):
    """Fetch latest release info including assets from Gitea API."""
    api_url = f"{api_base}/repos/{repo_owner}/{repo_name}/releases/latest"
    try:
        with urllib.request.urlopen(api_url, timeout=30) as response:
            release_info = json.loads(response.read().decode())

        tag_name = release_info.get('tag_name', '')
        published_at = release_info.get('published_at', '')
        if not tag_name or not published_at:
            raise ValueError("Invalid release data: missing tag_name or published_at")

        assets = {}
        for asset in release_info.get('assets', []):
            filename = asset.get('name', '')
            if filename.endswith('.rpm'):
                download_url = asset.get('browser_download_url', '')
                if not download_url:
                    logging.warning(f"No download URL for {filename}")
                    continue
                # Gitea does not provide digests; hash will be None
                hash_value = None
                # For compatibility, check if 'digest' exists (unlikely in Gitea)
                digest = asset.get('digest', '')
                if digest and digest.startswith('sha256:'):
                    hash_value = digest[7:].lower()
                    logging.info(f"Fetched hash for {filename}: {hash_value[:16]}...")
                else:
                    logging.info(f"No hash available for {filename}; verification will be skipped")
                assets[filename] = {'url': download_url, 'hash': hash_value}

        if not assets:
            logging.warning("No RPM assets found in release.")
            logging.info(f"Available assets: {[a['name'] for a in release_info.get('assets', [])]}")
        else:
            logging.info(f"Fetched {len(assets)} assets from API.")

        return {
            'tag_name': tag_name,
            'published_at': published_at,
            'assets': assets
        }
    except urllib.error.URLError as e:
        logging.error(f"Failed to fetch release info from API: {e}")
        raise
    except json.JSONDecodeError as e:
        logging.error(f"Failed to parse API JSON: {e}")
        raise
    except Exception as e:
        logging.error(f"Unexpected error fetching release info: {e}")
        raise


def compute_sha256(file_path):
    """Compute SHA256 hash of a file."""
    sha256 = hashlib.sha256()
    try:
        with open(file_path, 'rb') as file:
            while True:
                chunk = file.read(8192)
                if not chunk:
                    break
                sha256.update(chunk)
        return sha256.hexdigest().lower()
    except IOError as e:
        logging.error(f"Failed to compute hash for {file_path}: {e}")
        return None


def download_chunk(url, save_path, start, end, total_size, progress_bar, lock):
    """Download the byte range [start, end] of url into save_path via an HTTP Range request."""
    try:
        req = urllib.request.Request(url)
        req.add_header('Range', f'bytes={start}-{end}')
        with urllib.request.urlopen(req, timeout=30) as response:
            downloaded = 0
            chunk_size = 8192
            while True:
                chunk = response.read(chunk_size)
                if not chunk:
                    break
                downloaded += len(chunk)
                with lock:
                    # Write the chunk at its absolute offset within the pre-sized file
                    with open(save_path, 'r+b') as file:
                        file.seek(start + downloaded - len(chunk))
                        file.write(chunk)
                    progress_bar.update(len(chunk))
        logging.info(f"Completed range {start}-{end}")
    except urllib.error.HTTPError as http_err:
        logging.error(f"HTTP error in range {start}-{end}: {http_err}")
        # Re-raise so the caller's future.result() surfaces the failure
        raise
    except urllib.error.URLError as url_err:
        logging.error(f"URL error in range {start}-{end}: {url_err}")
        raise
    except IOError as io_err:
        logging.error(f"File error in range {start}-{end}: {io_err}")
        raise
    except Exception as e:
        logging.error(f"Unexpected error in range {start}-{end}: {e}")
        raise


def download_file(url, save_path, expected_hash=None, num_threads=4):
    """Download url to save_path using num_threads ranged requests, then verify SHA256 if a hash is given."""
    try:
        with urllib.request.urlopen(url, timeout=30) as response:
            total_size = int(response.getheader('Content-Length', 0))
        if total_size == 0:
            raise ValueError("Could not determine file size")
        logging.info(f"Total file size: {total_size} bytes")

        # Pre-size the target file so each worker can seek and write its own range
        os.makedirs(os.path.dirname(save_path), exist_ok=True)
        with open(save_path, 'wb') as file:
            file.truncate(total_size)

        progress_bar = tqdm(total=total_size, unit='B', unit_scale=True, desc=os.path.basename(save_path), mininterval=0.1)
        lock = threading.Lock()

        # Split the file into num_threads contiguous byte ranges
        chunk_size = total_size // num_threads
        ranges = [(i * chunk_size, (i + 1) * chunk_size - 1 if i < num_threads - 1 else total_size - 1)
                  for i in range(num_threads)]

        start_time = time.time()
        with ThreadPoolExecutor(max_workers=num_threads) as executor:
            futures = [executor.submit(download_chunk, url, save_path, start, end, total_size, progress_bar, lock)
                       for start, end in ranges]
            for future in futures:
                future.result()
        end_time = time.time()

        progress_bar.close()
        print(f"File downloaded successfully and saved to {save_path}")
        logging.info(f"Download completed in {end_time - start_time:.2f} seconds")

        filename = os.path.basename(save_path)
        if expected_hash:
            computed_hash = compute_sha256(save_path)
            if computed_hash is None:
                raise Exception(f"Hash computation failed for {save_path}")
            if computed_hash != expected_hash:
                raise Exception(f"Hash mismatch for {save_path}: expected {expected_hash}, got {computed_hash}")
            print(f"Hash verified for {filename}: {computed_hash}")
        else:
            logging.warning(f"Skipping hash verification for {filename} (no expected hash available)")

    except urllib.error.HTTPError as http_err:
        logging.error(f"HTTP error occurred: {http_err}")
        raise
    except urllib.error.URLError as url_err:
        logging.error(f"URL error occurred: {url_err}")
        raise
    except IOError as io_err:
        logging.error(f"File error occurred while saving: {io_err}")
        raise
    except Exception as e:
        logging.error(f"An unexpected error occurred: {e}")
        raise
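
# Example use of download_file on its own (illustrative URL and values, not part
# of the release flow below):
#
#   download_file("https://example.invalid/ICAClient.rpm",
#                 "/temp/packages/ICAClient.rpm", expected_hash=None, num_threads=4)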


def is_newer_release(repo_owner, repo_name, last_release_file="/temp/packages/last_release.json"):
    """Check if the current release is newer than the last downloaded one."""
    try:
        release_data = fetch_release_info(repo_owner, repo_name)
        current_tag = release_data['tag_name']
        current_published_at = release_data['published_at']

        # Handle timezone in published_at (Gitea may include TZ offset, GitHub uses Z)
        if 'Z' in current_published_at:
            current_published_at = current_published_at.replace('Z', '+00:00')
        current_time = datetime.datetime.fromisoformat(current_published_at)

        if os.path.exists(last_release_file):
            with open(last_release_file, 'r') as f:
                last_release = json.load(f)
            last_tag = last_release.get('tag_name', '')
            last_published_at = last_release.get('published_at', '')
            if last_published_at:
                if 'Z' in last_published_at:
                    last_published_at = last_published_at.replace('Z', '+00:00')
                last_time = datetime.datetime.fromisoformat(last_published_at)
                if current_time <= last_time and last_tag == current_tag:
                    logging.info(f"No newer release found. Current: {current_tag} ({current_published_at}), Last: {last_tag} ({last_published_at})")
                    return False, None

        logging.info(f"Newer release detected or no previous download: {current_tag} ({current_published_at})")
        return True, {
            'tag_name': current_tag,
            'published_at': current_published_at,
            'assets': release_data['assets']
        }
    except urllib.error.URLError as e:
        logging.error(f"Failed to fetch release info: {e}")
        raise
    except json.JSONDecodeError as e:
        logging.error(f"Failed to parse API JSON: {e}")
        raise
    except IOError as e:
        logging.error(f"Failed to read/write {last_release_file}: {e}")
        raise
    except Exception as e:
        logging.error(f"Unexpected error checking release: {e}")
        raise


repo_owner = "REYHER-Systemtechnik-Public"
repo_name = "VDI-Linux"
last_release_file = "/temp/packages/last_release.json"

is_newer, release_data = is_newer_release(repo_owner, repo_name, last_release_file)
if is_newer:
    assets = release_data['assets']
    if not assets:
        raise Exception("No RPM assets available in the latest release. Check logs for details.")

    rpm_files = {
        "/temp/packages/ctxusb.rpm": "ctxusb.rpm",
        "/temp/packages/ICAClient.rpm": "ICAClient.rpm"
    }

    for local_path, filename in rpm_files.items():
        if filename not in assets:
            raise Exception(f"Required asset {filename} not found in release. Available: {list(assets.keys())}")

        asset_info = assets[filename]
        url = asset_info['url']
        expected_hash = asset_info['hash']

        if os.path.exists(local_path):
            os.remove(local_path)

        download_file(url, local_path, expected_hash)

    os.makedirs("/var/local/citrix-repo", exist_ok=True)

    logging.info("Copying ctxusb.rpm...")
    shutil.copyfile("/temp/packages/ctxusb.rpm", "/var/local/citrix-repo/ctxusb.rpm")
    logging.info("Finished copying ctxusb.rpm")

    logging.info("Copying ICAClient.rpm...")
    shutil.copyfile("/temp/packages/ICAClient.rpm", "/var/local/citrix-repo/ICAClient.rpm")
    logging.info("Finished copying ICAClient.rpm")

    os.system("createrepo_c /var/local/citrix-repo")
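
    # The .repo file shipped alongside this script is assumed to point dnf at the
    # local repository built above, roughly along these lines (illustrative, not
    # read from the actual file):
    #
    #   [local-citrix-repo]
    #   name=Local Citrix Repo
    #   baseurl=file:///var/local/citrix-repo
    #   enabled=1
    #   gpgcheck=1
    #   gpgkey=file:///var/local/citrix-repo/RPM-GPG-KEY-citrix-local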

    logging.info("Copying local-citrix-repo.repo...")
    shutil.copyfile("./local-citrix-repo.repo", "/etc/yum.repos.d/local-citrix-repo.repo")
    logging.info("Finished copying local-citrix-repo.repo")

    logging.info("Copying RPM-GPG-KEY-citrix-local...")
    shutil.copyfile("./RPM-GPG-KEY-citrix-local", "/var/local/citrix-repo/RPM-GPG-KEY-citrix-local")
    logging.info("Finished copying RPM-GPG-KEY-citrix-local")

    os.system("rpm --import /var/local/citrix-repo/RPM-GPG-KEY-citrix-local")

    os.system("dnf makecache")
    os.system("pkcon refresh force")
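
    # After a successful run, last_release.json contains roughly the following
    # (values illustrative):
    #   {"tag_name": "v1.2.3", "published_at": "2025-01-01T12:00:00+01:00"}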

    os.makedirs(os.path.dirname(last_release_file), exist_ok=True)
    with open(last_release_file, 'w') as f:
        # Save only tag and published_at; assets are transient
        json.dump({'tag_name': release_data['tag_name'], 'published_at': release_data['published_at']}, f)
    logging.info(f"Saved last release info to {last_release_file}")
else:
    logging.info("Skipping download; no new release available.")

print("All files processed successfully!")