#!/usr/bin/env python3
import urllib.request
import os
import urllib.error
import threading
from concurrent.futures import ThreadPoolExecutor
from tqdm import tqdm
import logging
import time
import hashlib
import json
import datetime
import shutil

logging.basicConfig(level=logging.INFO, format='%(asctime)s - Thread %(threadName)s - %(message)s')

api_base = "https://nachos386.lcube-server.de/api/v1"


def fetch_release_info(repo_owner, repo_name):
    """Fetch latest release info including assets from the Gitea API."""
    api_url = f"{api_base}/repos/{repo_owner}/{repo_name}/releases/latest"
    try:
        with urllib.request.urlopen(api_url, timeout=30) as response:
            release_info = json.loads(response.read().decode())

        tag_name = release_info.get('tag_name', '')
        published_at = release_info.get('published_at', '')
        if not tag_name or not published_at:
            raise ValueError("Invalid release data: missing tag_name or published_at")

        assets = {}
        for asset in release_info.get('assets', []):
            filename = asset.get('name', '')
            if filename.endswith('.rpm'):
                download_url = asset.get('browser_download_url', '')
                if not download_url:
                    logging.warning(f"No download URL for {filename}")
                    continue
                # Gitea does not provide digests; hash will be None.
                hash_value = None
                # For compatibility, check if 'digest' exists (unlikely in Gitea).
                digest = asset.get('digest', '')
                if digest and digest.startswith('sha256:'):
                    hash_value = digest[7:].lower()
                    logging.info(f"Fetched hash for {filename}: {hash_value[:16]}...")
                else:
                    logging.info(f"No hash available for {filename}; verification will be skipped")
                assets[filename] = {'url': download_url, 'hash': hash_value}

        if not assets:
            logging.warning("No RPM assets found in release.")
            logging.info(f"Available assets: {[a['name'] for a in release_info.get('assets', [])]}")
        else:
            logging.info(f"Fetched {len(assets)} assets from API.")

        return {
            'tag_name': tag_name,
            'published_at': published_at,
            'assets': assets
        }
    except urllib.error.URLError as e:
        logging.error(f"Failed to fetch release info from API: {e}")
        raise
    except json.JSONDecodeError as e:
        logging.error(f"Failed to parse API JSON: {e}")
        raise
    except Exception as e:
        logging.error(f"Unexpected error fetching release info: {e}")
        raise


def compute_sha256(file_path):
    """Compute the SHA256 hash of a file, reading it in 8 KiB chunks."""
    sha256 = hashlib.sha256()
    try:
        with open(file_path, 'rb') as file:
            while True:
                chunk = file.read(8192)
                if not chunk:
                    break
                sha256.update(chunk)
        return sha256.hexdigest().lower()
    except IOError as e:
        logging.error(f"Failed to compute hash for {file_path}: {e}")
        return None


def download_chunk(url, save_path, start, end, total_size, progress_bar, lock):
    """Download one byte range via an HTTP Range request and write it at its offset."""
    try:
        req = urllib.request.Request(url)
        req.add_header('Range', f'bytes={start}-{end}')
        with urllib.request.urlopen(req, timeout=30) as response:
            downloaded = 0
            chunk_size = 8192
            while True:
                chunk = response.read(chunk_size)
                if not chunk:
                    break
                downloaded += len(chunk)
                # Serialize file writes and progress updates across worker threads.
                with lock:
                    with open(save_path, 'r+b') as file:
                        file.seek(start + downloaded - len(chunk))
                        file.write(chunk)
                    progress_bar.update(len(chunk))
        logging.info(f"Completed range {start}-{end}")
    except urllib.error.HTTPError as http_err:
        logging.error(f"HTTP error in range {start}-{end}: {http_err}")
    except urllib.error.URLError as url_err:
        logging.error(f"URL error in range {start}-{end}: {url_err}")
    except IOError as io_err:
        logging.error(f"File error in range {start}-{end}: {io_err}")
    except Exception as e:
        logging.error(f"Unexpected error in range {start}-{end}: {e}")


def download_file(url, save_path, expected_hash=None, num_threads=4):
    """Download a file in parallel byte ranges and optionally verify its SHA256 hash."""
    try:
        with urllib.request.urlopen(url, timeout=30) as response:
            total_size = int(response.getheader('Content-Length', 0))
        if total_size == 0:
            raise ValueError("Could not determine file size")
        logging.info(f"Total file size: {total_size} bytes")

        os.makedirs(os.path.dirname(save_path), exist_ok=True)
        # Pre-allocate the target file so each worker can write its range at the correct offset.
        with open(save_path, 'wb') as file:
            file.truncate(total_size)

        progress_bar = tqdm(total=total_size, unit='B', unit_scale=True,
                            desc=os.path.basename(save_path), mininterval=0.1)
        lock = threading.Lock()

        # Split the file into one byte range per worker thread; the last range absorbs the remainder.
        chunk_size = total_size // num_threads
        ranges = [(i * chunk_size,
                   (i + 1) * chunk_size - 1 if i < num_threads - 1 else total_size - 1)
                  for i in range(num_threads)]

        start_time = time.time()
        with ThreadPoolExecutor(max_workers=num_threads) as executor:
            futures = [executor.submit(download_chunk, url, save_path, start, end, total_size, progress_bar, lock)
                       for start, end in ranges]
            for future in futures:
                future.result()
        end_time = time.time()
        progress_bar.close()

        print(f"File downloaded successfully and saved to {save_path}")
        logging.info(f"Download completed in {end_time - start_time:.2f} seconds")

        filename = os.path.basename(save_path)
        if expected_hash:
            computed_hash = compute_sha256(save_path)
            if computed_hash is None:
                raise Exception(f"Hash computation failed for {save_path}")
            if computed_hash != expected_hash:
                raise Exception(f"Hash mismatch for {save_path}: expected {expected_hash}, got {computed_hash}")
            print(f"Hash verified for {filename}: {computed_hash}")
        else:
            logging.warning(f"Skipping hash verification for {filename} (no expected hash available)")
    except urllib.error.HTTPError as http_err:
        logging.error(f"HTTP error occurred: {http_err}")
        raise
    except urllib.error.URLError as url_err:
        logging.error(f"URL error occurred: {url_err}")
        raise
    except IOError as io_err:
        logging.error(f"File error occurred while saving: {io_err}")
        raise
    except Exception as e:
        logging.error(f"An unexpected error occurred: {e}")
        raise


def is_newer_release(repo_owner, repo_name, last_release_file="/temp/packages/last_release.json"):
    """Check if the current release is newer than the last downloaded one."""
    try:
        release_data = fetch_release_info(repo_owner, repo_name)
        current_tag = release_data['tag_name']
        current_published_at = release_data['published_at']

        # Handle timezone in published_at (Gitea may include a TZ offset, GitHub uses Z).
        if 'Z' in current_published_at:
            current_published_at = current_published_at.replace('Z', '+00:00')
        current_time = datetime.datetime.fromisoformat(current_published_at)

        if os.path.exists(last_release_file):
            with open(last_release_file, 'r') as f:
                last_release = json.load(f)
            last_tag = last_release.get('tag_name', '')
            last_published_at = last_release.get('published_at', '')
            if last_published_at:
                if 'Z' in last_published_at:
                    last_published_at = last_published_at.replace('Z', '+00:00')
                last_time = datetime.datetime.fromisoformat(last_published_at)
                if current_time <= last_time and last_tag == current_tag:
                    logging.info(f"No newer release found. Current: {current_tag} ({current_published_at}), "
                                 f"Last: {last_tag} ({last_published_at})")
                    return False, None

        logging.info(f"Newer release detected or no previous download: {current_tag} ({current_published_at})")
        return True, {
            'tag_name': current_tag,
            'published_at': current_published_at,
            'assets': release_data['assets']
        }
    except urllib.error.URLError as e:
        logging.error(f"Failed to fetch release info: {e}")
        raise
    except json.JSONDecodeError as e:
        logging.error(f"Failed to parse API JSON: {e}")
        raise
    except IOError as e:
        logging.error(f"Failed to read/write {last_release_file}: {e}")
        raise
    except Exception as e:
        logging.error(f"Unexpected error checking release: {e}")
        raise


repo_owner = "REYHER-Systemtechnik-Public"
repo_name = "VDI-Linux"
last_release_file = "/temp/packages/last_release.json"

is_newer, release_data = is_newer_release(repo_owner, repo_name, last_release_file)

if is_newer:
    assets = release_data['assets']
    if not assets:
        raise Exception("No RPM assets available in the latest release. Check logs for details.")

    rpm_files = {
        "/temp/packages/ctxusb.rpm": "ctxusb.rpm",
        "/temp/packages/ICAClient.rpm": "ICAClient.rpm"
    }

    # Download each required RPM from the release assets, replacing any stale local copy.
    for local_path, filename in rpm_files.items():
        if filename not in assets:
            raise Exception(f"Required asset {filename} not found in release. Available: {list(assets.keys())}")
        asset_info = assets[filename]
        url = asset_info['url']
        expected_hash = asset_info['hash']
        if os.path.exists(local_path):
            os.remove(local_path)
        download_file(url, local_path, expected_hash)

    # Stage the downloaded RPMs in the local repository directory.
    os.makedirs("/var/local/citrix-repo", exist_ok=True)

    logging.info("Copying ctxusb.rpm...")
    shutil.copyfile("/temp/packages/ctxusb.rpm", "/var/local/citrix-repo/ctxusb.rpm")
    logging.info("Finished copying ctxusb.rpm")

    logging.info("Copying ICAClient.rpm...")
    shutil.copyfile("/temp/packages/ICAClient.rpm", "/var/local/citrix-repo/ICAClient.rpm")
    logging.info("Finished copying ICAClient.rpm")

    # Rebuild the repository metadata for the local Citrix repo.
    os.system("createrepo_c /var/local/citrix-repo")

    logging.info("Copying local-citrix-repo.repo...")
    shutil.copyfile("./local-citrix-repo.repo", "/etc/yum.repos.d/local-citrix-repo.repo")
    logging.info("Finished copying local-citrix-repo.repo")

    logging.info("Copying RPM-GPG-KEY-citrix-local...")
    shutil.copyfile("./RPM-GPG-KEY-citrix-local", "/var/local/citrix-repo/RPM-GPG-KEY-citrix-local")
    logging.info("Finished copying RPM-GPG-KEY-citrix-local")

    # Import the signing key and refresh the package caches.
    os.system("rpm --import /var/local/citrix-repo/RPM-GPG-KEY-citrix-local")
    os.system("dnf makecache")
    os.system("pkcon refresh force")

    os.makedirs(os.path.dirname(last_release_file), exist_ok=True)
    with open(last_release_file, 'w') as f:
        # Save only tag and published_at; assets are transient.
        json.dump({'tag_name': release_data['tag_name'], 'published_at': release_data['published_at']}, f)
    logging.info(f"Saved last release info to {last_release_file}")
else:
    logging.info("Skipping download; no new release available.")

print("All files processed successfully!")