commit a6fd239f21c9d5871beaa109bb63dd8a53ecf32d Author: lwagner2 Date: Tue Nov 4 15:19:37 2025 +0100 Initial Commit diff --git a/README.md b/README.md new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ + diff --git a/RPM-GPG-KEY-citrix-local b/RPM-GPG-KEY-citrix-local new file mode 100644 index 0000000..dccd908 --- /dev/null +++ b/RPM-GPG-KEY-citrix-local @@ -0,0 +1,52 @@ +-----BEGIN PGP PUBLIC KEY BLOCK----- + +mQINBGjKsRABEACnXAeBthrUEHI+wN9rfBZP6CdwKjJQ6SAL1J5W+De0R6Jr4nFY +emikSlPE1KKgo1UVuFJkcDi8gj5mFWUV81X6Q2WuRPDlVwPkmaHVqnMqi3Ul2mMf +pOSEHVrcalXQjrTCI6WaEry6o8SbrDnq2EmXvju0B9aNUYN9khnCzFr3wfXj8pam +7BFx/Gv8bzjUxfdLgwJWA+gJFZr84/w4WYCjNngvIfuG9MVh1FikA8tbQzlQmhVI +v6TPzosTh2k6TrG3S6F9k295fCtx+IFwaU+to6GRFtI9IyzL0Oo8N1ZKsvLq56cy +yY6RE1HyYGY5LPzHqvgacPMomTg2VlwPSzXXgbROzX0kIF9jrh+WRywT1ozMRrbA +avSD5yYMeWCj+hwYkLOW+bz3ECEcfDuAAAolKk6YYf3ByYQFgS3Ker/ezGZawqsT +wb1USMoOMmudMdJkBWv5jgYfT/XKtOvUl3tm6qMD48uPVYBTCzVAygb0j8b5yo2B +qbuhSZ7dNRXaGubG4ixeaJyJYEJbnD9f2tV0YftuFSnoKtbnpcssFHiL2Mie/syQ +bp+HB3wNKrBboYFqrOsr61nVJeiALBkfE5nKgVw1V97LatbuDLgRpWa2PmI6nmb8 +mAGq4UPmXqjoQPKhb2aAKz0C7I3Nt4mW3hnjuB0L2PJtHqApDHnlIiUwqQARAQAB +tEtSRVlIRVIgQ2l0cml4IChSZXloZXIgQ2l0cml4IFdvcmtzcGFjZSBSUE0gU2ln +bmluZyBLZXkpIDxzdXBwb3J0QHJleWhlci5kZT6JAlEEEwEKADsWIQQAvWgMJe0m +HbsOfkaYnMfHn6z0UAUCaMqxEAIbAwULCQgHAgIiAgYVCgkICwIEFgIDAQIeBwIX +gAAKCRCYnMfHn6z0UG7rEACcq7WRYT1Cn1z3iw++ZOzlJPfO46LteSLana+vuhZh +jc5BxAcEnQ+72e+0Ykdxzo+TaQ3C/snQtlw+YFKfl3QRs5f0hFrTPHmhlDuSSASA +jh3cnkJxPZcjn7ms9jIz3FvXN2Q6K4GFbCvmoeOEMp/lr3Ou1dx5JBGRVmftxtU+ +3KcEWbgWpEmwJDONwQpH8va+CO1Mxg9fezAvqYFvUkJAK207f3HHdWbU98jWZIxE +Dv9Iwb+YQZ1P3gH4HzZbY35mdhHBivDPY2kWhOGKF3blpRPWpdDdD2DPv7absj75 +F2UBPVSU9gt6RH6BxREXztuEwPOB6tq2kIWpPSpsJ5pjCRB/F17bdK3q5MU+qhYG +UUjdkEQyBmZuxsQTKbJIYI4/cdiu3PGa7MKZlN7551R8xNYqVsyrgT8S3KaGMarK +JbszB6dE5fK1ZR8HOv3muQML2pByk4CraxKFX+mIki5WpnbFMba0P9Zg+dBufKfg +EkivUwycwN9sF/udUcZe5//KKX/m+g0TjDdDxNWlek3UPV0L4rKb8LxqnQV6cu/X 
+Sd3KejNd7wzbfcXwD8VLKJRZHmOjrBnBNlBWxxJCXUB0sXw+F3exmky9o2mp7vWD +KPM1Fc09DnExxTgi99Wh2bwvLAPkYhQOTVg/RJYohcP1mehJzPHaAZCJ2Oqw9tli +yrkCDQRoyrEQARAArqkscxKsqV/j2DLHl7T+KTRjjd9syZSmjFdwCfazZn18dlaK +YfGGw600cCEDfdZWlYp01YFNplXF6XZj51af9WjtS32ZWBp9WS0N1QzS7yIIP9FT +UuXVmd8im3JSuUMFWTAFklN8iiQhRmHfYA0+TsHOzqwa2vYq0tQwOHsfP0hI4WG9 +7BJtVK08Dk1toeOyiqAEPpOXa3FD0LUdxnI2E66n/cwl1hBI4omOGKD4Mlr1TYzR +8BJ+D3TA8vwCeLRavTNvst2SRtKDNyxlEdD0xUyXlk/klT7mYXi8v+gpNtlRWCIr +wnyu1r526zJfTdvn5WykUL7g/3BVIdawvyYKsN/yEbOcnsZyYXiarA78I4qacAYo +TsGPmB/SSDD3b3dBNL9s+fL9PqsovXpgdxr/yTUtzDj6gCWMB/IirzXHyO/3LWtM +UjVlH0IacFRGb1TF0QG5IRsTFdkGyu13X48U8qvIsunZZ3XRRVTm6D1DmHNhQkd6 +12RoS4vMgamZ2Mij5VJX+tQ5VHmU4tjkjCmpvsedAjjjSXMUtFFLVJZ878KdeW49 +n9GSsRlStjM+HFCon1Fvj6gBxUC+dtc/P+HbOfIVsUe3NocsX6tqSp2s97SWeWZk +KO8nlFUVHqI2DvU7xSBCQ1Z03a4qF8EZc5nIxPxMU773EfG0FHogYqrXaNcAEQEA +AYkCNgQYAQoAIBYhBAC9aAwl7SYduw5+Rpicx8efrPRQBQJoyrEQAhsMAAoJEJic +x8efrPRQ2QkQAJwS341HYxGtKszMXlm/K0v7EaqrWTHStvSN1UDOX/tHMzUAyx6x +9Y257WEi4rwGk85Uh5+1+4MeqzmpHrb4ZCGzN+hkm0s9iE9G2XKCogaEuvzLByW0 +UpEvk+b2OHQQ53XO6Q0D80Xh7WHDFbsUWEkuXM4CYgIAL3E27pYqgp20rHcm8epj +gZ8pQ7wz7y1/oDF7YunhTlNFDXe7tnjGRbj5fWjx7T0ehNo/lC+ySs7IocUauhpG +mlyPcKqKUpzftI7WYuqdpfoOU3aPi7OHbAfIY5I3dW1yq2/nLYHVbtROSA3D7ZKB +54PFrkgfyVyMaXwdQQnqGLy66EtiZcv5+E8qkgD3jM4pXf6zKH6PtgdyKMR+pGoD +AN0SEYkCm5RIbBRy0kFwXBRNVbdU5l9UMLzBelXR8Z1NFZwil8iuhc8N1PdE5LGi +NikK9rYhWnECW6Y7bOUejTYigNj2uYJz2Zi34Q3q/d0mK9R6UcL6Jc3SlIWCPMVQ +waLxi7QODwlqrV1DBh7lXbW4HHXsEVoa+vLtHpA8A7g84VWO5mkp6uafKEatvVfE +CTUAkpBzyppdy8iBm4Stt/pODQePilbcoz3ytYi1gxWtxjpdQA2aJSZor03zkmlk +jP/3unI/mM7fD+4Q+hVc5TB8DQxVgHJHcxmSF6TPy79WdZMhbDKnv3aZ +=beHO +-----END PGP PUBLIC KEY BLOCK----- diff --git a/local-citrix-repo.repo b/local-citrix-repo.repo new file mode 100644 index 0000000..33b01c9 --- /dev/null +++ b/local-citrix-repo.repo @@ -0,0 +1,6 @@ +[local-citrix-repo] +name=Citrix Local Respository +baseurl=file:///var/local/citrix-repo +enabled=1 +gpgcheck=1 +metadata_expire=60 diff --git 
"""Sync the latest Citrix RPM release from GitHub into a local dnf repository.

When a release newer than the last recorded one is published, this script
downloads ctxusb.rpm and ICAClient.rpm from the latest GitHub release of
Reyher-VDI/Citrix-RPM-Repository using ranged multi-threaded requests,
verifies each file's SHA256 against the digests reported by the GitHub API,
rebuilds the local createrepo_c repository, installs the .repo file and GPG
key, and refreshes the dnf/PackageKit caches.

NOTE(review): paths use "/temp/packages" — possibly intended as "/tmp";
kept as-is to preserve behavior. Confirm with the deployment environment.
"""

import datetime
import hashlib
import json
import logging
import os
import shutil
import subprocess
import threading
import time
import urllib.error
import urllib.request
from concurrent.futures import ThreadPoolExecutor

try:
    from tqdm import tqdm
except ImportError:  # robustness: degrade to a no-op bar instead of crashing
    class tqdm:  # minimal stand-in exposing the interface used below
        def __init__(self, *args, **kwargs):
            pass

        def update(self, n):
            pass

        def close(self):
            pass

logging.basicConfig(level=logging.INFO, format='%(asctime)s - Thread %(threadName)s - %(message)s')


def get_expected_hashes_from_api(repo_owner, repo_name):
    """Fetch expected SHA256 hashes directly from GitHub API release assets.

    Args:
        repo_owner: GitHub account that owns the repository.
        repo_name: Repository name.

    Returns:
        dict mapping .rpm asset filename -> lowercase hex SHA256 digest
        (parsed from each asset's "sha256:..." ``digest`` field).

    Raises:
        urllib.error.URLError: network/API failure.
        json.JSONDecodeError: malformed API response.
    """
    api_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest"
    try:
        with urllib.request.urlopen(api_url, timeout=30) as response:
            release_info = json.loads(response.read().decode())

        expected_hashes = {}
        for asset in release_info.get('assets', []):
            filename = asset.get('name', '')
            if filename.endswith('.rpm'):
                digest = asset.get('digest', '')
                if digest.startswith('sha256:'):
                    hash_value = digest[7:].lower()  # strip the "sha256:" prefix
                    expected_hashes[filename] = hash_value
                    # BUG FIX: log lines previously printed the literal "(unknown)".
                    logging.info(f"Fetched hash for {filename}: {hash_value[:16]}...")
                else:
                    logging.warning(f"No valid SHA256 digest found for {filename}: {digest}")

        if not expected_hashes:
            logging.warning("No RPM assets with SHA256 digests found in release.")
            logging.info(f"Available assets: {[a['name'] for a in release_info.get('assets', [])]}")
        else:
            logging.info(f"Fetched {len(expected_hashes)} expected hashes from API.")
        return expected_hashes
    except urllib.error.URLError as e:
        logging.error(f"Failed to fetch release assets from API: {e}")
        raise
    except json.JSONDecodeError as e:
        logging.error(f"Failed to parse API JSON: {e}")
        raise
    except Exception as e:
        logging.error(f"Unexpected error fetching hashes from API: {e}")
        raise


def compute_sha256(file_path):
    """Compute the lowercase hex SHA256 of *file_path*.

    Returns None (and logs the error) if the file cannot be read.
    """
    sha256 = hashlib.sha256()
    try:
        with open(file_path, 'rb') as file:
            # Stream in 8 KiB chunks so large RPMs are never fully in memory.
            while chunk := file.read(8192):
                sha256.update(chunk)
        return sha256.hexdigest().lower()
    except IOError as e:
        logging.error(f"Failed to compute hash for {file_path}: {e}")
        return None


def download_chunk(url, save_path, start, end, total_size, progress_bar, lock):
    """Download bytes [start, end] of *url* into *save_path* at the same offset.

    Errors are logged but not re-raised: a failed range leaves a hole in the
    pre-allocated file, which the SHA256 verification in download_file()
    then detects and reports.
    """
    try:
        req = urllib.request.Request(url)
        req.add_header('Range', f'bytes={start}-{end}')
        # Each worker keeps its own handle open for its disjoint byte range,
        # instead of re-opening the file under the lock for every 8 KiB chunk.
        with urllib.request.urlopen(req, timeout=30) as response, \
                open(save_path, 'r+b') as file:
            file.seek(start)
            while chunk := response.read(8192):
                file.write(chunk)
                with lock:  # tqdm updates are serialized across workers
                    progress_bar.update(len(chunk))
        logging.info(f"Completed range {start}-{end}")
    except urllib.error.HTTPError as http_err:
        logging.error(f"HTTP error in range {start}-{end}: {http_err}")
    except urllib.error.URLError as url_err:
        logging.error(f"URL error in range {start}-{end}: {url_err}")
    except IOError as io_err:
        logging.error(f"File error in range {start}-{end}: {io_err}")
    except Exception as e:
        logging.error(f"Unexpected error in range {start}-{end}: {e}")


def download_file(url, save_path, expected_hashes, num_threads=4):
    """Download *url* to *save_path* with ranged requests and verify its SHA256.

    Args:
        url: Source URL (server must support Range requests and Content-Length).
        save_path: Destination file path; parent directories are created.
        expected_hashes: filename -> expected SHA256, keyed by basename(save_path).
        num_threads: Number of parallel range workers (clamped to file size).

    Raises:
        ValueError: the server did not report a file size.
        Exception: missing expected hash, unreadable file, or hash mismatch.
        urllib.error.HTTPError / URLError / IOError: download or write failure.
    """
    try:
        with urllib.request.urlopen(url, timeout=30) as response:
            total_size = int(response.getheader('Content-Length', 0))
            if total_size == 0:
                raise ValueError("Could not determine file size")
        logging.info(f"Total file size: {total_size} bytes")

        os.makedirs(os.path.dirname(save_path), exist_ok=True)
        # Pre-allocate so each worker can write its range in place.
        with open(save_path, 'wb') as file:
            file.truncate(total_size)

        progress_bar = tqdm(total=total_size, unit='B', unit_scale=True, desc=os.path.basename(save_path), mininterval=0.1)
        lock = threading.Lock()

        # Robustness: avoid zero-length/degenerate ranges for tiny files.
        num_threads = max(1, min(num_threads, total_size))
        chunk_size = total_size // num_threads
        ranges = [(i * chunk_size, (i + 1) * chunk_size - 1 if i < num_threads - 1 else total_size - 1)
                  for i in range(num_threads)]

        start_time = time.time()
        with ThreadPoolExecutor(max_workers=num_threads) as executor:
            futures = [executor.submit(download_chunk, url, save_path, start, end, total_size, progress_bar, lock)
                       for start, end in ranges]
            for future in futures:
                future.result()
        end_time = time.time()

        progress_bar.close()
        print(f"File downloaded successfully and saved to {save_path}")
        logging.info(f"Download completed in {end_time - start_time:.2f} seconds")

        filename = os.path.basename(save_path)
        if filename not in expected_hashes:
            # BUG FIX: message previously printed the literal "(unknown)".
            raise Exception(f"No expected hash found for {filename} in API response. Available: {list(expected_hashes.keys())}")
        expected_hash = expected_hashes[filename]
        computed_hash = compute_sha256(save_path)
        if computed_hash is None:
            raise Exception(f"Hash computation failed for {save_path}")
        if computed_hash != expected_hash:
            raise Exception(f"Hash mismatch for {save_path}: expected {expected_hash}, got {computed_hash}")
        print(f"Hash verified for {filename}: {computed_hash}")

    except urllib.error.HTTPError as http_err:
        logging.error(f"HTTP error occurred: {http_err}")
        raise
    except urllib.error.URLError as url_err:
        logging.error(f"URL error occurred: {url_err}")
        raise
    except IOError as io_err:
        logging.error(f"File error occurred while saving: {io_err}")
        raise
    except Exception as e:
        logging.error(f"An unexpected error occurred: {e}")
        raise


def is_newer_release(repo_owner, repo_name, release_tag=None, last_release_file="/temp/packages/last_release.json"):
    """Check if the current release is newer than the last downloaded one.

    Args:
        repo_owner: GitHub account that owns the repository.
        repo_name: Repository name.
        release_tag: Optional tag override; when None (the normal case) the
            tag is taken from the API response. BUG FIX: the original call
            site passed the state-file path positionally as this parameter,
            so the recorded tag_name was a file path; the parameter is now
            optional and the API's tag_name is used instead.
        last_release_file: JSON state file recording the last downloaded
            release's tag_name and published_at.

    Returns:
        (True, {'tag_name': ..., 'published_at': ...}) when a newer release
        exists (or there is no previous state); (False, None) otherwise.

    Raises:
        ValueError: release info lacks a published_at timestamp.
        urllib.error.URLError / json.JSONDecodeError / IOError on failure.
    """
    try:
        api_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest"
        with urllib.request.urlopen(api_url, timeout=30) as response:
            release_info = json.loads(response.read().decode())
        current_published_at = release_info.get('published_at', '')
        if not current_published_at:
            logging.error("No published_at timestamp found in release info")
            raise ValueError("Invalid release data")
        if release_tag is None:
            release_tag = release_info.get('tag_name', '')

        # GitHub reports ISO-8601 with a trailing 'Z'; make it fromisoformat-safe.
        current_time = datetime.datetime.fromisoformat(current_published_at.replace('Z', '+00:00'))

        if os.path.exists(last_release_file):
            with open(last_release_file, 'r') as f:
                last_release = json.load(f)
            last_tag = last_release.get('tag_name', '')
            last_published_at = last_release.get('published_at', '')
            if last_published_at:
                last_time = datetime.datetime.fromisoformat(last_published_at.replace('Z', '+00:00'))
                if current_time <= last_time and last_tag == release_tag:
                    logging.info(f"No newer release found. Current: {release_tag} ({current_published_at}), Last: {last_tag} ({last_published_at})")
                    return False, None
        logging.info(f"Newer release detected or no previous download: {release_tag} ({current_published_at})")
        return True, {'tag_name': release_tag, 'published_at': current_published_at}
    except urllib.error.URLError as e:
        logging.error(f"Failed to fetch release info: {e}")
        raise
    except json.JSONDecodeError as e:
        logging.error(f"Failed to parse API JSON: {e}")
        raise
    except IOError as e:
        logging.error(f"Failed to read/write {last_release_file}: {e}")
        raise
    except Exception as e:
        logging.error(f"Unexpected error checking release: {e}")
        raise


repo_owner = "Reyher-VDI"
repo_name = "Citrix-RPM-Repository"
last_release_file = "/temp/packages/last_release.json"


def main():
    """Run the end-to-end repository refresh (requires root for the system paths)."""
    # BUG FIX: last_release_file is now passed by keyword — the original
    # passed it positionally into the release_tag parameter.
    is_newer, release_info = is_newer_release(repo_owner, repo_name, last_release_file=last_release_file)
    if is_newer:
        expected_hashes = get_expected_hashes_from_api(repo_owner, repo_name)
        if not expected_hashes:
            raise Exception("Failed to auto-fetch expected hashes from GitHub API. Check logs for details.")

        # Remove any stale downloads so partial files never pass verification.
        for rpm_file in ("/temp/packages/ctxusb.rpm", "/temp/packages/ICAClient.rpm"):
            if os.path.exists(rpm_file):
                os.remove(rpm_file)

        base_url = "https://github.com/Reyher-VDI/Citrix-RPM-Repository/releases/latest/download"
        download_file(f"{base_url}/ctxusb.rpm", "/temp/packages/ctxusb.rpm", expected_hashes)
        download_file(f"{base_url}/ICAClient.rpm", "/temp/packages/ICAClient.rpm", expected_hashes)

        os.makedirs("/var/local/citrix-repo", exist_ok=True)

        for rpm_name in ("ctxusb.rpm", "ICAClient.rpm"):
            logging.info(f"Copying {rpm_name}...")
            shutil.copyfile(f"/temp/packages/{rpm_name}", f"/var/local/citrix-repo/{rpm_name}")
            logging.info(f"Finished copying {rpm_name}")

        # subprocess.run with an argv list instead of os.system (no shell);
        # check=True so a failed repo build/key import aborts instead of
        # silently publishing a broken repository.
        subprocess.run(["createrepo_c", "/var/local/citrix-repo"], check=True)

        logging.info("Copying local-citrix-repo.repo...")
        shutil.copyfile("./local-citrix-repo.repo", "/etc/yum.repos.d/local-citrix-repo.repo")
        logging.info("Finished copying local-citrix-repo.repo")

        logging.info("Copying RPM-GPG-KEY-citrix-local...")
        shutil.copyfile("./RPM-GPG-KEY-citrix-local", "/var/local/citrix-repo/RPM-GPG-KEY-citrix-local")
        logging.info("Finished copying RPM-GPG-KEY-citrix-local")

        subprocess.run(["rpm", "--import", "/var/local/citrix-repo/RPM-GPG-KEY-citrix-local"], check=True)

        # Cache refreshes are best-effort, matching the original os.system calls.
        subprocess.run(["dnf", "makecache"], check=False)
        subprocess.run(["pkcon", "refresh", "force"], check=False)

        # Record what we installed so the next run can skip unchanged releases.
        os.makedirs(os.path.dirname(last_release_file), exist_ok=True)
        with open(last_release_file, 'w') as f:
            json.dump(release_info, f)
        logging.info(f"Saved last release info to {last_release_file}")
    else:
        logging.info("Skipping download; no new release available.")

    print("All files processed successfully!")


if __name__ == "__main__":
    main()