update script again

2025-11-10 10:42:57 +01:00
parent 29667222b2
commit 13503619a8

@@ -15,39 +15,59 @@ import shutil
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - Thread %(threadName)s - %(message)s')
 
-def get_expected_hashes_from_api(repo_owner, repo_name):
-    """Fetch expected SHA256 hashes directly from GitHub API release assets."""
-    api_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest"
+api_base = "https://nachos386.lcube-server.de/api/v1"
+
+def fetch_release_info(repo_owner, repo_name):
+    """Fetch latest release info including assets from Gitea API."""
+    api_url = f"{api_base}/repos/{repo_owner}/{repo_name}/releases/latest"
     try:
         with urllib.request.urlopen(api_url, timeout=30) as response:
             release_info = json.loads(response.read().decode())
-        expected_hashes = {}
+        tag_name = release_info.get('tag_name', '')
+        published_at = release_info.get('published_at', '')
+        if not tag_name or not published_at:
+            raise ValueError("Invalid release data: missing tag_name or published_at")
+        assets = {}
         for asset in release_info.get('assets', []):
             filename = asset.get('name', '')
             if filename.endswith('.rpm'):
+                download_url = asset.get('browser_download_url', '')
+                if not download_url:
+                    logging.warning(f"No download URL for {filename}")
+                    continue
+                # Gitea does not provide digests; hash will be None
+                hash_value = None
+                # For compatibility, check if 'digest' exists (unlikely in Gitea)
                 digest = asset.get('digest', '')
-                if digest.startswith('sha256:'):
+                print(asset)
+                if digest and digest.startswith('sha256:'):
                     hash_value = digest[7:].lower()
-                    expected_hashes[filename] = hash_value
                     logging.info(f"Fetched hash for {filename}: {hash_value[:16]}...")
                 else:
-                    logging.warning(f"No valid SHA256 digest found for {filename}: {digest}")
-        if not expected_hashes:
-            logging.warning("No RPM assets with SHA256 digests found in release.")
+                    logging.info(f"No hash available for {filename}; verification will be skipped")
+                assets[filename] = {'url': download_url, 'hash': hash_value}
+        if not assets:
+            logging.warning("No RPM assets found in release.")
             logging.info(f"Available assets: {[a['name'] for a in release_info.get('assets', [])]}")
         else:
-            logging.info(f"Fetched {len(expected_hashes)} expected hashes from API.")
-        return expected_hashes
+            logging.info(f"Fetched {len(assets)} assets from API.")
+        return {
+            'tag_name': tag_name,
+            'published_at': published_at,
+            'assets': assets
+        }
     except urllib.error.URLError as e:
-        logging.error(f"Failed to fetch release assets from API: {e}")
+        logging.error(f"Failed to fetch release info from API: {e}")
         raise
     except json.JSONDecodeError as e:
         logging.error(f"Failed to parse API JSON: {e}")
         raise
     except Exception as e:
-        logging.error(f"Unexpected error fetching hashes from API: {e}")
+        logging.error(f"Unexpected error fetching release info: {e}")
         raise
 
 def compute_sha256(file_path):
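
Note: for reference, here is a minimal sketch of the payload fetch_release_info expects back from the /releases/latest endpoint, trimmed to the only fields the code above reads (all values are invented for illustration):

    # Illustrative Gitea release payload; only the keys the script touches.
    release_info = {
        "tag_name": "v1.0.0",
        "published_at": "2025-11-10T09:00:00+01:00",
        "assets": [
            {"name": "ctxusb.rpm", "browser_download_url": "https://nachos386.lcube-server.de/..."},
            {"name": "ICAClient.rpm", "browser_download_url": "https://nachos386.lcube-server.de/..."},
        ],
    }
    # With this input the function returns (hash is None, since Gitea
    # supplies no digest field):
    # {'tag_name': 'v1.0.0',
    #  'published_at': '2025-11-10T09:00:00+01:00',
    #  'assets': {'ctxusb.rpm': {'url': '...', 'hash': None},
    #             'ICAClient.rpm': {'url': '...', 'hash': None}}}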
@@ -92,7 +112,7 @@ def download_chunk(url, save_path, start, end, total_size, progress_bar, lock):
     except Exception as e:
         logging.error(f"Unexpected error in range {start}-{end}: {e}")
 
-def download_file(url, save_path, expected_hashes, num_threads=4):
+def download_file(url, save_path, expected_hash=None, num_threads=4):
     try:
         with urllib.request.urlopen(url, timeout=30) as response:
             total_size = int(response.getheader('Content-Length', 0))
@@ -124,15 +144,15 @@ def download_file(url, save_path, expected_hashes, num_threads=4):
         logging.info(f"Download completed in {end_time - start_time:.2f} seconds")
 
         filename = os.path.basename(save_path)
-        if filename not in expected_hashes:
-            raise Exception(f"No expected hash found for {filename} in API response. Available: {list(expected_hashes.keys())}")
-        expected_hash = expected_hashes[filename]
-        computed_hash = compute_sha256(save_path)
-        if computed_hash is None:
-            raise Exception(f"Hash computation failed for {save_path}")
-        if computed_hash != expected_hash:
-            raise Exception(f"Hash mismatch for {save_path}: expected {expected_hash}, got {computed_hash}")
-        print(f"Hash verified for {filename}: {computed_hash}")
+        if expected_hash:
+            computed_hash = compute_sha256(save_path)
+            if computed_hash is None:
+                raise Exception(f"Hash computation failed for {save_path}")
+            if computed_hash != expected_hash:
+                raise Exception(f"Hash mismatch for {save_path}: expected {expected_hash}, got {computed_hash}")
+            print(f"Hash verified for {filename}: {computed_hash}")
+        else:
+            logging.warning(f"Skipping hash verification for {filename} (no expected hash available)")
     except urllib.error.HTTPError as http_err:
         logging.error(f"HTTP error occurred: {http_err}")
@@ -147,18 +167,17 @@ def download_file(url, save_path, expected_hashes, num_threads=4):
         logging.error(f"An unexpected error occurred: {e}")
         raise
 
-def is_newer_release(repo_owner, repo_name, release_tag, last_release_file="/temp/packages/last_release.json"):
+def is_newer_release(repo_owner, repo_name, last_release_file="/temp/packages/last_release.json"):
     """Check if the current release is newer than the last downloaded one."""
     try:
-        api_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest"
-        with urllib.request.urlopen(api_url, timeout=30) as response:
-            release_info = json.loads(response.read().decode())
-        current_published_at = release_info.get('published_at', '')
-        if not current_published_at:
-            logging.error("No published_at timestamp found in release info")
-            raise ValueError("Invalid release data")
-        current_time = datetime.datetime.fromisoformat(current_published_at.replace('Z', '+00:00'))
+        release_data = fetch_release_info(repo_owner, repo_name)
+        current_tag = release_data['tag_name']
+        current_published_at = release_data['published_at']
+        # Handle timezone in published_at (Gitea may include TZ offset, GitHub uses Z)
+        if 'Z' in current_published_at:
+            current_published_at = current_published_at.replace('Z', '+00:00')
+        current_time = datetime.datetime.fromisoformat(current_published_at)
 
         if os.path.exists(last_release_file):
             with open(last_release_file, 'r') as f:
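
Note on the timezone handling just added: datetime.fromisoformat only learned to parse a trailing 'Z' in Python 3.11, so the explicit replace keeps the parse working on older interpreters as well:

    import datetime

    ts = "2025-11-10T09:00:00Z"  # GitHub-style UTC suffix
    dt = datetime.datetime.fromisoformat(ts.replace('Z', '+00:00'))  # portable
    # On Python >= 3.11 the replace is unnecessary:
    # dt = datetime.datetime.fromisoformat(ts)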
@@ -166,12 +185,19 @@ def is_newer_release(repo_owner, repo_name, release_tag, last_release_file="/tem
                 last_tag = last_release.get('tag_name', '')
                 last_published_at = last_release.get('published_at', '')
                 if last_published_at:
-                    last_time = datetime.datetime.fromisoformat(last_published_at.replace('Z', '+00:00'))
-                    if current_time <= last_time and last_tag == release_tag:
-                        logging.info(f"No newer release found. Current: {release_tag} ({current_published_at}), Last: {last_tag} ({last_published_at})")
+                    if 'Z' in last_published_at:
+                        last_published_at = last_published_at.replace('Z', '+00:00')
+                    last_time = datetime.datetime.fromisoformat(last_published_at)
+                    if current_time <= last_time and last_tag == current_tag:
+                        logging.info(f"No newer release found. Current: {current_tag} ({current_published_at}), Last: {last_tag} ({last_published_at})")
                         return False, None
 
-        logging.info(f"Newer release detected or no previous download: {release_tag} ({current_published_at})")
-        return True, {'tag_name': release_tag, 'published_at': current_published_at}
+        logging.info(f"Newer release detected or no previous download: {current_tag} ({current_published_at})")
+        return True, {
+            'tag_name': current_tag,
+            'published_at': current_published_at,
+            'assets': release_data['assets']
+        }
     except urllib.error.URLError as e:
         logging.error(f"Failed to fetch release info: {e}")
         raise
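
Note: since is_newer_release now returns the assets alongside the tag, callers no longer need a second API call. Its contract is roughly (sketch):

    # (False, None)  -> nothing new; skip the download
    # (True, data)   -> data carries everything needed to download:
    #                   {'tag_name': ..., 'published_at': ..., 'assets': {...}}
    is_newer, release_data = is_newer_release(repo_owner, repo_name)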
@@ -185,24 +211,35 @@ def is_newer_release(repo_owner, repo_name, release_tag, last_release_file="/tem
         logging.error(f"Unexpected error checking release: {e}")
         raise
 
-repo_owner = "Reyher-VDI"
-repo_name = "Citrix-RPM-Repository"
+repo_owner = "REYHER-Systemtechnik-Public"
+repo_name = "VDI-Linux"
 last_release_file = "/temp/packages/last_release.json"
-is_newer, release_info = is_newer_release(repo_owner, repo_name, last_release_file)
+is_newer, release_data = is_newer_release(repo_owner, repo_name, last_release_file)
 
 if is_newer:
-    expected_hashes = get_expected_hashes_from_api(repo_owner, repo_name)
-    if not expected_hashes:
-        raise Exception("Failed to auto-fetch expected hashes from GitHub API. Check logs for details.")
+    assets = release_data['assets']
+    if not assets:
+        raise Exception("No RPM assets available in the latest release. Check logs for details.")
 
-    for rpm_file in ["/temp/packages/ctxusb.rpm", "/temp/packages/ICAClient.rpm"]:
-        if os.path.exists(rpm_file):
-            os.remove(rpm_file)
+    rpm_files = {
+        "/temp/packages/ctxusb.rpm": "ctxusb.rpm",
+        "/temp/packages/ICAClient.rpm": "ICAClient.rpm"
+    }
 
-    download_file("https://github.com/Reyher-VDI/Citrix-RPM-Repository/releases/latest/download/ctxusb.rpm", "/temp/packages/ctxusb.rpm", expected_hashes)
-    download_file("https://github.com/Reyher-VDI/Citrix-RPM-Repository/releases/latest/download/ICAClient.rpm", "/temp/packages/ICAClient.rpm", expected_hashes)
+    for local_path, filename in rpm_files.items():
+        if filename not in assets:
+            raise Exception(f"Required asset {filename} not found in release. Available: {list(assets.keys())}")
+        asset_info = assets[filename]
+        url = asset_info['url']
+        expected_hash = asset_info['hash']
+        if os.path.exists(local_path):
+            os.remove(local_path)
+        download_file(url, local_path, expected_hash)
 
-    os.makedirs("/var/local/citrix-repo", exist_ok = True)
+    os.makedirs("/var/local/citrix-repo", exist_ok=True)
 
     logging.info("Copying ctxusb.rpm...")
     shutil.copyfile("/temp/packages/ctxusb.rpm", "/var/local/citrix-repo/ctxusb.rpm")
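
Note on the new rpm_files mapping: it keys local paths to release asset names, so supporting a further package is a single added entry (the commented line below is hypothetical):

    rpm_files = {
        "/temp/packages/ctxusb.rpm": "ctxusb.rpm",
        "/temp/packages/ICAClient.rpm": "ICAClient.rpm",
        # "/temp/packages/another.rpm": "another.rpm",  # hypothetical addition
    }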
@@ -229,7 +266,8 @@ if is_newer:
     os.makedirs(os.path.dirname(last_release_file), exist_ok=True)
     with open(last_release_file, 'w') as f:
-        json.dump(release_info, f)
+        # Save only tag and published_at; assets are transient
+        json.dump({'tag_name': release_data['tag_name'], 'published_at': release_data['published_at']}, f)
     logging.info(f"Saved last release info to {last_release_file}")
 else:
     logging.info("Skipping download; no new release available.")