Compare commits
3 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 3ce513a4b3 | |
| | 13503619a8 | |
| | 29667222b2 | |
153  update_local_repo.py  (Normal file → Executable file)
```diff
@@ -1,3 +1,5 @@
+#!/usr/bin/env python3
+
 import urllib.request
 import os
 import urllib.error
```
```diff
@@ -10,42 +12,73 @@ import hashlib
 import json
 import datetime
 import shutil
 import subprocess
 
 logging.basicConfig(level=logging.INFO, format='%(asctime)s - Thread %(threadName)s - %(message)s')
 
-def get_expected_hashes_from_api(repo_owner, repo_name):
-    """Fetch expected SHA256 hashes directly from GitHub API release assets."""
-    api_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest"
+# Update the local repository via git pull before proceeding
+try:
+    subprocess.run(['git', 'pull'], check=True, capture_output=True)
+    logging.info("Updated repo via git pull")
+except subprocess.CalledProcessError as e:
+    logging.warning(f"Git pull failed (non-zero exit): {e}")
+except FileNotFoundError:
+    logging.warning("Git not found; skipping pull")
+except Exception as e:
+    logging.warning(f"Unexpected error during git pull: {e}")
+
+api_base = "https://nachos386.lcube-server.de/api/v1"
+
+def fetch_release_info(repo_owner, repo_name):
+    """Fetch latest release info including assets from Gitea API."""
+    api_url = f"{api_base}/repos/{repo_owner}/{repo_name}/releases/latest"
     try:
         with urllib.request.urlopen(api_url, timeout=30) as response:
             release_info = json.loads(response.read().decode())
 
-            expected_hashes = {}
+            tag_name = release_info.get('tag_name', '')
+            published_at = release_info.get('published_at', '')
+            if not tag_name or not published_at:
+                raise ValueError("Invalid release data: missing tag_name or published_at")
+
+            assets = {}
             for asset in release_info.get('assets', []):
                 filename = asset.get('name', '')
                 if filename.endswith('.rpm'):
+                    download_url = asset.get('browser_download_url', '')
+                    if not download_url:
+                        logging.warning(f"No download URL for {filename}")
+                        continue
+                    # Gitea does not provide digests; hash will be None
+                    hash_value = None
+                    # For compatibility, check if 'digest' exists (unlikely in Gitea)
                     digest = asset.get('digest', '')
-                    if digest.startswith('sha256:'):
+                    if digest and digest.startswith('sha256:'):
                         hash_value = digest[7:].lower()
-                        expected_hashes[filename] = hash_value
                         logging.info(f"Fetched hash for {filename}: {hash_value[:16]}...")
                     else:
-                        logging.warning(f"No valid SHA256 digest found for {filename}: {digest}")
+                        logging.info(f"No hash available for {filename}; verification will be skipped")
+                    assets[filename] = {'url': download_url, 'hash': hash_value}
 
-            if not expected_hashes:
-                logging.warning("No RPM assets with SHA256 digests found in release.")
+            if not assets:
+                logging.warning("No RPM assets found in release.")
                 logging.info(f"Available assets: {[a['name'] for a in release_info.get('assets', [])]}")
             else:
-                logging.info(f"Fetched {len(expected_hashes)} expected hashes from API.")
-            return expected_hashes
+                logging.info(f"Fetched {len(assets)} assets from API.")
+
+            return {
+                'tag_name': tag_name,
+                'published_at': published_at,
+                'assets': assets
+            }
     except urllib.error.URLError as e:
-        logging.error(f"Failed to fetch release assets from API: {e}")
+        logging.error(f"Failed to fetch release info from API: {e}")
         raise
     except json.JSONDecodeError as e:
         logging.error(f"Failed to parse API JSON: {e}")
         raise
     except Exception as e:
-        logging.error(f"Unexpected error fetching hashes from API: {e}")
+        logging.error(f"Unexpected error fetching release info: {e}")
         raise
 
 def compute_sha256(file_path):
```
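Note: a minimal usage sketch (not part of the diff) of the `fetch_release_info()` helper added above. It assumes the function is available in scope and simply walks the returned structure; `hash` may be `None` because Gitea assets carry no digest field.

```python
# Hypothetical usage of fetch_release_info(); output is illustrative only.
# Repo owner/name are taken from later in this diff.
release = fetch_release_info("REYHER-Systemtechnik-Public", "VDI-Linux")
print(release['tag_name'], release['published_at'])
for name, info in release['assets'].items():
    digest = info['hash'] or "no digest; verification will be skipped"
    print(f"{name}: {info['url']} ({digest})")
```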
```diff
@@ -90,7 +123,7 @@ def download_chunk(url, save_path, start, end, total_size, progress_bar, lock):
     except Exception as e:
         logging.error(f"Unexpected error in range {start}-{end}: {e}")
 
-def download_file(url, save_path, expected_hashes, num_threads=4):
+def download_file(url, save_path, expected_hash=None, num_threads=4):
    try:
        with urllib.request.urlopen(url, timeout=30) as response:
            total_size = int(response.getheader('Content-Length', 0))
```
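Note: `download_chunk()` is only touched at its error handler here, so its body is not shown. A rough sketch of the kind of ranged request its signature implies; the function name and structure below are assumptions, not the code in this file.

```python
# Illustrative only: fetch one byte range of a remote file via an HTTP Range header.
import urllib.request

def fetch_range(url, start, end, timeout=30):
    """Return the bytes from start to end (inclusive) of the resource at url."""
    req = urllib.request.Request(url, headers={'Range': f'bytes={start}-{end}'})
    with urllib.request.urlopen(req, timeout=timeout) as resp:
        return resp.read()
```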
```diff
@@ -122,15 +155,15 @@ def download_file(url, save_path, expected_hashes, num_threads=4):
         logging.info(f"Download completed in {end_time - start_time:.2f} seconds")
 
         filename = os.path.basename(save_path)
-        if filename not in expected_hashes:
-            raise Exception(f"No expected hash found for {filename} in API response. Available: {list(expected_hashes.keys())}")
-        expected_hash = expected_hashes[filename]
-        computed_hash = compute_sha256(save_path)
-        if computed_hash is None:
-            raise Exception(f"Hash computation failed for {save_path}")
-        if computed_hash != expected_hash:
-            raise Exception(f"Hash mismatch for {save_path}: expected {expected_hash}, got {computed_hash}")
-        print(f"Hash verified for {filename}: {computed_hash}")
+        if expected_hash:
+            computed_hash = compute_sha256(save_path)
+            if computed_hash is None:
+                raise Exception(f"Hash computation failed for {save_path}")
+            if computed_hash != expected_hash:
+                raise Exception(f"Hash mismatch for {save_path}: expected {expected_hash}, got {computed_hash}")
+            print(f"Hash verified for {filename}: {computed_hash}")
+        else:
+            logging.warning(f"Skipping hash verification for {filename} (no expected hash available)")
 
     except urllib.error.HTTPError as http_err:
         logging.error(f"HTTP error occurred: {http_err}")
```
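Note: `compute_sha256()` is referenced above but defined outside this hunk. A plausible chunked-hashing sketch that matches the None-on-failure behaviour the caller checks for; this is an assumption, not the file's actual implementation.

```python
# Assumed sketch of compute_sha256(): hash the file in chunks, return None on I/O error.
import hashlib
import logging

def compute_sha256(file_path, chunk_size=8192):
    sha = hashlib.sha256()
    try:
        with open(file_path, 'rb') as f:
            for chunk in iter(lambda: f.read(chunk_size), b''):
                sha.update(chunk)
        return sha.hexdigest()
    except OSError as e:
        logging.error(f"Failed to hash {file_path}: {e}")
        return None
```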
```diff
@@ -145,18 +178,17 @@ def download_file(url, save_path, expected_hashes, num_threads=4):
         logging.error(f"An unexpected error occurred: {e}")
         raise
 
-def is_newer_release(repo_owner, repo_name, release_tag, last_release_file="/temp/packages/last_release.json"):
+def is_newer_release(repo_owner, repo_name, last_release_file="/temp/packages/last_release.json"):
     """Check if the current release is newer than the last downloaded one."""
     try:
-        api_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/releases/latest"
-        with urllib.request.urlopen(api_url, timeout=30) as response:
-            release_info = json.loads(response.read().decode())
-        current_published_at = release_info.get('published_at', '')
-        if not current_published_at:
-            logging.error("No published_at timestamp found in release info")
-            raise ValueError("Invalid release data")
-
-        current_time = datetime.datetime.fromisoformat(current_published_at.replace('Z', '+00:00'))
+        release_data = fetch_release_info(repo_owner, repo_name)
+        current_tag = release_data['tag_name']
+        current_published_at = release_data['published_at']
+
+        # Handle timezone in published_at (Gitea may include TZ offset, GitHub uses Z)
+        if 'Z' in current_published_at:
+            current_published_at = current_published_at.replace('Z', '+00:00')
+        current_time = datetime.datetime.fromisoformat(current_published_at)
 
         if os.path.exists(last_release_file):
             with open(last_release_file, 'r') as f:
```
```diff
@@ -164,12 +196,19 @@ def is_newer_release(repo_owner, repo_name, release_tag, last_release_file="/tem
                 last_tag = last_release.get('tag_name', '')
                 last_published_at = last_release.get('published_at', '')
                 if last_published_at:
-                    last_time = datetime.datetime.fromisoformat(last_published_at.replace('Z', '+00:00'))
-                    if current_time <= last_time and last_tag == release_tag:
-                        logging.info(f"No newer release found. Current: {release_tag} ({current_published_at}), Last: {last_tag} ({last_published_at})")
+                    if 'Z' in last_published_at:
+                        last_published_at = last_published_at.replace('Z', '+00:00')
+                    last_time = datetime.datetime.fromisoformat(last_published_at)
+                    if current_time <= last_time and last_tag == current_tag:
+                        logging.info(f"No newer release found. Current: {current_tag} ({current_published_at}), Last: {last_tag} ({last_published_at})")
                         return False, None
-        logging.info(f"Newer release detected or no previous download: {release_tag} ({current_published_at})")
-        return True, {'tag_name': release_tag, 'published_at': current_published_at}
+
+        logging.info(f"Newer release detected or no previous download: {current_tag} ({current_published_at})")
+        return True, {
+            'tag_name': current_tag,
+            'published_at': current_published_at,
+            'assets': release_data['assets']
+        }
     except urllib.error.URLError as e:
         logging.error(f"Failed to fetch release info: {e}")
         raise
```
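Note: the 'Z' normalisation above exists because `datetime.datetime.fromisoformat()` on Python versions before 3.11 rejects a trailing `Z`. A small illustration with made-up timestamps:

```python
# Illustration only: normalise a trailing 'Z' before parsing, as the diff does.
import datetime

for ts in ("2024-05-01T12:00:00Z", "2024-05-01T14:00:00+02:00"):
    if 'Z' in ts:
        ts = ts.replace('Z', '+00:00')
    print(datetime.datetime.fromisoformat(ts).isoformat())
```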
```diff
@@ -183,24 +222,35 @@ def is_newer_release(repo_owner, repo_name, release_tag, last_release_file="/tem
         logging.error(f"Unexpected error checking release: {e}")
         raise
 
-repo_owner = "Reyher-VDI"
-repo_name = "Citrix-RPM-Repository"
+repo_owner = "REYHER-Systemtechnik-Public"
+repo_name = "VDI-Linux"
 last_release_file = "/temp/packages/last_release.json"
 
-is_newer, release_info = is_newer_release(repo_owner, repo_name, last_release_file)
+is_newer, release_data = is_newer_release(repo_owner, repo_name, last_release_file)
 if is_newer:
-    expected_hashes = get_expected_hashes_from_api(repo_owner, repo_name)
-    if not expected_hashes:
-        raise Exception("Failed to auto-fetch expected hashes from GitHub API. Check logs for details.")
+    assets = release_data['assets']
+    if not assets:
+        raise Exception("No RPM assets available in the latest release. Check logs for details.")
 
-    for rpm_file in ["/temp/packages/ctxusb.rpm", "/temp/packages/ICAClient.rpm"]:
-        if os.path.exists(rpm_file):
-            os.remove(rpm_file)
+    rpm_files = {
+        "/temp/packages/ctxusb.rpm": "ctxusb.rpm",
+        "/temp/packages/ICAClient.rpm": "ICAClient.rpm"
+    }
 
-    download_file("https://github.com/Reyher-VDI/Citrix-RPM-Repository/releases/latest/download/ctxusb.rpm", "/temp/packages/ctxusb.rpm", expected_hashes)
-    download_file("https://github.com/Reyher-VDI/Citrix-RPM-Repository/releases/latest/download/ICAClient.rpm", "/temp/packages/ICAClient.rpm", expected_hashes)
-
-    os.makedirs("/var/local/citrix-repo", exist_ok = True)
+    for local_path, filename in rpm_files.items():
+        if filename not in assets:
+            raise Exception(f"Required asset {filename} not found in release. Available: {list(assets.keys())}")
+
+        asset_info = assets[filename]
+        url = asset_info['url']
+        expected_hash = asset_info['hash']
+
+        if os.path.exists(local_path):
+            os.remove(local_path)
+
+        download_file(url, local_path, expected_hash)
+
+    os.makedirs("/var/local/citrix-repo", exist_ok=True)
 
     logging.info("Copying ctxusb.rpm...")
     shutil.copyfile("/temp/packages/ctxusb.rpm", "/var/local/citrix-repo/ctxusb.rpm")
```
```diff
@@ -227,7 +277,8 @@ if is_newer:
 
     os.makedirs(os.path.dirname(last_release_file), exist_ok=True)
     with open(last_release_file, 'w') as f:
-        json.dump(release_info, f)
+        # Save only tag and published_at; assets are transient
+        json.dump({'tag_name': release_data['tag_name'], 'published_at': release_data['published_at']}, f)
     logging.info(f"Saved last release info to {last_release_file}")
 else:
     logging.info("Skipping download; no new release available.")
```