Files
OmniNX-Downloader/generate_release_files.py
niklascfw 7dfdfa0462 Removed RELEASE_XXX format
footer writing has been replaced with the version being in the INI sections themselves
2026-02-10 16:38:44 +01:00

251 lines
9.8 KiB
Python
Executable File
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
#!/usr/bin/env python3
"""
Update main INI files (apps, sysmodules, overlays, emulation) with latest
GitHub/Gitea release versions in section names, e.g. [DBI_EN - 864].
No RELEASE_*.ini files; versions live only in the main INIs.
"""
import re
import json
import os
import time
import urllib.request
import urllib.error
from pathlib import Path
from typing import Dict, List, Tuple, Optional, Any
# Base URL for the GitHub REST API's repository endpoints.
GITHUB_API = "https://api.github.com/repos"
# Optional personal access token; when set it is sent as an Authorization
# header and raises the API rate limit. Empty string means unauthenticated.
GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN', '')
def extract_repo_from_github_url(url: str) -> Optional[Tuple[str, str]]:
    """Pull the (owner, repo) pair out of a GitHub releases API URL.

    Returns None when the URL does not contain a
    ``/repos/<owner>/<repo>/releases`` path.
    """
    found = re.search(r'/repos/([^/]+)/([^/]+)/releases', url)
    if found is None:
        return None
    owner, repo = found.groups()
    return (owner, repo)
def extract_repo_from_gitea_url(url: str) -> Optional[Tuple[str, str, str]]:
    """Split a Gitea releases API URL into (api_base, owner, repo).

    The api_base is the scheme+host plus ``/api/v1``. Returns None when
    the URL does not match the expected releases path.
    """
    pattern = r'(https://[^/]+/api/v1)/repos/([^/]+)/([^/]+)/releases'
    m = re.search(pattern, url)
    return None if m is None else (m.group(1), m.group(2), m.group(3))
def get_latest_tag_github(owner: str, repo: str) -> Optional[str]:
    """Fetch the most recent release tag for a GitHub repository.

    Queries the releases list (newest first, one item) rather than
    ``/releases/latest`` so pre-releases are also considered.

    Returns the tag name (falling back to the release name) or None on
    any error; failures are reported to stdout rather than raised.
    """
    url = f"{GITHUB_API}/{owner}/{repo}/releases?per_page=1"
    try:
        req = urllib.request.Request(url)
        req.add_header('User-Agent', 'Release-Tag-Fetcher/1.0')
        if GITHUB_TOKEN:
            # Authenticated requests get a much higher rate limit.
            req.add_header('Authorization', f'token {GITHUB_TOKEN}')
        with urllib.request.urlopen(req, timeout=10) as response:
            releases = json.loads(response.read().decode('utf-8'))
        if releases:
            return releases[0].get('tag_name', releases[0].get('name', ''))
    except urllib.error.HTTPError as e:
        if e.code == 403:
            print(" Rate limit exceeded. Set GITHUB_TOKEN env var for higher limits.")
        elif e.code == 404:
            print(" Repository not found")
        else:
            print(f" HTTP {e.code}: {e.reason}")
    except Exception as e:  # network errors, timeouts, bad JSON
        print(f" Error: {e}")
    return None
def get_latest_tag_gitea(api_base: str, owner: str, repo: str) -> Optional[str]:
    """Fetch the most recent release tag from a Gitea instance.

    Queries the releases list (newest first, one item). Returns the tag
    name (falling back to the release name) or None on any error;
    failures are reported to stdout rather than raised.
    """
    url = f"{api_base}/repos/{owner}/{repo}/releases?limit=1"
    try:
        req = urllib.request.Request(url)
        req.add_header('User-Agent', 'Release-Tag-Fetcher/1.0')
        with urllib.request.urlopen(req, timeout=10) as response:
            releases = json.loads(response.read().decode('utf-8'))
        if releases:
            return releases[0].get('tag_name', releases[0].get('name', ''))
    except urllib.error.HTTPError as e:
        if e.code == 404:
            print(" Repository not found")
        else:
            print(f" HTTP {e.code}: {e.reason}")
    except Exception as e:  # network errors, timeouts, bad JSON
        print(f" Error: {e}")
    return None
def get_latest_tag(entry: Dict[str, Any]) -> Optional[str]:
    """Dispatch a release lookup to GitHub or Gitea based on entry['source']."""
    if entry.get('source') != 'gitea':
        # GitHub is the default when no source is recorded.
        return get_latest_tag_github(entry['owner'], entry['repo'])
    return get_latest_tag_gitea(entry['api_base'], entry['owner'], entry['repo'])
def parse_ini_file(file_path: Path, strip_version_suffix: bool = True) -> List[Dict[str, Any]]:
    """Parse an INI file and collect entries whose sections reference a
    GitHub or Gitea releases API URL.

    With strip_version_suffix=True, a section named ``[DBI_EN - 864]``
    is reported under the base name ``DBI_EN`` so headers can later be
    rewritten in place with a fresh version.

    Returns a list of dicts with keys: name, owner, repo, source, url
    (plus api_base for Gitea entries). GitHub wins when a section has
    both kinds of URL.
    """
    github_re = re.compile(r'https://api\.github\.com/repos/([^/\s]+)/([^/\s]+)/releases[^\s]*')
    gitea_re = re.compile(r'(https://[^/\s]+/api/v1)/repos/([^/\s]+)/([^/\s]+)/releases[^\s]*')
    content = file_path.read_text(encoding='utf-8')
    headers = list(re.finditer(r'^\[([^\]]+)\]', content, re.MULTILINE))
    entries: List[Dict[str, Any]] = []
    for idx, header in enumerate(headers):
        section_name = header.group(1)
        name = section_name.split(' - ', 1)[0].strip() if strip_version_suffix else section_name
        start = header.end()
        # End of this section is the start of the next header (or EOF);
        # computed from the precomputed header list instead of re-scanning
        # (and copying) the remainder of the file for every section.
        end = headers[idx + 1].start() if idx + 1 < len(headers) else len(content)
        body = content[start:end]
        gh = github_re.search(body)
        if gh:
            entries.append({
                'name': name,
                'owner': gh.group(1),
                'repo': gh.group(2),
                'source': 'github',
                'url': gh.group(0),
            })
            continue
        gt = gitea_re.search(body)
        if gt:
            entries.append({
                'name': name,
                'api_base': gt.group(1),
                'owner': gt.group(2),
                'repo': gt.group(3),
                'source': 'gitea',
                'url': gt.group(0),
            })
    return entries
def update_ini_section_versions(ini_path: Path, versions: Dict[str, str]) -> None:
    """Rewrite INI section headers in place to embed a version number,
    e.g. ``[DBI_EN]`` (or ``[DBI_EN - 123]``) becomes ``[DBI_EN - 864]``.

    Only headers whose base name (text before ' - ') appears in
    *versions* are touched; every other line passes through unchanged.
    """
    header_re = re.compile(r'^\[([^\]]+)\]\s*$')
    with open(ini_path, 'r', encoding='utf-8') as src:
        original_lines = src.readlines()
    rewritten: List[str] = []
    for raw in original_lines:
        match = header_re.match(raw)
        if match is not None:
            base_name = match.group(1).split(' - ', 1)[0].strip()
            if base_name in versions:
                raw = f"[{base_name} - {versions[base_name]}]\n"
        rewritten.append(raw)
    with open(ini_path, 'w', encoding='utf-8') as dst:
        dst.writelines(rewritten)
    print(f" Updated section versions in {ini_path.name}")
def _clean_tag(tag: str) -> str:
"""Normalize version string for display (strip v, truncate if needed)."""
clean = tag.lstrip('v')
if len(clean) > 30:
if '-' in clean:
parts = clean.split('-')
if len(parts) > 1 and len(parts[-1]) > 20:
clean = f"{parts[-2]}-{parts[-1][:7]}" if len(parts) > 1 else parts[-1][:7]
else:
clean = clean[:30]
else:
clean = clean[:30]
return clean
def fetch_versions_and_update_ini(category: str, ini_path: Path) -> Optional[Dict[str, Any]]:
    """Fetch the latest release tag for every entry in *ini_path* and
    rewrite its section headers in place.

    Returns a stats dict (category, total, success, failed,
    failed_entries), or None when the INI contains no recognizable
    GitHub/Gitea release entries.
    """
    entries = parse_ini_file(ini_path, strip_version_suffix=True)
    if not entries:
        return None
    print(f"\n{category} ({ini_path.name})...")
    print(f"Found {len(entries)} entries")
    versions: Dict[str, str] = {}
    success_total = 0
    failure_total = 0
    failed_entries: List[str] = []
    for index, entry in enumerate(entries):
        if index:
            # Brief pause between requests to stay polite to the APIs.
            time.sleep(0.5)
        source = entry.get('source', 'github')
        print(f" Fetching {entry['name']} ({entry['owner']}/{entry['repo']}) [{source}]...", end=' ')
        tag = get_latest_tag(entry)
        if not tag:
            failure_total += 1
            failed_entries.append(f"{entry['name']} ({entry['owner']}/{entry['repo']})")
            print("✗ Failed")
            continue
        cleaned = _clean_tag(tag)
        versions[entry['name']] = cleaned
        success_total += 1
        print(f"{cleaned}")
    if versions:
        update_ini_section_versions(ini_path, versions)
    print(f" Success: {success_total}/{len(entries)}")
    if failure_total > 0:
        print(f" Failed: {failure_total}/{len(entries)}")
        for item in failed_entries:
            print(f" - {item}")
    return {
        'category': category,
        'total': len(entries),
        'success': success_total,
        'failed': failure_total,
        'failed_entries': failed_entries,
    }
def main():
    """Entry point: refresh section versions in every category INI under include/."""
    include_path = Path(__file__).parent / "include"
    print("GitHub / Gitea update main INI section versions")
    if GITHUB_TOKEN:
        print("✓ Using GitHub token (higher rate limit)")
    else:
        print("⚠ No GitHub token. Set GITHUB_TOKEN for higher rate limits.")
    print("=" * 50)
    categories = (
        ('sysmodules', 'sysmodules/sysmodules.ini'),
        ('overlays', 'overlays/overlays.ini'),
        ('apps', 'apps/apps.ini'),
        ('emulation', 'emulation/emulation.ini'),
    )
    all_results: List[Dict[str, Any]] = []
    for category, rel_path in categories:
        target = include_path / rel_path
        if not target.exists():
            # Missing category files are simply skipped.
            continue
        stats = fetch_versions_and_update_ini(category, target)
        if stats:
            all_results.append(stats)
    print("\n" + "=" * 50)
    print("FINAL SUMMARY")
    print("=" * 50)
    total_entries = sum(r['total'] for r in all_results)
    total_success = sum(r['success'] for r in all_results)
    total_failed = sum(r['failed'] for r in all_results)
    print(f"Total entries processed: {total_entries}")
    if total_entries > 0:
        print(f"Successfully fetched: {total_success} ({total_success/total_entries*100:.1f}%)")
        print(f"Failed: {total_failed} ({total_failed/total_entries*100:.1f}%)")
    else:
        print("Successfully fetched: 0")
    if total_failed > 0:
        print("\nFailed entries by category:")
        for stats in all_results:
            if stats['failed'] > 0:
                print(f" {stats['category']}:")
                for item in stats['failed_entries']:
                    print(f" - {item}")
    print("\n" + "=" * 50)
    print("Done!")
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()