Removed RELEASE_XXX format

Footer writing has been replaced by embedding the version directly in the INI section headers themselves.
This commit is contained in:
2026-02-10 16:38:44 +01:00
parent 7995cc5d94
commit 7dfdfa0462
23 changed files with 150 additions and 365 deletions

View File

@@ -1,7 +1,8 @@
#!/usr/bin/env python3
"""
Generate RELEASE_X.ini files by fetching latest GitHub/Gitea release tags
for sysmodules, overlays, apps, and emulation.
Update main INI files (apps, sysmodules, overlays, emulation) with latest
GitHub/Gitea release versions in section names, e.g. [DBI_EN - 864].
No RELEASE_*.ini files; versions live only in the main INIs.
"""
import re
@@ -10,7 +11,6 @@ import os
import time
import urllib.request
import urllib.error
import configparser
from pathlib import Path
from typing import Dict, List, Tuple, Optional, Any
@@ -22,7 +22,6 @@ GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN', '')
def extract_repo_from_github_url(url: str) -> Optional[Tuple[str, str]]:
    """Return (owner, repo) parsed from a GitHub releases API URL, or None.

    Expected shape: https://api.github.com/repos/<owner>/<repo>/releases?...
    """
    found = re.search(r'/repos/([^/]+)/([^/]+)/releases', url)
    if found is None:
        return None
    return (found.group(1), found.group(2))
@@ -30,7 +29,6 @@ def extract_repo_from_github_url(url: str) -> Optional[Tuple[str, str]]:
def extract_repo_from_gitea_url(url: str) -> Optional[Tuple[str, str, str]]:
    """Return (api_base, owner, repo) parsed from a Gitea releases API URL, or None.

    Expected shape: https://<host>/api/v1/repos/<owner>/<repo>/releases?...
    """
    found = re.search(r'(https://[^/]+/api/v1)/repos/([^/]+)/([^/]+)/releases', url)
    if found is None:
        return None
    return (found.group(1), found.group(2), found.group(3))
@@ -44,7 +42,6 @@ def get_latest_tag_github(owner: str, repo: str) -> Optional[str]:
req.add_header('User-Agent', 'Release-Tag-Fetcher/1.0')
if GITHUB_TOKEN:
req.add_header('Authorization', f'token {GITHUB_TOKEN}')
with urllib.request.urlopen(req, timeout=10) as response:
releases = json.loads(response.read().decode('utf-8'))
if releases and len(releases) > 0:
@@ -66,7 +63,6 @@ def get_latest_tag_gitea(api_base: str, owner: str, repo: str) -> Optional[str]:
try:
req = urllib.request.Request(url)
req.add_header('User-Agent', 'Release-Tag-Fetcher/1.0')
with urllib.request.urlopen(req, timeout=10) as response:
releases = json.loads(response.read().decode('utf-8'))
if releases and len(releases) > 0:
@@ -86,127 +82,117 @@ def get_latest_tag(entry: Dict[str, Any]) -> Optional[str]:
return get_latest_tag_gitea(entry['api_base'], entry['owner'], entry['repo'])
return get_latest_tag_github(entry['owner'], entry['repo'])
def parse_ini_file(file_path: Path) -> List[Dict[str, Any]]:
"""Parse .ini file and extract entries with GitHub or Gitea API URLs."""
def parse_ini_file(file_path: Path, strip_version_suffix: bool = True) -> List[Dict[str, Any]]:
"""Parse .ini file and extract entries with GitHub or Gitea API URLs.
With strip_version_suffix=True, section names like [DBI_EN - 864] are treated
as base name 'DBI_EN' for API matching; section headers are then updated in place.
"""
entries = []
with open(file_path, 'r', encoding='utf-8') as f:
content = f.read()
# Find all section headers
sections = re.finditer(r'^\[([^\]]+)\]', content, re.MULTILINE)
for section_match in sections:
section_name = section_match.group(1)
name = section_name.split(' - ', 1)[0].strip() if strip_version_suffix else section_name
section_start = section_match.end()
# Find next section or end of file
next_section = re.search(r'^\[', content[section_start:], re.MULTILINE)
section_end = section_start + (next_section.start() if next_section else len(content[section_start:]))
section_content = content[section_start:section_end]
# Look for GitHub API URLs first
github_urls = re.findall(r'https://api\.github\.com/repos/[^\s]+', section_content)
if github_urls:
repo_info = extract_repo_from_github_url(github_urls[0])
if repo_info:
entries.append({
'name': section_name,
'name': name,
'owner': repo_info[0],
'repo': repo_info[1],
'source': 'github',
'url': github_urls[0]
})
continue
# Look for Gitea API URLs (e.g. https://host/api/v1/repos/owner/repo/releases?...)
gitea_urls = re.findall(r'https://[^\s]+/api/v1/repos/[^\s]+/releases[^\s]*', section_content)
if gitea_urls:
repo_info = extract_repo_from_gitea_url(gitea_urls[0])
if repo_info:
entries.append({
'name': section_name,
'name': name,
'api_base': repo_info[0],
'owner': repo_info[1],
'repo': repo_info[2],
'source': 'gitea',
'url': gitea_urls[0]
})
return entries
def generate_release_ini(category: str, entries: List[Dict[str, Any]], output_path: Path):
"""Generate RELEASE_X.ini file for a category."""
print(f"\nGenerating {output_path.name}...")
def update_ini_section_versions(ini_path: Path, versions: Dict[str, str]) -> None:
    """Rewrite INI section headers to include version, e.g. [DBI_EN] -> [DBI_EN - 864].

    Only sections whose base name (the part before ' - ') appears in *versions*
    are rewritten; every other line passes through untouched.
    """
    header_re = re.compile(r'^\[([^\]]+)\]\s*$')
    rewritten: List[str] = []
    with open(ini_path, 'r', encoding='utf-8') as src:
        for raw_line in src:
            match = header_re.match(raw_line)
            if match is not None:
                # Strip any existing " - <version>" suffix before matching.
                base_name = match.group(1).split(' - ', 1)[0].strip()
                if base_name in versions:
                    raw_line = f"[{base_name} - {versions[base_name]}]\n"
            rewritten.append(raw_line)
    with open(ini_path, 'w', encoding='utf-8') as dst:
        dst.writelines(rewritten)
    print(f" Updated section versions in {ini_path.name}")
def _clean_tag(tag: str) -> str:
"""Normalize version string for display (strip v, truncate if needed)."""
clean = tag.lstrip('v')
if len(clean) > 30:
if '-' in clean:
parts = clean.split('-')
if len(parts) > 1 and len(parts[-1]) > 20:
clean = f"{parts[-2]}-{parts[-1][:7]}" if len(parts) > 1 else parts[-1][:7]
else:
clean = clean[:30]
else:
clean = clean[:30]
return clean
def fetch_versions_and_update_ini(category: str, ini_path: Path) -> Optional[Dict[str, Any]]:
"""Parse INI, fetch latest release tag per entry, update section headers in place. Returns result stats."""
entries = parse_ini_file(ini_path, strip_version_suffix=True)
if not entries:
return None
print(f"\n{category} ({ini_path.name})...")
print(f"Found {len(entries)} entries")
# Create config parser
config = configparser.ConfigParser()
config.optionxform = str # Preserve case
# Create section based on category
if category == 'sysmodules':
section_name = 'Versions'
elif category == 'overlays':
section_name = 'Versions'
elif category == 'apps':
section_name = 'Versions'
elif category == 'emulation':
section_name = 'Versions'
else:
section_name = 'Release Info'
config.add_section(section_name)
# Monitoring statistics
versions_dict: Dict[str, str] = {}
success_count = 0
failure_count = 0
failed_entries = []
# Fetch tags for each entry
failed_entries: List[str] = []
for i, entry in enumerate(entries):
# Add delay between requests to avoid rate limiting (except for first request)
if i > 0:
time.sleep(0.5) # 500ms delay between requests
time.sleep(0.5)
source = entry.get('source', 'github')
print(f" Fetching {entry['name']} ({entry['owner']}/{entry['repo']}) [{source}]...", end=' ')
tag = get_latest_tag(entry)
if tag:
# Remove 'v' prefix if present for cleaner version
clean_tag = tag.lstrip('v')
# Truncate very long version strings (e.g., commit hashes) to max 30 chars for Switch display
if len(clean_tag) > 30:
# Try to extract meaningful part (e.g., commit hash from "weekly-canary-release-25f89d3...")
if '-' in clean_tag:
parts = clean_tag.split('-')
# If it looks like a commit hash at the end, take last part and truncate to 7 chars
if len(parts) > 1 and len(parts[-1]) > 20:
clean_tag = f"{parts[-2]}-{parts[-1][:7]}" if len(parts) > 1 else parts[-1][:7]
else:
clean_tag = clean_tag[:30]
else:
clean_tag = clean_tag[:30]
config.set(section_name, entry['name'], clean_tag)
clean_tag = _clean_tag(tag)
versions_dict[entry['name']] = clean_tag
success_count += 1
print(f"{clean_tag}")
else:
failure_count += 1
failed_entries.append(f"{entry['name']} ({entry['owner']}/{entry['repo']})")
print("✗ Failed")
# Write to file
with open(output_path, 'w', encoding='utf-8') as f:
config.write(f, space_around_delimiters=False)
# Print monitoring summary
print(f"\n✓ Created {output_path}")
if versions_dict:
update_ini_section_versions(ini_path, versions_dict)
print(f" Success: {success_count}/{len(entries)}")
if failure_count > 0:
print(f" Failed: {failure_count}/{len(entries)}")
for failed in failed_entries:
print(f" - {failed}")
return {
'category': category,
'total': len(entries),
@@ -215,70 +201,41 @@ def generate_release_ini(category: str, entries: List[Dict[str, Any]], output_pa
'failed_entries': failed_entries
}
def main():
"""Main function."""
base_path = Path(__file__).parent
include_path = base_path / "include"
print("GitHub / Gitea Release Tag Fetcher")
print("GitHub / Gitea update main INI section versions")
if GITHUB_TOKEN:
print("✓ Using GitHub token (higher rate limit)")
else:
print("⚠ No GitHub token found. Set GITHUB_TOKEN env var for higher rate limits.")
print("⚠ No GitHub token. Set GITHUB_TOKEN for higher rate limits.")
print("=" * 50)
# Track all results for final summary
all_results = []
# Process sysmodules
sysmodules_path = include_path / "sysmodules" / "sysmodules.ini"
if sysmodules_path.exists():
entries = parse_ini_file(sysmodules_path)
if entries:
output_path = include_path / "sysmodules" / "RELEASE_SM.ini"
result = generate_release_ini('sysmodules', entries, output_path)
all_results.append(result)
# Process overlays
overlays_path = include_path / "overlays" / "overlays.ini"
if overlays_path.exists():
entries = parse_ini_file(overlays_path)
if entries:
output_path = include_path / "overlays" / "RELEASE_OV.ini"
result = generate_release_ini('overlays', entries, output_path)
all_results.append(result)
# Process apps
apps_path = include_path / "apps" / "apps.ini"
if apps_path.exists():
entries = parse_ini_file(apps_path)
if entries:
output_path = include_path / "apps" / "RELEASE_APPS.ini"
result = generate_release_ini('apps', entries, output_path)
all_results.append(result)
# Process emulation
emulation_path = include_path / "emulation" / "emulation.ini"
if emulation_path.exists():
entries = parse_ini_file(emulation_path)
if entries:
output_path = include_path / "emulation" / "RELEASE_EM.ini"
result = generate_release_ini('emulation', entries, output_path)
all_results.append(result)
# Print final monitoring summary
all_results: List[Dict[str, Any]] = []
for category, rel_path in [
('sysmodules', 'sysmodules/sysmodules.ini'),
('overlays', 'overlays/overlays.ini'),
('apps', 'apps/apps.ini'),
('emulation', 'emulation/emulation.ini'),
]:
ini_path = include_path / rel_path
if ini_path.exists():
result = fetch_versions_and_update_ini(category, ini_path)
if result:
all_results.append(result)
print("\n" + "=" * 50)
print("FINAL SUMMARY")
print("=" * 50)
total_entries = sum(r['total'] for r in all_results)
total_success = sum(r['success'] for r in all_results)
total_failed = sum(r['failed'] for r in all_results)
print(f"Total entries processed: {total_entries}")
print(f"Successfully fetched: {total_success} ({total_success/total_entries*100:.1f}%)" if total_entries > 0 else "Successfully fetched: 0")
print(f"Failed: {total_failed} ({total_failed/total_entries*100:.1f}%)" if total_entries > 0 else "Failed: 0")
# List all failed entries by category
if total_entries > 0:
print(f"Successfully fetched: {total_success} ({total_success/total_entries*100:.1f}%)")
print(f"Failed: {total_failed} ({total_failed/total_entries*100:.1f}%)")
else:
print("Successfully fetched: 0")
if total_failed > 0:
print("\nFailed entries by category:")
for result in all_results:
@@ -286,10 +243,8 @@ def main():
print(f" {result['category']}:")
for failed in result['failed_entries']:
print(f" - {failed}")
print("\n" + "=" * 50)
print("Done!")
if __name__ == "__main__":
main()