From fd8498c6c353cf1ddaf92de05f8b308014e16515 Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Wed, 3 Dec 2025 17:02:10 +1000 Subject: [PATCH 01/17] Update SpawnerSystem.cs --- .../Spawners/EntitySystems/SpawnerSystem.cs | 67 ++++++++++++++++++- 1 file changed, 65 insertions(+), 2 deletions(-) diff --git a/Content.Server/Spawners/EntitySystems/SpawnerSystem.cs b/Content.Server/Spawners/EntitySystems/SpawnerSystem.cs index 6e0b0f384d0a..4c56af30d6ca 100644 --- a/Content.Server/Spawners/EntitySystems/SpawnerSystem.cs +++ b/Content.Server/Spawners/EntitySystems/SpawnerSystem.cs @@ -1,6 +1,7 @@ using Content.Server.Spawners.Components; using Robust.Shared.Random; using Robust.Shared.Timing; +using Content.Shared.Mobs.Components; namespace Content.Server.Spawners.EntitySystems; @@ -43,13 +44,75 @@ private void OnTimerFired(EntityUid uid, TimedSpawnerComponent component) if (!_random.Prob(component.Chance)) return; - var number = _random.Next(component.MinimumEntitiesSpawned, component.MaximumEntitiesSpawned); + // Forge-Change start + CleanupSpawnedEntities(uid, component); + + var aliveEntitiesCount = CountAliveEntities(component); + + if (aliveEntitiesCount >= component.MaximumEntitiesPerGrid) + return; + + var maxAllowedEntities = component.MaximumEntitiesPerGrid - aliveEntitiesCount; + var maxToSpawn = Math.Min(component.MaximumEntitiesSpawned, maxAllowedEntities); + + if (maxToSpawn < component.MinimumEntitiesSpawned) + return; + + var number = _random.Next(component.MinimumEntitiesSpawned, maxToSpawn); + // Forge-Change end var coordinates = Transform(uid).Coordinates; for (var i = 0; i < number; i++) { var entity = _random.Pick(component.Prototypes); - SpawnAtPosition(entity, coordinates); + var spawned = SpawnAtPosition(entity, coordinates); // Forge-Change + component.SpawnedEntities.Add(spawned); // Forge-Change + } + } + + // Forge-Change start + private void CleanupSpawnedEntities(EntityUid spawnerUid, TimedSpawnerComponent component) + { + var toRemove = 
new List(); + + foreach (var entityUid in component.SpawnedEntities) + { + if (!Exists(entityUid)) + { + toRemove.Add(entityUid); + } + } + + foreach (var entityUid in toRemove) + { + component.SpawnedEntities.Remove(entityUid); + } + } + + private int CountAliveEntities(TimedSpawnerComponent component) + { + var count = 0; + + foreach (var entityUid in component.SpawnedEntities) + { + if (!Exists(entityUid)) + continue; + + if (TryComp(entityUid, out var mobState)) + { + if (mobState.CurrentState == MobState.Alive || + mobState.CurrentState == MobState.Critical) + { + count++; + } + } + else + { + count++; + } } + + return count; } + // Forge-Change end } From 7aca3d0424e3cce3fbacc3f280c01a8b0da419cc Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Sun, 4 Jan 2026 19:41:25 +1000 Subject: [PATCH 02/17] Add advanced_publish.py for parallel file publishing This script implements an advanced publishing mechanism with parallel uploads and command-line arguments for configuration. --- Tools/_Forge/Publish/advanced_publish.py | 168 +++++++++++++++++++++++ 1 file changed, 168 insertions(+) create mode 100644 Tools/_Forge/Publish/advanced_publish.py diff --git a/Tools/_Forge/Publish/advanced_publish.py b/Tools/_Forge/Publish/advanced_publish.py new file mode 100644 index 000000000000..3e8b789fb4c0 --- /dev/null +++ b/Tools/_Forge/Publish/advanced_publish.py @@ -0,0 +1,168 @@ +""" +Продвинутый паблиш с параллельной загрузкой и аргументами +Github: FireFoxPhoenix +""" + +#!/usr/bin/env python3 + +import argparse +import requests +import os +import subprocess +import threading +import logging +import sys +from typing import Iterable +from concurrent.futures import ThreadPoolExecutor, as_completed + +thread_session = threading.local() +logger = logging.getLogger(__name__) + +# +# CONFIGURATION PARAMETERS +# Forks should change these to publish to their own infrastructure. 
+# +ROBUST_CDN_URL = "https://cdn.corvaxforge.ru/" # TODO: сделать через arguments + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--fork-id", required=True) + parser.add_argument("--publish-token") + parser.add_argument("--max-workers", type=int, default=4) + parser.add_argument("--release_dir", default="release") + + args = parser.parse_args() + fork_id = args.fork_id + publish_token = args.publish_token + max_workers = args.max_workers + release_dir = args.release_dir + + if fork_id == "" or fork_id == None: + logger.critical("Fork id was not entered") + raise KeyError() + + if publish_token not in os.environ: + logger.critical("Publish token not found") + sys.exit(1) + publish_token = os.environ[publish_token] + if not publish_token: + logger.critical(f"Publish token is empty") + sys.exit(1) + + if "GITHUB_SHA" not in os.environ: # TODO: сделать через argument + logger.critical("GITHUB_SHA environment variable not set") + sys.exit(1) + version = os.environ["GITHUB_SHA"] + logger.info(f"Starting publish on Robust.Cdn for version {version}") + + session = create_session(publish_token) + data = { + "version": version, + "engineVersion": get_engine_version(), + } + headers = { + "Content-Type": "application/json" + } + resp = session.post(f"{ROBUST_CDN_URL}fork/{fork_id}/publish/start", json=data, headers=headers) + resp.raise_for_status() + logger.info("Publish successfully started, adding files...") + + files = list(get_files_to_publish(release_dir)) + if not files: + logger.warning("No files found to publish") + return + + logger.info(f"Uploading {len(files)} files using {max_workers} parallel workers...") + successful = 0 + failed = 0 + with ThreadPoolExecutor(max_workers=max_workers) as executor: + future_files = { + executor.submit(upload_file, file, fork_id, publish_token, version): file for file in files + } + for future in as_completed(future_files): + file_path = future_files[future] + try: + result = future.result() + 
successful += 1 + # logger.info(f"Successfully published {os.path.basename(file_path)} ({successful}/{len(files)}") + except Exception as e: + failed += 1 + logger.warning(f"Failed to publish {os.path.basename(file_path)}: {e}") + if failed > 0: + logger.warning(f"Upload completed with {failed} failures") + # sys.exit(1) + else: + logger.info(f"All {successful} files uploaded successfully") + + logger.info("Finishing publish...") + data = { + "version": version + } + headers = { + "Content-Type": "application/json" + } + resp = session.post(f"{ROBUST_CDN_URL}fork/{fork_id}/publish/finish", json=data, headers=headers) + resp.raise_for_status() + + logger.info("SUCCESS!") + + +def get_files_to_publish(release_dir: str) -> Iterable[str]: + try: + with os.scandir(release_dir) as d: + for entry in d: + if entry.is_file(): + yield entry.path + except FileNotFoundError: + logger.error(f"Release directory '{release_dir}' not found") + return [] + except PermissionError: + logger.error(f"No permission to read directory '{release_dir}'") + return [] + + +def get_engine_version() -> str: + try: + proc = subprocess.run(["git", "describe","--tags", "--abbrev=0"], stdout=subprocess.PIPE, cwd="RobustToolbox", check=True, encoding="UTF-8") + tag = proc.stdout.strip() + if not tag.startswith("v"): + logger.warning(f"Unexpected tag format: {tag}") + return tag + return tag[1:] + except subprocess.CalledProcessError as e: + logger.error(f"Failed to get engine version: {e.stderr}") + raise + except FileNotFoundError: + logger.error("RobustToolbox directory not found") + raise + +def upload_file(file_path: str, fork_id: str, publish_token: str, version: str): + if not hasattr(thread_session, "session"): + thread_session.session = create_session(publish_token) + session = thread_session.session + with open(file_path, "rb") as file: + headers = { + "Content-Type": "application/octet-stream", + "Robust-Cdn-Publish-File": os.path.basename(file_path), + "Robust-Cdn-Publish-Version": version 
+ } + resp = session.post(f"{ROBUST_CDN_URL}fork/{fork_id}/publish/file", data=file, headers=headers) + resp.raise_for_status() + return file_path + +def create_session(publish_token: str) -> requests.Session: + session = requests.Session() + adapter = requests.adapters.HTTPAdapter( + pool_connections=10, + pool_maxsize=10, + max_retries=3 + ) + session.mount("https://", adapter) + session.mount("http://", adapter) + session.headers = { + "Authorization": f"Bearer {publish_token}", + } + return session + +if __name__ == '__main__': + main() From bcaa450c20b8e9c47cdd319c04ab90b3180a0a67 Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Sun, 4 Jan 2026 19:46:51 +1000 Subject: [PATCH 03/17] Update publish.yml --- .github/workflows/publish.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 0f984ec1becc..cf0642f610ce 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -44,10 +44,10 @@ jobs: run: dotnet run --project Content.Packaging client --no-wipe-release - name: Publish version - run: Tools/publish_multi_request.py + run: python3 Tools/_Forge/Publish/advanced_publish.py --fork-id frontier --publish-token PUBLISH_TOKEN env: PUBLISH_TOKEN: ${{ secrets.PUBLISH_TOKEN }} - GITHUB_REPOSITORY: ${{ vars.GITHUB_REPOSITORY }} + GITHUB_REPOSITORY: ${{ vars.GITHUB_REPOSITORY }} # почему не GITHUB_SHA # - name: Publish changelog (Discord) # continue-on-error: true From cf6276a7008fc2001a05201837075b6530a240d1 Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Sun, 4 Jan 2026 19:49:18 +1000 Subject: [PATCH 04/17] Update publish_mapping.yml --- .github/workflows/publish_mapping.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish_mapping.yml b/.github/workflows/publish_mapping.yml index d588cb6da297..e75063a9a6a6 100644 --- a/.github/workflows/publish_mapping.yml +++ b/.github/workflows/publish_mapping.yml @@ -38,7 
+38,7 @@ jobs: run: dotnet run --project Content.Packaging client --no-wipe-release - name: Publish version - run: Tools/publish_multi_request_mapping.py + run: python3 Tools/_Forge/Publish/advanced_publish.py --fork-id frontier_mapping --publish-token PUBLISH_MAPPING_TOKEN env: PUBLISH_MAPPING_TOKEN: ${{ secrets.PUBLISH_MAPPING_TOKEN }} FORK_ID_MAPPING: ${{ vars.FORK_ID_MAPPING }} From 7891f855cd687df49ca993ba5ed1c0bb1195fdad Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Sun, 4 Jan 2026 20:20:54 +1000 Subject: [PATCH 05/17] Refactor advanced_publish.py for better session handling --- Tools/_Forge/Publish/advanced_publish.py | 36 +++++++++++++----------- 1 file changed, 19 insertions(+), 17 deletions(-) diff --git a/Tools/_Forge/Publish/advanced_publish.py b/Tools/_Forge/Publish/advanced_publish.py index 3e8b789fb4c0..f83e8a1472d0 100644 --- a/Tools/_Forge/Publish/advanced_publish.py +++ b/Tools/_Forge/Publish/advanced_publish.py @@ -5,6 +5,8 @@ #!/usr/bin/env python3 +#!/usr/bin/env python3 + import argparse import requests import os @@ -22,7 +24,7 @@ # CONFIGURATION PARAMETERS # Forks should change these to publish to their own infrastructure. 
# -ROBUST_CDN_URL = "https://cdn.corvaxforge.ru/" # TODO: сделать через arguments +ROBUST_CDN_URL = "https://cdn.corvaxforge.ru/" def main(): parser = argparse.ArgumentParser() @@ -49,13 +51,13 @@ def main(): logger.critical(f"Publish token is empty") sys.exit(1) - if "GITHUB_SHA" not in os.environ: # TODO: сделать через argument - logger.critical("GITHUB_SHA environment variable not set") - sys.exit(1) + #if "GITHUB_SHA" not in os.environ: # TODO: сделать через argument + # logger.critical("GITHUB_SHA environment variable not set") + # sys.exit(1) version = os.environ["GITHUB_SHA"] logger.info(f"Starting publish on Robust.Cdn for version {version}") - session = create_session(publish_token) + session = create_session(publish_token, max_workers=max_workers) data = { "version": version, "engineVersion": get_engine_version(), @@ -70,14 +72,13 @@ def main(): files = list(get_files_to_publish(release_dir)) if not files: logger.warning("No files found to publish") - return logger.info(f"Uploading {len(files)} files using {max_workers} parallel workers...") successful = 0 failed = 0 with ThreadPoolExecutor(max_workers=max_workers) as executor: future_files = { - executor.submit(upload_file, file, fork_id, publish_token, version): file for file in files + executor.submit(upload_file, str(file), fork_id, publish_token, version, max_workers): file for file in files } for future in as_completed(future_files): file_path = future_files[future] @@ -88,7 +89,7 @@ def main(): except Exception as e: failed += 1 logger.warning(f"Failed to publish {os.path.basename(file_path)}: {e}") - if failed > 0: + if failed: logger.warning(f"Upload completed with {failed} failures") # sys.exit(1) else: @@ -104,7 +105,7 @@ def main(): resp = session.post(f"{ROBUST_CDN_URL}fork/{fork_id}/publish/finish", json=data, headers=headers) resp.raise_for_status() - logger.info("SUCCESS!") + logger.info("Publish completed") def get_files_to_publish(release_dir: str) -> Iterable[str]: @@ -130,15 +131,16 @@ 
def get_engine_version() -> str: return tag return tag[1:] except subprocess.CalledProcessError as e: - logger.error(f"Failed to get engine version: {e.stderr}") - raise + stderr = (e.stderr or "").strip() + logger.error(f"Failed to get engine version: {stderr}") + return "unknown" except FileNotFoundError: logger.error("RobustToolbox directory not found") - raise + return "unknown" -def upload_file(file_path: str, fork_id: str, publish_token: str, version: str): +def upload_file(file_path: str, fork_id: str, publish_token: str, version: str, max_workers: int): if not hasattr(thread_session, "session"): - thread_session.session = create_session(publish_token) + thread_session.session = create_session(publish_token, max_workers) session = thread_session.session with open(file_path, "rb") as file: headers = { @@ -150,11 +152,11 @@ def upload_file(file_path: str, fork_id: str, publish_token: str, version: str): resp.raise_for_status() return file_path -def create_session(publish_token: str) -> requests.Session: +def create_session(publish_token: str, max_workers: int) -> requests.Session: session = requests.Session() adapter = requests.adapters.HTTPAdapter( - pool_connections=10, - pool_maxsize=10, + pool_connections=max(10, max_workers * 2), + pool_maxsize=max(10, max_workers * 2), max_retries=3 ) session.mount("https://", adapter) From 8e03362ed0233a4131b29f08ff5b5d581e5e55a5 Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Sun, 4 Jan 2026 20:21:11 +1000 Subject: [PATCH 06/17] Remove duplicate shebang line Removed duplicate shebang line from advanced_publish.py --- Tools/_Forge/Publish/advanced_publish.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/Tools/_Forge/Publish/advanced_publish.py b/Tools/_Forge/Publish/advanced_publish.py index f83e8a1472d0..0d985d84088a 100644 --- a/Tools/_Forge/Publish/advanced_publish.py +++ b/Tools/_Forge/Publish/advanced_publish.py @@ -5,8 +5,6 @@ #!/usr/bin/env python3 -#!/usr/bin/env python3 - import argparse import 
requests import os From 35a18c5ea50a3c345370aec2cbbd2ea460e52f10 Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Sun, 4 Jan 2026 20:28:39 +1000 Subject: [PATCH 07/17] Change pool_connections to a fixed value of 5 --- Tools/_Forge/Publish/advanced_publish.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Tools/_Forge/Publish/advanced_publish.py b/Tools/_Forge/Publish/advanced_publish.py index 0d985d84088a..6d004879a566 100644 --- a/Tools/_Forge/Publish/advanced_publish.py +++ b/Tools/_Forge/Publish/advanced_publish.py @@ -153,7 +153,7 @@ def upload_file(file_path: str, fork_id: str, publish_token: str, version: str, def create_session(publish_token: str, max_workers: int) -> requests.Session: session = requests.Session() adapter = requests.adapters.HTTPAdapter( - pool_connections=max(10, max_workers * 2), + pool_connections=5, pool_maxsize=max(10, max_workers * 2), max_retries=3 ) From fde559eefcd9dea6bc5f2633a11e72ceb13cc9fa Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Sun, 4 Jan 2026 20:41:42 +1000 Subject: [PATCH 08/17] Update SpawnerSystem.cs --- .../Spawners/EntitySystems/SpawnerSystem.cs | 71 ++++++++++++------- 1 file changed, 45 insertions(+), 26 deletions(-) diff --git a/Content.Server/Spawners/EntitySystems/SpawnerSystem.cs b/Content.Server/Spawners/EntitySystems/SpawnerSystem.cs index 4c56af30d6ca..56eeaf4c7f4a 100644 --- a/Content.Server/Spawners/EntitySystems/SpawnerSystem.cs +++ b/Content.Server/Spawners/EntitySystems/SpawnerSystem.cs @@ -1,7 +1,8 @@ using Content.Server.Spawners.Components; using Robust.Shared.Random; using Robust.Shared.Timing; -using Content.Shared.Mobs.Components; +using Content.Shared.Mobs.Components; // Forge-Change +using Content.Shared.Mobs; // Forge-Change namespace Content.Server.Spawners.EntitySystems; @@ -45,36 +46,51 @@ private void OnTimerFired(EntityUid uid, TimedSpawnerComponent component) return; // Forge-Change start - CleanupSpawnedEntities(uid, component); - - var aliveEntitiesCount = 
CountAliveEntities(component); - - if (aliveEntitiesCount >= component.MaximumEntitiesPerGrid) - return; - - var maxAllowedEntities = component.MaximumEntitiesPerGrid - aliveEntitiesCount; - var maxToSpawn = Math.Min(component.MaximumEntitiesSpawned, maxAllowedEntities); - - if (maxToSpawn < component.MinimumEntitiesSpawned) - return; + if (component.MaximumEntitiesPerGrid > 0) + { + CleanupSpawnedEntities(uid, component); - var number = _random.Next(component.MinimumEntitiesSpawned, maxToSpawn); - // Forge-Change end - var coordinates = Transform(uid).Coordinates; + var aliveEntitiesCount = CountAliveEntities(component); + + if (aliveEntitiesCount >= component.MaximumEntitiesPerGrid) + return; + + var maxAllowedEntities = component.MaximumEntitiesPerGrid - aliveEntitiesCount; + var maxToSpawn = Math.Min(component.MaximumEntitiesSpawned, maxAllowedEntities); - for (var i = 0; i < number; i++) + if (maxToSpawn < component.MinimumEntitiesSpawned) + return; + + var number = _random.Next(component.MinimumEntitiesSpawned, maxToSpawn); + + var coordinates = Transform(uid).Coordinates; + + for (var i = 0; i < number; i++) + { + var entity = _random.Pick(component.Prototypes); + var spawned = SpawnAtPosition(entity, coordinates); + component.SpawnedEntities.Add(spawned); + } + } + else { - var entity = _random.Pick(component.Prototypes); - var spawned = SpawnAtPosition(entity, coordinates); // Forge-Change - component.SpawnedEntities.Add(spawned); // Forge-Change + var number = _random.Next(component.MinimumEntitiesSpawned, component.MaximumEntitiesSpawned); + var coordinates = Transform(uid).Coordinates; + + for (var i = 0; i < number; i++) + { + var entity = _random.Pick(component.Prototypes); + SpawnAtPosition(entity, coordinates); + } } + // Forge-Change end } // Forge-Change start private void CleanupSpawnedEntities(EntityUid spawnerUid, TimedSpawnerComponent component) { var toRemove = new List(); - + foreach (var entityUid in component.SpawnedEntities) { if 
(!Exists(entityUid)) @@ -82,7 +98,7 @@ private void CleanupSpawnedEntities(EntityUid spawnerUid, TimedSpawnerComponent toRemove.Add(entityUid); } } - + foreach (var entityUid in toRemove) { component.SpawnedEntities.Remove(entityUid); @@ -91,16 +107,19 @@ private void CleanupSpawnedEntities(EntityUid spawnerUid, TimedSpawnerComponent private int CountAliveEntities(TimedSpawnerComponent component) { + if (component.MaximumEntitiesPerGrid <= 0) + return 0; + var count = 0; - + foreach (var entityUid in component.SpawnedEntities) { if (!Exists(entityUid)) continue; - + if (TryComp(entityUid, out var mobState)) { - if (mobState.CurrentState == MobState.Alive || + if (mobState.CurrentState == MobState.Alive || mobState.CurrentState == MobState.Critical) { count++; @@ -111,7 +130,7 @@ private int CountAliveEntities(TimedSpawnerComponent component) count++; } } - + return count; } // Forge-Change end From f2e9a608723e086d3219e673701facce2f379fcf Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Sun, 4 Jan 2026 21:14:19 +1000 Subject: [PATCH 09/17] Update publish.yml --- .github/workflows/publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 5dd7094f3a89..ba85ba777ee2 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -48,7 +48,7 @@ jobs: run: python3 Tools/_Forge/Publish/advanced_publish.py --fork-id frontier --publish-token PUBLISH_TOKEN env: PUBLISH_TOKEN: ${{ secrets.PUBLISH_TOKEN }} - GITHUB_REPOSITORY: ${{ vars.GITHUB_REPOSITORY }} # почему не GITHUB_SHA + GITHUB_REPOSITORY: ${{ vars.GITHUB_REPOSITORY }} # - name: Publish changelog (Discord) # continue-on-error: true From de029e659fa248f8a391eb09f2fb4fd058b68a99 Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Mon, 5 Jan 2026 00:49:08 +1000 Subject: [PATCH 10/17] Refactor file retrieval and enhance error handling --- Tools/_Forge/Publish/advanced_publish.py | 14 ++++++++------ 1 file 
changed, 8 insertions(+), 6 deletions(-) diff --git a/Tools/_Forge/Publish/advanced_publish.py b/Tools/_Forge/Publish/advanced_publish.py index 6d004879a566..2e35a87841be 100644 --- a/Tools/_Forge/Publish/advanced_publish.py +++ b/Tools/_Forge/Publish/advanced_publish.py @@ -108,10 +108,9 @@ def main(): def get_files_to_publish(release_dir: str) -> Iterable[str]: try: - with os.scandir(release_dir) as d: - for entry in d: - if entry.is_file(): - yield entry.path + for root, dirs, files in os.walk(release_dir): + for file in files: + yield os.path.join(root, file) except FileNotFoundError: logger.error(f"Release directory '{release_dir}' not found") return [] @@ -122,7 +121,7 @@ def get_files_to_publish(release_dir: str) -> Iterable[str]: def get_engine_version() -> str: try: - proc = subprocess.run(["git", "describe","--tags", "--abbrev=0"], stdout=subprocess.PIPE, cwd="RobustToolbox", check=True, encoding="UTF-8") + proc = subprocess.run(["git", "describe","--tags", "--abbrev=0"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd="RobustToolbox", check=True, encoding="UTF-8", timeout=20) tag = proc.stdout.strip() if not tag.startswith("v"): logger.warning(f"Unexpected tag format: {tag}") @@ -130,11 +129,14 @@ def get_engine_version() -> str: return tag[1:] except subprocess.CalledProcessError as e: stderr = (e.stderr or "").strip() - logger.error(f"Failed to get engine version: {stderr}") + logger.error(f"Failed to get engine version: {stderr[:300]}") return "unknown" except FileNotFoundError: logger.error("RobustToolbox directory not found") return "unknown" + except subprocess.TimeoutExpired: + logger.error("Git command timed out") + return "unknown" def upload_file(file_path: str, fork_id: str, publish_token: str, version: str, max_workers: int): if not hasattr(thread_session, "session"): From 610c0df7fddcecacb2abc027d2bf31fcf80efe39 Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Mon, 5 Jan 2026 12:59:28 +1000 Subject: [PATCH 11/17] Update 
advanced_publish.py --- Tools/_Forge/Publish/advanced_publish.py | 41 +++++++++++------------- 1 file changed, 19 insertions(+), 22 deletions(-) diff --git a/Tools/_Forge/Publish/advanced_publish.py b/Tools/_Forge/Publish/advanced_publish.py index 2e35a87841be..a0fe1d119965 100644 --- a/Tools/_Forge/Publish/advanced_publish.py +++ b/Tools/_Forge/Publish/advanced_publish.py @@ -29,12 +29,18 @@ def main(): parser.add_argument("--fork-id", required=True) parser.add_argument("--publish-token") parser.add_argument("--max-workers", type=int, default=4) + parser.add_argument("--pool-connections", type=int, default=3) + parser.add_argument("--pool-maxsize", type=int, default=10) + parser.add_argument("--max-retries", type=int, default=3) parser.add_argument("--release_dir", default="release") args = parser.parse_args() fork_id = args.fork_id publish_token = args.publish_token max_workers = args.max_workers + pool_connections = args.pool_connections + pool_maxsize = args.pool_maxsize + max_retries = args.max_retries release_dir = args.release_dir if fork_id == "" or fork_id == None: @@ -49,20 +55,18 @@ def main(): logger.critical(f"Publish token is empty") sys.exit(1) - #if "GITHUB_SHA" not in os.environ: # TODO: сделать через argument + #if "GITHUB_SHA" not in os.environ: # logger.critical("GITHUB_SHA environment variable not set") # sys.exit(1) version = os.environ["GITHUB_SHA"] logger.info(f"Starting publish on Robust.Cdn for version {version}") - session = create_session(publish_token, max_workers=max_workers) + session = create_session(publish_token, pool_connections, pool_maxsize, max_retries) data = { "version": version, "engineVersion": get_engine_version(), } - headers = { - "Content-Type": "application/json" - } + headers = { "Content-Type": "application/json" } resp = session.post(f"{ROBUST_CDN_URL}fork/{fork_id}/publish/start", json=data, headers=headers) resp.raise_for_status() logger.info("Publish successfully started, adding files...") @@ -76,7 +80,7 @@ def 
main(): failed = 0 with ThreadPoolExecutor(max_workers=max_workers) as executor: future_files = { - executor.submit(upload_file, str(file), fork_id, publish_token, version, max_workers): file for file in files + executor.submit(upload_file, str(file), fork_id, publish_token, pool_connections, pool_maxsize, max_retries, version): file for file in files } for future in as_completed(future_files): file_path = future_files[future] @@ -94,15 +98,10 @@ def main(): logger.info(f"All {successful} files uploaded successfully") logger.info("Finishing publish...") - data = { - "version": version - } - headers = { - "Content-Type": "application/json" - } + data = { "version": version } + headers = { "Content-Type": "application/json" } resp = session.post(f"{ROBUST_CDN_URL}fork/{fork_id}/publish/finish", json=data, headers=headers) resp.raise_for_status() - logger.info("Publish completed") @@ -138,9 +137,9 @@ def get_engine_version() -> str: logger.error("Git command timed out") return "unknown" -def upload_file(file_path: str, fork_id: str, publish_token: str, version: str, max_workers: int): +def upload_file(file_path: str, fork_id: str, publish_token: str, pool_connections: int, pool_maxsize: int, max_retries: int, version: str): if not hasattr(thread_session, "session"): - thread_session.session = create_session(publish_token, max_workers) + thread_session.session = create_session(publish_token, pool_connections, pool_maxsize, max_retries) session = thread_session.session with open(file_path, "rb") as file: headers = { @@ -152,18 +151,16 @@ def upload_file(file_path: str, fork_id: str, publish_token: str, version: str, resp.raise_for_status() return file_path -def create_session(publish_token: str, max_workers: int) -> requests.Session: +def create_session(publish_token: str, pool_connections: int, pool_maxsize: int, max_retries: int) -> requests.Session: session = requests.Session() adapter = requests.adapters.HTTPAdapter( - pool_connections=5, - pool_maxsize=max(10, 
max_workers * 2), - max_retries=3 + pool_connections=pool_connections, + pool_maxsize=pool_maxsize, + max_retries=max_retries ) session.mount("https://", adapter) session.mount("http://", adapter) - session.headers = { - "Authorization": f"Bearer {publish_token}", - } + session.headers = { "Authorization": f"Bearer {publish_token}" } return session if __name__ == '__main__': From d396556ff58250e5face01a2746a5573d9b4c598 Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Mon, 5 Jan 2026 13:04:23 +1000 Subject: [PATCH 12/17] Create calculate_optimal_settings.py --- .../Publish/calculate_optimal_settings.py | 190 ++++++++++++++++++ 1 file changed, 190 insertions(+) create mode 100644 Tools/_Forge/Publish/calculate_optimal_settings.py diff --git a/Tools/_Forge/Publish/calculate_optimal_settings.py b/Tools/_Forge/Publish/calculate_optimal_settings.py new file mode 100644 index 000000000000..2903d781645e --- /dev/null +++ b/Tools/_Forge/Publish/calculate_optimal_settings.py @@ -0,0 +1,190 @@ +""" +Считает оптимальные настройки для максимизации скорости паблиша +Github: FireFoxPhoenix +""" + +#!/usr/bin/env python3 + +import argparse +import os +import time +import requests +import statistics +from pathlib import Path +import socket +import json + +def measure_network_speed(url: str) -> float: + try: + test_file = os.urandom(1024 * 1024) + start = time.time() + response = requests.post(f"{url}fork/test/publish/file", data=test_file, headers={"Content-Type": "application/octet-stream"}, timeout=5) + elapsed = time.time() - start + if response.status_code < 500: + speed_mbps = (1 * 8) / elapsed + return speed_mbps + except: + pass + return 100.0 + +def measure_server_latency(url: str) -> float: + try: + times = [] + for _ in range(3): + start = time.perf_counter() + requests.get(f"{url}fork/test/publish/start", timeout=3) + elapsed = (time.perf_counter() - start) * 1000 + times.append(elapsed) + return statistics.median(times) + except: + return 100.0 + +def 
def analyze_files(files_dir: str):
    """Walk *files_dir* recursively and gather file-size statistics.

    Returns a 4-tuple ``(file_count, avg_size_mb, median_size_mb, size_distribution)``
    where *size_distribution* maps bucket names ('tiny' .. 'huge') to file counts.
    Unreadable or vanished files are skipped.  When no files are found, returns
    ``(0, 0.0, 0.0, {})``.
    """
    total_size = 0
    file_count = 0
    sizes = []
    for root, _dirs, files in os.walk(files_dir):
        for name in files:
            filepath = os.path.join(root, name)
            try:
                size = os.path.getsize(filepath)
            except OSError:
                # Was a bare ``except:`` (which also swallowed KeyboardInterrupt);
                # only file-system errors are expected here.
                continue
            total_size += size
            sizes.append(size)
            file_count += 1

    if file_count == 0:
        # Was ``return 0, 0.0, 0.0, []`` -- an empty dict keeps the fourth
        # element's type consistent with the populated case.
        return 0, 0.0, 0.0, {}

    KB = 1024
    MB = 1024 * 1024
    avg_size = total_size / file_count / MB
    median_size = statistics.median(sizes) / MB
    size_distribution = {
        'tiny': sum(1 for s in sizes if s < 100 * KB),
        'small': sum(1 for s in sizes if 100 * KB <= s < 1 * MB),
        'medium': sum(1 for s in sizes if 1 * MB <= s < 10 * MB),
        'large': sum(1 for s in sizes if 10 * MB <= s < 100 * MB),
        'huge': sum(1 for s in sizes if s >= 100 * MB),
    }
    return file_count, avg_size, median_size, size_distribution


def calculate_optimal_settings(file_count, avg_size_mb, network_speed_mbps, latency_ms):
    """Derive recommended upload-tuning parameters from file statistics and
    measured network quality.

    Returns a dict with keys ``max_workers``, ``pool_connections``,
    ``pool_maxsize``, ``estimated_time_minutes``, ``speedup`` and
    ``bandwidth_per_thread_mbps``.
    """
    base_threads = min(file_count, 16)

    # Higher latency -> scale the thread count down.
    if latency_ms > 200:
        network_factor = 0.5
    elif latency_ms > 100:
        network_factor = 0.7
    elif latency_ms > 50:
        network_factor = 0.9
    else:
        network_factor = 1.0

    # Small files parallelise well; large files saturate bandwidth quickly.
    if avg_size_mb < 0.1:
        size_factor = 2.0
        optimal_threads = min(base_threads, 16)
    elif avg_size_mb < 1:
        size_factor = 1.5
        optimal_threads = min(base_threads, 12)
    elif avg_size_mb < 10:
        size_factor = 1.0
        optimal_threads = min(base_threads, 8)
    elif avg_size_mb < 50:
        size_factor = 0.7
        optimal_threads = min(base_threads, 4)
    else:
        size_factor = 0.5
        optimal_threads = min(base_threads, 2)

    # Guarantee every thread gets at least ~1 Mbps.
    bandwidth_per_thread = network_speed_mbps / optimal_threads
    if bandwidth_per_thread < 1:
        optimal_threads = max(1, int(network_speed_mbps))

    adjusted_threads = int(optimal_threads * network_factor * size_factor)
    adjusted_threads = max(1, min(adjusted_threads, file_count, 16))

    pool_connections = 3

    if adjusted_threads <= 2:
        pool_maxsize = 4
    elif adjusted_threads <= 4:
        pool_maxsize = 8
    elif adjusted_threads <= 8:
        pool_maxsize = 12
    else:
        pool_maxsize = 16

    total_size_mb = file_count * avg_size_mb
    upload_time_single = (total_size_mb * 8) / network_speed_mbps  # seconds
    estimated_time = upload_time_single / adjusted_threads
    # Per-file latency overhead, amortised over the workers.
    estimated_time += (latency_ms / 1000) * file_count / adjusted_threads

    return {
        'max_workers': adjusted_threads,
        'pool_connections': pool_connections,
        'pool_maxsize': pool_maxsize,
        'estimated_time_minutes': estimated_time / 60,
        'speedup': upload_time_single / estimated_time if estimated_time > 0 else 1,
        'bandwidth_per_thread_mbps': network_speed_mbps / adjusted_threads,
    }


def main():
    """CLI entry point: analyse the release directory, measure (or accept)
    network characteristics, and print recommended publish settings."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--files-dir", default="release")
    parser.add_argument("--server-url", required=True)
    parser.add_argument("--network-speed", type=float)
    parser.add_argument("--skip-measure", action="store_true")

    args = parser.parse_args()

    print("Analyzing files...")
    file_count, avg_size, median_size, size_dist = analyze_files(args.files_dir)

    if file_count == 0:
        print("No files found.")
        return

    if args.skip_measure:
        latency = 100.0
        network_speed = args.network_speed or 100.0
        print("Using default measurements (skipped)")
    else:
        print("Measuring server latency...")
        # NOTE(review): measure_server_latency / measure_network_speed are
        # defined elsewhere in this file -- not visible in this chunk.
        latency = measure_server_latency(args.server_url)

        if args.network_speed:
            network_speed = args.network_speed
            print(f"Using provided network speed: {network_speed} Mbps")
        else:
            print("Measuring network speed...")
            network_speed = measure_network_speed(args.server_url)

    print("\n" + "=" * 10)
    print(f"Total files: {file_count}")
    print(f"Total size: {file_count * avg_size:.1f} MB")
    print(f"Average size: {avg_size:.2f} MB")
    print(f"Median size: {median_size:.2f} MB")
    print(f"Size distribution:")
    print(f" <100KB: {size_dist['tiny']} files")
    print(f" 100KB-1MB: {size_dist['small']} files")
    print(f" 1-10MB: {size_dist['medium']} files")
    print(f" 10-100MB: {size_dist['large']} files")
    print(f" >100MB: {size_dist['huge']} files")

    print("\n" + "=" * 10)
    print(f"Network speed: {network_speed:.1f} Mbps")
    print(f"Server latency: {latency:.1f} ms")

    print("\n" + "=" * 10)
    print("OPTIMAL SETTINGS")
    print("=" * 10)

    optimal = calculate_optimal_settings(file_count, avg_size, network_speed, latency)

    print(f"Recommended --max-workers: {optimal['max_workers']}")
    print(f"Recommended --pool-connections: {optimal['pool_connections']}")
    print(f"Recommended --pool-maxsize: {optimal['pool_maxsize']}")
    print(f"Estimated upload time: {optimal['estimated_time_minutes']:.1f} minutes")
    print(f"Speedup vs single thread: {optimal['speedup']:.1f}x")
    print(f"Bandwidth per thread: {optimal['bandwidth_per_thread_mbps']:.1f} Mbps")
    print(f"Network quality: {'Good' if latency < 50 else 'Average' if latency < 100 else 'Poor'}")


if __name__ == "__main__":
    main()
"https://cdn.corvaxforge.ru/" From 9f45feccf6232336f9e85e021983ae2fac764543 Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Sat, 10 Jan 2026 12:36:26 +1000 Subject: [PATCH 14/17] Ap (#26) * Update publish-testing.yml * Update publish.yml * Update publish_mapping.yml * Update publish-testing.yml * Update advanced_publish.py * Update advanced_publish.py * Update advanced_publish.py --- .github/workflows/publish-testing.yml | 8 ++- .github/workflows/publish.yml | 8 ++- .github/workflows/publish_mapping.yml | 8 ++- Tools/_Forge/Publish/advanced_publish.py | 76 +++++++++++++++++++----- 4 files changed, 82 insertions(+), 18 deletions(-) diff --git a/.github/workflows/publish-testing.yml b/.github/workflows/publish-testing.yml index f56f9e753969..fafce40488f1 100644 --- a/.github/workflows/publish-testing.yml +++ b/.github/workflows/publish-testing.yml @@ -17,6 +17,12 @@ jobs: - uses: actions/checkout@v3.6.0 with: submodules: 'recursive' + + - name: Install Python dependencies + run: | + python3 -m pip install --upgrade pip + pip3 install discord-webhook requests + - name: Setup .NET Core uses: actions/setup-dotnet@v3.2.0 with: @@ -40,7 +46,7 @@ jobs: run: dotnet run --project Content.Packaging client --no-wipe-release - name: Publish version - run: Tools/publish_multi_request.py --fork-id wizards-testing + run: python3 Tools/_Forge/Publish/advanced_publish.py --fork-id wizards-testing --publish-token PUBLISH_TOKEN env: PUBLISH_TOKEN: ${{ secrets.PUBLISH_TOKEN }} GITHUB_REPOSITORY: ${{ vars.GITHUB_REPOSITORY }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index ba85ba777ee2..77f033f34064 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -17,6 +17,11 @@ jobs: - name: Install dependencies run: sudo apt-get install -y python3-paramiko python3-lxml + + - name: Install Python dependencies + run: | + python3 -m pip install --upgrade pip + pip3 install discord-webhook requests - uses: actions/checkout@v4.2.2 with: 
@@ -45,9 +50,10 @@ jobs: run: dotnet run --project Content.Packaging client --no-wipe-release - name: Publish version - run: python3 Tools/_Forge/Publish/advanced_publish.py --fork-id frontier --publish-token PUBLISH_TOKEN + run: python3 Tools/_Forge/Publish/advanced_publish.py --fork-id frontier --publish-token PUBLISH_TOKEN --publish-webhook PUBLISH_WEBHOOK env: PUBLISH_TOKEN: ${{ secrets.PUBLISH_TOKEN }} + PUBLISH_WEBHOOK: ${{ secrets.PUBLISH_WEBHOOK }} GITHUB_REPOSITORY: ${{ vars.GITHUB_REPOSITORY }} # - name: Publish changelog (Discord) diff --git a/.github/workflows/publish_mapping.yml b/.github/workflows/publish_mapping.yml index 034bdd0e2eff..96dff581397d 100644 --- a/.github/workflows/publish_mapping.yml +++ b/.github/workflows/publish_mapping.yml @@ -15,6 +15,11 @@ jobs: with: submodules: 'recursive' + - name: Install Python dependencies + run: | + python3 -m pip install --upgrade pip + pip3 install discord-webhook requests + - name: Setup .NET Core uses: actions/setup-dotnet@v4.1.0 with: @@ -38,7 +43,8 @@ jobs: run: dotnet run --project Content.Packaging client --no-wipe-release - name: Publish version - run: python3 Tools/_Forge/Publish/advanced_publish.py --fork-id frontier_mapping --publish-token PUBLISH_MAPPING_TOKEN + run: python3 Tools/_Forge/Publish/advanced_publish.py --fork-id frontier_mapping --publish-token PUBLISH_MAPPING_TOKEN --publish-webhook PUBLISH_WEBHOOK env: PUBLISH_MAPPING_TOKEN: ${{ secrets.PUBLISH_MAPPING_TOKEN }} + PUBLISH_WEBHOOK: ${{ secrets.PUBLISH_WEBHOOK }} FORK_ID_MAPPING: ${{ vars.FORK_ID_MAPPING }} diff --git a/Tools/_Forge/Publish/advanced_publish.py b/Tools/_Forge/Publish/advanced_publish.py index a0fe1d119965..4e69e276d327 100644 --- a/Tools/_Forge/Publish/advanced_publish.py +++ b/Tools/_Forge/Publish/advanced_publish.py @@ -1,10 +1,10 @@ +#!/usr/bin/env python3 + """ -Продвинутый паблиш с параллельной загрузкой и аргументами +Продвинутый паблиш с параллельной загрузкой, аргументами и публикацией статуса паблиша в 
дискорд Github: FireFoxPhoenix """ -#!/usr/bin/env python3 - import argparse import requests import os @@ -12,6 +12,7 @@ import threading import logging import sys +from discord_webhook import DiscordWebhook, DiscordEmbed from typing import Iterable from concurrent.futures import ThreadPoolExecutor, as_completed @@ -22,12 +23,13 @@ # CONFIGURATION PARAMETERS # Forks should change these to publish to their own infrastructure. # -ROBUST_CDN_URL = "https://cdn.corvaxforge.ru/" +ROBUST_CDN_URL = "https://cdn.corvaxforge.ru/" # добавить в аругмент def main(): parser = argparse.ArgumentParser() parser.add_argument("--fork-id", required=True) - parser.add_argument("--publish-token") + parser.add_argument("--publish-token", required=True) + parser.add_argument("--publish-webhook", required=False, default=None) parser.add_argument("--max-workers", type=int, default=4) parser.add_argument("--pool-connections", type=int, default=3) parser.add_argument("--pool-maxsize", type=int, default=10) @@ -37,23 +39,38 @@ def main(): args = parser.parse_args() fork_id = args.fork_id publish_token = args.publish_token + publish_webhook = args.publish_webhook max_workers = args.max_workers pool_connections = args.pool_connections pool_maxsize = args.pool_maxsize max_retries = args.max_retries release_dir = args.release_dir + + if publish_webhook and publish_webhook not in os.environ: + publish_webhook = None + logger.warning("Publish webhook not found") + publish_webhook = os.environ[publish_token] + if not publish_webhook: + publish_webhook = None + logger.warning(f"Publish webhook is empty") if fork_id == "" or fork_id == None: - logger.critical("Fork id was not entered") + message = "Fork id was not entered" + logger.critical(message) + send_discord_message(message, "Critical", "ffa500", publish_webhook) raise KeyError() if publish_token not in os.environ: - logger.critical("Publish token not found") + message = "Publish token not found" + logger.critical(message) + 
send_discord_message(message, "Critical", "ffa500", publish_webhook) sys.exit(1) publish_token = os.environ[publish_token] if not publish_token: - logger.critical(f"Publish token is empty") - sys.exit(1) + message = f"Publish token is empty" + logger.critical(message) + send_discord_message(message, "Critical", "ffa500", publish_webhook) + sys.exit(1) #if "GITHUB_SHA" not in os.environ: # logger.critical("GITHUB_SHA environment variable not set") @@ -73,7 +90,9 @@ def main(): files = list(get_files_to_publish(release_dir)) if not files: - logger.warning("No files found to publish") + message = "No files found to publish" + logger.warning(message) + send_discord_message(message, "Warning", "ffff00", publish_webhook) logger.info(f"Uploading {len(files)} files using {max_workers} parallel workers...") successful = 0 @@ -92,18 +111,23 @@ def main(): failed += 1 logger.warning(f"Failed to publish {os.path.basename(file_path)}: {e}") if failed: - logger.warning(f"Upload completed with {failed} failures") + message = f"Upload completed with {failed} failures" + logger.warning(message) + send_discord_message(message, "Warning", "ffff00", publish_webhook) # sys.exit(1) else: - logger.info(f"All {successful} files uploaded successfully") + message = f"All {successful} files uploaded successfully" + logger.info(message) + # send_discord_message(message, "Info", "03b2f8", publish_webhook) logger.info("Finishing publish...") data = { "version": version } headers = { "Content-Type": "application/json" } resp = session.post(f"{ROBUST_CDN_URL}fork/{fork_id}/publish/finish", json=data, headers=headers) resp.raise_for_status() - logger.info("Publish completed") - + message = "Publish completed" + logger.info(message) + send_discord_message(message, "Info", "03b2f8", publish_webhook) def get_files_to_publish(release_dir: str) -> Iterable[str]: try: @@ -117,7 +141,6 @@ def get_files_to_publish(release_dir: str) -> Iterable[str]: logger.error(f"No permission to read directory 
'{release_dir}'") return [] - def get_engine_version() -> str: try: proc = subprocess.run(["git", "describe","--tags", "--abbrev=0"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd="RobustToolbox", check=True, encoding="UTF-8", timeout=20) @@ -163,5 +186,28 @@ def create_session(publish_token: str, pool_connections: int, pool_maxsize: int, session.headers = { "Authorization": f"Bearer {publish_token}" } return session +def send_discord_message(message: str, status: str, color: str = "00ff00", publish_webhook: str = None): + if not publish_webhook: + return + try: + webhook = DiscordWebhook( + url=publish_webhook, + username="Publish Status", + rate_limit_retry=True + ) + embed = DiscordEmbed( + title="Publish", + color=color + ) + embed.add_embed_field(name=status, value=message) + embed.set_timestamp() + webhook.add_embed(embed) + response = webhook.execute() + if not response.status_code in [200, 204]: + logger.warning("The Discord message was not sent") + except Exception as e: + logger.error(f"The Discord message was not sent: {e}") + return + if __name__ == '__main__': main() From 0750adac1aa6242748347b01486b8610b35d7296 Mon Sep 17 00:00:00 2001 From: FireFoxPhoenix Date: Sat, 10 Jan 2026 12:51:02 +1000 Subject: [PATCH 15/17] Update advanced_publish.py --- Tools/_Forge/Publish/advanced_publish.py | 45 ++++++++++++++---------- 1 file changed, 27 insertions(+), 18 deletions(-) diff --git a/Tools/_Forge/Publish/advanced_publish.py b/Tools/_Forge/Publish/advanced_publish.py index 4e69e276d327..1319677502b7 100644 --- a/Tools/_Forge/Publish/advanced_publish.py +++ b/Tools/_Forge/Publish/advanced_publish.py @@ -46,11 +46,13 @@ def main(): max_retries = args.max_retries release_dir = args.release_dir - if publish_webhook and publish_webhook not in os.environ: - publish_webhook = None - logger.warning("Publish webhook not found") - publish_webhook = os.environ[publish_token] - if not publish_webhook: + if publish_webhook: + if publish_webhook not in os.environ: + 
def upload_file(file_path: str, fork_id: str, publish_token: str, pool_connections: int, pool_maxsize: int, max_retries: int, version: str):
    """Upload a single release file to the Robust CDN.

    Runs on a worker thread: each thread lazily creates and caches its own
    ``requests.Session`` (sessions are not safe to share across threads).
    Returns *file_path* on success; logs and re-raises any failure so the
    caller can count it as failed.
    """
    try:
        # One session per worker thread, created on first use.
        if not hasattr(thread_session, "session"):
            thread_session.session = create_session(publish_token, pool_connections, pool_maxsize, max_retries)
        session = thread_session.session
        with open(file_path, "rb") as file:
            headers = {
                "Content-Type": "application/octet-stream",
                "Robust-Cdn-Publish-File": os.path.basename(file_path),
                "Robust-Cdn-Publish-Version": version,
            }
            # Large files need a longer connect/read timeout than the
            # session-wide default.
            resp = session.post(f"{ROBUST_CDN_URL}fork/{fork_id}/publish/file", data=file, headers=headers, timeout=(15, 30))
            resp.raise_for_status()
            return file_path
    except FileNotFoundError:
        logger.error(f"File '{file_path}' not found")
        raise
    except IOError as e:
        logger.error(f"IO error reading '{file_path}': {e}")
        raise
    except Exception as e:
        logger.error(f"Unexpected error with '{file_path}': {e}")
        raise


def create_session(publish_token: str, pool_connections: int, pool_maxsize: int, max_retries: int) -> requests.Session:
    """Build a ``requests.Session`` with retries, connection pooling, bearer
    auth, and a default request timeout.

    BUG FIX: the previous version replaced ``session.request`` with a lambda
    that unconditionally passed ``timeout=(5, 30)``.  Any caller supplying its
    own ``timeout`` (as ``upload_file`` does with ``timeout=(15, 30)``) then
    raised ``TypeError: got multiple values for keyword argument 'timeout'``.
    The default is now applied with ``setdefault`` so explicit timeouts win.
    """
    session = requests.Session()
    retry = Retry(
        total=max_retries,
        backoff_factor=0.5,
        status_forcelist=[429, 500, 502, 503, 504],
        allowed_methods=["HEAD", "GET", "PUT", "POST", "DELETE", "OPTIONS", "TRACE"],
    )
    adapter = requests.adapters.HTTPAdapter(
        pool_connections=pool_connections,
        pool_maxsize=pool_maxsize,
        max_retries=retry,
    )
    session.mount("https://", adapter)
    session.mount("http://", adapter)
    session.headers.update({ "Authorization": f"Bearer {publish_token}" })

    _base_request = session.request  # bound method captured before the wrap

    def _request_with_default_timeout(method, url, **kwargs):
        # Default timeout for every request; callers may override it.
        kwargs.setdefault("timeout", (5, 30))
        return _base_request(method, url, **kwargs)

    session.request = _request_with_default_timeout
    return session
def send_discord_message(message: str, status: str, color: str = "00ff00", fork_id: str = None, publish_webhook: str = None):
    """Post a publish-status embed to a Discord webhook.

    BUG FIX: the parameter order now matches every call site, which passes
    ``(message, status, color, fork_id, publish_webhook)`` positionally.
    The previous signature put ``fork_id`` before ``color``, so the hex
    colour string landed in *fork_id* and the fork id in *color*.

    No-op when *publish_webhook* is falsy.  Never raises: delivery failures
    are logged and swallowed so Discord problems cannot break the publish.
    """
    if not publish_webhook:
        return
    if not fork_id:
        fork_id = "unknown"
    try:
        webhook = DiscordWebhook(
            url=publish_webhook,
            username="Publish Status",
            rate_limit_retry=True
        )
        embed = DiscordEmbed(
            title=f"Publish for {fork_id}",
            color=color
        )
        embed.add_embed_field(name=status, value=message)
        embed.set_timestamp()
        webhook.add_embed(embed)
        response = webhook.execute()
        if response.status_code not in [200, 204]:
            logger.warning("The Discord message was not sent")
    except Exception as e:
        logger.error(f"The Discord message was not sent: {e}")
        return