diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000..9a46ddc
Binary files /dev/null and b/.DS_Store differ
diff --git a/.github/workflows/publish-on-release.yml b/.github/workflows/publish-on-release.yml
new file mode 100644
index 0000000..add5af1
--- /dev/null
+++ b/.github/workflows/publish-on-release.yml
@@ -0,0 +1,53 @@
+name: Publish Docker Images on Release
+
+on:
+  release:
+    types: [published]
+  workflow_dispatch:
+    inputs:
+      release_tag:
+        description: 'Release tag to use for manual runs (e.g., v1.2.3). Release events use the tag from the release itself.'
+        required: false
+        default: 'v0.0.1'
+
+jobs:
+  build-and-push:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Log in to Docker Hub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME_AL }}
+          password: ${{ secrets.DOCKERHUB_API_KEY_AL }}
+
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install docker python-dotenv requests rich
+
+      - name: Run workflow_runner.py
+        env:
+          RELEASE_TAG: ${{ github.event.inputs.release_tag || github.ref_name }}
+          DOCKERHUB_USERNAME_AL: ${{ secrets.DOCKERHUB_USERNAME_AL }}
+          DOCKERHUB_API_KEY_AL: ${{ secrets.DOCKERHUB_API_KEY_AL }}
+          MAILJET_API_KEY: ${{ secrets.MAILJET_API_KEY }}
+          MAILJET_API_SECRET: ${{ secrets.MAILJET_API_SECRET }}
+        run: |
+          python workflow_runner.py
diff --git a/.gitignore b/.gitignore
index 3db999d..900b322 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,4 +7,6 @@ test_system_runner.json
 recorder_*
 local_feeder
 build
-.vscode/
\ No newline at end of file
+.vscode/
+.secrets
+.env
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index 0703afe..3d17d52 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,6 @@
-FROM python:3.11.10
+FROM python:3.13-slim-bullseye
 
 WORKDIR /simulation
-
 COPY scenarios/docker_system.json docker_system.json
 COPY components.json .
 COPY LocalFeeder LocalFeeder
diff --git a/LocalFeeder/.DS_Store b/LocalFeeder/.DS_Store
new file mode 100644
index 0000000..6d98c7e
Binary files /dev/null and b/LocalFeeder/.DS_Store differ
diff --git a/LocalFeeder/Dockerfile b/LocalFeeder/Dockerfile
index 8a2427b..b82d1f6 100644
--- a/LocalFeeder/Dockerfile
+++ b/LocalFeeder/Dockerfile
@@ -1,6 +1,8 @@
 FROM python:3.10.6-slim-bullseye
+LABEL org.opencontainers.image.authors="Aadil Latif <aadil.latif@gmail.com>"
 RUN apt-get update
 RUN apt-get install -y git ssh
+RUN apt-get install -y build-essential cmake git python3-dev
 RUN mkdir LocalFeeder
 COPY . ./LocalFeeder
 WORKDIR ./LocalFeeder
diff --git a/LocalFeeder/FeederSimulator.py b/LocalFeeder/FeederSimulator.py
index b35ba97..66c9c99 100644
--- a/LocalFeeder/FeederSimulator.py
+++ b/LocalFeeder/FeederSimulator.py
@@ -1,43 +1,55 @@
 """Core class to abstract OpenDSS into Feeder class."""
 
-import csv
-import json
+from typing import Dict, List, Optional, Set, Tuple
+from time import strptime
+from enum import Enum
 import logging
-import math
-import os
 import random
+import math
 import time
-from enum import Enum
-from time import strptime
-from typing import Dict, List, Optional, Set, Tuple
+import json
+import csv
+import os
 
-import boto3
-import numpy as np
-import opendssdirect as dss
-import xarray as xr
-from botocore import UNSIGNED
+from scipy.sparse import coo_matrix, csc_matrix
 from botocore.config import Config
+from pydantic import BaseModel
+from botocore import UNSIGNED
+import opendssdirect as dss
 from dss_functions import (
     get_capacitors,
     get_generators,
-    get_loads,
     get_pvsystems,
     get_voltages,
+    get_loads,
 )
+import xarray as xr
+import numpy as np
+import boto3
 from oedisi.types.data_types import (
-    Command,
-    InverterControl,
     InverterControlMode,
+    InverterControl,
     IncidenceList,
+    Command,
 )
-from pydantic import BaseModel
-from scipy.sparse import coo_matrix, csc_matrix
 
 logger = logging.getLogger(__name__)
 logger.addHandler(logging.StreamHandler())
 logger.setLevel(logging.INFO)
 
+def command(command_str: str) -> str:
+    """Run an OpenDSS text command and return the OpenDSS result string."""
+    logger.info(f"OpenDSS Command: {command_str}")
+    try:
+        dss.Text.Command(command_str)
+        result = dss.Text.Result()
+        logger.info(f"OpenDSS Reply: {result}")
+        return result
+    except Exception as e:
+        logger.error(f"OpenDSS Error: {e}")
+        raise ValueError(str(e)) from e
+
 
 def permutation(from_list, to_list):
     """Create permutation representing change in from_list to to_list.
@@ -153,13 +165,18 @@ def __init__(self, config: FeederConfig):
                 raise Exception("Set existing_feeder_file when uploading data")
         else:
             self._feeder_file = config.existing_feeder_file
-
+        logger.info(f"Using feeder file: {self._feeder_file}")
         self.open_lines = config.open_lines
         self.load_feeder()
-
         if self._sensor_location is None:
+            logger.info("No sensor location provided, creating measurement lists")
             self.create_measurement_lists()
+        else:
+            logger.info(
+                f"Using sensor location {self._sensor_location}, not creating measurement lists"
+            )
+        logger.info("Running initial snapshot")
         self.snapshot_run()
         assert self._state == OpenDSSState.SNAPSHOT_RUN, f"{self._state}"
@@ -278,6 +295,7 @@ def create_measurement_lists(
     ):
         """Initialize list of sensor locations for the measurement federate."""
         random.seed(voltage_seed)
+        logger.info("Creating measurement lists")
         os.makedirs("sensors", exist_ok=True)
         voltage_subset = random.sample(
             self._AllNodeNames,
@@ -285,6 +303,7 @@
         )
         with open(os.path.join("sensors", "voltage_ids.json"), "w") as fp:
             json.dump(voltage_subset, fp, indent=4)
+        logger.info("Voltage sensors exported to sensors/voltage_ids.json")
 
         random.seed(real_seed)
         real_subset = random.sample(
@@ -293,6 +312,7 @@
         )
         with open(os.path.join("sensors", "real_ids.json"), "w") as fp:
             json.dump(real_subset, fp, indent=4)
+        logger.info("Real power sensors exported to sensors/real_ids.json")
 
         random.seed(reactive_seed)
         reactive_subset = random.sample(
@@ -301,6 +321,7 @@
         )
         with open(os.path.join("sensors", "reactive_ids.json"), "w") as fp:
             json.dump(reactive_subset, fp, indent=4)
+        logger.info("Reactive power sensors exported to sensors/reactive_ids.json")
 
     def get_circuit_name(self):
         """Get name of current opendss circuit."""
@@ -334,11 +355,19 @@ def get_bus_coords(self) -> Dict[str, Tuple[float, float]] | None:
 
     def load_feeder(self):
         """Load feeder once downloaded. Relies on legacy mode."""
+        logger.info("Loading feeder into OpenDSS")
         # Real solution is kvarlimit with kvarmax
         dss.Basic.LegacyModels(True)
-        dss.Text.Command("clear")
-        dss.Text.Command("redirect " + self._feeder_file)
-        result = dss.Text.Result()
+        logger.info("Enabling legacy models")
+        if not os.path.exists(self._feeder_file):
+            raise ValueError(f"Feeder file {self._feeder_file} not found")
+
+        command("clear")
+
+        base_path = os.getcwd()
+        logger.info("Current working directory: " + base_path)
+        result = command(f'redirect "{self._feeder_file}"')
+        logger.info("Feeder loaded")
         if not result == "":
             raise ValueError("Feeder not loaded: " + result)
         self._circuit = dss.Circuit
@@ -357,7 +386,7 @@ def load_feeder(self):
                 self._source_indexes.append(
                     self._AllNodeNames.index(Bus.upper() + "." + str(phase))
                 )
-
+        logger.info("Setting up base voltages")
         self.setup_vbase()
 
         self._pvsystems = set()
@@ -366,12 +395,13 @@
 
         if self.tap_setting is not None:
             # Doesn't work with AutoTrans or 3-winding transformers.
-            dss.Text.Command(f"batchedit transformer..* wdg=2 tap={self.tap_setting}")
+            command(f"batchedit transformer..* wdg=2 tap={self.tap_setting}")
 
         if self.open_lines is not None:
             for l in self.open_lines:
                 self.open_line(l)
         self._state = OpenDSSState.LOADED
+        logger.info("Feeder loaded into OpenDSS")
 
     def disable_elements(self):
         """Disable most elements.
         Used in disabled_run."""
diff --git a/LocalFeeder/sender_cosim.py b/LocalFeeder/sender_cosim.py
index fd07f3b..8bcee0f 100644
--- a/LocalFeeder/sender_cosim.py
+++ b/LocalFeeder/sender_cosim.py
@@ -487,14 +487,19 @@ def go_cosim(
 
 def run_simulator(broker_config: BrokerConfig):
     """Load static_inputs and input_mapping and run JSON."""
+    logger.info("Starting feeder simulator")
+    logger.info("Loading static_inputs.json and input_mapping.json")
     with open("static_inputs.json") as f:
         parameters = json.load(f)
     with open("input_mapping.json") as f:
         input_mapping = json.load(f)
+    logger.info(f"Feeder parameters: {parameters}")
     config = FeederConfig(**parameters)
+    logger.info(f"Feeder config: {config}")
     sim = FeederSimulator(config)
+    logger.info("Simulator created, starting co-simulation")
     go_cosim(sim, config, input_mapping, broker_config)
-
+    logger.info("Simulator complete")
 
 if __name__ == "__main__":
     run_simulator(BrokerConfig(broker_ip="127.0.0.1"))
diff --git a/LocalFeeder/server.py b/LocalFeeder/server.py
index 85032fc..79abcb0 100644
--- a/LocalFeeder/server.py
+++ b/LocalFeeder/server.py
@@ -17,8 +17,8 @@
 from oedisi.types.common import ServerReply, HeathCheck, DefaultFileNames
 from oedisi.types.common import BrokerConfig
 
+logger = logging.getLogger("uvicorn.error")
 REQUEST_TIMEOUT_SEC = 1200
-
 app = FastAPI()
 
 base_path = os.getcwd()
@@ -53,22 +53,37 @@ def read_root():
 @app.get("/sensor")
 async def sensor():
-    logging.info(os.getcwd())
+    logger.info("Checking for sensors.json file")
+    logger.info(os.getcwd())
     sensor_path = os.path.join(base_path, "sensors", "sensors.json")
     while not os.path.exists(sensor_path):
         time.sleep(1)
-        logging.info(f"waiting {sensor_path}")
-    logging.info("success")
+        logger.info(f"waiting {sensor_path}")
+    logger.info("success")
     data = json.load(open(sensor_path, "r"))
     return data
 
+@app.post("/sensor")
+async def sensor_post(sensor_list: list[str]):
+    sensor_dir = os.path.join(base_path, "sensors")
+    sensor_path = os.path.join(sensor_dir, "sensors.json")
+    try:
+        os.makedirs(sensor_dir, exist_ok=True)
+        with open(sensor_path, "w") as f:
+            json.dump(sensor_list, f, indent=2)
+        response = ServerReply(detail=f"Wrote {len(sensor_list)} sensors to {sensor_path}").dict()
+        return JSONResponse(response, 200)
+    except Exception:
+        err = traceback.format_exc()
+        logger.error(f"Failed to write sensors file: {err}")
+        raise HTTPException(status_code=500, detail=str(err))
 
 @app.post("/profiles")
 async def upload_profiles(file: UploadFile):
     try:
         data = file.file.read()
         if not file.filename.endswith(".zip"):
-            HTTPException(400, "Invalid file type. Only zipped profiles are accepted.")
+            raise HTTPException(400, "Invalid file type. Only zipped profiles are accepted.")
 
         profile_path = "./profiles"
@@ -101,7 +116,7 @@ async def upload_model(file: UploadFile):
     try:
         data = file.file.read()
         if not file.filename.endswith(".zip"):
-            HTTPException(
+            raise HTTPException(
                 400, "Invalid file type. Only zipped opendss models are accepted."
             )
@@ -120,24 +135,24 @@ async def upload_model(file: UploadFile):
             return JSONResponse(response, 200)
         else:
-            HTTPException(400, "A valid opendss model should have a master.dss file.")
+            raise HTTPException(400, "A valid opendss model should have a master.dss file.")
     except Exception as e:
-        HTTPException(500, "Unknown error while uploading userdefined opendss model.")
+        raise HTTPException(500, "Unknown error while uploading a user-defined opendss model.")
 
 @app.post("/run")
 async def run_feeder(
     broker_config: BrokerConfig, background_tasks: BackgroundTasks
 ):  # :BrokerConfig
-    logging.info(broker_config)
+    logger.info(broker_config)
     try:
         background_tasks.add_task(run_simulator, broker_config)
         response = ServerReply(detail="Task sucessfully added.").dict()
-
         return JSONResponse(response, 200)
     except Exception as e:
         err = traceback.format_exc()
-        HTTPException(500, str(err))
+        logger.error(f"Error in /run: {err}")
+        raise HTTPException(500, str(err))
 
 @app.post("/configure")
diff --git a/LocalFeeder/tests/.DS_Store b/LocalFeeder/tests/.DS_Store
new file mode 100644
index 0000000..c9f3cd6
Binary files /dev/null and b/LocalFeeder/tests/.DS_Store differ
diff --git a/README.md b/README.md
index ff1ba66..180a508 100644
--- a/README.md
+++ b/README.md
@@ -143,3 +143,40 @@ Since HELICS does not have linux ARM builds, you have to run with
 ```bash
 export DOCKER_DEFAULT_PLATFORM=linux/amd64
 ```
+
+## Release workflow (CI)
+
+This repository includes a GitHub Actions workflow at `.github/workflows/publish-on-release.yml` that runs on published GitHub releases (or manual dispatch) and invokes `workflow_runner.py`, which:
+
+- Reads `components.json` and locates each component directory containing a `Dockerfile`.
+- Builds an image per component and pushes it to Docker Hub as `DOCKERHUB_USERNAME/<component_name>:<release_tag>`.
+- Notifies image authors (parsed from the `org.opencontainers.image.authors` label) via Mailjet.
+
+Required GitHub repository secrets:
+
+- `DOCKERHUB_USERNAME_AL` — your Docker Hub username.
+- `DOCKERHUB_API_KEY_AL` — your Docker Hub password or personal access token.
+- `MAILJET_API_KEY` / `MAILJET_API_SECRET` — Mailjet credentials for the notification emails.
+
+How to test
+
+- Publish a release (or tag) on GitHub for this repository — the workflow triggers on published releases.
+- To approximate the same logic locally with plain `docker buildx` (useful for debugging, and also produces multi-arch images and a normalized tag with the leading `v` stripped), you can run:
+
+```bash
+export RELEASE_TAG=v1.2.3
+export DOCKERHUB_USERNAME=youruser
+export PAT_TOKEN=...
+echo "$PAT_TOKEN" | docker login --username "$DOCKERHUB_USERNAME" --password-stdin
+mapfile -t dirs < <(find . -maxdepth 4 -type f -name Dockerfile -exec dirname {} \; | sort -u)
+NORMALIZED_TAG=${RELEASE_TAG#v}
+for d in "${dirs[@]}"; do
+  name=$(echo "$d" | sed 's|^\./||; s|/|_|g')
+  docker buildx build --platform linux/amd64,linux/arm64 -t "${DOCKERHUB_USERNAME}/${name}:${RELEASE_TAG}" -t "${DOCKERHUB_USERNAME}/${name}:${NORMALIZED_TAG}" "${d}" --push
+done
+```
+
+Notes
+
+- Add the secrets in the repository Settings → Secrets → Actions.
+- Images will be pushed to Docker Hub; ensure your account has push permission and consider Docker Hub rate limits.
+
diff --git a/best_practice_guide.md b/best_practice_guide.md
new file mode 100644
index 0000000..e9899d5
--- /dev/null
+++ b/best_practice_guide.md
@@ -0,0 +1,86 @@
+
+# **Best Practice Guide for OEDISI Component Development**
+- **Purpose:** Practical, repeatable steps for building a new OEDISI component.
+- **Audience:** Component developers and integrators working with the `oedisi` framework.
+
+**Repository & Project Setup**
+- **Preferred location:** Create the new component inside the `oedisi-example` repository. Keeping components together simplifies testing, version control, and CI/integration workflows.
+  - **If adding to `oedisi-example`:** Create a top-level directory `oedisi-example/<component_name>/` and follow the recommended layout.
+  - **Required docs:** Add `LICENSE.md` and `CONTRIBUTORS.json` that describe commit/PR expectations and how to run tests locally.
+- **Alternate location:** For independently versioned components, create a dedicated repository under the `openEDI` organization. Note that this increases CI and release maintenance overhead and is not the preferred approach.
+- **Prohibited:** Avoid private or internal-only repositories, since those complicate shared CI and community review.
+
+**Recommended Component Layout**
+- `component/` — source package (python module, go module, etc.).
+- `tests/unit/` — unit tests for core logic.
+- `tests/integration/` — system-level tests that run the component with other federates or a minimal orchestrator.
+- `examples/` — example configs, `component_definition.json`, `input_mapping.json`, sample data.
+- `Dockerfile` / `docker-compose.yml` — for replicable integration test runs.
+- `README.md` — usage, configuration, and quickstart (also update central docs).
+
+**Scaffolding Steps**
+
+- Create the directory under `oedisi-example/` and initialize git (or create a repository under `openEDI` if appropriate).
+- Add `.gitignore`, `LICENSE.md`, and `README.md`.
+- Scaffold a minimal package in `component/` with a clear entry point, configuration loader, and a simple run loop that can be exercised by tests.
+- Add `component_definition.json` describing inputs/outputs and include an example `input_mapping.json` in `examples/` (see the sketch below).
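+
+For orientation, a minimal `component_definition.json` might look like the sketch below. The field names (`directory`, `execute_function`, `static_inputs`, `dynamic_inputs`, `dynamic_outputs`) follow the pattern used by the components in this repository; the port names and types here are hypothetical placeholders, so validate the file against the `oedisi` component framework before relying on it.
+
+```json
+{
+    "directory": "component",
+    "execute_function": "python server.py",
+    "static_inputs": [{"type": "int", "port_id": "number_of_timesteps"}],
+    "dynamic_inputs": [{"type": "VoltagesMagnitude", "port_id": "voltages_magnitude"}],
+    "dynamic_outputs": [{"type": "PowersReal", "port_id": "powers_real"}]
+}
+```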
+
+**Testing Strategy**
+
+- Unit tests: Keep logic pure and unit-testable. Use `pytest` (or the project standard). Put tests in `tests/unit/`.
+- Mock dependencies: Abstract external systems (HELICS, databases) with adapter interfaces so unit tests can use fast mocks; a sketch follows this list.
+- Integration tests: Put end-to-end tests in `tests/integration/`. Use a local HELICS broker or `docker-compose` to run multiple federates for system tests.
+- Test data & fixtures: Store small representative datasets in `examples/` or `tests/fixtures/` so CI runs quickly.
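+
+As a concrete illustration of the mocking advice above, the sketch below hides the co-simulation bus behind a small adapter interface. `MessageBus`, `FakeBus`, and `scale_and_forward` are hypothetical names, not part of the `oedisi` API; the point is that component logic depends only on the interface, so unit tests run without a HELICS broker.
+
+```python
+from abc import ABC, abstractmethod
+
+
+class MessageBus(ABC):
+    """Adapter interface over the co-simulation bus (HELICS in production)."""
+
+    @abstractmethod
+    def publish(self, port_id: str, value: float) -> None: ...
+
+    @abstractmethod
+    def subscribe(self, port_id: str) -> float: ...
+
+
+class FakeBus(MessageBus):
+    """In-memory stand-in used by unit tests."""
+
+    def __init__(self):
+        self.values: dict[str, float] = {}
+
+    def publish(self, port_id: str, value: float) -> None:
+        self.values[port_id] = value
+
+    def subscribe(self, port_id: str) -> float:
+        return self.values[port_id]
+
+
+def scale_and_forward(bus: MessageBus, gain: float) -> None:
+    """Example component logic written against the adapter, not HELICS."""
+    bus.publish("powers_real", gain * bus.subscribe("voltages_magnitude"))
+
+
+def test_scale_and_forward():
+    bus = FakeBus()
+    bus.publish("voltages_magnitude", 2.0)
+    scale_and_forward(bus, gain=3.0)
+    assert bus.values["powers_real"] == 6.0
+```
+
+In production the same logic runs against a thin `MessageBus` implementation that forwards to HELICS publications and subscriptions, keeping HELICS entirely out of the unit-test path.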
+
+**Continuous Integration**
+
+- GitHub Actions: Create workflows to run unit tests on each PR and a separate workflow/job for integration tests (nightly or on-demand if they are long-running); a minimal unit-test workflow is sketched below.
+- Integration job config: Use `services`, `docker-compose`, or reusable workflows to bring up HELICS and dependent services.
+- Dependency caching: Cache dependencies to speed up CI runs.
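+
+A minimal sketch of the per-PR unit-test job, in the same style as this repository's `publish-on-release.yml`; the Python version and requirements path are assumptions to adapt per component:
+
+```yaml
+name: Unit Tests
+
+on:
+  pull_request:
+
+jobs:
+  unit-tests:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+          cache: 'pip'  # dependency caching, as recommended above
+      - run: pip install -r requirements.txt pytest
+      - run: pytest tests/unit
+```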
+
+**Documentation & Discovery**
+
+- Live docs: Keep `best_practice_guide.md` and the component `README.md` up to date as you add features.
+- Component metadata: Maintain `component_definition.json` to reflect actual inputs, outputs, and configuration.
+- Examples: Provide at least one runnable example in `examples/` demonstrating a typical run and expected outputs.
+
+**Collaboration & Avoiding Silos**
+
+- Don't build in a silo: Avoid side-loading models or making large, undocumented changes in a single component. That hides requirements from core maintainers and duplicates effort.
+- Start minimal: Implement a small, well-documented component that demonstrates the required inputs/outputs and includes a `component_definition.json`. Use this artifact to propose core API or schema changes.
+- Communicate early: Open an issue or RFC in the appropriate `openEDI` repo describing the change and the expected APIs, and link to the minimal example so reviewers can run and test it.
+- Design for extensibility: Prefer plug-ins or optional configuration over hard-coded changes. Provide fallbacks so the component can run even if core features are not yet available.
+
+**Release & Contribution Workflow**
+
+- Use semantic or conventional commits to help automate changelogs.
+- Use PR templates and require at least one reviewer for new components or breaking changes.
+- PR checklist: unit tests pass, integration smoke-tested (if applicable), docs updated, and `component_definition.json` validated.
+
+**Checklist (Quick Start)**
+
+- Create the component scaffold under `oedisi-example/<component_name>/`.
+- Add `component_definition.json` and an example `helics_config.json` in `examples/`.
+- Implement unit tests in `tests/unit/` and run them locally.
+- Add an integration test in `tests/integration/` and provide `docker-compose.yml` to run it.
+- Update the component `README.md` and this `best_practice_guide.md` with usage and test instructions.
+
+**Example commands**
+
+Run unit tests:
+
+```bash
+pytest tests/unit
+```
+
+Run integration locally (example using docker-compose):
+
+```bash
+docker-compose -f docker-compose.yml up --build
+```
+
+**Where to document changes**
+
+- Update the component `README.md` for usage and configuration examples.
+- Update this file: `oedisi-example/best_practice_guide.md` with lessons learned and new steps.
+
diff --git a/best_practice_guide.pdf b/best_practice_guide.pdf
new file mode 100644
index 0000000..727b5fd
Binary files /dev/null and b/best_practice_guide.pdf differ
diff --git a/broker/Dockerfile b/broker/Dockerfile
index 0c009d8..dea9963 100644
--- a/broker/Dockerfile
+++ b/broker/Dockerfile
@@ -1,7 +1,7 @@
-FROM python:3.10.6-slim-bullseye
-
+FROM python:3.13-slim-bullseye
 RUN apt-get update
 RUN apt-get install -y git ssh
+RUN apt-get install -y build-essential cmake git python3-dev
 
 RUN mkdir broker
diff --git a/broker/oedisi.code-workspace b/broker/oedisi.code-workspace
new file mode 100644
index 0000000..d220756
--- /dev/null
+++ b/broker/oedisi.code-workspace
@@ -0,0 +1,17 @@
+{
+    "folders": [
+        {
+            "path": "../../oedisi"
+        },
+        {
+            "path": ".."
+        },
+        {
+            "path": "../../oedisi-ieee123"
+        },
+        {
+            "path": "../../openapi_spec"
+        }
+    ],
+    "settings": {}
+}
\ No newline at end of file
diff --git a/broker/requirements.txt b/broker/requirements.txt
index 08474c4..20823db 100644
--- a/broker/requirements.txt
+++ b/broker/requirements.txt
@@ -3,5 +3,5 @@ pyyaml
 fastapi
 uvicorn
 oedisi>=2.0.2,<3
-grequests
+httpx
 python-multipart
diff --git a/broker/server.py b/broker/server.py
index 72f3ae5..a8effbc 100644
--- a/broker/server.py
+++ b/broker/server.py
@@ -1,45 +1,66 @@
-from fastapi import FastAPI, BackgroundTasks, UploadFile
-from fastapi.responses import FileResponse, JSONResponse
-from fastapi.exceptions import HTTPException
-import helics as h
-import grequests
+from functools import cache
+from enum import Enum
 import traceback
 import requests
 import zipfile
-import uvicorn
+import asyncio
 import logging
 import socket
 import time
-import yaml
 import json
 import os
-import json
+
+from fastapi import FastAPI, BackgroundTasks, UploadFile
+from fastapi.responses import FileResponse, JSONResponse
+from fastapi.exceptions import HTTPException
+import helics as h
+import uvicorn
+import httpx
 
 from oedisi.componentframework.system_configuration import (
-    WiringDiagram,
     ComponentStruct,
+    WiringDiagram,
 )
 from oedisi.types.common import ServerReply, HeathCheck
 from oedisi.tools.broker_utils import get_time_data
 
 logger = logging.getLogger("uvicorn.error")
-
+logger.setLevel(logging.DEBUG)
 app = FastAPI()
 
-is_kubernetes_env = (
-    os.environ["KUBERNETES_SERVICE_NAME"]
-    if "KUBERNETES_SERVICE_NAME" in os.environ
-    else None
-)
-
 WIRING_DIAGRAM_FILENAME = "system.json"
 WIRING_DIAGRAM: WiringDiagram | None = None
 
+class SimulationState(str, Enum):
+    FEDERATE_CONFIGURATION_SUCCESS = "federate_configuration_success"
+    FEDERATE_CONFIGURATION_FAILURE = "federate_configuration_failed"
+    FEDERATE_CONNECTION_TIMED_OUT = "federate_connection_timed_out"
+    FEDERATE_RUNTIME_FAILURE = "federate_runtime_failure"
+    SIMULATION_TERMINATED = "simulation_terminated"
+    WAITING_FOR_FEDERATES = "waiting_for_federates"
+    STARTING_SIMULATION = "starting_simulation"
+    SIMULATION_ERROR = "simulation_error"
+    SERVER_READY = "server_ready"
+    COMPLETED = "completed"
+    RUNNING = "running"
+    PAUSED = "paused"
+
+app.state.simulation_state = SimulationState.SERVER_READY
+app.state.simulation_lock = asyncio.Lock()
+
+@cache
+def kubernetes_service():
+    if "KUBERNETES_SERVICE_NAME" in os.environ:
+        return os.environ["KUBERNETES_SERVICE_NAME"]  # works with kubernetes
+    elif "SERVICE_NAME" in os.environ:
+        return os.environ["SERVICE_NAME"]  # works with minikube
+    else:
+        return None
 
 def build_url(host: str, port: int, enpoint: list):
-    if is_kubernetes_env:
-        KUBERNETES_SERVICE_NAME = os.environ["KUBERNETES_SERVICE_NAME"]
-        url = f"http://{host}.{KUBERNETES_SERVICE_NAME}:{port}/"
+    if kubernetes_service():
+        url = f"http://{host}.{kubernetes_service()}:{port}/"
     else:
         url = f"http://{host}:{port}/"
     url = url + "/".join(enpoint) + "/"
@@ -91,12 +112,13 @@ async def upload_profiles(file: UploadFile):
             HTTPException(
                 400, "Invalid file type. Only zip files are accepted."
             )
+
+        logger.info(f"Writing profile file to disk {file.filename}")
         with open(file.filename, "wb") as f:
             f.write(data)
 
         url = build_url(ip, port, ["profiles"])
-        logger.info(f"making a request to url - {url}")
-
+        logger.info(f"Uploading profile file {file.filename} to {url}")
         files = {"file": open(file.filename, "rb")}
         r = requests.post(url, files=files)
         response = ServerReply(detail=r.text).dict()
@@ -107,6 +129,25 @@
 
+@app.post("/sensors")
+async def add_sensors(sensors: list[str]):
+    try:
+        component_map, _, _ = read_settings()
+        for hostname in component_map:
+            if "feeder" in hostname:
+                ip = hostname
+                port = component_map[hostname]
+                url = build_url(ip, port, ["sensor"])
+                logger.info(f"Uploading sensors to {url}")
+                r = requests.post(url, json=sensors)
+                response = ServerReply(detail=r.text).dict()
+                return JSONResponse(response, r.status_code)
+        raise HTTPException(status_code=404, detail="Unable to upload sensors")
+    except HTTPException:
+        raise
+    except Exception:
+        err = traceback.format_exc()
+        raise HTTPException(status_code=500, detail=str(err))
+
 
 @app.post("/model")
 async def upload_model(file: UploadFile):
     try:
@@ -120,12 +161,12 @@
             HTTPException(
                 400, "Invalid file type. Only zip files are accepted."
             )
+        logger.info(f"Writing model file to disk {file.filename}")
         with open(file.filename, "wb") as f:
             f.write(data)
 
         url = build_url(ip, port, ["model"])
-        logger.info(f"making a request to url - {url}")
-
+        logger.info(f"Uploading model file {file.filename} to {url}")
         files = {"file": open(file.filename, "rb")}
         r = requests.post(url, files=files)
         response = ServerReply(detail=r.text).dict()
@@ -139,7 +180,6 @@
 @app.get("/results")
 def download_results():
     component_map, _, _ = read_settings()
-
     for hostname in component_map:
         if "recorder" in hostname:
             host = hostname
@@ -157,6 +197,7 @@
     with zipfile.ZipFile(file_path, "w") as zipMe:
         for feather_file in find_filenames():
             zipMe.write(feather_file, compress_type=zipfile.ZIP_DEFLATED)
+            logger.info(f"Added {feather_file} to zip")
 
     try:
         return FileResponse(path=file_path, filename=file_path, media_type="zip")
@@ -165,9 +206,12 @@
 
 @app.get("/terminate")
-def terminate_simulation():
+async def terminate_simulation():
     try:
         h.helicsCloseLibrary()
+        logger.info("Closed helics library")
+        async with app.state.simulation_lock:
+            app.state.simulation_state = SimulationState.SIMULATION_TERMINATED
         return JSONResponse({"detail": "Helics broker sucessfully closed"}, 200)
     except Exception as e:
         raise HTTPException(status_code=404, detail="Failed download ")
@@ -179,7 +223,7 @@
 def _get_feeder_info(component_map: dict):
     for host in component_map:
         if "feeder" in host:
             return host, component_map[host]
 
-def run_simulation():
+async def run_simulation():
     component_map, broker_ip, api_port = read_settings()
     feeder_host, feeder_port = _get_feeder_info(component_map)
     logger.info(f"{broker_ip}, {api_port}")
@@ -188,44 +232,123 @@
     broker = h.helicsCreateBroker("zmq", "", initstring)
     app.state.broker = broker
-    logging.info(broker)
+    logger.info(f"Created broker: {broker}")
     isconnected = h.helicsBrokerIsConnected(broker)
     logger.info(f"Broker connected: {isconnected}")
     logger.info(str(component_map))
-    replies = []
     broker_host = socket.gethostname()
["run"]) - logger.info(f"making a request to url - {url}") + async with httpx.AsyncClient(timeout=None) as client: + tasks = [] + for service_ip, service_port in component_map.items(): + if service_ip != broker_host: + url = build_url(service_ip, service_port, ["run"]) + logger.info(f"service_ip: {service_ip}, service_port: {service_port}") + logger.info(f"making a request to url - {url}") + + myobj = { + "broker_port": 23404, + "broker_ip": broker_ip, + "api_port": api_port, + "feeder_host": feeder_host, + "feeder_port": feeder_port, + } + logger.info(f"{myobj}") + task = asyncio.create_task(client.post(url[:-1], json=myobj)) + tasks.append(task) + + + sleep_delay_sec = 5 + max_wait_time_min = 5 + max_count = max_wait_time_min * 60 / 5 + count = 0 + + if tasks: + pending = set(tasks) + while pending: + async with app.state.simulation_lock: + app.state.simulation_state = SimulationState.WAITING_FOR_FEDERATES + done = {t for t in pending if t.done()} + for idx, t in enumerate(tasks): + state = ( + "done" + if t.done() + else "cancelled" + if t.cancelled() + else "pending" + ) + info = None + if t.done() and not t.cancelled(): + try: + res = t.result() + info = f"status_code={getattr(res, 'status_code', 'N/A')}" + except Exception as exc: + info = f"exception={exc}" + logger.info(f"Task {idx}: {state} {info or ''}") + + pending -= done + if pending and count <= max_count: + await asyncio.sleep(sleep_delay_sec) + count += 1 + elif pending and count > max_count: + async with app.state.simulation_lock: + app.state.simulation_state = SimulationState.FEDERATE_CONNECTION_TIMED_OUT + raise HTTPException( + status_code=500, + detail = F"Failed to start simulation after {max_wait_time_min} minutes. Please check the logs for more details.", + ) + else: + for idx, t in enumerate(tasks): + try: + res = t.result() + logger.info(f"Task {idx} succeeded: {getattr(res, 'status_code', 'N/A')}") + except Exception as exc: + logger.error(f"Task {idx} failed: {exc}") + + # max_wait_time_min = 1 + # max_count = max_wait_time_min * 60 / 5 + # count = 0 - myobj = { - "broker_port": 23404, - "broker_ip": broker_ip, - "api_port": api_port, - "feeder_host": feeder_host, - "feeder_port": feeder_port, - } - replies.append(grequests.post(url, json=myobj)) - grequests.map(replies) while h.helicsBrokerIsConnected(broker): - time.sleep(1) + await asyncio.sleep(sleep_delay_sec) + async with app.state.simulation_lock: + app.state.simulation_state = SimulationState.RUNNING + query_result = broker.query("broker", "current_state") + expected_federates = len(component_map)-1 + connected_federates = len(broker.query("broker", "federates")) + logger.info(f"Federates expected: {expected_federates}") + logger.info(f"Federates connected: {connected_federates}") + logger.info(f"Simulation state: {query_result['state']}") + logger.info(f"Global time: {get_time_data(broker)}") + + if expected_federates != connected_federates: + async with app.state.simulation_lock: + app.state.simulation_state = SimulationState.FEDERATE_RUNTIME_FAILURE + raise HTTPException( + error_code=500, + detail= "Federate failure. 
Check out the logs" + ) + + count += 1 h.helicsCloseLibrary() - + async with app.state.simulation_lock: + app.state.simulation_state = SimulationState.COMPLETED return @app.post("/run") async def run_feeder(background_tasks: BackgroundTasks): + async with app.state.simulation_lock: + app.state.simulation_state = SimulationState.STARTING_SIMULATION try: background_tasks.add_task(run_simulation) - response = ServerReply(detail="Task sucessfully added.").dict() + response = ServerReply(detail="Task sucessfully added.").model_dump() return JSONResponse({"detail": response}, 200) except Exception as e: err = traceback.format_exc() + async with app.state.simulation_lock: + app.state.simulation_state = SimulationState.SIMULATION_ERROR raise HTTPException(status_code=404, detail=str(err)) @@ -233,46 +356,40 @@ async def run_feeder(background_tasks: BackgroundTasks): async def configure(wiring_diagram: WiringDiagram): global WIRING_DIAGRAM WIRING_DIAGRAM = wiring_diagram + json.dump(wiring_diagram.model_dump(), open(WIRING_DIAGRAM_FILENAME, "w")) + try: + for component in wiring_diagram.components: + component_model = ComponentStruct(component=component, links=[]) + for link in wiring_diagram.links: + if link.target == component.name: + component_model.links.append(link) - json.dump(wiring_diagram.dict(), open(WIRING_DIAGRAM_FILENAME, "w")) - for component in wiring_diagram.components: - component_model = ComponentStruct(component=component, links=[]) - for link in wiring_diagram.links: - if link.target == component.name: - component_model.links.append(link) - - url = build_url(component.host, component.container_port, ["configure"]) - logger.info(f"making a request to url - {url}") - - r = requests.post(url, json=component_model.dict()) - assert ( - r.status_code == 200 - ), f"POST request to update configuration failed for url - {url}" - return JSONResponse( - ServerReply( - detail="Sucessfully updated config files for all containers" - ).dict(), - 200, - ) - - -@app.get("/status/") + url = build_url(component.host, component.container_port, ["configure"]) + logger.info(f"making a request to url - {url}") + + r = requests.post(url[:-1], json=component_model.model_dump()) + assert ( + r.status_code == 200 + ), f"POST request to update configuration failed for url - {url}" + + async with app.state.simulation_lock: + app.state.simulation_state = SimulationState.FEDERATE_CONFIGURATION_SUCESSS + return JSONResponse( + ServerReply( + detail="Sucessfully updated config files for all containers" + ).model_dump(), + 200, + ) + except Exception as _: + err = traceback.format_exc() + async with app.state.simulation_lock: + app.state.simulation_state = SimulationState.FEDERATE_CONFIGURATION_FAILURE + raise HTTPException(status_code=404, detail=str(err)) + +@app.get("/status") async def status(): - try: - name_2_timedata = {} - connected = h.helicsBrokerIsConnected(app.state.broker) - if connected: - for time_data in get_time_data(app.state.broker): - if (time_data.name not in name_2_timedata) or ( - name_2_timedata[time_data.name] != time_data - ): - name_2_timedata[time_data.name] = time_data - return {"connected": connected, "timedata": name_2_timedata, "error": False} - except AttributeError as e: - return {"reply": str(e), "error": True} + return {"Simulation status": app.state.simulation_state.value} if __name__ == "__main__": uvicorn.run(app, host="0.0.0.0", port=int(os.environ["PORT"])) - # test_function() - # read_settings() diff --git a/lindistflow_federate/requirements.txt 
index b17efad..339cb89 100644
--- a/lindistflow_federate/requirements.txt
+++ b/lindistflow_federate/requirements.txt
@@ -1,3 +1,4 @@
+
 helics>=3.4.0
 pydantic>=1.7,<2
 cvxpy
diff --git a/measuring_federate/Dockerfile b/measuring_federate/Dockerfile
index 29f1b71..e9d6ecc 100644
--- a/measuring_federate/Dockerfile
+++ b/measuring_federate/Dockerfile
@@ -1,6 +1,7 @@
 FROM python:3.10.6-slim-bullseye
 RUN apt-get update
 RUN apt-get install -y git ssh
+RUN apt-get install -y build-essential cmake git python3-dev
 RUN mkdir MeasurementComponent
 COPY . ./MeasurementComponent
 WORKDIR ./MeasurementComponent
diff --git a/measuring_federate/requirements.txt b/measuring_federate/requirements.txt
index a2b7050..fa80af9 100644
--- a/measuring_federate/requirements.txt
+++ b/measuring_federate/requirements.txt
@@ -6,5 +6,5 @@ pandas
 fastapi
 uvicorn
 requests
-grequests
+httpx
 oedisi>=2.0.2,<3
diff --git a/measuring_federate/server.py b/measuring_federate/server.py
index f3632d3..621cd64 100644
--- a/measuring_federate/server.py
+++ b/measuring_federate/server.py
@@ -1,30 +1,41 @@
-from fastapi import FastAPI, BackgroundTasks, HTTPException
-from measuring_federate import run_simulator
-from fastapi.responses import JSONResponse
+from functools import cache
 import traceback
 import requests
-import uvicorn
-import socket
 import logging
-import sys
+import socket
 import json
 import os
 
+from fastapi import FastAPI, BackgroundTasks, HTTPException
+from measuring_federate import run_simulator
+from fastapi.responses import JSONResponse
+import uvicorn
+
 from oedisi.componentframework.system_configuration import ComponentStruct
 from oedisi.types.common import ServerReply, HeathCheck, DefaultFileNames
 from oedisi.types.common import BrokerConfig
 
 app = FastAPI()
 
-is_kubernetes_env = os.environ['SERVICE_NAME'] if 'SERVICE_NAME' in os.environ else None
+@cache
+def kubernetes_service():
+    if "KUBERNETES_SERVICE_NAME" in os.environ:
+        return os.environ["KUBERNETES_SERVICE_NAME"]  # works with kubernetes
+    elif "SERVICE_NAME" in os.environ:
+        return os.environ["SERVICE_NAME"]  # works with minikube
+    else:
+        return None
 
 def build_url(host:str, port:int, enpoint:list):
-    if is_kubernetes_env:
-        SERVICE_NAME = os.environ['SERVICE_NAME']
-        url = f"http://{host}.{SERVICE_NAME}:{port}/"
+
+    if kubernetes_service():
+        logging.info("Containers running in a kubernetes environment")
+        url = f"http://{host}.{kubernetes_service()}:{port}/"
     else:
+        logging.info("Containers running in a docker-compose environment")
         url = f"http://{host}:{port}/"
     url = url + "/".join(enpoint)
+    logging.info(f"Built url {url}")
     return url
 
 @app.get("/")
@@ -43,14 +54,15 @@ async def run_model(broker_config:BrokerConfig, background_tasks: BackgroundTask
     feeder_host = broker_config.feeder_host
     feeder_port = broker_config.feeder_port
     url = build_url(feeder_host, feeder_port, ['sensor'])
-    logging.info(url)
+    logging.info(f"Making a request to url - {url}")
     try:
         reply = requests.get(url)
         sensor_data = reply.json()
         if not sensor_data:
             msg = "empty sensor list"
             raise HTTPException(404, msg)
-        logging.info(sensor_data)
+        logging.info(f"Received sensor data {sensor_data}")
+        logging.info("Writing sensor data to sensors.json")
         with open("sensors.json", "w") as outfile:
             json.dump(sensor_data, outfile)
diff --git a/recorder/Dockerfile b/recorder/Dockerfile
index 82988ba..f6eb825 100644
--- a/recorder/Dockerfile
+++ b/recorder/Dockerfile
@@ -1,6 +1,7 @@
 FROM python:3.10.6-slim-bullseye
 RUN apt-get update
 RUN apt-get install -y git ssh
+RUN apt-get install -y build-essential cmake git python3-dev
 RUN mkdir Recorder
 COPY . ./Recorder
 WORKDIR ./Recorder
diff --git a/recorder/record_subscription.py b/recorder/record_subscription.py
index abd5e38..72a4b5e 100644
--- a/recorder/record_subscription.py
+++ b/recorder/record_subscription.py
@@ -11,11 +11,11 @@
 from oedisi.types.common import BrokerConfig
 from oedisi.types.data_types import MeasurementArray
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger("uvicorn.error")
 logger.addHandler(logging.StreamHandler())
 logger.setLevel(logging.INFO)
 
-
 class Recorder:
     def __init__(
         self,
diff --git a/recorder/server.py b/recorder/server.py
index 8e5249d..0f6a708 100644
--- a/recorder/server.py
+++ b/recorder/server.py
@@ -16,10 +16,9 @@
 from record_subscription import run_simulator
 
+logger = logging.getLogger("uvicorn.error")
 app = FastAPI()
 
-
 @app.get("/")
 def read_root():
     hostname = socket.gethostname()
@@ -48,8 +47,10 @@ def download_results():
 @app.post("/run")
 async def run_model(broker_config: BrokerConfig, background_tasks: BackgroundTasks):
-    logging.info(broker_config)
+    logger.info(broker_config)
     try:
+        logger.info("Adding task to background tasks")
         background_tasks.add_task(run_simulator, broker_config)
         response = ServerReply(detail="Task sucessfully added.").dict()
         return JSONResponse(response, 200)
diff --git a/system.json b/system.json
new file mode 100644
index 0000000..b10d15d
--- /dev/null
+++ b/system.json
@@ -0,0 +1,203 @@
+{
+    "name": "docker_test",
+    "components": [
+        {
+            "name": "feeder",
+            "type": "LocalFeeder",
+            "host": "feeder",
+            "container_port": 5678,
+            "parameters": {
+                "use_smartds": false,
+                "user_uploads_model": true,
+                "profile_location": "profiles",
+                "opendss_location": "opendss",
+                "existing_feeder_file": "opendss/master.dss",
+                "start_date": "2017-01-01 00:00:00",
+                "number_of_timesteps": 96,
+                "run_freq_sec": 900,
+                "topology_output": "topology.json"
+            }
+        },
+        {
+            "name": "recorder_voltage_real",
+            "type": "Recorder",
+            "host": "recorder-voltage-real",
+            "container_port": 5679,
+            "parameters": {
+                "feather_filename": "voltage_real.feather",
+                "csv_filename": "voltage_real.csv"
+            }
+        },
+        {
+            "name": "recorder_voltage_imag",
+            "type": "Recorder",
+            "host": "recorder-voltage-imag",
+            "container_port": 5680,
+            "parameters": {
+                "feather_filename": "voltage_imag.feather",
+                "csv_filename": "voltage_imag.csv"
+            }
+        },
+        {
+            "name": "recorder_voltage_mag",
+            "type": "Recorder",
+            "host": "recorder-voltage-mag",
+            "container_port": 5681,
+            "parameters": {
+                "feather_filename": "voltage_mag.feather",
+                "csv_filename": "voltage_mag.csv"
+            }
+        },
+        {
+            "name": "recorder_voltage_angle",
+            "type": "Recorder",
+            "host": "recorder-voltage-angle",
+            "container_port": 5682,
+            "parameters": {
+                "feather_filename": "voltage_angle.feather",
+                "csv_filename": "voltage_angle.csv"
+            }
+        },
+        {
+            "name": "state_estimator",
+            "type": "StateEstimatorComponent",
+            "host": "state-estimator",
+            "container_port": 5683,
+            "parameters": {
+                "algorithm_parameters": {"tol": 1e-5}
+            }
+        },
+        {
+            "name": "sensor_voltage_real",
+            "type": "MeasurementComponent",
+            "host": "sensor-voltage-real",
+            "container_port": 5684,
+            "parameters": {
+                "gaussian_variance": 0.0,
+                "random_percent": 0.0,
+                "measurement_file": "sensors.json"
+            }
+        },
+        {
+            "name": "sensor_voltage_magnitude",
+            "type": "MeasurementComponent",
+            "host": "sensor-voltage-magnitude",
+            "container_port": 5685,
+            "parameters": {
+                "gaussian_variance": 0.0,
"random_percent": 0.0, + "measurement_file": "sensors.json" + } + }, + { + "name": "sensor_voltage_imaginary", + "type": "MeasurementComponent", + "host": "sensor-voltage-imaginary", + "container_port": 5686, + "parameters": { + "gaussian_variance": 0.0, + "random_percent": 0.0, + "measurement_file": "sensors.json" + } + }, + { + "name": "sensor_power_real", + "type": "MeasurementComponent", + "host": "sensor-power-real", + "container_port": 5687, + "parameters": { + "gaussian_variance": 0.0, + "random_percent": 0.0, + "measurement_file": "sensors.json" + } + }, + { + "name": "sensor_power_imaginary", + "type": "MeasurementComponent", + "host": "sensor-power-imaginary", + "container_port": 5688, + "parameters": { + "gaussian_variance": 0.0, + "random_percent": 0.0, + "measurement_file": "sensors.json" + } + } + + ], + "links": [ + { + "source": "feeder", + "source_port": "voltages_magnitude", + "target": "sensor_voltage_magnitude", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "voltages_real", + "target": "sensor_voltage_real", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "voltages_imag", + "target": "sensor_voltage_imaginary", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "powers_real", + "target": "sensor_power_real", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "powers_imag", + "target": "sensor_power_imaginary", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "topology", + "target": "state_estimator", + "target_port": "topology" + }, + { + "source": "sensor_voltage_magnitude", + "source_port": "publication", + "target": "state_estimator", + "target_port": "voltages_magnitude" + }, + { + "source": "sensor_power_real", + "source_port": "publication", + "target": "state_estimator", + "target_port": "powers_real" + }, + { + "source": "sensor_power_imaginary", + "source_port": "publication", + "target": "state_estimator", + "target_port": "powers_imaginary" + }, + { + "source": "feeder", + "source_port": "voltages_real", + "target": "recorder_voltage_real", + "target_port": "subscription" + }, + { + "source": "feeder", + "source_port": "voltages_imag", + "target": "recorder_voltage_imag", + "target_port": "subscription" + }, + { + "source": "state_estimator", + "source_port": "voltage_angle", + "target": "recorder_voltage_angle", + "target_port": "subscription" + }, + { + "source": "state_estimator", + "source_port": "voltage_mag", + "target": "recorder_voltage_mag", + "target_port": "subscription" + } + ] +} diff --git a/wls_federate/Dockerfile b/wls_federate/Dockerfile index cc55cb6..56f6c3b 100644 --- a/wls_federate/Dockerfile +++ b/wls_federate/Dockerfile @@ -1,6 +1,7 @@ FROM python:3.10.6-slim-bullseye RUN apt-get update RUN apt-get install -y git ssh +RUN apt install build-essential cmake git python3-dev -y RUN mkdir StateEstimatorComponent COPY . 
 WORKDIR ./StateEstimatorComponent
diff --git a/workflow_runner.py b/workflow_runner.py
new file mode 100644
index 0000000..94f012b
--- /dev/null
+++ b/workflow_runner.py
@@ -0,0 +1,157 @@
+from requests.auth import HTTPBasicAuth
+from pathlib import Path
+import subprocess
+import json
+import os
+
+from dotenv import load_dotenv
+import requests
+from rich import print
+import docker
+
+
+load_dotenv()
+
+# --- Configuration ---
+DOCKERHUB_USERNAME_AL = os.getenv("DOCKERHUB_USERNAME_AL")
+DOCKERHUB_API_KEY_AL = os.getenv("DOCKERHUB_API_KEY_AL")
+MAILJET_API_KEY = os.environ["MAILJET_API_KEY"]
+MAILJET_API_SECRET = os.environ["MAILJET_API_SECRET"]
+TAG = os.environ["RELEASE_TAG"]
+
+
+def collect_components():
+    # Build and push an image for every component listed in components.json
+    print("Collecting components...")
+    components = json.load(open("components.json", "r"))
+    for component_name, component_path in components.items():
+        try:
+            print(f"Building image for component {component_name}...")
+            component_path = Path(component_path).parent
+            if not component_path.is_dir():
+                raise ValueError(f"Component path {component_path} is not a directory")
+            if not (component_path / "Dockerfile").is_file():
+                raise ValueError(f"No Dockerfile found in {component_path}")
+            build_and_push_docker_image(component_name, component_path, tag=TAG)
+        except Exception as e:
+            print(f"An unexpected error occurred: {e}")
+
+def build_and_push_docker_image(image_name, docker_file_path, tag="v0.0.1"):
+    client = docker.from_env()
+    REPOSITORY_NAME = f"{DOCKERHUB_USERNAME_AL}/{image_name}:{tag}".lower()
+    email_msg = f"Building Docker image: {REPOSITORY_NAME} from directory {docker_file_path}"
+
+    try:
+        # Build the image
+        image, build_logs = client.images.build(
+            path=str(docker_file_path),
+            tag=REPOSITORY_NAME,
+            rm=True,  # Remove intermediate containers after a successful build
+            nocache=False,  # Reuse cached layers where possible
+        )
+
+        email_msg += f"\nSuccessfully built image: {REPOSITORY_NAME}"
+
+        labels = image.attrs['Config'].get('Labels') or {}
+        author_names = []
+        author_emails = []
+        authors_label = labels.get("org.opencontainers.image.authors")
+        if authors_label:
+            for author in authors_label.split(","):
+                author = author.strip()
+                if "<" in author and ">" in author:
+                    name, email = author.split("<", 1)
+                    author_names.append(name.strip())
+                    author_emails.append(email.strip("> ").strip())
+                else:
+                    author_names.append(author)
+                    author_emails.append(None)
+
+        print(f"Image authors: names={author_names}, emails={author_emails}")
+
+    except docker.errors.BuildError as e:
+        email_msg += f"\nError building image: {e}"
+        print(f"Error building image: {e}")
+        return
+
+    print(f"2. Logging in to Docker Hub as {DOCKERHUB_USERNAME_AL}")
+    email_msg += f"\nLogging in to Docker Hub as {DOCKERHUB_USERNAME_AL}"
+
+    email_msg += f"\nPushing image to Docker Hub: {REPOSITORY_NAME}"
+    print(f"3. Pushing image to Docker Hub: {REPOSITORY_NAME}")
+    try:
+        # Push the image to Docker Hub
+        # The push operation returns an iterator of events
+        push_logs = client.images.push(
+            repository=f"{DOCKERHUB_USERNAME_AL}/{image_name}".lower(),
+            tag=tag,
+            stream=True,
+            decode=True
+        )
+        for line in push_logs:
+            if 'status' in line:
+                print(line['status'])
+            elif 'error' in line:
+                print(f"Error during push: {line['error']}")
+                email_msg += f"\nError during push: {line['error']}"
+
+        print("Image pushed successfully to Docker Hub.")
+        email_msg += "\nImage pushed successfully to Docker Hub."
+
+    except docker.errors.APIError as e:
+        print(f"Docker API Error during push: {e}")
+        email_msg += f"\nDocker API Error during push: {e}"
+    except Exception as e:
+        print(f"An unexpected error occurred: {e}")
+        email_msg += f"\nAn unexpected error occurred: {e}"
+
+    for name, email in zip(author_names, author_emails):
+        if not email:
+            print(f"Skipping author '{name}' - no email available")
+            continue
+
+        payload = {
+            "Messages": [
+                {
+                    "From": {
+                        "Email": "aadil.latif@gmail.com",
+                        "Name": "Aadil Latif"
+                    },
+                    "To": [
+                        {
+                            "Email": email,
+                            "Name": name
+                        }
+                    ],
+                    "Subject": "OEDISI GitHub Workflow Notification",
+                    "TextPart": email_msg,
+                }
+            ]
+        }
+
+        try:
+            response = requests.post(
+                "https://api.mailjet.com/v3.1/send",
+                json=payload,
+                auth=HTTPBasicAuth(MAILJET_API_KEY, MAILJET_API_SECRET),
+                timeout=30,
+            )
+            response.raise_for_status()
+            print(f"Mail sent to {email}: {response.status_code}")
+        except requests.exceptions.HTTPError as http_err:
+            # Surface the server response body to help debugging 400 errors
+            try:
+                err_text = response.text
+            except Exception:
+                err_text = str(http_err)
+            print(f"Mailjet HTTP error for {email}: {response.status_code} - {err_text}")
+        except Exception as e:
+            print(f"Failed to send mail to {email}: {e}")
+
+
+if __name__ == "__main__":
+    # Log in via stdin so the API key does not appear in the process list
+    subprocess.run(
+        ["docker", "login", "-u", DOCKERHUB_USERNAME_AL, "--password-stdin"],
+        input=DOCKERHUB_API_KEY_AL,
+        text=True,
+        check=True,
+    )
+    collect_components()
\ No newline at end of file