Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file added .DS_Store
Binary file not shown.
53 changes: 53 additions & 0 deletions .github/workflows/publish-on-release.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
# Publish Docker images when a GitHub release is published, or on manual
# dispatch with an explicit tag. (Indentation restored: the flattened copy
# was not valid YAML.)
name: Publish Docker Images on Release

on:
  release:
    types: [published]
  workflow_dispatch:
    inputs:
      release_tag:
        description: 'Release tag to use (e.g., v1.2.3). If empty, uses release tag from the release event.'
        required: false
        default: 'v0.0.1'

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Log in to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME_AL }}
          password: ${{ secrets.DOCKERHUB_API_KEY_AL }}

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install docker python-dotenv requests rich

      - name: Run workflow_runner.py
        env:
          # Prefer the manually supplied tag; fall back to the release ref.
          RELEASE_TAG: ${{ github.event.inputs.release_tag || github.ref_name }}
          DOCKERHUB_USERNAME_AL: ${{ secrets.DOCKERHUB_USERNAME_AL }}
          DOCKERHUB_API_KEY_AL: ${{ secrets.DOCKERHUB_API_KEY_AL }}
          MAILJET_API_KEY: ${{ secrets.MAILJET_API_KEY }}
          MAILJET_API_SECRET: ${{ secrets.MAILJET_API_SECRET }}
        run: |
          python workflow_runner.py
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,6 @@ test_system_runner.json
recorder_*
local_feeder
build
.vscode/
.vscode/
.secrets
.env
3 changes: 1 addition & 2 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
FROM python:3.11.10

FROM python:3.13-slim-bullseye
WORKDIR /simulation

COPY scenarios/docker_system.json docker_system.json
COPY components.json .
COPY LocalFeeder LocalFeeder
Expand Down
Binary file added LocalFeeder/.DS_Store
Binary file not shown.
2 changes: 2 additions & 0 deletions LocalFeeder/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
FROM python:3.10.6-slim-bullseye
LABEL org.opencontainers.image.authors="Aadil Latif <aadil.latif@nrel.gov>"
RUN apt-get update
RUN apt-get install -y git ssh
RUN apt install build-essential cmake git python3-dev -y
RUN mkdir LocalFeeder
COPY . ./LocalFeeder
WORKDIR ./LocalFeeder
Expand Down
78 changes: 54 additions & 24 deletions LocalFeeder/FeederSimulator.py
Original file line number Diff line number Diff line change
@@ -1,43 +1,55 @@
"""Core class to abstract OpenDSS into Feeder class."""

import csv
import json
from typing import Dict, List, Optional, Set, Tuple
from time import strptime
from enum import Enum
import logging
import math
import os
import random
import math
import time
from enum import Enum
from time import strptime
from typing import Dict, List, Optional, Set, Tuple
import json
import csv
import os

import boto3
import numpy as np
import opendssdirect as dss
import xarray as xr
from botocore import UNSIGNED
from scipy.sparse import coo_matrix, csc_matrix
from botocore.config import Config
from pydantic import BaseModel
from botocore import UNSIGNED
import opendssdirect as dss
from dss_functions import (
get_capacitors,
get_generators,
get_loads,
get_pvsystems,
get_voltages,
get_loads,
)
import xarray as xr
import numpy as np
import boto3

from oedisi.types.data_types import (
Command,
InverterControl,
InverterControlMode,
InverterControl,
IncidenceList,
Command,
)
from pydantic import BaseModel
from scipy.sparse import coo_matrix, csc_matrix

logger = logging.getLogger(__name__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)

def command(command_str: str) -> str:
    """Execute a raw OpenDSS text command and return its textual result.

    Parameters
    ----------
    command_str : str
        The OpenDSS text command to run (e.g. ``"clear"`` or a redirect).

    Returns
    -------
    str
        The OpenDSS text result; empty string for many successful commands.

    Raises
    ------
    ValueError
        If the underlying OpenDSS call raises. The original exception is
        chained as the cause so the traceback is preserved.
    """
    # Lazy %-style args keep the logger from formatting when INFO is disabled.
    logger.info("OpenDSS Command: %s", command_str)
    try:
        dss.Text.Command(command_str)
        result = dss.Text.Result()
        logger.info("OpenDSS Reply: %s", result)
        return result
    except Exception as e:
        logger.error("OpenDSS Error: %s", e)
        # Chain the cause; the original code dropped it (bare ValueError(e)).
        raise ValueError(e) from e



def permutation(from_list, to_list):
"""Create permutation representing change in from_list to to_list.
Expand Down Expand Up @@ -153,13 +165,18 @@ def __init__(self, config: FeederConfig):
raise Exception("Set existing_feeder_file when uploading data")
else:
self._feeder_file = config.existing_feeder_file

logger.info(f"Using feeder file: {self._feeder_file}")
self.open_lines = config.open_lines
self.load_feeder()

if self._sensor_location is None:
logger.info("No sensor location provided, creating measurement lists")
self.create_measurement_lists()
else:
logger.info(
f"Using sensor location {self._sensor_location}, not creating measurement lists"
)

logger.info("Running initial snapshot")
self.snapshot_run()
assert self._state == OpenDSSState.SNAPSHOT_RUN, f"{self._state}"

Expand Down Expand Up @@ -278,13 +295,15 @@ def create_measurement_lists(
):
"""Initialize list of sensor locations for the measurement federate."""
random.seed(voltage_seed)
logger.info(f"Creating measurement lists")
os.makedirs("sensors", exist_ok=True)
voltage_subset = random.sample(
self._AllNodeNames,
math.floor(len(self._AllNodeNames) * float(percent_voltage) / 100),
)
with open(os.path.join("sensors", "voltage_ids.json"), "w") as fp:
json.dump(voltage_subset, fp, indent=4)
logger.info(f"Voltage sensors exported to sensors/voltage_ids.json")

random.seed(real_seed)
real_subset = random.sample(
Expand All @@ -293,6 +312,7 @@ def create_measurement_lists(
)
with open(os.path.join("sensors", "real_ids.json"), "w") as fp:
json.dump(real_subset, fp, indent=4)
logger.info(f"Real power sensors exported to sensors/real_ids.json")

random.seed(reactive_seed)
reactive_subset = random.sample(
Expand All @@ -301,6 +321,7 @@ def create_measurement_lists(
)
with open(os.path.join("sensors", "reactive_ids.json"), "w") as fp:
json.dump(reactive_subset, fp, indent=4)
logger.info(f"Reactive power sensors exported to sensors/reactive_ids.json")

def get_circuit_name(self):
"""Get name of current opendss circuit."""
Expand Down Expand Up @@ -334,11 +355,19 @@ def get_bus_coords(self) -> Dict[str, Tuple[float, float]] | None:

def load_feeder(self):
"""Load feeder once downloaded. Relies on legacy mode."""
logger.info("Loading feeder into OpenDSS")
# Real solution is kvarlimit with kvarmax
dss.Basic.LegacyModels(True)
dss.Text.Command("clear")
dss.Text.Command("redirect " + self._feeder_file)
result = dss.Text.Result()
logger.info("Enabling legacy models")
if not os.path.exists(self._feeder_file):
raise ValueError(f"Feeder file {self._feeder_file} not found")

command("clear")

base_path = os.getcwd()
logger.info("Current working directory: " + base_path)
result = command(f'redirect "{self._feeder_file}"')
logger.info(f"Feeder loaded")
if not result == "":
raise ValueError("Feeder not loaded: " + result)
self._circuit = dss.Circuit
Expand All @@ -357,7 +386,7 @@ def load_feeder(self):
self._source_indexes.append(
self._AllNodeNames.index(Bus.upper() + "." + str(phase))
)

logger.info("Setting up base voltages")
self.setup_vbase()

self._pvsystems = set()
Expand All @@ -366,12 +395,13 @@ def load_feeder(self):

if self.tap_setting is not None:
# Doesn't work with AutoTrans or 3-winding transformers.
dss.Text.Command(f"batchedit transformer..* wdg=2 tap={self.tap_setting}")
command(f"batchedit transformer..* wdg=2 tap={self.tap_setting}")

if self.open_lines is not None:
for l in self.open_lines:
self.open_line(l)
self._state = OpenDSSState.LOADED
logger.info("Feeder loaded into OpenDSS")

def disable_elements(self):
"""Disable most elements. Used in disabled_run."""
Expand Down
7 changes: 6 additions & 1 deletion LocalFeeder/sender_cosim.py
Original file line number Diff line number Diff line change
Expand Up @@ -487,14 +487,19 @@ def go_cosim(

def run_simulator(broker_config: BrokerConfig):
    """Load static_inputs.json and input_mapping.json, then run the co-simulation.

    Parameters
    ----------
    broker_config : BrokerConfig
        Connection settings for the co-simulation broker.
    """
    logger.info("Starting feeder simulator")
    logger.info("Loading static_inputs.json and input_mapping.json")
    with open("static_inputs.json") as f:
        parameters = json.load(f)
    with open("input_mapping.json") as f:
        input_mapping = json.load(f)
    # Lazy %-formatting; also drops the pointless f-prefix on constant strings.
    logger.info("Feeder parameters: %s", parameters)
    config = FeederConfig(**parameters)
    logger.info("Feeder config: %s", config)
    sim = FeederSimulator(config)
    logger.info("Simulator created, starting co-simulation")
    go_cosim(sim, config, input_mapping, broker_config)

    logger.info("Simulator Complete")

# Standalone entry point: run against a broker on localhost.
if __name__ == "__main__":
    run_simulator(BrokerConfig(broker_ip="127.0.0.1"))
37 changes: 26 additions & 11 deletions LocalFeeder/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,8 @@
from oedisi.types.common import ServerReply, HeathCheck, DefaultFileNames
from oedisi.types.common import BrokerConfig

logger = logging.getLogger("uvicorn.error")
REQUEST_TIMEOUT_SEC = 1200

app = FastAPI()

base_path = os.getcwd()
Expand Down Expand Up @@ -53,22 +53,37 @@ def read_root():

@app.get("/sensor")
async def sensor():
    """Return the sensor list, polling until sensors/sensors.json exists."""
    logger.info("Checking for sensors.json file")
    logger.info(os.getcwd())
    sensor_path = os.path.join(base_path, "sensors", "sensors.json")
    # NOTE(review): time.sleep blocks the event loop inside an async route;
    # consider `await asyncio.sleep(1)` once asyncio is imported — confirm.
    while not os.path.exists(sensor_path):
        time.sleep(1)
        logger.info("waiting %s", sensor_path)
    logger.info("success")
    # Close the file deterministically; the original leaked the handle
    # via json.load(open(...)).
    with open(sensor_path, "r") as fp:
        data = json.load(fp)
    return data

@app.post("/sensor")
async def sensor_post(sensor_list: list[str]):
    """Persist the posted sensor name list to sensors/sensors.json.

    Returns a 200 ServerReply on success; raises HTTP 500 with the
    traceback text if the write fails.
    """
    sensor_dir = os.path.join(base_path, "sensors")
    sensor_path = os.path.join(sensor_dir, "sensors.json")
    try:
        os.makedirs(sensor_dir, exist_ok=True)
        with open(sensor_path, "w") as f:
            json.dump(sensor_list, f, indent=2)
        response = ServerReply(
            detail=f"Wrote {len(sensor_list)} sensors to {sensor_path}"
        ).dict()
        return JSONResponse(response, 200)
    except Exception:
        # format_exc() already returns a str; the original wrapped it in a
        # redundant str() and bound an unused `as e`.
        err = traceback.format_exc()
        logger.error("Failed to write sensors file: %s", err)
        raise HTTPException(status_code=500, detail=err)

@app.post("/profiles")
async def upload_profiles(file: UploadFile):
try:
data = file.file.read()
if not file.filename.endswith(".zip"):
HTTPException(400, "Invalid file type. Only zipped profiles are accepted.")
raise HTTPException(400, "Invalid file type. Only zipped profiles are accepted.")

profile_path = "./profiles"

Expand Down Expand Up @@ -101,7 +116,7 @@ async def upload_model(file: UploadFile):
try:
data = file.file.read()
if not file.filename.endswith(".zip"):
HTTPException(
raise HTTPException(
400, "Invalid file type. Only zipped opendss models are accepted."
)

Expand All @@ -120,24 +135,24 @@ async def upload_model(file: UploadFile):
return JSONResponse(response, 200)

else:
HTTPException(400, "A valid opendss model should have a master.dss file.")
raise HTTPException(400, "A valid opendss model should have a master.dss file.")
except Exception as e:
HTTPException(500, "Unknown error while uploading userdefined opendss model.")
raise HTTPException(500, "Unknown error while uploading userdefined opendss model.")


@app.post("/run")
async def run_feeder(
    broker_config: BrokerConfig, background_tasks: BackgroundTasks
):
    """Kick off the feeder simulation as a FastAPI background task.

    Returns a 200 ServerReply immediately; raises HTTP 500 with the
    traceback text if scheduling fails.
    """
    logger.info(broker_config)
    try:
        background_tasks.add_task(run_simulator, broker_config)
        # Fixed user-facing typo: "sucessfully" -> "successfully".
        response = ServerReply(detail="Task successfully added.").dict()

        return JSONResponse(response, 200)
    except Exception as e:
        err = traceback.format_exc()
        logger.error("Error in /run: %s", err)
        # Chain the cause so the original traceback is preserved.
        raise HTTPException(500, str(err)) from e


@app.post("/configure")
Expand Down
Binary file added LocalFeeder/tests/.DS_Store
Binary file not shown.
Loading
Loading