Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,10 @@ jobs:
run: |
sudo apt-get update
sudo apt-get install stress-ng
sudo apt-get install graphviz libgraphviz-dev
pip install docker
pip install pygraphviz
pip install pydot

- name: Check package install
run: |
Expand Down
54 changes: 54 additions & 0 deletions tests/translators/Dockerfile.pycompss
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
# docker build --platform amd64 -t wfcommons-dev-pycompss -f Dockerfile.pycompss .
# docker run -it --rm -v `pwd`:/home/wfcommons wfcommons-dev-pycompss /bin/bash

FROM compss/pycompss

LABEL org.containers.image.authors="henric@hawaii.edu"

# Set the timezone, then update the package index and install all OS packages in
# a single layer: `apt-get update` must live in the same RUN as `install` so the
# index can never go stale, and the index is removed afterwards to keep the
# image small. DEBIAN_FRONTEND is exported only inside this RUN so it does not
# leak into the runtime environment.
RUN echo "America/Los_Angeles" > /etc/timezone && \
    export DEBIAN_FRONTEND=noninteractive && \
    apt-get update && \
    apt-get install -y --fix-missing \
        cmake \
        cmake-data \
        gcc \
        gcc-multilib \
        git \
        make \
        pkg-config \
        python3 \
        python3-pip \
        stress-ng \
        sudo \
        tzdata \
        vim \
        wget \
        autoconf \
        automake \
        autotools-dev \
        gfortran \
        libboost-dev \
        libtool \
        libxml2-dev \
    && rm -rf /var/lib/apt/lists/*

# Make "python" resolve to python3
RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1

# Python packages needed by WfCommons and the generated benchmarks
# (--no-cache-dir keeps pip's download cache out of the image layer)
RUN python3 -m pip install --no-cache-dir \
        filelock \
        jsonschema \
        matplotlib \
        networkx \
        pandas \
        pathos \
        pyyaml \
        requests \
        scipy

# Install PyCOMPSs and its CLI (build dependencies were installed above)
RUN python3 -m pip install --no-cache-dir pycompss pycompss-cli

# Add a non-root wfcommons user with passwordless sudo
RUN useradd -ms /bin/bash wfcommons && \
    adduser wfcommons sudo && \
    echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
ENV PATH="$PATH:/home/wfcommons/.local/bin/"

USER wfcommons
WORKDIR /home/wfcommons
68 changes: 68 additions & 0 deletions tests/translators/Dockerfile.swiftt
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
# docker build --platform amd64 -t wfcommons-dev-swiftt -f Dockerfile.swiftt .
# docker run -it --rm -v `pwd`:/home/wfcommons wfcommons-dev-swiftt /bin/bash

FROM amd64/ubuntu:noble

LABEL org.containers.image.authors="henric@hawaii.edu"

# Set the timezone, then update the package index and install every OS package
# in a single layer: `apt-get update` must share a RUN with `install` so the
# index can never go stale, and the index is removed afterwards to keep the
# image small. DEBIAN_FRONTEND is exported only inside this RUN so it does not
# leak into the runtime environment.
RUN echo "America/Los_Angeles" > /etc/timezone && \
    export DEBIAN_FRONTEND=noninteractive && \
    apt-get update && \
    apt-get install -y --fix-missing \
        cmake \
        cmake-data \
        gcc \
        gcc-multilib \
        git \
        make \
        pkg-config \
        python3 \
        python3-pip \
        redis \
        stress-ng \
        sudo \
        tzdata \
        vim \
        wget \
    && rm -rf /var/lib/apt/lists/*

# Make "python" resolve to python3
RUN update-alternatives --install /usr/bin/python python /usr/bin/python3 1

# Python packages needed by WfCommons. Ubuntu noble marks the system interpreter
# as externally managed (PEP 668), hence --break-system-packages.
RUN python3 -m pip install --break-system-packages --no-cache-dir \
        filelock \
        jsonschema \
        matplotlib \
        networkx \
        pandas \
        pathos \
        pyyaml \
        requests \
        scipy \
    && python3 -m pip install --break-system-packages --no-cache-dir --upgrade setuptools

# Add a non-root wfcommons user with passwordless sudo
RUN useradd -ms /bin/bash wfcommons && \
    adduser wfcommons sudo && \
    echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
ENV PATH="$PATH:/opt/conda/bin/:/home/wfcommons/.local/bin/"

# Install Miniconda. Download, install, and delete the installer in ONE layer --
# removing it in a later RUN would not shrink the image.
RUN wget -q https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \
    sh ./Miniconda3-latest-Linux-x86_64.sh -b -p /opt/conda && \
    rm -f ./Miniconda3-latest-Linux-x86_64.sh
ENV PATH="/opt/conda/bin:$PATH"
# Auto-answer "yes" to conda prompts (kept in the runtime env on purpose, so
# conda commands run by the test harness are non-interactive too)
ENV CONDA_ALWAYS_YES=true

# Accept the Anaconda channel ToS, then create the Swift/T environment
RUN conda tos accept --override-channels --channel https://repo.anaconda.com/pkgs/main && \
    conda tos accept --override-channels --channel https://repo.anaconda.com/pkgs/r && \
    conda create -n swiftt-env python=3.11 -y

# Sanity-check the environment, then install Swift/T and its Python dependencies
RUN conda run -n swiftt-env python --version
RUN conda run -n swiftt-env pip install --no-cache-dir flowcept py-cpuinfo psutil redis
RUN conda run -n swiftt-env conda install -y -c conda-forge gcc zsh zlib pathos
RUN conda run -n swiftt-env conda install -y -c swift-t swift-t
ENV CONDA_PREFIX=/opt/conda/envs/swiftt-env
ENV CONDA_EXE=/opt/conda/bin/conda
ENV PATH="/opt/conda/envs/swiftt-env/bin:$PATH"

USER wfcommons
WORKDIR /home/wfcommons

2 changes: 1 addition & 1 deletion tests/translators/build_docker_docker_images.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

set -e

for backend in "dask" "parsl" "nextflow" "airflow" "bash" "taskvine" "cwl" "pegasus"; do
# Build one testing image per supported workflow backend
for backend in "dask" "parsl" "nextflow" "airflow" "bash" "taskvine" "cwl" "pegasus" "swiftt"; do
    echo "Building ${backend} Docker image..."
    docker build --platform linux/amd64 -t "wfcommons/wfcommons-testing-${backend}" -f "Dockerfile.${backend}" .
done
85 changes: 22 additions & 63 deletions tests/translators/test_translators.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,78 +28,17 @@
from wfcommons.wfbench import TaskVineTranslator
from wfcommons.wfbench import CWLTranslator
from wfcommons.wfbench import PegasusTranslator
from wfcommons.wfbench import SwiftTTranslator

from wfcommons.wfinstances import PegasusLogsParser

# def _start_docker_container(backend, mounted_dir, working_dir, bin_dir, command=None):
# if command is None:
# command = ["sleep", "infinity"]
# # Pulling the Docker image
# client = docker.from_env()
# image_name = f"wfcommons/wfcommons-testing-{backend}"
#
# try:
# image = client.images.get(image_name)
# sys.stderr.write(f"Image '{image_name}' is available locally\n")
# except ImageNotFound:
# sys.stderr.write(f"Pulling image '{image_name}'...\n")
# client.images.pull(image_name)
#
# # Launch the docker container to actually run the translated workflow
# sys.stderr.write("Starting Docker container...\n")
# container = client.containers.run(
# image=image_name,
# command=command,
# volumes={mounted_dir: {'bind': mounted_dir, 'mode': 'rw'}},
# working_dir=working_dir,
# tty=True,
# detach=True
# )
#
# # Installing WfCommons on container
# _install_WfCommons_on_container(container)
#
# # Copy over the wfbench and cpu-benchmark executables to where they should go on the container
# if bin_dir:
# exit_code, output = container.exec_run(["sh", "-c", "sudo cp -f `which wfbench` " + bin_dir],
# stdout=True, stderr=True)
# exit_code, output = container.exec_run(["sh", "-c", "sudo cp -f `which cpu-benchmark` " + bin_dir],
# stdout=True, stderr=True)
#
# return container

# def _make_tarfile_of_wfcommons():
# source_dir = os.getcwd() # This assumes the testing is run from the root
# tar_stream = io.BytesIO()
# with tarfile.open(fileobj=tar_stream, mode='w') as tar:
# tar.add(source_dir, arcname=os.path.basename(source_dir))
# tar_stream.seek(0)
# return tar_stream
#
# def _install_WfCommons_on_container(container):
# # sys.stderr.write("Installing WfCommons on the container...\n")
# # Copy the WfCommons code to it (removing stuff that should be removed)
# target_path = '/tmp/' # inside container
# tar_data = _make_tarfile_of_wfcommons()
# container.put_archive(target_path, tar_data)
# exit_code, output = container.exec_run("sudo /bin/rm -rf /tmp/WfCommons/build/", stdout=True, stderr=True)
# exit_code, output = container.exec_run("sudo /bin/rm -rf /tmp/WfCommons/*.egg-info/", stdout=True, stderr=True)
# exit_code, output = container.exec_run("sudo /bin/rm -rf /tmp/WfCommons/bin/cpu-benchmark.o", stdout=True,
# stderr=True)
# exit_code, output = container.exec_run("sudo /bin/rm -rf /tmp/WfCommons/bin/cpu-benchmark", stdout=True,
# stderr=True)
#
# # Install WfCommons on the container (to install wfbench and cpu-benchmark really)
# exit_code, output = container.exec_run("sudo python3 -m pip install . --break-system-packages",
# workdir="/tmp/WfCommons", stdout=True, stderr=True)

def _create_workflow_benchmark():
# Create a workflow benchmark object to generate specifications based on a recipe (in /tmp/, whatever)
desired_num_tasks = 45
benchmark_full_path = "/tmp/blast-benchmark-{desired_num_tasks}.json"
shutil.rmtree(benchmark_full_path, ignore_errors=True)
benchmark = WorkflowBenchmark(recipe=BlastRecipe, num_tasks=desired_num_tasks)
benchmark.create_benchmark(pathlib.Path("/tmp/"), cpu_work=1, data=1, percent_cpu=0.6)
benchmark.create_benchmark(pathlib.Path("/tmp/"), cpu_work=10, data=10, percent_cpu=0.6)
with open(f"/tmp/blast-benchmark-{desired_num_tasks}.json", "r") as f:
generated_json = json.load(f)
num_tasks = len(generated_json["workflow"]["specification"]["tasks"])
Expand Down Expand Up @@ -132,6 +71,10 @@ def _additional_setup_pegasus(container):
"python3 ./pegasus_workflow.py"],
stdout=True, stderr=True)

def _additional_setup_swiftt(container):
    """Swift/T-specific container setup: launch a Redis server in the background.

    :param container: the docker container in which the workflow will run.
    """
    # detach=True makes exec_run return immediately; the server keeps
    # running inside the container for the duration of the test.
    redis_command = ["bash", "-c", "redis-server"]
    container.exec_run(cmd=redis_command, detach=True, stdout=True, stderr=True)

additional_setup_methods = {
"dask": noop,
Expand All @@ -142,6 +85,7 @@ def _additional_setup_pegasus(container):
"taskvine": _additional_setup_taskvine,
"cwl": noop,
"pegasus": _additional_setup_pegasus,
"swiftt": _additional_setup_swiftt,
}

#############################################################################
Expand Down Expand Up @@ -185,6 +129,7 @@ def run_workflow_airflow(container, num_tasks, str_dirpath):
stderr=True)
# Kill the container
container.remove(force=True)

# Check sanity
assert (exit_code == 0)
assert (output.decode().count("completed") == num_tasks * 2)
Expand Down Expand Up @@ -229,6 +174,17 @@ def run_workflow_pegasus(container, num_tasks, str_dirpath):
assert(exit_code == 0)
assert("success" in output.decode())

def run_workflow_swiftt(container, num_tasks, str_dirpath):
    """Run the translated Swift/T workflow inside the container and sanity-check it.

    :param container: the docker container running the Swift/T image.
    :param num_tasks: number of tasks in the benchmark workflow.
    :param str_dirpath: path of the directory holding the translated workflow
        (unused here; kept for signature consistency with the other backends).
    """
    # Run the workflow!
    exit_code, output = container.exec_run(cmd="swift-t workflow.swift", stdout=True, stderr=True)
    # Kill the container
    container.remove(force=True)
    # Check sanity: the run must succeed and every task must report completion
    assert (exit_code == 0)
    assert (output.decode().count("completed!") == num_tasks)

run_workflow_methods = {
"dask": run_workflow_dask,
"parsl": run_workflow_parsl,
Expand All @@ -238,6 +194,7 @@ def run_workflow_pegasus(container, num_tasks, str_dirpath):
"taskvine": run_workflow_taskvine,
"cwl": run_workflow_cwl,
"pegasus": run_workflow_pegasus,
"swiftt": run_workflow_swiftt,
}

translator_classes = {
Expand All @@ -249,6 +206,7 @@ def run_workflow_pegasus(container, num_tasks, str_dirpath):
"taskvine": TaskVineTranslator,
"cwl": CWLTranslator,
"pegasus": PegasusTranslator,
"swiftt": SwiftTTranslator,
}

logs_parser_classes = {
Expand All @@ -269,6 +227,7 @@ class TestTranslators:
"taskvine",
"cwl",
"pegasus",
"swiftt",
])
@pytest.mark.unit
# @pytest.mark.skip(reason="tmp")
Expand Down
30 changes: 30 additions & 0 deletions tests/unit/common/test_workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,37 @@ def test_workflow_json_generation(self):
# Compare the two jsons!
assert(original_json == written_json)

@pytest.mark.unit
def test_workflow_dot_file(self):
    """Round-trip a workflow through Graphviz DOT format.

    Downloads a real WfInstances JSON workflow, writes it out as a .dot
    file, checks the node/edge counts in the generated text, then reads the
    .dot file back and checks the rebuilt workflow has the same counts.
    NOTE(review): requires network access to raw.githubusercontent.com.
    """
    # Put a JSON file in /tmp
    url = "https://raw.githubusercontent.com/wfcommons/WfInstances/refs/heads/main/makeflow/blast/blast-chameleon-small-001.json"
    response = requests.get(url)
    local_file_name = url.split("/")[-1]
    with open("/tmp/" + local_file_name, 'wb') as f:
        f.write(response.content)

    # Create an instance from the downloaded JSON file
    instance = Instance(pathlib.Path("/tmp") / local_file_name)

    # Capture some metrics to compare against after the round-trip
    num_tasks = len(instance.workflow.tasks)
    num_dependencies = len(instance.workflow.edges)

    # Create a dot file
    dot_path = pathlib.Path("/tmp/written_workflow.dot")
    instance.workflow.write_dot(dot_path)
    assert dot_path.exists()
    with open(str(dot_path), "r", encoding="utf-8") as f:
        content = f.read()
    assert(num_tasks == content.count("label") - 1)  # Extra "label" in file for \N
    assert(num_dependencies == content.count("->"))  # One "->" per dependency edge

    # Read it back and check that nothing was lost
    instance.workflow.read_dot(dot_path)
    assert(num_tasks == len(instance.workflow.tasks))
    assert(num_tasks == len(instance.workflow.nodes))
    assert(num_dependencies == len(instance.workflow.edges))



Expand Down
9 changes: 8 additions & 1 deletion wfcommons/common/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,14 @@ def read_dot(self, dot_file_path: Optional[pathlib.Path] = None) -> None:
raise ModuleNotFoundError(
f"\'pydot\' package not found: call to {type(self).__name__}.read_dot() failed.")

graph = nx.drawing.nx_pydot.read_dot(dot_file_path)
# graph = nx.drawing.nx_pydot.read_dot(str(dot_file_path))
graph = nx.nx_agraph.read_dot(str(dot_file_path))

# clear everything
self.tasks.clear()
self.tasks_parents.clear()
self.tasks_children.clear()
self.clear()

tasks_map = {}
for node in graph.nodes(data=True):
Expand Down
Loading