diff --git a/flowfile_core/tests/flowfile/test_global_artifacts_kernel_integration.py b/flowfile_core/tests/flowfile/test_global_artifacts_kernel_integration.py
index c3b23556..5ceb8008 100644
--- a/flowfile_core/tests/flowfile/test_global_artifacts_kernel_integration.py
+++ b/flowfile_core/tests/flowfile/test_global_artifacts_kernel_integration.py
@@ -111,7 +111,7 @@ def test_publish_global_basic(self, kernel_manager_with_core: tuple[KernelManage
manager, kernel_id = kernel_manager_with_core
code = '''
-artifact_id = flowfile.publish_global("kernel_test_model", {"accuracy": 0.95, "type": "classifier"})
+artifact_id = ff_kernel.publish_global("kernel_test_model", {"accuracy": 0.95, "type": "classifier"})
print(f"Published artifact with ID: {artifact_id}")
'''
result: ExecuteResult = _run(
@@ -138,7 +138,7 @@ def test_publish_and_get_global_roundtrip(
# Publish an artifact
publish_code = '''
data = {"model_type": "random_forest", "n_estimators": 100, "accuracy": 0.92}
-artifact_id = flowfile.publish_global("rf_model_test", data)
+artifact_id = ff_kernel.publish_global("rf_model_test", data)
print(f"artifact_id={artifact_id}")
'''
result = _run(
@@ -157,7 +157,7 @@ def test_publish_and_get_global_roundtrip(
# Retrieve it
get_code = '''
-retrieved = flowfile.get_global("rf_model_test")
+retrieved = ff_kernel.get_global("rf_model_test")
assert retrieved["model_type"] == "random_forest", f"Got {retrieved}"
assert retrieved["n_estimators"] == 100
assert retrieved["accuracy"] == 0.92
@@ -184,7 +184,7 @@ def test_publish_global_with_metadata(
manager, kernel_id = kernel_manager_with_core
code = '''
-artifact_id = flowfile.publish_global(
+artifact_id = ff_kernel.publish_global(
"tagged_model",
{"weights": [1.0, 2.0, 3.0]},
description="A test model with weights",
@@ -214,8 +214,8 @@ def test_list_global_artifacts(
# Publish two artifacts
setup_code = '''
-flowfile.publish_global("list_test_a", {"value": 1})
-flowfile.publish_global("list_test_b", {"value": 2})
+ff_kernel.publish_global("list_test_a", {"value": 1})
+ff_kernel.publish_global("list_test_b", {"value": 2})
print("Published two artifacts")
'''
result = _run(
@@ -234,7 +234,7 @@ def test_list_global_artifacts(
# List artifacts
list_code = '''
-artifacts = flowfile.list_global_artifacts()
+artifacts = ff_kernel.list_global_artifacts()
names = [a["name"] for a in artifacts]
assert "list_test_a" in names, f"list_test_a not found in {names}"
assert "list_test_b" in names, f"list_test_b not found in {names}"
@@ -262,18 +262,18 @@ def test_delete_global_artifact(
# Publish then delete
code = '''
# Publish
-flowfile.publish_global("to_delete", {"temp": True})
+ff_kernel.publish_global("to_delete", {"temp": True})
# Verify it exists
-obj = flowfile.get_global("to_delete")
+obj = ff_kernel.get_global("to_delete")
assert obj["temp"] is True
# Delete
-flowfile.delete_global_artifact("to_delete")
+ff_kernel.delete_global_artifact("to_delete")
# Verify it's gone
try:
- flowfile.get_global("to_delete")
+ ff_kernel.get_global("to_delete")
assert False, "Should have raised KeyError"
except KeyError:
print("Correctly deleted artifact")
@@ -301,7 +301,7 @@ def test_get_nonexistent_raises_key_error(
code = '''
try:
- flowfile.get_global("definitely_does_not_exist_12345")
+ ff_kernel.get_global("definitely_does_not_exist_12345")
print("ERROR: Should have raised KeyError")
except KeyError as e:
print(f"Correctly raised KeyError: {e}")
@@ -328,20 +328,20 @@ def test_versioning_on_republish(
code = '''
# Publish v1
-id1 = flowfile.publish_global("versioned_model", {"version": 1})
+id1 = ff_kernel.publish_global("versioned_model", {"version": 1})
# Publish v2 (same name)
-id2 = flowfile.publish_global("versioned_model", {"version": 2})
+id2 = ff_kernel.publish_global("versioned_model", {"version": 2})
# Should be different artifact IDs (different versions)
assert id2 != id1, f"Expected different IDs, got {id1} and {id2}"
# Get latest (should be v2)
-latest = flowfile.get_global("versioned_model")
+latest = ff_kernel.get_global("versioned_model")
assert latest["version"] == 2, f"Expected version 2, got {latest}"
# Get specific version
-v1 = flowfile.get_global("versioned_model", version=1)
+v1 = ff_kernel.get_global("versioned_model", version=1)
assert v1["version"] == 1, f"Expected version 1, got {v1}"
print("Versioning works correctly!")
@@ -401,10 +401,10 @@ def test_publish_global_in_flow(
input_schema.NodePromise(flow_id=1, node_id=2, node_type="python_script")
)
code = '''
-df = flowfile.read_input()
+df = ff_kernel.read_input()
# Publish a global artifact (persists beyond flow run)
-flowfile.publish_global("flow_published_model", {"trained_on": "flow_data"})
-flowfile.publish_output(df)
+ff_kernel.publish_global("flow_published_model", {"trained_on": "flow_data"})
+ff_kernel.publish_output(df)
'''
graph.add_python_script(
input_schema.NodePythonScript(
@@ -426,7 +426,7 @@ def test_publish_global_in_flow(
# Verify the global artifact was published by retrieving it
verify_code = '''
-model = flowfile.get_global("flow_published_model")
+model = ff_kernel.get_global("flow_published_model")
assert model["trained_on"] == "flow_data"
print("Flow-published global artifact verified!")
'''
@@ -476,10 +476,10 @@ def test_use_global_artifact_across_flows(
input_schema.NodePromise(flow_id=1, node_id=2, node_type="python_script")
)
publish_code = '''
-df = flowfile.read_input()
+df = ff_kernel.read_input()
# Publish global artifact in Flow 1
-flowfile.publish_global("cross_flow_artifact", {"source": "flow_1", "value": 42})
-flowfile.publish_output(df)
+ff_kernel.publish_global("cross_flow_artifact", {"source": "flow_1", "value": 42})
+ff_kernel.publish_output(df)
'''
graph1.add_python_script(
input_schema.NodePythonScript(
@@ -520,15 +520,15 @@ def test_use_global_artifact_across_flows(
consume_code = '''
import polars as pl
-df = flowfile.read_input().collect()
+df = ff_kernel.read_input().collect()
# Read global artifact from Flow 1
-artifact = flowfile.get_global("cross_flow_artifact")
+artifact = ff_kernel.get_global("cross_flow_artifact")
assert artifact["source"] == "flow_1", f"Expected flow_1, got {artifact}"
assert artifact["value"] == 42
# Add artifact value to output
result = df.with_columns(pl.lit(artifact["value"]).alias("from_global"))
-flowfile.publish_output(result)
+ff_kernel.publish_output(result)
'''
graph2.add_python_script(
input_schema.NodePythonScript(
@@ -579,11 +579,11 @@ def test_publish_numpy_array(
# Publish a numpy array
arr = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
-artifact_id = flowfile.publish_global("numpy_matrix", arr)
+artifact_id = ff_kernel.publish_global("numpy_matrix", arr)
print(f"Published numpy array, id={artifact_id}")
# Retrieve and verify
-retrieved = flowfile.get_global("numpy_matrix")
+retrieved = ff_kernel.get_global("numpy_matrix")
assert np.array_equal(retrieved, arr), f"Arrays don't match: {retrieved}"
print("Numpy array roundtrip successful!")
'''
@@ -617,11 +617,11 @@ def test_publish_polars_dataframe(
"name": ["Alice", "Bob", "Charlie"],
"score": [85.5, 92.0, 78.3],
})
-artifact_id = flowfile.publish_global("polars_df", df)
+artifact_id = ff_kernel.publish_global("polars_df", df)
print(f"Published Polars DataFrame, id={artifact_id}")
# Retrieve and verify
-retrieved = flowfile.get_global("polars_df")
+retrieved = ff_kernel.get_global("polars_df")
assert retrieved.equals(df), f"DataFrames don't match"
assert list(retrieved.columns) == ["id", "name", "score"]
print("Polars DataFrame roundtrip successful!")
@@ -665,11 +665,11 @@ def test_publish_nested_dict(
"target": "y",
},
}
-artifact_id = flowfile.publish_global("model_config", config)
+artifact_id = ff_kernel.publish_global("model_config", config)
print(f"Published nested config, id={artifact_id}")
# Retrieve and verify
-retrieved = flowfile.get_global("model_config")
+retrieved = ff_kernel.get_global("model_config")
assert retrieved["model"]["layers"] == [64, 128, 64]
assert retrieved["training"]["optimizer"]["lr"] == 0.001
print("Nested dict roundtrip successful!")
@@ -706,11 +706,11 @@ def predict(self, x):
# Publish custom object
model = ModelWrapper("linear", [1.0, 2.0, 3.0])
-artifact_id = flowfile.publish_global("custom_model", model)
+artifact_id = ff_kernel.publish_global("custom_model", model)
print(f"Published custom object, id={artifact_id}")
# Retrieve and verify
-retrieved = flowfile.get_global("custom_model")
+retrieved = ff_kernel.get_global("custom_model")
assert retrieved.name == "linear"
assert retrieved.weights == [1.0, 2.0, 3.0]
assert retrieved.predict([1, 1, 1]) == 6.0
@@ -748,7 +748,7 @@ def test_delete_nonexistent_raises_key_error(
code = '''
try:
- flowfile.delete_global_artifact("nonexistent_artifact_xyz")
+ ff_kernel.delete_global_artifact("nonexistent_artifact_xyz")
print("ERROR: Should have raised KeyError")
except KeyError as e:
print(f"Correctly raised KeyError: {e}")
@@ -775,11 +775,11 @@ def test_get_specific_version_not_found(
code = '''
# Publish version 1
-flowfile.publish_global("versioned_test", {"v": 1})
+ff_kernel.publish_global("versioned_test", {"v": 1})
# Try to get version 999 (doesn't exist)
try:
- flowfile.get_global("versioned_test", version=999)
+ ff_kernel.get_global("versioned_test", version=999)
print("ERROR: Should have raised KeyError")
except KeyError as e:
print(f"Correctly raised KeyError for missing version: {e}")
diff --git a/flowfile_core/tests/flowfile/test_kernel_integration.py b/flowfile_core/tests/flowfile/test_kernel_integration.py
index 1334a154..91677f55 100644
--- a/flowfile_core/tests/flowfile/test_kernel_integration.py
+++ b/flowfile_core/tests/flowfile/test_kernel_integration.py
@@ -127,7 +127,7 @@ def test_publish_and_list_artifacts(self, kernel_manager: tuple[KernelManager, s
kernel_id,
ExecuteRequest(
node_id=3,
- code='flowfile.publish_artifact("my_dict", {"a": 1, "b": 2})',
+ code='ff_kernel.publish_artifact("my_dict", {"a": 1, "b": 2})',
input_paths={},
output_dir="/shared/test_artifact",
),
@@ -152,9 +152,9 @@ def test_read_and_write_parquet(self, kernel_manager: tuple[KernelManager, str])
code = """
import polars as pl
-df = flowfile.read_input()
+df = ff_kernel.read_input()
df = df.with_columns((pl.col("x") * pl.col("y")).alias("product"))
-flowfile.publish_output(df)
+ff_kernel.publish_output(df)
"""
result: ExecuteResult = _run(
@@ -196,11 +196,11 @@ def test_multiple_inputs(self, kernel_manager: tuple[KernelManager, str]):
)
code = """
-inputs = flowfile.read_inputs()
+inputs = ff_kernel.read_inputs()
left = inputs["left"][0].collect()
right = inputs["right"][0].collect()
merged = left.join(right, on="id")
-flowfile.publish_output(merged)
+ff_kernel.publish_output(merged)
"""
result = _run(
manager.execute(
@@ -299,8 +299,8 @@ def test_python_script_passthrough(self, kernel_manager: tuple[KernelManager, st
graph.add_node_promise(node_promise_2)
code = """
-df = flowfile.read_input()
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -360,9 +360,9 @@ def test_python_script_transform(self, kernel_manager: tuple[KernelManager, str]
code = """
import polars as pl
-df = flowfile.read_input().collect()
+df = ff_kernel.read_input().collect()
df = df.with_columns((pl.col("val") * 10).alias("val_x10"))
-flowfile.publish_output(df)
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -464,9 +464,9 @@ def test_published_artifacts_recorded_in_context(self, kernel_manager: tuple[Ker
graph.add_node_promise(node_promise_2)
code = """
-df = flowfile.read_input()
-flowfile.publish_artifact("my_model", {"accuracy": 0.95})
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("my_model", {"accuracy": 0.95})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -513,9 +513,9 @@ def test_available_artifacts_computed_before_execution(self, kernel_manager: tup
node_promise_2 = input_schema.NodePromise(flow_id=1, node_id=2, node_type="python_script")
graph.add_node_promise(node_promise_2)
code_publish = """
-df = flowfile.read_input()
-flowfile.publish_artifact("trained_model", {"type": "RF"})
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("trained_model", {"type": "RF"})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -531,9 +531,9 @@ def test_available_artifacts_computed_before_execution(self, kernel_manager: tup
node_promise_3 = input_schema.NodePromise(flow_id=1, node_id=3, node_type="python_script")
graph.add_node_promise(node_promise_3)
code_consume = """
-df = flowfile.read_input()
-model = flowfile.read_artifact("trained_model")
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+model = ff_kernel.read_artifact("trained_model")
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -579,9 +579,9 @@ def test_artifacts_cleared_between_runs(self, kernel_manager: tuple[KernelManage
graph.add_node_promise(node_promise_2)
code = """
-df = flowfile.read_input()
-flowfile.publish_artifact("run_artifact", [1, 2, 3])
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("run_artifact", [1, 2, 3])
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -637,10 +637,10 @@ def test_multiple_artifacts_from_single_node(self, kernel_manager: tuple[KernelM
graph.add_node_promise(node_promise_2)
code = """
-df = flowfile.read_input()
-flowfile.publish_artifact("model", {"type": "classifier"})
-flowfile.publish_artifact("encoder", {"type": "label_encoder"})
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("model", {"type": "classifier"})
+ff_kernel.publish_artifact("encoder", {"type": "label_encoder"})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -688,9 +688,9 @@ def test_artifact_context_to_dict_after_run(self, kernel_manager: tuple[KernelMa
graph.add_node_promise(node_promise_2)
code = """
-df = flowfile.read_input()
-flowfile.publish_artifact("ctx_model", {"version": 1})
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("ctx_model", {"version": 1})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -750,12 +750,12 @@ def test_train_model_and_apply(self, kernel_manager: tuple[KernelManager, str]):
import numpy as np
import polars as pl
-df = flowfile.read_input().collect()
+df = ff_kernel.read_input().collect()
X = np.column_stack([df["x1"].to_numpy(), df["x2"].to_numpy(), np.ones(len(df))])
y_vals = df["y"].to_numpy()
coeffs = np.linalg.lstsq(X, y_vals, rcond=None)[0]
-flowfile.publish_artifact("linear_model", {"coefficients": coeffs.tolist()})
-flowfile.publish_output(df)
+ff_kernel.publish_artifact("linear_model", {"coefficients": coeffs.tolist()})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -774,13 +774,13 @@ def test_train_model_and_apply(self, kernel_manager: tuple[KernelManager, str]):
import numpy as np
import polars as pl
-df = flowfile.read_input().collect()
-model = flowfile.read_artifact("linear_model")
+df = ff_kernel.read_input().collect()
+model = ff_kernel.read_artifact("linear_model")
coeffs = np.array(model["coefficients"])
X = np.column_stack([df["x1"].to_numpy(), df["x2"].to_numpy(), np.ones(len(df))])
predictions = X @ coeffs
result = df.with_columns(pl.Series("predicted_y", predictions))
-flowfile.publish_output(result)
+ff_kernel.publish_output(result)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -845,9 +845,9 @@ def test_publish_delete_republish_access(self, kernel_manager: tuple[KernelManag
node_promise_2 = input_schema.NodePromise(flow_id=1, node_id=2, node_type="python_script")
graph.add_node_promise(node_promise_2)
code_a = """
-df = flowfile.read_input()
-flowfile.publish_artifact("artifact_model", {"version": 1, "weights": [0.5]})
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("artifact_model", {"version": 1, "weights": [0.5]})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -863,11 +863,11 @@ def test_publish_delete_republish_access(self, kernel_manager: tuple[KernelManag
node_promise_3 = input_schema.NodePromise(flow_id=1, node_id=3, node_type="python_script")
graph.add_node_promise(node_promise_3)
code_b = """
-df = flowfile.read_input()
-model = flowfile.read_artifact("artifact_model")
+df = ff_kernel.read_input()
+model = ff_kernel.read_artifact("artifact_model")
assert model["version"] == 1, f"Expected v1, got {model}"
-flowfile.delete_artifact("artifact_model")
-flowfile.publish_output(df)
+ff_kernel.delete_artifact("artifact_model")
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -883,9 +883,9 @@ def test_publish_delete_republish_access(self, kernel_manager: tuple[KernelManag
node_promise_4 = input_schema.NodePromise(flow_id=1, node_id=4, node_type="python_script")
graph.add_node_promise(node_promise_4)
code_c = """
-df = flowfile.read_input()
-flowfile.publish_artifact("artifact_model", {"version": 2, "weights": [0.9]})
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("artifact_model", {"version": 2, "weights": [0.9]})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -901,10 +901,10 @@ def test_publish_delete_republish_access(self, kernel_manager: tuple[KernelManag
node_promise_5 = input_schema.NodePromise(flow_id=1, node_id=5, node_type="python_script")
graph.add_node_promise(node_promise_5)
code_d = """
-df = flowfile.read_input()
-model = flowfile.read_artifact("artifact_model")
+df = ff_kernel.read_input()
+model = ff_kernel.read_artifact("artifact_model")
assert model["version"] == 2, f"Expected v2, got {model}"
-flowfile.publish_output(df)
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -957,9 +957,9 @@ def test_duplicate_publish_fails(self, kernel_manager: tuple[KernelManager, str]
node_promise_2 = input_schema.NodePromise(flow_id=1, node_id=2, node_type="python_script")
graph.add_node_promise(node_promise_2)
code_publish = """
-df = flowfile.read_input()
-flowfile.publish_artifact("model", "v1")
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("model", "v1")
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -975,9 +975,9 @@ def test_duplicate_publish_fails(self, kernel_manager: tuple[KernelManager, str]
node_promise_3 = input_schema.NodePromise(flow_id=1, node_id=3, node_type="python_script")
graph.add_node_promise(node_promise_3)
code_dup = """
-df = flowfile.read_input()
-flowfile.publish_artifact("model", "v2")
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("model", "v2")
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1040,10 +1040,10 @@ def test_multi_input_python_script(self, kernel_manager: tuple[KernelManager, st
code = """
import polars as pl
-df = flowfile.read_input().collect()
+df = ff_kernel.read_input().collect()
# Should contain all 4 rows from both inputs
assert len(df) == 4, f"Expected 4 rows, got {len(df)}"
-flowfile.publish_output(df)
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1115,10 +1115,10 @@ def test_multi_input_read_inputs_named(self, kernel_manager: tuple[KernelManager
code = """
import polars as pl
-df = flowfile.read_first().collect()
+df = ff_kernel.read_first().collect()
# read_first should return only the first input (2 rows, not 4)
assert len(df) == 2, f"Expected 2 rows from read_first, got {len(df)}"
-flowfile.publish_output(df)
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1204,12 +1204,12 @@ def test_artifact_survives_when_producer_skipped(
import numpy as np
import polars as pl
-df = flowfile.read_input().collect()
+df = ff_kernel.read_input().collect()
X = np.column_stack([df["x1"].to_numpy(), df["x2"].to_numpy(), np.ones(len(df))])
y_vals = df["y"].to_numpy()
coeffs = np.linalg.lstsq(X, y_vals, rcond=None)[0]
-flowfile.publish_artifact("linear_model", {"coefficients": coeffs.tolist()})
-flowfile.publish_output(df)
+ff_kernel.publish_artifact("linear_model", {"coefficients": coeffs.tolist()})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1233,13 +1233,13 @@ def test_artifact_survives_when_producer_skipped(
import numpy as np
import polars as pl
-df = flowfile.read_input().collect()
-model = flowfile.read_artifact("linear_model")
+df = ff_kernel.read_input().collect()
+model = ff_kernel.read_artifact("linear_model")
coeffs = np.array(model["coefficients"])
X = np.column_stack([df["x1"].to_numpy(), df["x2"].to_numpy(), np.ones(len(df))])
predictions = X @ coeffs
result = df.with_columns(pl.Series("predicted_y", predictions))
-flowfile.publish_output(result)
+ff_kernel.publish_output(result)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1270,8 +1270,8 @@ def test_artifact_survives_when_producer_skipped(
import numpy as np
import polars as pl
-df = flowfile.read_input().collect()
-model = flowfile.read_artifact("linear_model")
+df = ff_kernel.read_input().collect()
+model = ff_kernel.read_artifact("linear_model")
coeffs = np.array(model["coefficients"])
X = np.column_stack([df["x1"].to_numpy(), df["x2"].to_numpy(), np.ones(len(df))])
predictions = X @ coeffs
@@ -1280,7 +1280,7 @@ def test_artifact_survives_when_producer_skipped(
pl.Series("predicted_y", predictions),
pl.Series("residual", residuals),
)
-flowfile.publish_output(result)
+ff_kernel.publish_output(result)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1360,10 +1360,10 @@ def test_multiple_artifacts_survive_selective_clear(
input_schema.NodePromise(flow_id=1, node_id=2, node_type="python_script"),
)
producer_code = """
-df = flowfile.read_input()
-flowfile.publish_artifact("model", {"type": "linear", "coeff": 2.0})
-flowfile.publish_artifact("scaler", {"mean": 20.0, "std": 10.0})
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("model", {"type": "linear", "coeff": 2.0})
+ff_kernel.publish_artifact("scaler", {"mean": 20.0, "std": 10.0})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1384,13 +1384,13 @@ def test_multiple_artifacts_survive_selective_clear(
)
consumer_code_v1 = """
import polars as pl
-df = flowfile.read_input().collect()
-model = flowfile.read_artifact("model")
-scaler = flowfile.read_artifact("scaler")
+df = ff_kernel.read_input().collect()
+model = ff_kernel.read_artifact("model")
+scaler = ff_kernel.read_artifact("scaler")
result = df.with_columns(
(pl.col("val") * model["coeff"]).alias("scaled"),
)
-flowfile.publish_output(result)
+ff_kernel.publish_output(result)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1411,15 +1411,15 @@ def test_multiple_artifacts_survive_selective_clear(
# Change the consumer's code — also use the scaler now
consumer_code_v2 = """
import polars as pl
-df = flowfile.read_input().collect()
-model = flowfile.read_artifact("model")
-scaler = flowfile.read_artifact("scaler")
+df = ff_kernel.read_input().collect()
+model = ff_kernel.read_artifact("model")
+scaler = ff_kernel.read_artifact("scaler")
normalized = (pl.col("val") - scaler["mean"]) / scaler["std"]
result = df.with_columns(
(pl.col("val") * model["coeff"]).alias("scaled"),
normalized.alias("normalized"),
)
-flowfile.publish_output(result)
+ff_kernel.publish_output(result)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1478,9 +1478,9 @@ def test_rerun_producer_clears_old_artifacts(
input_schema.NodePromise(flow_id=1, node_id=2, node_type="python_script"),
)
code_v1 = """
-df = flowfile.read_input()
-flowfile.publish_artifact("model", {"version": 1})
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("model", {"version": 1})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1500,10 +1500,10 @@ def test_rerun_producer_clears_old_artifacts(
input_schema.NodePromise(flow_id=1, node_id=3, node_type="python_script"),
)
consumer_code = """
-df = flowfile.read_input()
-model = flowfile.read_artifact("model")
+df = ff_kernel.read_input()
+model = ff_kernel.read_artifact("model")
print(f"model version: {model['version']}")
-flowfile.publish_output(df)
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1526,9 +1526,9 @@ def test_rerun_producer_clears_old_artifacts(
# Change the PRODUCER (Node 2) — publish v2 of the artifact
code_v2 = """
-df = flowfile.read_input()
-flowfile.publish_artifact("model", {"version": 2})
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("model", {"version": 2})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1587,9 +1587,9 @@ def test_deleted_artifact_producer_reruns_on_consumer_change(
input_schema.NodePromise(flow_id=1, node_id=2, node_type="python_script"),
)
producer_code = """
-df = flowfile.read_input()
-flowfile.publish_artifact("linear_model", {"coefficients": [1.0, 2.0, 3.0]})
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_artifact("linear_model", {"coefficients": [1.0, 2.0, 3.0]})
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1610,12 +1610,12 @@ def test_deleted_artifact_producer_reruns_on_consumer_change(
)
consumer_code_v1 = """
import polars as pl
-df = flowfile.read_input().collect()
-model = flowfile.read_artifact("linear_model")
+df = ff_kernel.read_input().collect()
+model = ff_kernel.read_artifact("linear_model")
coeffs = model["coefficients"]
result = df.with_columns(pl.lit(coeffs[0]).alias("c0"))
-flowfile.publish_output(result)
-flowfile.delete_artifact("linear_model")
+ff_kernel.publish_output(result)
+ff_kernel.delete_artifact("linear_model")
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1640,15 +1640,15 @@ def test_deleted_artifact_producer_reruns_on_consumer_change(
# Change the consumer's code (node 3) — still deletes the artifact
consumer_code_v2 = """
import polars as pl
-df = flowfile.read_input().collect()
-model = flowfile.read_artifact("linear_model")
+df = ff_kernel.read_input().collect()
+model = ff_kernel.read_artifact("linear_model")
coeffs = model["coefficients"]
result = df.with_columns(
pl.lit(coeffs[0]).alias("c0"),
pl.lit(coeffs[1]).alias("c1"),
)
-flowfile.publish_output(result)
-flowfile.delete_artifact("linear_model")
+ff_kernel.publish_output(result)
+ff_kernel.delete_artifact("linear_model")
"""
graph.add_python_script(
input_schema.NodePythonScript(
@@ -1791,8 +1791,8 @@ def test_python_script_node_with_stopped_kernel(self, kernel_manager: tuple[Kern
graph.add_node_promise(node_promise_2)
code = """
-df = flowfile.read_input()
-flowfile.publish_output(df)
+df = ff_kernel.read_input()
+ff_kernel.publish_output(df)
"""
graph.add_python_script(
input_schema.NodePythonScript(
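The node-scoped hunks above all follow the same producer/consumer shape. A condensed sketch of the two nodes under test (again with `ff_kernel` assumed injected, not imported):

```python
import numpy as np
import polars as pl

# Producer node: fit a least-squares model and keep it as a node-scoped artifact.
df = ff_kernel.read_input().collect()
X = np.column_stack([df["x1"].to_numpy(), df["x2"].to_numpy(), np.ones(len(df))])
coeffs = np.linalg.lstsq(X, df["y"].to_numpy(), rcond=None)[0]
ff_kernel.publish_artifact("linear_model", {"coefficients": coeffs.tolist()})
ff_kernel.publish_output(df)

# Consumer node (a separate python_script node in the same flow):
df = ff_kernel.read_input().collect()
model = ff_kernel.read_artifact("linear_model")
coeffs = np.array(model["coefficients"])
X = np.column_stack([df["x1"].to_numpy(), df["x2"].to_numpy(), np.ones(len(df))])
ff_kernel.publish_output(df.with_columns(pl.Series("predicted_y", X @ coeffs)))
```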
diff --git a/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/FlowfileApiHelp.vue b/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/FlowfileApiHelp.vue
index cbb3b417..97edf19e 100644
--- a/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/FlowfileApiHelp.vue
+++ b/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/FlowfileApiHelp.vue
@@ -146,23 +146,23 @@ flowfile.publish_output(df)
import polars as pl
from sklearn.ensemble import RandomForestClassifier
-df = flowfile.read_input().collect()
+df = ff_kernel.read_input().collect()
X = df.select(["feature_1", "feature_2"]).to_numpy()
y = df.get_column("target").to_numpy()
model = RandomForestClassifier()
model.fit(X, y)
-flowfile.publish_artifact("model", model)
-flowfile.publish_output(flowfile.read_input())
+ff_kernel.publish_artifact("model", model)
+ff_kernel.publish_output(ff_kernel.read_input())
Apply a Model
import polars as pl
-model = flowfile.read_artifact("model")
-df = flowfile.read_input().collect()
+model = ff_kernel.read_artifact("model")
+df = ff_kernel.read_input().collect()
X = df.select(["feature_1", "feature_2"]).to_numpy()
predictions = model.predict(X)
@@ -170,30 +170,30 @@ predictions = model.predict(X)
result = df.with_columns(
pl.Series("prediction", predictions)
)
-flowfile.publish_output(result.lazy())
+ff_kernel.publish_output(result.lazy())
Publish a Global Artifact
from sklearn.ensemble import RandomForestClassifier
-model = flowfile.read_artifact("model")
-flowfile.publish_global("rf_model", model,
+model = ff_kernel.read_artifact("model")
+ff_kernel.publish_global("rf_model", model,
description="Trained random forest",
tags=["ml", "production"])
-flowfile.log_info("Model published to catalog")
+ff_kernel.log_info("Model published to catalog")
Multiple Inputs
import polars as pl
-inputs = flowfile.read_inputs()
+inputs = ff_kernel.read_inputs()
# inputs is a dict: {"main": [LazyFrame, ...]}
# Each connected input is a separate LazyFrame in the list
df1, df2 = inputs["main"]
combined = pl.concat([df1, df2])
-flowfile.publish_output(combined)
+ff_kernel.publish_output(combined)
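One detail worth keeping in mind when reading the "Multiple Inputs" snippet above: `read_inputs()` keys by connection name and holds a list of LazyFrames per key, one entry per connected edge, per the completion metadata below. A hedged sketch for an arbitrary number of connections on the `main` port:

```python
import polars as pl

# inputs maps connection name -> list of LazyFrames, one per connected edge.
inputs = ff_kernel.read_inputs()
frames = [lf.collect() for lf in inputs["main"]]
ff_kernel.publish_output(pl.concat(frames))
```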
diff --git a/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/flowfileCompletions.ts b/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/flowfileCompletions.ts
index 4f8e3f71..66fe3462 100644
--- a/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/flowfileCompletions.ts
+++ b/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/flowfileCompletions.ts
@@ -1,9 +1,9 @@
export const flowfileCompletionVals = [
- // flowfile module
+ // ff_kernel module
{
- label: "flowfile",
+ label: "ff_kernel",
type: "variable",
- info: "FlowFile API module for data I/O and artifacts",
+ info: "FlowFile kernel API for data I/O and artifacts",
},
// Data I/O functions
@@ -11,21 +11,21 @@ export const flowfileCompletionVals = [
label: "read_input",
type: "function",
info: "Read input DataFrame. Optional name parameter for named inputs.",
- detail: "flowfile.read_input(name?)",
+ detail: "ff_kernel.read_input(name?)",
apply: "read_input()",
},
{
label: "read_inputs",
type: "function",
info: "Read all inputs as a dict of LazyFrame lists (one per connection).",
- detail: "flowfile.read_inputs() -> dict[str, list[LazyFrame]]",
+ detail: "ff_kernel.read_inputs() -> dict[str, list[LazyFrame]]",
apply: "read_inputs()",
},
{
label: "publish_output",
type: "function",
info: "Write output DataFrame. Optional name parameter for named outputs.",
- detail: "flowfile.publish_output(df, name?)",
+ detail: "ff_kernel.publish_output(df, name?)",
apply: "publish_output(df)",
},
@@ -34,7 +34,7 @@ export const flowfileCompletionVals = [
label: "display",
type: "function",
info: "Display a rich object (matplotlib figure, plotly figure, PIL image, HTML string) in the output panel.",
- detail: "flowfile.display(obj, title?)",
+ detail: "ff_kernel.display(obj, title?)",
apply: "display(obj)",
},
@@ -43,28 +43,28 @@ export const flowfileCompletionVals = [
label: "publish_artifact",
type: "function",
info: "Store a Python object as a named artifact in kernel memory.",
- detail: 'flowfile.publish_artifact("name", obj)',
+ detail: 'ff_kernel.publish_artifact("name", obj)',
apply: 'publish_artifact("name", obj)',
},
{
label: "read_artifact",
type: "function",
info: "Retrieve a Python object from a named artifact.",
- detail: 'flowfile.read_artifact("name")',
+ detail: 'ff_kernel.read_artifact("name")',
apply: 'read_artifact("name")',
},
{
label: "delete_artifact",
type: "function",
info: "Remove a named artifact from kernel memory.",
- detail: 'flowfile.delete_artifact("name")',
+ detail: 'ff_kernel.delete_artifact("name")',
apply: 'delete_artifact("name")',
},
{
label: "list_artifacts",
type: "function",
info: "List all artifacts available in the kernel.",
- detail: "flowfile.list_artifacts()",
+ detail: "ff_kernel.list_artifacts()",
apply: "list_artifacts()",
},
@@ -73,28 +73,28 @@ export const flowfileCompletionVals = [
label: "publish_global",
type: "function",
info: "Persist a Python object to the global artifact store (survives across sessions).",
- detail: 'flowfile.publish_global("name", obj, description?, tags?, namespace_id?, fmt?)',
+ detail: 'ff_kernel.publish_global("name", obj, description?, tags?, namespace_id?, fmt?)',
apply: 'publish_global("name", obj)',
},
{
label: "get_global",
type: "function",
info: "Retrieve a Python object from the global artifact store.",
- detail: 'flowfile.get_global("name", version?, namespace_id?)',
+ detail: 'ff_kernel.get_global("name", version?, namespace_id?)',
apply: 'get_global("name")',
},
{
label: "list_global_artifacts",
type: "function",
info: "List available global artifacts with optional namespace/tag filters.",
- detail: "flowfile.list_global_artifacts(namespace_id?, tags?)",
+ detail: "ff_kernel.list_global_artifacts(namespace_id?, tags?)",
apply: "list_global_artifacts()",
},
{
label: "delete_global_artifact",
type: "function",
info: "Delete a global artifact by name, optionally a specific version.",
- detail: 'flowfile.delete_global_artifact("name", version?, namespace_id?)',
+ detail: 'ff_kernel.delete_global_artifact("name", version?, namespace_id?)',
apply: 'delete_global_artifact("name")',
},
@@ -103,28 +103,28 @@ export const flowfileCompletionVals = [
label: "log",
type: "function",
info: "Send a log message to the FlowFile log viewer.",
- detail: 'flowfile.log("message", level?)',
+ detail: 'ff_kernel.log("message", level?)',
apply: 'log("message")',
},
{
label: "log_info",
type: "function",
info: "Send an INFO log message to the FlowFile log viewer.",
- detail: 'flowfile.log_info("message")',
+ detail: 'ff_kernel.log_info("message")',
apply: 'log_info("message")',
},
{
label: "log_warning",
type: "function",
info: "Send a WARNING log message to the FlowFile log viewer.",
- detail: 'flowfile.log_warning("message")',
+ detail: 'ff_kernel.log_warning("message")',
apply: 'log_warning("message")',
},
{
label: "log_error",
type: "function",
info: "Send an ERROR log message to the FlowFile log viewer.",
- detail: 'flowfile.log_error("message")',
+ detail: 'ff_kernel.log_error("message")',
apply: 'log_error("message")',
},
diff --git a/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/utils.ts b/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/utils.ts
index d5af790d..070e402b 100644
--- a/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/utils.ts
+++ b/flowfile_frontend/src/renderer/app/components/nodes/node-types/elements/pythonScript/utils.ts
@@ -2,11 +2,11 @@ import type { NodePythonScript, PythonScriptInput } from "../../../../../types/n
export const DEFAULT_PYTHON_SCRIPT_CODE = `import polars as pl
-df = flowfile.read_input()
+df = ff_kernel.read_input()
# Your transformation here
-flowfile.publish_output(df)
+ff_kernel.publish_output(df)
`;
export const createPythonScriptNode = (
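The default template above is the starting point users see in a new python_script node. A hedged example of what it looks like once filled in (the column name is illustrative, not from the repo):

```python
import polars as pl

df = ff_kernel.read_input()
# Your transformation here, e.g. drop rows where "value" is null:
df = df.filter(pl.col("value").is_not_null())
ff_kernel.publish_output(df)
```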
diff --git a/kernel_runtime/README.md b/kernel_runtime/README.md
index 61762082..797493ea 100644
--- a/kernel_runtime/README.md
+++ b/kernel_runtime/README.md
@@ -6,7 +6,7 @@ A FastAPI-based Python code execution kernel that runs in isolated Docker contai
The kernel runtime provides:
- Isolated Python code execution via REST API
-- Built-in `flowfile` module for data I/O and artifact management
+- Built-in `ff_kernel` module for data I/O and artifact management
- Parquet-based data exchange using Polars LazyFrames
- Thread-safe in-memory artifact storage
- Multi-flow support with artifact isolation
@@ -86,7 +86,7 @@ curl -X POST http://localhost:9999/execute \
-H "Content-Type: application/json" \
-d '{
"node_id": "node_1",
- "code": "import polars as pl\ndf = flowfile.read_input()\nresult = df.collect()\nflowfile.publish_output(result)",
+ "code": "import polars as pl\ndf = ff_kernel.read_input()\nresult = df.collect()\nff_kernel.publish_output(result)",
"input_paths": {"main": ["/shared/input.parquet"]},
"output_dir": "/shared/output",
"flow_id": 1
@@ -134,24 +134,24 @@ curl -X POST http://localhost:9999/clear_node_artifacts \
-d '{"node_ids": ["node_1", "node_2"], "flow_id": 1}'
```
-## Using the `flowfile` Module
+## Using the `ff_kernel` Module
-When code is executed, the `flowfile` module is automatically injected into the namespace. Here's how to use it:
+When code is executed, the `ff_kernel` module is automatically injected into the namespace. Here's how to use it:
### Reading Input Data
```python
# Read the main input as a LazyFrame
-df = flowfile.read_input()
+df = ff_kernel.read_input()
# Read a named input
-df = flowfile.read_input(name="customers")
+df = ff_kernel.read_input(name="customers")
# Read only the first file of an input
-df = flowfile.read_first(name="main")
+df = ff_kernel.read_first(name="main")
# Read all inputs as a dictionary
-inputs = flowfile.read_inputs()
+inputs = ff_kernel.read_inputs()
# Returns: {"main": LazyFrame, "customers": LazyFrame, ...}
```
@@ -160,10 +160,10 @@ inputs = flowfile.read_inputs()
```python
# Publish a DataFrame or LazyFrame
result = df.collect()
-flowfile.publish_output(result)
+ff_kernel.publish_output(result)
# Publish with a custom name
-flowfile.publish_output(result, name="cleaned_data")
+ff_kernel.publish_output(result, name="cleaned_data")
```
### Artifact Management
@@ -173,28 +173,28 @@ Artifacts allow you to store Python objects in memory for use across executions:
```python
# Store an artifact
model = train_model(data)
-flowfile.publish_artifact("trained_model", model)
+ff_kernel.publish_artifact("trained_model", model)
# Retrieve an artifact
-model = flowfile.read_artifact("trained_model")
+model = ff_kernel.read_artifact("trained_model")
# List all artifacts in current flow
-artifacts = flowfile.list_artifacts()
+artifacts = ff_kernel.list_artifacts()
# Delete an artifact
-flowfile.delete_artifact("trained_model")
+ff_kernel.delete_artifact("trained_model")
```
### Logging
```python
# General logging
-flowfile.log("Processing started", level="INFO")
+ff_kernel.log("Processing started", level="INFO")
# Convenience methods
-flowfile.log_info("Step 1 complete")
-flowfile.log_warning("Missing values detected")
-flowfile.log_error("Failed to process record")
+ff_kernel.log_info("Step 1 complete")
+ff_kernel.log_warning("Missing values detected")
+ff_kernel.log_error("Failed to process record")
```
## Complete Example
@@ -203,7 +203,7 @@ flowfile.log_error("Failed to process record")
import polars as pl
# Read input data
-df = flowfile.read_input()
+df = ff_kernel.read_input()
# Transform the data
result = (
@@ -214,13 +214,13 @@ result = (
.collect()
)
-flowfile.log_info(f"Processed {result.height} categories")
+ff_kernel.log_info(f"Processed {result.height} categories")
# Store intermediate result as artifact
-flowfile.publish_artifact("category_totals", result)
+ff_kernel.publish_artifact("category_totals", result)
# Write output
-flowfile.publish_output(result)
+ff_kernel.publish_output(result)
```
## Pre-installed Packages
@@ -264,7 +264,7 @@ kernel_runtime/
├── pyproject.toml # Project configuration
├── kernel_runtime/
│ ├── main.py # FastAPI application and endpoints
-│ ├── flowfile_client.py # The flowfile module for code execution
+│ ├── flowfile_client.py # The ff_kernel module for code execution
│ └── artifact_store.py # Thread-safe artifact storage
└── tests/ # Test suite
```
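Because every call site changes the same way, existing user scripts can be migrated mechanically. A hedged helper (not part of this diff) that performs the same textual rewrite, including inside code held in string literals, which is exactly how the test files above carry their snippets:

```python
import re
from pathlib import Path

# Word boundary plus literal dot: matches "flowfile.read_input" but not
# "flowfile_client." or identifiers such as "my_flowfile".
PATTERN = re.compile(r"\bflowfile\.")

def migrate_script(path: Path) -> bool:
    """Rewrite flowfile.<fn> calls to ff_kernel.<fn>; return True if the file changed."""
    src = path.read_text()
    dst = PATTERN.sub("ff_kernel.", src)
    if dst != src:
        path.write_text(dst)
        return True
    return False
```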
diff --git a/kernel_runtime/kernel_runtime/main.py b/kernel_runtime/kernel_runtime/main.py
index 1fc9749f..3ab959c3 100644
--- a/kernel_runtime/kernel_runtime/main.py
+++ b/kernel_runtime/kernel_runtime/main.py
@@ -169,19 +169,19 @@ async def _lifespan(app: FastAPI) -> AsyncIterator[None]:
_mpl.use('Agg')
import matplotlib.pyplot as _plt
_original_show = _plt.show
- def _flowfile_show(*args, **kwargs):
+ def _ff_kernel_show(*args, **kwargs):
import matplotlib.pyplot as __plt
for _fig_num in __plt.get_fignums():
- flowfile.display(__plt.figure(_fig_num))
+ ff_kernel.display(__plt.figure(_fig_num))
__plt.close('all')
- _plt.show = _flowfile_show
+ _plt.show = _ff_kernel_show
except ImportError:
pass
"""
def _maybe_wrap_last_expression(code: str) -> str:
- """If the last statement is a bare expression, wrap it in flowfile.display().
+ """If the last statement is a bare expression, wrap it in ff_kernel.display().
This provides Jupyter-like behavior where the result of the last expression
is automatically displayed.
@@ -218,7 +218,7 @@ def _maybe_wrap_last_expression(code: str) -> str:
prefix = "\n".join(lines[: last.lineno - 1])
if prefix:
prefix += "\n"
- return prefix + f"flowfile.display({last_expr_text})\n"
+ return prefix + f"ff_kernel.display({last_expr_text})\n"
class ExecuteRequest(BaseModel):
@@ -314,11 +314,11 @@ async def execute(request: ExecuteRequest):
# Variables defined in one cell will be available in subsequent cells
exec_globals = _get_namespace(request.flow_id)
- # Always update flowfile reference (context changes between executions)
+ # Always update ff_kernel reference (context changes between executions)
# Include __name__ and __builtins__ so classes defined in user code
# get __module__ = "__main__" instead of "builtins", enabling cloudpickle
# to serialize them correctly.
- exec_globals["flowfile"] = flowfile_client
+ exec_globals["ff_kernel"] = flowfile_client
exec_globals["__builtins__"] = __builtins__
exec_globals["__name__"] = "__main__"
diff --git a/kernel_runtime/tests/test_main.py b/kernel_runtime/tests/test_main.py
index bbe4db43..94da07e1 100644
--- a/kernel_runtime/tests/test_main.py
+++ b/kernel_runtime/tests/test_main.py
@@ -96,12 +96,12 @@ def test_execution_time_tracked(self, client: TestClient):
assert data["success"] is True
assert data["execution_time_ms"] > 0
- def test_flowfile_module_available(self, client: TestClient):
+ def test_ff_kernel_module_available(self, client: TestClient):
resp = client.post(
"/execute",
json={
"node_id": 6,
- "code": "print(type(flowfile).__name__)",
+ "code": "print(type(ff_kernel).__name__)",
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -125,9 +125,9 @@ def test_read_and_write_parquet(self, client: TestClient, tmp_dir: Path):
code = (
"import polars as pl\n"
- "df = flowfile.read_input()\n"
+ "df = ff_kernel.read_input()\n"
"df = df.collect().with_columns((pl.col('x') * pl.col('y')).alias('product'))\n"
- "flowfile.publish_output(df)\n"
+ "ff_kernel.publish_output(df)\n"
)
resp = client.post(
@@ -164,11 +164,11 @@ def test_multiple_inputs(self, client: TestClient, tmp_dir: Path):
)
code = (
- "inputs = flowfile.read_inputs()\n"
+ "inputs = ff_kernel.read_inputs()\n"
"left = inputs['left'][0].collect()\n"
"right = inputs['right'][0].collect()\n"
"merged = left.join(right, on='id')\n"
- "flowfile.publish_output(merged)\n"
+ "ff_kernel.publish_output(merged)\n"
)
resp = client.post(
@@ -202,8 +202,8 @@ def test_multi_main_inputs_union(self, client: TestClient, tmp_dir: Path):
pl.DataFrame({"v": [3, 4]}).write_parquet(str(input_dir / "main_1.parquet"))
code = (
- "df = flowfile.read_input().collect()\n"
- "flowfile.publish_output(df)\n"
+ "df = ff_kernel.read_input().collect()\n"
+ "ff_kernel.publish_output(df)\n"
)
resp = client.post(
@@ -238,8 +238,8 @@ def test_read_first_via_execute(self, client: TestClient, tmp_dir: Path):
pl.DataFrame({"v": [30, 40]}).write_parquet(str(input_dir / "b.parquet"))
code = (
- "df = flowfile.read_first().collect()\n"
- "flowfile.publish_output(df)\n"
+ "df = ff_kernel.read_first().collect()\n"
+ "ff_kernel.publish_output(df)\n"
)
resp = client.post(
@@ -272,8 +272,8 @@ def test_publish_lazyframe_output(self, client: TestClient, tmp_dir: Path):
pl.DataFrame({"v": [10, 20]}).write_parquet(str(input_dir / "main.parquet"))
code = (
- "lf = flowfile.read_input()\n"
- "flowfile.publish_output(lf)\n"
+ "lf = ff_kernel.read_input()\n"
+ "ff_kernel.publish_output(lf)\n"
)
resp = client.post(
@@ -298,7 +298,7 @@ def test_publish_artifact_via_execute(self, client: TestClient):
"/execute",
json={
"node_id": 20,
- "code": 'flowfile.publish_artifact("my_dict", {"a": 1})',
+ "code": 'ff_kernel.publish_artifact("my_dict", {"a": 1})',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -315,8 +315,8 @@ def test_list_artifacts(self, client: TestClient):
json={
"node_id": 21,
"code": (
- 'flowfile.publish_artifact("item_a", [1, 2])\n'
- 'flowfile.publish_artifact("item_b", "hello")\n'
+ 'ff_kernel.publish_artifact("item_a", [1, 2])\n'
+ 'ff_kernel.publish_artifact("item_b", "hello")\n'
),
"flow_id": 1,
"input_paths": {},
@@ -337,7 +337,7 @@ def test_clear_artifacts(self, client: TestClient):
"/execute",
json={
"node_id": 22,
- "code": 'flowfile.publish_artifact("tmp", 42)',
+ "code": 'ff_kernel.publish_artifact("tmp", 42)',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -356,7 +356,7 @@ def test_health_shows_artifact_count(self, client: TestClient):
"/execute",
json={
"node_id": 23,
- "code": 'flowfile.publish_artifact("x", 1)',
+ "code": 'ff_kernel.publish_artifact("x", 1)',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -371,7 +371,7 @@ def test_duplicate_publish_fails(self, client: TestClient):
"/execute",
json={
"node_id": 24,
- "code": 'flowfile.publish_artifact("model", 1)',
+ "code": 'ff_kernel.publish_artifact("model", 1)',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -383,7 +383,7 @@ def test_duplicate_publish_fails(self, client: TestClient):
"/execute",
json={
"node_id": 25,
- "code": 'flowfile.publish_artifact("model", 2)',
+ "code": 'ff_kernel.publish_artifact("model", 2)',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -399,7 +399,7 @@ def test_delete_artifact_via_execute(self, client: TestClient):
"/execute",
json={
"node_id": 26,
- "code": 'flowfile.publish_artifact("temp", 99)',
+ "code": 'ff_kernel.publish_artifact("temp", 99)',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -409,7 +409,7 @@ def test_delete_artifact_via_execute(self, client: TestClient):
"/execute",
json={
"node_id": 27,
- "code": 'flowfile.delete_artifact("temp")',
+ "code": 'ff_kernel.delete_artifact("temp")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -429,7 +429,7 @@ def test_same_node_reexecution_clears_own_artifacts(self, client: TestClient):
"/execute",
json={
"node_id": 24,
- "code": 'flowfile.publish_artifact("model", "v1")',
+ "code": 'ff_kernel.publish_artifact("model", "v1")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -443,7 +443,7 @@ def test_same_node_reexecution_clears_own_artifacts(self, client: TestClient):
"/execute",
json={
"node_id": 24,
- "code": 'flowfile.publish_artifact("model", "v2")',
+ "code": 'ff_kernel.publish_artifact("model", "v2")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -457,7 +457,7 @@ def test_same_node_reexecution_clears_own_artifacts(self, client: TestClient):
"/execute",
json={
"node_id": 99,
- "code": 'v = flowfile.read_artifact("model"); print(v)',
+ "code": 'v = ff_kernel.read_artifact("model"); print(v)',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -472,7 +472,7 @@ def test_delete_then_republish_via_execute(self, client: TestClient):
"/execute",
json={
"node_id": 28,
- "code": 'flowfile.publish_artifact("model", "v1")',
+ "code": 'ff_kernel.publish_artifact("model", "v1")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -483,8 +483,8 @@ def test_delete_then_republish_via_execute(self, client: TestClient):
json={
"node_id": 29,
"code": (
- 'flowfile.delete_artifact("model")\n'
- 'flowfile.publish_artifact("model", "v2")\n'
+ 'ff_kernel.delete_artifact("model")\n'
+ 'ff_kernel.publish_artifact("model", "v2")\n'
),
"flow_id": 1,
"input_paths": {},
@@ -506,7 +506,7 @@ def test_delete_then_republish_via_execute(self, client: TestClient):
json={
"node_id": 30,
"code": (
- 'v = flowfile.read_artifact("model")\n'
+ 'v = ff_kernel.read_artifact("model")\n'
'print(v)\n'
),
"flow_id": 1,
@@ -526,7 +526,7 @@ def test_clear_node_artifacts_selective(self, client: TestClient):
"/execute",
json={
"node_id": 40,
- "code": 'flowfile.publish_artifact("model", {"v": 1})',
+ "code": 'ff_kernel.publish_artifact("model", {"v": 1})',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -536,7 +536,7 @@ def test_clear_node_artifacts_selective(self, client: TestClient):
"/execute",
json={
"node_id": 41,
- "code": 'flowfile.publish_artifact("scaler", {"v": 2})',
+ "code": 'ff_kernel.publish_artifact("scaler", {"v": 2})',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -561,7 +561,7 @@ def test_clear_node_artifacts_empty_list(self, client: TestClient):
"/execute",
json={
"node_id": 42,
- "code": 'flowfile.publish_artifact("keep_me", 42)',
+ "code": 'ff_kernel.publish_artifact("keep_me", 42)',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -578,7 +578,7 @@ def test_clear_node_artifacts_allows_republish(self, client: TestClient):
"/execute",
json={
"node_id": 43,
- "code": 'flowfile.publish_artifact("reuse", "v1")',
+ "code": 'ff_kernel.publish_artifact("reuse", "v1")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -589,7 +589,7 @@ def test_clear_node_artifacts_allows_republish(self, client: TestClient):
"/execute",
json={
"node_id": 43,
- "code": 'flowfile.publish_artifact("reuse", "v2")',
+ "code": 'ff_kernel.publish_artifact("reuse", "v2")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -606,8 +606,8 @@ def test_list_node_artifacts(self, client: TestClient):
json={
"node_id": 50,
"code": (
- 'flowfile.publish_artifact("a", 1)\n'
- 'flowfile.publish_artifact("b", 2)\n'
+ 'ff_kernel.publish_artifact("a", 1)\n'
+ 'ff_kernel.publish_artifact("b", 2)\n'
),
"flow_id": 1,
"input_paths": {},
@@ -618,7 +618,7 @@ def test_list_node_artifacts(self, client: TestClient):
"/execute",
json={
"node_id": 51,
- "code": 'flowfile.publish_artifact("c", 3)',
+ "code": 'ff_kernel.publish_artifact("c", 3)',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -657,12 +657,12 @@ def test_display_outputs_empty_by_default(self, client: TestClient):
assert data["display_outputs"] == []
def test_display_output_explicit(self, client: TestClient):
- """Execute flowfile.display() should return a display output."""
+ """Execute ff_kernel.display() should return a display output."""
resp = client.post(
"/execute",
json={
"node_id": 61,
- "code": 'flowfile.display("hello")',
+ "code": 'ff_kernel.display("hello")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -675,12 +675,12 @@ def test_display_output_explicit(self, client: TestClient):
assert data["display_outputs"][0]["data"] == "hello"
def test_display_output_html(self, client: TestClient):
- """Execute flowfile.display() with HTML should return HTML mime type."""
+ """Execute ff_kernel.display() with HTML should return HTML mime type."""
resp = client.post(
"/execute",
json={
"node_id": 62,
- "code": 'flowfile.display("
bold")',
+ "code": 'ff_kernel.display("
bold")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -698,7 +698,7 @@ def test_display_output_with_title(self, client: TestClient):
"/execute",
json={
"node_id": 63,
- "code": 'flowfile.display("data", title="My Chart")',
+ "code": 'ff_kernel.display("data", title="My Chart")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -716,9 +716,9 @@ def test_multiple_display_outputs(self, client: TestClient):
json={
"node_id": 64,
"code": (
- 'flowfile.display("first")\n'
- 'flowfile.display("second")\n'
- 'flowfile.display("third")\n'
+ 'ff_kernel.display("first")\n'
+ 'ff_kernel.display("second")\n'
+ 'ff_kernel.display("third")\n'
),
"flow_id": 1,
"input_paths": {},
@@ -739,7 +739,7 @@ def test_display_outputs_cleared_between_executions(self, client: TestClient):
"/execute",
json={
"node_id": 65,
- "code": 'flowfile.display("from first call")',
+ "code": 'ff_kernel.display("from first call")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -751,7 +751,7 @@ def test_display_outputs_cleared_between_executions(self, client: TestClient):
"/execute",
json={
"node_id": 66,
- "code": 'flowfile.display("from second call")',
+ "code": 'ff_kernel.display("from second call")',
"flow_id": 1,
"input_paths": {},
"output_dir": "",
@@ -769,7 +769,7 @@ def test_display_output_on_error_still_collected(self, client: TestClient):
json={
"node_id": 67,
"code": (
- 'flowfile.display("before error")\n'
+ 'ff_kernel.display("before error")\n'
'raise ValueError("oops")\n'
),
"flow_id": 1,
@@ -900,7 +900,7 @@ def test_same_artifact_name_different_flows(self, client: TestClient):
"/execute",
json={
"node_id": 1,
- "code": 'flowfile.publish_artifact("model", "flow1_model")',
+ "code": 'ff_kernel.publish_artifact("model", "flow1_model")',
"input_paths": {},
"output_dir": "",
"flow_id": 1,
@@ -912,7 +912,7 @@ def test_same_artifact_name_different_flows(self, client: TestClient):
"/execute",
json={
"node_id": 1,
- "code": 'flowfile.publish_artifact("model", "flow2_model")',
+ "code": 'ff_kernel.publish_artifact("model", "flow2_model")',
"input_paths": {},
"output_dir": "",
"flow_id": 2,
@@ -925,7 +925,7 @@ def test_same_artifact_name_different_flows(self, client: TestClient):
"/execute",
json={
"node_id": 99,
- "code": 'v = flowfile.read_artifact("model"); print(v)',
+ "code": 'v = ff_kernel.read_artifact("model"); print(v)',
"input_paths": {},
"output_dir": "",
"flow_id": 1,
@@ -938,7 +938,7 @@ def test_same_artifact_name_different_flows(self, client: TestClient):
"/execute",
json={
"node_id": 99,
- "code": 'v = flowfile.read_artifact("model"); print(v)',
+ "code": 'v = ff_kernel.read_artifact("model"); print(v)',
"input_paths": {},
"output_dir": "",
"flow_id": 2,
@@ -953,7 +953,7 @@ def test_flow_cannot_read_other_flows_artifact(self, client: TestClient):
"/execute",
json={
"node_id": 1,
- "code": 'flowfile.publish_artifact("secret", "hidden")',
+ "code": 'ff_kernel.publish_artifact("secret", "hidden")',
"input_paths": {},
"output_dir": "",
"flow_id": 1,
@@ -964,7 +964,7 @@ def test_flow_cannot_read_other_flows_artifact(self, client: TestClient):
"/execute",
json={
"node_id": 2,
- "code": 'flowfile.read_artifact("secret")',
+ "code": 'ff_kernel.read_artifact("secret")',
"input_paths": {},
"output_dir": "",
"flow_id": 2,
@@ -981,7 +981,7 @@ def test_reexecution_only_clears_own_flow(self, client: TestClient):
"/execute",
json={
"node_id": 5,
- "code": 'flowfile.publish_artifact("model", "f1v1")',
+ "code": 'ff_kernel.publish_artifact("model", "f1v1")',
"input_paths": {},
"output_dir": "",
"flow_id": 1,
@@ -992,7 +992,7 @@ def test_reexecution_only_clears_own_flow(self, client: TestClient):
"/execute",
json={
"node_id": 5,
- "code": 'flowfile.publish_artifact("model", "f2v1")',
+ "code": 'ff_kernel.publish_artifact("model", "f2v1")',
"input_paths": {},
"output_dir": "",
"flow_id": 2,
@@ -1004,7 +1004,7 @@ def test_reexecution_only_clears_own_flow(self, client: TestClient):
"/execute",
json={
"node_id": 5,
- "code": 'flowfile.publish_artifact("model", "f1v2")',
+ "code": 'ff_kernel.publish_artifact("model", "f1v2")',
"input_paths": {},
"output_dir": "",
"flow_id": 1,
@@ -1017,7 +1017,7 @@ def test_reexecution_only_clears_own_flow(self, client: TestClient):
"/execute",
json={
"node_id": 99,
- "code": 'v = flowfile.read_artifact("model"); print(v)',
+ "code": 'v = ff_kernel.read_artifact("model"); print(v)',
"input_paths": {},
"output_dir": "",
"flow_id": 2,
@@ -1032,7 +1032,7 @@ def test_list_artifacts_filtered_by_flow(self, client: TestClient):
"/execute",
json={
"node_id": 1,
- "code": 'flowfile.publish_artifact("a", 1)',
+ "code": 'ff_kernel.publish_artifact("a", 1)',
"input_paths": {},
"output_dir": "",
"flow_id": 10,
@@ -1042,7 +1042,7 @@ def test_list_artifacts_filtered_by_flow(self, client: TestClient):
"/execute",
json={
"node_id": 2,
- "code": 'flowfile.publish_artifact("b", 2)',
+ "code": 'ff_kernel.publish_artifact("b", 2)',
"input_paths": {},
"output_dir": "",
"flow_id": 20,
@@ -1065,7 +1065,7 @@ def test_clear_node_artifacts_scoped_to_flow(self, client: TestClient):
"/execute",
json={
"node_id": 5,
- "code": 'flowfile.publish_artifact("model", "f1")',
+ "code": 'ff_kernel.publish_artifact("model", "f1")',
"input_paths": {},
"output_dir": "",
"flow_id": 1,
@@ -1075,7 +1075,7 @@ def test_clear_node_artifacts_scoped_to_flow(self, client: TestClient):
"/execute",
json={
"node_id": 5,
- "code": 'flowfile.publish_artifact("model", "f2")',
+ "code": 'ff_kernel.publish_artifact("model", "f2")',
"input_paths": {},
"output_dir": "",
"flow_id": 2,