diff --git a/hyperbench/__init__.py b/hyperbench/__init__.py new file mode 100644 index 0000000..cd87b24 --- /dev/null +++ b/hyperbench/__init__.py @@ -0,0 +1,11 @@ +from . import hyperlink_prediction +from . import negative_sampling +from . import pipelines +from . import utils + +__all__ = [ + "hyperlink_prediction", + "negative_sampling", + "pipelines", + "utils" +] \ No newline at end of file diff --git a/hyperbench/hyperlink_prediction/__init__.py b/hyperbench/hyperlink_prediction/__init__.py new file mode 100644 index 0000000..bf0ab41 --- /dev/null +++ b/hyperbench/hyperlink_prediction/__init__.py @@ -0,0 +1,7 @@ +from . import datasets, loader, models + +__all__ = [ + "datasets", + "loader", + "models" +] \ No newline at end of file diff --git a/hyperlink_prediction/datasets/__init__.py b/hyperbench/hyperlink_prediction/datasets/__init__.py similarity index 80% rename from hyperlink_prediction/datasets/__init__.py rename to hyperbench/hyperlink_prediction/datasets/__init__.py index 30e7147..3ed86d5 100644 --- a/hyperlink_prediction/datasets/__init__.py +++ b/hyperbench/hyperlink_prediction/datasets/__init__.py @@ -1,10 +1,11 @@ from .arb_dataset import ARBDataset from .dataset_hypergraph import DatasetHyperGraph -from .imdb_dataset import IMDBHypergraphDataset, ARXIVHypergraphDataset, COURSERAHypergraphDataset +from .imdb_dataset import IMDBHypergraphDataset, ARXIVHypergraphDataset, COURSERAHypergraphDataset, CHLPBaseDataset __all__ = data_classes = [ 'DatasetHyperGraph', 'ARBDataset', + 'CHLPBaseDataset', 'IMDBHypergraphDataset', 'COURSERAHypergraphDataset', 'ARXIVHypergraphDataset' diff --git a/hyperlink_prediction/datasets/arb_dataset.py b/hyperbench/hyperlink_prediction/datasets/arb_dataset.py similarity index 97% rename from hyperlink_prediction/datasets/arb_dataset.py rename to hyperbench/hyperlink_prediction/datasets/arb_dataset.py index 28a0684..8621456 100644 --- a/hyperlink_prediction/datasets/arb_dataset.py +++ 
b/hyperbench/hyperlink_prediction/datasets/arb_dataset.py @@ -2,7 +2,7 @@ import tarfile import torch import torch.nn.functional as F -from hyperlink_prediction.datasets.dataset_hypergraph import DatasetHyperGraph +from .dataset_hypergraph import DatasetHyperGraph from torch_geometric.data.hypergraph_data import HyperGraphData from os import remove, listdir @@ -22,7 +22,7 @@ class ARBDataset(DatasetHyperGraph): "tags-math-sx": "1eDevpF6EZs19rLouNpiKGLIlFOLUfKKG", "contact-high-school": "1VA2P62awVYgluOIh1W4NZQQgkQCBk-Eu", "contact-primary-school": "1sBHSEIyvVKavAho524Ro4cKL66W6rn-t", - "NDC-substances": "1dLJt3qzAOYieay03Sp9h8ZfVMiU-nMqC" + "NDC-substances": "1mGOg0DMh46J2zQdimSXMde1pKNtfAdh8" } diff --git a/hyperlink_prediction/datasets/dataset_hypergraph.py b/hyperbench/hyperlink_prediction/datasets/dataset_hypergraph.py similarity index 100% rename from hyperlink_prediction/datasets/dataset_hypergraph.py rename to hyperbench/hyperlink_prediction/datasets/dataset_hypergraph.py diff --git a/hyperlink_prediction/datasets/imdb_dataset.py b/hyperbench/hyperlink_prediction/datasets/imdb_dataset.py similarity index 98% rename from hyperlink_prediction/datasets/imdb_dataset.py rename to hyperbench/hyperlink_prediction/datasets/imdb_dataset.py index 74e9819..c532518 100644 --- a/hyperlink_prediction/datasets/imdb_dataset.py +++ b/hyperbench/hyperlink_prediction/datasets/imdb_dataset.py @@ -1,7 +1,7 @@ import torch import pickle from abc import ABC -from hyperlink_prediction.datasets.dataset_hypergraph import DatasetHyperGraph +from .dataset_hypergraph import DatasetHyperGraph from torch_geometric.data.hypergraph_data import HyperGraphData diff --git a/hyperlink_prediction/loader/__init__.py b/hyperbench/hyperlink_prediction/loader/__init__.py similarity index 100% rename from hyperlink_prediction/loader/__init__.py rename to hyperbench/hyperlink_prediction/loader/__init__.py diff --git a/hyperlink_prediction/loader/dataloader.py 
b/hyperbench/hyperlink_prediction/loader/dataloader.py similarity index 91% rename from hyperlink_prediction/loader/dataloader.py rename to hyperbench/hyperlink_prediction/loader/dataloader.py index 6da46e2..9962436 100644 --- a/hyperlink_prediction/loader/dataloader.py +++ b/hyperbench/hyperlink_prediction/loader/dataloader.py @@ -1,10 +1,10 @@ import torch from typing import List, Any from torch.utils.data import DataLoader -from utils.set_negative_samplig_method import setNegativeSamplingAlgorithm -from negative_sampling.hypergraph_negative_sampling_algorithm import HypergraphNegativeSampler, MotifHypergraphNegativeSampler +from ...utils.data_and_sampling_selector import setNegativeSamplingAlgorithm +from ...negative_sampling.hypergraph_negative_sampling_algorithm import HypergraphNegativeSampler, MotifHypergraphNegativeSampler from torch_geometric.data.hypergraph_data import HyperGraphData -from hyperlink_prediction.datasets.dataset_hypergraph import DatasetHyperGraph +from ..datasets.dataset_hypergraph import DatasetHyperGraph class DatasetLoader(DataLoader): diff --git a/hyperbench/hyperlink_prediction/models/__init__.py b/hyperbench/hyperlink_prediction/models/__init__.py new file mode 100644 index 0000000..33a2ab4 --- /dev/null +++ b/hyperbench/hyperlink_prediction/models/__init__.py @@ -0,0 +1,9 @@ +from .hyperlink_prediction_base import HyperlinkPredictor +from .hyperlink_prediction_algorithm import CommonNeighbors +from .hyperlink_prediction_result import HyperlinkPredictionResult + +__all__ = data_classes = [ + 'HyperlinkPredictor', + 'CommonNeighbors', + 'HyperlinkPredictionResult' +] \ No newline at end of file diff --git a/hyperbench/hyperlink_prediction/models/hyperlink_prediction_algorithm.py b/hyperbench/hyperlink_prediction/models/hyperlink_prediction_algorithm.py new file mode 100644 index 0000000..e5b9e1c --- /dev/null +++ b/hyperbench/hyperlink_prediction/models/hyperlink_prediction_algorithm.py @@ -0,0 +1,45 @@ +import torch +from torch 
import Tensor +from .hyperlink_prediction_base import HyperlinkPredictor +from .hyperlink_prediction_result import HyperlinkPredictionResult + +class CommonNeighbors(HyperlinkPredictor): + def __init__(self, num_node=None, device='cpu'): + super().__init__(num_node, device) + self.H = None + self.num_node = num_node + self.num_hyperlink = None + + def fit(self, X, y, edge_index, *args, **kwargs): + + self.num_node = int(edge_index[0].max().item()) + 1 + self.num_hyperlink = int(edge_index[1].max().item()) + 1 + + sparse = torch.sparse_coo_tensor( + edge_index, + torch.ones(edge_index.shape[1], device=self.device), + (self.num_node, edge_index.max().item() + 1), + device=self.device + ) + + self.H = sparse.to_dense() + return self + + def score_CN(self, H, u, v): + return torch.dot(H[u], H[v]).item() + + def predict(self, edge_index: Tensor): + if self.H is None: + if edge_index is None: + raise ValueError("Model not fitted. Call fit() first.") + self.fit(None, None, edge_index) + H = self.H + + CN_matrix = torch.matmul(H, H.T) + + new_edges = torch.nonzero(torch.triu(CN_matrix, diagonal=1)).T + + return HyperlinkPredictionResult( + edge_index=new_edges, + device=self.device + ) diff --git a/hyperbench/hyperlink_prediction/models/hyperlink_prediction_base.py b/hyperbench/hyperlink_prediction/models/hyperlink_prediction_base.py new file mode 100644 index 0000000..30daf2a --- /dev/null +++ b/hyperbench/hyperlink_prediction/models/hyperlink_prediction_base.py @@ -0,0 +1,18 @@ +import torch +import numpy as np +from abc import abstractmethod +from .hyperlink_prediction_result import HyperlinkPredictionResult +class HyperlinkPredictor(): + + def __init__(self, num_node: int, device: torch.device = torch.device('cpu')): + self.num_node = num_node + self.device = device + + @abstractmethod + def fit(self, X, y, edge_index, *args, **kwargs): + pass + + @abstractmethod + def predict(self, X, edge_index: torch.Tensor) -> HyperlinkPredictionResult: + pass + diff --git 
a/hyperlink_prediction/hyperlink_prediction_result.py b/hyperbench/hyperlink_prediction/models/hyperlink_prediction_result.py similarity index 100% rename from hyperlink_prediction/hyperlink_prediction_result.py rename to hyperbench/hyperlink_prediction/models/hyperlink_prediction_result.py diff --git a/negative_sampling/__init__.py b/hyperbench/negative_sampling/__init__.py similarity index 96% rename from negative_sampling/__init__.py rename to hyperbench/negative_sampling/__init__.py index e851cd2..2b2f408 100644 --- a/negative_sampling/__init__.py +++ b/hyperbench/negative_sampling/__init__.py @@ -1,4 +1,3 @@ -import negative_sampling from .hypergraph_negative_sampling import HypergraphNegativeSampler from .hypergraph_negative_sampling_result import HypergraphNegativeSamplerResult, ABSizedHypergraphNegativeSamplerResult from .hypergraph_negative_sampling_algorithm import ABSizedHypergraphNegativeSampler,SizedHypergraphNegativeSampler, MotifHypergraphNegativeSampler, CliqueHypergraphNegativeSampler diff --git a/negative_sampling/hypergraph_negative_sampling.py b/hyperbench/negative_sampling/hypergraph_negative_sampling.py similarity index 100% rename from negative_sampling/hypergraph_negative_sampling.py rename to hyperbench/negative_sampling/hypergraph_negative_sampling.py diff --git a/negative_sampling/hypergraph_negative_sampling_algorithm.py b/hyperbench/negative_sampling/hypergraph_negative_sampling_algorithm.py similarity index 97% rename from negative_sampling/hypergraph_negative_sampling_algorithm.py rename to hyperbench/negative_sampling/hypergraph_negative_sampling_algorithm.py index b54116b..eec8edd 100644 --- a/negative_sampling/hypergraph_negative_sampling_algorithm.py +++ b/hyperbench/negative_sampling/hypergraph_negative_sampling_algorithm.py @@ -2,8 +2,8 @@ import torch_geometric.nn.aggr as aggr from enum import Enum from torch import Tensor -from negative_sampling.hypergraph_negative_sampling import HypergraphNegativeSampler -from 
negative_sampling.hypergraph_negative_sampling_result import HypergraphNegativeSamplerResult, ABSizedHypergraphNegativeSamplerResult +from .hypergraph_negative_sampling import HypergraphNegativeSampler +from .hypergraph_negative_sampling_result import HypergraphNegativeSamplerResult, ABSizedHypergraphNegativeSamplerResult import warnings warnings.filterwarnings('ignore', category=UserWarning) @@ -119,7 +119,7 @@ def generate(self, edge_index: Tensor) -> ABSizedHypergraphNegativeSamplerResult local_edge_index[1] += num_hyperedges num_hyperedges = torch.max(local_edge_index[1]) + 1 negative_edge_index = torch.cat([negative_edge_index , local_edge_index], dim = 1) - global_positives = torch.cat([global_positives, probabilities], dim = 0) + global_positives = torch.empty((0, probabilities.shape[1]), dtype=torch.float32, device=self.device) global_replace_mask = torch.cat([global_replace_mask, replace_mask], dim = 0) global_replacement = torch.cat([global_replacement, replacement], dim = 0) diff --git a/negative_sampling/hypergraph_negative_sampling_result.py b/hyperbench/negative_sampling/hypergraph_negative_sampling_result.py similarity index 98% rename from negative_sampling/hypergraph_negative_sampling_result.py rename to hyperbench/negative_sampling/hypergraph_negative_sampling_result.py index de7f452..e03e1d5 100644 --- a/negative_sampling/hypergraph_negative_sampling_result.py +++ b/hyperbench/negative_sampling/hypergraph_negative_sampling_result.py @@ -2,7 +2,7 @@ import torch_geometric.nn.aggr as aggr from abc import ABC from torch import Tensor -from negative_sampling.hypergraph_negative_sampling import HypergraphNegativeSampler +from .hypergraph_negative_sampling import HypergraphNegativeSampler class HypergraphNegativeSamplerResult(ABC): """ A class Result which rapresents the hypergraph generated by diff --git a/hyperbench/pipelines/__init__.py b/hyperbench/pipelines/__init__.py new file mode 100644 index 0000000..f458b4d --- /dev/null +++ 
b/hyperbench/pipelines/__init__.py @@ -0,0 +1,5 @@ +from . import pipeline + +__all__ = [ + "pipeline" +] \ No newline at end of file diff --git a/pipelines/pipeline.py b/hyperbench/pipelines/pipeline.py similarity index 90% rename from pipelines/pipeline.py rename to hyperbench/pipelines/pipeline.py index 047d152..f44fe64 100644 --- a/pipelines/pipeline.py +++ b/hyperbench/pipelines/pipeline.py @@ -2,7 +2,7 @@ def execute(): parser = argparse.ArgumentParser(description="Insert dataset_name, insert negative_sampling method") - parser.add_argument('--dataset_name', type=str, help="The dataset's name, possible dataset's name: \nIMDB,\nCURSERA,\nARXIV", required=True) + parser.add_argument('--dataset_name', type=str, help="The dataset's name, possible dataset's name: \nIMDB,\nCOURSERA,\nARXIV", required=True) parser.add_argument('--negative_sampling', type=str, help="negative sampling method to use, possible methods: \n SizedHypergraphNegativeSampler,\nMotifHypergraphNegativeSampler,\nCliqueHypergraphNegativeSampler", required=True) parser.add_argument('--hlp_method', type=str, help="hyperlink prediction method to use, possible method: \nCommonNeighbors", required=True) parser.add_argument('--output_path', type=str, help="Path to save the results", default="./results") @@ -21,11 +21,10 @@ def execute(): import matplotlib.pyplot as plt import time from random import randint, seed - from hyperlink_prediction.loader.dataloader import DatasetLoader - from hyperlink_prediction.hyperlink_prediction_algorithm import CommonNeighbors - from hyperlink_prediction.datasets.imdb_dataset import CHLPBaseDataset, IMDBHypergraphDataset, ARXIVHypergraphDataset, COURSERAHypergraphDataset - from utils.set_negative_samplig_method import setNegativeSamplingAlgorithm - from utils.hyperlink_train_test_split import train_test_split + from ..hyperlink_prediction.loader.dataloader import DatasetLoader + from ..hyperlink_prediction.models.hyperlink_prediction_algorithm import CommonNeighbors + 
from ..utils.data_and_sampling_selector import setNegativeSamplingAlgorithm, select_dataset + from ..utils.hyperlink_train_test_split import train_test_split from torch_geometric.nn import HypergraphConv from tqdm.auto import trange, tqdm from torch_geometric.data.hypergraph_data import HyperGraphData @@ -57,14 +56,7 @@ def pre_transform(data: HyperGraphData): return data - dataset : CHLPBaseDataset - match(dataset_name): - case 'IMDB': - dataset = IMDBHypergraphDataset("./data", pre_transform= pre_transform) - case 'ARXIV': - dataset = ARXIVHypergraphDataset("./data", pre_transform = pre_transform) - case 'COURSERA': - dataset = COURSERAHypergraphDataset("./data", pre_transform = pre_transform) + dataset = select_dataset(dataset_name, pre_transform= pre_transform) test_size = 0.2 val_size = 0.0 @@ -170,7 +162,7 @@ def forward(self, x, x_e, edge_index): negative_test = negative_sampler.generate(h.edge_index) hlp_method = CommonNeighbors(h.num_nodes) - hlp_result = hlp_method.generate(negative_test.edge_index) + hlp_result = hlp_method.predict(negative_test.edge_index) y_pos = torch.ones(hlp_result.edge_index.size(1), 1) y_neg = torch.zeros(negative_test.edge_index.size(1), 1) diff --git a/hyperbench/utils/__init__.py b/hyperbench/utils/__init__.py new file mode 100644 index 0000000..19cfe99 --- /dev/null +++ b/hyperbench/utils/__init__.py @@ -0,0 +1,7 @@ +from .hyperlink_train_test_split import train_test_split +from .data_and_sampling_selector import setNegativeSamplingAlgorithm, select_dataset +__all__ = [ + 'train_test_split', + 'setNegativeSamplingAlgorithm', + 'select_dataset' +] \ No newline at end of file diff --git a/hyperbench/utils/data_and_sampling_selector.py b/hyperbench/utils/data_and_sampling_selector.py new file mode 100644 index 0000000..2732002 --- /dev/null +++ b/hyperbench/utils/data_and_sampling_selector.py @@ -0,0 +1,31 @@ +from ..hyperlink_prediction.datasets import ARBDataset, IMDBHypergraphDataset, ARXIVHypergraphDataset, 
COURSERAHypergraphDataset, CHLPBaseDataset +from ..negative_sampling.hypergraph_negative_sampling_algorithm import SizedHypergraphNegativeSampler, MotifHypergraphNegativeSampler, CliqueHypergraphNegativeSampler, HypergraphNegativeSampler + +def setNegativeSamplingAlgorithm(ns_algorithm: str, num_node: int): + ns_method : HypergraphNegativeSampler + match(ns_algorithm): + case 'SizedHypergraphNegativeSampler': + ns_method = SizedHypergraphNegativeSampler(num_node) + case 'MotifHypergraphNegativeSampler': + ns_method = MotifHypergraphNegativeSampler(num_node) + case 'CliqueHypergraphNegativeSampler': + ns_method = CliqueHypergraphNegativeSampler(num_node) + + return ns_method + +def select_dataset(ds: str, pre_transform): + + dataset : ARBDataset + if ds in ARBDataset.GDRIVE_IDs.keys(): + dataset = ARBDataset(ds, pre_transform= pre_transform) + else: + dataset : CHLPBaseDataset + match(ds): + case 'IMDB': + dataset = IMDBHypergraphDataset("./data", pre_transform= pre_transform) + case 'ARXIV': + dataset = ARXIVHypergraphDataset("./data", pre_transform = pre_transform) + case 'COURSERA': + dataset = COURSERAHypergraphDataset("./data", pre_transform = pre_transform) + + return dataset \ No newline at end of file diff --git a/utils/hyperlink_train_test_split.py b/hyperbench/utils/hyperlink_train_test_split.py similarity index 100% rename from utils/hyperlink_train_test_split.py rename to hyperbench/utils/hyperlink_train_test_split.py diff --git a/hyperlink_prediction/__init__.py b/hyperlink_prediction/__init__.py deleted file mode 100644 index 05f24e4..0000000 --- a/hyperlink_prediction/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -import hyperlink_prediction.datasets -from .hyperlink_prediction_base import HypergraphSampler -from .hyperlink_prediction_algorithm import CommonNeighbors -from .hyperlink_prediction_result import HyperlinkPredictionResult - -__all__ = [ - 'hyperlink_prediction.datasets', -] -data_classes = [ - 'HypergraphSampler', - 'CommonNeighbors', - 
'HyperlinkPredictionResult' -] \ No newline at end of file diff --git a/hyperlink_prediction/hyperlink_prediction_algorithm.py b/hyperlink_prediction/hyperlink_prediction_algorithm.py deleted file mode 100644 index c87aab0..0000000 --- a/hyperlink_prediction/hyperlink_prediction_algorithm.py +++ /dev/null @@ -1,27 +0,0 @@ -import torch -from torch import Tensor -from .hyperlink_prediction_base import HypergraphSampler -from .hyperlink_prediction_result import HyperlinkPredictionResult - -class CommonNeighbors(HypergraphSampler): - - def score_CN(self, H, u, v): - return torch.dot(H[u], H[v]).item() - - def generate(self, edge_index: Tensor): - sparse = torch.sparse_coo_tensor( - edge_index, - torch.ones(edge_index.shape[1], device=self.device), - (self.num_node, edge_index.max().item() + 1), - device=self.device - ) - H = sparse.to_dense() - - CN_matrix = torch.matmul(H, H.T) - - new_edges = torch.nonzero(torch.triu(CN_matrix, diagonal=1)).T - - return HyperlinkPredictionResult( - edge_index=new_edges, - device=self.device - ) diff --git a/hyperlink_prediction/hyperlink_prediction_base.py b/hyperlink_prediction/hyperlink_prediction_base.py deleted file mode 100644 index d2f7d14..0000000 --- a/hyperlink_prediction/hyperlink_prediction_base.py +++ /dev/null @@ -1,24 +0,0 @@ -import torch -import numpy as np -from abc import abstractmethod - -class HypergraphSampler(): - - def __init__(self, num_node: int, device: torch.device = torch.device('cpu')): - self.num_node = num_node - self.device = device - - @abstractmethod - def fit(self, *args, **kwargs): - pass - - @abstractmethod - def generate(self, edge_index: torch.Tensor): - pass - - @abstractmethod - def transform(self, edge_index: np.ndarray): - pass - - def trasform(self, edge_index: torch.Tensor): - return self.generate(edge_index) diff --git a/pipelines/__init__.py b/pipelines/__init__.py deleted file mode 100644 index f7981c3..0000000 --- a/pipelines/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -import pipelines 
-from pipelines import pipeline - -__all__ = [ - pipeline -] \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index c330c6f..b487f5c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ dependencies = [ requires-python = ">=3.9" [project.scripts] -pipeline = "pipelines.pipeline:execute" +pipeline = "hyperbench.pipelines.pipeline:execute" [project.urls] Homepage = "https://www.isislab.it/" diff --git a/setup.py b/setup.py index d704634..917ad1c 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ include_package_data=True, entry_points={ 'console_scripts': [ - 'pipeline=pipelines.pipeline:execute', + 'pipeline=hyperbench.pipelines.pipeline:execute', ], }, install_requires=[], diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..ed48da5 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,5 @@ +from . import methods_test + +__all__ = [ + "methods_test" +] \ No newline at end of file diff --git a/tests/methods_test.py b/tests/methods_test.py new file mode 100644 index 0000000..54e1c60 --- /dev/null +++ b/tests/methods_test.py @@ -0,0 +1,83 @@ +import unittest +import subprocess + +def dataset_dict(): + datasets = {} + dataset_arb = [ + 'coauth-DBLP', + # "coauth-MAG-Geology", + # "email-Enron", + # "tags-math-sx", + # "contact-high-school", + # "contact-primary-school", + # "NDC-substances" + ] + datasets_CHLP = [ + #"IMDB", + "COURSERA", + #"ARXIV" + ] + negative_methods = [ + "SizedHypergraphNegativeSampler", + "MotifHypergraphNegativeSampler", + "CliqueHypergraphNegativeSampler" + ] + hlp_methods = ["CommonNeighbors"] + + ns_hlp_union = [] + for ns in negative_methods: + for hlp in hlp_methods: + ns_hlp_union.append([ns, hlp]) + + for dataset in dataset_arb: + datasets[dataset] = {"methods": ns_hlp_union} + + for dataset in datasets_CHLP: + datasets[dataset] = {"methods": ns_hlp_union} + + return datasets + +def create_pipelines_comand(): + datasets = dataset_dict() + pipelines = [] + + for 
dataset_name, content in datasets.items(): + for ns, hlp in content["methods"]: + cmd = ( + f"uv run pipeline " + f"--dataset_name {dataset_name} " + f"--hlp_method {hlp} " + f"--negative_sampling {ns}" + ) + pipelines.append(cmd) + + return pipelines + +class TestPipelineExecution(unittest.TestCase): + + def test_pipeline_execution(self): + pipelines = create_pipelines_comand() + for cmd in pipelines: + with self.subTest(cmd=cmd): + result = subprocess.run(cmd, shell=True, capture_output=True, text=True) + + if result.returncode == 0 and result.stdout.strip() != "": + print(f"[OK] {cmd}") + elif result.returncode != 0: + print(f"[FAIL] {cmd} (exit {result.returncode})") + print(f"STDERR:\n{result.stderr}") + else: + print(f"[FAIL] {cmd} — Nessun output prodotto") + + self.assertEqual( + result.returncode, 0, + msg=f"Pipeline fallita (exit {result.returncode}): {cmd}\nSTDERR:\n{result.stderr}" + ) + + self.assertTrue( + result.stdout.strip() != "", + msg=f"Pipeline terminata senza output: {cmd}" + ) + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/utils/__init__.py b/utils/__init__.py deleted file mode 100644 index 09fd158..0000000 --- a/utils/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -import utils -from utils.hyperlink_train_test_split import train_test_split -from utils.set_negative_samplig_method import setNegativeSamplingAlgorithm -__all__ = [ - 'train_test_split', - 'setNegativeSamplingAlgorithm' -] \ No newline at end of file diff --git a/utils/set_negative_samplig_method.py b/utils/set_negative_samplig_method.py deleted file mode 100644 index d986722..0000000 --- a/utils/set_negative_samplig_method.py +++ /dev/null @@ -1,13 +0,0 @@ -from negative_sampling.hypergraph_negative_sampling_algorithm import SizedHypergraphNegativeSampler, MotifHypergraphNegativeSampler, CliqueHypergraphNegativeSampler, HypergraphNegativeSampler - -def setNegativeSamplingAlgorithm(ns_algorithm: str, num_node: int): - ns_method : 
HypergraphNegativeSampler - match(ns_algorithm): - case 'SizedHypergraphNegativeSampler': - ns_method = SizedHypergraphNegativeSampler(num_node) - case 'MotifHypergraphNegativeSampler': - ns_method = MotifHypergraphNegativeSampler(num_node) - case 'CliqueHypergraphNegativeSampler': - ns_method = CliqueHypergraphNegativeSampler(num_node) - - return ns_method \ No newline at end of file