Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 0 additions & 16 deletions src/vtlengine/API/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
from vtlengine.AST.ASTConstructor import ASTVisitor
from vtlengine.AST.ASTString import ASTString
from vtlengine.AST.DAG import DAGAnalyzer
from vtlengine.AST.DAG._models import DatasetSchedule
from vtlengine.AST.Grammar.lexer import Lexer
from vtlengine.AST.Grammar.parser import Parser
from vtlengine.Exceptions import InputValidationException
Expand Down Expand Up @@ -94,14 +93,6 @@ def _extract_input_datasets(script: Union[str, TransformationScheme, Path]) -> L
return dag_inputs


def _validate_extra_datasets(datasets: Dict[str, Any], ds_analysis: DatasetSchedule) -> None:
    """Raise if data_structures contains datasets not referenced by the script.

    Compares the provided dataset names against every dataset the script
    either consumes (global inputs) or produces (outputs); any name outside
    that set is reported as an input-validation error.
    """
    # Names the script actually touches, either as input or as output.
    referenced = {*ds_analysis.global_inputs, *ds_analysis.all_outputs}
    unused = [name for name in datasets if name not in referenced]
    if unused:
        # Sorted for a deterministic, reproducible error message.
        raise InputValidationException(code="0-1-3-9", datasets=sorted(unused))


def prettify(script: Union[str, TransformationScheme, Path]) -> str:
"""
Function that prettifies the VTL script given.
Expand Down Expand Up @@ -270,10 +261,6 @@ def semantic_analysis(
# Loading datasets from file/dict/pysdmx objects/URLs
datasets, scalars = load_datasets(data_structures, sdmx_mappings=mapping_dict)

# Validate that all provided datasets are required by the script
ds_analysis = DAGAnalyzer.ds_structure(ast)
_validate_extra_datasets(datasets, ds_analysis)

# Handling of library items
vd = None
if value_domains is not None:
Expand Down Expand Up @@ -447,9 +434,6 @@ def run(
# VTL Efficient analysis
ds_analysis = DAGAnalyzer.ds_structure(ast)

# Validate that all provided datasets are required by the script
_validate_extra_datasets(datasets, ds_analysis)

# Checking the output path to be a Path object to a directory
if output_folder is not None:
_check_output_folder(output_folder)
Expand Down
6 changes: 0 additions & 6 deletions src/vtlengine/Exceptions/messages.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,12 +224,6 @@
"description": "Raised when URL datapoints are provided but data_structures is not a "
"file path or URL for fetching the SDMX structure definition.",
},
"0-1-3-9": {
"message": "Dataset(s) {datasets} defined in data structures "
"but not required by the script.",
"description": "Raised when the provided data structures contain datasets "
"that are not used as inputs in the VTL script.",
},
# ------------Operators-------------
# General Semantic errors
"1-1-1-1": {
Expand Down
30 changes: 0 additions & 30 deletions tests/API/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -2084,33 +2084,3 @@ def test_validate_dataset(ds_input, dp_input, is_valid, message):
else:
with pytest.raises(Exception, match=message):
validate_dataset(ds_data, dp_input)


def test_extra_dataset_in_data_structures():
    """run() and semantic_analysis() should fail when data_structures has unused datasets."""
    script = "DS_A <- DS_1 * 10;"

    # Both datasets share the same structure; build it from one factory so the
    # two entries cannot drift apart (each call returns fresh dicts).
    def _structure():
        return [
            {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False},
            {"name": "Me_1", "type": "Number", "role": "Measure", "nullable": True},
        ]

    # DS_2 is deliberately never referenced by the script above.
    data_structures = {
        "datasets": [
            {"name": "DS_1", "DataStructure": _structure()},
            {"name": "DS_2", "DataStructure": _structure()},
        ]
    }
    datapoints = {"DS_1": pd.DataFrame({"Id_1": [1], "Me_1": [10]})}

    # Both entry points must reject the extra dataset with error code 0-1-3-9.
    with pytest.raises(InputValidationException, match="0-1-3-9"):
        semantic_analysis(script=script, data_structures=data_structures)

    with pytest.raises(InputValidationException, match="0-1-3-9"):
        run(script=script, data_structures=data_structures, datapoints=datapoints)