Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
55 changes: 25 additions & 30 deletions src/vtlengine/Interpreter/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -853,47 +853,40 @@ def visit_VarID(self, node: AST.VarID) -> Any: # noqa: C901
if node.value in self.regular_aggregation_dataset.components:
raise SemanticError("1-1-6-11", comp_name=node.value)
return copy(self.scalars[node.value])
if self.regular_aggregation_dataset.data is not None:
if (
self.is_from_join
and node.value
not in self.regular_aggregation_dataset.get_components_names()
):
is_partial_present = 0
found_comp = None
for comp_name in self.regular_aggregation_dataset.get_components_names():
if (
"#" in comp_name
and comp_name.split("#")[1] == node.value
or "#" in node.value
and node.value.split("#")[1] == comp_name
):
is_partial_present += 1
found_comp = comp_name
if is_partial_present == 0:
raise SemanticError(
"1-1-1-10",
comp_name=node.value,
dataset_name=self.regular_aggregation_dataset.name,
)
elif is_partial_present == 2:
raise SemanticError("1-1-13-9", comp_name=node.value)
node.value = found_comp # type:ignore[assignment]
if node.value not in self.regular_aggregation_dataset.components:
if (
self.is_from_join
and node.value not in self.regular_aggregation_dataset.get_components_names()
):
is_partial_present = 0
found_comp = None
for comp_name in self.regular_aggregation_dataset.get_components_names():
if (
"#" in comp_name
and comp_name.split("#")[1] == node.value
or "#" in node.value
and node.value.split("#")[1] == comp_name
):
is_partial_present += 1
found_comp = comp_name
if is_partial_present == 0:
raise SemanticError(
"1-1-1-10",
comp_name=node.value,
dataset_name=self.regular_aggregation_dataset.name,
)
data = copy(self.regular_aggregation_dataset.data[node.value])
else:
data = None
elif is_partial_present == 2:
raise SemanticError("1-1-13-9", comp_name=node.value)
node.value = found_comp # type:ignore[assignment]
if node.value not in self.regular_aggregation_dataset.components:
raise SemanticError(
"1-1-1-10",
comp_name=node.value,
dataset_name=self.regular_aggregation_dataset.name,
)
if self.regular_aggregation_dataset.data is not None:
data = copy(self.regular_aggregation_dataset.data[node.value])
else:
data = None
return DataComponent(
name=node.value,
data=data,
Expand Down Expand Up @@ -1567,6 +1560,8 @@ def visit_HRBinOp(self, node: AST.HRBinOp) -> Any:
filter_comp = self.visit(node.left)
if self.rule_data is None:
return None
if filter_comp.data is None:
return self.visit(node.right)
filtering_indexes = list(filter_comp.data[filter_comp.data == True].index)
nan_indexes = list(filter_comp.data[filter_comp.data.isnull()].index)
# If no filtering indexes, then all datapoints are valid on DPR and HR
Expand Down
3 changes: 1 addition & 2 deletions tests/Bugs/test_bugs.py
Original file line number Diff line number Diff line change
Expand Up @@ -2575,8 +2575,7 @@ def test_Fail_GL_67(self):
""" """
code = "GL_67_Fail"
number_inputs = 39
message = "1-1-1-10"
# TODO: test error code has been changed until revision
message = "1-1-6-10"
self.NewSemanticExceptionTest(
code=code, number_inputs=number_inputs, exception_code=message
)
Expand Down
51 changes: 4 additions & 47 deletions tests/Helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -230,6 +230,9 @@ def NewSemanticExceptionTest(
warnings.filterwarnings("ignore", category=FutureWarning)
if text is None:
text = cls.LoadVTL(code)

is_runtime_error = exception_code.startswith("2")

input_datasets = cls.LoadInputs(code=code, number_inputs=number_inputs)

value_domains = None
Expand All @@ -256,6 +259,7 @@ def NewSemanticExceptionTest(
scalars=scalars_obj,
value_domains=value_domains,
external_routines=external_routines,
only_semantic=not is_runtime_error,
)
with pytest.raises((SemanticError, RunTimeError)) as context:
ast = create_ast(text)
Expand All @@ -266,53 +270,6 @@ def NewSemanticExceptionTest(
print(f"\n{exception_code} != {context.value.args[1]}")
assert result

@classmethod
def SemanticExceptionTest(
    cls,
    code: str,
    number_inputs: int,
    exception_code: str,
    vd_names: Optional[List[str]] = None,
    sql_names: Optional[List[str]] = None,
    text: Optional[str] = None,
    scalars: Optional[Dict[str, Any]] = None,
):
    """Run a VTL script through the interpreter and assert it raises the
    expected SemanticError.

    Args:
        code: Test-case identifier; used to locate the VTL script and input
            datasets on disk when ``text`` is not supplied.
        number_inputs: How many input datasets to load for this case.
        exception_code: The error code the raised SemanticError must carry
            (compared against ``args[1]`` of the exception).
        vd_names: Optional value-domain file names to load.
        sql_names: Optional external-routine (SQL) file names to load.
        text: Optional VTL source; when ``None`` it is loaded via
            ``cls.LoadVTL(code)``.
        scalars: Optional mapping of scalar name -> value, overriding the
            value of already-loaded Scalar inputs.

    Raises:
        Exception: If a name in ``scalars`` is missing from the inputs or
            refers to a dataset rather than a Scalar.
        AssertionError: If no SemanticError with ``exception_code`` is raised.
    """
    # Data Loading.--------------------------------------------------------
    # Pandas emits FutureWarnings while loading fixtures; silence them so
    # test output stays readable.
    warnings.filterwarnings("ignore", category=FutureWarning)
    if text is None:
        text = cls.LoadVTL(code)
    input_datasets = cls.LoadInputs(code=code, number_inputs=number_inputs)

    value_domains = None
    if vd_names is not None:
        value_domains = cls.LoadValueDomains(vd_names)

    external_routines = None
    if sql_names is not None:
        external_routines = cls.LoadExternalRoutines(sql_names)

    if scalars is not None:
        # Overwrite scalar values in place; fail loudly on a bad name so the
        # test case is fixed rather than silently running with stale data.
        for scalar_name, scalar_value in scalars.items():
            if scalar_name not in input_datasets:
                raise Exception(f"Scalar {scalar_name} not found in the input datasets")
            if not isinstance(input_datasets[scalar_name], Scalar):
                raise Exception(f"{scalar_name} is a dataset")
            input_datasets[scalar_name].value = scalar_value

    interpreter = InterpreterAnalyzer(
        input_datasets,
        value_domains=value_domains,
        external_routines=external_routines,
    )
    # The AST build is inside the raises-block on purpose: some semantic
    # errors are detected at parse/AST-creation time, before visiting.
    with pytest.raises(SemanticError) as context:
        ast = create_ast(text)
        interpreter.visit(ast)

    result = exception_code == str(context.value.args[1])
    if result is False:
        print(f"\n{exception_code} != {context.value.args[1]}")
    assert result

@classmethod
def LoadValueDomains(cls, vd_names):
value_domains = {}
Expand Down
Loading