From 7fc5b6020a4770eafdf12d57c6beb9e226db1307 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Thu, 26 Feb 2026 12:37:36 +0100 Subject: [PATCH 01/38] Fixed literal casting inside sub operator (#538) * Added visitScalarWithCast statement into sub AST constructor to handle ScalarWithCastContext * Added related test --- .../AST/ASTConstructorModules/Expr.py | 4 ++- .../data/DataStructure/input/GH_537_1-1.json | 27 +++++++++++++++++++ .../data/DataStructure/output/GH_537_1-1.json | 21 +++++++++++++++ tests/Cast/data/Dataset/input/GH_537_1-1.csv | 5 ++++ tests/Cast/data/Dataset/output/GH_537_1-1.csv | 3 +++ tests/Cast/data/vtl/GH_537_1.vtl | 1 + tests/Cast/test_cast.py | 10 +++++++ 7 files changed, 70 insertions(+), 1 deletion(-) create mode 100644 tests/Cast/data/DataStructure/input/GH_537_1-1.json create mode 100644 tests/Cast/data/DataStructure/output/GH_537_1-1.json create mode 100644 tests/Cast/data/Dataset/input/GH_537_1-1.csv create mode 100644 tests/Cast/data/Dataset/output/GH_537_1-1.csv create mode 100644 tests/Cast/data/vtl/GH_537_1.vtl diff --git a/src/vtlengine/AST/ASTConstructorModules/Expr.py b/src/vtlengine/AST/ASTConstructorModules/Expr.py index 72bacaae9..97b3cb544 100644 --- a/src/vtlengine/AST/ASTConstructorModules/Expr.py +++ b/src/vtlengine/AST/ASTConstructorModules/Expr.py @@ -1884,7 +1884,9 @@ def visitSubspaceClauseItem(self, ctx: Parser.SubspaceClauseItemContext): left_node = Terminals().visitVarID(ctx_list[0]) op_node = ctx_list[1].getSymbol().text - if isinstance(ctx_list[2], Parser.ScalarItemContext): + if isinstance(ctx_list[2], Parser.ScalarWithCastContext): + right_node = Terminals().visitScalarWithCast(ctx_list[2]) + elif isinstance(ctx_list[2], Parser.ScalarItemContext): right_node = Terminals().visitScalarItem(ctx_list[2]) else: right_node = Terminals().visitVarID(ctx_list[2]) diff --git a/tests/Cast/data/DataStructure/input/GH_537_1-1.json 
b/tests/Cast/data/DataStructure/input/GH_537_1-1.json new file mode 100644 index 000000000..31bd21eeb --- /dev/null +++ b/tests/Cast/data/DataStructure/input/GH_537_1-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "nullable": false, + "role": "Identifier" + }, + { + "name": "Id_2", + "type": "String", + "nullable": false, + "role": "Identifier" + }, + { + "name": "Me_1", + "type": "Integer", + "nullable": true, + "role": "Measure" + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/Cast/data/DataStructure/output/GH_537_1-1.json b/tests/Cast/data/DataStructure/output/GH_537_1-1.json new file mode 100644 index 000000000..56672d9c3 --- /dev/null +++ b/tests/Cast/data/DataStructure/output/GH_537_1-1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "nullable": false, + "role": "Identifier" + }, + { + "name": "Me_1", + "type": "Integer", + "nullable": true, + "role": "Measure" + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/Cast/data/Dataset/input/GH_537_1-1.csv b/tests/Cast/data/Dataset/input/GH_537_1-1.csv new file mode 100644 index 000000000..53e89f364 --- /dev/null +++ b/tests/Cast/data/Dataset/input/GH_537_1-1.csv @@ -0,0 +1,5 @@ +Id_1,Id_2,Me_1 +1,1,10 +2,1,20 +1,2,30 +2,2,40 \ No newline at end of file diff --git a/tests/Cast/data/Dataset/output/GH_537_1-1.csv b/tests/Cast/data/Dataset/output/GH_537_1-1.csv new file mode 100644 index 000000000..7677d02e0 --- /dev/null +++ b/tests/Cast/data/Dataset/output/GH_537_1-1.csv @@ -0,0 +1,3 @@ +Id_1,Me_1 +1,10 +2,20 \ No newline at end of file diff --git a/tests/Cast/data/vtl/GH_537_1.vtl b/tests/Cast/data/vtl/GH_537_1.vtl new file mode 100644 index 000000000..3c3d475da --- /dev/null +++ b/tests/Cast/data/vtl/GH_537_1.vtl @@ -0,0 +1 @@ +DS_r <- DS_1[sub Id_2 = cast(1, string)]; \ No newline at end of file diff --git 
a/tests/Cast/test_cast.py b/tests/Cast/test_cast.py index 3118a7917..205249ba4 100644 --- a/tests/Cast/test_cast.py +++ b/tests/Cast/test_cast.py @@ -59,6 +59,16 @@ def test_GL_563_1(self): self.BaseTest(code, number_inputs, references_names=reference_names) + def test_GH_537_1(self): + """ + Solves bug report in github issue #537: sub fails with scalar casting + """ + code = "GH_537_1" + number_inputs = 1 + reference_names = ["1"] + + self.BaseTest(code, number_inputs, references_names=reference_names) + # =========================================================================== # Comprehensive explicit cast tests (VTL 2.2) - Without mask From aadba80c704346678200a9af5366b247bdfd9fe3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Fri, 27 Feb 2026 11:05:39 +0100 Subject: [PATCH 02/38] Fix #541: Harden DuckDB error handling and detect infinite values (#542) * Added visitScalarWithCast statement into sub AST constructor to handle ScalarWithCastContext * Added related test * Harden DuckDB error handling and detect infinite values (#541) - Add pyarrow-based inf detection for ratio_to_report (division by zero) - Add ieee_floating_point_ops=false to eval operator connection - Add inf check on eval operator measure columns - Replace bare exceptions in eval with dedicated error codes - Add centralized error messages: 2-1-1-1, 2-1-3-1, 2-3-8, 1-1-1-21, 1-1-1-22 - Add test for ratio_to_report on zero-sum partitions * Remove unrelated changes from issue #537 --------- Co-authored-by: Mateo --- src/vtlengine/Exceptions/messages.py | 25 +++++++++++++++++ src/vtlengine/Operators/Aggregation.py | 5 ++-- src/vtlengine/Operators/Analytic.py | 12 +++++++-- src/vtlengine/Operators/General.py | 19 +++++++++++---- .../data/DataSet/input/1-1-1-20-1.csv | 4 +++ .../data/DataStructure/input/1-1-1-20-1.json | 27 ++++++++++++++++++++ tests/Analytic/data/vtl/1-1-1-20.vtl | 1 + tests/Analytic/test_analytic.py | 21 ++++++++++++++
tests/Eval/test_eval.py | 12 ++++----- 9 files changed, 111 insertions(+), 15 deletions(-) create mode 100644 tests/Analytic/data/DataSet/input/1-1-1-20-1.csv create mode 100644 tests/Analytic/data/DataStructure/input/1-1-1-20-1.json create mode 100644 tests/Analytic/data/vtl/1-1-1-20.vtl diff --git a/src/vtlengine/Exceptions/messages.py b/src/vtlengine/Exceptions/messages.py index 573759b4d..0174dca60 100644 --- a/src/vtlengine/Exceptions/messages.py +++ b/src/vtlengine/Exceptions/messages.py @@ -278,6 +278,16 @@ "description": "Occurs when a Scalar is provided to an operation " "that only supports Datasets.", }, + "1-1-1-21": { + "message": "At op eval: Query contains forbidden command: {command}.", + "description": "Raised when an eval query contains a forbidden SQL command " + "such as INSTALL or LOAD.", + }, + "1-1-1-22": { + "message": "At op eval: Query contains forbidden URL in FROM clause.", + "description": "Raised when an eval query attempts to access an external URL " + "in the FROM clause.", + }, # Aggregate errors "1-1-2-2": { "message": "At op {op}: Only Identifiers are allowed for grouping, " @@ -296,6 +306,16 @@ "description": "Raised when a non-Identifier component is used as a " "partitioning key in an analytic operation.", }, + "2-1-1-1": { + "message": "At op {op}: DuckDB runtime error during query execution: {error}.", + "description": "Raised when DuckDB encounters an unexpected runtime error " + "during query execution.", + }, + "2-1-3-1": { + "message": "At op {op}: Division by zero produced infinite values.", + "description": "Raised when a division by zero in a DuckDB query produces " + "infinite floating-point values in measure columns.", + }, # Cast errors "1-1-5-1": { "message": "Type {type_1}, cannot be cast to {type_2}.", @@ -1072,6 +1092,11 @@ "message": "Ruleset Dataset not found, please check the ruleset definition.", "description": "Occurs when the Ruleset Dataset is missing.", }, + "2-3-8": { + "message": "At op {op}: Data 
conversion error: {msg}.", + "description": "Raised when DuckDB encounters a data conversion error during " + "aggregation query execution.", + }, "2-3-9": { "message": "{comp_type} {comp_name} not found in {param}.", "description": "Raised when a component is not found within a specified parameter.", diff --git a/src/vtlengine/Operators/Aggregation.py b/src/vtlengine/Operators/Aggregation.py index e9f9b70ac..ac55769cb 100644 --- a/src/vtlengine/Operators/Aggregation.py +++ b/src/vtlengine/Operators/Aggregation.py @@ -208,12 +208,13 @@ def _agg_func( ) try: - return duckdb.query(query).to_df() + result = duckdb.query(query).to_df() except RuntimeError as e: if "Conversion" in e.args[0]: raise RunTimeError("2-3-8", op=cls.op, msg=e.args[0].split(":")[-1]) else: - raise RunTimeError("2-1-1-1", op=cls.op) + raise RunTimeError("2-1-1-1", op=cls.op, error=e) + return result @classmethod def evaluate( # type: ignore[override] diff --git a/src/vtlengine/Operators/Analytic.py b/src/vtlengine/Operators/Analytic.py index 21acd2360..d3dd2331a 100644 --- a/src/vtlengine/Operators/Analytic.py +++ b/src/vtlengine/Operators/Analytic.py @@ -3,6 +3,8 @@ import duckdb import pandas as pd +import pyarrow as pa +import pyarrow.compute as pc import vtlengine.Operators as Operator from vtlengine.AST import OrderBy, Windowing @@ -30,7 +32,7 @@ Number, unary_implicit_promotion, ) -from vtlengine.Exceptions import SemanticError +from vtlengine.Exceptions import RunTimeError, SemanticError from vtlengine.Model import Component, Dataset, Role from vtlengine.Utils.__Virtual_Assets import VirtualCounter @@ -248,7 +250,13 @@ def analyticfunc( if cls.op == COUNT: df[measure_names] = df[measure_names].fillna(-1) - return duckdb.query(query).to_df() + result = duckdb.query(query).to_df() + if cls.op == RATIO_TO_REPORT: + for col_name in measure_names: + arr = pa.array(result[col_name]) + if pa.types.is_floating(arr.type) and pc.any(pc.is_inf(arr)).as_py(): + raise RunTimeError("2-1-3-1", 
op=cls.op) + return result @classmethod def evaluate( # type: ignore[override] diff --git a/src/vtlengine/Operators/General.py b/src/vtlengine/Operators/General.py index c01d2a2ff..388db20d9 100644 --- a/src/vtlengine/Operators/General.py +++ b/src/vtlengine/Operators/General.py @@ -3,9 +3,11 @@ import duckdb import pandas as pd +import pyarrow as pa +import pyarrow.compute as pc from vtlengine.DataTypes import COMP_NAME_MAPPING -from vtlengine.Exceptions import SemanticError +from vtlengine.Exceptions import RunTimeError, SemanticError from vtlengine.Model import Component, DataComponent, Dataset, ExternalRoutine, Role from vtlengine.Operators import Binary, Unary from vtlengine.Utils.__Virtual_Assets import VirtualCounter @@ -113,9 +115,9 @@ def _execute_query( query = re.sub(r'"([^"]*)"', r"'\1'", query) for forbidden in ["INSTALL", "LOAD"]: if re.search(rf"\b{forbidden}\b", query, re.IGNORECASE): - raise Exception(f"Query contains forbidden command: {forbidden}") + raise SemanticError("1-1-1-21", command=forbidden) if re.search(r"FROM\s+'https?://", query, re.IGNORECASE): - raise Exception("Query contains forbidden URL in FROM clause") + raise SemanticError("1-1-1-22") try: conn = duckdb.connect(database=":memory:", read_only=False) conn.execute("SET enable_external_access = false") @@ -130,12 +132,19 @@ def _execute_query( df = data[ds_name] conn.register(ds_name, df) df_result = conn.execute(query).fetchdf() + for col_name in df_result.columns: + arr = pa.array(df_result[col_name]) + if pa.types.is_floating(arr.type) and pc.any(pc.is_inf(arr)).as_py(): + conn.close() + raise RunTimeError("2-1-3-1", op="eval") conn.close() except Exception as e: conn.close() - raise Exception(f"Error executing SQL query: {e}") + raise RunTimeError("2-1-1-1", op="eval", error=e) + except RunTimeError: + raise except Exception as e: - raise Exception(f"Error connecting to DuckDB in memory: {e}") + raise RunTimeError("2-1-1-1", op="eval", error=e) return df_result @classmethod 
diff --git a/tests/Analytic/data/DataSet/input/1-1-1-20-1.csv b/tests/Analytic/data/DataSet/input/1-1-1-20-1.csv new file mode 100644 index 000000000..4df0f90b0 --- /dev/null +++ b/tests/Analytic/data/DataSet/input/1-1-1-20-1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +A,1,5 +A,2,-5 +A,3,0 diff --git a/tests/Analytic/data/DataStructure/input/1-1-1-20-1.json b/tests/Analytic/data/DataStructure/input/1-1-1-20-1.json new file mode 100644 index 000000000..b49998747 --- /dev/null +++ b/tests/Analytic/data/DataStructure/input/1-1-1-20-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/Analytic/data/vtl/1-1-1-20.vtl b/tests/Analytic/data/vtl/1-1-1-20.vtl new file mode 100644 index 000000000..12a4ad72a --- /dev/null +++ b/tests/Analytic/data/vtl/1-1-1-20.vtl @@ -0,0 +1 @@ +DS_r := ratio_to_report ( DS_1 over ( partition by Id_1 ) ); \ No newline at end of file diff --git a/tests/Analytic/test_analytic.py b/tests/Analytic/test_analytic.py index 36fbb18f4..09710e9ae 100644 --- a/tests/Analytic/test_analytic.py +++ b/tests/Analytic/test_analytic.py @@ -429,6 +429,27 @@ def test_19(self): self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + def test_20(self): + """ + Ratio to report: ratio_to_report + Dataset --> Dataset + Status: RunTimeError + Expression: DS_r := ratio_to_report ( DS_1 over ( partition by Id_1 ) ); + DS_1 Dataset + + Description: ratio_to_report raises an error when a partition sums to + zero, producing infinite values from the division by zero. + + Goal: Check that ratio_to_report raises error 2-1-3-1 on zero-sum partitions. 
+ """ + code = "1-1-1-20" + number_inputs = 1 + exception_code = "2-1-3-1" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) + class AnalyticOperatorsWithCalcTest(AnalyticHelper): """ diff --git a/tests/Eval/test_eval.py b/tests/Eval/test_eval.py index 68f8c5879..26a0210f2 100644 --- a/tests/Eval/test_eval.py +++ b/tests/Eval/test_eval.py @@ -4,7 +4,7 @@ import pytest from tests.Helper import TestHelper -from vtlengine.Exceptions import SemanticError +from vtlengine.Exceptions import RunTimeError, SemanticError from vtlengine.Operators.General import Eval @@ -125,28 +125,28 @@ def test_execute_query_empty_row(): def test_execute_query_forbid_install(): query = "INSTALL some_extension;" datasets = {"DS_1": pd.DataFrame([{"A": 1}])} - with pytest.raises(Exception, match="Query contains forbidden command: INSTALL"): + with pytest.raises(SemanticError, match="forbidden command: INSTALL"): Eval._execute_query(query, ["DS_1"], datasets) def test_execute_query_forbid_load(): query = "LOAD 'some_file';" datasets = {"DS_1": pd.DataFrame([{"A": 1}])} - with pytest.raises(Exception, match="Query contains forbidden command: LOAD"): + with pytest.raises(SemanticError, match="forbidden command: LOAD"): Eval._execute_query(query, ["DS_1"], datasets) def test_execute_query_forbid_url_in_from(): query = "SELECT column_a FROM 'https://domain.tld/file.parquet';" datasets = {"DS_1": pd.DataFrame([{"column_a": 1}])} - with pytest.raises(Exception, match="Query contains forbidden URL in FROM clause"): + with pytest.raises(SemanticError, match="forbidden URL in FROM clause"): Eval._execute_query(query, ["DS_1"], datasets) def test_execute_query_sql_error(): query = "SELECT NONEXISTENT_FUNC(A) FROM DS_1;" datasets = {"DS_1": pd.DataFrame([{"A": 1}])} - with pytest.raises(Exception, match="Error executing SQL query:"): + with pytest.raises(RunTimeError, match="DuckDB runtime error"): Eval._execute_query(query, ["DS_1"], datasets) 
@@ -165,5 +165,5 @@ def test_execute_query_empty_row_with_function_error(): FROM MSMTCH_BL_DS; """ datasets = {"MSMTCH_BL_DS": pd.DataFrame([{"DT_LGL_FNL_MTRTY": None, "DT_MTRTY_PRTCTN": None}])} - with pytest.raises(Exception, match="Error executing SQL query:"): + with pytest.raises(RunTimeError, match="DuckDB runtime error"): Eval._execute_query(query, ["MSMTCH_BL_DS"], datasets) From a804f64c7465df0b9f10cc297ea60b26c6b2937f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Fri, 27 Feb 2026 13:26:02 +0100 Subject: [PATCH 03/38] Fixed julian SQL method failing with Date input (#547) * Eval operator now cast Date columns to date64[pyarrow] * Added related test * Minor fix * Refactor Eval operator to normalize date columns and improve readability * Fixed ruff errors * Fixed mypy errors --- src/vtlengine/Operators/General.py | 26 +++++++++++--- tests/Eval/test_eval.py | 57 ++++++++++++++++++++++++++++++ 2 files changed, 79 insertions(+), 4 deletions(-) diff --git a/src/vtlengine/Operators/General.py b/src/vtlengine/Operators/General.py index 388db20d9..b660f7ac5 100644 --- a/src/vtlengine/Operators/General.py +++ b/src/vtlengine/Operators/General.py @@ -1,12 +1,12 @@ import re -from typing import Any, Dict, List, Union +from typing import Any, Dict, List, Optional, Union import duckdb import pandas as pd import pyarrow as pa import pyarrow.compute as pc -from vtlengine.DataTypes import COMP_NAME_MAPPING +from vtlengine.DataTypes import COMP_NAME_MAPPING, Date from vtlengine.Exceptions import RunTimeError, SemanticError from vtlengine.Model import Component, DataComponent, Dataset, ExternalRoutine, Role from vtlengine.Operators import Binary, Unary @@ -191,10 +191,28 @@ def evaluate( # type: ignore[override] output: Dataset, ) -> Dataset: result: Dataset = cls.validate(operands, external_routine, output) - operands_data_dict = {ds_name: operands[ds_name].data for ds_name in operands} + 
operands_data = {} + for ds_name in operands: + operands_data[ds_name] = cls.normalize_dates( + operands[ds_name].data, operands[ds_name].components + ) + result.data = cls._execute_query( external_routine.query, external_routine.dataset_names, - operands_data_dict, # type: ignore[arg-type] + operands_data, ) return result + + @classmethod + def normalize_dates( + cls, data: Optional[pd.DataFrame], components: Dict[str, Component] + ) -> pd.DataFrame: + if data is None: + return pd.DataFrame(columns=[comp.name for comp in components.values()]) + elif any(comp.data_type is Date for comp in components.values()): + data = data.copy() + for comp_name, comp in components.items(): + if comp.data_type is Date: + data[comp_name] = data[comp_name].astype("date64[pyarrow]") + return data diff --git a/tests/Eval/test_eval.py b/tests/Eval/test_eval.py index 26a0210f2..bcafdcdca 100644 --- a/tests/Eval/test_eval.py +++ b/tests/Eval/test_eval.py @@ -4,6 +4,7 @@ import pytest from tests.Helper import TestHelper +from vtlengine import run from vtlengine.Exceptions import RunTimeError, SemanticError from vtlengine.Operators.General import Eval @@ -167,3 +168,59 @@ def test_execute_query_empty_row_with_function_error(): datasets = {"MSMTCH_BL_DS": pd.DataFrame([{"DT_LGL_FNL_MTRTY": None, "DT_MTRTY_PRTCTN": None}])} with pytest.raises(RunTimeError, match="DuckDB runtime error"): Eval._execute_query(query, ["MSMTCH_BL_DS"], datasets) + + +def test_eval_julian_with_date_columns(): + """Date columns stored as string[pyarrow] should work with DuckDB date functions.""" + + script = """ + DS_r <- eval( + dateDiff(DS_1) + language "sql" + returns dataset { + identifier Id_1, + measure DIFF_DAYS + } + ); + """ + + er = { + "name": "dateDiff", + "query": ( + "SELECT Id_1, CAST(julian(Me_1) - julian(Me_2) AS INTEGER) AS DIFF_DAYS FROM DS_1;" + ), + } + + data_structures = { + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + {"name": "Id_1", "type": "String", "role": "Identifier", 
"nullable": False}, + {"name": "Me_1", "type": "Date", "role": "Measure", "nullable": True}, + {"name": "Me_2", "type": "Date", "role": "Measure", "nullable": True}, + ], + } + ] + } + + df = pd.DataFrame( + { + "Id_1": ["A", "B"], + "Me_1": ["2024-01-15", "2024-06-01"], + "Me_2": ["2024-01-01", "2024-05-01"], + } + ) + datapoints = {"DS_1": df} + + expected_diff_days = (pd.to_datetime(df["Me_1"]) - pd.to_datetime(df["Me_2"])).dt.days + + result = run( + script=script, + data_structures=data_structures, + datapoints=datapoints, + external_routines=er, + ) + + assert result["DS_r"] is not None + assert result["DS_r"].data["DIFF_DAYS"].tolist() == expected_diff_days.tolist() From f0dd08b2416bf71ae7226a175b04c122e3cb1ef3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Fri, 27 Feb 2026 14:55:55 +0100 Subject: [PATCH 04/38] Added "legacy" time period representation (#545) * Added legacy representation method to TimePeriodHandler class * Added legacy time period representation formatter * Added related tests * Renamed format_time_period_external_representation dataset argument to operand. * Added related error message * Updated invalid TimePeriodRepresentation exception * Updated docs * Updated docs * updated sdmx reporting D regex * Added related tests * Updated docs --- docs/data_types.rst | 46 +++++++++++++------ src/vtlengine/API/__init__.py | 4 +- src/vtlengine/DataTypes/TimeHandling.py | 13 ++++++ src/vtlengine/DataTypes/_time_checking.py | 4 +- src/vtlengine/Exceptions/messages.py | 6 +++ .../output/_time_period_representation.py | 38 ++++++++++----- tests/TimePeriod/test_time_period_formats.py | 36 +++++++++++++++ 7 files changed, 116 insertions(+), 31 deletions(-) diff --git a/docs/data_types.rst b/docs/data_types.rst index b864ceb7c..3354bd48d 100644 --- a/docs/data_types.rst +++ b/docs/data_types.rst @@ -179,25 +179,34 @@ Time_Period **Accepted input formats:** .. 
list-table:: - :widths: 20 40 40 + :widths: 15 45 40 :header-rows: 1 - * - Category + * - Period - Formats - Examples - * - VTL compact - - ``YYYY``, ``YYYYA``, ``YYYYSn``, ``YYYYQn``, - ``YYYYMm``, ``YYYYWw``, ``YYYYDd`` - - ``2020``, ``2020A``, ``2020S1``, ``2020Q3``, - ``2020M1``, ``2020W15``, ``2020D100`` - * - SDMX reporting - - ``YYYY-A1``, ``YYYY-Sx``, ``YYYY-Qx``, - ``YYYY-Mxx``, ``YYYY-Wxx``, ``YYYY-Dxxx`` - - ``2020-A1``, ``2020-S1``, ``2020-Q3``, - ``2020-M01``, ``2020-W15``, ``2020-D100`` - * - ISO date/month - - ``YYYY-MM``, ``YYYY-M``, ``YYYY-MM-DD`` - - ``2020-01``, ``2020-1``, ``2020-01-15`` + * - Annual + - ``YYYY``, ``YYYYA``, ``YYYY-A1`` + - ``2020``, ``2020A``, ``2020-A1`` + * - Semester + - ``YYYYSx``, ``YYYY-Sx`` + - ``2020S1``, ``2020-S1`` + * - Quarter + - ``YYYYQx``, ``YYYY-Qx`` + - ``2020Q1``, ``2020-Q1`` + * - Monthly + - ``YYYYMm``, ``YYYYMmm``, ``YYYY-MM``, + ``YYYY-M``, ``YYYY-Mxx``, ``YYYY-Mx`` + - ``2020M1``, ``2020M01``, ``2020-01``, + ``2020-1``, ``2020-M01``, ``2020-M1`` + * - Weekly + - ``YYYYWw``, ``YYYYWww``, ``YYYY-Wxx`` + - ``2020W1``, ``2020W01``, ``2020-W01`` + * - Daily + - ``YYYYD[dd]d``, ``YYYY-D[xx]x``, ``YYYY-MM-DD`` + - ``2020D1``, ``2020D01``, ``2020D001``, + ``2020-D1``, ``2020-D01``, ``2020-D001``, + ``2020-01-01`` **Output formats** (controlled by ``time_period_output_format`` parameter): @@ -234,6 +243,13 @@ parameter): - ``2020-01`` - Not supported - ``2020-01-15`` + * - ``"legacy"`` + - ``2020`` + - ``2020-S1`` + - ``2020-Q1`` + - ``2020-M01`` + - ``2020-W15`` + - ``2020-01-15`` Time_Period is a **subtype of Time** — anywhere a Time value is expected, a Time_Period is accepted automatically. diff --git a/src/vtlengine/API/__init__.py b/src/vtlengine/API/__init__.py index 221bcd8fb..23cdd1a59 100644 --- a/src/vtlengine/API/__init__.py +++ b/src/vtlengine/API/__init__.py @@ -381,7 +381,7 @@ def run( :ref:`Example 5 `.
time_period_output_format: String with the possible values \ - ("sdmx_gregorian", "sdmx_reporting", "vtl") for the representation of the \ + ("sdmx_gregorian", "sdmx_reporting", "vtl", "legacy") for the representation of the \ Time Period components. return_only_persistent: If True, run function will only return the results of \ @@ -523,7 +523,7 @@ def run_sdmx( :ref:`Example 5 `. time_period_output_format: String with the possible values \ - ("sdmx_gregorian", "sdmx_reporting", "vtl") for the representation of the \ + ("sdmx_gregorian", "sdmx_reporting", "vtl", "legacy") for the representation of the \ Time Period components. return_only_persistent: If True, run function will only return the results of \ diff --git a/src/vtlengine/DataTypes/TimeHandling.py b/src/vtlengine/DataTypes/TimeHandling.py index 132380a9f..77ec75b9c 100644 --- a/src/vtlengine/DataTypes/TimeHandling.py +++ b/src/vtlengine/DataTypes/TimeHandling.py @@ -372,6 +372,19 @@ def sdmx_reporting_representation(self) -> str: period_number_str = str(self.period_number) return f"{self.year}-{self.period_indicator}{period_number_str}" + def legacy_representation(self) -> str: + """Legacy representation: YYYY, YYYY-Sx, YYYY-Qx, YYYY-Mxx, YYYY-Wxx, YYYY-MM-DD.""" + if self.period_indicator == "A": + return f"{self.year}" + if self.period_indicator == "D": + d = period_to_date(self.year, "D", self.period_number) + return d.isoformat() + if self.period_indicator in ("M", "W"): + period_number_str = f"{self.period_number:02}" + else: + period_number_str = str(self.period_number) + return f"{self.year}-{self.period_indicator}{period_number_str}" + class TimeIntervalHandler: _date1: str = "0" diff --git a/src/vtlengine/DataTypes/_time_checking.py b/src/vtlengine/DataTypes/_time_checking.py index 685f52357..b53f9a60f 100644 --- a/src/vtlengine/DataTypes/_time_checking.py +++ b/src/vtlengine/DataTypes/_time_checking.py @@ -114,7 +114,7 @@ def check_time(value: str) -> str: r"|^\d{4}Q[1-4]$" # YYYYQN (quarter) 
r"|^\d{4}M[0-1]?\d$" # YYYYM[M] (month, 1 or 2 digits) r"|^\d{4}W[0-5]?\d$" # YYYYW[W] (week, 1 or 2 digits) - r"|^\d{4}D[0-3]?[0-9]?\d$" # YYYYD[DD] (day of year, 1 to 3 digits) + r"|^\d{4}D[0-3]?\d{1,2}$" # YYYYD[DD] (day of year, 1 to 3 digits) ) # Hyphenated formats: YYYY-MM, YYYY-M, YYYY-MM-DD, YYYY-MXX, YYYY-QX, YYYY-SX, YYYY-WXX, @@ -126,7 +126,7 @@ def check_time(value: str) -> str: r"|^\d{4}-Q[1-4]$" # YYYY-QX (hyphenated quarter) r"|^\d{4}-S[1-2]$" # YYYY-SX (hyphenated semester) r"|^\d{4}-W([0-4]\d|5[0-3]|[1-9])$" # YYYY-WXX (hyphenated week) - r"|^\d{4}-D[0-3]\d\d$" # YYYY-DXXX (hyphenated day) + r"|^\d{4}-D[0-3]?\d{1,2}$" # YYYY-D[XX]X (hyphenated day) r"|^\d{4}-A1$" # YYYY-A1 (SDMX reporting annual) ) diff --git a/src/vtlengine/Exceptions/messages.py b/src/vtlengine/Exceptions/messages.py index 0174dca60..4b89fdc91 100644 --- a/src/vtlengine/Exceptions/messages.py +++ b/src/vtlengine/Exceptions/messages.py @@ -75,6 +75,12 @@ "message": "Empty Datasets {dataset1} and {dataset2} shape missmatch.", "description": "Raised when two Datasets are empty or have incompatible shapes.", }, + "0-1-1-15": { + "message": "Invalid time_period_output_format value: '{value}'. 
" + "Allowed formats: {valid_options}.", + "description": "Raised when the provided time period output format " + "is not one of the supported representations.", + }, "0-1-2-3": { "message": "{element_type} '{element}' is/are duplicated.", "description": "Occurs when an element (e.g., Identifier or component) " diff --git a/src/vtlengine/files/output/_time_period_representation.py b/src/vtlengine/files/output/_time_period_representation.py index 3d2725d61..951511b53 100644 --- a/src/vtlengine/files/output/_time_period_representation.py +++ b/src/vtlengine/files/output/_time_period_representation.py @@ -3,6 +3,7 @@ from vtlengine.DataTypes import TimePeriod from vtlengine.DataTypes.TimeHandling import TimePeriodHandler +from vtlengine.Exceptions import InputValidationException from vtlengine.Model import Dataset, Scalar @@ -11,11 +12,15 @@ class TimePeriodRepresentation(Enum): SDMX_GREGORIAN = "sdmx_gregorian" SDMX_REPORTING = "sdmx_reporting" VTL = "vtl" + LEGACY = "legacy" @classmethod def check_value(cls, value: str) -> "TimePeriodRepresentation": if value not in cls._value2member_map_: - raise Exception("Invalid Time Period Representation") + valid_options = ", ".join(m.value for m in cls) + raise InputValidationException( + code="0-1-1-15", value=value, valid_options=valid_options + ) return cls(value) @@ -31,8 +36,12 @@ def _format_sdmx_reporting_representation(value: str) -> str: return TimePeriodHandler(value).sdmx_reporting_representation() +def _format_legacy_representation(value: str) -> str: + return TimePeriodHandler(value).legacy_representation() + + def format_time_period_external_representation( - dataset: Union[Dataset, Scalar], mode: TimePeriodRepresentation + operand: Union[Dataset, Scalar], mode: TimePeriodRepresentation ) -> None: """ Converts internal time period representation to the requested external format. 
@@ -40,21 +49,24 @@ def format_time_period_external_representation( SDMX Reporting: YYYY-A1, YYYY-Ss, YYYY-Qq, YYYY-Mmm, YYYY-Www, YYYY-Dddd SDMX Gregorian: YYYY, YYYY-MM, YYYY-MM-DD (only A, M, D supported) VTL: YYYY, YYYYSn, YYYYQn, YYYYMm, YYYYWw, YYYYDd (no hyphens) + Legacy: YYYY, YYYY-Sx, YYYY-Qx, YYYY-Mxx, YYYY-Wxx, YYYY-MM-DD """ - if isinstance(dataset, Scalar): - if dataset.data_type != TimePeriod or dataset.value is None: + if isinstance(operand, Scalar): + if operand.data_type != TimePeriod or operand.value is None: return - value = dataset.value + value = operand.value if mode == TimePeriodRepresentation.VTL: - dataset.value = _format_vtl_representation(value) + operand.value = _format_vtl_representation(value) elif mode == TimePeriodRepresentation.SDMX_GREGORIAN: - dataset.value = _format_sdmx_gregorian_representation(value) + operand.value = _format_sdmx_gregorian_representation(value) elif mode == TimePeriodRepresentation.SDMX_REPORTING: - dataset.value = _format_sdmx_reporting_representation(value) + operand.value = _format_sdmx_reporting_representation(value) + elif mode == TimePeriodRepresentation.LEGACY: + operand.value = _format_legacy_representation(value) return - if dataset.data is None or len(dataset.data) == 0: + if operand.data is None or len(operand.data) == 0: return if mode == TimePeriodRepresentation.VTL: formatter = _format_vtl_representation @@ -62,9 +74,11 @@ def format_time_period_external_representation( formatter = _format_sdmx_gregorian_representation elif mode == TimePeriodRepresentation.SDMX_REPORTING: formatter = _format_sdmx_reporting_representation + elif mode == TimePeriodRepresentation.LEGACY: + formatter = _format_legacy_representation - for comp in dataset.components.values(): + for comp in operand.components.values(): if comp.data_type == TimePeriod: - dataset.data[comp.name] = ( - dataset.data[comp.name].map(formatter, na_action="ignore").astype("string[pyarrow]") + operand.data[comp.name] = ( + 
operand.data[comp.name].map(formatter, na_action="ignore").astype("string[pyarrow]") ) diff --git a/tests/TimePeriod/test_time_period_formats.py b/tests/TimePeriod/test_time_period_formats.py index 88c0bb016..830af6c58 100644 --- a/tests/TimePeriod/test_time_period_formats.py +++ b/tests/TimePeriod/test_time_period_formats.py @@ -187,6 +187,8 @@ def test_check_time_period_all_formats_consistent(expected: str, inputs: list) - ("2020W1", "2020W1", "week 1"), ("2020W53", "2020W53", "week 53"), ("2020D1", "2020D1", "day 1"), + ("2020D01", "2020D1", "day 1"), + ("2020D001", "2020D1", "day 1"), ("2020D100", "2020D100", "day 100"), ("2020D366", "2020D366", "day 366"), ] @@ -212,6 +214,8 @@ def test_vtl_representation(internal: str, expected: str) -> None: ("2020W1", "2020-W01", "week 1 zero-padded"), ("2020W53", "2020-W53", "week 53"), ("2020D1", "2020-D001", "day 1 zero-padded"), + ("2020D01", "2020-D001", "day 1 zero-padded"), + ("2020D001", "2020-D001", "day 1 zero-padded"), ("2020D100", "2020-D100", "day 100"), ("2020D366", "2020-D366", "day 366"), ] @@ -262,6 +266,31 @@ def test_sdmx_gregorian_representation_unsupported(internal: str) -> None: TimePeriodHandler(internal).sdmx_gregorian_representation() +legacy_repr_params = [ + ("2020A", "2020", "annual"), + ("2020S1", "2020-S1", "semester 1"), + ("2020S2", "2020-S2", "semester 2"), + ("2020Q1", "2020-Q1", "quarter 1"), + ("2020Q4", "2020-Q4", "quarter 4"), + ("2020M1", "2020-M01", "month 1"), + ("2020M12", "2020-M12", "month 12"), + ("2020W1", "2020-W01", "week 1"), + ("2020W53", "2020-W53", "week 53"), + ("2020D1", "2020-01-01", "day 1"), + ("2020D59", "2020-02-28", "day 59"), + ("2020D366", "2020-12-31", "day 366 leap year"), +] + + +@pytest.mark.parametrize( + "internal, expected", + [(c[0], c[1]) for c in legacy_repr_params], + ids=[c[2] for c in legacy_repr_params], +) +def test_legacy_representation(internal: str, expected: str) -> None: + assert TimePeriodHandler(internal).legacy_representation() == expected 
+ + # VTL Data Types to external representations tests @@ -298,6 +327,13 @@ def get_tp_scalar(value: Optional[str]) -> Scalar: ("2020A", TimePeriodRepresentation.SDMX_GREGORIAN, "2020", "gregorian annual"), ("2020-M01", TimePeriodRepresentation.SDMX_GREGORIAN, "2020-01", "gregorian month"), ("2020-D001", TimePeriodRepresentation.SDMX_GREGORIAN, "2020-01-01", "gregorian day"), + # Legacy + ("2020A", TimePeriodRepresentation.LEGACY, "2020", "legacy annual"), + ("2020-M01", TimePeriodRepresentation.LEGACY, "2020-M01", "legacy month"), + ("2020-Q3", TimePeriodRepresentation.LEGACY, "2020-Q3", "legacy quarter"), + ("2020-S2", TimePeriodRepresentation.LEGACY, "2020-S2", "legacy semester"), + ("2020-W01", TimePeriodRepresentation.LEGACY, "2020-W01", "legacy week"), + ("2020-D001", TimePeriodRepresentation.LEGACY, "2020-01-01", "legacy day"), ] From b855d0330d66eacb867852eb16f2d39a6464fbb6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Mon, 2 Mar 2026 10:17:26 +0100 Subject: [PATCH 05/38] Fix #544: Add Extra Inputs documentation page (#548) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add Extra Inputs documentation page for Value Domains and External Routines (#544) * Improve extra_inputs docs and fix deploy job skip on release - Add Time format example in Value Domains supported types - Add SQL file example in External Routines - Add note that only SQL external routines are supported - Fix function names: validate_value_domain, validate_external_routine - Fix deploy job being skipped when check-docs-label is skipped * Remove broken .sql file support for external routines The directory loading path filtered for .sql files but the file handler only accepted .json, causing all .sql loads to fail. Removed the dead .sql code path and updated docs to reflect JSON-only file support. 
* Fix external_routines docstrings and type signature Update run() and run_sdmx() docstrings from "String or Path" to "Dict or Path" to match semantic_analysis() and value_domains. Remove dead str type from load_external_routines() signature since strings are rejected at runtime. * Add automated tests for documentation Python examples - Extract and execute Python code blocks from RST files (walkthrough.rst, extra_inputs.rst) - Validate run results against reference CSV files using pyarrow dtype comparison - Fix pre-existing bugs in walkthrough examples: wrong path casing (Docs/ → docs/), language "sqlite" → "SQL", Me_1 → Id_2 in VD membership, variable name typo, malformed value_domains dict, wrong VD/routine names in Example_6.vtl - Update reference CSVs (Example_5.csv, Example_6_output.csv) to match corrected examples * Fix incorrect parameter name in S3 example Rename `output` to `output_folder` in environment_variables.rst to match the actual run() API signature. * Fix Python 3.9 compatibility in doc example tests Replace `str | None` (PEP 604, requires 3.10+) with `Optional[str]` to support Python 3.9. * Fix Windows encoding error in RST code extractor Specify UTF-8 encoding in read_text() to avoid charmap codec errors on Windows. 
--- .github/workflows/docs.yml | 1 + docs/_static/Example_5.csv | 4 +- docs/_static/Example_6.vtl | 6 +- docs/_static/Example_6_output.csv | 4 +- docs/environment_variables.rst | 2 +- docs/extra_inputs.rst | 464 ++++++++++++++++++++++++ docs/index.rst | 14 + docs/walkthrough.rst | 43 ++- src/vtlengine/API/_InternalApi.py | 4 +- src/vtlengine/API/__init__.py | 8 +- tests/DocScripts/_rst_code_extractor.py | 113 ++++++ tests/DocScripts/test_doc_examples.py | 145 ++++++++ 12 files changed, 774 insertions(+), 34 deletions(-) create mode 100644 docs/extra_inputs.rst create mode 100644 tests/DocScripts/_rst_code_extractor.py create mode 100644 tests/DocScripts/test_doc_examples.py diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 643c12ff8..ff8f20863 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -143,6 +143,7 @@ jobs: # Deployment job deploy: + if: always() && !cancelled() && needs.build.result == 'success' environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} diff --git a/docs/_static/Example_5.csv b/docs/_static/Example_5.csv index 537b2158e..94cad6ed0 100644 --- a/docs/_static/Example_5.csv +++ b/docs/_static/Example_5.csv @@ -1,4 +1,4 @@ Id_1,Id_2,Me_1,Me_2 2012,AT,0.0,False -2012,DE,4.0,False -2012,FR,9.0,False \ No newline at end of file +2012,DE,4.0,True +2012,FR,9.0,True diff --git a/docs/_static/Example_6.vtl b/docs/_static/Example_6.vtl index bf33b8486..d021ee1d1 100644 --- a/docs/_static/Example_6.vtl +++ b/docs/_static/Example_6.vtl @@ -1,3 +1,3 @@ -Example_6 <- DS_1 [ calc Me_2:= Me_1 in Countries_EU_Sample]; -Example_6_2 <- eval(SQL_3(DS_1) language "sqlite" returns dataset { identifier Id_1, -measure Me_1}); \ No newline at end of file +Example_6 <- DS_1 [ calc Me_2:= Id_2 in Countries]; +Example_6_2 <- eval(SQL_4(DS_1) language "SQL" returns dataset { identifier Id_1, +measure Me_1}); diff --git a/docs/_static/Example_6_output.csv b/docs/_static/Example_6_output.csv index 
537b2158e..151b035ab 100644 --- a/docs/_static/Example_6_output.csv +++ b/docs/_static/Example_6_output.csv @@ -1,4 +1,4 @@ Id_1,Id_2,Me_1,Me_2 -2012,AT,0.0,False +2012,AT,0.0,True 2012,DE,4.0,False -2012,FR,9.0,False \ No newline at end of file +2012,FR,9.0,False diff --git a/docs/environment_variables.rst b/docs/environment_variables.rst index 0018c6058..fd2a22345 100644 --- a/docs/environment_variables.rst +++ b/docs/environment_variables.rst @@ -148,7 +148,7 @@ Using S3 with environment variables script="DS_r := DS_1;", data_structures=data_structures, datapoints="s3://my-bucket/input/DS_1.csv", - output="s3://my-bucket/output/", + output_folder="s3://my-bucket/output/", ) Using a custom S3 endpoint diff --git a/docs/extra_inputs.rst b/docs/extra_inputs.rst new file mode 100644 index 000000000..e3789e498 --- /dev/null +++ b/docs/extra_inputs.rst @@ -0,0 +1,464 @@ +############ +Extra Inputs +############ + +Both :meth:`vtlengine.run` and :meth:`vtlengine.semantic_analysis` +accept two optional parameters — ``value_domains`` and +``external_routines`` — that extend a VTL script with +membership checks and SQL-based transformations respectively. + +This page documents the definition format, input options, +VTL syntax, and validation for each feature. + +.. seealso:: + + - :ref:`example_5_run_with_multiple_value_domains_and_external_routines` + — Walkthrough example using both features as dictionaries + - :ref:`example_6_run_using_paths` + — Walkthrough example using both features as Path objects + - `VTL 2.1 Reference Manual + `_ + — Full VTL specification + + +Value Domains +************* + +A Value Domain is a named set of unique values that share +a common data type. Value domains are used with the ``in`` +and ``not_in`` operators to perform membership checks +in VTL scripts. + + +Definition Format +================= + +Each value domain is a JSON object with three required fields: + +.. 
list-table:: + :widths: 20 15 65 + :header-rows: 1 + + * - Field + - Type + - Description + * - ``name`` + - string + - Unique identifier referenced in the VTL script. + * - ``type`` + - string + - Data type of the values. See supported types below. + * - ``setlist`` + - array + - List of unique values belonging to the domain. + Items must match the declared ``type``. + +Example: + +.. code-block:: json + + { + "name": "Countries", + "type": "String", + "setlist": ["DE", "FR", "IT", "ES"] + } + +Multiple value domains can be provided as a JSON array: + +.. code-block:: json + + [ + { + "name": "Countries", + "type": "String", + "setlist": ["DE", "FR", "IT"] + }, + { + "name": "Thresholds", + "type": "Integer", + "setlist": [10, 20, 50, 100] + } + ] + + +Supported Types +=============== + +.. list-table:: + :widths: 25 75 + :header-rows: 1 + + * - Type + - ``setlist`` item type + * - ``Integer`` + - JSON integer (e.g. ``1``, ``42``) + * - ``Number`` + - JSON number (e.g. ``3.14``, ``100``) + * - ``String`` + - JSON string (e.g. ``"DE"``, ``"hello"``) + * - ``Boolean`` + - JSON boolean (``true`` or ``false``) + * - ``Date`` + - JSON string in date format (e.g. ``"2024-01-15"``) + * - ``Time_Period`` + - JSON string in time period format + (e.g. ``"2024Q1"``) + * - ``Time`` + - JSON string as ISO 8601 interval + (e.g. ``"2024-01-01/2024-12-31"``) + * - ``Duration`` + - JSON string in duration format (e.g. ``"P1Y"``) + + +Input Options +============= + +The ``value_domains`` parameter accepts the following formats: + +- **Dictionary**: A single value domain as a Python dict. +- **Path to a JSON file**: A ``Path`` pointing to a ``.json`` + file containing one or more value domain definitions. +- **Path to a directory**: A ``Path`` pointing to a directory; + all ``.json`` files in the directory are loaded. +- **List**: A list mixing any of the above formats. + +.. 
code-block:: python + + from pathlib import Path + + # Single dict + value_domains = { + "name": "Countries", + "type": "String", + "setlist": ["DE", "FR", "IT"], + } + + # Path to file + value_domains = Path("data/value_domains.json") + + # Path to directory (loads all .json files) + value_domains = Path("data/value_domains/") + + # List of mixed formats + value_domains = [ + {"name": "Countries", "type": "String", "setlist": ["DE", "FR"]}, + Path("data/extra_domains.json"), + ] + + +VTL Usage +========= + +Value domains are referenced in VTL scripts using the ``in`` +and ``not_in`` operators. These can be used in both scalar +and component contexts. + +.. code-block:: text + + /* Scalar membership: returns Boolean */ + DS_r <- DS_1 [calc Me_2 := Me_1 in Countries]; + + /* Negated membership */ + DS_r <- DS_1 [calc Me_2 := Me_1 not_in Countries]; + + /* Filter rows where a component belongs to a domain */ + DS_r <- DS_1 [filter Me_1 in Thresholds]; + + +Example +======= + +.. code-block:: python + + import pandas as pd + + from vtlengine import run + + script = """ + DS_r <- DS_1 [calc Me_2 := Me_1 in Countries]; + """ + + data_structures = { + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "String", + "role": "Measure", + "nullable": True, + }, + ], + } + ] + } + + datapoints = { + "DS_1": pd.DataFrame( + {"Id_1": [1, 2, 3], "Me_1": ["DE", "US", "FR"]} + ), + } + + value_domains = { + "name": "Countries", + "type": "String", + "setlist": ["DE", "FR", "IT"], + } + + result = run( + script=script, + data_structures=data_structures, + datapoints=datapoints, + value_domains=value_domains, + ) + + print(result["DS_r"]) + + +Validation +========== + +Use :meth:`vtlengine.validate_value_domain` to validate +the JSON structure of value domains before execution: + +.. 
code-block:: python + + from vtlengine import validate_value_domain + + value_domains = { + "name": "Countries", + "type": "String", + "setlist": ["DE", "FR", "IT"], + } + + # Raises an exception if the structure is invalid + validate_value_domain(value_domains) + + +External Routines +***************** + +External Routines allow VTL scripts to execute SQL queries +through the ``eval()`` operator. Queries are executed in +a sandboxed `DuckDB `_ environment. + +.. note:: + + Currently, only SQL external routines are supported. + The ``language`` parameter in the ``eval()`` call must + be set to ``"SQL"``. + + +Definition Format +================= + +Each external routine is a JSON object with two required fields: + +.. list-table:: + :widths: 20 15 65 + :header-rows: 1 + + * - Field + - Type + - Description + * - ``name`` + - string + - Identifier referenced in the VTL ``eval()`` call. + * - ``query`` + - string + - SQL query to execute. Table names in the query must + match the dataset names passed as operands. + +**JSON format**: + +.. code-block:: json + + { + "name": "SQL_1", + "query": "SELECT Id_1, SUM(Me_1) AS Me_1 FROM DS_1 GROUP BY Id_1;" + } + +Multiple routines can be provided as a JSON array: + +.. code-block:: json + + [ + { + "name": "SQL_1", + "query": "SELECT Id_1, COUNT(*) AS Me_1 FROM DS_1 GROUP BY Id_1;" + }, + { + "name": "SQL_2", + "query": "SELECT Id_1, Me_1 FROM DS_1 WHERE Me_1 > 10;" + } + ] + + +Input Options +============= + +The ``external_routines`` parameter accepts the following formats: + +- **Dictionary**: A single routine as a Python dict. +- **Path to a file**: A ``Path`` pointing to a ``.json`` file. +- **Path to a directory**: A ``Path`` pointing to a directory; + all ``.json`` files in the directory are loaded. +- **List**: A list mixing any of the above formats. + +.. 
code-block:: python + + from pathlib import Path + + # Single dict + external_routines = { + "name": "SQL_1", + "query": "SELECT Id_1, COUNT(*) AS Me_1 FROM DS_1 GROUP BY Id_1;", + } + + # Path to file + external_routines = Path("data/SQL_1.json") + + # Path to directory (loads all .json files) + external_routines = Path("data/routines/") + + # List of mixed formats + external_routines = [ + {"name": "SQL_1", "query": "SELECT * FROM DS_1;"}, + Path("data/SQL_2.json"), + ] + + +VTL Syntax +========== + +The ``eval()`` operator invokes an external routine: + +.. code-block:: text + + DS_r := eval( + SQL_NAME(DS_1, DS_2) + language "SQL" + returns dataset { + identifier Id_1, + measure Me_1 + } + ); + +- **SQL_NAME**: Name matching the external routine definition. +- **Operands**: Input datasets passed to the SQL query + (``DS_1``, ``DS_2``, etc.). +- **language**: Must be ``"SQL"``. +- **returns dataset**: Defines the output structure with + component roles (``identifier``, ``measure``, + ``attribute``) and types. + +.. note:: + + The column names in the SQL query result must match the + component names declared in the ``returns dataset`` clause. + + +Security +======== + +External routines are executed in a sandboxed DuckDB +in-memory database with the following restrictions: + +- ``INSTALL`` and ``LOAD`` commands are forbidden. +- URLs (``http://``, ``https://``) in ``FROM`` clauses + are forbidden. +- External file access is disabled. +- Extension loading is disabled. +- Configuration is locked after initialization. +- Results are checked for infinite values. + + +Examples +======== + +Using a dictionary +^^^^^^^^^^^^^^^^^^ + +.. 
code-block:: python + + import pandas as pd + + from vtlengine import run + + script = """ + DS_r <- eval( + SQL_1(DS_1) + language "SQL" + returns dataset { + identifier Id_1, + measure Me_1 + } + ); + """ + + data_structures = { + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": True, + }, + ], + } + ] + } + + datapoints = { + "DS_1": pd.DataFrame( + {"Id_1": [1, 2, 3, 4, 5], "Me_1": [10, 20, 30, 40, 50]} + ), + } + + external_routines = { + "name": "SQL_1", + "query": "SELECT Id_1, Me_1 * 2 AS Me_1 FROM DS_1;", + } + + result = run( + script=script, + data_structures=data_structures, + datapoints=datapoints, + external_routines=external_routines, + ) + + print(result["DS_r"]) + +Validation +========== + +Use :meth:`vtlengine.validate_external_routine` to validate +the JSON structure and SQL syntax before execution: + +.. code-block:: python + + from vtlengine import validate_external_routine + + external_routines = { + "name": "SQL_1", + "query": "SELECT Id_1, SUM(Me_1) AS Me_1 FROM DS_1 GROUP BY Id_1;", + } + + # Raises an exception if the structure or SQL is invalid + validate_external_routine(external_routines) diff --git a/docs/index.rst b/docs/index.rst index 474870bb7..1eae82da8 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -63,11 +63,25 @@ If you would like to use the S3 capabilities, you need to install the `s3` extra The S3 extra is based on the pandas[aws] extra, which requires to set up some environment variables. Please check the `boto3 documentation `_ .. toctree:: + :caption: Getting Started walkthrough + +.. toctree:: + :caption: Using the Engine + api data_types + extra_inputs + +.. toctree:: + :caption: Configuration + environment_variables + +.. 
toctree:: + :caption: Error Messages + error_messages diff --git a/docs/walkthrough.rst b/docs/walkthrough.rst index 54e40a5a8..badc9d460 100644 --- a/docs/walkthrough.rst +++ b/docs/walkthrough.rst @@ -36,10 +36,10 @@ Any VTL action requires the following elements as input: * **External Routines**: The VTL Engine supports the use of SQL (ISO/IEC 9075) within the `eval` - operator. External routines can be provided as a SQL string, a `Path` - object, or a list of such elements pointing to `.sql` files. The - default value is `None`, which should be used if external routines - are not applicable to the script. See :ref:`example 5 + operator. External routines can be provided as a dictionary, a `Path` + object pointing to a `.json` file or directory, or a list of such + elements. The default value is `None`, which should be used if external + routines are not applicable to the script. See :ref:`example 5 ` for an example. @@ -326,8 +326,8 @@ Optional settings are the same as in the `run` method, including: from vtlengine import run_sdmx - data = Path("Docs/_static/data.xml") - structure = Path("Docs/_static/metadata.xml") + data = Path("docs/_static/data.xml") + structure = Path("docs/_static/metadata.xml") datasets = get_datasets(data, structure) script = "DS_r <- DS_1 [calc Me_4 := OBS_VALUE];" print(run_sdmx(script, datasets)['DS_r'].data) @@ -349,8 +349,8 @@ If no mapping is provided, the VTL script must have a single input, and the data from vtlengine import run_sdmx - data = Path("Docs/_static/data.xml") - structure = Path("Docs/_static/metadata.xml") + data = Path("docs/_static/data.xml") + structure = Path("docs/_static/metadata.xml") datasets = get_datasets(data, structure) script = TransformationScheme( id="TS1", @@ -399,8 +399,8 @@ Finally, mapping information can be used to link an SDMX input dataset to a VTL from vtlengine import run_sdmx - data = Path("Docs/_static/data.xml") - structure = Path("Docs/_static/metadata.xml") + data = 
Path("docs/_static/data.xml") + structure = Path("docs/_static/metadata.xml") datasets = get_datasets(data, structure) script = TransformationScheme( id="TS1", @@ -553,8 +553,8 @@ Example 5: Run with multiple Value Domains and External Routines as dictionaries def main(): script = """ - Example_5 <- DS_1 [ calc Me_2:= Me_1 in Countries]; - Example_5_2 <- eval(SQL_3(DS_1) language "sqlite" returns dataset { identifier Id_1, + Example_5 <- DS_1 [ calc Me_2:= Id_2 in Countries]; + Example_5_2 <- eval(SQL_3(DS_1) language "SQL" returns dataset { identifier Id_1, measure Me_1}); """ @@ -583,8 +583,11 @@ Example 5: Run with multiple Value Domains and External Routines as dictionaries } - value_domains = - {"name": "Countries", "setlist": ["DE", "FR", "IT"], "type": "String"} + value_domains = { + "name": "Countries", + "setlist": ["DE", "FR", "IT"], + "type": "String", + } run_result = run( script=script, @@ -624,13 +627,13 @@ Here, `DS_1` is the dictionary key that matches the dataset defined in the data from vtlengine import run def main(): - filepath_external_routines = Path("Docs/_static/SQL_4.json") - filepath_ValueDomains = Path("Docs/_static/VD_2.json") - filepath_vtl_script = Path("Docs/_static/Example_6.vtl") - filepath_data_structures = Path("Docs/_static/Example_6.json") - filepath_data = Path("Docs/_static/Example_6_input.csv") + filepath_external_routines = Path("docs/_static/SQL_4.json") + filepath_ValueDomains = Path("docs/_static/VD_2.json") + filepath_vtl_script = Path("docs/_static/Example_6.vtl") + filepath_data_structures = Path("docs/_static/Example_6.json") + filepath_data = Path("docs/_static/Example_6_input.csv") - datastructures = filepath_data_structures + data_structures = filepath_data_structures datapoints = {"DS_1": filepath_data} script = filepath_vtl_script external_routines = filepath_external_routines diff --git a/src/vtlengine/API/_InternalApi.py b/src/vtlengine/API/_InternalApi.py index 3f4f18379..8b179d196 100644 --- 
a/src/vtlengine/API/_InternalApi.py +++ b/src/vtlengine/API/_InternalApi.py @@ -659,7 +659,7 @@ def load_value_domains( def load_external_routines( - input: Union[Dict[str, Any], Path, str, List[Union[Dict[str, Any], Path]]], + input: Union[Dict[str, Any], Path, List[Union[Dict[str, Any], Path]]], ) -> Any: """ Load the external routines. @@ -695,7 +695,7 @@ def load_external_routines( raise DataLoadError(code="0-3-1-1", file=input) if input.is_dir(): for f in input.iterdir(): - if f.suffix != ".sql": + if f.suffix != ".json": continue ext_rout = _load_single_external_routine_from_file(f) external_routines[ext_rout.name] = ext_rout diff --git a/src/vtlengine/API/__init__.py b/src/vtlengine/API/__init__.py index 23cdd1a59..7e2df21ba 100644 --- a/src/vtlengine/API/__init__.py +++ b/src/vtlengine/API/__init__.py @@ -233,8 +233,8 @@ def semantic_analysis( Check the following example: \ :ref:`Example 5 `. - external_routines: String or Path, or List of Strings or Paths of the \ - external routines SQL files. (default: None) It is passed as an object, that can be read \ + external_routines: Dict or Path, or List of Dicts or Paths of the \ + external routines JSON files. (default: None) It is passed as an object, that can be read \ from a Path or from a dictionary. Furthermore, a list of those objects can be passed. \ Check the following example: \ :ref:`Example 5 `. @@ -374,7 +374,7 @@ def run( Check the following example: \ :ref:`Example 5 `. - external_routines: String or Path, or List of Strings or Paths of the \ + external_routines: Dict or Path, or List of Dicts or Paths of the \ external routines JSON files. (default: None) It is passed as an object, that can be read \ from a Path or from a dictionary. Furthermore, a list of those objects can be passed. \ Check the following example: \ @@ -516,7 +516,7 @@ def run_sdmx( Check the following example: \ :ref:`Example 5 `. 
- external_routines: String or Path, or List of Strings or Paths of the \ + external_routines: Dict or Path, or List of Dicts or Paths of the \ external routines JSON files. (default: None) It is passed as an object, that can be read \ from a Path or from a dictionary. Furthermore, a list of those objects can be passed. \ Check the following example: \ diff --git a/tests/DocScripts/_rst_code_extractor.py b/tests/DocScripts/_rst_code_extractor.py new file mode 100644 index 000000000..8bcebf2c6 --- /dev/null +++ b/tests/DocScripts/_rst_code_extractor.py @@ -0,0 +1,113 @@ +"""Utility to extract runnable Python code blocks from RST documentation files.""" + +import re +from dataclasses import dataclass, field +from pathlib import Path +from typing import List + + +@dataclass +class CodeBlock: + """A Python code block extracted from an RST file.""" + + source: str + line_number: int + expects_error: bool + csv_references: List[str] = field(default_factory=list) + + +def extract_python_blocks(rst_path: Path) -> List[CodeBlock]: + """Extract Python code blocks from an RST file. + + Parses ``.. code-block:: python`` directives and returns their content. + Detects blocks that are followed by error-related text to flag them + as expecting an exception. Also captures ``csv-table`` ``:file:`` references + that follow the block for output validation. + + Args: + rst_path: Path to the RST file. + + Returns: + List of CodeBlock objects with source code and metadata. + """ + text = rst_path.read_text(encoding="utf-8") + lines = text.splitlines() + blocks: List[CodeBlock] = [] + + i = 0 + while i < len(lines): + stripped = lines[i].strip() + if stripped == ".. 
code-block:: python": + block_start_line = i + 1 # 1-indexed for display + i += 1 + # Skip blank lines after directive + while i < len(lines) and lines[i].strip() == "": + i += 1 + if i >= len(lines): + break + # Determine indentation of the code block + indent_match = re.match(r"^(\s+)", lines[i]) + if not indent_match: + continue + indent = indent_match.group(1) + indent_len = len(indent) + + # Collect indented lines + code_lines: List[str] = [] + while i < len(lines) and (lines[i].strip() == "" or lines[i].startswith(indent)): + # De-indent the line + if lines[i].strip() == "": + code_lines.append("") + else: + code_lines.append(lines[i][indent_len:]) + i += 1 + + # Strip trailing blank lines + while code_lines and code_lines[-1].strip() == "": + code_lines.pop() + + source = "\n".join(code_lines) + + # Scan the following lines for error text and csv-table references + expects_error = False + csv_references: List[str] = [] + for j in range(i, min(i + 30, len(lines))): + line = lines[j].strip() + if re.search(r"raises the following error", line, re.IGNORECASE): + expects_error = True + # Collect csv-table :file: references + file_match = re.match(r":file:\s+_static/(.+\.csv)", line) + if file_match: + csv_references.append(file_match.group(1)) + # Stop scanning at the next code-block or major section + if line.startswith(".. code-block:: python") or re.match(r"^[=*]{3,}$", line): + break + + blocks.append( + CodeBlock( + source=source, + line_number=block_start_line, + expects_error=expects_error, + csv_references=csv_references, + ) + ) + else: + i += 1 + + return blocks + + +def is_runnable(block: CodeBlock) -> bool: + """Determine if a code block is self-contained and runnable. + + A block is runnable if it imports from vtlengine and does not + reference file paths that don't exist in the test environment. 
+ """ + if "from vtlengine import" not in block.source and "import vtlengine" not in block.source: + return False + # Skip blocks that reference non-existent file paths + non_runnable_patterns = [ + 'Path("path/to/', + "s3://", + ] + return not any(pattern in block.source for pattern in non_runnable_patterns) diff --git a/tests/DocScripts/test_doc_examples.py b/tests/DocScripts/test_doc_examples.py new file mode 100644 index 000000000..86a03867d --- /dev/null +++ b/tests/DocScripts/test_doc_examples.py @@ -0,0 +1,145 @@ +"""Test that Python code examples in RST documentation files run correctly.""" + +import re +from pathlib import Path +from typing import Dict, List, Optional, Tuple, Union + +import pandas as pd +import pytest + +from tests.DocScripts._rst_code_extractor import CodeBlock, extract_python_blocks, is_runnable +from vtlengine.Exceptions import SemanticError +from vtlengine.Model import Dataset, Scalar + +docs_dir = Path(__file__).resolve().parents[2] / "docs" +static_dir = docs_dir / "_static" + + +def get_runnable_blocks() -> List[Tuple[str, CodeBlock]]: + """Scan all RST files in docs/ and return runnable Python code blocks.""" + blocks: List[Tuple[str, CodeBlock]] = [] + for rst_file in sorted(docs_dir.glob("*.rst")): + for block in extract_python_blocks(rst_file): + if is_runnable(block): + blocks.append((rst_file.name, block)) + return blocks + + +def _block_id(item: Tuple[str, CodeBlock]) -> str: + rst_name, block = item + return f"{rst_name}::line_{block.line_number}" + + +def _preprocess_for_result_capture(source: str) -> str: + """Transform source code to capture run results in the namespace. + + Handles two patterns: + 1. def main() blocks that print results — replace print with return, add main() call. + 2. Inline print(run(...)[...]) calls — replace with a plain assignment. 
+ """ + if "def main():" in source: + source = re.sub(r"\bprint\(run_result\)", "return run_result", source) + source = re.sub(r"\bprint\(result\)", "return result", source) + source += "\nrun_result = main()\n" + return source + + # Replace print(run_sdmx(...)[...].data) or print(run(...)[...]) with assignment + source = re.sub( + r"\bprint\((run(?:_sdmx)?\([^)]*\)).*\)", + r"run_result = \1", + source, + ) + return source + + +def _exec_block(source: str, filename: str, capture_results: bool = False) -> dict[str, object]: + """Execute a code block and return the resulting namespace.""" + if capture_results: + source = _preprocess_for_result_capture(source) + namespace: dict[str, object] = {} + exec(compile(source, filename, "exec"), namespace) # noqa: S102 + return namespace + + +def _find_result_datasets( + namespace: dict[str, object], +) -> Dict[str, Union[Dataset, Scalar]]: + """Extract run result datasets from the exec namespace.""" + for var_name in ("run_result", "result"): + val = namespace.get(var_name) + if isinstance(val, dict) and any(isinstance(v, (Dataset, Scalar)) for v in val.values()): + return val # type: ignore[return-value] + + # Fallback: any dict of Datasets + for val in namespace.values(): + if isinstance(val, dict) and any(isinstance(v, (Dataset, Scalar)) for v in val.values()): + return val # type: ignore[return-value] + return {} + + +def _validate_csv_outputs( + namespace: dict[str, object], + csv_references: List[str], +) -> None: + """Compare run results against reference CSV files.""" + results = _find_result_datasets(namespace) + for csv_file in csv_references: + csv_path = static_dir / csv_file + expected_df = pd.read_csv(csv_path, dtype_backend="pyarrow") + + dataset_name = _csv_to_dataset_name(csv_file, results) + assert dataset_name is not None, ( + f"Could not match {csv_file} to any result dataset. 
Available: {list(results.keys())}" + ) + + result = results[dataset_name] + if isinstance(result, Scalar): + continue + assert isinstance(result, Dataset) + actual_df = result.data.reset_index(drop=True) + expected_df = expected_df.reset_index(drop=True) + + # Normalize empty strings to NA and cast actual to match expected types + actual_df = actual_df.replace("", pd.NA) + for col in expected_df.columns: + if col in actual_df.columns: + actual_df[col] = actual_df[col].astype(expected_df[col].dtype) + + pd.testing.assert_frame_equal(actual_df, expected_df, check_dtype=False) + + +def _csv_to_dataset_name( + csv_file: str, + results: Dict[str, Union[Dataset, Scalar]], +) -> Optional[str]: + """Map a CSV filename to a dataset name in the results dict.""" + stem = csv_file.removesuffix(".csv") + # Direct match + if stem in results: + return stem + # Strip common suffixes + for suffix in ("_run", "_run_sdmx", "_run_with_scalars", "_output", "_2_output"): + candidate = stem.removesuffix(suffix) + if candidate != stem and candidate in results: + return candidate + return None + + +runnable_blocks = get_runnable_blocks() + + +@pytest.mark.parametrize( + "rst_file,block", + runnable_blocks, + ids=[_block_id(b) for b in runnable_blocks], +) +def test_doc_example(rst_file: str, block: CodeBlock) -> None: + """Execute a Python code block extracted from documentation.""" + filename = f"<{rst_file}:{block.line_number}>" + if block.expects_error: + with pytest.raises(SemanticError): + _exec_block(block.source, filename) + else: + namespace = _exec_block(block.source, filename, capture_results=bool(block.csv_references)) + if block.csv_references: + _validate_csv_outputs(namespace, block.csv_references) From 4c49f08205130571535a1632b18fed46dd0351f2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Mon, 2 Mar 2026 10:32:19 +0100 Subject: [PATCH 06/38] Bump version to 1.6.0rc2 (#549) * Bump version to 1.6.0rc2 * Update AI coding 
assistant instructions with version bump branch naming convention --- .claude/CLAUDE.md | 7 ++++--- .github/copilot-instructions.md | 7 ++++--- pyproject.toml | 2 +- src/vtlengine/__init__.py | 2 +- 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/.claude/CLAUDE.md b/.claude/CLAUDE.md index 98436dde7..3c2232b13 100644 --- a/.claude/CLAUDE.md +++ b/.claude/CLAUDE.md @@ -1,4 +1,4 @@ -# VTL Engine - Claude Code Instructions +# VTL Engine - AI Coding Assistant Instructions ## Project Overview @@ -196,7 +196,8 @@ Only use the following labels — **never create new labels**: ### Branch Naming -Pattern: `cr-{issue_number}` (e.g., `cr-457` for issue #457) +- Issue branches: `cr-{issue_number}` (e.g., `cr-457` for issue #457) +- Version bump branches: `bump-version-{version}` (e.g., `bump-version-1.6.0rc2`) ### Workflow @@ -264,7 +265,7 @@ print(run_result) 3. **AST node equality** - Override `ast_equality()` when adding nodes 4. **Nullable identifiers** - Will raise `SemanticError("0-1-1-13")` 5. **ANTLR version** - Must use 4.9.x to match `antlr4-python3-runtime` dependency -6. **Version updates** - When bumping version, update BOTH `pyproject.toml` AND `src/vtlengine/__init__.py`. Always create a new branch from `origin/main` for version bumps and create a PR with no body +6. **Version updates** - When bumping version, update BOTH `pyproject.toml` AND `src/vtlengine/__init__.py`. 
Always create a new branch named `bump-version-{version}` from `origin/main` for version bumps and create a PR with no body ## External Dependencies diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 98436dde7..3c2232b13 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -1,4 +1,4 @@ -# VTL Engine - Claude Code Instructions +# VTL Engine - AI Coding Assistant Instructions ## Project Overview @@ -196,7 +196,8 @@ Only use the following labels — **never create new labels**: ### Branch Naming -Pattern: `cr-{issue_number}` (e.g., `cr-457` for issue #457) +- Issue branches: `cr-{issue_number}` (e.g., `cr-457` for issue #457) +- Version bump branches: `bump-version-{version}` (e.g., `bump-version-1.6.0rc2`) ### Workflow @@ -264,7 +265,7 @@ print(run_result) 3. **AST node equality** - Override `ast_equality()` when adding nodes 4. **Nullable identifiers** - Will raise `SemanticError("0-1-1-13")` 5. **ANTLR version** - Must use 4.9.x to match `antlr4-python3-runtime` dependency -6. **Version updates** - When bumping version, update BOTH `pyproject.toml` AND `src/vtlengine/__init__.py`. Always create a new branch from `origin/main` for version bumps and create a PR with no body +6. **Version updates** - When bumping version, update BOTH `pyproject.toml` AND `src/vtlengine/__init__.py`. 
Always create a new branch named `bump-version-{version}` from `origin/main` for version bumps and create a PR with no body ## External Dependencies diff --git a/pyproject.toml b/pyproject.toml index e07aacec6..5454f4481 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "vtlengine" -version = "1.6.0rc1" +version = "1.6.0rc2" description = "Run and Validate VTL Scripts" license = "AGPL-3.0" readme = "README.md" diff --git a/src/vtlengine/__init__.py b/src/vtlengine/__init__.py index b091f0a35..ecd64388c 100644 --- a/src/vtlengine/__init__.py +++ b/src/vtlengine/__init__.py @@ -24,4 +24,4 @@ "validate_external_routine", ] -__version__ = "1.6.0rc1" +__version__ = "1.6.0rc2" From d7c062d4f4ad35f6062e12bbd7411f046f4232df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Mon, 2 Mar 2026 16:05:18 +0100 Subject: [PATCH 07/38] (QA 1.6.0) Updated legacy Time_Period month representation (#551) * Added legacy representation method to TimePeriodHandler class * Added legacy time period representation formatter * Added related tests * Renamed format_time_period_external_representation dataset argument to operand. 
* Added related error message * Updated invalid TimePeriodRepresentation exception * Updated docs * Updated docs * updated sdmx reporting D regex * Added related tests * Updated docs * Updated legacy Time_Period month repr from YYYY-Mdd to YYYY-MM * Updated related tests * Updated docs --- docs/data_types.rst | 2 +- src/vtlengine/DataTypes/TimeHandling.py | 6 ++++-- src/vtlengine/files/output/_time_period_representation.py | 2 +- tests/TimePeriod/test_time_period_formats.py | 6 +++--- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/docs/data_types.rst b/docs/data_types.rst index 3354bd48d..5eb904b13 100644 --- a/docs/data_types.rst +++ b/docs/data_types.rst @@ -247,7 +247,7 @@ parameter): - ``2020`` - ``2020-S1`` - ``2020-Q1`` - - ``2020-M01`` + - ``2020-01`` - ``2020-W15`` - ``2020-01-15`` diff --git a/src/vtlengine/DataTypes/TimeHandling.py b/src/vtlengine/DataTypes/TimeHandling.py index 77ec75b9c..4fe87c18e 100644 --- a/src/vtlengine/DataTypes/TimeHandling.py +++ b/src/vtlengine/DataTypes/TimeHandling.py @@ -373,13 +373,15 @@ def sdmx_reporting_representation(self) -> str: return f"{self.year}-{self.period_indicator}{period_number_str}" def legacy_representation(self) -> str: - """Legacy representation: YYYY, YYYY-Sx, YYYY-Qx, YYYY-Mxx, YYYY-Wxx, YYYY-MM-DD.""" + """Legacy representation: YYYY, YYYY-Sx, YYYY-Qx, YYYY-MM, YYYY-Wxx, YYYY-MM-DD.""" if self.period_indicator == "A": return f"{self.year}" + if self.period_indicator == "M": + return f"{self.year}-{self.period_number:02}" if self.period_indicator == "D": d = period_to_date(self.year, "D", self.period_number) return d.isoformat() - if self.period_indicator in ("M", "W"): + if self.period_indicator == "W": period_number_str = f"{self.period_number:02}" else: period_number_str = str(self.period_number) diff --git a/src/vtlengine/files/output/_time_period_representation.py b/src/vtlengine/files/output/_time_period_representation.py index 951511b53..ce0d99e03 100644 --- 
a/src/vtlengine/files/output/_time_period_representation.py +++ b/src/vtlengine/files/output/_time_period_representation.py @@ -49,7 +49,7 @@ def format_time_period_external_representation( SDMX Reporting: YYYY-A1, YYYY-Ss, YYYY-Qq, YYYY-Mmm, YYYY-Www, YYYY-Dddd SDMX Gregorian: YYYY, YYYY-MM, YYYY-MM-DD (only A, M, D supported) VTL: YYYY, YYYYSn, YYYYQn, YYYYMm, YYYYWw, YYYYDd (no hyphens) - Legacy: YYYY, YYYY-Sx, YYYY-Qx, YYYY-Mxx, YYYY-Wxx, YYYY-MM-DD + Legacy: YYYY, YYYY-Sx, YYYY-Qx, YYYY-MM, YYYY-Wxx, YYYY-MM-DD """ if isinstance(operand, Scalar): if operand.data_type != TimePeriod or operand.value is None: diff --git a/tests/TimePeriod/test_time_period_formats.py b/tests/TimePeriod/test_time_period_formats.py index 830af6c58..dd06a4372 100644 --- a/tests/TimePeriod/test_time_period_formats.py +++ b/tests/TimePeriod/test_time_period_formats.py @@ -272,8 +272,8 @@ def test_sdmx_gregorian_representation_unsupported(internal: str) -> None: ("2020S2", "2020-S2", "semester 2"), ("2020Q1", "2020-Q1", "quarter 1"), ("2020Q4", "2020-Q4", "quarter 4"), - ("2020M1", "2020-M01", "month 1"), - ("2020M12", "2020-M12", "month 12"), + ("2020M1", "2020-01", "month 1"), + ("2020M12", "2020-12", "month 12"), ("2020W1", "2020-W01", "week 1"), ("2020W53", "2020-W53", "week 53"), ("2020D1", "2020-01-01", "day 1"), @@ -329,7 +329,7 @@ def get_tp_scalar(value: Optional[str]) -> Scalar: ("2020-D001", TimePeriodRepresentation.SDMX_GREGORIAN, "2020-01-01", "gregorian day"), # Legacy ("2020A", TimePeriodRepresentation.LEGACY, "2020", "legacy annual"), - ("2020-M01", TimePeriodRepresentation.LEGACY, "2020-M01", "legacy month"), + ("2020-M01", TimePeriodRepresentation.LEGACY, "2020-01", "legacy month"), ("2020-Q3", TimePeriodRepresentation.LEGACY, "2020-Q3", "legacy quarter"), ("2020-S2", TimePeriodRepresentation.LEGACY, "2020-S2", "legacy semester"), ("2020-W01", TimePeriodRepresentation.LEGACY, "2020-W01", "legacy week"), From ccf13494828feafc76e75101998e67dc3fbb72b5 Mon Sep 17 00:00:00 
2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Mar 2026 12:38:31 +0100 Subject: [PATCH 08/38] Bump ruff from 0.15.2 to 0.15.4 (#553) Bumps [ruff](https://github.com/astral-sh/ruff) from 0.15.2 to 0.15.4. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.15.2...0.15.4) --- updated-dependencies: - dependency-name: ruff dependency-version: 0.15.4 dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 80 ++++++++++++++++++++++++++--------------------------- 1 file changed, 40 insertions(+), 40 deletions(-) diff --git a/poetry.lock b/poetry.lock index 59ef12887..f695794a1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.3.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. 
[[package]] name = "aiobotocore" @@ -7,7 +7,7 @@ description = "Async client for aws services using botocore and aiohttp" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "aiobotocore-2.26.0-py3-none-any.whl", hash = "sha256:a793db51c07930513b74ea7a95bd79aaa42f545bdb0f011779646eafa216abec"}, {file = "aiobotocore-2.26.0.tar.gz", hash = "sha256:50567feaf8dfe2b653570b4491f5bc8c6e7fb9622479d66442462c021db4fadc"}, @@ -34,7 +34,7 @@ description = "Happy Eyeballs for asyncio" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, @@ -47,7 +47,7 @@ description = "Async http client/server framework (asyncio)" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "aiohttp-3.13.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5a372fd5afd301b3a89582817fdcdb6c34124787c70dbcc616f259013e7eef7"}, {file = "aiohttp-3.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:147e422fd1223005c22b4fe080f5d93ced44460f5f9c105406b753612b587821"}, @@ -191,7 +191,7 @@ description = "itertools and builtins for AsyncIO and mixed iterables" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "aioitertools-0.13.0-py3-none-any.whl", hash = "sha256:0be0292b856f08dfac90e31f4739432f4cb6d7520ab9eb73e143f4f2fa5259be"}, {file = "aioitertools-0.13.0.tar.gz", hash 
= "sha256:620bd241acc0bbb9ec819f1ab215866871b4bbd1f73836a55f799200ee86950c"}, @@ -207,7 +207,7 @@ description = "aiosignal: a list of registered asynchronous callbacks" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"}, {file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"}, @@ -267,7 +267,7 @@ description = "Timeout context manager for asyncio programs" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "(extra == \"s3\" or extra == \"all\") and python_version < \"3.11\"" +markers = "(extra == \"all\" or extra == \"s3\") and python_version < \"3.11\"" files = [ {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, @@ -307,7 +307,7 @@ description = "Low-level, data-driven core of boto 3." 
optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "botocore-1.41.5-py3-none-any.whl", hash = "sha256:3fef7fcda30c82c27202d232cfdbd6782cb27f20f8e7e21b20606483e66ee73a"}, {file = "botocore-1.41.5.tar.gz", hash = "sha256:0367622b811597d183bfcaab4a350f0d3ede712031ce792ef183cabdee80d3bf"}, @@ -699,7 +699,7 @@ description = "A list-like structure which implements collections.abc.MutableSeq optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011"}, {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565"}, @@ -840,7 +840,7 @@ description = "File-system specification" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version == \"3.9\" and (extra == \"s3\" or extra == \"all\")" +markers = "python_version == \"3.9\" and (extra == \"all\" or extra == \"s3\")" files = [ {file = "fsspec-2025.10.0-py3-none-any.whl", hash = "sha256:7c7712353ae7d875407f97715f0e1ffcc21e33d5b24556cb1e090ae9409ec61d"}, {file = "fsspec-2025.10.0.tar.gz", hash = "sha256:b6789427626f068f9a83ca4e8a3cc050850b6c0f71f99ddb4f542b8266a26a59"}, @@ -881,7 +881,7 @@ description = "File-system specification" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\" and (extra == \"s3\" or extra == \"all\")" +markers = "python_version >= \"3.10\" and (extra == \"all\" or extra == \"s3\")" files = [ {file = "fsspec-2025.12.0-py3-none-any.whl", hash = "sha256:8bf1fe301b7d8acfa6e8571e3b1c3d158f909666642431cc78a1b7b4dbc5ec5b"}, {file = "fsspec-2025.12.0.tar.gz", hash = 
"sha256:c505de011584597b1060ff778bb664c1bc022e87921b0e4f10cc9c44f9635973"}, @@ -1118,7 +1118,7 @@ description = "JSON Matching Expressions" optional = true python-versions = ">=3.7" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -1138,7 +1138,7 @@ files = [ [package.dependencies] attrs = ">=22.2.0" -jsonschema-specifications = ">=2023.3.6" +jsonschema-specifications = ">=2023.03.6" referencing = ">=0.28.4" rpds-py = ">=0.7.1" @@ -1668,7 +1668,7 @@ description = "multidict implementation" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349"}, {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e"}, @@ -2229,7 +2229,7 @@ description = "Accelerated property cache" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db"}, {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8"}, @@ -2912,30 +2912,30 @@ files = [ [[package]] name = "ruff" -version = "0.15.2" +version = "0.15.4" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "ruff-0.15.2-py3-none-linux_armv6l.whl", hash = "sha256:120691a6fdae2f16d65435648160f5b81a9625288f75544dc40637436b5d3c0d"}, - {file = "ruff-0.15.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a89056d831256099658b6bba4037ac6dd06f49d194199215befe2bb10457ea5e"}, - {file = "ruff-0.15.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e36dee3a64be0ebd23c86ffa3aa3fd3ac9a712ff295e192243f814a830b6bd87"}, - {file = "ruff-0.15.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9fb47b6d9764677f8c0a193c0943ce9a05d6763523f132325af8a858eadc2b9"}, - {file = "ruff-0.15.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f376990f9d0d6442ea9014b19621d8f2aaf2b8e39fdbfc79220b7f0c596c9b80"}, - {file = "ruff-0.15.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dcc987551952d73cbf5c88d9fdee815618d497e4df86cd4c4824cc59d5dd75f"}, - {file = "ruff-0.15.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42a47fd785cbe8c01b9ff45031af875d101b040ad8f4de7bbb716487c74c9a77"}, - {file = "ruff-0.15.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbe9f49354866e575b4c6943856989f966421870e85cd2ac94dccb0a9dcb2fea"}, - {file = "ruff-0.15.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7a672c82b5f9887576087d97be5ce439f04bbaf548ee987b92d3a7dede41d3a"}, - {file = "ruff-0.15.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:72ecc64f46f7019e2bcc3cdc05d4a7da958b629a5ab7033195e11a438403d956"}, - {file = "ruff-0.15.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8dcf243b15b561c655c1ef2f2b0050e5d50db37fe90115507f6ff37d865dc8b4"}, - {file = "ruff-0.15.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dab6941c862c05739774677c6273166d2510d254dac0695c0e3f5efa1b5585de"}, - {file = "ruff-0.15.2-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:1b9164f57fc36058e9a6806eb92af185b0697c9fe4c7c52caa431c6554521e5c"}, - {file = "ruff-0.15.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:80d24fcae24d42659db7e335b9e1531697a7102c19185b8dc4a028b952865fd8"}, - {file = "ruff-0.15.2-py3-none-win32.whl", hash = "sha256:fd5ff9e5f519a7e1bd99cbe8daa324010a74f5e2ebc97c6242c08f26f3714f6f"}, - {file = "ruff-0.15.2-py3-none-win_amd64.whl", hash = "sha256:d20014e3dfa400f3ff84830dfb5755ece2de45ab62ecea4af6b7262d0fb4f7c5"}, - {file = "ruff-0.15.2-py3-none-win_arm64.whl", hash = "sha256:cabddc5822acdc8f7b5527b36ceac55cc51eec7b1946e60181de8fe83ca8876e"}, - {file = "ruff-0.15.2.tar.gz", hash = "sha256:14b965afee0969e68bb871eba625343b8673375f457af4abe98553e8bbb98342"}, + {file = "ruff-0.15.4-py3-none-linux_armv6l.whl", hash = "sha256:a1810931c41606c686bae8b5b9a8072adac2f611bb433c0ba476acba17a332e0"}, + {file = "ruff-0.15.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5a1632c66672b8b4d3e1d1782859e98d6e0b4e70829530666644286600a33992"}, + {file = "ruff-0.15.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4386ba2cd6c0f4ff75252845906acc7c7c8e1ac567b7bc3d373686ac8c222ba"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2496488bdfd3732747558b6f95ae427ff066d1fcd054daf75f5a50674411e75"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f1c4893841ff2d54cbda1b2860fa3260173df5ddd7b95d370186f8a5e66a4ac"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:820b8766bd65503b6c30aaa6331e8ef3a6e564f7999c844e9a547c40179e440a"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9fb74bab47139c1751f900f857fa503987253c3ef89129b24ed375e72873e85"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f80c98765949c518142b3a50a5db89343aa90f2c2bf7799de9986498ae6176db"}, + {file = 
"ruff-0.15.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451a2e224151729b3b6c9ffb36aed9091b2996fe4bdbd11f47e27d8f2e8888ec"}, + {file = "ruff-0.15.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8f157f2e583c513c4f5f896163a93198297371f34c04220daf40d133fdd4f7f"}, + {file = "ruff-0.15.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:917cc68503357021f541e69b35361c99387cdbbf99bd0ea4aa6f28ca99ff5338"}, + {file = "ruff-0.15.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e9737c8161da79fd7cfec19f1e35620375bd8b2a50c3e77fa3d2c16f574105cc"}, + {file = "ruff-0.15.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:291258c917539e18f6ba40482fe31d6f5ac023994ee11d7bdafd716f2aab8a68"}, + {file = "ruff-0.15.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3f83c45911da6f2cd5936c436cf86b9f09f09165f033a99dcf7477e34041cbc3"}, + {file = "ruff-0.15.4-py3-none-win32.whl", hash = "sha256:65594a2d557d4ee9f02834fcdf0a28daa8b3b9f6cb2cb93846025a36db47ef22"}, + {file = "ruff-0.15.4-py3-none-win_amd64.whl", hash = "sha256:04196ad44f0df220c2ece5b0e959c2f37c777375ec744397d21d15b50a75264f"}, + {file = "ruff-0.15.4-py3-none-win_arm64.whl", hash = "sha256:60d5177e8cfc70e51b9c5fad936c634872a74209f934c1e79107d11787ad5453"}, + {file = "ruff-0.15.4.tar.gz", hash = "sha256:3412195319e42d634470cc97aa9803d07e9d5c9223b99bcb1518f0c725f26ae1"}, ] [[package]] @@ -2945,7 +2945,7 @@ description = "Convenient Filesystem interface over S3" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "python_version == \"3.9\" and (extra == \"s3\" or extra == \"all\")" +markers = "python_version == \"3.9\" and (extra == \"all\" or extra == \"s3\")" files = [ {file = "s3fs-2025.10.0-py3-none-any.whl", hash = "sha256:da7ef25efc1541f5fca8e1116361e49ea1081f83f4e8001fbd77347c625da28a"}, {file = "s3fs-2025.10.0.tar.gz", hash = "sha256:e8be6cddc77aceea1681ece0f472c3a7f8ef71a0d2acddb1cc92bb6afa3e9e4f"}, @@ -2967,7 +2967,7 @@ description = "Convenient Filesystem 
interface over S3" optional = true python-versions = ">=3.10" groups = ["main"] -markers = "python_version >= \"3.10\" and (extra == \"s3\" or extra == \"all\")" +markers = "python_version >= \"3.10\" and (extra == \"all\" or extra == \"s3\")" files = [ {file = "s3fs-2025.12.0-py3-none-any.whl", hash = "sha256:89d51e0744256baad7ae5410304a368ca195affd93a07795bc8ba9c00c9effbb"}, {file = "s3fs-2025.12.0.tar.gz", hash = "sha256:8612885105ce14d609c5b807553f9f9956b45541576a17ff337d9435ed3eb01f"}, @@ -3346,7 +3346,7 @@ files = [ {file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"}, {file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"}, ] -markers = {main = "python_version == \"3.9\" and (extra == \"s3\" or extra == \"all\")", docs = "python_version == \"3.9\""} +markers = {main = "python_version == \"3.9\" and (extra == \"all\" or extra == \"s3\")", docs = "python_version == \"3.9\""} [package.extras] brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] @@ -3364,7 +3364,7 @@ files = [ {file = "urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd"}, {file = "urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797"}, ] -markers = {main = "python_version >= \"3.10\" and (extra == \"s3\" or extra == \"all\")"} +markers = {main = "python_version >= \"3.10\" and (extra == \"all\" or extra == \"s3\")"} [package.extras] brotli = ["brotli (>=1.2.0) ; platform_python_implementation == 
\"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] @@ -3379,7 +3379,7 @@ description = "Module for decorators, wrappers and monkey patching." optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04"}, {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2"}, @@ -3486,7 +3486,7 @@ description = "Yet another URL library" optional = true python-versions = ">=3.9" groups = ["main"] -markers = "extra == \"s3\" or extra == \"all\"" +markers = "extra == \"all\" or extra == \"s3\"" files = [ {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e"}, {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f"}, From 3a5f394ef63f032d651a0a062efc3b731f0df7a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Tue, 3 Mar 2026 12:52:50 +0100 Subject: [PATCH 09/38] Fixed Analytic and Aggregate SQL queries fails with Date inputs (#552) * Add date normalization method to Analytic class * Add Date type handling in Aggregation class * Added VTL error handling for duckdb query in Analytic class * Minor fix * Fixed linting errors * Added Aggregate related tests * Added Analytic related tests * Enhanced error handling in Analytic class for duckdb query conversion issues * Updated Analytic TimePeriod Handler * Fixed ruff errors * Added RANGE test * Added Time_Period test * Removed Time handler until review * Fixed ruff errors * Remove Time Period handler --- 
src/vtlengine/Operators/Aggregation.py | 6 ++ src/vtlengine/Operators/Analytic.py | 36 +++++++++-- .../data/DataSet/input/GH_550_1-1.csv | 5 ++ .../data/DataSet/input/GH_550_2-1.csv | 5 ++ .../data/DataSet/output/GH_550_1-1.csv | 3 + .../data/DataSet/output/GH_550_2-1.csv | 3 + .../data/DataStructure/input/GH_550_1-1.json | 27 ++++++++ .../data/DataStructure/input/GH_550_2-1.json | 27 ++++++++ .../data/DataStructure/output/GH_550_1-1.json | 21 +++++++ .../data/DataStructure/output/GH_550_2-1.json | 21 +++++++ tests/Aggregate/data/vtl/GH_550_1.vtl | 1 + tests/Aggregate/data/vtl/GH_550_2.vtl | 1 + tests/Aggregate/test_aggregate.py | 24 +++++++ .../data/DataSet/input/GH_550_1-1.csv | 6 ++ .../data/DataSet/input/GH_550_2-1.csv | 6 ++ .../data/DataSet/input/GH_550_3-1.csv | 5 ++ .../data/DataSet/output/GH_550_1-1.csv | 6 ++ .../data/DataSet/output/GH_550_2-1.csv | 6 ++ .../data/DataSet/output/GH_550_3-1.csv | 5 ++ .../data/DataStructure/input/GH_550_1-1.json | 27 ++++++++ .../data/DataStructure/input/GH_550_2-1.json | 27 ++++++++ .../data/DataStructure/input/GH_550_3-1.json | 27 ++++++++ .../data/DataStructure/output/GH_550_1-1.json | 27 ++++++++ .../data/DataStructure/output/GH_550_2-1.json | 33 ++++++++++ .../data/DataStructure/output/GH_550_3-1.json | 33 ++++++++++ tests/Analytic/data/vtl/GH_550_1.vtl | 1 + tests/Analytic/data/vtl/GH_550_2.vtl | 1 + tests/Analytic/data/vtl/GH_550_3.vtl | 8 +++ tests/Analytic/test_analytic.py | 62 +++++++++++++++++++ 29 files changed, 455 insertions(+), 5 deletions(-) create mode 100644 tests/Aggregate/data/DataSet/input/GH_550_1-1.csv create mode 100644 tests/Aggregate/data/DataSet/input/GH_550_2-1.csv create mode 100644 tests/Aggregate/data/DataSet/output/GH_550_1-1.csv create mode 100644 tests/Aggregate/data/DataSet/output/GH_550_2-1.csv create mode 100644 tests/Aggregate/data/DataStructure/input/GH_550_1-1.json create mode 100644 tests/Aggregate/data/DataStructure/input/GH_550_2-1.json create mode 100644 
tests/Aggregate/data/DataStructure/output/GH_550_1-1.json create mode 100644 tests/Aggregate/data/DataStructure/output/GH_550_2-1.json create mode 100644 tests/Aggregate/data/vtl/GH_550_1.vtl create mode 100644 tests/Aggregate/data/vtl/GH_550_2.vtl create mode 100644 tests/Analytic/data/DataSet/input/GH_550_1-1.csv create mode 100644 tests/Analytic/data/DataSet/input/GH_550_2-1.csv create mode 100644 tests/Analytic/data/DataSet/input/GH_550_3-1.csv create mode 100644 tests/Analytic/data/DataSet/output/GH_550_1-1.csv create mode 100644 tests/Analytic/data/DataSet/output/GH_550_2-1.csv create mode 100644 tests/Analytic/data/DataSet/output/GH_550_3-1.csv create mode 100644 tests/Analytic/data/DataStructure/input/GH_550_1-1.json create mode 100644 tests/Analytic/data/DataStructure/input/GH_550_2-1.json create mode 100644 tests/Analytic/data/DataStructure/input/GH_550_3-1.json create mode 100644 tests/Analytic/data/DataStructure/output/GH_550_1-1.json create mode 100644 tests/Analytic/data/DataStructure/output/GH_550_2-1.json create mode 100644 tests/Analytic/data/DataStructure/output/GH_550_3-1.json create mode 100644 tests/Analytic/data/vtl/GH_550_1.vtl create mode 100644 tests/Analytic/data/vtl/GH_550_2.vtl create mode 100644 tests/Analytic/data/vtl/GH_550_3.vtl diff --git a/src/vtlengine/Operators/Aggregation.py b/src/vtlengine/Operators/Aggregation.py index ac55769cb..f51332f22 100644 --- a/src/vtlengine/Operators/Aggregation.py +++ b/src/vtlengine/Operators/Aggregation.py @@ -19,6 +19,7 @@ ) from vtlengine.DataTypes import ( Boolean, + Date, Duration, Integer, Number, @@ -83,6 +84,11 @@ def _handle_data_types(cls, data: pd.DataFrame, measures: List[Component], mode: data[measure.name] = data[measure.name].map(PERIOD_IND_MAPPING) else: data[measure.name] = data[measure.name].map(PERIOD_IND_MAPPING_REVERSE) + elif measure.data_type == Date: + if mode == "input": + data[measure.name] = data[measure.name].astype("date64[pyarrow]") + else: + data[measure.name] = 
data[measure.name].astype(Date.dtype()) # type: ignore[call-overload] elif measure.data_type == Boolean and mode == "result": data[measure.name] = ( data[measure.name] # type: ignore[call-overload, unused-ignore] diff --git a/src/vtlengine/Operators/Analytic.py b/src/vtlengine/Operators/Analytic.py index d3dd2331a..c2b44f129 100644 --- a/src/vtlengine/Operators/Analytic.py +++ b/src/vtlengine/Operators/Analytic.py @@ -1,5 +1,5 @@ from copy import copy -from typing import List, Optional +from typing import Dict, List, Optional import duckdb import pandas as pd @@ -28,6 +28,7 @@ ) from vtlengine.DataTypes import ( COMP_NAME_MAPPING, + Date, Integer, Number, unary_implicit_promotion, @@ -125,9 +126,13 @@ def validate( # type: ignore[override] # noqa: C901 if cls.op in return_integer_operators: isNumber = False + has_non_numeric = False for measure in measures: - isNumber |= isinstance(measure.data_type, Number) - cls.return_integer = not isNumber + if isinstance(measure.data_type, (Integer, Number)): + isNumber |= isinstance(measure.data_type, Number) + else: + has_non_numeric = True + cls.return_integer = not isNumber and not has_non_numeric if cls.type_to_check is not None: for measure in measures: @@ -136,7 +141,8 @@ def validate( # type: ignore[override] # noqa: C901 if cls.op in return_integer_operators: for measure in measures: new_measure = copy(measure) - new_measure.data_type = Integer if cls.return_integer else Number + if isinstance(measure.data_type, (Integer, Number)): + new_measure.data_type = Integer if cls.return_integer else Number result_components[measure.name] = new_measure elif cls.return_type is not None: for measure in measures: @@ -250,7 +256,13 @@ def analyticfunc( if cls.op == COUNT: df[measure_names] = df[measure_names].fillna(-1) - result = duckdb.query(query).to_df() + try: + result = duckdb.query(query).to_df() + except RuntimeError as e: + if "Conversion" in e.args[0]: + raise RunTimeError("2-3-8", op=cls.op, 
msg=e.args[0].split(":")[-1]) + else: + raise RunTimeError("2-1-1-1", op=cls.op, error=e) if cls.op == RATIO_TO_REPORT: for col_name in measure_names: arr = pa.array(result[col_name]) @@ -270,6 +282,7 @@ def evaluate( # type: ignore[override] ) -> Dataset: result = cls.validate(operand, partitioning, ordering, window, params, component_name) df = operand.data.copy() if operand.data is not None else pd.DataFrame() + df = cls.normalize_dates(df, operand.components) identifier_names = operand.get_identifiers_names() if component_name is not None: @@ -294,6 +307,19 @@ def evaluate( # type: ignore[override] return result + @classmethod + def normalize_dates( + cls, data: Optional[pd.DataFrame], components: Dict[str, Component] + ) -> pd.DataFrame: + if data is None: + return pd.DataFrame(columns=[comp.name for comp in components.values()]) + elif any(comp.data_type is Date for comp in components.values()): + data = data.copy() + for comp_name, comp in components.items(): + if comp.data_type is Date: + data[comp_name] = data[comp_name].astype("date64[pyarrow]") + return data + class Max(Analytic): """ diff --git a/tests/Aggregate/data/DataSet/input/GH_550_1-1.csv b/tests/Aggregate/data/DataSet/input/GH_550_1-1.csv new file mode 100644 index 000000000..9e0e63a4f --- /dev/null +++ b/tests/Aggregate/data/DataSet/input/GH_550_1-1.csv @@ -0,0 +1,5 @@ +Id_1,Id_2,Me_1 +A,XX,2023-01-01 +A,YY,2023-06-15 +B,XX,2023-03-10 +B,YY,2023-12-25 diff --git a/tests/Aggregate/data/DataSet/input/GH_550_2-1.csv b/tests/Aggregate/data/DataSet/input/GH_550_2-1.csv new file mode 100644 index 000000000..9e0e63a4f --- /dev/null +++ b/tests/Aggregate/data/DataSet/input/GH_550_2-1.csv @@ -0,0 +1,5 @@ +Id_1,Id_2,Me_1 +A,XX,2023-01-01 +A,YY,2023-06-15 +B,XX,2023-03-10 +B,YY,2023-12-25 diff --git a/tests/Aggregate/data/DataSet/output/GH_550_1-1.csv b/tests/Aggregate/data/DataSet/output/GH_550_1-1.csv new file mode 100644 index 000000000..727b7c563 --- /dev/null +++ 
b/tests/Aggregate/data/DataSet/output/GH_550_1-1.csv @@ -0,0 +1,3 @@ +Id_1,Me_1 +A,2023-06-15 +B,2023-12-25 diff --git a/tests/Aggregate/data/DataSet/output/GH_550_2-1.csv b/tests/Aggregate/data/DataSet/output/GH_550_2-1.csv new file mode 100644 index 000000000..c901c33c1 --- /dev/null +++ b/tests/Aggregate/data/DataSet/output/GH_550_2-1.csv @@ -0,0 +1,3 @@ +Id_1,Me_1 +A,2023-01-01 +B,2023-03-10 diff --git a/tests/Aggregate/data/DataStructure/input/GH_550_1-1.json b/tests/Aggregate/data/DataStructure/input/GH_550_1-1.json new file mode 100644 index 000000000..e982cbb8a --- /dev/null +++ b/tests/Aggregate/data/DataStructure/input/GH_550_1-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Date", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Aggregate/data/DataStructure/input/GH_550_2-1.json b/tests/Aggregate/data/DataStructure/input/GH_550_2-1.json new file mode 100644 index 000000000..e982cbb8a --- /dev/null +++ b/tests/Aggregate/data/DataStructure/input/GH_550_2-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Date", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Aggregate/data/DataStructure/output/GH_550_1-1.json b/tests/Aggregate/data/DataStructure/output/GH_550_1-1.json new file mode 100644 index 000000000..20a14c0cc --- /dev/null +++ b/tests/Aggregate/data/DataStructure/output/GH_550_1-1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": 
"Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Date", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Aggregate/data/DataStructure/output/GH_550_2-1.json b/tests/Aggregate/data/DataStructure/output/GH_550_2-1.json new file mode 100644 index 000000000..20a14c0cc --- /dev/null +++ b/tests/Aggregate/data/DataStructure/output/GH_550_2-1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Date", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Aggregate/data/vtl/GH_550_1.vtl b/tests/Aggregate/data/vtl/GH_550_1.vtl new file mode 100644 index 000000000..aa29ed865 --- /dev/null +++ b/tests/Aggregate/data/vtl/GH_550_1.vtl @@ -0,0 +1 @@ +DS_r := max(DS_1 group by Id_1); diff --git a/tests/Aggregate/data/vtl/GH_550_2.vtl b/tests/Aggregate/data/vtl/GH_550_2.vtl new file mode 100644 index 000000000..6f78209bd --- /dev/null +++ b/tests/Aggregate/data/vtl/GH_550_2.vtl @@ -0,0 +1 @@ +DS_r := min(DS_1 group by Id_1); diff --git a/tests/Aggregate/test_aggregate.py b/tests/Aggregate/test_aggregate.py index 4a6a0a7cd..99082f42e 100644 --- a/tests/Aggregate/test_aggregate.py +++ b/tests/Aggregate/test_aggregate.py @@ -1307,3 +1307,27 @@ def test_GH_164_2(self): number_inputs=number_inputs, references_names=references_names, ) + + def test_GH_550_1(self): + """ + Status: OK + Description: Fix #550: max aggregate with Date measure and group by + Goal: Check that max works correctly with Date type measures in aggregation + """ + code = "GH_550_1" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_GH_550_2(self): + """ + Status: OK + Description: Fix #550: min aggregate with Date measure and group by + Goal: Check that min works 
correctly with Date type measures in aggregation + """ + code = "GH_550_2" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) diff --git a/tests/Analytic/data/DataSet/input/GH_550_1-1.csv b/tests/Analytic/data/DataSet/input/GH_550_1-1.csv new file mode 100644 index 000000000..86f9cd854 --- /dev/null +++ b/tests/Analytic/data/DataSet/input/GH_550_1-1.csv @@ -0,0 +1,6 @@ +Id_1,Id_2,Me_1 +A,XX,2023-01-01 +A,YY,2023-06-15 +A,ZZ,2023-03-10 +B,XX,2023-12-25 +B,YY,2023-07-04 diff --git a/tests/Analytic/data/DataSet/input/GH_550_2-1.csv b/tests/Analytic/data/DataSet/input/GH_550_2-1.csv new file mode 100644 index 000000000..86f9cd854 --- /dev/null +++ b/tests/Analytic/data/DataSet/input/GH_550_2-1.csv @@ -0,0 +1,6 @@ +Id_1,Id_2,Me_1 +A,XX,2023-01-01 +A,YY,2023-06-15 +A,ZZ,2023-03-10 +B,XX,2023-12-25 +B,YY,2023-07-04 diff --git a/tests/Analytic/data/DataSet/input/GH_550_3-1.csv b/tests/Analytic/data/DataSet/input/GH_550_3-1.csv new file mode 100644 index 000000000..b83094677 --- /dev/null +++ b/tests/Analytic/data/DataSet/input/GH_550_3-1.csv @@ -0,0 +1,5 @@ +Id_1,Id_2,Me_1 +A,A,2020-01-01 +B,A,2020-01-02 +A,B,2020-01-03 +B,B,2020-01-04 diff --git a/tests/Analytic/data/DataSet/output/GH_550_1-1.csv b/tests/Analytic/data/DataSet/output/GH_550_1-1.csv new file mode 100644 index 000000000..7b74484e8 --- /dev/null +++ b/tests/Analytic/data/DataSet/output/GH_550_1-1.csv @@ -0,0 +1,6 @@ +Id_1,Id_2,Me_1 +A,XX,2023-06-15 +A,YY,2023-06-15 +A,ZZ,2023-06-15 +B,XX,2023-12-25 +B,YY,2023-12-25 diff --git a/tests/Analytic/data/DataSet/output/GH_550_2-1.csv b/tests/Analytic/data/DataSet/output/GH_550_2-1.csv new file mode 100644 index 000000000..f1db68eee --- /dev/null +++ b/tests/Analytic/data/DataSet/output/GH_550_2-1.csv @@ -0,0 +1,6 @@ +Id_1,Id_2,Me_1,Me_2 +A,XX,2023-01-01,2023-01-01 +A,YY,2023-06-15,2023-01-01 +A,ZZ,2023-03-10,2023-03-10 +B,XX,2023-12-25,2023-07-04 +B,YY,2023-07-04,2023-07-04 diff 
--git a/tests/Analytic/data/DataSet/output/GH_550_3-1.csv b/tests/Analytic/data/DataSet/output/GH_550_3-1.csv new file mode 100644 index 000000000..b8fcaf0c5 --- /dev/null +++ b/tests/Analytic/data/DataSet/output/GH_550_3-1.csv @@ -0,0 +1,5 @@ +Id_1,Id_2,Me_1,MAX_Me_1 +A,A,2020-01-01, +B,A,2020-01-02, +A,B,2020-01-03,2020-01-01 +B,B,2020-01-04,2020-01-02 diff --git a/tests/Analytic/data/DataStructure/input/GH_550_1-1.json b/tests/Analytic/data/DataStructure/input/GH_550_1-1.json new file mode 100644 index 000000000..e982cbb8a --- /dev/null +++ b/tests/Analytic/data/DataStructure/input/GH_550_1-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Date", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Analytic/data/DataStructure/input/GH_550_2-1.json b/tests/Analytic/data/DataStructure/input/GH_550_2-1.json new file mode 100644 index 000000000..e982cbb8a --- /dev/null +++ b/tests/Analytic/data/DataStructure/input/GH_550_2-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Date", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Analytic/data/DataStructure/input/GH_550_3-1.json b/tests/Analytic/data/DataStructure/input/GH_550_3-1.json new file mode 100644 index 000000000..9139517b6 --- /dev/null +++ b/tests/Analytic/data/DataStructure/input/GH_550_3-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type": "String", + "nullable": false, + "role": "Identifier" + }, 
+ { + "name": "Id_2", + "type": "String", + "nullable": false, + "role": "Identifier" + }, + { + "name": "Me_1", + "type": "Date", + "nullable": true, + "role": "Measure" + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/Analytic/data/DataStructure/output/GH_550_1-1.json b/tests/Analytic/data/DataStructure/output/GH_550_1-1.json new file mode 100644 index 000000000..c54abed74 --- /dev/null +++ b/tests/Analytic/data/DataStructure/output/GH_550_1-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Date", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Analytic/data/DataStructure/output/GH_550_2-1.json b/tests/Analytic/data/DataStructure/output/GH_550_2-1.json new file mode 100644 index 000000000..1ecca8422 --- /dev/null +++ b/tests/Analytic/data/DataStructure/output/GH_550_2-1.json @@ -0,0 +1,33 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Date", + "nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "Date", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Analytic/data/DataStructure/output/GH_550_3-1.json b/tests/Analytic/data/DataStructure/output/GH_550_3-1.json new file mode 100644 index 000000000..faad24371 --- /dev/null +++ b/tests/Analytic/data/DataStructure/output/GH_550_3-1.json @@ -0,0 +1,33 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type": "String", + "nullable": false, + "role": "Identifier" + }, + { + "name": "Id_2", + "type": "String", + 
"nullable": false, + "role": "Identifier" + }, + { + "name": "Me_1", + "type": "Date", + "nullable": true, + "role": "Measure" + }, + { + "name": "MAX_Me_1", + "type": "Date", + "nullable": true, + "role": "Measure" + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/Analytic/data/vtl/GH_550_1.vtl b/tests/Analytic/data/vtl/GH_550_1.vtl new file mode 100644 index 000000000..f0f376c14 --- /dev/null +++ b/tests/Analytic/data/vtl/GH_550_1.vtl @@ -0,0 +1 @@ +DS_r := max(DS_1 over (partition by Id_1 order by Id_2 data points between 1 preceding and 1 following)); diff --git a/tests/Analytic/data/vtl/GH_550_2.vtl b/tests/Analytic/data/vtl/GH_550_2.vtl new file mode 100644 index 000000000..115b4ea4a --- /dev/null +++ b/tests/Analytic/data/vtl/GH_550_2.vtl @@ -0,0 +1 @@ +DS_r := DS_1[calc Me_2 := min(Me_1 over (partition by Id_1 order by Id_2 data points between 1 preceding and 1 following))]; diff --git a/tests/Analytic/data/vtl/GH_550_3.vtl b/tests/Analytic/data/vtl/GH_550_3.vtl new file mode 100644 index 000000000..1cce79e93 --- /dev/null +++ b/tests/Analytic/data/vtl/GH_550_3.vtl @@ -0,0 +1,8 @@ +DS_r <- DS_1[calc MAX_Me_1 := + max(Me_1 over ( + partition by Id_1 + order by Me_1 + range between unbounded preceding and 1 preceding + ) + ) +]; diff --git a/tests/Analytic/test_analytic.py b/tests/Analytic/test_analytic.py index 09710e9ae..9773f5662 100644 --- a/tests/Analytic/test_analytic.py +++ b/tests/Analytic/test_analytic.py @@ -450,6 +450,49 @@ def test_20(self): code=code, number_inputs=number_inputs, exception_code=exception_code ) + def test_GH_550_1(self): + """ + Max: max + Dataset --> Dataset + Status: OK + Expression: DS_r := max(DS_1 over (partition by Id_1 order by Id_2 + data points between 1 preceding and 1 following)); + DS_1 Dataset + + Description: Fix #550: max analytic with Date measure and data points window. + + Goal: Check that max works correctly with Date type measures in analytic operations. 
+ """ + code = "GH_550_1" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_GH_550_3(self): + """ + Max: max + Dataset --> Dataset + Status: OK + Expression: DS_r <- DS_1[calc MAX_Me_1 := + max(Me_1 over ( + partition by Id_1 + order by Me_1 + range between unbounded preceding and 1 preceding + ) + ) + ]; + + Description: Fix #550: max analytic with Date measure and range window. + + Goal: Check that max works correctly with Date type measures in analytic operations. + """ + code = "GH_550_3" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + class AnalyticOperatorsWithCalcTest(AnalyticHelper): """ @@ -1072,3 +1115,22 @@ def test_29(self): references_names = ["1"] self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_GH_550_2(self): + """ + Min: min + Dataset --> Dataset + Status: OK + Expression: DS_r := DS_1[calc Me_2 := min(Me_1 over (partition by Id_1 + order by Id_2 data points between 1 preceding and 1 following))]; + DS_1 Dataset + + Description: Fix #550: min analytic with calc and Date measure. + + Goal: Check that min works correctly with Date type measures in analytic calc. 
+ """ + code = "GH_550_2" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) From b3e0c82e161fc22ee7a52ddc831afe6f4f4db049 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Tue, 3 Mar 2026 13:34:06 +0100 Subject: [PATCH 10/38] Bump version to 1.6.0rc3 (#556) --- pyproject.toml | 2 +- src/vtlengine/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5454f4481..3e57d5a03 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "vtlengine" -version = "1.6.0rc2" +version = "1.6.0rc3" description = "Run and Validate VTL Scripts" license = "AGPL-3.0" readme = "README.md" diff --git a/src/vtlengine/__init__.py b/src/vtlengine/__init__.py index ecd64388c..64480d5ea 100644 --- a/src/vtlengine/__init__.py +++ b/src/vtlengine/__init__.py @@ -24,4 +24,4 @@ "validate_external_routine", ] -__version__ = "1.6.0rc2" +__version__ = "1.6.0rc3" From 589ccaf7a0171c18e81fd556362f2ebd1fb4f1b2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Wed, 4 Mar 2026 09:33:18 +0100 Subject: [PATCH 11/38] Rename "legacy" time period representation to "natural" (#561) --- docs/data_types.rst | 2 +- src/vtlengine/API/__init__.py | 4 ++-- src/vtlengine/DataTypes/TimeHandling.py | 4 ++-- .../output/_time_period_representation.py | 16 ++++++------- tests/TimePeriod/test_time_period_formats.py | 24 +++++++++---------- 5 files changed, 25 insertions(+), 25 deletions(-) diff --git a/docs/data_types.rst b/docs/data_types.rst index 5eb904b13..fdc7630c4 100644 --- a/docs/data_types.rst +++ b/docs/data_types.rst @@ -243,7 +243,7 @@ parameter): - ``2020-01`` - Not supported - ``2020-01-15`` - * - ``"legacy"`` + * - ``"natural"`` - ``2020`` - ``2020-S1`` - ``2020-Q1`` diff --git a/src/vtlengine/API/__init__.py 
b/src/vtlengine/API/__init__.py index 7e2df21ba..33dce44f4 100644 --- a/src/vtlengine/API/__init__.py +++ b/src/vtlengine/API/__init__.py @@ -381,7 +381,7 @@ def run( :ref:`Example 5 `. time_period_output_format: String with the possible values \ - ("sdmx_gregorian", "sdmx_reporting", "vtl", "legacy") for the representation of the \ + ("sdmx_gregorian", "sdmx_reporting", "vtl", "natural") for the representation of the \ Time Period components. return_only_persistent: If True, run function will only return the results of \ @@ -523,7 +523,7 @@ def run_sdmx( :ref:`Example 5 `. time_period_output_format: String with the possible values \ - ("sdmx_gregorian", "sdmx_reporting", "vtl", "legacy") for the representation of the \ + ("sdmx_gregorian", "sdmx_reporting", "vtl", "natural") for the representation of the \ Time Period components. return_only_persistent: If True, run function will only return the results of \ diff --git a/src/vtlengine/DataTypes/TimeHandling.py b/src/vtlengine/DataTypes/TimeHandling.py index 4fe87c18e..204a449b2 100644 --- a/src/vtlengine/DataTypes/TimeHandling.py +++ b/src/vtlengine/DataTypes/TimeHandling.py @@ -372,8 +372,8 @@ def sdmx_reporting_representation(self) -> str: period_number_str = str(self.period_number) return f"{self.year}-{self.period_indicator}{period_number_str}" - def legacy_representation(self) -> str: - """Legacy representation: YYYY, YYYY-Sx, YYYY-Qx, YYYY-MM, YYYY-Wxx, YYYY-MM-DD.""" + def natural_representation(self) -> str: + """Natural representation: YYYY, YYYY-Sx, YYYY-Qx, YYYY-MM, YYYY-Wxx, YYYY-MM-DD.""" if self.period_indicator == "A": return f"{self.year}" if self.period_indicator == "M": diff --git a/src/vtlengine/files/output/_time_period_representation.py b/src/vtlengine/files/output/_time_period_representation.py index ce0d99e03..dd77f12b6 100644 --- a/src/vtlengine/files/output/_time_period_representation.py +++ b/src/vtlengine/files/output/_time_period_representation.py @@ -12,7 +12,7 @@ class 
TimePeriodRepresentation(Enum): SDMX_GREGORIAN = "sdmx_gregorian" SDMX_REPORTING = "sdmx_reporting" VTL = "vtl" - LEGACY = "legacy" + NATURAL = "natural" @classmethod def check_value(cls, value: str) -> "TimePeriodRepresentation": @@ -36,8 +36,8 @@ def _format_sdmx_reporting_representation(value: str) -> str: return TimePeriodHandler(value).sdmx_reporting_representation() -def _format_legacy_representation(value: str) -> str: - return TimePeriodHandler(value).legacy_representation() +def _format_natural_representation(value: str) -> str: + return TimePeriodHandler(value).natural_representation() def format_time_period_external_representation( @@ -49,7 +49,7 @@ def format_time_period_external_representation( SDMX Reporting: YYYY-A1, YYYY-Ss, YYYY-Qq, YYYY-Mmm, YYYY-Www, YYYY-Dddd SDMX Gregorian: YYYY, YYYY-MM, YYYY-MM-DD (only A, M, D supported) VTL: YYYY, YYYYSn, YYYYQn, YYYYMm, YYYYWw, YYYYDd (no hyphens) - Legacy: YYYY, YYYY-Sx, YYYY-Qx, YYYY-MM, YYYY-Wxx, YYYY-MM-DD + Natural: YYYY, YYYY-Sx, YYYY-Qx, YYYY-MM, YYYY-Wxx, YYYY-MM-DD """ if isinstance(operand, Scalar): if operand.data_type != TimePeriod or operand.value is None: @@ -62,8 +62,8 @@ def format_time_period_external_representation( operand.value = _format_sdmx_gregorian_representation(value) elif mode == TimePeriodRepresentation.SDMX_REPORTING: operand.value = _format_sdmx_reporting_representation(value) - elif mode == TimePeriodRepresentation.LEGACY: - operand.value = _format_legacy_representation(value) + elif mode == TimePeriodRepresentation.NATURAL: + operand.value = _format_natural_representation(value) return if operand.data is None or len(operand.data) == 0: @@ -74,8 +74,8 @@ def format_time_period_external_representation( formatter = _format_sdmx_gregorian_representation elif mode == TimePeriodRepresentation.SDMX_REPORTING: formatter = _format_sdmx_reporting_representation - elif mode == TimePeriodRepresentation.LEGACY: - formatter = _format_legacy_representation + elif mode == 
TimePeriodRepresentation.NATURAL: + formatter = _format_natural_representation for comp in operand.components.values(): if comp.data_type == TimePeriod: diff --git a/tests/TimePeriod/test_time_period_formats.py b/tests/TimePeriod/test_time_period_formats.py index dd06a4372..45ec34494 100644 --- a/tests/TimePeriod/test_time_period_formats.py +++ b/tests/TimePeriod/test_time_period_formats.py @@ -266,7 +266,7 @@ def test_sdmx_gregorian_representation_unsupported(internal: str) -> None: TimePeriodHandler(internal).sdmx_gregorian_representation() -legacy_repr_params = [ +natural_repr_params = [ ("2020A", "2020", "annual"), ("2020S1", "2020-S1", "semester 1"), ("2020S2", "2020-S2", "semester 2"), @@ -284,11 +284,11 @@ def test_sdmx_gregorian_representation_unsupported(internal: str) -> None: @pytest.mark.parametrize( "internal, expected", - [(c[0], c[1]) for c in legacy_repr_params], - ids=[c[2] for c in legacy_repr_params], + [(c[0], c[1]) for c in natural_repr_params], + ids=[c[2] for c in natural_repr_params], ) -def test_legacy_representation(internal: str, expected: str) -> None: - assert TimePeriodHandler(internal).legacy_representation() == expected +def test_natural_representation(internal: str, expected: str) -> None: + assert TimePeriodHandler(internal).natural_representation() == expected # VTL Data Types to external representations tests @@ -327,13 +327,13 @@ def get_tp_scalar(value: Optional[str]) -> Scalar: ("2020A", TimePeriodRepresentation.SDMX_GREGORIAN, "2020", "gregorian annual"), ("2020-M01", TimePeriodRepresentation.SDMX_GREGORIAN, "2020-01", "gregorian month"), ("2020-D001", TimePeriodRepresentation.SDMX_GREGORIAN, "2020-01-01", "gregorian day"), - # Legacy - ("2020A", TimePeriodRepresentation.LEGACY, "2020", "legacy annual"), - ("2020-M01", TimePeriodRepresentation.LEGACY, "2020-01", "legacy month"), - ("2020-Q3", TimePeriodRepresentation.LEGACY, "2020-Q3", "legacy quarter"), - ("2020-S2", TimePeriodRepresentation.LEGACY, "2020-S2", "legacy 
semester"), - ("2020-W01", TimePeriodRepresentation.LEGACY, "2020-W01", "legacy week"), - ("2020-D001", TimePeriodRepresentation.LEGACY, "2020-01-01", "legacy day"), + # Natural + ("2020A", TimePeriodRepresentation.NATURAL, "2020", "natural annual"), + ("2020-M01", TimePeriodRepresentation.NATURAL, "2020-01", "natural month"), + ("2020-Q3", TimePeriodRepresentation.NATURAL, "2020-Q3", "natural quarter"), + ("2020-S2", TimePeriodRepresentation.NATURAL, "2020-S2", "natural semester"), + ("2020-W01", TimePeriodRepresentation.NATURAL, "2020-W01", "natural week"), + ("2020-D001", TimePeriodRepresentation.NATURAL, "2020-01-01", "natural day"), ] From 977c24de0228cb15ed98978b141d0a590d52308b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Wed, 4 Mar 2026 10:20:39 +0100 Subject: [PATCH 12/38] Added new exceptions to Analytic and Aggregate operators with String, Duration, TimePeriod, and TimeInterval (#558) * Add semantic error handling for TimeInterval in Analytic and Aggregate operations * Added related tests * Added missing RunTimeError with TimePeriods with different durations test * Enhance TimePeriod handling in Aggregation and Analytic operations with improved regex extraction and error handling * Updated related tests * Fixed related ests * Fixed grammar test * Fixed linting errors * Minor fix --- src/vtlengine/Exceptions/messages.py | 11 ++ src/vtlengine/Operators/Aggregation.py | 21 ++- src/vtlengine/Operators/Analytic.py | 54 ++++++++ tests/Additional/test_additional.py | 4 +- .../data/DataSet/input/GH_554_1-1.csv | 4 + .../data/DataSet/input/GH_554_2-1.csv | 4 + .../data/DataStructure/input/GH_554_1-1.json | 27 ++++ .../data/DataStructure/input/GH_554_2-1.json | 27 ++++ tests/Aggregate/data/vtl/GH_554_1.vtl | 1 + tests/Aggregate/data/vtl/GH_554_2.vtl | 1 + tests/Aggregate/test_aggregate.py | 43 ++++++ .../data/DataSet/input/GH_554_1-1.csv | 4 + .../data/DataSet/input/GH_554_2-1.csv 
| 4 + .../data/DataSet/input/GH_554_3-1.csv | 4 + .../data/DataSet/input/GH_554_4-1.csv | 4 + .../data/DataSet/input/GH_554_5-1.csv | 4 + .../data/DataSet/input/GH_554_6-1.csv | 4 + .../data/DataStructure/input/GH_554_1-1.json | 27 ++++ .../data/DataStructure/input/GH_554_2-1.json | 27 ++++ .../data/DataStructure/input/GH_554_3-1.json | 27 ++++ .../data/DataStructure/input/GH_554_4-1.json | 27 ++++ .../data/DataStructure/input/GH_554_5-1.json | 27 ++++ .../data/DataStructure/input/GH_554_6-1.json | 27 ++++ tests/Analytic/data/vtl/GH_554_1.vtl | 1 + tests/Analytic/data/vtl/GH_554_2.vtl | 1 + tests/Analytic/data/vtl/GH_554_3.vtl | 1 + tests/Analytic/data/vtl/GH_554_4.vtl | 1 + tests/Analytic/data/vtl/GH_554_5.vtl | 1 + tests/Analytic/data/vtl/GH_554_6.vtl | 1 + tests/Analytic/test_analytic.py | 127 ++++++++++++++++++ .../data/DataSet/output/rank_ds.csv | 22 +-- .../data/DataStructure/output/reference.json | 6 - .../data/vtl/test_grammar.vtl | 2 +- .../test_aggregate_operators.py | 14 +- 34 files changed, 531 insertions(+), 29 deletions(-) create mode 100644 tests/Aggregate/data/DataSet/input/GH_554_1-1.csv create mode 100644 tests/Aggregate/data/DataSet/input/GH_554_2-1.csv create mode 100644 tests/Aggregate/data/DataStructure/input/GH_554_1-1.json create mode 100644 tests/Aggregate/data/DataStructure/input/GH_554_2-1.json create mode 100644 tests/Aggregate/data/vtl/GH_554_1.vtl create mode 100644 tests/Aggregate/data/vtl/GH_554_2.vtl create mode 100644 tests/Analytic/data/DataSet/input/GH_554_1-1.csv create mode 100644 tests/Analytic/data/DataSet/input/GH_554_2-1.csv create mode 100644 tests/Analytic/data/DataSet/input/GH_554_3-1.csv create mode 100644 tests/Analytic/data/DataSet/input/GH_554_4-1.csv create mode 100644 tests/Analytic/data/DataSet/input/GH_554_5-1.csv create mode 100644 tests/Analytic/data/DataSet/input/GH_554_6-1.csv create mode 100644 tests/Analytic/data/DataStructure/input/GH_554_1-1.json create mode 100644 
tests/Analytic/data/DataStructure/input/GH_554_2-1.json create mode 100644 tests/Analytic/data/DataStructure/input/GH_554_3-1.json create mode 100644 tests/Analytic/data/DataStructure/input/GH_554_4-1.json create mode 100644 tests/Analytic/data/DataStructure/input/GH_554_5-1.json create mode 100644 tests/Analytic/data/DataStructure/input/GH_554_6-1.json create mode 100644 tests/Analytic/data/vtl/GH_554_1.vtl create mode 100644 tests/Analytic/data/vtl/GH_554_2.vtl create mode 100644 tests/Analytic/data/vtl/GH_554_3.vtl create mode 100644 tests/Analytic/data/vtl/GH_554_4.vtl create mode 100644 tests/Analytic/data/vtl/GH_554_5.vtl create mode 100644 tests/Analytic/data/vtl/GH_554_6.vtl diff --git a/src/vtlengine/Exceptions/messages.py b/src/vtlengine/Exceptions/messages.py index 4b89fdc91..7a27891ba 100644 --- a/src/vtlengine/Exceptions/messages.py +++ b/src/vtlengine/Exceptions/messages.py @@ -817,6 +817,17 @@ "description": "Raised when the first/last parameter is missing in a time aggregation " "over a Date type.", }, + "1-1-19-12": { + "message": "At op {op}: TimeInterval data type is not supported for {context} operations.", + "description": "Raised when a TimeInterval data type is used in an Analytic or " + "Aggregate operation, which does not support it.", + }, + "1-1-19-13": { + "message": "At op {op}: RANGE window is not supported for {data_type} " + "data type in component {comp_name}.", + "description": "Raised when a RANGE window is applied to a component with an " + "incompatible data type (String, Duration, TimePeriod, TimeInterval).", + }, # ---------Semantic Analyzer Common---- "1-2-1": { "message": "Please don't use twice {alias} like var_to.", diff --git a/src/vtlengine/Operators/Aggregation.py b/src/vtlengine/Operators/Aggregation.py index f51332f22..3dc8d9277 100644 --- a/src/vtlengine/Operators/Aggregation.py +++ b/src/vtlengine/Operators/Aggregation.py @@ -58,13 +58,15 @@ def _handle_data_types(cls, data: pd.DataFrame, measures: List[Component], 
mode: for measure in measures: if measure.data_type == TimePeriod: if mode == "input": + if cls.op in [MAX, MIN]: + indicators = ( + data[measure.name].dropna().str.extract(r"^\d{4}-?([ASQMWD])")[0] + ) + if indicators.nunique() > 1: + raise RunTimeError("2-1-19-20", op=cls.op) data[measure.name] = data[measure.name].map( lambda x: TimePeriodHandler(str(x)), na_action="ignore" ) - if cls.op in [MAX, MIN]: - indicators = {v.period_indicator for v in data[measure.name].dropna()} - if len(indicators) > 1: - raise RunTimeError("2-1-19-20", op=cls.op) else: data[measure.name] = data[measure.name].map( lambda x: str(x), na_action="ignore" @@ -138,6 +140,17 @@ def validate( # type: ignore[override] for comp_name, comp in operand.components.items(): if comp.role == Role.ATTRIBUTE: del result_components[comp_name] + # TimeInterval is not supported as a measure in aggregate operations + if any( + comp.role == Role.MEASURE and comp.data_type is TimeInterval + for comp in result_components.values() + ): + raise SemanticError( + "1-1-19-12", + op=cls.op, + context="aggregate", + ) + # Change Measure data type for _, comp in result_components.items(): if comp.role == Role.MEASURE: diff --git a/src/vtlengine/Operators/Analytic.py b/src/vtlengine/Operators/Analytic.py index c2b44f129..578ba06b5 100644 --- a/src/vtlengine/Operators/Analytic.py +++ b/src/vtlengine/Operators/Analytic.py @@ -29,8 +29,12 @@ from vtlengine.DataTypes import ( COMP_NAME_MAPPING, Date, + Duration, Integer, Number, + String, + TimeInterval, + TimePeriod, unary_implicit_promotion, ) from vtlengine.Exceptions import RunTimeError, SemanticError @@ -93,6 +97,43 @@ def validate( # type: ignore[override] # noqa: C901 comp_name=comp_name, dataset_name=operand.name, ) + # TimeInterval is not supported in ORDER BY + if operand.components[comp_name].data_type is TimeInterval: + raise SemanticError( + "1-1-19-12", + op=cls.op, + context="analytic", + ) + # RANGE window is not supported for String, Duration, 
TimePeriod, TimeInterval + range_unsupported_types = (String, Duration, TimePeriod, TimeInterval) + if ( + window is not None + and window.type_ != "data" + and operand.components[comp_name].data_type in range_unsupported_types + ): + raise SemanticError( + "1-1-19-13", + op=cls.op, + data_type=operand.components[comp_name].data_type.__name__, + comp_name=comp_name, + ) + + # TimeInterval is not supported as a measure in analytic operations + if component_name is not None: + if operand.components[component_name].data_type is TimeInterval: + raise SemanticError( + "1-1-19-12", + op=cls.op, + context="analytic", + ) + else: + if any(me.data_type is TimeInterval for me in operand.get_measures()): + raise SemanticError( + "1-1-19-12", + op=cls.op, + context="analytic", + ) + if component_name is not None: if cls.type_to_check is not None: unary_implicit_promotion( @@ -290,6 +331,19 @@ def evaluate( # type: ignore[override] else: measure_names = operand.get_measures_names() + # Validate TimePeriod measures have same period indicator for MAX/MIN + if cls.op in [MAX, MIN]: + measures = ( + [operand.components[component_name]] + if component_name is not None + else operand.get_measures() + ) + for measure in measures: + if measure.data_type is TimePeriod: + indicators = df[measure.name].dropna().str.extract(r"^\d{4}-?([ASQMWD])")[0] + if indicators.nunique() > 1: + raise RunTimeError("2-1-19-20", op=cls.op) + result.data = cls.analyticfunc( df=df, partitioning=partitioning, diff --git a/tests/Additional/test_additional.py b/tests/Additional/test_additional.py index b5f4b8535..826c4fdb6 100644 --- a/tests/Additional/test_additional.py +++ b/tests/Additional/test_additional.py @@ -3933,7 +3933,7 @@ def test_GH_261_5(self): text = "DS_r <- DS_1[aggr Me_2 := max(Me_1)];" code = "GH_261" number_inputs = 1 - exception_code = "2-1-19-18" + exception_code = "1-1-19-12" self.NewSemanticExceptionTest( text=text, code=code, number_inputs=number_inputs, exception_code=exception_code 
@@ -3943,7 +3943,7 @@ def test_GH_261_6(self): text = "DS_r <- DS_1[aggr Me_2 := min(Me_1)];" code = "GH_261" number_inputs = 1 - exception_code = "2-1-19-18" + exception_code = "1-1-19-12" self.NewSemanticExceptionTest( text=text, code=code, number_inputs=number_inputs, exception_code=exception_code diff --git a/tests/Aggregate/data/DataSet/input/GH_554_1-1.csv b/tests/Aggregate/data/DataSet/input/GH_554_1-1.csv new file mode 100644 index 000000000..84f0c6689 --- /dev/null +++ b/tests/Aggregate/data/DataSet/input/GH_554_1-1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +A,1,2020-01-01/2020-06-30 +A,2,2020-07-01/2020-12-31 +B,1,2021-01-01/2021-06-30 diff --git a/tests/Aggregate/data/DataSet/input/GH_554_2-1.csv b/tests/Aggregate/data/DataSet/input/GH_554_2-1.csv new file mode 100644 index 000000000..7020481c1 --- /dev/null +++ b/tests/Aggregate/data/DataSet/input/GH_554_2-1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +A,1,2020A +A,2,2020Q1 +B,1,2021M01 diff --git a/tests/Aggregate/data/DataStructure/input/GH_554_1-1.json b/tests/Aggregate/data/DataStructure/input/GH_554_1-1.json new file mode 100644 index 000000000..76b0df83a --- /dev/null +++ b/tests/Aggregate/data/DataStructure/input/GH_554_1-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Time", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Aggregate/data/DataStructure/input/GH_554_2-1.json b/tests/Aggregate/data/DataStructure/input/GH_554_2-1.json new file mode 100644 index 000000000..25cef532d --- /dev/null +++ b/tests/Aggregate/data/DataStructure/input/GH_554_2-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + 
"name": "Id_2", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Time_Period", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Aggregate/data/vtl/GH_554_1.vtl b/tests/Aggregate/data/vtl/GH_554_1.vtl new file mode 100644 index 000000000..eabfdc3a9 --- /dev/null +++ b/tests/Aggregate/data/vtl/GH_554_1.vtl @@ -0,0 +1 @@ +DS_r := sum(DS_1 group by Id_1); diff --git a/tests/Aggregate/data/vtl/GH_554_2.vtl b/tests/Aggregate/data/vtl/GH_554_2.vtl new file mode 100644 index 000000000..aa29ed865 --- /dev/null +++ b/tests/Aggregate/data/vtl/GH_554_2.vtl @@ -0,0 +1 @@ +DS_r := max(DS_1 group by Id_1); diff --git a/tests/Aggregate/test_aggregate.py b/tests/Aggregate/test_aggregate.py index 99082f42e..b38aa4e07 100644 --- a/tests/Aggregate/test_aggregate.py +++ b/tests/Aggregate/test_aggregate.py @@ -1331,3 +1331,46 @@ def test_GH_550_2(self): references_names = ["1"] self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_GH_554_1(self): + """ + Sum: sum + Dataset --> Dataset + Status: SemanticError + Expression: DS_r := sum(DS_1 group by Id_1); + DS_1 Dataset + + Description: Fix #554: TimeInterval measure is not supported in aggregate operations. + + Goal: Check that a SemanticError is raised when a TimeInterval measure is used + in aggregate operations. + """ + code = "GH_554_1" + number_inputs = 1 + exception_code = "1-1-19-12" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) + + def test_GH_554_2(self): + """ + Max: max + Dataset --> Dataset + Status: RunTimeError + Expression: DS_r := max(DS_1 group by Id_1); + DS_1 Dataset + + Description: Fix #554: TimePeriod measures with different period indicators + are not supported in aggregate MAX/MIN operations. + + Goal: Check that a RunTimeError is raised when TimePeriod values have different + period indicators in aggregate operations. 
+ """ + code = "GH_554_2" + number_inputs = 1 + exception_code = "2-1-19-20" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) diff --git a/tests/Analytic/data/DataSet/input/GH_554_1-1.csv b/tests/Analytic/data/DataSet/input/GH_554_1-1.csv new file mode 100644 index 000000000..84f0c6689 --- /dev/null +++ b/tests/Analytic/data/DataSet/input/GH_554_1-1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +A,1,2020-01-01/2020-06-30 +A,2,2020-07-01/2020-12-31 +B,1,2021-01-01/2021-06-30 diff --git a/tests/Analytic/data/DataSet/input/GH_554_2-1.csv b/tests/Analytic/data/DataSet/input/GH_554_2-1.csv new file mode 100644 index 000000000..7b97eb4c2 --- /dev/null +++ b/tests/Analytic/data/DataSet/input/GH_554_2-1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +A,2020-01-01/2020-06-30,10 +A,2020-07-01/2020-12-31,20 +B,2021-01-01/2021-06-30,30 diff --git a/tests/Analytic/data/DataSet/input/GH_554_3-1.csv b/tests/Analytic/data/DataSet/input/GH_554_3-1.csv new file mode 100644 index 000000000..47f299cad --- /dev/null +++ b/tests/Analytic/data/DataSet/input/GH_554_3-1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +A,XX,10 +A,YY,20 +B,XX,30 diff --git a/tests/Analytic/data/DataSet/input/GH_554_4-1.csv b/tests/Analytic/data/DataSet/input/GH_554_4-1.csv new file mode 100644 index 000000000..5f41605ec --- /dev/null +++ b/tests/Analytic/data/DataSet/input/GH_554_4-1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +A,A,10 +A,M,20 +B,Q,30 diff --git a/tests/Analytic/data/DataSet/input/GH_554_5-1.csv b/tests/Analytic/data/DataSet/input/GH_554_5-1.csv new file mode 100644 index 000000000..4eef670c4 --- /dev/null +++ b/tests/Analytic/data/DataSet/input/GH_554_5-1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +A,2020A,10 +A,2021A,20 +B,2020Q1,30 diff --git a/tests/Analytic/data/DataSet/input/GH_554_6-1.csv b/tests/Analytic/data/DataSet/input/GH_554_6-1.csv new file mode 100644 index 000000000..7020481c1 --- /dev/null +++ b/tests/Analytic/data/DataSet/input/GH_554_6-1.csv @@ -0,0 +1,4 @@ 
+Id_1,Id_2,Me_1 +A,1,2020A +A,2,2020Q1 +B,1,2021M01 diff --git a/tests/Analytic/data/DataStructure/input/GH_554_1-1.json b/tests/Analytic/data/DataStructure/input/GH_554_1-1.json new file mode 100644 index 000000000..76b0df83a --- /dev/null +++ b/tests/Analytic/data/DataStructure/input/GH_554_1-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Time", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Analytic/data/DataStructure/input/GH_554_2-1.json b/tests/Analytic/data/DataStructure/input/GH_554_2-1.json new file mode 100644 index 000000000..124077514 --- /dev/null +++ b/tests/Analytic/data/DataStructure/input/GH_554_2-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Time", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Analytic/data/DataStructure/input/GH_554_3-1.json b/tests/Analytic/data/DataStructure/input/GH_554_3-1.json new file mode 100644 index 000000000..5c85b3adb --- /dev/null +++ b/tests/Analytic/data/DataStructure/input/GH_554_3-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Analytic/data/DataStructure/input/GH_554_4-1.json 
b/tests/Analytic/data/DataStructure/input/GH_554_4-1.json new file mode 100644 index 000000000..bf4991d13 --- /dev/null +++ b/tests/Analytic/data/DataStructure/input/GH_554_4-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Duration", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Analytic/data/DataStructure/input/GH_554_5-1.json b/tests/Analytic/data/DataStructure/input/GH_554_5-1.json new file mode 100644 index 000000000..dba53f438 --- /dev/null +++ b/tests/Analytic/data/DataStructure/input/GH_554_5-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Time_Period", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Analytic/data/DataStructure/input/GH_554_6-1.json b/tests/Analytic/data/DataStructure/input/GH_554_6-1.json new file mode 100644 index 000000000..25cef532d --- /dev/null +++ b/tests/Analytic/data/DataStructure/input/GH_554_6-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Time_Period", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Analytic/data/vtl/GH_554_1.vtl b/tests/Analytic/data/vtl/GH_554_1.vtl new file mode 100644 index 000000000..aca7cc017 --- /dev/null +++ b/tests/Analytic/data/vtl/GH_554_1.vtl @@ -0,0 +1 @@ +DS_r 
:= max(DS_1 over (partition by Id_1 order by Id_2)); diff --git a/tests/Analytic/data/vtl/GH_554_2.vtl b/tests/Analytic/data/vtl/GH_554_2.vtl new file mode 100644 index 000000000..aca7cc017 --- /dev/null +++ b/tests/Analytic/data/vtl/GH_554_2.vtl @@ -0,0 +1 @@ +DS_r := max(DS_1 over (partition by Id_1 order by Id_2)); diff --git a/tests/Analytic/data/vtl/GH_554_3.vtl b/tests/Analytic/data/vtl/GH_554_3.vtl new file mode 100644 index 000000000..e46981cd5 --- /dev/null +++ b/tests/Analytic/data/vtl/GH_554_3.vtl @@ -0,0 +1 @@ +DS_r := max(DS_1 over (partition by Id_1 order by Id_2 range between 1 preceding and 1 following)); diff --git a/tests/Analytic/data/vtl/GH_554_4.vtl b/tests/Analytic/data/vtl/GH_554_4.vtl new file mode 100644 index 000000000..e46981cd5 --- /dev/null +++ b/tests/Analytic/data/vtl/GH_554_4.vtl @@ -0,0 +1 @@ +DS_r := max(DS_1 over (partition by Id_1 order by Id_2 range between 1 preceding and 1 following)); diff --git a/tests/Analytic/data/vtl/GH_554_5.vtl b/tests/Analytic/data/vtl/GH_554_5.vtl new file mode 100644 index 000000000..e46981cd5 --- /dev/null +++ b/tests/Analytic/data/vtl/GH_554_5.vtl @@ -0,0 +1 @@ +DS_r := max(DS_1 over (partition by Id_1 order by Id_2 range between 1 preceding and 1 following)); diff --git a/tests/Analytic/data/vtl/GH_554_6.vtl b/tests/Analytic/data/vtl/GH_554_6.vtl new file mode 100644 index 000000000..aca7cc017 --- /dev/null +++ b/tests/Analytic/data/vtl/GH_554_6.vtl @@ -0,0 +1 @@ +DS_r := max(DS_1 over (partition by Id_1 order by Id_2)); diff --git a/tests/Analytic/test_analytic.py b/tests/Analytic/test_analytic.py index 9773f5662..c8feb3611 100644 --- a/tests/Analytic/test_analytic.py +++ b/tests/Analytic/test_analytic.py @@ -493,6 +493,133 @@ def test_GH_550_3(self): self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + def test_GH_554_1(self): + """ + Max: max + Dataset --> Dataset + Status: SemanticError + Expression: DS_r := max(DS_1 over (partition by Id_1 order by 
Id_2)); + DS_1 Dataset + + Description: Fix #554: TimeInterval measure is not supported in analytic operations. + + Goal: Check that a SemanticError is raised when a TimeInterval measure is used. + """ + code = "GH_554_1" + number_inputs = 1 + exception_code = "1-1-19-12" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) + + def test_GH_554_2(self): + """ + Max: max + Dataset --> Dataset + Status: SemanticError + Expression: DS_r := max(DS_1 over (partition by Id_1 order by Id_2)); + DS_1 Dataset + + Description: Fix #554: TimeInterval identifier in ORDER BY is not supported + in analytic operations. + + Goal: Check that a SemanticError is raised when a TimeInterval identifier + is used in ORDER BY. + """ + code = "GH_554_2" + number_inputs = 1 + exception_code = "1-1-19-12" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) + + def test_GH_554_3(self): + """ + Max: max + Dataset --> Dataset + Status: SemanticError + Expression: DS_r := max(DS_1 over (partition by Id_1 order by Id_2 + range between 1 preceding and 1 following)); + DS_1 Dataset + + Description: Fix #554: RANGE window is not supported for String data type. + + Goal: Check that a SemanticError is raised when RANGE is used with String ORDER BY. + """ + code = "GH_554_3" + number_inputs = 1 + exception_code = "1-1-19-13" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) + + def test_GH_554_4(self): + """ + Max: max + Dataset --> Dataset + Status: SemanticError + Expression: DS_r := max(DS_1 over (partition by Id_1 order by Id_2 + range between 1 preceding and 1 following)); + DS_1 Dataset + + Description: Fix #554: RANGE window is not supported for Duration data type. + + Goal: Check that a SemanticError is raised when RANGE is used with Duration ORDER BY. 
+ """ + code = "GH_554_4" + number_inputs = 1 + exception_code = "1-1-19-13" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) + + def test_GH_554_5(self): + """ + Max: max + Dataset --> Dataset + Status: SemanticError + Expression: DS_r := max(DS_1 over (partition by Id_1 order by Id_2 + range between 1 preceding and 1 following)); + DS_1 Dataset + + Description: Fix #554: RANGE window is not supported for Time_Period data type. + + Goal: Check that a SemanticError is raised when RANGE is used with TimePeriod ORDER BY. + """ + code = "GH_554_5" + number_inputs = 1 + exception_code = "1-1-19-13" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) + + def test_GH_554_6(self): + """ + Max: max + Dataset --> Dataset + Status: RunTimeError + Expression: DS_r := max(DS_1 over (partition by Id_1 order by Id_2)); + DS_1 Dataset + + Description: Fix #554: TimePeriod measures with different period indicators + are not supported in analytic MAX/MIN operations. + + Goal: Check that a RunTimeError is raised when TimePeriod values have different + period indicators. 
+ """ + code = "GH_554_6" + number_inputs = 1 + exception_code = "2-1-19-20" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) + class AnalyticOperatorsWithCalcTest(AnalyticHelper): """ diff --git a/tests/Complete_VTL_Grammar/data/DataSet/output/rank_ds.csv b/tests/Complete_VTL_Grammar/data/DataSet/output/rank_ds.csv index 1f5704241..57999d549 100644 --- a/tests/Complete_VTL_Grammar/data/DataSet/output/rank_ds.csv +++ b/tests/Complete_VTL_Grammar/data/DataSet/output/rank_ds.csv @@ -1,11 +1,11 @@ -Id_1,Id_2,Id_3,Me_int,Me_num,Me_str,Me_bool,Id_date,Id_period,Me_interval,Me_rank -1,A,1,1.0,1.1,hello,True,2001-01-01,2000M01,,1 -2,A,2,2.0,2.2,world!,,2002-02-01,2000M02,,1 -3,A,3,3.0,3.3,abc,True,2003-03-01,2000M03,,1 -1,B,4,4.0,4.4,ES,False,2004-04-01,2000M04,,1 -2,B,5,5.0,5.5,,True,2005-01-01,2000M05,,1 -3,B,6,6.0,6.6,jKl,False,2006-02-01,2000M06,,1 -1,C,7,7.0,7.7,Mno,True,2007-03-01,2000M07,,1 -2,C,8,8.0,8.8,,,2008-04-01,2000M08,,1 -3,C,9,9.0,9.9,DE,True,2009-01-01,2001M06,,1 -4,D,10,,10.0,z,False,2010-02-01,2002M01,,1 +Id_1,Id_2,Id_3,Me_int,Me_num,Me_str,Me_bool,Id_date,Id_period,Me_rank +1,A,1,1.0,1.1,hello,True,2001-01-01,2000M01,1 +2,A,2,2.0,2.2,world!,,2002-02-01,2000M02,1 +3,A,3,3.0,3.3,abc,True,2003-03-01,2000M03,1 +1,B,4,4.0,4.4,ES,False,2004-04-01,2000M04,1 +2,B,5,5.0,5.5,,True,2005-01-01,2000M05,1 +3,B,6,6.0,6.6,jKl,False,2006-02-01,2000M06,1 +1,C,7,7.0,7.7,Mno,True,2007-03-01,2000M07,1 +2,C,8,8.0,8.8,,,2008-04-01,2000M08,1 +3,C,9,9.0,9.9,DE,True,2009-01-01,2001M06,1 +4,D,10,,10.0,z,False,2010-02-01,2002M01,1 diff --git a/tests/Complete_VTL_Grammar/data/DataStructure/output/reference.json b/tests/Complete_VTL_Grammar/data/DataStructure/output/reference.json index b1f029039..624552bc9 100644 --- a/tests/Complete_VTL_Grammar/data/DataStructure/output/reference.json +++ b/tests/Complete_VTL_Grammar/data/DataStructure/output/reference.json @@ -845,12 +845,6 @@ "role": "Identifier", "nullable": false 
}, - { - "name": "Me_interval", - "type": "Time", - "role": "Measure", - "nullable": true - }, { "name": "Me_rank", "type": "Integer", diff --git a/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl b/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl index 9172248ea..83c19a048 100644 --- a/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl +++ b/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl @@ -28,7 +28,7 @@ first_value_ds := first_value(DS_1#Me_num over (partition by Id_1, Id_2 order by last_value_ds := first_value(DS_1#Me_int over (partition by Id_1, Id_2 order by Id_3 asc)); lag_ds := lag(DS_1#Me_num, 1 over (partition by Id_1 , Id_2 order by Id_3)); lead_ds := lead(DS_1#Me_num, 2 over (partition by Id_1, Id_2 order by Id_3 asc)); -rank_ds := DS_1[calc Me_rank := rank (over (partition by Id_1, Id_2 order by Id_3))]; +rank_ds := DS_1_without_time[calc Me_rank := rank (over (partition by Id_1, Id_2 order by Id_3))]; ratio_to_report_ds := ratio_to_report(DS_1#Me_num over (partition by Id_1, Id_2)); max_analytic_ds := max(DS_1_without_time#Me_int over (partition by Id_1, Id_2 order by Id_3 desc)); min_analytic_ds := min(DS_1_without_time#Me_int over (partition by Id_1, Id_2 order by Id_3 asc)); diff --git a/tests/TypeChecking/AggregateOperators/test_aggregate_operators.py b/tests/TypeChecking/AggregateOperators/test_aggregate_operators.py index 778cd81ad..a70d67c1b 100644 --- a/tests/TypeChecking/AggregateOperators/test_aggregate_operators.py +++ b/tests/TypeChecking/AggregateOperators/test_aggregate_operators.py @@ -160,7 +160,7 @@ def test_9(self): """ code = "10-1-9" number_inputs = 1 - message = "1-1-1-1" + message = "1-1-19-12" self.NewSemanticExceptionTest( code=code, number_inputs=number_inputs, exception_code=message ) @@ -229,9 +229,11 @@ def test_13(self): """ code = "10-1-13" number_inputs = 1 - references_names = ["DS_r"] + exception_code = "1-1-19-12" - self.BaseTest(code=code, number_inputs=number_inputs, 
references_names=references_names) + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) def test_14(self): """ @@ -1504,9 +1506,11 @@ def test_12(self): """ code = "10-2-12" number_inputs = 1 - references_names = ["DS_r"] + exception_code = "1-1-19-12" - self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) def test_13(self): """ From f2183d3ea518ae0cdd3a109e91ca5fc72f11e600 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Wed, 4 Mar 2026 11:07:26 +0100 Subject: [PATCH 13/38] Fix #557: Add custom release creation workflow based on issue types (#559) --- .claude/CLAUDE.md | 1 + .github/copilot-instructions.md | 1 + .github/workflows/create-release.yml | 308 +++++++++++++++++++++++++++ 3 files changed, 310 insertions(+) create mode 100644 .github/workflows/create-release.yml diff --git a/.claude/CLAUDE.md b/.claude/CLAUDE.md index 3c2232b13..cda8b5d5a 100644 --- a/.claude/CLAUDE.md +++ b/.claude/CLAUDE.md @@ -257,6 +257,7 @@ print(run_result) - Always follow the pull request template in `.github/PULL_REQUEST_TEMPLATE.md` - Focus on what changed, why, impact/risk, and notes +- Always include a closing keyword linking to the related issue in the PR description (e.g., `Fixes #123`, `Closes #456`, `Resolves #789`). 
This is required for the automated release notes workflow to categorize PRs by issue type ## Common Pitfalls diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 3c2232b13..cda8b5d5a 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -257,6 +257,7 @@ print(run_result) - Always follow the pull request template in `.github/PULL_REQUEST_TEMPLATE.md` - Focus on what changed, why, impact/risk, and notes +- Always include a closing keyword linking to the related issue in the PR description (e.g., `Fixes #123`, `Closes #456`, `Resolves #789`). This is required for the automated release notes workflow to categorize PRs by issue type ## Common Pitfalls diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml new file mode 100644 index 000000000..dce90263e --- /dev/null +++ b/.github/workflows/create-release.yml @@ -0,0 +1,308 @@ +name: Create Release + +on: + workflow_dispatch: + +permissions: + contents: write + issues: read + pull-requests: read + +jobs: + create-release: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Create release with notes + uses: actions/github-script@v7 + with: + script: | + const fs = require('fs'); + + // 1. Read version from pyproject.toml + const pyproject = fs.readFileSync('pyproject.toml', 'utf8'); + const versionMatch = pyproject.match(/^version\s*=\s*"([^"]+)"/m); + if (!versionMatch) { + core.setFailed('Could not read version from pyproject.toml'); + return; + } + const version = versionMatch[1]; + const tagName = `v${version}`; + core.info(`Version: ${version}, Tag: ${tagName}`); + + // 2. Determine if pre-release + const isPrerelease = /rc|alpha|beta|dev/.test(version); + core.info(`Pre-release: ${isPrerelease}`); + + // 3. 
Check tag does not already exist + try { + await github.rest.git.getRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: `tags/${tagName}`, + }); + core.setFailed(`Tag ${tagName} already exists`); + return; + } catch (e) { + if (e.status !== 404) throw e; + } + + // 4. Find previous release + // For stable releases: compare against previous stable release + // For pre-releases: compare against previous release (any type) + const releases = await github.rest.repos.listReleases({ + owner: context.repo.owner, + repo: context.repo.repo, + per_page: 100, + }); + let previousRelease; + if (isPrerelease) { + previousRelease = releases.data[0]; + } else { + previousRelease = releases.data.find(r => !r.prerelease); + } + const previousTag = previousRelease ? previousRelease.tag_name : null; + core.info(`Previous release: ${previousTag || 'none'}`); + + // 5. Get commits between previous tag and HEAD + let commits; + if (previousTag) { + const comparison = await github.rest.repos.compareCommitsWithBasehead({ + owner: context.repo.owner, + repo: context.repo.repo, + basehead: `${previousTag}...HEAD`, + }); + commits = comparison.data.commits; + } else { + const commitList = await github.rest.repos.listCommits({ + owner: context.repo.owner, + repo: context.repo.repo, + per_page: 100, + }); + commits = commitList.data; + } + + // 6. 
Extract PR numbers from commit messages + const prNumbers = new Set(); + for (const commit of commits) { + const matches = commit.commit.message.matchAll(/\(#(\d+)\)/g); + for (const match of matches) { + prNumbers.add(parseInt(match[1])); + } + } + core.info(`Found ${prNumbers.size} PRs: ${[...prNumbers].join(', ')}`); + + if (prNumbers.size === 0) { + core.info('No PRs found, creating release with empty notes'); + await github.rest.repos.createRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + tag_name: tagName, + name: tagName, + body: `**Full Changelog**: https://github.com/${context.repo.owner}/${context.repo.repo}/compare/${previousTag}...${tagName}`, + prerelease: isPrerelease, + }); + return; + } + + // 7. Query PR details with linked issues and issue types via GraphQL + const prAliases = [...prNumbers].map((n, i) => ` + pr${i}: pullRequest(number: ${n}) { + number + title + url + author { login } + closingIssuesReferences(first: 10, userLinkedOnly: false) { + nodes { + number + issueType { name } + } + } + } + `).join(''); + + const query = ` + query { + repository(owner: "${context.repo.owner}", name: "${context.repo.repo}") { + ${prAliases} + } + } + `; + + const graphqlResult = await github.graphql(query, { + headers: { 'GraphQL-Features': 'issue_types' }, + }); + + const prs = Object.values(graphqlResult.repository).filter(Boolean); + + // 7b. Fallback: for PRs without closingIssuesReferences, parse issue + // numbers from title (e.g. 
"Fix #504:") and query their issue types + const unresolvedIssueNumbers = new Set(); + for (const pr of prs) { + const linked = pr.closingIssuesReferences?.nodes || []; + if (linked.length === 0) { + const titleMatch = pr.title.match(/(?:Fix|Fixes|Close|Closes|Resolve|Resolves)\s+#(\d+)/i); + if (titleMatch) { + unresolvedIssueNumbers.add(parseInt(titleMatch[1])); + } + } + } + + const issueTypeMap = new Map(); + if (unresolvedIssueNumbers.size > 0) { + const issueAliases = [...unresolvedIssueNumbers].map((n, i) => ` + issue${i}: issue(number: ${n}) { + number + issueType { name } + } + `).join(''); + + const issueQuery = ` + query { + repository(owner: "${context.repo.owner}", name: "${context.repo.repo}") { + ${issueAliases} + } + } + `; + + const issueResult = await github.graphql(issueQuery, { + headers: { 'GraphQL-Features': 'issue_types' }, + }); + + for (const issue of Object.values(issueResult.repository).filter(Boolean)) { + if (issue.issueType?.name) { + issueTypeMap.set(issue.number, issue.issueType.name); + } + } + core.info(`Resolved ${issueTypeMap.size} issue types from PR titles: ${[...issueTypeMap.entries()].map(([n, t]) => `#${n}=${t}`).join(', ')}`); + } + + // 8. Parse main dependencies from pyproject.toml + const depsSection = pyproject.match(/\[project\]\s[\s\S]*?dependencies\s*=\s*\[([\s\S]*?)\]/); + const mainDeps = new Set(); + if (depsSection) { + const depLines = depsSection[1].matchAll(/"([a-zA-Z0-9_-]+)/g); + for (const match of depLines) { + mainDeps.add(match[1].toLowerCase()); + } + } + core.info(`Main dependencies: ${[...mainDeps].join(', ')}`); + + // 9. 
Categorize PRs + const categories = { + 'Added': [], + 'Changes': [], + 'Bug fixes': [], + 'Dependencies': [], + 'Other Changes': [], + }; + + for (const pr of prs) { + const title = pr.title; + const author = pr.author?.login || 'unknown'; + const url = pr.url; + const entry = `* ${title} by @${author} in ${url}`; + + // Exclude version bumps always; exclude QA PRs only for stable releases + if (/^Bump version/i.test(title)) { + continue; + } + if (!isPrerelease && /^\(QA\b/i.test(title)) { + continue; + } + + const isDependabot = author === 'dependabot[bot]' || author === 'dependabot'; + + // Check linked issue types (closing refs first, then title fallback) + const linkedIssues = pr.closingIssuesReferences?.nodes || []; + let issueType = linkedIssues.length > 0 + ? linkedIssues[0].issueType?.name + : null; + + if (!issueType) { + const titleMatch = title.match(/(?:Fix|Fixes|Close|Closes|Resolve|Resolves)\s+#(\d+)/i); + if (titleMatch) { + issueType = issueTypeMap.get(parseInt(titleMatch[1])) || null; + } + } + + if (issueType) { + switch (issueType.toLowerCase()) { + case 'feature': + categories['Added'].push(entry); + break; + case 'task': + categories['Changes'].push(entry); + break; + case 'bug': + categories['Bug fixes'].push(entry); + break; + default: + categories['Other Changes'].push(entry); + } + } else if (isDependabot) { + // Extract package name from dependabot PR title + const pkgMatch = title.match(/^Bump\s+(\S+)/i); + const pkg = pkgMatch ? pkgMatch[1].toLowerCase() : null; + if (pkg && mainDeps.has(pkg)) { + categories['Dependencies'].push(entry); + } + // Dev dependencies are excluded (not added to any category) + } else { + categories['Other Changes'].push(entry); + } + } + + // 10. Generate markdown + const sections = []; + for (const [title, entries] of Object.entries(categories)) { + if (entries.length > 0) { + sections.push(`## ${title}\n${entries.join('\n')}`); + } + } + + const compareUrl = previousTag + ? 
`https://github.com/${context.repo.owner}/${context.repo.repo}/compare/${previousTag}...${tagName}` + : `https://github.com/${context.repo.owner}/${context.repo.repo}/commits/${tagName}`; + sections.push(`**Full Changelog**: ${compareUrl}`); + + const body = sections.join('\n\n'); + core.info(`Release notes:\n${body}`); + + // 11. Create release + await github.rest.repos.createRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + tag_name: tagName, + name: tagName, + body: body, + prerelease: isPrerelease, + }); + + core.info(`Release ${tagName} created successfully`); + + // 12. Delete pre-releases when publishing a stable release + if (!isPrerelease) { + const preReleases = releases.data.filter(r => r.prerelease); + for (const pr of preReleases) { + core.info(`Deleting pre-release: ${pr.tag_name}`); + await github.rest.repos.deleteRelease({ + owner: context.repo.owner, + repo: context.repo.repo, + release_id: pr.id, + }); + // Delete the associated tag + try { + await github.rest.git.deleteRef({ + owner: context.repo.owner, + repo: context.repo.repo, + ref: `tags/${pr.tag_name}`, + }); + } catch (e) { + core.warning(`Could not delete tag ${pr.tag_name}: ${e.message}`); + } + } + core.info(`Deleted ${preReleases.length} pre-release(s)`); + } From 480f92b9b2ba45af10cb8b65536aa4c28168b09c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Wed, 4 Mar 2026 13:41:35 +0100 Subject: [PATCH 14/38] Bump version to 1.6.0rc4 (#563) --- pyproject.toml | 2 +- src/vtlengine/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3e57d5a03..a75fc10d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "vtlengine" -version = "1.6.0rc3" +version = "1.6.0rc4" description = "Run and Validate VTL Scripts" license = "AGPL-3.0" readme = "README.md" diff --git a/src/vtlengine/__init__.py b/src/vtlengine/__init__.py index 
64480d5ea..57abc8915 100644 --- a/src/vtlengine/__init__.py +++ b/src/vtlengine/__init__.py @@ -24,4 +24,4 @@ "validate_external_routine", ] -__version__ = "1.6.0rc3" +__version__ = "1.6.0rc4" From 68b99b56643f027dd3290252853ea3901e9f0cc1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Wed, 4 Mar 2026 16:40:19 +0100 Subject: [PATCH 15/38] Fix #555: Align grammar with standard VTL 2.1 (#564) * Updated VTL Grammar * Uodated lexer and parser * Fixed related tests * Grammar updated to the official VTL grammar * Lexer and Parser regenerated * Refactor comment handling in generate_ast_comment to use rstrip for newline removal * Refactor time-related parsing in Expr and ExprComp * Refactor constant handling in Terminals * Fixed ruff errors * Fixed mypy errors --- src/vtlengine/AST/ASTComment.py | 3 +- .../AST/ASTConstructorModules/Expr.py | 41 +- .../ASTConstructorModules/ExprComponents.py | 11 +- .../AST/ASTConstructorModules/Terminals.py | 89 +- src/vtlengine/AST/Grammar/Vtl.g4 | 80 +- src/vtlengine/AST/Grammar/VtlTokens.g4 | 49 +- src/vtlengine/AST/Grammar/lexer.py | 1786 ++-- src/vtlengine/AST/Grammar/parser.py | 7741 ++++++++++------- src/vtlengine/AST/Grammar/tokens.py | 4 +- src/vtlengine/Operators/Numeric.py | 2 +- tests/API/test_api.py | 4 +- tests/AST/data/prettier/complete_grammar.vtl | 4 +- .../prettier/reference_complete_grammar.vtl | 4 +- tests/AST/data/vtl/complete_grammar.vtl | 4 +- tests/AST/data/vtl/time.vtl | 4 +- .../data/DataSet/output/GH_164_2-1.csv | 3 +- .../data/DataSet/output/GH_164_2-2.csv | 2 - .../data/DataStructure/output/GH_164_2-1.json | 25 +- .../data/DataStructure/output/GH_164_2-2.json | 27 - tests/Aggregate/data/vtl/GH_164_2.vtl | 3 +- tests/Aggregate/test_aggregate.py | 2 +- .../data/vtl/test_grammar.vtl | 4 +- tests/DAG/data/vtl/9.vtl | 2 +- tests/DateTime/test_datetime.py | 12 +- .../UnaryTime/test_time_operators.py | 24 +- 
tests/ReferenceManual/data/vtl/RM179.vtl | 2 +- .../data/vtl_defined_operators/RM179.vtl | 2 +- 27 files changed, 5813 insertions(+), 4121 deletions(-) delete mode 100644 tests/Aggregate/data/DataSet/output/GH_164_2-2.csv delete mode 100644 tests/Aggregate/data/DataStructure/output/GH_164_2-2.json diff --git a/src/vtlengine/AST/ASTComment.py b/src/vtlengine/AST/ASTComment.py index 69a863310..727730613 100644 --- a/src/vtlengine/AST/ASTComment.py +++ b/src/vtlengine/AST/ASTComment.py @@ -20,7 +20,8 @@ def generate_ast_comment(token: CommonToken) -> Comment: token_info = extract_token_info(token) text = token.text if token.type == Lexer.SL_COMMENT: - text = token.text[:-1] # Remove the trailing newline character + # Usage rstrip("\r\n") instead of [:-1] since SL_COMMENT no longer includes trailing newline + text = token.text.rstrip("\r\n") return Comment(value=text, **token_info) diff --git a/src/vtlengine/AST/ASTConstructorModules/Expr.py b/src/vtlengine/AST/ASTConstructorModules/Expr.py index 97b3cb544..1f93d6ac1 100644 --- a/src/vtlengine/AST/ASTConstructorModules/Expr.py +++ b/src/vtlengine/AST/ASTConstructorModules/Expr.py @@ -847,8 +847,8 @@ def visitTimeFunctions(self, ctx: Parser.TimeFunctionsContext): Parser.DayOfYearAtomContext, Parser.DayToYearAtomContext, Parser.DayToMonthAtomContext, - Parser.YearToDayAtomContext, - Parser.MonthToDayAtomContext, + Parser.YearTodayAtomContext, + Parser.MonthTodayAtomContext, ), ): return self.visitTimeUnaryAtom(ctx) @@ -1730,7 +1730,42 @@ def visitGroupAll(self, ctx: Parser.GroupAllContext): op_node = token_left + " " + token_right - children_nodes = [ExprComp().visitExprComponent(ctx_list[2])] + children_nodes: list = [] + + # Check if TIME_AGG is present (more than just GROUP ALL) + if len(ctx_list) > 2: + period_to = None + period_from = None + operand_node = None + conf = None + + for child in ctx_list: + if isinstance(child, TerminalNodeImpl): + token = child.getSymbol() + if token.type == Parser.STRING_CONSTANT: + 
if period_to is None: + period_to = token.text[1:-1] + else: + period_from = token.text[1:-1] + elif token.type in [Parser.FIRST, Parser.LAST]: + conf = token.text + elif isinstance(child, Parser.OptionalExprContext): + operand_node = self.visitOptionalExpr(child) + if isinstance(operand_node, ID): + operand_node = None + elif isinstance(operand_node, Identifier): + operand_node = VarID(value=operand_node.value, **extract_token_info(child)) + + children_nodes = [ + TimeAggregation( + op="time_agg", + operand=operand_node, + period_to=period_to, + period_from=period_from, + conf=conf, + **extract_token_info(ctx), + ) + ] return op_node, children_nodes diff --git a/src/vtlengine/AST/ASTConstructorModules/ExprComponents.py b/src/vtlengine/AST/ASTConstructorModules/ExprComponents.py index 6e85d8be7..4cf1cfbb2 100644 --- a/src/vtlengine/AST/ASTConstructorModules/ExprComponents.py +++ b/src/vtlengine/AST/ASTConstructorModules/ExprComponents.py @@ -613,11 +613,11 @@ def visitTimeFunctionsComponents(self, ctx: Parser.TimeFunctionsComponentsContex Parser.YearAtomComponentContext, Parser.MonthAtomComponentContext, Parser.DayOfMonthAtomComponentContext, - Parser.DayOfYearAtomComponentContext, + Parser.DatOfYearAtomComponentContext, Parser.DayToYearAtomComponentContext, Parser.DayToMonthAtomComponentContext, - Parser.YearToDayAtomComponentContext, - Parser.MonthToDayAtomComponentContext, + Parser.YearTodayAtomComponentContext, + Parser.MonthTodayAtomComponentContext, ), ): return self.visitTimeUnaryAtomComponent(ctx) @@ -737,12 +737,15 @@ def visitCurrentDateAtomComponent(self, ctx: Parser.CurrentDateAtomComponentCont def visitDateDiffAtomComponent(self, ctx: Parser.TimeShiftAtomComponentContext): """ """ + from vtlengine.AST.ASTConstructorModules.Expr import Expr + ctx_list = list(ctx.getChildren()) c = ctx_list[0] op = c.getSymbol().text left_node = self.visitExprComponent(ctx_list[2]) - right_node = self.visitExprComponent(ctx_list[4]) + # dateTo is 'expr' (not 
exprComponent) in the new grammar + right_node = Expr().visitExpr(ctx_list[4]) return BinOp(left=left_node, op=op, right=right_node, **extract_token_info(ctx)) diff --git a/src/vtlengine/AST/ASTConstructorModules/Terminals.py b/src/vtlengine/AST/ASTConstructorModules/Terminals.py index 712f0acd4..0f0f3d24e 100644 --- a/src/vtlengine/AST/ASTConstructorModules/Terminals.py +++ b/src/vtlengine/AST/ASTConstructorModules/Terminals.py @@ -37,31 +37,45 @@ def _remove_scaped_characters(text): class Terminals(VtlVisitor): def visitConstant(self, ctx: Parser.ConstantContext): - token = ctx.children[0].getSymbol() - token_info = extract_token_info(token) - - if token.type == Parser.INTEGER_CONSTANT: - constant_node = Constant(type_="INTEGER_CONSTANT", value=int(token.text), **token_info) + # constant: signedInteger | signedNumber | BOOLEAN_CONSTANT | + # STRING_CONSTANT | NULL_CONSTANT + child = ctx.children[0] + token_info = extract_token_info(ctx) - elif token.type == Parser.NUMBER_CONSTANT: - constant_node = Constant(type_="FLOAT_CONSTANT", value=float(token.text), **token_info) + if isinstance(child, Parser.SignedIntegerContext): + constant_node = Constant( + type_="INTEGER_CONSTANT", + value=self.visitSignedInteger(child), + **token_info, + ) - elif token.type == Parser.BOOLEAN_CONSTANT: - if token.text == "true": - constant_node = Constant(type_="BOOLEAN_CONSTANT", value=True, **token_info) - elif token.text == "false": - constant_node = Constant(type_="BOOLEAN_CONSTANT", value=False, **token_info) - else: - raise NotImplementedError + elif isinstance(child, Parser.SignedNumberContext): + constant_node = Constant( + type_="FLOAT_CONSTANT", + value=self.visitSignedNumber(child), + **token_info, + ) - elif token.type == Parser.STRING_CONSTANT: - constant_node = Constant(type_="STRING_CONSTANT", value=token.text[1:-1], **token_info) + else: + token = child.getSymbol() + if token.type == Parser.BOOLEAN_CONSTANT: + if token.text == "true": + constant_node = 
Constant(type_="BOOLEAN_CONSTANT", value=True, **token_info) + elif token.text == "false": + constant_node = Constant(type_="BOOLEAN_CONSTANT", value=False, **token_info) + else: + raise NotImplementedError + + elif token.type == Parser.STRING_CONSTANT: + constant_node = Constant( + type_="STRING_CONSTANT", value=token.text[1:-1], **token_info + ) - elif token.type == Parser.NULL_CONSTANT: - constant_node = Constant(type_="NULL_CONSTANT", value=None, **token_info) + elif token.type == Parser.NULL_CONSTANT: + constant_node = Constant(type_="NULL_CONSTANT", value=None, **token_info) - else: - raise NotImplementedError + else: + raise NotImplementedError return constant_node @@ -171,7 +185,10 @@ def visitValueDomainName(self, ctx: Parser.ValueDomainNameContext): ) def visitValueDomainValue(self, ctx: Parser.ValueDomainValueContext): - return _remove_scaped_characters(ctx.children[0].getSymbol().text) + child = ctx.children[0] + if isinstance(child, (Parser.SignedIntegerContext, Parser.SignedNumberContext)): + return child.getText() + return _remove_scaped_characters(child.getSymbol().text) def visitRoutineName(self, ctx: Parser.RoutineNameContext): """ @@ -601,7 +618,12 @@ def visitAlias(self, ctx: Parser.AliasContext): return ctx.children[0].getSymbol().text def visitSignedInteger(self, ctx: Parser.SignedIntegerContext): - return int(ctx.children[0].getSymbol().text) + # signedInteger: (MINUS|PLUS)? INTEGER_CONSTANT + return int(ctx.getText()) + + def visitSignedNumber(self, ctx: Parser.SignedNumberContext): + # signedNumber: (MINUS|PLUS)? 
NUMBER_CONSTANT + return float(ctx.getText()) def visitComparisonOperand(self, ctx: Parser.ComparisonOperandContext): return ctx.children[0].getSymbol().text @@ -756,21 +778,20 @@ def visitOrderByItem(self, ctx: Parser.OrderByItemContext): ) def visitLimitClauseItem(self, ctx: Parser.LimitClauseItemContext): + # limitClauseItem: signedInteger limitDir=PRECEDING + # | signedInteger limitDir=FOLLOWING + # | CURRENT DATA POINT + # | UNBOUNDED limitDir=PRECEDING + # | UNBOUNDED limitDir=FOLLOWING ctx_list = list(ctx.getChildren()) c = ctx_list[0] - if c.getSymbol().text.lower() == "unbounded": - result = -1 + if isinstance(c, Parser.SignedIntegerContext): + result = self.visitSignedInteger(c) + return result, ctx.limitDir.text + elif c.getSymbol().text.lower() == "unbounded": + return -1, ctx.limitDir.text elif c.getSymbol().text == "current": - result = 0 - return result, ctx_list[0].getSymbol().text - else: - result = int(c.getSymbol().text) - if result < 0: - raise Exception( - f"Cannot use negative numbers ({result}) on limitClause, line {c.symbol.line}" - ) - - return result, ctx_list[1].getSymbol().text + return 0, ctx_list[0].getSymbol().text def create_windowing(win_mode, values, modes, token_info): diff --git a/src/vtlengine/AST/Grammar/Vtl.g4 b/src/vtlengine/AST/Grammar/Vtl.g4 index 7b2cbbb6d..8873f6dca 100644 --- a/src/vtlengine/AST/Grammar/Vtl.g4 +++ b/src/vtlengine/AST/Grammar/Vtl.g4 @@ -26,7 +26,7 @@ expr: | left=expr op=AND right=expr # booleanExpr | left=expr op=(OR|XOR) right=expr # booleanExpr | IF conditionalExpr=expr THEN thenExpr=expr ELSE elseExpr=expr # ifExpr - | CASE (WHEN expr THEN expr)+ ELSE expr # caseExpr + | CASE (WHEN condExpr+=expr THEN thenExpr+=expr)+ ELSE elseExpr=expr # caseExpr | constant # constantExpr | varID # varIdExpr @@ -35,19 +35,19 @@ expr: exprComponent: - LPAREN exprComponent RPAREN # parenthesisExprComp - | functionsComponents # functionsExpressionComp - | op=(PLUS|MINUS|NOT) right=exprComponent # unaryExprComp - | 
left=exprComponent op=(MUL|DIV) right=exprComponent # arithmeticExprComp - | left=exprComponent op=(PLUS|MINUS|CONCAT) right=exprComponent # arithmeticExprOrConcatComp - | left=exprComponent comparisonOperand right=exprComponent # comparisonExprComp - | left=exprComponent op=(IN|NOT_IN)(lists|valueDomainID) # inNotInExprComp - | left=exprComponent op=AND right=exprComponent # booleanExprComp - | left=exprComponent op=(OR|XOR) right=exprComponent # booleanExprComp - | IF conditionalExpr=exprComponent THEN thenExpr=exprComponent ELSE elseExpr=exprComponent # ifExprComp - | CASE (WHEN exprComponent THEN exprComponent)+ ELSE exprComponent # caseExprComp - | constant # constantExprComp - | componentID # compId + LPAREN exprComponent RPAREN # parenthesisExprComp + | functionsComponents # functionsExpressionComp + | op=(PLUS|MINUS|NOT) right=exprComponent # unaryExprComp + | left=exprComponent op=(MUL|DIV) right=exprComponent # arithmeticExprComp + | left=exprComponent op=(PLUS|MINUS|CONCAT) right=exprComponent # arithmeticExprOrConcatComp + | left=exprComponent comparisonOperand right=exprComponent # comparisonExprComp + | left=exprComponent op=(IN|NOT_IN)(lists|valueDomainID) # inNotInExprComp + | left=exprComponent op=AND right=exprComponent # booleanExprComp + | left=exprComponent op=(OR|XOR) right=exprComponent # booleanExprComp + | IF conditionalExpr=exprComponent THEN thenExpr=exprComponent ELSE elseExpr=exprComponent # ifExprComp + | CASE (WHEN condExpr+=exprComponent THEN thenExpr+=exprComponent)+ ELSE elseExpr=exprComponent # caseExprComp + | constant # constantExprComp + | componentID # compId ; functionsComponents: @@ -209,10 +209,10 @@ comparisonOperatorsComponent: timeOperators: PERIOD_INDICATOR LPAREN expr? RPAREN # periodAtom - | FILL_TIME_SERIES LPAREN expr (COMMA (SINGLE|ALL))? RPAREN # fillTimeAtom + | FILL_TIME_SERIES LPAREN expr (COMMA op=(SINGLE|ALL))? 
RPAREN # fillTimeAtom | op=(FLOW_TO_STOCK | STOCK_TO_FLOW) LPAREN expr RPAREN # flowAtom | TIMESHIFT LPAREN expr COMMA signedInteger RPAREN # timeShiftAtom - | TIME_AGG LPAREN periodIndTo=STRING_CONSTANT (COMMA periodIndFrom=(STRING_CONSTANT| OPTIONAL ))? (COMMA op=optionalExpr)? (COMMA (FIRST|LAST))? RPAREN # timeAggAtom + | TIME_AGG LPAREN periodIndTo=STRING_CONSTANT (COMMA periodIndFrom=(STRING_CONSTANT| OPTIONAL ))? (COMMA op=optionalExpr)? (COMMA delim=(FIRST|LAST))? RPAREN # timeAggAtom | CURRENT_DATE LPAREN RPAREN # currentDateAtom | DATEDIFF LPAREN dateFrom=expr COMMA dateTo=expr RPAREN # dateDiffAtom | DATEADD LPAREN op=expr COMMA shiftNumber=expr COMMA periodInd=expr RPAREN # dateAddAtom @@ -222,27 +222,27 @@ timeOperators: | DAYOFYEAR LPAREN expr RPAREN # dayOfYearAtom | DAYTOYEAR LPAREN expr RPAREN # dayToYearAtom | DAYTOMONTH LPAREN expr RPAREN # dayToMonthAtom - | YEARTODAY LPAREN expr RPAREN # yearToDayAtom - | MONTHTODAY LPAREN expr RPAREN # monthToDayAtom + | YEARTODAY LPAREN expr RPAREN # yearTodayAtom + | MONTHTODAY LPAREN expr RPAREN # monthTodayAtom ; timeOperatorsComponent: PERIOD_INDICATOR LPAREN exprComponent? RPAREN # periodAtomComponent - | FILL_TIME_SERIES LPAREN exprComponent (COMMA (SINGLE|ALL))? RPAREN # fillTimeAtomComponent + | FILL_TIME_SERIES LPAREN exprComponent (COMMA op=(SINGLE|ALL))? RPAREN # fillTimeAtomComponent | op=(FLOW_TO_STOCK | STOCK_TO_FLOW) LPAREN exprComponent RPAREN # flowAtomComponent | TIMESHIFT LPAREN exprComponent COMMA signedInteger RPAREN # timeShiftAtomComponent - | TIME_AGG LPAREN periodIndTo=STRING_CONSTANT (COMMA periodIndFrom=(STRING_CONSTANT| OPTIONAL ))? (COMMA op=optionalExprComponent)? (COMMA (FIRST|LAST))? RPAREN # timeAggAtomComponent + | TIME_AGG LPAREN periodIndTo=STRING_CONSTANT (COMMA periodIndFrom=(STRING_CONSTANT| OPTIONAL ))? (COMMA op=optionalExprComponent)? (COMMA delim=(FIRST|LAST))? 
RPAREN # timeAggAtomComponent | CURRENT_DATE LPAREN RPAREN # currentDateAtomComponent - | DATEDIFF LPAREN dateFrom=exprComponent COMMA dateTo=exprComponent RPAREN # dateDiffAtomComponent + | DATEDIFF LPAREN dateFrom=exprComponent COMMA dateTo=expr RPAREN # dateDiffAtomComponent | DATEADD LPAREN op=exprComponent COMMA shiftNumber=exprComponent COMMA periodInd=exprComponent RPAREN # dateAddAtomComponent | YEAR_OP LPAREN exprComponent RPAREN # yearAtomComponent | MONTH_OP LPAREN exprComponent RPAREN # monthAtomComponent | DAYOFMONTH LPAREN exprComponent RPAREN # dayOfMonthAtomComponent - | DAYOFYEAR LPAREN exprComponent RPAREN # dayOfYearAtomComponent + | DAYOFYEAR LPAREN exprComponent RPAREN # datOfYearAtomComponent | DAYTOYEAR LPAREN exprComponent RPAREN # dayToYearAtomComponent | DAYTOMONTH LPAREN exprComponent RPAREN # dayToMonthAtomComponent - | YEARTODAY LPAREN exprComponent RPAREN # yearToDayAtomComponent - | MONTHTODAY LPAREN exprComponent RPAREN # monthToDayAtomComponent + | YEARTODAY LPAREN exprComponent RPAREN # yearTodayAtomComponent + | MONTHTODAY LPAREN exprComponent RPAREN # monthTodayAtomComponent ; setOperators: @@ -331,7 +331,7 @@ aggrOperatorsGrouping: | FIRST_VALUE | LAST_VALUE) LPAREN exprComponent OVER LPAREN (partition=partitionByClause? orderBy=orderByClause? windowing=windowingClause?)RPAREN RPAREN #anSimpleFunctionComponent - | op=(LAG |LEAD) LPAREN exprComponent (COMMA offet=signedInteger(defaultValue=scalarItem)?)? OVER LPAREN (partition=partitionByClause? orderBy=orderByClause) RPAREN RPAREN # lagOrLeadAnComponent + | op=(LAG |LEAD) LPAREN exprComponent (COMMA offset=signedInteger(defaultValue=scalarItem)?)? OVER LPAREN (partition=partitionByClause? orderBy=orderByClause) RPAREN RPAREN # lagOrLeadAnComponent | op=RANK LPAREN OVER LPAREN (partition=partitionByClause? 
orderBy=orderByClause) RPAREN RPAREN # rankAnComponent | op=RATIO_TO_REPORT LPAREN exprComponent OVER LPAREN (partition=partitionByClause) RPAREN RPAREN # ratioToReportAnComponent ; @@ -363,7 +363,7 @@ calcClauseItem: /*SUBSPACE CLAUSE*/ subspaceClauseItem: - componentID EQ (scalarItem | varID) + componentID EQ scalarItem ; scalarItem: @@ -418,22 +418,26 @@ windowingClause: ; signedInteger: - INTEGER_CONSTANT + (MINUS|PLUS)?INTEGER_CONSTANT +; + +signedNumber: + (MINUS|PLUS)?NUMBER_CONSTANT ; limitClauseItem: - INTEGER_CONSTANT PRECEDING - | INTEGER_CONSTANT FOLLOWING + signedInteger limitDir=PRECEDING + | signedInteger limitDir=FOLLOWING | CURRENT DATA POINT - | UNBOUNDED PRECEDING - | UNBOUNDED FOLLOWING + | UNBOUNDED limitDir=PRECEDING + | UNBOUNDED limitDir=FOLLOWING ; /*--------------------------------------------END ANALYTIC CLAUSE -----------------------------------------------*/ /* ------------------------------------------------------------ GROUPING CLAUSE ------------------------------------*/ groupingClause: - GROUP op=(BY | EXCEPT) componentID (COMMA componentID)* # groupByOrExcept - | GROUP ALL exprComponent # groupAll + GROUP op=(BY | EXCEPT) componentID (COMMA componentID)* ( TIME_AGG LPAREN STRING_CONSTANT (COMMA delim=(FIRST|LAST))? RPAREN )? # groupByOrExcept + | GROUP ALL ( TIME_AGG LPAREN STRING_CONSTANT (COMMA (STRING_CONSTANT|OPTIONAL))? (COMMA optionalExpr)? (COMMA delim=(FIRST|LAST))? RPAREN )? 
# groupAll ; havingClause: @@ -556,8 +560,8 @@ codeItemRelationClause: valueDomainValue: IDENTIFIER - | INTEGER_CONSTANT - | NUMBER_CONSTANT + | signedInteger + | signedNumber ; scalarTypeConstraint: @@ -679,8 +683,8 @@ routineName: ; constant: - INTEGER_CONSTANT - | NUMBER_CONSTANT + signedInteger + | signedNumber | BOOLEAN_CONSTANT | STRING_CONSTANT | NULL_CONSTANT @@ -702,4 +706,4 @@ basicScalarType: retainType: BOOLEAN_CONSTANT | ALL -; \ No newline at end of file +; diff --git a/src/vtlengine/AST/Grammar/VtlTokens.g4 b/src/vtlengine/AST/Grammar/VtlTokens.g4 index 021e82d2a..062aefd24 100644 --- a/src/vtlengine/AST/Grammar/VtlTokens.g4 +++ b/src/vtlengine/AST/Grammar/VtlTokens.g4 @@ -36,8 +36,8 @@ lexer grammar VtlTokens; CURRENT_DATE : 'current_date'; DATEDIFF : 'datediff'; DATEADD : 'dateadd'; - YEAR_OP : 'year'; - MONTH_OP : 'month'; + YEAR_OP : 'getyear'; + MONTH_OP : 'getmonth'; DAYOFMONTH : 'dayofmonth'; DAYOFYEAR : 'dayofyear'; DAYTOYEAR : 'daytoyear'; @@ -258,16 +258,13 @@ DIGITS0_9: '0'..'9' ; -INTEGER_CONSTANT - : - MINUS?DIGITS0_9+ - ; +INTEGER_CONSTANT: + DIGITS0_9+ +; -NUMBER_CONSTANT - : - INTEGER_CONSTANT '.' INTEGER_CONSTANT* /*FLOATEXP? - | INTEGER_CONSTANT+ FLOATEXP*/ - ; +NUMBER_CONSTANT: + INTEGER_CONSTANT '.' INTEGER_CONSTANT +; BOOLEAN_CONSTANT : @@ -275,6 +272,18 @@ BOOLEAN_CONSTANT | 'false' ; +/* +TIME_UNIT + : + '"A"' + |'"S"' + |'"M"' + |'"Q"' + |'"W"' + |'"D"' + |'"T"' + ;*/ + STRING_CONSTANT : '"' (~'"')* '"' @@ -356,18 +365,6 @@ IDENTIFIER | (MONTH MINUS DAY MINUS YEAR) ;*/ -/*TIME_UNIT - : - 'A' - |'S' - |'M' - |'Q' - |'W' - |'D' - |'T' - ;*/ - - /* old TIME : @@ -390,11 +387,11 @@ EOL ML_COMMENT : - ('/*' (.)*? '*/')-> channel(2); + '/*' .*? '*/' -> channel(2); SL_COMMENT : - ('//' (.)*? 
'\n') ->channel(2); + '//' ~[\r\n]* ->channel(2); /* @@ -406,4 +403,4 @@ FREQUENCY | 'M' | 'W' | 'D' - ;*/ \ No newline at end of file + ;*/ diff --git a/src/vtlengine/AST/Grammar/lexer.py b/src/vtlengine/AST/Grammar/lexer.py index b7cf9ab69..bed9dd3ca 100644 --- a/src/vtlengine/AST/Grammar/lexer.py +++ b/src/vtlengine/AST/Grammar/lexer.py @@ -14,7 +14,7 @@ def serializedATN(): with StringIO() as buf: buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\u00fb") - buf.write("\u0956\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\u0953\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") @@ -76,252 +76,251 @@ def serializedATN(): buf.write("\3\34\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36\3\36\3\36") buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\37\3\37\3\37") buf.write("\3\37\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3") - buf.write(' \3!\3!\3!\3!\3!\3"\3"\3"\3"\3"\3"\3#\3#\3#\3#\3') - buf.write("#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3%\3") - buf.write("%\3%\3%\3%\3%\3%\3%\3%\3%\3&\3&\3&\3&\3&\3&\3&\3&\3&\3") - buf.write("&\3&\3'\3'\3'\3'\3'\3'\3'\3'\3'\3'\3(\3(\3(") - buf.write("\3(\3(\3(\3(\3(\3(\3(\3(\3)\3)\3)\3*\3*\3*\3*\3*\3+\3") - buf.write("+\3+\3+\3+\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3-\3-\3-\3-\3") - buf.write(".\3.\3.\3.\3.\3.\3.\3/\3/\3/\3\60\3\60\3\60\3\60\3\61") - buf.write("\3\61\3\61\3\62\3\62\3\62\3\62\3\63\3\63\3\63\3\63\3\64") - buf.write("\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\3\65\3\66") - buf.write("\3\66\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67\3\67") - buf.write("\38\38\38\38\38\38\38\39\39\39\3:\3:\3:\3:\3:\3:\3;\3") - buf.write(";\3;\3;\3;\3<\3<\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3=\3") - buf.write("=\3=\3=\3=\3>\3>\3>\3>\3>\3>\3>\3?\3?\3?\3?\3?\3@\3@\3") - 
buf.write("@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3A\3A\3A\3A\3B\3B\3B\3") - buf.write("B\3B\3B\3B\3C\3C\3C\3C\3C\3C\3D\3D\3D\3D\3D\3D\3D\3D\3") - buf.write("D\3D\3E\3E\3E\3F\3F\3F\3F\3F\3F\3F\3G\3G\3G\3G\3G\3G\3") - buf.write("G\3G\3G\3G\3H\3H\3H\3H\3H\3H\3H\3H\3H\3H\3I\3I\3I\3I\3") - buf.write("J\3J\3J\3J\3J\3K\3K\3K\3K\3K\3K\3K\3K\3K\3K\3K\3L\3L\3") - buf.write("L\3L\3L\3L\3M\3M\3M\3N\3N\3N\3N\3N\3O\3O\3O\3O\3P\3P\3") - buf.write("P\3P\3P\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3S\3S\3S\3S\3S\3S\3T\3") - buf.write("T\3T\3T\3T\3U\3U\3U\3U\3U\3U\3U\3U\3V\3V\3V\3V\3W\3W\3") - buf.write("W\3W\3X\3X\3X\3Y\3Y\3Y\3Y\3Z\3Z\3Z\3Z\3Z\3Z\3[\3[\3[\3") - buf.write("[\3[\3[\3\\\3\\\3\\\3\\\3\\\3\\\3]\3]\3]\3]\3^\3^\3^\3") - buf.write("^\3^\3^\3^\3_\3_\3_\3`\3`\3`\3`\3`\3a\3a\3a\3a\3a\3a\3") - buf.write("b\3b\3b\3b\3b\3b\3c\3c\3c\3c\3c\3c\3c\3d\3d\3d\3d\3e\3") - buf.write("e\3e\3e\3f\3f\3f\3f\3f\3f\3f\3g\3g\3g\3g\3g\3g\3h\3h\3") - buf.write("h\3h\3h\3h\3h\3h\3h\3h\3h\3i\3i\3i\3i\3i\3i\3i\3i\3j\3") - buf.write("j\3j\3j\3j\3j\3j\3j\3j\3j\3k\3k\3k\3k\3k\3k\3k\3l\3l\3") - buf.write("l\3l\3l\3l\3m\3m\3m\3m\3n\3n\3n\3n\3n\3n\3n\3n\3n\3n\3") - buf.write("n\3n\3n\3n\3o\3o\3o\3o\3o\3o\3p\3p\3p\3p\3p\3p\3p\3p\3") - buf.write("p\3p\3p\3p\3p\3p\3p\3p\3p\3q\3q\3q\3q\3q\3r\3r\3r\3r\3") - buf.write("s\3s\3s\3s\3s\3s\3s\3s\3s\3s\3t\3t\3u\3u\3u\3u\3u\3u\3") - buf.write("u\3u\3v\3v\3v\3v\3v\3v\3v\3v\3v\3v\3v\3v\3w\3w\3w\3w\3") - buf.write("w\3w\3w\3w\3w\3x\3x\3x\3x\3x\3y\3y\3y\3y\3y\3y\3y\3y\3") - buf.write("y\3y\3z\3z\3z\3z\3z\3z\3z\3z\3{\3{\3{\3{\3{\3{\3{\3{\3") - buf.write("{\3|\3|\3|\3|\3|\3|\3|\3}\3}\3}\3~\3~\3~\3~\3~\3~\3~\3") - buf.write("~\3~\3~\3\177\3\177\3\177\3\177\3\177\3\177\3\177\3\177") - buf.write("\3\177\3\177\3\177\3\177\3\177\3\u0080\3\u0080\3\u0080") - buf.write("\3\u0080\3\u0080\3\u0080\3\u0080\3\u0080\3\u0081\3\u0081") - buf.write("\3\u0081\3\u0081\3\u0081\3\u0082\3\u0082\3\u0082\3\u0082") + buf.write(' \3!\3!\3!\3!\3!\3!\3!\3!\3"\3"\3"\3"\3"\3"\3"') + 
buf.write('\3"\3"\3#\3#\3#\3#\3#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$') + buf.write("\3$\3$\3$\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3&\3") + buf.write("&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3'\3'\3'\3'\3'\3'\3") + buf.write("'\3'\3'\3'\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3)\3)") + buf.write("\3)\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,\3-\3") + buf.write("-\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3") + buf.write("\60\3\60\3\60\3\60\3\61\3\61\3\61\3\62\3\62\3\62\3\62") + buf.write("\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\64") + buf.write("\3\64\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\66\3\66") + buf.write("\3\67\3\67\3\67\3\67\3\67\38\38\38\38\38\38\38\39\39\3") + buf.write("9\3:\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3<\3<\3<\3<\3<\3<\3") + buf.write("<\3<\3=\3=\3=\3=\3=\3=\3=\3=\3=\3=\3>\3>\3>\3>\3>\3>\3") + buf.write(">\3?\3?\3?\3?\3?\3@\3@\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3") + buf.write("A\3A\3A\3A\3A\3B\3B\3B\3B\3B\3B\3B\3C\3C\3C\3C\3C\3C\3") + buf.write("D\3D\3D\3D\3D\3D\3D\3D\3D\3D\3E\3E\3E\3F\3F\3F\3F\3F\3") + buf.write("F\3F\3G\3G\3G\3G\3G\3G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3H\3") + buf.write("H\3H\3H\3H\3I\3I\3I\3I\3J\3J\3J\3J\3J\3K\3K\3K\3K\3K\3") + buf.write("K\3K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3L\3M\3M\3M\3N\3N\3N\3") + buf.write("N\3N\3O\3O\3O\3O\3P\3P\3P\3P\3P\3Q\3Q\3Q\3Q\3R\3R\3R\3") + buf.write("R\3S\3S\3S\3S\3S\3S\3T\3T\3T\3T\3T\3U\3U\3U\3U\3U\3U\3") + buf.write("U\3U\3V\3V\3V\3V\3W\3W\3W\3W\3X\3X\3X\3Y\3Y\3Y\3Y\3Z\3") + buf.write("Z\3Z\3Z\3Z\3Z\3[\3[\3[\3[\3[\3[\3\\\3\\\3\\\3\\\3\\\3") + buf.write("\\\3]\3]\3]\3]\3^\3^\3^\3^\3^\3^\3^\3_\3_\3_\3`\3`\3`") + buf.write("\3`\3`\3a\3a\3a\3a\3a\3a\3b\3b\3b\3b\3b\3b\3c\3c\3c\3") + buf.write("c\3c\3c\3c\3d\3d\3d\3d\3e\3e\3e\3e\3f\3f\3f\3f\3f\3f\3") + buf.write("f\3g\3g\3g\3g\3g\3g\3h\3h\3h\3h\3h\3h\3h\3h\3h\3h\3h\3") + buf.write("i\3i\3i\3i\3i\3i\3i\3i\3j\3j\3j\3j\3j\3j\3j\3j\3j\3j\3") + buf.write("k\3k\3k\3k\3k\3k\3k\3l\3l\3l\3l\3l\3l\3m\3m\3m\3m\3n\3") + 
buf.write("n\3n\3n\3n\3n\3n\3n\3n\3n\3n\3n\3n\3n\3o\3o\3o\3o\3o\3") + buf.write("o\3p\3p\3p\3p\3p\3p\3p\3p\3p\3p\3p\3p\3p\3p\3p\3p\3p\3") + buf.write("q\3q\3q\3q\3q\3r\3r\3r\3r\3s\3s\3s\3s\3s\3s\3s\3s\3s\3") + buf.write("s\3t\3t\3u\3u\3u\3u\3u\3u\3u\3u\3v\3v\3v\3v\3v\3v\3v\3") + buf.write("v\3v\3v\3v\3v\3w\3w\3w\3w\3w\3w\3w\3w\3w\3x\3x\3x\3x\3") + buf.write("x\3y\3y\3y\3y\3y\3y\3y\3y\3y\3y\3z\3z\3z\3z\3z\3z\3z\3") + buf.write("z\3{\3{\3{\3{\3{\3{\3{\3{\3{\3|\3|\3|\3|\3|\3|\3|\3}\3") + buf.write("}\3}\3~\3~\3~\3~\3~\3~\3~\3~\3~\3~\3\177\3\177\3\177\3") + buf.write("\177\3\177\3\177\3\177\3\177\3\177\3\177\3\177\3\177\3") + buf.write("\177\3\u0080\3\u0080\3\u0080\3\u0080\3\u0080\3\u0080\3") + buf.write("\u0080\3\u0080\3\u0081\3\u0081\3\u0081\3\u0081\3\u0081") + buf.write("\3\u0082\3\u0082\3\u0082\3\u0082\3\u0083\3\u0083\3\u0083") buf.write("\3\u0083\3\u0083\3\u0083\3\u0083\3\u0083\3\u0083\3\u0083") - buf.write("\3\u0083\3\u0083\3\u0083\3\u0083\3\u0083\3\u0083\3\u0084") - buf.write("\3\u0084\3\u0084\3\u0084\3\u0084\3\u0084\3\u0085\3\u0085") - buf.write("\3\u0085\3\u0085\3\u0085\3\u0085\3\u0086\3\u0086\3\u0086") - buf.write("\3\u0086\3\u0086\3\u0086\3\u0087\3\u0087\3\u0087\3\u0087") - buf.write("\3\u0087\3\u0087\3\u0087\3\u0087\3\u0088\3\u0088\3\u0088") - buf.write("\3\u0088\3\u0088\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089") - buf.write("\3\u0089\3\u008a\3\u008a\3\u008a\3\u008a\3\u008a\3\u008b") - buf.write("\3\u008b\3\u008b\3\u008b\3\u008c\3\u008c\3\u008c\3\u008c") - buf.write("\3\u008c\3\u008c\3\u008c\3\u008c\3\u008d\3\u008d\3\u008d") - buf.write("\3\u008d\3\u008d\3\u008d\3\u008d\3\u008d\3\u008d\3\u008d") - buf.write("\3\u008d\3\u008e\3\u008e\3\u008e\3\u008e\3\u008e\3\u008e") - buf.write("\3\u008e\3\u008e\3\u008e\3\u008e\3\u008e\3\u008e\3\u008f") - buf.write("\3\u008f\3\u008f\3\u008f\3\u008f\3\u008f\3\u008f\3\u008f") - buf.write("\3\u0090\3\u0090\3\u0090\3\u0090\3\u0090\3\u0090\3\u0090") - buf.write("\3\u0090\3\u0090\3\u0091\3\u0091\3\u0091\3\u0091\3\u0091") - 
buf.write("\3\u0091\3\u0092\3\u0092\3\u0092\3\u0092\3\u0092\3\u0092") - buf.write("\3\u0092\3\u0093\3\u0093\3\u0093\3\u0093\3\u0093\3\u0093") - buf.write("\3\u0093\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094") - buf.write("\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0095") + buf.write("\3\u0083\3\u0083\3\u0083\3\u0084\3\u0084\3\u0084\3\u0084") + buf.write("\3\u0084\3\u0084\3\u0085\3\u0085\3\u0085\3\u0085\3\u0085") + buf.write("\3\u0085\3\u0086\3\u0086\3\u0086\3\u0086\3\u0086\3\u0086") + buf.write("\3\u0087\3\u0087\3\u0087\3\u0087\3\u0087\3\u0087\3\u0087") + buf.write("\3\u0087\3\u0088\3\u0088\3\u0088\3\u0088\3\u0088\3\u0089") + buf.write("\3\u0089\3\u0089\3\u0089\3\u0089\3\u0089\3\u008a\3\u008a") + buf.write("\3\u008a\3\u008a\3\u008a\3\u008b\3\u008b\3\u008b\3\u008b") + buf.write("\3\u008c\3\u008c\3\u008c\3\u008c\3\u008c\3\u008c\3\u008c") + buf.write("\3\u008c\3\u008d\3\u008d\3\u008d\3\u008d\3\u008d\3\u008d") + buf.write("\3\u008d\3\u008d\3\u008d\3\u008d\3\u008d\3\u008e\3\u008e") + buf.write("\3\u008e\3\u008e\3\u008e\3\u008e\3\u008e\3\u008e\3\u008e") + buf.write("\3\u008e\3\u008e\3\u008e\3\u008f\3\u008f\3\u008f\3\u008f") + buf.write("\3\u008f\3\u008f\3\u008f\3\u008f\3\u0090\3\u0090\3\u0090") + buf.write("\3\u0090\3\u0090\3\u0090\3\u0090\3\u0090\3\u0090\3\u0091") + buf.write("\3\u0091\3\u0091\3\u0091\3\u0091\3\u0091\3\u0092\3\u0092") + buf.write("\3\u0092\3\u0092\3\u0092\3\u0092\3\u0092\3\u0093\3\u0093") + buf.write("\3\u0093\3\u0093\3\u0093\3\u0093\3\u0093\3\u0094\3\u0094") + buf.write("\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094\3\u0094") + buf.write("\3\u0094\3\u0094\3\u0094\3\u0095\3\u0095\3\u0095\3\u0095") buf.write("\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095\3\u0095") - buf.write("\3\u0095\3\u0095\3\u0095\3\u0096\3\u0096\3\u0096\3\u0096") - buf.write("\3\u0097\3\u0097\3\u0097\3\u0097\3\u0097\3\u0098\3\u0098") + buf.write("\3\u0096\3\u0096\3\u0096\3\u0096\3\u0097\3\u0097\3\u0097") + 
buf.write("\3\u0097\3\u0097\3\u0098\3\u0098\3\u0098\3\u0098\3\u0098") buf.write("\3\u0098\3\u0098\3\u0098\3\u0098\3\u0098\3\u0098\3\u0098") - buf.write("\3\u0098\3\u0098\3\u0098\3\u0098\3\u0098\3\u0098\3\u0098") - buf.write("\3\u0099\3\u0099\3\u0099\3\u0099\3\u0099\3\u009a\3\u009a") - buf.write("\3\u009a\3\u009a\3\u009a\3\u009a\3\u009a\3\u009a\3\u009a") - buf.write("\3\u009a\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b") - buf.write("\3\u009b\3\u009b\3\u009b\3\u009b\3\u009c\3\u009c\3\u009c") - buf.write("\3\u009c\3\u009c\3\u009c\3\u009c\3\u009c\3\u009c\3\u009c") + buf.write("\3\u0098\3\u0098\3\u0098\3\u0098\3\u0099\3\u0099\3\u0099") + buf.write("\3\u0099\3\u0099\3\u009a\3\u009a\3\u009a\3\u009a\3\u009a") + buf.write("\3\u009a\3\u009a\3\u009a\3\u009a\3\u009a\3\u009b\3\u009b") + buf.write("\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b\3\u009b") + buf.write("\3\u009b\3\u009c\3\u009c\3\u009c\3\u009c\3\u009c\3\u009c") + buf.write("\3\u009c\3\u009c\3\u009c\3\u009c\3\u009d\3\u009d\3\u009d") buf.write("\3\u009d\3\u009d\3\u009d\3\u009d\3\u009d\3\u009d\3\u009d") - buf.write("\3\u009d\3\u009d\3\u009d\3\u009e\3\u009e\3\u009e\3\u009e") - buf.write("\3\u009e\3\u009f\3\u009f\3\u009f\3\u009f\3\u009f\3\u009f") - buf.write("\3\u00a0\3\u00a0\3\u00a0\3\u00a0\3\u00a0\3\u00a0\3\u00a0") - buf.write("\3\u00a0\3\u00a1\3\u00a1\3\u00a1\3\u00a1\3\u00a1\3\u00a1") + buf.write("\3\u009e\3\u009e\3\u009e\3\u009e\3\u009e\3\u009f\3\u009f") + buf.write("\3\u009f\3\u009f\3\u009f\3\u009f\3\u00a0\3\u00a0\3\u00a0") + buf.write("\3\u00a0\3\u00a0\3\u00a0\3\u00a0\3\u00a0\3\u00a1\3\u00a1") + buf.write("\3\u00a1\3\u00a1\3\u00a1\3\u00a1\3\u00a2\3\u00a2\3\u00a2") buf.write("\3\u00a2\3\u00a2\3\u00a2\3\u00a2\3\u00a2\3\u00a2\3\u00a2") buf.write("\3\u00a2\3\u00a2\3\u00a2\3\u00a2\3\u00a2\3\u00a2\3\u00a2") - buf.write("\3\u00a2\3\u00a2\3\u00a2\3\u00a3\3\u00a3\3\u00a3\3\u00a3") buf.write("\3\u00a3\3\u00a3\3\u00a3\3\u00a3\3\u00a3\3\u00a3\3\u00a3") - 
buf.write("\3\u00a3\3\u00a3\3\u00a3\3\u00a4\3\u00a4\3\u00a4\3\u00a4") + buf.write("\3\u00a3\3\u00a3\3\u00a3\3\u00a3\3\u00a3\3\u00a3\3\u00a3") + buf.write("\3\u00a4\3\u00a4\3\u00a4\3\u00a4\3\u00a4\3\u00a4\3\u00a4") buf.write("\3\u00a4\3\u00a4\3\u00a4\3\u00a4\3\u00a4\3\u00a4\3\u00a4") - buf.write("\3\u00a4\3\u00a4\3\u00a4\3\u00a5\3\u00a5\3\u00a5\3\u00a5") - buf.write("\3\u00a5\3\u00a5\3\u00a5\3\u00a5\3\u00a5\3\u00a5\3\u00a6") - buf.write("\3\u00a6\3\u00a6\3\u00a6\3\u00a6\3\u00a6\3\u00a6\3\u00a6") - buf.write("\3\u00a6\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a7") - buf.write("\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a8") - buf.write("\3\u00a8\3\u00a8\3\u00a8\3\u00a8\3\u00a8\3\u00a8\3\u00a8") - buf.write("\3\u00a8\3\u00a8\3\u00a9\3\u00a9\3\u00a9\3\u00a9\3\u00a9") - buf.write("\3\u00a9\3\u00a9\3\u00a9\3\u00aa\3\u00aa\3\u00aa\3\u00aa") - buf.write("\3\u00aa\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ab") - buf.write("\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ac") - buf.write("\3\u00ac\3\u00ac\3\u00ac\3\u00ac\3\u00ac\3\u00ac\3\u00ad") - buf.write("\3\u00ad\3\u00ad\3\u00ad\3\u00ad\3\u00ad\3\u00ad\3\u00ae") - buf.write("\3\u00ae\3\u00ae\3\u00ae\3\u00ae\3\u00af\3\u00af\3\u00af") - buf.write("\3\u00af\3\u00af\3\u00af\3\u00af\3\u00af\3\u00b0\3\u00b0") - buf.write("\3\u00b0\3\u00b0\3\u00b0\3\u00b0\3\u00b1\3\u00b1\3\u00b1") - buf.write("\3\u00b1\3\u00b1\3\u00b2\3\u00b2\3\u00b2\3\u00b2\3\u00b2") - buf.write("\3\u00b2\3\u00b2\3\u00b3\3\u00b3\3\u00b3\3\u00b3\3\u00b3") - buf.write("\3\u00b3\3\u00b3\3\u00b3\3\u00b3\3\u00b4\3\u00b4\3\u00b4") - buf.write("\3\u00b4\3\u00b4\3\u00b5\3\u00b5\3\u00b5\3\u00b6\3\u00b6") - buf.write("\3\u00b6\3\u00b7\3\u00b7\3\u00b7\3\u00b7\3\u00b7\3\u00b7") - buf.write("\3\u00b7\3\u00b7\3\u00b7\3\u00b7\3\u00b8\3\u00b8\3\u00b8") - buf.write("\3\u00b8\3\u00b8\3\u00b8\3\u00b8\3\u00b9\3\u00b9\3\u00b9") - buf.write("\3\u00ba\3\u00ba\3\u00ba\3\u00ba\3\u00ba\3\u00bb\3\u00bb") - 
buf.write("\3\u00bb\3\u00bb\3\u00bb\3\u00bc\3\u00bc\3\u00bc\3\u00bc") + buf.write("\3\u00a5\3\u00a5\3\u00a5\3\u00a5\3\u00a5\3\u00a5\3\u00a5") + buf.write("\3\u00a5\3\u00a5\3\u00a5\3\u00a6\3\u00a6\3\u00a6\3\u00a6") + buf.write("\3\u00a6\3\u00a6\3\u00a6\3\u00a6\3\u00a6\3\u00a7\3\u00a7") + buf.write("\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a7\3\u00a7") + buf.write("\3\u00a7\3\u00a7\3\u00a7\3\u00a8\3\u00a8\3\u00a8\3\u00a8") + buf.write("\3\u00a8\3\u00a8\3\u00a8\3\u00a8\3\u00a8\3\u00a8\3\u00a9") + buf.write("\3\u00a9\3\u00a9\3\u00a9\3\u00a9\3\u00a9\3\u00a9\3\u00a9") + buf.write("\3\u00aa\3\u00aa\3\u00aa\3\u00aa\3\u00aa\3\u00ab\3\u00ab") + buf.write("\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ab\3\u00ab") + buf.write("\3\u00ab\3\u00ab\3\u00ab\3\u00ac\3\u00ac\3\u00ac\3\u00ac") + buf.write("\3\u00ac\3\u00ac\3\u00ac\3\u00ad\3\u00ad\3\u00ad\3\u00ad") + buf.write("\3\u00ad\3\u00ad\3\u00ad\3\u00ae\3\u00ae\3\u00ae\3\u00ae") + buf.write("\3\u00ae\3\u00af\3\u00af\3\u00af\3\u00af\3\u00af\3\u00af") + buf.write("\3\u00af\3\u00af\3\u00b0\3\u00b0\3\u00b0\3\u00b0\3\u00b0") + buf.write("\3\u00b0\3\u00b1\3\u00b1\3\u00b1\3\u00b1\3\u00b1\3\u00b2") + buf.write("\3\u00b2\3\u00b2\3\u00b2\3\u00b2\3\u00b2\3\u00b2\3\u00b3") + buf.write("\3\u00b3\3\u00b3\3\u00b3\3\u00b3\3\u00b3\3\u00b3\3\u00b3") + buf.write("\3\u00b3\3\u00b4\3\u00b4\3\u00b4\3\u00b4\3\u00b4\3\u00b5") + buf.write("\3\u00b5\3\u00b5\3\u00b6\3\u00b6\3\u00b6\3\u00b7\3\u00b7") + buf.write("\3\u00b7\3\u00b7\3\u00b7\3\u00b7\3\u00b7\3\u00b7\3\u00b7") + buf.write("\3\u00b7\3\u00b8\3\u00b8\3\u00b8\3\u00b8\3\u00b8\3\u00b8") + buf.write("\3\u00b8\3\u00b9\3\u00b9\3\u00b9\3\u00ba\3\u00ba\3\u00ba") + buf.write("\3\u00ba\3\u00ba\3\u00bb\3\u00bb\3\u00bb\3\u00bb\3\u00bb") buf.write("\3\u00bc\3\u00bc\3\u00bc\3\u00bc\3\u00bc\3\u00bc\3\u00bc") - buf.write("\3\u00bd\3\u00bd\3\u00bd\3\u00bd\3\u00bd\3\u00bd\3\u00bd") - buf.write("\3\u00be\3\u00be\3\u00be\3\u00be\3\u00be\3\u00be\3\u00bf") - 
buf.write("\3\u00bf\3\u00bf\3\u00bf\3\u00bf\3\u00bf\3\u00c0\3\u00c0") - buf.write("\3\u00c0\3\u00c0\3\u00c0\3\u00c0\3\u00c0\3\u00c0\3\u00c1") - buf.write("\3\u00c1\3\u00c1\3\u00c1\3\u00c1\3\u00c1\3\u00c1\3\u00c2") + buf.write("\3\u00bc\3\u00bc\3\u00bc\3\u00bc\3\u00bd\3\u00bd\3\u00bd") + buf.write("\3\u00bd\3\u00bd\3\u00bd\3\u00bd\3\u00be\3\u00be\3\u00be") + buf.write("\3\u00be\3\u00be\3\u00be\3\u00bf\3\u00bf\3\u00bf\3\u00bf") + buf.write("\3\u00bf\3\u00bf\3\u00c0\3\u00c0\3\u00c0\3\u00c0\3\u00c0") + buf.write("\3\u00c0\3\u00c0\3\u00c0\3\u00c1\3\u00c1\3\u00c1\3\u00c1") + buf.write("\3\u00c1\3\u00c1\3\u00c1\3\u00c2\3\u00c2\3\u00c2\3\u00c2") buf.write("\3\u00c2\3\u00c2\3\u00c2\3\u00c2\3\u00c2\3\u00c2\3\u00c2") - buf.write("\3\u00c2\3\u00c2\3\u00c2\3\u00c3\3\u00c3\3\u00c3\3\u00c3") - buf.write("\3\u00c3\3\u00c3\3\u00c3\3\u00c3\3\u00c3\3\u00c3\3\u00c4") + buf.write("\3\u00c3\3\u00c3\3\u00c3\3\u00c3\3\u00c3\3\u00c3\3\u00c3") + buf.write("\3\u00c3\3\u00c3\3\u00c3\3\u00c4\3\u00c4\3\u00c4\3\u00c4") buf.write("\3\u00c4\3\u00c4\3\u00c4\3\u00c4\3\u00c4\3\u00c4\3\u00c4") - buf.write("\3\u00c4\3\u00c4\3\u00c4\3\u00c5\3\u00c5\3\u00c5\3\u00c5") - buf.write("\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c6") - buf.write("\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6") - buf.write("\3\u00c6\3\u00c6\3\u00c7\3\u00c7\3\u00c7\3\u00c7\3\u00c7") - buf.write("\3\u00c7\3\u00c7\3\u00c7\3\u00c8\3\u00c8\3\u00c8\3\u00c8") - buf.write("\3\u00c8\3\u00c8\3\u00c8\3\u00c9\3\u00c9\3\u00c9\3\u00c9") - buf.write("\3\u00c9\3\u00c9\3\u00c9\3\u00c9\3\u00c9\3\u00ca\3\u00ca") - buf.write("\3\u00ca\3\u00ca\3\u00ca\3\u00ca\3\u00ca\3\u00ca\3\u00cb") - buf.write("\3\u00cb\3\u00cb\3\u00cb\3\u00cb\3\u00cb\3\u00cc\3\u00cc") + buf.write("\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5\3\u00c5") + buf.write("\3\u00c5\3\u00c5\3\u00c5\3\u00c6\3\u00c6\3\u00c6\3\u00c6") + buf.write("\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c6\3\u00c7") + 
buf.write("\3\u00c7\3\u00c7\3\u00c7\3\u00c7\3\u00c7\3\u00c7\3\u00c7") + buf.write("\3\u00c8\3\u00c8\3\u00c8\3\u00c8\3\u00c8\3\u00c8\3\u00c8") + buf.write("\3\u00c9\3\u00c9\3\u00c9\3\u00c9\3\u00c9\3\u00c9\3\u00c9") + buf.write("\3\u00c9\3\u00c9\3\u00ca\3\u00ca\3\u00ca\3\u00ca\3\u00ca") + buf.write("\3\u00ca\3\u00ca\3\u00ca\3\u00cb\3\u00cb\3\u00cb\3\u00cb") + buf.write("\3\u00cb\3\u00cb\3\u00cc\3\u00cc\3\u00cc\3\u00cc\3\u00cc") buf.write("\3\u00cc\3\u00cc\3\u00cc\3\u00cc\3\u00cc\3\u00cc\3\u00cc") - buf.write("\3\u00cc\3\u00cc\3\u00cc\3\u00cd\3\u00cd\3\u00cd\3\u00cd") - buf.write("\3\u00cd\3\u00cd\3\u00cd\3\u00cd\3\u00ce\3\u00ce\3\u00ce") - buf.write("\3\u00ce\3\u00cf\3\u00cf\3\u00cf\3\u00cf\3\u00cf\3\u00cf") + buf.write("\3\u00cd\3\u00cd\3\u00cd\3\u00cd\3\u00cd\3\u00cd\3\u00cd") + buf.write("\3\u00cd\3\u00ce\3\u00ce\3\u00ce\3\u00ce\3\u00cf\3\u00cf") + buf.write("\3\u00cf\3\u00cf\3\u00cf\3\u00cf\3\u00d0\3\u00d0\3\u00d0") buf.write("\3\u00d0\3\u00d0\3\u00d0\3\u00d0\3\u00d0\3\u00d0\3\u00d0") - buf.write("\3\u00d0\3\u00d0\3\u00d0\3\u00d0\3\u00d0\3\u00d1\3\u00d1") + buf.write("\3\u00d0\3\u00d0\3\u00d1\3\u00d1\3\u00d1\3\u00d1\3\u00d1") buf.write("\3\u00d1\3\u00d1\3\u00d1\3\u00d1\3\u00d1\3\u00d1\3\u00d1") - buf.write("\3\u00d1\3\u00d1\3\u00d1\3\u00d1\3\u00d1\3\u00d1\3\u00d1") - buf.write("\3\u00d1\3\u00d2\3\u00d2\3\u00d2\3\u00d2\3\u00d2\3\u00d2") - buf.write("\3\u00d2\3\u00d3\3\u00d3\3\u00d3\3\u00d3\3\u00d3\3\u00d3") - buf.write("\3\u00d3\3\u00d3\3\u00d3\3\u00d4\3\u00d4\3\u00d4\3\u00d4") - buf.write("\3\u00d4\3\u00d4\3\u00d4\3\u00d4\3\u00d4\3\u00d5\3\u00d5") - buf.write("\3\u00d5\3\u00d5\3\u00d5\3\u00d6\3\u00d6\3\u00d6\3\u00d6") - buf.write("\3\u00d6\3\u00d6\3\u00d7\3\u00d7\3\u00d7\3\u00d7\3\u00d7") + buf.write("\3\u00d1\3\u00d1\3\u00d1\3\u00d1\3\u00d1\3\u00d2\3\u00d2") + buf.write("\3\u00d2\3\u00d2\3\u00d2\3\u00d2\3\u00d2\3\u00d3\3\u00d3") + buf.write("\3\u00d3\3\u00d3\3\u00d3\3\u00d3\3\u00d3\3\u00d3\3\u00d3") + 
buf.write("\3\u00d4\3\u00d4\3\u00d4\3\u00d4\3\u00d4\3\u00d4\3\u00d4") + buf.write("\3\u00d4\3\u00d4\3\u00d5\3\u00d5\3\u00d5\3\u00d5\3\u00d5") + buf.write("\3\u00d6\3\u00d6\3\u00d6\3\u00d6\3\u00d6\3\u00d6\3\u00d7") buf.write("\3\u00d7\3\u00d7\3\u00d7\3\u00d7\3\u00d7\3\u00d7\3\u00d7") - buf.write("\3\u00d7\3\u00d8\3\u00d8\3\u00d8\3\u00d8\3\u00d8\3\u00d8") - buf.write("\3\u00d8\3\u00d8\3\u00d8\3\u00d8\3\u00d9\3\u00d9\3\u00d9") - buf.write("\3\u00d9\3\u00d9\3\u00d9\3\u00da\3\u00da\3\u00da\3\u00da") - buf.write("\3\u00da\3\u00da\3\u00da\3\u00db\3\u00db\3\u00db\3\u00db") - buf.write("\3\u00db\3\u00dc\3\u00dc\3\u00dc\3\u00dc\3\u00dc\3\u00dc") + buf.write("\3\u00d7\3\u00d7\3\u00d7\3\u00d7\3\u00d7\3\u00d8\3\u00d8") + buf.write("\3\u00d8\3\u00d8\3\u00d8\3\u00d8\3\u00d8\3\u00d8\3\u00d8") + buf.write("\3\u00d8\3\u00d9\3\u00d9\3\u00d9\3\u00d9\3\u00d9\3\u00d9") + buf.write("\3\u00da\3\u00da\3\u00da\3\u00da\3\u00da\3\u00da\3\u00da") + buf.write("\3\u00db\3\u00db\3\u00db\3\u00db\3\u00db\3\u00dc\3\u00dc") buf.write("\3\u00dc\3\u00dc\3\u00dc\3\u00dc\3\u00dc\3\u00dc\3\u00dc") - buf.write("\3\u00dc\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd") + buf.write("\3\u00dc\3\u00dc\3\u00dc\3\u00dc\3\u00dc\3\u00dd\3\u00dd") buf.write("\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd") - buf.write("\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00de\3\u00de\3\u00de") - buf.write("\3\u00de\3\u00de\3\u00de\3\u00de\3\u00de\3\u00df\3\u00df") - buf.write("\3\u00df\3\u00df\3\u00df\3\u00df\3\u00df\3\u00df\3\u00df") + buf.write("\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd\3\u00dd") + buf.write("\3\u00dd\3\u00de\3\u00de\3\u00de\3\u00de\3\u00de\3\u00de") + buf.write("\3\u00de\3\u00de\3\u00df\3\u00df\3\u00df\3\u00df\3\u00df") buf.write("\3\u00df\3\u00df\3\u00df\3\u00df\3\u00df\3\u00df\3\u00df") + buf.write("\3\u00df\3\u00df\3\u00df\3\u00df\3\u00e0\3\u00e0\3\u00e0") buf.write("\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e0") - 
buf.write("\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e0") - buf.write("\3\u00e0\3\u00e0\3\u00e1\3\u00e1\3\u00e1\3\u00e1\3\u00e1") - buf.write("\3\u00e1\3\u00e1\3\u00e1\3\u00e1\3\u00e2\3\u00e2\3\u00e2") - buf.write("\3\u00e2\3\u00e2\3\u00e2\3\u00e2\3\u00e2\3\u00e2\3\u00e3") - buf.write("\3\u00e3\3\u00e3\3\u00e3\3\u00e3\3\u00e3\3\u00e3\3\u00e3") - buf.write("\3\u00e3\3\u00e4\3\u00e4\3\u00e4\3\u00e4\3\u00e4\3\u00e4") + buf.write("\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e0\3\u00e1") + buf.write("\3\u00e1\3\u00e1\3\u00e1\3\u00e1\3\u00e1\3\u00e1\3\u00e1") + buf.write("\3\u00e1\3\u00e2\3\u00e2\3\u00e2\3\u00e2\3\u00e2\3\u00e2") + buf.write("\3\u00e2\3\u00e2\3\u00e2\3\u00e3\3\u00e3\3\u00e3\3\u00e3") + buf.write("\3\u00e3\3\u00e3\3\u00e3\3\u00e3\3\u00e3\3\u00e4\3\u00e4") buf.write("\3\u00e4\3\u00e4\3\u00e4\3\u00e4\3\u00e4\3\u00e4\3\u00e4") + buf.write("\3\u00e4\3\u00e4\3\u00e4\3\u00e4\3\u00e5\3\u00e5\3\u00e5") buf.write("\3\u00e5\3\u00e5\3\u00e5\3\u00e5\3\u00e5\3\u00e5\3\u00e5") - buf.write("\3\u00e5\3\u00e5\3\u00e5\3\u00e5\3\u00e5\3\u00e5\3\u00e6") + buf.write("\3\u00e5\3\u00e5\3\u00e5\3\u00e6\3\u00e6\3\u00e6\3\u00e6") buf.write("\3\u00e6\3\u00e6\3\u00e6\3\u00e6\3\u00e6\3\u00e6\3\u00e6") - buf.write("\3\u00e6\3\u00e6\3\u00e6\3\u00e6\3\u00e7\3\u00e7\3\u00e7") - buf.write("\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e7") - buf.write("\3\u00e7\3\u00e7\3\u00e8\3\u00e8\3\u00e8\3\u00e8\3\u00e8") - buf.write("\3\u00e8\3\u00e8\3\u00e8\3\u00e8\3\u00e8\3\u00e8\3\u00e9") + buf.write("\3\u00e6\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e7") + buf.write("\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e7\3\u00e8") + buf.write("\3\u00e8\3\u00e8\3\u00e8\3\u00e8\3\u00e8\3\u00e8\3\u00e8") + buf.write("\3\u00e8\3\u00e8\3\u00e8\3\u00e9\3\u00e9\3\u00e9\3\u00e9") buf.write("\3\u00e9\3\u00e9\3\u00e9\3\u00e9\3\u00e9\3\u00e9\3\u00e9") - buf.write("\3\u00e9\3\u00e9\3\u00e9\3\u00e9\3\u00e9\3\u00ea\3\u00ea") - 
buf.write("\3\u00ea\3\u00ea\3\u00ea\3\u00ea\3\u00ea\3\u00eb\3\u00eb") - buf.write("\3\u00eb\3\u00eb\3\u00eb\3\u00eb\3\u00eb\3\u00eb\3\u00eb") - buf.write("\3\u00eb\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec") + buf.write("\3\u00e9\3\u00e9\3\u00ea\3\u00ea\3\u00ea\3\u00ea\3\u00ea") + buf.write("\3\u00ea\3\u00ea\3\u00eb\3\u00eb\3\u00eb\3\u00eb\3\u00eb") + buf.write("\3\u00eb\3\u00eb\3\u00eb\3\u00eb\3\u00eb\3\u00ec\3\u00ec") buf.write("\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec") buf.write("\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec") - buf.write("\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ed") - buf.write("\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed") + buf.write("\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec\3\u00ec") + buf.write("\3\u00ec\3\u00ec\3\u00ec\3\u00ed\3\u00ed\3\u00ed\3\u00ed") buf.write("\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed") buf.write("\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed") - buf.write("\3\u00ed\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee") + buf.write("\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ed\3\u00ee\3\u00ee") buf.write("\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee") buf.write("\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee") buf.write("\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee") - buf.write("\3\u00ee\3\u00ee\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00ef") + buf.write("\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ee\3\u00ef") buf.write("\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00ef") buf.write("\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00ef") buf.write("\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00ef") - buf.write("\3\u00f0\3\u00f0\3\u00f0\3\u00f0\3\u00f1\3\u00f1\3\u00f1") - buf.write("\3\u00f1\3\u00f1\3\u00f1\3\u00f1\3\u00f1\3\u00f1\3\u00f2") - buf.write("\3\u00f2\3\u00f3\3\u00f3\3\u00f4\5\u00f4\u08f9\n\u00f4") - buf.write("\3\u00f4\6\u00f4\u08fc\n\u00f4\r\u00f4\16\u00f4\u08fd") - 
buf.write("\3\u00f5\3\u00f5\3\u00f5\7\u00f5\u0903\n\u00f5\f\u00f5") - buf.write("\16\u00f5\u0906\13\u00f5\3\u00f6\3\u00f6\3\u00f6\3\u00f6") - buf.write("\3\u00f6\3\u00f6\3\u00f6\3\u00f6\3\u00f6\5\u00f6\u0911") - buf.write("\n\u00f6\3\u00f7\3\u00f7\7\u00f7\u0915\n\u00f7\f\u00f7") - buf.write("\16\u00f7\u0918\13\u00f7\3\u00f7\3\u00f7\3\u00f8\3\u00f8") - buf.write("\7\u00f8\u091e\n\u00f8\f\u00f8\16\u00f8\u0921\13\u00f8") - buf.write("\3\u00f8\3\u00f8\6\u00f8\u0925\n\u00f8\r\u00f8\16\u00f8") - buf.write("\u0926\3\u00f8\3\u00f8\7\u00f8\u092b\n\u00f8\f\u00f8\16") - buf.write("\u00f8\u092e\13\u00f8\3\u00f8\5\u00f8\u0931\n\u00f8\3") - buf.write("\u00f9\6\u00f9\u0934\n\u00f9\r\u00f9\16\u00f9\u0935\3") - buf.write("\u00f9\3\u00f9\3\u00fa\3\u00fa\3\u00fb\3\u00fb\3\u00fb") - buf.write("\3\u00fb\7\u00fb\u0940\n\u00fb\f\u00fb\16\u00fb\u0943") - buf.write("\13\u00fb\3\u00fb\3\u00fb\3\u00fb\3\u00fb\3\u00fb\3\u00fc") - buf.write("\3\u00fc\3\u00fc\3\u00fc\7\u00fc\u094e\n\u00fc\f\u00fc") - buf.write("\16\u00fc\u0951\13\u00fc\3\u00fc\3\u00fc\3\u00fc\3\u00fc") - buf.write("\5\u092c\u0941\u094f\2\u00fd\3\3\5\4\7\5\t\6\13\7\r\b") - buf.write("\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22") - buf.write("#\23%\24'\25)\26+\27-\30/\31\61\32\63\33\65\34\67\35") - buf.write("9\36;\37= ?!A\"C#E$G%I&K'M(O)Q*S+U,W-Y.[/]\60_\61a\62") - buf.write("c\63e\64g\65i\66k\67m8o9q:s;u{?}@\177A\u0081B\u0083") - buf.write("C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093") - buf.write("K\u0095L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3") - buf.write("S\u00a5T\u00a7U\u00a9V\u00abW\u00adX\u00afY\u00b1Z\u00b3") - buf.write("[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3") - buf.write("c\u00c5d\u00c7e\u00c9f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3") - buf.write("k\u00d5l\u00d7m\u00d9n\u00dbo\u00ddp\u00dfq\u00e1r\u00e3") - buf.write("s\u00e5t\u00e7u\u00e9v\u00ebw\u00edx\u00efy\u00f1z\u00f3") - buf.write("{\u00f5|\u00f7}\u00f9~\u00fb\177\u00fd\u0080\u00ff\u0081") - 
buf.write("\u0101\u0082\u0103\u0083\u0105\u0084\u0107\u0085\u0109") - buf.write("\u0086\u010b\u0087\u010d\u0088\u010f\u0089\u0111\u008a") - buf.write("\u0113\u008b\u0115\u008c\u0117\u008d\u0119\u008e\u011b") - buf.write("\u008f\u011d\u0090\u011f\u0091\u0121\u0092\u0123\u0093") - buf.write("\u0125\u0094\u0127\u0095\u0129\u0096\u012b\u0097\u012d") - buf.write("\u0098\u012f\u0099\u0131\u009a\u0133\u009b\u0135\u009c") - buf.write("\u0137\u009d\u0139\u009e\u013b\u009f\u013d\u00a0\u013f") - buf.write("\u00a1\u0141\u00a2\u0143\u00a3\u0145\u00a4\u0147\u00a5") - buf.write("\u0149\u00a6\u014b\u00a7\u014d\u00a8\u014f\u00a9\u0151") - buf.write("\u00aa\u0153\u00ab\u0155\u00ac\u0157\u00ad\u0159\u00ae") - buf.write("\u015b\u00af\u015d\u00b0\u015f\u00b1\u0161\u00b2\u0163") - buf.write("\u00b3\u0165\u00b4\u0167\u00b5\u0169\u00b6\u016b\u00b7") - buf.write("\u016d\u00b8\u016f\u00b9\u0171\u00ba\u0173\u00bb\u0175") - buf.write("\u00bc\u0177\u00bd\u0179\u00be\u017b\u00bf\u017d\u00c0") - buf.write("\u017f\u00c1\u0181\u00c2\u0183\u00c3\u0185\u00c4\u0187") - buf.write("\u00c5\u0189\u00c6\u018b\u00c7\u018d\u00c8\u018f\u00c9") - buf.write("\u0191\u00ca\u0193\u00cb\u0195\u00cc\u0197\u00cd\u0199") - buf.write("\u00ce\u019b\u00cf\u019d\u00d0\u019f\u00d1\u01a1\u00d2") - buf.write("\u01a3\u00d3\u01a5\u00d4\u01a7\u00d5\u01a9\u00d6\u01ab") - buf.write("\u00d7\u01ad\u00d8\u01af\u00d9\u01b1\u00da\u01b3\u00db") - buf.write("\u01b5\u00dc\u01b7\u00dd\u01b9\u00de\u01bb\u00df\u01bd") - buf.write("\u00e0\u01bf\u00e1\u01c1\u00e2\u01c3\u00e3\u01c5\u00e4") - buf.write("\u01c7\u00e5\u01c9\u00e6\u01cb\u00e7\u01cd\u00e8\u01cf") - buf.write("\u00e9\u01d1\u00ea\u01d3\u00eb\u01d5\u00ec\u01d7\u00ed") - buf.write("\u01d9\u00ee\u01db\u00ef\u01dd\u00f0\u01df\u00f1\u01e1") - buf.write("\u00f2\u01e3\2\u01e5\2\u01e7\u00f3\u01e9\u00f4\u01eb\u00f5") - buf.write("\u01ed\u00f6\u01ef\u00f7\u01f1\u00f8\u01f3\u00f9\u01f5") - buf.write("\u00fa\u01f7\u00fb\3\2\6\4\2C\\c|\3\2$$\7\2\60\60\62;") - 
buf.write('C\\aac|\5\2\13\f\16\17""\2\u0960\2\3\3\2\2\2\2\5\3\2') + buf.write("\3\u00ef\3\u00ef\3\u00ef\3\u00ef\3\u00f0\3\u00f0\3\u00f0") + buf.write("\3\u00f0\3\u00f1\3\u00f1\3\u00f1\3\u00f1\3\u00f1\3\u00f1") + buf.write("\3\u00f1\3\u00f1\3\u00f1\3\u00f2\3\u00f2\3\u00f3\3\u00f3") + buf.write("\3\u00f4\6\u00f4\u08ff\n\u00f4\r\u00f4\16\u00f4\u0900") + buf.write("\3\u00f5\3\u00f5\3\u00f5\3\u00f5\3\u00f6\3\u00f6\3\u00f6") + buf.write("\3\u00f6\3\u00f6\3\u00f6\3\u00f6\3\u00f6\3\u00f6\5\u00f6") + buf.write("\u0910\n\u00f6\3\u00f7\3\u00f7\7\u00f7\u0914\n\u00f7\f") + buf.write("\u00f7\16\u00f7\u0917\13\u00f7\3\u00f7\3\u00f7\3\u00f8") + buf.write("\3\u00f8\7\u00f8\u091d\n\u00f8\f\u00f8\16\u00f8\u0920") + buf.write("\13\u00f8\3\u00f8\3\u00f8\6\u00f8\u0924\n\u00f8\r\u00f8") + buf.write("\16\u00f8\u0925\3\u00f8\3\u00f8\7\u00f8\u092a\n\u00f8") + buf.write("\f\u00f8\16\u00f8\u092d\13\u00f8\3\u00f8\5\u00f8\u0930") + buf.write("\n\u00f8\3\u00f9\6\u00f9\u0933\n\u00f9\r\u00f9\16\u00f9") + buf.write("\u0934\3\u00f9\3\u00f9\3\u00fa\3\u00fa\3\u00fb\3\u00fb") + buf.write("\3\u00fb\3\u00fb\7\u00fb\u093f\n\u00fb\f\u00fb\16\u00fb") + buf.write("\u0942\13\u00fb\3\u00fb\3\u00fb\3\u00fb\3\u00fb\3\u00fb") + buf.write("\3\u00fc\3\u00fc\3\u00fc\3\u00fc\7\u00fc\u094d\n\u00fc") + buf.write("\f\u00fc\16\u00fc\u0950\13\u00fc\3\u00fc\3\u00fc\4\u092b") + buf.write("\u0940\2\u00fd\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13") + buf.write("\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24'\25)\26") + buf.write('+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A"C#') + buf.write("E$G%I&K'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66") + buf.write("k\67m8o9q:s;u{?}@\177A\u0081B\u0083C\u0085D\u0087") + buf.write("E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095L\u0097") + buf.write("M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7") + buf.write("U\u00a9V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7") + buf.write("]\u00b9^\u00bb_\u00bd`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7") + 
buf.write("e\u00c9f\u00cbg\u00cdh\u00cfi\u00d1j\u00d3k\u00d5l\u00d7") + buf.write("m\u00d9n\u00dbo\u00ddp\u00dfq\u00e1r\u00e3s\u00e5t\u00e7") + buf.write("u\u00e9v\u00ebw\u00edx\u00efy\u00f1z\u00f3{\u00f5|\u00f7") + buf.write("}\u00f9~\u00fb\177\u00fd\u0080\u00ff\u0081\u0101\u0082") + buf.write("\u0103\u0083\u0105\u0084\u0107\u0085\u0109\u0086\u010b") + buf.write("\u0087\u010d\u0088\u010f\u0089\u0111\u008a\u0113\u008b") + buf.write("\u0115\u008c\u0117\u008d\u0119\u008e\u011b\u008f\u011d") + buf.write("\u0090\u011f\u0091\u0121\u0092\u0123\u0093\u0125\u0094") + buf.write("\u0127\u0095\u0129\u0096\u012b\u0097\u012d\u0098\u012f") + buf.write("\u0099\u0131\u009a\u0133\u009b\u0135\u009c\u0137\u009d") + buf.write("\u0139\u009e\u013b\u009f\u013d\u00a0\u013f\u00a1\u0141") + buf.write("\u00a2\u0143\u00a3\u0145\u00a4\u0147\u00a5\u0149\u00a6") + buf.write("\u014b\u00a7\u014d\u00a8\u014f\u00a9\u0151\u00aa\u0153") + buf.write("\u00ab\u0155\u00ac\u0157\u00ad\u0159\u00ae\u015b\u00af") + buf.write("\u015d\u00b0\u015f\u00b1\u0161\u00b2\u0163\u00b3\u0165") + buf.write("\u00b4\u0167\u00b5\u0169\u00b6\u016b\u00b7\u016d\u00b8") + buf.write("\u016f\u00b9\u0171\u00ba\u0173\u00bb\u0175\u00bc\u0177") + buf.write("\u00bd\u0179\u00be\u017b\u00bf\u017d\u00c0\u017f\u00c1") + buf.write("\u0181\u00c2\u0183\u00c3\u0185\u00c4\u0187\u00c5\u0189") + buf.write("\u00c6\u018b\u00c7\u018d\u00c8\u018f\u00c9\u0191\u00ca") + buf.write("\u0193\u00cb\u0195\u00cc\u0197\u00cd\u0199\u00ce\u019b") + buf.write("\u00cf\u019d\u00d0\u019f\u00d1\u01a1\u00d2\u01a3\u00d3") + buf.write("\u01a5\u00d4\u01a7\u00d5\u01a9\u00d6\u01ab\u00d7\u01ad") + buf.write("\u00d8\u01af\u00d9\u01b1\u00da\u01b3\u00db\u01b5\u00dc") + buf.write("\u01b7\u00dd\u01b9\u00de\u01bb\u00df\u01bd\u00e0\u01bf") + buf.write("\u00e1\u01c1\u00e2\u01c3\u00e3\u01c5\u00e4\u01c7\u00e5") + buf.write("\u01c9\u00e6\u01cb\u00e7\u01cd\u00e8\u01cf\u00e9\u01d1") + buf.write("\u00ea\u01d3\u00eb\u01d5\u00ec\u01d7\u00ed\u01d9\u00ee") + 
buf.write("\u01db\u00ef\u01dd\u00f0\u01df\u00f1\u01e1\u00f2\u01e3") + buf.write("\2\u01e5\2\u01e7\u00f3\u01e9\u00f4\u01eb\u00f5\u01ed\u00f6") + buf.write("\u01ef\u00f7\u01f1\u00f8\u01f3\u00f9\u01f5\u00fa\u01f7") + buf.write("\u00fb\3\2\7\4\2C\\c|\3\2$$\7\2\60\60\62;C\\aac|\5\2\13") + buf.write('\f\16\17""\4\2\f\f\17\17\2\u095b\2\3\3\2\2\2\2\5\3\2') buf.write("\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2") buf.write("\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2") buf.write("\27\3\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37") @@ -397,84 +396,84 @@ def serializedATN(): buf.write("\3\2\2\2)\u0223\3\2\2\2+\u0226\3\2\2\2-\u0228\3\2\2\2") buf.write("/\u022d\3\2\2\2\61\u0230\3\2\2\2\63\u0235\3\2\2\2\65\u023a") buf.write("\3\2\2\2\67\u023f\3\2\2\29\u0245\3\2\2\2;\u024a\3\2\2") - buf.write("\2=\u0257\3\2\2\2?\u0260\3\2\2\2A\u0268\3\2\2\2C\u026d") - buf.write("\3\2\2\2E\u0273\3\2\2\2G\u027e\3\2\2\2I\u0288\3\2\2\2") - buf.write("K\u0292\3\2\2\2M\u029d\3\2\2\2O\u02a7\3\2\2\2Q\u02b2\3") - buf.write("\2\2\2S\u02b5\3\2\2\2U\u02ba\3\2\2\2W\u02bf\3\2\2\2Y\u02c4") - buf.write("\3\2\2\2[\u02cd\3\2\2\2]\u02d4\3\2\2\2_\u02d7\3\2\2\2") - buf.write("a\u02db\3\2\2\2c\u02de\3\2\2\2e\u02e2\3\2\2\2g\u02e6\3") - buf.write("\2\2\2i\u02ee\3\2\2\2k\u02f1\3\2\2\2m\u02f8\3\2\2\2o\u02fd") - buf.write("\3\2\2\2q\u0304\3\2\2\2s\u0307\3\2\2\2u\u030d\3\2\2\2") - buf.write("w\u0312\3\2\2\2y\u031a\3\2\2\2{\u0324\3\2\2\2}\u032b\3") - buf.write("\2\2\2\177\u0330\3\2\2\2\u0081\u0338\3\2\2\2\u0083\u0341") - buf.write("\3\2\2\2\u0085\u0348\3\2\2\2\u0087\u034e\3\2\2\2\u0089") - buf.write("\u0358\3\2\2\2\u008b\u035b\3\2\2\2\u008d\u0362\3\2\2\2") - buf.write("\u008f\u036c\3\2\2\2\u0091\u0376\3\2\2\2\u0093\u037a\3") - buf.write("\2\2\2\u0095\u037f\3\2\2\2\u0097\u038a\3\2\2\2\u0099\u0390") - buf.write("\3\2\2\2\u009b\u0393\3\2\2\2\u009d\u0398\3\2\2\2\u009f") - buf.write("\u039c\3\2\2\2\u00a1\u03a1\3\2\2\2\u00a3\u03a5\3\2\2\2") - 
buf.write("\u00a5\u03a9\3\2\2\2\u00a7\u03af\3\2\2\2\u00a9\u03b4\3") - buf.write("\2\2\2\u00ab\u03bc\3\2\2\2\u00ad\u03c0\3\2\2\2\u00af\u03c4") - buf.write("\3\2\2\2\u00b1\u03c7\3\2\2\2\u00b3\u03cb\3\2\2\2\u00b5") - buf.write("\u03d1\3\2\2\2\u00b7\u03d7\3\2\2\2\u00b9\u03dd\3\2\2\2") - buf.write("\u00bb\u03e1\3\2\2\2\u00bd\u03e8\3\2\2\2\u00bf\u03eb\3") - buf.write("\2\2\2\u00c1\u03f0\3\2\2\2\u00c3\u03f6\3\2\2\2\u00c5\u03fc") - buf.write("\3\2\2\2\u00c7\u0403\3\2\2\2\u00c9\u0407\3\2\2\2\u00cb") - buf.write("\u040b\3\2\2\2\u00cd\u0412\3\2\2\2\u00cf\u0418\3\2\2\2") - buf.write("\u00d1\u0423\3\2\2\2\u00d3\u042b\3\2\2\2\u00d5\u0435\3") - buf.write("\2\2\2\u00d7\u043c\3\2\2\2\u00d9\u0442\3\2\2\2\u00db\u0446") - buf.write("\3\2\2\2\u00dd\u0454\3\2\2\2\u00df\u045a\3\2\2\2\u00e1") - buf.write("\u046b\3\2\2\2\u00e3\u0470\3\2\2\2\u00e5\u0474\3\2\2\2") - buf.write("\u00e7\u047e\3\2\2\2\u00e9\u0480\3\2\2\2\u00eb\u0488\3") - buf.write("\2\2\2\u00ed\u0494\3\2\2\2\u00ef\u049d\3\2\2\2\u00f1\u04a2") - buf.write("\3\2\2\2\u00f3\u04ac\3\2\2\2\u00f5\u04b4\3\2\2\2\u00f7") - buf.write("\u04bd\3\2\2\2\u00f9\u04c4\3\2\2\2\u00fb\u04c7\3\2\2\2") - buf.write("\u00fd\u04d1\3\2\2\2\u00ff\u04de\3\2\2\2\u0101\u04e6\3") - buf.write("\2\2\2\u0103\u04eb\3\2\2\2\u0105\u04ef\3\2\2\2\u0107\u04fc") - buf.write("\3\2\2\2\u0109\u0502\3\2\2\2\u010b\u0508\3\2\2\2\u010d") - buf.write("\u050e\3\2\2\2\u010f\u0516\3\2\2\2\u0111\u051b\3\2\2\2") - buf.write("\u0113\u0521\3\2\2\2\u0115\u0526\3\2\2\2\u0117\u052a\3") - buf.write("\2\2\2\u0119\u0532\3\2\2\2\u011b\u053d\3\2\2\2\u011d\u0549") - buf.write("\3\2\2\2\u011f\u0551\3\2\2\2\u0121\u055a\3\2\2\2\u0123") - buf.write("\u0560\3\2\2\2\u0125\u0567\3\2\2\2\u0127\u056e\3\2\2\2") - buf.write("\u0129\u057a\3\2\2\2\u012b\u0585\3\2\2\2\u012d\u0589\3") - buf.write("\2\2\2\u012f\u058e\3\2\2\2\u0131\u059e\3\2\2\2\u0133\u05a3") - buf.write("\3\2\2\2\u0135\u05ad\3\2\2\2\u0137\u05b7\3\2\2\2\u0139") - buf.write("\u05c1\3\2\2\2\u013b\u05cb\3\2\2\2\u013d\u05d0\3\2\2\2") - 
buf.write("\u013f\u05d6\3\2\2\2\u0141\u05de\3\2\2\2\u0143\u05e4\3") - buf.write("\2\2\2\u0145\u05f5\3\2\2\2\u0147\u0603\3\2\2\2\u0149\u0611") - buf.write("\3\2\2\2\u014b\u061b\3\2\2\2\u014d\u0624\3\2\2\2\u014f") - buf.write("\u0630\3\2\2\2\u0151\u063a\3\2\2\2\u0153\u0642\3\2\2\2") - buf.write("\u0155\u0647\3\2\2\2\u0157\u0653\3\2\2\2\u0159\u065a\3") - buf.write("\2\2\2\u015b\u0661\3\2\2\2\u015d\u0666\3\2\2\2\u015f\u066e") - buf.write("\3\2\2\2\u0161\u0674\3\2\2\2\u0163\u0679\3\2\2\2\u0165") - buf.write("\u0680\3\2\2\2\u0167\u0689\3\2\2\2\u0169\u068e\3\2\2\2") - buf.write("\u016b\u0691\3\2\2\2\u016d\u0694\3\2\2\2\u016f\u069e\3") - buf.write("\2\2\2\u0171\u06a5\3\2\2\2\u0173\u06a8\3\2\2\2\u0175\u06ad") - buf.write("\3\2\2\2\u0177\u06b2\3\2\2\2\u0179\u06bd\3\2\2\2\u017b") - buf.write("\u06c4\3\2\2\2\u017d\u06ca\3\2\2\2\u017f\u06d0\3\2\2\2") - buf.write("\u0181\u06d8\3\2\2\2\u0183\u06df\3\2\2\2\u0185\u06ea\3") - buf.write("\2\2\2\u0187\u06f4\3\2\2\2\u0189\u06ff\3\2\2\2\u018b\u0709") - buf.write("\3\2\2\2\u018d\u0713\3\2\2\2\u018f\u071b\3\2\2\2\u0191") - buf.write("\u0722\3\2\2\2\u0193\u072b\3\2\2\2\u0195\u0733\3\2\2\2") - buf.write("\u0197\u0739\3\2\2\2\u0199\u0745\3\2\2\2\u019b\u074d\3") - buf.write("\2\2\2\u019d\u0751\3\2\2\2\u019f\u0757\3\2\2\2\u01a1\u0763") - buf.write("\3\2\2\2\u01a3\u0774\3\2\2\2\u01a5\u077b\3\2\2\2\u01a7") - buf.write("\u0784\3\2\2\2\u01a9\u078d\3\2\2\2\u01ab\u0792\3\2\2\2") - buf.write("\u01ad\u0798\3\2\2\2\u01af\u07a5\3\2\2\2\u01b1\u07af\3") - buf.write("\2\2\2\u01b3\u07b5\3\2\2\2\u01b5\u07bc\3\2\2\2\u01b7\u07c1") - buf.write("\3\2\2\2\u01b9\u07cf\3\2\2\2\u01bb\u07e0\3\2\2\2\u01bd") - buf.write("\u07e8\3\2\2\2\u01bf\u07f8\3\2\2\2\u01c1\u0808\3\2\2\2") - buf.write("\u01c3\u0811\3\2\2\2\u01c5\u081a\3\2\2\2\u01c7\u0823\3") - buf.write("\2\2\2\u01c9\u0830\3\2\2\2\u01cb\u083d\3\2\2\2\u01cd\u0849") - buf.write("\3\2\2\2\u01cf\u0855\3\2\2\2\u01d1\u0860\3\2\2\2\u01d3") - buf.write("\u086d\3\2\2\2\u01d5\u0874\3\2\2\2\u01d7\u087e\3\2\2\2") - 
buf.write("\u01d9\u0898\3\2\2\2\u01db\u08af\3\2\2\2\u01dd\u08cc\3") - buf.write("\2\2\2\u01df\u08e6\3\2\2\2\u01e1\u08ea\3\2\2\2\u01e3\u08f3") - buf.write("\3\2\2\2\u01e5\u08f5\3\2\2\2\u01e7\u08f8\3\2\2\2\u01e9") - buf.write("\u08ff\3\2\2\2\u01eb\u0910\3\2\2\2\u01ed\u0912\3\2\2\2") - buf.write("\u01ef\u0930\3\2\2\2\u01f1\u0933\3\2\2\2\u01f3\u0939\3") - buf.write("\2\2\2\u01f5\u093b\3\2\2\2\u01f7\u0949\3\2\2\2\u01f9\u01fa") + buf.write("\2=\u0257\3\2\2\2?\u0260\3\2\2\2A\u0268\3\2\2\2C\u0270") + buf.write("\3\2\2\2E\u0279\3\2\2\2G\u0284\3\2\2\2I\u028e\3\2\2\2") + buf.write("K\u0298\3\2\2\2M\u02a3\3\2\2\2O\u02ad\3\2\2\2Q\u02b8\3") + buf.write("\2\2\2S\u02bb\3\2\2\2U\u02c0\3\2\2\2W\u02c5\3\2\2\2Y\u02ca") + buf.write("\3\2\2\2[\u02d3\3\2\2\2]\u02da\3\2\2\2_\u02dd\3\2\2\2") + buf.write("a\u02e1\3\2\2\2c\u02e4\3\2\2\2e\u02e8\3\2\2\2g\u02ec\3") + buf.write("\2\2\2i\u02f4\3\2\2\2k\u02f7\3\2\2\2m\u02fe\3\2\2\2o\u0303") + buf.write("\3\2\2\2q\u030a\3\2\2\2s\u030d\3\2\2\2u\u0313\3\2\2\2") + buf.write("w\u0318\3\2\2\2y\u0320\3\2\2\2{\u032a\3\2\2\2}\u0331\3") + buf.write("\2\2\2\177\u0336\3\2\2\2\u0081\u033e\3\2\2\2\u0083\u0347") + buf.write("\3\2\2\2\u0085\u034e\3\2\2\2\u0087\u0354\3\2\2\2\u0089") + buf.write("\u035e\3\2\2\2\u008b\u0361\3\2\2\2\u008d\u0368\3\2\2\2") + buf.write("\u008f\u0372\3\2\2\2\u0091\u037c\3\2\2\2\u0093\u0380\3") + buf.write("\2\2\2\u0095\u0385\3\2\2\2\u0097\u0390\3\2\2\2\u0099\u0396") + buf.write("\3\2\2\2\u009b\u0399\3\2\2\2\u009d\u039e\3\2\2\2\u009f") + buf.write("\u03a2\3\2\2\2\u00a1\u03a7\3\2\2\2\u00a3\u03ab\3\2\2\2") + buf.write("\u00a5\u03af\3\2\2\2\u00a7\u03b5\3\2\2\2\u00a9\u03ba\3") + buf.write("\2\2\2\u00ab\u03c2\3\2\2\2\u00ad\u03c6\3\2\2\2\u00af\u03ca") + buf.write("\3\2\2\2\u00b1\u03cd\3\2\2\2\u00b3\u03d1\3\2\2\2\u00b5") + buf.write("\u03d7\3\2\2\2\u00b7\u03dd\3\2\2\2\u00b9\u03e3\3\2\2\2") + buf.write("\u00bb\u03e7\3\2\2\2\u00bd\u03ee\3\2\2\2\u00bf\u03f1\3") + buf.write("\2\2\2\u00c1\u03f6\3\2\2\2\u00c3\u03fc\3\2\2\2\u00c5\u0402") + 
buf.write("\3\2\2\2\u00c7\u0409\3\2\2\2\u00c9\u040d\3\2\2\2\u00cb") + buf.write("\u0411\3\2\2\2\u00cd\u0418\3\2\2\2\u00cf\u041e\3\2\2\2") + buf.write("\u00d1\u0429\3\2\2\2\u00d3\u0431\3\2\2\2\u00d5\u043b\3") + buf.write("\2\2\2\u00d7\u0442\3\2\2\2\u00d9\u0448\3\2\2\2\u00db\u044c") + buf.write("\3\2\2\2\u00dd\u045a\3\2\2\2\u00df\u0460\3\2\2\2\u00e1") + buf.write("\u0471\3\2\2\2\u00e3\u0476\3\2\2\2\u00e5\u047a\3\2\2\2") + buf.write("\u00e7\u0484\3\2\2\2\u00e9\u0486\3\2\2\2\u00eb\u048e\3") + buf.write("\2\2\2\u00ed\u049a\3\2\2\2\u00ef\u04a3\3\2\2\2\u00f1\u04a8") + buf.write("\3\2\2\2\u00f3\u04b2\3\2\2\2\u00f5\u04ba\3\2\2\2\u00f7") + buf.write("\u04c3\3\2\2\2\u00f9\u04ca\3\2\2\2\u00fb\u04cd\3\2\2\2") + buf.write("\u00fd\u04d7\3\2\2\2\u00ff\u04e4\3\2\2\2\u0101\u04ec\3") + buf.write("\2\2\2\u0103\u04f1\3\2\2\2\u0105\u04f5\3\2\2\2\u0107\u0502") + buf.write("\3\2\2\2\u0109\u0508\3\2\2\2\u010b\u050e\3\2\2\2\u010d") + buf.write("\u0514\3\2\2\2\u010f\u051c\3\2\2\2\u0111\u0521\3\2\2\2") + buf.write("\u0113\u0527\3\2\2\2\u0115\u052c\3\2\2\2\u0117\u0530\3") + buf.write("\2\2\2\u0119\u0538\3\2\2\2\u011b\u0543\3\2\2\2\u011d\u054f") + buf.write("\3\2\2\2\u011f\u0557\3\2\2\2\u0121\u0560\3\2\2\2\u0123") + buf.write("\u0566\3\2\2\2\u0125\u056d\3\2\2\2\u0127\u0574\3\2\2\2") + buf.write("\u0129\u0580\3\2\2\2\u012b\u058b\3\2\2\2\u012d\u058f\3") + buf.write("\2\2\2\u012f\u0594\3\2\2\2\u0131\u05a4\3\2\2\2\u0133\u05a9") + buf.write("\3\2\2\2\u0135\u05b3\3\2\2\2\u0137\u05bd\3\2\2\2\u0139") + buf.write("\u05c7\3\2\2\2\u013b\u05d1\3\2\2\2\u013d\u05d6\3\2\2\2") + buf.write("\u013f\u05dc\3\2\2\2\u0141\u05e4\3\2\2\2\u0143\u05ea\3") + buf.write("\2\2\2\u0145\u05fb\3\2\2\2\u0147\u0609\3\2\2\2\u0149\u0617") + buf.write("\3\2\2\2\u014b\u0621\3\2\2\2\u014d\u062a\3\2\2\2\u014f") + buf.write("\u0636\3\2\2\2\u0151\u0640\3\2\2\2\u0153\u0648\3\2\2\2") + buf.write("\u0155\u064d\3\2\2\2\u0157\u0659\3\2\2\2\u0159\u0660\3") + buf.write("\2\2\2\u015b\u0667\3\2\2\2\u015d\u066c\3\2\2\2\u015f\u0674") + 
buf.write("\3\2\2\2\u0161\u067a\3\2\2\2\u0163\u067f\3\2\2\2\u0165") + buf.write("\u0686\3\2\2\2\u0167\u068f\3\2\2\2\u0169\u0694\3\2\2\2") + buf.write("\u016b\u0697\3\2\2\2\u016d\u069a\3\2\2\2\u016f\u06a4\3") + buf.write("\2\2\2\u0171\u06ab\3\2\2\2\u0173\u06ae\3\2\2\2\u0175\u06b3") + buf.write("\3\2\2\2\u0177\u06b8\3\2\2\2\u0179\u06c3\3\2\2\2\u017b") + buf.write("\u06ca\3\2\2\2\u017d\u06d0\3\2\2\2\u017f\u06d6\3\2\2\2") + buf.write("\u0181\u06de\3\2\2\2\u0183\u06e5\3\2\2\2\u0185\u06f0\3") + buf.write("\2\2\2\u0187\u06fa\3\2\2\2\u0189\u0705\3\2\2\2\u018b\u070f") + buf.write("\3\2\2\2\u018d\u0719\3\2\2\2\u018f\u0721\3\2\2\2\u0191") + buf.write("\u0728\3\2\2\2\u0193\u0731\3\2\2\2\u0195\u0739\3\2\2\2") + buf.write("\u0197\u073f\3\2\2\2\u0199\u074b\3\2\2\2\u019b\u0753\3") + buf.write("\2\2\2\u019d\u0757\3\2\2\2\u019f\u075d\3\2\2\2\u01a1\u0769") + buf.write("\3\2\2\2\u01a3\u077a\3\2\2\2\u01a5\u0781\3\2\2\2\u01a7") + buf.write("\u078a\3\2\2\2\u01a9\u0793\3\2\2\2\u01ab\u0798\3\2\2\2") + buf.write("\u01ad\u079e\3\2\2\2\u01af\u07ab\3\2\2\2\u01b1\u07b5\3") + buf.write("\2\2\2\u01b3\u07bb\3\2\2\2\u01b5\u07c2\3\2\2\2\u01b7\u07c7") + buf.write("\3\2\2\2\u01b9\u07d5\3\2\2\2\u01bb\u07e6\3\2\2\2\u01bd") + buf.write("\u07ee\3\2\2\2\u01bf\u07fe\3\2\2\2\u01c1\u080e\3\2\2\2") + buf.write("\u01c3\u0817\3\2\2\2\u01c5\u0820\3\2\2\2\u01c7\u0829\3") + buf.write("\2\2\2\u01c9\u0836\3\2\2\2\u01cb\u0843\3\2\2\2\u01cd\u084f") + buf.write("\3\2\2\2\u01cf\u085b\3\2\2\2\u01d1\u0866\3\2\2\2\u01d3") + buf.write("\u0873\3\2\2\2\u01d5\u087a\3\2\2\2\u01d7\u0884\3\2\2\2") + buf.write("\u01d9\u089e\3\2\2\2\u01db\u08b5\3\2\2\2\u01dd\u08d2\3") + buf.write("\2\2\2\u01df\u08ec\3\2\2\2\u01e1\u08f0\3\2\2\2\u01e3\u08f9") + buf.write("\3\2\2\2\u01e5\u08fb\3\2\2\2\u01e7\u08fe\3\2\2\2\u01e9") + buf.write("\u0902\3\2\2\2\u01eb\u090f\3\2\2\2\u01ed\u0911\3\2\2\2") + buf.write("\u01ef\u092f\3\2\2\2\u01f1\u0932\3\2\2\2\u01f3\u0938\3") + buf.write("\2\2\2\u01f5\u093a\3\2\2\2\u01f7\u0948\3\2\2\2\u01f9\u01fa") 
buf.write("\7*\2\2\u01fa\4\3\2\2\2\u01fb\u01fc\7+\2\2\u01fc\6\3\2") buf.write("\2\2\u01fd\u01fe\7]\2\2\u01fe\b\3\2\2\2\u01ff\u0200\7") buf.write("_\2\2\u0200\n\3\2\2\2\u0201\u0202\7}\2\2\u0202\f\3\2\2") @@ -512,608 +511,606 @@ def serializedATN(): buf.write("\2\2\2\u0260\u0261\7f\2\2\u0261\u0262\7c\2\2\u0262\u0263") buf.write("\7v\2\2\u0263\u0264\7g\2\2\u0264\u0265\7c\2\2\u0265\u0266") buf.write("\7f\2\2\u0266\u0267\7f\2\2\u0267@\3\2\2\2\u0268\u0269") - buf.write("\7{\2\2\u0269\u026a\7g\2\2\u026a\u026b\7c\2\2\u026b\u026c") - buf.write("\7t\2\2\u026cB\3\2\2\2\u026d\u026e\7o\2\2\u026e\u026f") - buf.write("\7q\2\2\u026f\u0270\7p\2\2\u0270\u0271\7v\2\2\u0271\u0272") - buf.write("\7j\2\2\u0272D\3\2\2\2\u0273\u0274\7f\2\2\u0274\u0275") - buf.write("\7c\2\2\u0275\u0276\7{\2\2\u0276\u0277\7q\2\2\u0277\u0278") - buf.write("\7h\2\2\u0278\u0279\7o\2\2\u0279\u027a\7q\2\2\u027a\u027b") - buf.write("\7p\2\2\u027b\u027c\7v\2\2\u027c\u027d\7j\2\2\u027dF\3") - buf.write("\2\2\2\u027e\u027f\7f\2\2\u027f\u0280\7c\2\2\u0280\u0281") - buf.write("\7{\2\2\u0281\u0282\7q\2\2\u0282\u0283\7h\2\2\u0283\u0284") - buf.write("\7{\2\2\u0284\u0285\7g\2\2\u0285\u0286\7c\2\2\u0286\u0287") - buf.write("\7t\2\2\u0287H\3\2\2\2\u0288\u0289\7f\2\2\u0289\u028a") - buf.write("\7c\2\2\u028a\u028b\7{\2\2\u028b\u028c\7v\2\2\u028c\u028d") - buf.write("\7q\2\2\u028d\u028e\7{\2\2\u028e\u028f\7g\2\2\u028f\u0290") - buf.write("\7c\2\2\u0290\u0291\7t\2\2\u0291J\3\2\2\2\u0292\u0293") - buf.write("\7f\2\2\u0293\u0294\7c\2\2\u0294\u0295\7{\2\2\u0295\u0296") - buf.write("\7v\2\2\u0296\u0297\7q\2\2\u0297\u0298\7o\2\2\u0298\u0299") - buf.write("\7q\2\2\u0299\u029a\7p\2\2\u029a\u029b\7v\2\2\u029b\u029c") - buf.write("\7j\2\2\u029cL\3\2\2\2\u029d\u029e\7{\2\2\u029e\u029f") - buf.write("\7g\2\2\u029f\u02a0\7c\2\2\u02a0\u02a1\7t\2\2\u02a1\u02a2") - buf.write("\7v\2\2\u02a2\u02a3\7q\2\2\u02a3\u02a4\7f\2\2\u02a4\u02a5") - buf.write("\7c\2\2\u02a5\u02a6\7{\2\2\u02a6N\3\2\2\2\u02a7\u02a8") - 
buf.write("\7o\2\2\u02a8\u02a9\7q\2\2\u02a9\u02aa\7p\2\2\u02aa\u02ab") - buf.write("\7v\2\2\u02ab\u02ac\7j\2\2\u02ac\u02ad\7v\2\2\u02ad\u02ae") - buf.write("\7q\2\2\u02ae\u02af\7f\2\2\u02af\u02b0\7c\2\2\u02b0\u02b1") - buf.write("\7{\2\2\u02b1P\3\2\2\2\u02b2\u02b3\7q\2\2\u02b3\u02b4") - buf.write("\7p\2\2\u02b4R\3\2\2\2\u02b5\u02b6\7f\2\2\u02b6\u02b7") - buf.write("\7t\2\2\u02b7\u02b8\7q\2\2\u02b8\u02b9\7r\2\2\u02b9T\3") - buf.write("\2\2\2\u02ba\u02bb\7m\2\2\u02bb\u02bc\7g\2\2\u02bc\u02bd") - buf.write("\7g\2\2\u02bd\u02be\7r\2\2\u02beV\3\2\2\2\u02bf\u02c0") - buf.write("\7e\2\2\u02c0\u02c1\7c\2\2\u02c1\u02c2\7n\2\2\u02c2\u02c3") - buf.write("\7e\2\2\u02c3X\3\2\2\2\u02c4\u02c5\7c\2\2\u02c5\u02c6") - buf.write("\7v\2\2\u02c6\u02c7\7v\2\2\u02c7\u02c8\7t\2\2\u02c8\u02c9") - buf.write("\7e\2\2\u02c9\u02ca\7c\2\2\u02ca\u02cb\7n\2\2\u02cb\u02cc") - buf.write("\7e\2\2\u02ccZ\3\2\2\2\u02cd\u02ce\7t\2\2\u02ce\u02cf") - buf.write("\7g\2\2\u02cf\u02d0\7p\2\2\u02d0\u02d1\7c\2\2\u02d1\u02d2") - buf.write("\7o\2\2\u02d2\u02d3\7g\2\2\u02d3\\\3\2\2\2\u02d4\u02d5") - buf.write("\7c\2\2\u02d5\u02d6\7u\2\2\u02d6^\3\2\2\2\u02d7\u02d8") - buf.write("\7c\2\2\u02d8\u02d9\7p\2\2\u02d9\u02da\7f\2\2\u02da`\3") - buf.write("\2\2\2\u02db\u02dc\7q\2\2\u02dc\u02dd\7t\2\2\u02ddb\3") - buf.write("\2\2\2\u02de\u02df\7z\2\2\u02df\u02e0\7q\2\2\u02e0\u02e1") - buf.write("\7t\2\2\u02e1d\3\2\2\2\u02e2\u02e3\7p\2\2\u02e3\u02e4") - buf.write("\7q\2\2\u02e4\u02e5\7v\2\2\u02e5f\3\2\2\2\u02e6\u02e7") - buf.write("\7d\2\2\u02e7\u02e8\7g\2\2\u02e8\u02e9\7v\2\2\u02e9\u02ea") - buf.write("\7y\2\2\u02ea\u02eb\7g\2\2\u02eb\u02ec\7g\2\2\u02ec\u02ed") - buf.write("\7p\2\2\u02edh\3\2\2\2\u02ee\u02ef\7k\2\2\u02ef\u02f0") - buf.write("\7p\2\2\u02f0j\3\2\2\2\u02f1\u02f2\7p\2\2\u02f2\u02f3") - buf.write("\7q\2\2\u02f3\u02f4\7v\2\2\u02f4\u02f5\7a\2\2\u02f5\u02f6") - buf.write("\7k\2\2\u02f6\u02f7\7p\2\2\u02f7l\3\2\2\2\u02f8\u02f9") - buf.write("\7p\2\2\u02f9\u02fa\7w\2\2\u02fa\u02fb\7n\2\2\u02fb\u02fc") - 
buf.write("\7n\2\2\u02fcn\3\2\2\2\u02fd\u02fe\7k\2\2\u02fe\u02ff") - buf.write("\7u\2\2\u02ff\u0300\7p\2\2\u0300\u0301\7w\2\2\u0301\u0302") - buf.write("\7n\2\2\u0302\u0303\7n\2\2\u0303p\3\2\2\2\u0304\u0305") - buf.write("\7g\2\2\u0305\u0306\7z\2\2\u0306r\3\2\2\2\u0307\u0308") - buf.write("\7w\2\2\u0308\u0309\7p\2\2\u0309\u030a\7k\2\2\u030a\u030b") - buf.write("\7q\2\2\u030b\u030c\7p\2\2\u030ct\3\2\2\2\u030d\u030e") - buf.write("\7f\2\2\u030e\u030f\7k\2\2\u030f\u0310\7h\2\2\u0310\u0311") - buf.write("\7h\2\2\u0311v\3\2\2\2\u0312\u0313\7u\2\2\u0313\u0314") - buf.write("\7{\2\2\u0314\u0315\7o\2\2\u0315\u0316\7f\2\2\u0316\u0317") - buf.write("\7k\2\2\u0317\u0318\7h\2\2\u0318\u0319\7h\2\2\u0319x\3") - buf.write("\2\2\2\u031a\u031b\7k\2\2\u031b\u031c\7p\2\2\u031c\u031d") - buf.write("\7v\2\2\u031d\u031e\7g\2\2\u031e\u031f\7t\2\2\u031f\u0320") - buf.write("\7u\2\2\u0320\u0321\7g\2\2\u0321\u0322\7e\2\2\u0322\u0323") - buf.write("\7v\2\2\u0323z\3\2\2\2\u0324\u0325\7t\2\2\u0325\u0326") - buf.write("\7c\2\2\u0326\u0327\7p\2\2\u0327\u0328\7f\2\2\u0328\u0329") - buf.write("\7q\2\2\u0329\u032a\7o\2\2\u032a|\3\2\2\2\u032b\u032c") - buf.write("\7m\2\2\u032c\u032d\7g\2\2\u032d\u032e\7{\2\2\u032e\u032f") - buf.write("\7u\2\2\u032f~\3\2\2\2\u0330\u0331\7k\2\2\u0331\u0332") - buf.write("\7p\2\2\u0332\u0333\7v\2\2\u0333\u0334\7{\2\2\u0334\u0335") - buf.write("\7g\2\2\u0335\u0336\7c\2\2\u0336\u0337\7t\2\2\u0337\u0080") - buf.write("\3\2\2\2\u0338\u0339\7k\2\2\u0339\u033a\7p\2\2\u033a\u033b") - buf.write("\7v\2\2\u033b\u033c\7o\2\2\u033c\u033d\7q\2\2\u033d\u033e") - buf.write("\7p\2\2\u033e\u033f\7v\2\2\u033f\u0340\7j\2\2\u0340\u0082") - buf.write("\3\2\2\2\u0341\u0342\7k\2\2\u0342\u0343\7p\2\2\u0343\u0344") - buf.write("\7v\2\2\u0344\u0345\7f\2\2\u0345\u0346\7c\2\2\u0346\u0347") - buf.write("\7{\2\2\u0347\u0084\3\2\2\2\u0348\u0349\7e\2\2\u0349\u034a") - buf.write("\7j\2\2\u034a\u034b\7g\2\2\u034b\u034c\7e\2\2\u034c\u034d") - 
buf.write("\7m\2\2\u034d\u0086\3\2\2\2\u034e\u034f\7g\2\2\u034f\u0350") - buf.write("\7z\2\2\u0350\u0351\7k\2\2\u0351\u0352\7u\2\2\u0352\u0353") - buf.write("\7v\2\2\u0353\u0354\7u\2\2\u0354\u0355\7a\2\2\u0355\u0356") - buf.write("\7k\2\2\u0356\u0357\7p\2\2\u0357\u0088\3\2\2\2\u0358\u0359") - buf.write("\7v\2\2\u0359\u035a\7q\2\2\u035a\u008a\3\2\2\2\u035b\u035c") - buf.write("\7t\2\2\u035c\u035d\7g\2\2\u035d\u035e\7v\2\2\u035e\u035f") - buf.write("\7w\2\2\u035f\u0360\7t\2\2\u0360\u0361\7p\2\2\u0361\u008c") - buf.write("\3\2\2\2\u0362\u0363\7k\2\2\u0363\u0364\7o\2\2\u0364\u0365") - buf.write("\7d\2\2\u0365\u0366\7c\2\2\u0366\u0367\7n\2\2\u0367\u0368") - buf.write("\7c\2\2\u0368\u0369\7p\2\2\u0369\u036a\7e\2\2\u036a\u036b") - buf.write("\7g\2\2\u036b\u008e\3\2\2\2\u036c\u036d\7g\2\2\u036d\u036e") - buf.write("\7t\2\2\u036e\u036f\7t\2\2\u036f\u0370\7q\2\2\u0370\u0371") - buf.write("\7t\2\2\u0371\u0372\7e\2\2\u0372\u0373\7q\2\2\u0373\u0374") - buf.write("\7f\2\2\u0374\u0375\7g\2\2\u0375\u0090\3\2\2\2\u0376\u0377") - buf.write("\7c\2\2\u0377\u0378\7n\2\2\u0378\u0379\7n\2\2\u0379\u0092") - buf.write("\3\2\2\2\u037a\u037b\7c\2\2\u037b\u037c\7i\2\2\u037c\u037d") - buf.write("\7i\2\2\u037d\u037e\7t\2\2\u037e\u0094\3\2\2\2\u037f\u0380") - buf.write("\7g\2\2\u0380\u0381\7t\2\2\u0381\u0382\7t\2\2\u0382\u0383") - buf.write("\7q\2\2\u0383\u0384\7t\2\2\u0384\u0385\7n\2\2\u0385\u0386") - buf.write("\7g\2\2\u0386\u0387\7x\2\2\u0387\u0388\7g\2\2\u0388\u0389") - buf.write("\7n\2\2\u0389\u0096\3\2\2\2\u038a\u038b\7q\2\2\u038b\u038c") - buf.write("\7t\2\2\u038c\u038d\7f\2\2\u038d\u038e\7g\2\2\u038e\u038f") - buf.write("\7t\2\2\u038f\u0098\3\2\2\2\u0390\u0391\7d\2\2\u0391\u0392") - buf.write("\7{\2\2\u0392\u009a\3\2\2\2\u0393\u0394\7t\2\2\u0394\u0395") - buf.write("\7c\2\2\u0395\u0396\7p\2\2\u0396\u0397\7m\2\2\u0397\u009c") - buf.write("\3\2\2\2\u0398\u0399\7c\2\2\u0399\u039a\7u\2\2\u039a\u039b") - buf.write("\7e\2\2\u039b\u009e\3\2\2\2\u039c\u039d\7f\2\2\u039d\u039e") - 
buf.write("\7g\2\2\u039e\u039f\7u\2\2\u039f\u03a0\7e\2\2\u03a0\u00a0") - buf.write("\3\2\2\2\u03a1\u03a2\7o\2\2\u03a2\u03a3\7k\2\2\u03a3\u03a4") - buf.write("\7p\2\2\u03a4\u00a2\3\2\2\2\u03a5\u03a6\7o\2\2\u03a6\u03a7") - buf.write("\7c\2\2\u03a7\u03a8\7z\2\2\u03a8\u00a4\3\2\2\2\u03a9\u03aa") - buf.write("\7h\2\2\u03aa\u03ab\7k\2\2\u03ab\u03ac\7t\2\2\u03ac\u03ad") - buf.write("\7u\2\2\u03ad\u03ae\7v\2\2\u03ae\u00a6\3\2\2\2\u03af\u03b0") - buf.write("\7n\2\2\u03b0\u03b1\7c\2\2\u03b1\u03b2\7u\2\2\u03b2\u03b3") - buf.write("\7v\2\2\u03b3\u00a8\3\2\2\2\u03b4\u03b5\7k\2\2\u03b5\u03b6") - buf.write("\7p\2\2\u03b6\u03b7\7f\2\2\u03b7\u03b8\7g\2\2\u03b8\u03b9") - buf.write("\7z\2\2\u03b9\u03ba\7q\2\2\u03ba\u03bb\7h\2\2\u03bb\u00aa") - buf.write("\3\2\2\2\u03bc\u03bd\7c\2\2\u03bd\u03be\7d\2\2\u03be\u03bf") - buf.write("\7u\2\2\u03bf\u00ac\3\2\2\2\u03c0\u03c1\7m\2\2\u03c1\u03c2") - buf.write("\7g\2\2\u03c2\u03c3\7{\2\2\u03c3\u00ae\3\2\2\2\u03c4\u03c5") - buf.write("\7n\2\2\u03c5\u03c6\7p\2\2\u03c6\u00b0\3\2\2\2\u03c7\u03c8") - buf.write("\7n\2\2\u03c8\u03c9\7q\2\2\u03c9\u03ca\7i\2\2\u03ca\u00b2") - buf.write("\3\2\2\2\u03cb\u03cc\7v\2\2\u03cc\u03cd\7t\2\2\u03cd\u03ce") - buf.write("\7w\2\2\u03ce\u03cf\7p\2\2\u03cf\u03d0\7e\2\2\u03d0\u00b4") - buf.write("\3\2\2\2\u03d1\u03d2\7t\2\2\u03d2\u03d3\7q\2\2\u03d3\u03d4") - buf.write("\7w\2\2\u03d4\u03d5\7p\2\2\u03d5\u03d6\7f\2\2\u03d6\u00b6") - buf.write("\3\2\2\2\u03d7\u03d8\7r\2\2\u03d8\u03d9\7q\2\2\u03d9\u03da") - buf.write("\7y\2\2\u03da\u03db\7g\2\2\u03db\u03dc\7t\2\2\u03dc\u00b8") - buf.write("\3\2\2\2\u03dd\u03de\7o\2\2\u03de\u03df\7q\2\2\u03df\u03e0") - buf.write("\7f\2\2\u03e0\u00ba\3\2\2\2\u03e1\u03e2\7n\2\2\u03e2\u03e3") - buf.write("\7g\2\2\u03e3\u03e4\7p\2\2\u03e4\u03e5\7i\2\2\u03e5\u03e6") - buf.write("\7v\2\2\u03e6\u03e7\7j\2\2\u03e7\u00bc\3\2\2\2\u03e8\u03e9") - buf.write("\7~\2\2\u03e9\u03ea\7~\2\2\u03ea\u00be\3\2\2\2\u03eb\u03ec") - buf.write("\7v\2\2\u03ec\u03ed\7t\2\2\u03ed\u03ee\7k\2\2\u03ee\u03ef") - 
buf.write("\7o\2\2\u03ef\u00c0\3\2\2\2\u03f0\u03f1\7w\2\2\u03f1\u03f2") - buf.write("\7r\2\2\u03f2\u03f3\7r\2\2\u03f3\u03f4\7g\2\2\u03f4\u03f5") - buf.write("\7t\2\2\u03f5\u00c2\3\2\2\2\u03f6\u03f7\7n\2\2\u03f7\u03f8") - buf.write("\7q\2\2\u03f8\u03f9\7y\2\2\u03f9\u03fa\7g\2\2\u03fa\u03fb") - buf.write("\7t\2\2\u03fb\u00c4\3\2\2\2\u03fc\u03fd\7u\2\2\u03fd\u03fe") - buf.write("\7w\2\2\u03fe\u03ff\7d\2\2\u03ff\u0400\7u\2\2\u0400\u0401") - buf.write("\7v\2\2\u0401\u0402\7t\2\2\u0402\u00c6\3\2\2\2\u0403\u0404") - buf.write("\7u\2\2\u0404\u0405\7w\2\2\u0405\u0406\7o\2\2\u0406\u00c8") - buf.write("\3\2\2\2\u0407\u0408\7c\2\2\u0408\u0409\7x\2\2\u0409\u040a") - buf.write("\7i\2\2\u040a\u00ca\3\2\2\2\u040b\u040c\7o\2\2\u040c\u040d") - buf.write("\7g\2\2\u040d\u040e\7f\2\2\u040e\u040f\7k\2\2\u040f\u0410") - buf.write("\7c\2\2\u0410\u0411\7p\2\2\u0411\u00cc\3\2\2\2\u0412\u0413") - buf.write("\7e\2\2\u0413\u0414\7q\2\2\u0414\u0415\7w\2\2\u0415\u0416") - buf.write("\7p\2\2\u0416\u0417\7v\2\2\u0417\u00ce\3\2\2\2\u0418\u0419") - buf.write("\7k\2\2\u0419\u041a\7f\2\2\u041a\u041b\7g\2\2\u041b\u041c") - buf.write("\7p\2\2\u041c\u041d\7v\2\2\u041d\u041e\7k\2\2\u041e\u041f") - buf.write("\7h\2\2\u041f\u0420\7k\2\2\u0420\u0421\7g\2\2\u0421\u0422") - buf.write("\7t\2\2\u0422\u00d0\3\2\2\2\u0423\u0424\7o\2\2\u0424\u0425") - buf.write("\7g\2\2\u0425\u0426\7c\2\2\u0426\u0427\7u\2\2\u0427\u0428") - buf.write("\7w\2\2\u0428\u0429\7t\2\2\u0429\u042a\7g\2\2\u042a\u00d2") - buf.write("\3\2\2\2\u042b\u042c\7c\2\2\u042c\u042d\7v\2\2\u042d\u042e") - buf.write("\7v\2\2\u042e\u042f\7t\2\2\u042f\u0430\7k\2\2\u0430\u0431") - buf.write("\7d\2\2\u0431\u0432\7w\2\2\u0432\u0433\7v\2\2\u0433\u0434") - buf.write("\7g\2\2\u0434\u00d4\3\2\2\2\u0435\u0436\7h\2\2\u0436\u0437") - buf.write("\7k\2\2\u0437\u0438\7n\2\2\u0438\u0439\7v\2\2\u0439\u043a") - buf.write("\7g\2\2\u043a\u043b\7t\2\2\u043b\u00d6\3\2\2\2\u043c\u043d") - buf.write("\7o\2\2\u043d\u043e\7g\2\2\u043e\u043f\7t\2\2\u043f\u0440") - 
buf.write("\7i\2\2\u0440\u0441\7g\2\2\u0441\u00d8\3\2\2\2\u0442\u0443") - buf.write("\7g\2\2\u0443\u0444\7z\2\2\u0444\u0445\7r\2\2\u0445\u00da") - buf.write("\3\2\2\2\u0446\u0447\7e\2\2\u0447\u0448\7q\2\2\u0448\u0449") - buf.write("\7o\2\2\u0449\u044a\7r\2\2\u044a\u044b\7q\2\2\u044b\u044c") - buf.write("\7p\2\2\u044c\u044d\7g\2\2\u044d\u044e\7p\2\2\u044e\u044f") - buf.write("\7v\2\2\u044f\u0450\7T\2\2\u0450\u0451\7q\2\2\u0451\u0452") - buf.write("\7n\2\2\u0452\u0453\7g\2\2\u0453\u00dc\3\2\2\2\u0454\u0455") - buf.write("\7x\2\2\u0455\u0456\7k\2\2\u0456\u0457\7t\2\2\u0457\u0458") - buf.write("\7c\2\2\u0458\u0459\7n\2\2\u0459\u00de\3\2\2\2\u045a\u045b") - buf.write("\7o\2\2\u045b\u045c\7c\2\2\u045c\u045d\7v\2\2\u045d\u045e") - buf.write("\7e\2\2\u045e\u045f\7j\2\2\u045f\u0460\7a\2\2\u0460\u0461") - buf.write("\7e\2\2\u0461\u0462\7j\2\2\u0462\u0463\7c\2\2\u0463\u0464") - buf.write("\7t\2\2\u0464\u0465\7c\2\2\u0465\u0466\7e\2\2\u0466\u0467") - buf.write("\7v\2\2\u0467\u0468\7g\2\2\u0468\u0469\7t\2\2\u0469\u046a") - buf.write("\7u\2\2\u046a\u00e0\3\2\2\2\u046b\u046c\7v\2\2\u046c\u046d") - buf.write("\7{\2\2\u046d\u046e\7r\2\2\u046e\u046f\7g\2\2\u046f\u00e2") - buf.write("\3\2\2\2\u0470\u0471\7p\2\2\u0471\u0472\7x\2\2\u0472\u0473") - buf.write("\7n\2\2\u0473\u00e4\3\2\2\2\u0474\u0475\7j\2\2\u0475\u0476") - buf.write("\7k\2\2\u0476\u0477\7g\2\2\u0477\u0478\7t\2\2\u0478\u0479") - buf.write("\7c\2\2\u0479\u047a\7t\2\2\u047a\u047b\7e\2\2\u047b\u047c") - buf.write("\7j\2\2\u047c\u047d\7{\2\2\u047d\u00e6\3\2\2\2\u047e\u047f") - buf.write("\7a\2\2\u047f\u00e8\3\2\2\2\u0480\u0481\7k\2\2\u0481\u0482") - buf.write("\7p\2\2\u0482\u0483\7x\2\2\u0483\u0484\7c\2\2\u0484\u0485") - buf.write("\7n\2\2\u0485\u0486\7k\2\2\u0486\u0487\7f\2\2\u0487\u00ea") - buf.write("\3\2\2\2\u0488\u0489\7x\2\2\u0489\u048a\7c\2\2\u048a\u048b") - buf.write("\7n\2\2\u048b\u048c\7w\2\2\u048c\u048d\7g\2\2\u048d\u048e") - buf.write("\7f\2\2\u048e\u048f\7q\2\2\u048f\u0490\7o\2\2\u0490\u0491") - 
buf.write("\7c\2\2\u0491\u0492\7k\2\2\u0492\u0493\7p\2\2\u0493\u00ec") - buf.write("\3\2\2\2\u0494\u0495\7x\2\2\u0495\u0496\7c\2\2\u0496\u0497") - buf.write("\7t\2\2\u0497\u0498\7k\2\2\u0498\u0499\7c\2\2\u0499\u049a") - buf.write("\7d\2\2\u049a\u049b\7n\2\2\u049b\u049c\7g\2\2\u049c\u00ee") - buf.write("\3\2\2\2\u049d\u049e\7f\2\2\u049e\u049f\7c\2\2\u049f\u04a0") - buf.write("\7v\2\2\u04a0\u04a1\7c\2\2\u04a1\u00f0\3\2\2\2\u04a2\u04a3") - buf.write("\7u\2\2\u04a3\u04a4\7v\2\2\u04a4\u04a5\7t\2\2\u04a5\u04a6") - buf.write("\7w\2\2\u04a6\u04a7\7e\2\2\u04a7\u04a8\7v\2\2\u04a8\u04a9") - buf.write("\7w\2\2\u04a9\u04aa\7t\2\2\u04aa\u04ab\7g\2\2\u04ab\u00f2") - buf.write("\3\2\2\2\u04ac\u04ad\7f\2\2\u04ad\u04ae\7c\2\2\u04ae\u04af") - buf.write("\7v\2\2\u04af\u04b0\7c\2\2\u04b0\u04b1\7u\2\2\u04b1\u04b2") - buf.write("\7g\2\2\u04b2\u04b3\7v\2\2\u04b3\u00f4\3\2\2\2\u04b4\u04b5") - buf.write("\7q\2\2\u04b5\u04b6\7r\2\2\u04b6\u04b7\7g\2\2\u04b7\u04b8") - buf.write("\7t\2\2\u04b8\u04b9\7c\2\2\u04b9\u04ba\7v\2\2\u04ba\u04bb") - buf.write("\7q\2\2\u04bb\u04bc\7t\2\2\u04bc\u00f6\3\2\2\2\u04bd\u04be") - buf.write("\7f\2\2\u04be\u04bf\7g\2\2\u04bf\u04c0\7h\2\2\u04c0\u04c1") - buf.write("\7k\2\2\u04c1\u04c2\7p\2\2\u04c2\u04c3\7g\2\2\u04c3\u00f8") - buf.write("\3\2\2\2\u04c4\u04c5\7>\2\2\u04c5\u04c6\7/\2\2\u04c6\u00fa") - buf.write("\3\2\2\2\u04c7\u04c8\7f\2\2\u04c8\u04c9\7c\2\2\u04c9\u04ca") - buf.write("\7v\2\2\u04ca\u04cb\7c\2\2\u04cb\u04cc\7r\2\2\u04cc\u04cd") - buf.write("\7q\2\2\u04cd\u04ce\7k\2\2\u04ce\u04cf\7p\2\2\u04cf\u04d0") - buf.write("\7v\2\2\u04d0\u00fc\3\2\2\2\u04d1\u04d2\7j\2\2\u04d2\u04d3") - buf.write("\7k\2\2\u04d3\u04d4\7g\2\2\u04d4\u04d5\7t\2\2\u04d5\u04d6") - buf.write("\7c\2\2\u04d6\u04d7\7t\2\2\u04d7\u04d8\7e\2\2\u04d8\u04d9") - buf.write("\7j\2\2\u04d9\u04da\7k\2\2\u04da\u04db\7e\2\2\u04db\u04dc") - buf.write("\7c\2\2\u04dc\u04dd\7n\2\2\u04dd\u00fe\3\2\2\2\u04de\u04df") - buf.write("\7t\2\2\u04df\u04e0\7w\2\2\u04e0\u04e1\7n\2\2\u04e1\u04e2") - 
buf.write("\7g\2\2\u04e2\u04e3\7u\2\2\u04e3\u04e4\7g\2\2\u04e4\u04e5") - buf.write("\7v\2\2\u04e5\u0100\3\2\2\2\u04e6\u04e7\7t\2\2\u04e7\u04e8") - buf.write("\7w\2\2\u04e8\u04e9\7n\2\2\u04e9\u04ea\7g\2\2\u04ea\u0102") - buf.write("\3\2\2\2\u04eb\u04ec\7g\2\2\u04ec\u04ed\7p\2\2\u04ed\u04ee") - buf.write("\7f\2\2\u04ee\u0104\3\2\2\2\u04ef\u04f0\7c\2\2\u04f0\u04f1") - buf.write("\7n\2\2\u04f1\u04f2\7v\2\2\u04f2\u04f3\7g\2\2\u04f3\u04f4") - buf.write("\7t\2\2\u04f4\u04f5\7F\2\2\u04f5\u04f6\7c\2\2\u04f6\u04f7") - buf.write("\7v\2\2\u04f7\u04f8\7c\2\2\u04f8\u04f9\7u\2\2\u04f9\u04fa") - buf.write("\7g\2\2\u04fa\u04fb\7v\2\2\u04fb\u0106\3\2\2\2\u04fc\u04fd") - buf.write("\7n\2\2\u04fd\u04fe\7v\2\2\u04fe\u04ff\7t\2\2\u04ff\u0500") - buf.write("\7k\2\2\u0500\u0501\7o\2\2\u0501\u0108\3\2\2\2\u0502\u0503") - buf.write("\7t\2\2\u0503\u0504\7v\2\2\u0504\u0505\7t\2\2\u0505\u0506") - buf.write("\7k\2\2\u0506\u0507\7o\2\2\u0507\u010a\3\2\2\2\u0508\u0509") - buf.write("\7k\2\2\u0509\u050a\7p\2\2\u050a\u050b\7u\2\2\u050b\u050c") - buf.write("\7v\2\2\u050c\u050d\7t\2\2\u050d\u010c\3\2\2\2\u050e\u050f") - buf.write("\7t\2\2\u050f\u0510\7g\2\2\u0510\u0511\7r\2\2\u0511\u0512") - buf.write("\7n\2\2\u0512\u0513\7c\2\2\u0513\u0514\7e\2\2\u0514\u0515") - buf.write("\7g\2\2\u0515\u010e\3\2\2\2\u0516\u0517\7e\2\2\u0517\u0518") - buf.write("\7g\2\2\u0518\u0519\7k\2\2\u0519\u051a\7n\2\2\u051a\u0110") - buf.write("\3\2\2\2\u051b\u051c\7h\2\2\u051c\u051d\7n\2\2\u051d\u051e") - buf.write("\7q\2\2\u051e\u051f\7q\2\2\u051f\u0520\7t\2\2\u0520\u0112") - buf.write("\3\2\2\2\u0521\u0522\7u\2\2\u0522\u0523\7s\2\2\u0523\u0524") - buf.write("\7t\2\2\u0524\u0525\7v\2\2\u0525\u0114\3\2\2\2\u0526\u0527") - buf.write("\7c\2\2\u0527\u0528\7p\2\2\u0528\u0529\7{\2\2\u0529\u0116") - buf.write("\3\2\2\2\u052a\u052b\7u\2\2\u052b\u052c\7g\2\2\u052c\u052d") - buf.write("\7v\2\2\u052d\u052e\7f\2\2\u052e\u052f\7k\2\2\u052f\u0530") - buf.write("\7h\2\2\u0530\u0531\7h\2\2\u0531\u0118\3\2\2\2\u0532\u0533") - 
buf.write("\7u\2\2\u0533\u0534\7v\2\2\u0534\u0535\7f\2\2\u0535\u0536") - buf.write("\7f\2\2\u0536\u0537\7g\2\2\u0537\u0538\7x\2\2\u0538\u0539") - buf.write("\7a\2\2\u0539\u053a\7r\2\2\u053a\u053b\7q\2\2\u053b\u053c") - buf.write("\7r\2\2\u053c\u011a\3\2\2\2\u053d\u053e\7u\2\2\u053e\u053f") - buf.write("\7v\2\2\u053f\u0540\7f\2\2\u0540\u0541\7f\2\2\u0541\u0542") - buf.write("\7g\2\2\u0542\u0543\7x\2\2\u0543\u0544\7a\2\2\u0544\u0545") - buf.write("\7u\2\2\u0545\u0546\7c\2\2\u0546\u0547\7o\2\2\u0547\u0548") - buf.write("\7r\2\2\u0548\u011c\3\2\2\2\u0549\u054a\7x\2\2\u054a\u054b") - buf.write("\7c\2\2\u054b\u054c\7t\2\2\u054c\u054d\7a\2\2\u054d\u054e") - buf.write("\7r\2\2\u054e\u054f\7q\2\2\u054f\u0550\7r\2\2\u0550\u011e") - buf.write("\3\2\2\2\u0551\u0552\7x\2\2\u0552\u0553\7c\2\2\u0553\u0554") - buf.write("\7t\2\2\u0554\u0555\7a\2\2\u0555\u0556\7u\2\2\u0556\u0557") - buf.write("\7c\2\2\u0557\u0558\7o\2\2\u0558\u0559\7r\2\2\u0559\u0120") - buf.write("\3\2\2\2\u055a\u055b\7i\2\2\u055b\u055c\7t\2\2\u055c\u055d") - buf.write("\7q\2\2\u055d\u055e\7w\2\2\u055e\u055f\7r\2\2\u055f\u0122") - buf.write("\3\2\2\2\u0560\u0561\7g\2\2\u0561\u0562\7z\2\2\u0562\u0563") - buf.write("\7e\2\2\u0563\u0564\7g\2\2\u0564\u0565\7r\2\2\u0565\u0566") - buf.write("\7v\2\2\u0566\u0124\3\2\2\2\u0567\u0568\7j\2\2\u0568\u0569") - buf.write("\7c\2\2\u0569\u056a\7x\2\2\u056a\u056b\7k\2\2\u056b\u056c") - buf.write("\7p\2\2\u056c\u056d\7i\2\2\u056d\u0126\3\2\2\2\u056e\u056f") - buf.write("\7h\2\2\u056f\u0570\7k\2\2\u0570\u0571\7t\2\2\u0571\u0572") - buf.write("\7u\2\2\u0572\u0573\7v\2\2\u0573\u0574\7a\2\2\u0574\u0575") - buf.write("\7x\2\2\u0575\u0576\7c\2\2\u0576\u0577\7n\2\2\u0577\u0578") - buf.write("\7w\2\2\u0578\u0579\7g\2\2\u0579\u0128\3\2\2\2\u057a\u057b") - buf.write("\7n\2\2\u057b\u057c\7c\2\2\u057c\u057d\7u\2\2\u057d\u057e") - buf.write("\7v\2\2\u057e\u057f\7a\2\2\u057f\u0580\7x\2\2\u0580\u0581") - buf.write("\7c\2\2\u0581\u0582\7n\2\2\u0582\u0583\7w\2\2\u0583\u0584") - 
buf.write("\7g\2\2\u0584\u012a\3\2\2\2\u0585\u0586\7n\2\2\u0586\u0587") - buf.write("\7c\2\2\u0587\u0588\7i\2\2\u0588\u012c\3\2\2\2\u0589\u058a") - buf.write("\7n\2\2\u058a\u058b\7g\2\2\u058b\u058c\7c\2\2\u058c\u058d") - buf.write("\7f\2\2\u058d\u012e\3\2\2\2\u058e\u058f\7t\2\2\u058f\u0590") - buf.write("\7c\2\2\u0590\u0591\7v\2\2\u0591\u0592\7k\2\2\u0592\u0593") - buf.write("\7q\2\2\u0593\u0594\7a\2\2\u0594\u0595\7v\2\2\u0595\u0596") - buf.write("\7q\2\2\u0596\u0597\7a\2\2\u0597\u0598\7t\2\2\u0598\u0599") - buf.write("\7g\2\2\u0599\u059a\7r\2\2\u059a\u059b\7q\2\2\u059b\u059c") - buf.write("\7t\2\2\u059c\u059d\7v\2\2\u059d\u0130\3\2\2\2\u059e\u059f") - buf.write("\7q\2\2\u059f\u05a0\7x\2\2\u05a0\u05a1\7g\2\2\u05a1\u05a2") - buf.write("\7t\2\2\u05a2\u0132\3\2\2\2\u05a3\u05a4\7r\2\2\u05a4\u05a5") - buf.write("\7t\2\2\u05a5\u05a6\7g\2\2\u05a6\u05a7\7e\2\2\u05a7\u05a8") - buf.write("\7g\2\2\u05a8\u05a9\7f\2\2\u05a9\u05aa\7k\2\2\u05aa\u05ab") - buf.write("\7p\2\2\u05ab\u05ac\7i\2\2\u05ac\u0134\3\2\2\2\u05ad\u05ae") - buf.write("\7h\2\2\u05ae\u05af\7q\2\2\u05af\u05b0\7n\2\2\u05b0\u05b1") - buf.write("\7n\2\2\u05b1\u05b2\7q\2\2\u05b2\u05b3\7y\2\2\u05b3\u05b4") - buf.write("\7k\2\2\u05b4\u05b5\7p\2\2\u05b5\u05b6\7i\2\2\u05b6\u0136") - buf.write("\3\2\2\2\u05b7\u05b8\7w\2\2\u05b8\u05b9\7p\2\2\u05b9\u05ba") - buf.write("\7d\2\2\u05ba\u05bb\7q\2\2\u05bb\u05bc\7w\2\2\u05bc\u05bd") - buf.write("\7p\2\2\u05bd\u05be\7f\2\2\u05be\u05bf\7g\2\2\u05bf\u05c0") - buf.write("\7f\2\2\u05c0\u0138\3\2\2\2\u05c1\u05c2\7r\2\2\u05c2\u05c3") - buf.write("\7c\2\2\u05c3\u05c4\7t\2\2\u05c4\u05c5\7v\2\2\u05c5\u05c6") - buf.write("\7k\2\2\u05c6\u05c7\7v\2\2\u05c7\u05c8\7k\2\2\u05c8\u05c9") - buf.write("\7q\2\2\u05c9\u05ca\7p\2\2\u05ca\u013a\3\2\2\2\u05cb\u05cc") - buf.write("\7t\2\2\u05cc\u05cd\7q\2\2\u05cd\u05ce\7y\2\2\u05ce\u05cf") - buf.write("\7u\2\2\u05cf\u013c\3\2\2\2\u05d0\u05d1\7t\2\2\u05d1\u05d2") - buf.write("\7c\2\2\u05d2\u05d3\7p\2\2\u05d3\u05d4\7i\2\2\u05d4\u05d5") - 
buf.write("\7g\2\2\u05d5\u013e\3\2\2\2\u05d6\u05d7\7e\2\2\u05d7\u05d8") - buf.write("\7w\2\2\u05d8\u05d9\7t\2\2\u05d9\u05da\7t\2\2\u05da\u05db") - buf.write("\7g\2\2\u05db\u05dc\7p\2\2\u05dc\u05dd\7v\2\2\u05dd\u0140") - buf.write("\3\2\2\2\u05de\u05df\7x\2\2\u05df\u05e0\7c\2\2\u05e0\u05e1") - buf.write("\7n\2\2\u05e1\u05e2\7k\2\2\u05e2\u05e3\7f\2\2\u05e3\u0142") - buf.write("\3\2\2\2\u05e4\u05e5\7h\2\2\u05e5\u05e6\7k\2\2\u05e6\u05e7") - buf.write("\7n\2\2\u05e7\u05e8\7n\2\2\u05e8\u05e9\7a\2\2\u05e9\u05ea") - buf.write("\7v\2\2\u05ea\u05eb\7k\2\2\u05eb\u05ec\7o\2\2\u05ec\u05ed") - buf.write("\7g\2\2\u05ed\u05ee\7a\2\2\u05ee\u05ef\7u\2\2\u05ef\u05f0") - buf.write("\7g\2\2\u05f0\u05f1\7t\2\2\u05f1\u05f2\7k\2\2\u05f2\u05f3") - buf.write("\7g\2\2\u05f3\u05f4\7u\2\2\u05f4\u0144\3\2\2\2\u05f5\u05f6") - buf.write("\7h\2\2\u05f6\u05f7\7n\2\2\u05f7\u05f8\7q\2\2\u05f8\u05f9") - buf.write("\7y\2\2\u05f9\u05fa\7a\2\2\u05fa\u05fb\7v\2\2\u05fb\u05fc") - buf.write("\7q\2\2\u05fc\u05fd\7a\2\2\u05fd\u05fe\7u\2\2\u05fe\u05ff") - buf.write("\7v\2\2\u05ff\u0600\7q\2\2\u0600\u0601\7e\2\2\u0601\u0602") - buf.write("\7m\2\2\u0602\u0146\3\2\2\2\u0603\u0604\7u\2\2\u0604\u0605") + buf.write("\7i\2\2\u0269\u026a\7g\2\2\u026a\u026b\7v\2\2\u026b\u026c") + buf.write("\7{\2\2\u026c\u026d\7g\2\2\u026d\u026e\7c\2\2\u026e\u026f") + buf.write("\7t\2\2\u026fB\3\2\2\2\u0270\u0271\7i\2\2\u0271\u0272") + buf.write("\7g\2\2\u0272\u0273\7v\2\2\u0273\u0274\7o\2\2\u0274\u0275") + buf.write("\7q\2\2\u0275\u0276\7p\2\2\u0276\u0277\7v\2\2\u0277\u0278") + buf.write("\7j\2\2\u0278D\3\2\2\2\u0279\u027a\7f\2\2\u027a\u027b") + buf.write("\7c\2\2\u027b\u027c\7{\2\2\u027c\u027d\7q\2\2\u027d\u027e") + buf.write("\7h\2\2\u027e\u027f\7o\2\2\u027f\u0280\7q\2\2\u0280\u0281") + buf.write("\7p\2\2\u0281\u0282\7v\2\2\u0282\u0283\7j\2\2\u0283F\3") + buf.write("\2\2\2\u0284\u0285\7f\2\2\u0285\u0286\7c\2\2\u0286\u0287") + buf.write("\7{\2\2\u0287\u0288\7q\2\2\u0288\u0289\7h\2\2\u0289\u028a") + 
buf.write("\7{\2\2\u028a\u028b\7g\2\2\u028b\u028c\7c\2\2\u028c\u028d") + buf.write("\7t\2\2\u028dH\3\2\2\2\u028e\u028f\7f\2\2\u028f\u0290") + buf.write("\7c\2\2\u0290\u0291\7{\2\2\u0291\u0292\7v\2\2\u0292\u0293") + buf.write("\7q\2\2\u0293\u0294\7{\2\2\u0294\u0295\7g\2\2\u0295\u0296") + buf.write("\7c\2\2\u0296\u0297\7t\2\2\u0297J\3\2\2\2\u0298\u0299") + buf.write("\7f\2\2\u0299\u029a\7c\2\2\u029a\u029b\7{\2\2\u029b\u029c") + buf.write("\7v\2\2\u029c\u029d\7q\2\2\u029d\u029e\7o\2\2\u029e\u029f") + buf.write("\7q\2\2\u029f\u02a0\7p\2\2\u02a0\u02a1\7v\2\2\u02a1\u02a2") + buf.write("\7j\2\2\u02a2L\3\2\2\2\u02a3\u02a4\7{\2\2\u02a4\u02a5") + buf.write("\7g\2\2\u02a5\u02a6\7c\2\2\u02a6\u02a7\7t\2\2\u02a7\u02a8") + buf.write("\7v\2\2\u02a8\u02a9\7q\2\2\u02a9\u02aa\7f\2\2\u02aa\u02ab") + buf.write("\7c\2\2\u02ab\u02ac\7{\2\2\u02acN\3\2\2\2\u02ad\u02ae") + buf.write("\7o\2\2\u02ae\u02af\7q\2\2\u02af\u02b0\7p\2\2\u02b0\u02b1") + buf.write("\7v\2\2\u02b1\u02b2\7j\2\2\u02b2\u02b3\7v\2\2\u02b3\u02b4") + buf.write("\7q\2\2\u02b4\u02b5\7f\2\2\u02b5\u02b6\7c\2\2\u02b6\u02b7") + buf.write("\7{\2\2\u02b7P\3\2\2\2\u02b8\u02b9\7q\2\2\u02b9\u02ba") + buf.write("\7p\2\2\u02baR\3\2\2\2\u02bb\u02bc\7f\2\2\u02bc\u02bd") + buf.write("\7t\2\2\u02bd\u02be\7q\2\2\u02be\u02bf\7r\2\2\u02bfT\3") + buf.write("\2\2\2\u02c0\u02c1\7m\2\2\u02c1\u02c2\7g\2\2\u02c2\u02c3") + buf.write("\7g\2\2\u02c3\u02c4\7r\2\2\u02c4V\3\2\2\2\u02c5\u02c6") + buf.write("\7e\2\2\u02c6\u02c7\7c\2\2\u02c7\u02c8\7n\2\2\u02c8\u02c9") + buf.write("\7e\2\2\u02c9X\3\2\2\2\u02ca\u02cb\7c\2\2\u02cb\u02cc") + buf.write("\7v\2\2\u02cc\u02cd\7v\2\2\u02cd\u02ce\7t\2\2\u02ce\u02cf") + buf.write("\7e\2\2\u02cf\u02d0\7c\2\2\u02d0\u02d1\7n\2\2\u02d1\u02d2") + buf.write("\7e\2\2\u02d2Z\3\2\2\2\u02d3\u02d4\7t\2\2\u02d4\u02d5") + buf.write("\7g\2\2\u02d5\u02d6\7p\2\2\u02d6\u02d7\7c\2\2\u02d7\u02d8") + buf.write("\7o\2\2\u02d8\u02d9\7g\2\2\u02d9\\\3\2\2\2\u02da\u02db") + buf.write("\7c\2\2\u02db\u02dc\7u\2\2\u02dc^\3\2\2\2\u02dd\u02de") + 
buf.write("\7c\2\2\u02de\u02df\7p\2\2\u02df\u02e0\7f\2\2\u02e0`\3") + buf.write("\2\2\2\u02e1\u02e2\7q\2\2\u02e2\u02e3\7t\2\2\u02e3b\3") + buf.write("\2\2\2\u02e4\u02e5\7z\2\2\u02e5\u02e6\7q\2\2\u02e6\u02e7") + buf.write("\7t\2\2\u02e7d\3\2\2\2\u02e8\u02e9\7p\2\2\u02e9\u02ea") + buf.write("\7q\2\2\u02ea\u02eb\7v\2\2\u02ebf\3\2\2\2\u02ec\u02ed") + buf.write("\7d\2\2\u02ed\u02ee\7g\2\2\u02ee\u02ef\7v\2\2\u02ef\u02f0") + buf.write("\7y\2\2\u02f0\u02f1\7g\2\2\u02f1\u02f2\7g\2\2\u02f2\u02f3") + buf.write("\7p\2\2\u02f3h\3\2\2\2\u02f4\u02f5\7k\2\2\u02f5\u02f6") + buf.write("\7p\2\2\u02f6j\3\2\2\2\u02f7\u02f8\7p\2\2\u02f8\u02f9") + buf.write("\7q\2\2\u02f9\u02fa\7v\2\2\u02fa\u02fb\7a\2\2\u02fb\u02fc") + buf.write("\7k\2\2\u02fc\u02fd\7p\2\2\u02fdl\3\2\2\2\u02fe\u02ff") + buf.write("\7p\2\2\u02ff\u0300\7w\2\2\u0300\u0301\7n\2\2\u0301\u0302") + buf.write("\7n\2\2\u0302n\3\2\2\2\u0303\u0304\7k\2\2\u0304\u0305") + buf.write("\7u\2\2\u0305\u0306\7p\2\2\u0306\u0307\7w\2\2\u0307\u0308") + buf.write("\7n\2\2\u0308\u0309\7n\2\2\u0309p\3\2\2\2\u030a\u030b") + buf.write("\7g\2\2\u030b\u030c\7z\2\2\u030cr\3\2\2\2\u030d\u030e") + buf.write("\7w\2\2\u030e\u030f\7p\2\2\u030f\u0310\7k\2\2\u0310\u0311") + buf.write("\7q\2\2\u0311\u0312\7p\2\2\u0312t\3\2\2\2\u0313\u0314") + buf.write("\7f\2\2\u0314\u0315\7k\2\2\u0315\u0316\7h\2\2\u0316\u0317") + buf.write("\7h\2\2\u0317v\3\2\2\2\u0318\u0319\7u\2\2\u0319\u031a") + buf.write("\7{\2\2\u031a\u031b\7o\2\2\u031b\u031c\7f\2\2\u031c\u031d") + buf.write("\7k\2\2\u031d\u031e\7h\2\2\u031e\u031f\7h\2\2\u031fx\3") + buf.write("\2\2\2\u0320\u0321\7k\2\2\u0321\u0322\7p\2\2\u0322\u0323") + buf.write("\7v\2\2\u0323\u0324\7g\2\2\u0324\u0325\7t\2\2\u0325\u0326") + buf.write("\7u\2\2\u0326\u0327\7g\2\2\u0327\u0328\7e\2\2\u0328\u0329") + buf.write("\7v\2\2\u0329z\3\2\2\2\u032a\u032b\7t\2\2\u032b\u032c") + buf.write("\7c\2\2\u032c\u032d\7p\2\2\u032d\u032e\7f\2\2\u032e\u032f") + buf.write("\7q\2\2\u032f\u0330\7o\2\2\u0330|\3\2\2\2\u0331\u0332") + 
buf.write("\7m\2\2\u0332\u0333\7g\2\2\u0333\u0334\7{\2\2\u0334\u0335") + buf.write("\7u\2\2\u0335~\3\2\2\2\u0336\u0337\7k\2\2\u0337\u0338") + buf.write("\7p\2\2\u0338\u0339\7v\2\2\u0339\u033a\7{\2\2\u033a\u033b") + buf.write("\7g\2\2\u033b\u033c\7c\2\2\u033c\u033d\7t\2\2\u033d\u0080") + buf.write("\3\2\2\2\u033e\u033f\7k\2\2\u033f\u0340\7p\2\2\u0340\u0341") + buf.write("\7v\2\2\u0341\u0342\7o\2\2\u0342\u0343\7q\2\2\u0343\u0344") + buf.write("\7p\2\2\u0344\u0345\7v\2\2\u0345\u0346\7j\2\2\u0346\u0082") + buf.write("\3\2\2\2\u0347\u0348\7k\2\2\u0348\u0349\7p\2\2\u0349\u034a") + buf.write("\7v\2\2\u034a\u034b\7f\2\2\u034b\u034c\7c\2\2\u034c\u034d") + buf.write("\7{\2\2\u034d\u0084\3\2\2\2\u034e\u034f\7e\2\2\u034f\u0350") + buf.write("\7j\2\2\u0350\u0351\7g\2\2\u0351\u0352\7e\2\2\u0352\u0353") + buf.write("\7m\2\2\u0353\u0086\3\2\2\2\u0354\u0355\7g\2\2\u0355\u0356") + buf.write("\7z\2\2\u0356\u0357\7k\2\2\u0357\u0358\7u\2\2\u0358\u0359") + buf.write("\7v\2\2\u0359\u035a\7u\2\2\u035a\u035b\7a\2\2\u035b\u035c") + buf.write("\7k\2\2\u035c\u035d\7p\2\2\u035d\u0088\3\2\2\2\u035e\u035f") + buf.write("\7v\2\2\u035f\u0360\7q\2\2\u0360\u008a\3\2\2\2\u0361\u0362") + buf.write("\7t\2\2\u0362\u0363\7g\2\2\u0363\u0364\7v\2\2\u0364\u0365") + buf.write("\7w\2\2\u0365\u0366\7t\2\2\u0366\u0367\7p\2\2\u0367\u008c") + buf.write("\3\2\2\2\u0368\u0369\7k\2\2\u0369\u036a\7o\2\2\u036a\u036b") + buf.write("\7d\2\2\u036b\u036c\7c\2\2\u036c\u036d\7n\2\2\u036d\u036e") + buf.write("\7c\2\2\u036e\u036f\7p\2\2\u036f\u0370\7e\2\2\u0370\u0371") + buf.write("\7g\2\2\u0371\u008e\3\2\2\2\u0372\u0373\7g\2\2\u0373\u0374") + buf.write("\7t\2\2\u0374\u0375\7t\2\2\u0375\u0376\7q\2\2\u0376\u0377") + buf.write("\7t\2\2\u0377\u0378\7e\2\2\u0378\u0379\7q\2\2\u0379\u037a") + buf.write("\7f\2\2\u037a\u037b\7g\2\2\u037b\u0090\3\2\2\2\u037c\u037d") + buf.write("\7c\2\2\u037d\u037e\7n\2\2\u037e\u037f\7n\2\2\u037f\u0092") + buf.write("\3\2\2\2\u0380\u0381\7c\2\2\u0381\u0382\7i\2\2\u0382\u0383") + 
buf.write("\7i\2\2\u0383\u0384\7t\2\2\u0384\u0094\3\2\2\2\u0385\u0386") + buf.write("\7g\2\2\u0386\u0387\7t\2\2\u0387\u0388\7t\2\2\u0388\u0389") + buf.write("\7q\2\2\u0389\u038a\7t\2\2\u038a\u038b\7n\2\2\u038b\u038c") + buf.write("\7g\2\2\u038c\u038d\7x\2\2\u038d\u038e\7g\2\2\u038e\u038f") + buf.write("\7n\2\2\u038f\u0096\3\2\2\2\u0390\u0391\7q\2\2\u0391\u0392") + buf.write("\7t\2\2\u0392\u0393\7f\2\2\u0393\u0394\7g\2\2\u0394\u0395") + buf.write("\7t\2\2\u0395\u0098\3\2\2\2\u0396\u0397\7d\2\2\u0397\u0398") + buf.write("\7{\2\2\u0398\u009a\3\2\2\2\u0399\u039a\7t\2\2\u039a\u039b") + buf.write("\7c\2\2\u039b\u039c\7p\2\2\u039c\u039d\7m\2\2\u039d\u009c") + buf.write("\3\2\2\2\u039e\u039f\7c\2\2\u039f\u03a0\7u\2\2\u03a0\u03a1") + buf.write("\7e\2\2\u03a1\u009e\3\2\2\2\u03a2\u03a3\7f\2\2\u03a3\u03a4") + buf.write("\7g\2\2\u03a4\u03a5\7u\2\2\u03a5\u03a6\7e\2\2\u03a6\u00a0") + buf.write("\3\2\2\2\u03a7\u03a8\7o\2\2\u03a8\u03a9\7k\2\2\u03a9\u03aa") + buf.write("\7p\2\2\u03aa\u00a2\3\2\2\2\u03ab\u03ac\7o\2\2\u03ac\u03ad") + buf.write("\7c\2\2\u03ad\u03ae\7z\2\2\u03ae\u00a4\3\2\2\2\u03af\u03b0") + buf.write("\7h\2\2\u03b0\u03b1\7k\2\2\u03b1\u03b2\7t\2\2\u03b2\u03b3") + buf.write("\7u\2\2\u03b3\u03b4\7v\2\2\u03b4\u00a6\3\2\2\2\u03b5\u03b6") + buf.write("\7n\2\2\u03b6\u03b7\7c\2\2\u03b7\u03b8\7u\2\2\u03b8\u03b9") + buf.write("\7v\2\2\u03b9\u00a8\3\2\2\2\u03ba\u03bb\7k\2\2\u03bb\u03bc") + buf.write("\7p\2\2\u03bc\u03bd\7f\2\2\u03bd\u03be\7g\2\2\u03be\u03bf") + buf.write("\7z\2\2\u03bf\u03c0\7q\2\2\u03c0\u03c1\7h\2\2\u03c1\u00aa") + buf.write("\3\2\2\2\u03c2\u03c3\7c\2\2\u03c3\u03c4\7d\2\2\u03c4\u03c5") + buf.write("\7u\2\2\u03c5\u00ac\3\2\2\2\u03c6\u03c7\7m\2\2\u03c7\u03c8") + buf.write("\7g\2\2\u03c8\u03c9\7{\2\2\u03c9\u00ae\3\2\2\2\u03ca\u03cb") + buf.write("\7n\2\2\u03cb\u03cc\7p\2\2\u03cc\u00b0\3\2\2\2\u03cd\u03ce") + buf.write("\7n\2\2\u03ce\u03cf\7q\2\2\u03cf\u03d0\7i\2\2\u03d0\u00b2") + buf.write("\3\2\2\2\u03d1\u03d2\7v\2\2\u03d2\u03d3\7t\2\2\u03d3\u03d4") + 
buf.write("\7w\2\2\u03d4\u03d5\7p\2\2\u03d5\u03d6\7e\2\2\u03d6\u00b4") + buf.write("\3\2\2\2\u03d7\u03d8\7t\2\2\u03d8\u03d9\7q\2\2\u03d9\u03da") + buf.write("\7w\2\2\u03da\u03db\7p\2\2\u03db\u03dc\7f\2\2\u03dc\u00b6") + buf.write("\3\2\2\2\u03dd\u03de\7r\2\2\u03de\u03df\7q\2\2\u03df\u03e0") + buf.write("\7y\2\2\u03e0\u03e1\7g\2\2\u03e1\u03e2\7t\2\2\u03e2\u00b8") + buf.write("\3\2\2\2\u03e3\u03e4\7o\2\2\u03e4\u03e5\7q\2\2\u03e5\u03e6") + buf.write("\7f\2\2\u03e6\u00ba\3\2\2\2\u03e7\u03e8\7n\2\2\u03e8\u03e9") + buf.write("\7g\2\2\u03e9\u03ea\7p\2\2\u03ea\u03eb\7i\2\2\u03eb\u03ec") + buf.write("\7v\2\2\u03ec\u03ed\7j\2\2\u03ed\u00bc\3\2\2\2\u03ee\u03ef") + buf.write("\7~\2\2\u03ef\u03f0\7~\2\2\u03f0\u00be\3\2\2\2\u03f1\u03f2") + buf.write("\7v\2\2\u03f2\u03f3\7t\2\2\u03f3\u03f4\7k\2\2\u03f4\u03f5") + buf.write("\7o\2\2\u03f5\u00c0\3\2\2\2\u03f6\u03f7\7w\2\2\u03f7\u03f8") + buf.write("\7r\2\2\u03f8\u03f9\7r\2\2\u03f9\u03fa\7g\2\2\u03fa\u03fb") + buf.write("\7t\2\2\u03fb\u00c2\3\2\2\2\u03fc\u03fd\7n\2\2\u03fd\u03fe") + buf.write("\7q\2\2\u03fe\u03ff\7y\2\2\u03ff\u0400\7g\2\2\u0400\u0401") + buf.write("\7t\2\2\u0401\u00c4\3\2\2\2\u0402\u0403\7u\2\2\u0403\u0404") + buf.write("\7w\2\2\u0404\u0405\7d\2\2\u0405\u0406\7u\2\2\u0406\u0407") + buf.write("\7v\2\2\u0407\u0408\7t\2\2\u0408\u00c6\3\2\2\2\u0409\u040a") + buf.write("\7u\2\2\u040a\u040b\7w\2\2\u040b\u040c\7o\2\2\u040c\u00c8") + buf.write("\3\2\2\2\u040d\u040e\7c\2\2\u040e\u040f\7x\2\2\u040f\u0410") + buf.write("\7i\2\2\u0410\u00ca\3\2\2\2\u0411\u0412\7o\2\2\u0412\u0413") + buf.write("\7g\2\2\u0413\u0414\7f\2\2\u0414\u0415\7k\2\2\u0415\u0416") + buf.write("\7c\2\2\u0416\u0417\7p\2\2\u0417\u00cc\3\2\2\2\u0418\u0419") + buf.write("\7e\2\2\u0419\u041a\7q\2\2\u041a\u041b\7w\2\2\u041b\u041c") + buf.write("\7p\2\2\u041c\u041d\7v\2\2\u041d\u00ce\3\2\2\2\u041e\u041f") + buf.write("\7k\2\2\u041f\u0420\7f\2\2\u0420\u0421\7g\2\2\u0421\u0422") + buf.write("\7p\2\2\u0422\u0423\7v\2\2\u0423\u0424\7k\2\2\u0424\u0425") + 
buf.write("\7h\2\2\u0425\u0426\7k\2\2\u0426\u0427\7g\2\2\u0427\u0428") + buf.write("\7t\2\2\u0428\u00d0\3\2\2\2\u0429\u042a\7o\2\2\u042a\u042b") + buf.write("\7g\2\2\u042b\u042c\7c\2\2\u042c\u042d\7u\2\2\u042d\u042e") + buf.write("\7w\2\2\u042e\u042f\7t\2\2\u042f\u0430\7g\2\2\u0430\u00d2") + buf.write("\3\2\2\2\u0431\u0432\7c\2\2\u0432\u0433\7v\2\2\u0433\u0434") + buf.write("\7v\2\2\u0434\u0435\7t\2\2\u0435\u0436\7k\2\2\u0436\u0437") + buf.write("\7d\2\2\u0437\u0438\7w\2\2\u0438\u0439\7v\2\2\u0439\u043a") + buf.write("\7g\2\2\u043a\u00d4\3\2\2\2\u043b\u043c\7h\2\2\u043c\u043d") + buf.write("\7k\2\2\u043d\u043e\7n\2\2\u043e\u043f\7v\2\2\u043f\u0440") + buf.write("\7g\2\2\u0440\u0441\7t\2\2\u0441\u00d6\3\2\2\2\u0442\u0443") + buf.write("\7o\2\2\u0443\u0444\7g\2\2\u0444\u0445\7t\2\2\u0445\u0446") + buf.write("\7i\2\2\u0446\u0447\7g\2\2\u0447\u00d8\3\2\2\2\u0448\u0449") + buf.write("\7g\2\2\u0449\u044a\7z\2\2\u044a\u044b\7r\2\2\u044b\u00da") + buf.write("\3\2\2\2\u044c\u044d\7e\2\2\u044d\u044e\7q\2\2\u044e\u044f") + buf.write("\7o\2\2\u044f\u0450\7r\2\2\u0450\u0451\7q\2\2\u0451\u0452") + buf.write("\7p\2\2\u0452\u0453\7g\2\2\u0453\u0454\7p\2\2\u0454\u0455") + buf.write("\7v\2\2\u0455\u0456\7T\2\2\u0456\u0457\7q\2\2\u0457\u0458") + buf.write("\7n\2\2\u0458\u0459\7g\2\2\u0459\u00dc\3\2\2\2\u045a\u045b") + buf.write("\7x\2\2\u045b\u045c\7k\2\2\u045c\u045d\7t\2\2\u045d\u045e") + buf.write("\7c\2\2\u045e\u045f\7n\2\2\u045f\u00de\3\2\2\2\u0460\u0461") + buf.write("\7o\2\2\u0461\u0462\7c\2\2\u0462\u0463\7v\2\2\u0463\u0464") + buf.write("\7e\2\2\u0464\u0465\7j\2\2\u0465\u0466\7a\2\2\u0466\u0467") + buf.write("\7e\2\2\u0467\u0468\7j\2\2\u0468\u0469\7c\2\2\u0469\u046a") + buf.write("\7t\2\2\u046a\u046b\7c\2\2\u046b\u046c\7e\2\2\u046c\u046d") + buf.write("\7v\2\2\u046d\u046e\7g\2\2\u046e\u046f\7t\2\2\u046f\u0470") + buf.write("\7u\2\2\u0470\u00e0\3\2\2\2\u0471\u0472\7v\2\2\u0472\u0473") + buf.write("\7{\2\2\u0473\u0474\7r\2\2\u0474\u0475\7g\2\2\u0475\u00e2") + 
buf.write("\3\2\2\2\u0476\u0477\7p\2\2\u0477\u0478\7x\2\2\u0478\u0479") + buf.write("\7n\2\2\u0479\u00e4\3\2\2\2\u047a\u047b\7j\2\2\u047b\u047c") + buf.write("\7k\2\2\u047c\u047d\7g\2\2\u047d\u047e\7t\2\2\u047e\u047f") + buf.write("\7c\2\2\u047f\u0480\7t\2\2\u0480\u0481\7e\2\2\u0481\u0482") + buf.write("\7j\2\2\u0482\u0483\7{\2\2\u0483\u00e6\3\2\2\2\u0484\u0485") + buf.write("\7a\2\2\u0485\u00e8\3\2\2\2\u0486\u0487\7k\2\2\u0487\u0488") + buf.write("\7p\2\2\u0488\u0489\7x\2\2\u0489\u048a\7c\2\2\u048a\u048b") + buf.write("\7n\2\2\u048b\u048c\7k\2\2\u048c\u048d\7f\2\2\u048d\u00ea") + buf.write("\3\2\2\2\u048e\u048f\7x\2\2\u048f\u0490\7c\2\2\u0490\u0491") + buf.write("\7n\2\2\u0491\u0492\7w\2\2\u0492\u0493\7g\2\2\u0493\u0494") + buf.write("\7f\2\2\u0494\u0495\7q\2\2\u0495\u0496\7o\2\2\u0496\u0497") + buf.write("\7c\2\2\u0497\u0498\7k\2\2\u0498\u0499\7p\2\2\u0499\u00ec") + buf.write("\3\2\2\2\u049a\u049b\7x\2\2\u049b\u049c\7c\2\2\u049c\u049d") + buf.write("\7t\2\2\u049d\u049e\7k\2\2\u049e\u049f\7c\2\2\u049f\u04a0") + buf.write("\7d\2\2\u04a0\u04a1\7n\2\2\u04a1\u04a2\7g\2\2\u04a2\u00ee") + buf.write("\3\2\2\2\u04a3\u04a4\7f\2\2\u04a4\u04a5\7c\2\2\u04a5\u04a6") + buf.write("\7v\2\2\u04a6\u04a7\7c\2\2\u04a7\u00f0\3\2\2\2\u04a8\u04a9") + buf.write("\7u\2\2\u04a9\u04aa\7v\2\2\u04aa\u04ab\7t\2\2\u04ab\u04ac") + buf.write("\7w\2\2\u04ac\u04ad\7e\2\2\u04ad\u04ae\7v\2\2\u04ae\u04af") + buf.write("\7w\2\2\u04af\u04b0\7t\2\2\u04b0\u04b1\7g\2\2\u04b1\u00f2") + buf.write("\3\2\2\2\u04b2\u04b3\7f\2\2\u04b3\u04b4\7c\2\2\u04b4\u04b5") + buf.write("\7v\2\2\u04b5\u04b6\7c\2\2\u04b6\u04b7\7u\2\2\u04b7\u04b8") + buf.write("\7g\2\2\u04b8\u04b9\7v\2\2\u04b9\u00f4\3\2\2\2\u04ba\u04bb") + buf.write("\7q\2\2\u04bb\u04bc\7r\2\2\u04bc\u04bd\7g\2\2\u04bd\u04be") + buf.write("\7t\2\2\u04be\u04bf\7c\2\2\u04bf\u04c0\7v\2\2\u04c0\u04c1") + buf.write("\7q\2\2\u04c1\u04c2\7t\2\2\u04c2\u00f6\3\2\2\2\u04c3\u04c4") + buf.write("\7f\2\2\u04c4\u04c5\7g\2\2\u04c5\u04c6\7h\2\2\u04c6\u04c7") + 
buf.write("\7k\2\2\u04c7\u04c8\7p\2\2\u04c8\u04c9\7g\2\2\u04c9\u00f8") + buf.write("\3\2\2\2\u04ca\u04cb\7>\2\2\u04cb\u04cc\7/\2\2\u04cc\u00fa") + buf.write("\3\2\2\2\u04cd\u04ce\7f\2\2\u04ce\u04cf\7c\2\2\u04cf\u04d0") + buf.write("\7v\2\2\u04d0\u04d1\7c\2\2\u04d1\u04d2\7r\2\2\u04d2\u04d3") + buf.write("\7q\2\2\u04d3\u04d4\7k\2\2\u04d4\u04d5\7p\2\2\u04d5\u04d6") + buf.write("\7v\2\2\u04d6\u00fc\3\2\2\2\u04d7\u04d8\7j\2\2\u04d8\u04d9") + buf.write("\7k\2\2\u04d9\u04da\7g\2\2\u04da\u04db\7t\2\2\u04db\u04dc") + buf.write("\7c\2\2\u04dc\u04dd\7t\2\2\u04dd\u04de\7e\2\2\u04de\u04df") + buf.write("\7j\2\2\u04df\u04e0\7k\2\2\u04e0\u04e1\7e\2\2\u04e1\u04e2") + buf.write("\7c\2\2\u04e2\u04e3\7n\2\2\u04e3\u00fe\3\2\2\2\u04e4\u04e5") + buf.write("\7t\2\2\u04e5\u04e6\7w\2\2\u04e6\u04e7\7n\2\2\u04e7\u04e8") + buf.write("\7g\2\2\u04e8\u04e9\7u\2\2\u04e9\u04ea\7g\2\2\u04ea\u04eb") + buf.write("\7v\2\2\u04eb\u0100\3\2\2\2\u04ec\u04ed\7t\2\2\u04ed\u04ee") + buf.write("\7w\2\2\u04ee\u04ef\7n\2\2\u04ef\u04f0\7g\2\2\u04f0\u0102") + buf.write("\3\2\2\2\u04f1\u04f2\7g\2\2\u04f2\u04f3\7p\2\2\u04f3\u04f4") + buf.write("\7f\2\2\u04f4\u0104\3\2\2\2\u04f5\u04f6\7c\2\2\u04f6\u04f7") + buf.write("\7n\2\2\u04f7\u04f8\7v\2\2\u04f8\u04f9\7g\2\2\u04f9\u04fa") + buf.write("\7t\2\2\u04fa\u04fb\7F\2\2\u04fb\u04fc\7c\2\2\u04fc\u04fd") + buf.write("\7v\2\2\u04fd\u04fe\7c\2\2\u04fe\u04ff\7u\2\2\u04ff\u0500") + buf.write("\7g\2\2\u0500\u0501\7v\2\2\u0501\u0106\3\2\2\2\u0502\u0503") + buf.write("\7n\2\2\u0503\u0504\7v\2\2\u0504\u0505\7t\2\2\u0505\u0506") + buf.write("\7k\2\2\u0506\u0507\7o\2\2\u0507\u0108\3\2\2\2\u0508\u0509") + buf.write("\7t\2\2\u0509\u050a\7v\2\2\u050a\u050b\7t\2\2\u050b\u050c") + buf.write("\7k\2\2\u050c\u050d\7o\2\2\u050d\u010a\3\2\2\2\u050e\u050f") + buf.write("\7k\2\2\u050f\u0510\7p\2\2\u0510\u0511\7u\2\2\u0511\u0512") + buf.write("\7v\2\2\u0512\u0513\7t\2\2\u0513\u010c\3\2\2\2\u0514\u0515") + buf.write("\7t\2\2\u0515\u0516\7g\2\2\u0516\u0517\7r\2\2\u0517\u0518") + 
buf.write("\7n\2\2\u0518\u0519\7c\2\2\u0519\u051a\7e\2\2\u051a\u051b") + buf.write("\7g\2\2\u051b\u010e\3\2\2\2\u051c\u051d\7e\2\2\u051d\u051e") + buf.write("\7g\2\2\u051e\u051f\7k\2\2\u051f\u0520\7n\2\2\u0520\u0110") + buf.write("\3\2\2\2\u0521\u0522\7h\2\2\u0522\u0523\7n\2\2\u0523\u0524") + buf.write("\7q\2\2\u0524\u0525\7q\2\2\u0525\u0526\7t\2\2\u0526\u0112") + buf.write("\3\2\2\2\u0527\u0528\7u\2\2\u0528\u0529\7s\2\2\u0529\u052a") + buf.write("\7t\2\2\u052a\u052b\7v\2\2\u052b\u0114\3\2\2\2\u052c\u052d") + buf.write("\7c\2\2\u052d\u052e\7p\2\2\u052e\u052f\7{\2\2\u052f\u0116") + buf.write("\3\2\2\2\u0530\u0531\7u\2\2\u0531\u0532\7g\2\2\u0532\u0533") + buf.write("\7v\2\2\u0533\u0534\7f\2\2\u0534\u0535\7k\2\2\u0535\u0536") + buf.write("\7h\2\2\u0536\u0537\7h\2\2\u0537\u0118\3\2\2\2\u0538\u0539") + buf.write("\7u\2\2\u0539\u053a\7v\2\2\u053a\u053b\7f\2\2\u053b\u053c") + buf.write("\7f\2\2\u053c\u053d\7g\2\2\u053d\u053e\7x\2\2\u053e\u053f") + buf.write("\7a\2\2\u053f\u0540\7r\2\2\u0540\u0541\7q\2\2\u0541\u0542") + buf.write("\7r\2\2\u0542\u011a\3\2\2\2\u0543\u0544\7u\2\2\u0544\u0545") + buf.write("\7v\2\2\u0545\u0546\7f\2\2\u0546\u0547\7f\2\2\u0547\u0548") + buf.write("\7g\2\2\u0548\u0549\7x\2\2\u0549\u054a\7a\2\2\u054a\u054b") + buf.write("\7u\2\2\u054b\u054c\7c\2\2\u054c\u054d\7o\2\2\u054d\u054e") + buf.write("\7r\2\2\u054e\u011c\3\2\2\2\u054f\u0550\7x\2\2\u0550\u0551") + buf.write("\7c\2\2\u0551\u0552\7t\2\2\u0552\u0553\7a\2\2\u0553\u0554") + buf.write("\7r\2\2\u0554\u0555\7q\2\2\u0555\u0556\7r\2\2\u0556\u011e") + buf.write("\3\2\2\2\u0557\u0558\7x\2\2\u0558\u0559\7c\2\2\u0559\u055a") + buf.write("\7t\2\2\u055a\u055b\7a\2\2\u055b\u055c\7u\2\2\u055c\u055d") + buf.write("\7c\2\2\u055d\u055e\7o\2\2\u055e\u055f\7r\2\2\u055f\u0120") + buf.write("\3\2\2\2\u0560\u0561\7i\2\2\u0561\u0562\7t\2\2\u0562\u0563") + buf.write("\7q\2\2\u0563\u0564\7w\2\2\u0564\u0565\7r\2\2\u0565\u0122") + buf.write("\3\2\2\2\u0566\u0567\7g\2\2\u0567\u0568\7z\2\2\u0568\u0569") + 
buf.write("\7e\2\2\u0569\u056a\7g\2\2\u056a\u056b\7r\2\2\u056b\u056c") + buf.write("\7v\2\2\u056c\u0124\3\2\2\2\u056d\u056e\7j\2\2\u056e\u056f") + buf.write("\7c\2\2\u056f\u0570\7x\2\2\u0570\u0571\7k\2\2\u0571\u0572") + buf.write("\7p\2\2\u0572\u0573\7i\2\2\u0573\u0126\3\2\2\2\u0574\u0575") + buf.write("\7h\2\2\u0575\u0576\7k\2\2\u0576\u0577\7t\2\2\u0577\u0578") + buf.write("\7u\2\2\u0578\u0579\7v\2\2\u0579\u057a\7a\2\2\u057a\u057b") + buf.write("\7x\2\2\u057b\u057c\7c\2\2\u057c\u057d\7n\2\2\u057d\u057e") + buf.write("\7w\2\2\u057e\u057f\7g\2\2\u057f\u0128\3\2\2\2\u0580\u0581") + buf.write("\7n\2\2\u0581\u0582\7c\2\2\u0582\u0583\7u\2\2\u0583\u0584") + buf.write("\7v\2\2\u0584\u0585\7a\2\2\u0585\u0586\7x\2\2\u0586\u0587") + buf.write("\7c\2\2\u0587\u0588\7n\2\2\u0588\u0589\7w\2\2\u0589\u058a") + buf.write("\7g\2\2\u058a\u012a\3\2\2\2\u058b\u058c\7n\2\2\u058c\u058d") + buf.write("\7c\2\2\u058d\u058e\7i\2\2\u058e\u012c\3\2\2\2\u058f\u0590") + buf.write("\7n\2\2\u0590\u0591\7g\2\2\u0591\u0592\7c\2\2\u0592\u0593") + buf.write("\7f\2\2\u0593\u012e\3\2\2\2\u0594\u0595\7t\2\2\u0595\u0596") + buf.write("\7c\2\2\u0596\u0597\7v\2\2\u0597\u0598\7k\2\2\u0598\u0599") + buf.write("\7q\2\2\u0599\u059a\7a\2\2\u059a\u059b\7v\2\2\u059b\u059c") + buf.write("\7q\2\2\u059c\u059d\7a\2\2\u059d\u059e\7t\2\2\u059e\u059f") + buf.write("\7g\2\2\u059f\u05a0\7r\2\2\u05a0\u05a1\7q\2\2\u05a1\u05a2") + buf.write("\7t\2\2\u05a2\u05a3\7v\2\2\u05a3\u0130\3\2\2\2\u05a4\u05a5") + buf.write("\7q\2\2\u05a5\u05a6\7x\2\2\u05a6\u05a7\7g\2\2\u05a7\u05a8") + buf.write("\7t\2\2\u05a8\u0132\3\2\2\2\u05a9\u05aa\7r\2\2\u05aa\u05ab") + buf.write("\7t\2\2\u05ab\u05ac\7g\2\2\u05ac\u05ad\7e\2\2\u05ad\u05ae") + buf.write("\7g\2\2\u05ae\u05af\7f\2\2\u05af\u05b0\7k\2\2\u05b0\u05b1") + buf.write("\7p\2\2\u05b1\u05b2\7i\2\2\u05b2\u0134\3\2\2\2\u05b3\u05b4") + buf.write("\7h\2\2\u05b4\u05b5\7q\2\2\u05b5\u05b6\7n\2\2\u05b6\u05b7") + buf.write("\7n\2\2\u05b7\u05b8\7q\2\2\u05b8\u05b9\7y\2\2\u05b9\u05ba") + 
buf.write("\7k\2\2\u05ba\u05bb\7p\2\2\u05bb\u05bc\7i\2\2\u05bc\u0136") + buf.write("\3\2\2\2\u05bd\u05be\7w\2\2\u05be\u05bf\7p\2\2\u05bf\u05c0") + buf.write("\7d\2\2\u05c0\u05c1\7q\2\2\u05c1\u05c2\7w\2\2\u05c2\u05c3") + buf.write("\7p\2\2\u05c3\u05c4\7f\2\2\u05c4\u05c5\7g\2\2\u05c5\u05c6") + buf.write("\7f\2\2\u05c6\u0138\3\2\2\2\u05c7\u05c8\7r\2\2\u05c8\u05c9") + buf.write("\7c\2\2\u05c9\u05ca\7t\2\2\u05ca\u05cb\7v\2\2\u05cb\u05cc") + buf.write("\7k\2\2\u05cc\u05cd\7v\2\2\u05cd\u05ce\7k\2\2\u05ce\u05cf") + buf.write("\7q\2\2\u05cf\u05d0\7p\2\2\u05d0\u013a\3\2\2\2\u05d1\u05d2") + buf.write("\7t\2\2\u05d2\u05d3\7q\2\2\u05d3\u05d4\7y\2\2\u05d4\u05d5") + buf.write("\7u\2\2\u05d5\u013c\3\2\2\2\u05d6\u05d7\7t\2\2\u05d7\u05d8") + buf.write("\7c\2\2\u05d8\u05d9\7p\2\2\u05d9\u05da\7i\2\2\u05da\u05db") + buf.write("\7g\2\2\u05db\u013e\3\2\2\2\u05dc\u05dd\7e\2\2\u05dd\u05de") + buf.write("\7w\2\2\u05de\u05df\7t\2\2\u05df\u05e0\7t\2\2\u05e0\u05e1") + buf.write("\7g\2\2\u05e1\u05e2\7p\2\2\u05e2\u05e3\7v\2\2\u05e3\u0140") + buf.write("\3\2\2\2\u05e4\u05e5\7x\2\2\u05e5\u05e6\7c\2\2\u05e6\u05e7") + buf.write("\7n\2\2\u05e7\u05e8\7k\2\2\u05e8\u05e9\7f\2\2\u05e9\u0142") + buf.write("\3\2\2\2\u05ea\u05eb\7h\2\2\u05eb\u05ec\7k\2\2\u05ec\u05ed") + buf.write("\7n\2\2\u05ed\u05ee\7n\2\2\u05ee\u05ef\7a\2\2\u05ef\u05f0") + buf.write("\7v\2\2\u05f0\u05f1\7k\2\2\u05f1\u05f2\7o\2\2\u05f2\u05f3") + buf.write("\7g\2\2\u05f3\u05f4\7a\2\2\u05f4\u05f5\7u\2\2\u05f5\u05f6") + buf.write("\7g\2\2\u05f6\u05f7\7t\2\2\u05f7\u05f8\7k\2\2\u05f8\u05f9") + buf.write("\7g\2\2\u05f9\u05fa\7u\2\2\u05fa\u0144\3\2\2\2\u05fb\u05fc") + buf.write("\7h\2\2\u05fc\u05fd\7n\2\2\u05fd\u05fe\7q\2\2\u05fe\u05ff") + buf.write("\7y\2\2\u05ff\u0600\7a\2\2\u0600\u0601\7v\2\2\u0601\u0602") + buf.write("\7q\2\2\u0602\u0603\7a\2\2\u0603\u0604\7u\2\2\u0604\u0605") buf.write("\7v\2\2\u0605\u0606\7q\2\2\u0606\u0607\7e\2\2\u0607\u0608") - buf.write("\7m\2\2\u0608\u0609\7a\2\2\u0609\u060a\7v\2\2\u060a\u060b") - 
buf.write("\7q\2\2\u060b\u060c\7a\2\2\u060c\u060d\7h\2\2\u060d\u060e") - buf.write("\7n\2\2\u060e\u060f\7q\2\2\u060f\u0610\7y\2\2\u0610\u0148") - buf.write("\3\2\2\2\u0611\u0612\7v\2\2\u0612\u0613\7k\2\2\u0613\u0614") - buf.write("\7o\2\2\u0614\u0615\7g\2\2\u0615\u0616\7u\2\2\u0616\u0617") - buf.write("\7j\2\2\u0617\u0618\7k\2\2\u0618\u0619\7h\2\2\u0619\u061a") - buf.write("\7v\2\2\u061a\u014a\3\2\2\2\u061b\u061c\7o\2\2\u061c\u061d") - buf.write("\7g\2\2\u061d\u061e\7c\2\2\u061e\u061f\7u\2\2\u061f\u0620") - buf.write("\7w\2\2\u0620\u0621\7t\2\2\u0621\u0622\7g\2\2\u0622\u0623") - buf.write("\7u\2\2\u0623\u014c\3\2\2\2\u0624\u0625\7p\2\2\u0625\u0626") - buf.write("\7q\2\2\u0626\u0627\7a\2\2\u0627\u0628\7o\2\2\u0628\u0629") - buf.write("\7g\2\2\u0629\u062a\7c\2\2\u062a\u062b\7u\2\2\u062b\u062c") - buf.write("\7w\2\2\u062c\u062d\7t\2\2\u062d\u062e\7g\2\2\u062e\u062f") - buf.write("\7u\2\2\u062f\u014e\3\2\2\2\u0630\u0631\7e\2\2\u0631\u0632") - buf.write("\7q\2\2\u0632\u0633\7p\2\2\u0633\u0634\7f\2\2\u0634\u0635") - buf.write("\7k\2\2\u0635\u0636\7v\2\2\u0636\u0637\7k\2\2\u0637\u0638") - buf.write("\7q\2\2\u0638\u0639\7p\2\2\u0639\u0150\3\2\2\2\u063a\u063b") - buf.write("\7d\2\2\u063b\u063c\7q\2\2\u063c\u063d\7q\2\2\u063d\u063e") - buf.write("\7n\2\2\u063e\u063f\7g\2\2\u063f\u0640\7c\2\2\u0640\u0641") - buf.write("\7p\2\2\u0641\u0152\3\2\2\2\u0642\u0643\7f\2\2\u0643\u0644") - buf.write("\7c\2\2\u0644\u0645\7v\2\2\u0645\u0646\7g\2\2\u0646\u0154") - buf.write("\3\2\2\2\u0647\u0648\7v\2\2\u0648\u0649\7k\2\2\u0649\u064a") - buf.write("\7o\2\2\u064a\u064b\7g\2\2\u064b\u064c\7a\2\2\u064c\u064d") - buf.write("\7r\2\2\u064d\u064e\7g\2\2\u064e\u064f\7t\2\2\u064f\u0650") - buf.write("\7k\2\2\u0650\u0651\7q\2\2\u0651\u0652\7f\2\2\u0652\u0156") - buf.write("\3\2\2\2\u0653\u0654\7p\2\2\u0654\u0655\7w\2\2\u0655\u0656") - buf.write("\7o\2\2\u0656\u0657\7d\2\2\u0657\u0658\7g\2\2\u0658\u0659") - buf.write("\7t\2\2\u0659\u0158\3\2\2\2\u065a\u065b\7u\2\2\u065b\u065c") - 
buf.write("\7v\2\2\u065c\u065d\7t\2\2\u065d\u065e\7k\2\2\u065e\u065f") - buf.write("\7p\2\2\u065f\u0660\7i\2\2\u0660\u015a\3\2\2\2\u0661\u0662") - buf.write("\7v\2\2\u0662\u0663\7k\2\2\u0663\u0664\7o\2\2\u0664\u0665") - buf.write("\7g\2\2\u0665\u015c\3\2\2\2\u0666\u0667\7k\2\2\u0667\u0668") - buf.write("\7p\2\2\u0668\u0669\7v\2\2\u0669\u066a\7g\2\2\u066a\u066b") - buf.write("\7i\2\2\u066b\u066c\7g\2\2\u066c\u066d\7t\2\2\u066d\u015e") - buf.write("\3\2\2\2\u066e\u066f\7h\2\2\u066f\u0670\7n\2\2\u0670\u0671") - buf.write("\7q\2\2\u0671\u0672\7c\2\2\u0672\u0673\7v\2\2\u0673\u0160") - buf.write("\3\2\2\2\u0674\u0675\7n\2\2\u0675\u0676\7k\2\2\u0676\u0677") - buf.write("\7u\2\2\u0677\u0678\7v\2\2\u0678\u0162\3\2\2\2\u0679\u067a") - buf.write("\7t\2\2\u067a\u067b\7g\2\2\u067b\u067c\7e\2\2\u067c\u067d") - buf.write("\7q\2\2\u067d\u067e\7t\2\2\u067e\u067f\7f\2\2\u067f\u0164") - buf.write("\3\2\2\2\u0680\u0681\7t\2\2\u0681\u0682\7g\2\2\u0682\u0683") - buf.write("\7u\2\2\u0683\u0684\7v\2\2\u0684\u0685\7t\2\2\u0685\u0686") - buf.write("\7k\2\2\u0686\u0687\7e\2\2\u0687\u0688\7v\2\2\u0688\u0166") - buf.write("\3\2\2\2\u0689\u068a\7{\2\2\u068a\u068b\7{\2\2\u068b\u068c") - buf.write("\7{\2\2\u068c\u068d\7{\2\2\u068d\u0168\3\2\2\2\u068e\u068f") - buf.write("\7o\2\2\u068f\u0690\7o\2\2\u0690\u016a\3\2\2\2\u0691\u0692") - buf.write("\7f\2\2\u0692\u0693\7f\2\2\u0693\u016c\3\2\2\2\u0694\u0695") - buf.write("\7o\2\2\u0695\u0696\7c\2\2\u0696\u0697\7z\2\2\u0697\u0698") - buf.write("\7N\2\2\u0698\u0699\7g\2\2\u0699\u069a\7p\2\2\u069a\u069b") - buf.write("\7i\2\2\u069b\u069c\7v\2\2\u069c\u069d\7j\2\2\u069d\u016e") - buf.write("\3\2\2\2\u069e\u069f\7t\2\2\u069f\u06a0\7g\2\2\u06a0\u06a1") - buf.write("\7i\2\2\u06a1\u06a2\7g\2\2\u06a2\u06a3\7z\2\2\u06a3\u06a4") - buf.write("\7r\2\2\u06a4\u0170\3\2\2\2\u06a5\u06a6\7k\2\2\u06a6\u06a7") - buf.write("\7u\2\2\u06a7\u0172\3\2\2\2\u06a8\u06a9\7y\2\2\u06a9\u06aa") - buf.write("\7j\2\2\u06aa\u06ab\7g\2\2\u06ab\u06ac\7p\2\2\u06ac\u0174") - 
buf.write("\3\2\2\2\u06ad\u06ae\7h\2\2\u06ae\u06af\7t\2\2\u06af\u06b0") - buf.write("\7q\2\2\u06b0\u06b1\7o\2\2\u06b1\u0176\3\2\2\2\u06b2\u06b3") - buf.write("\7c\2\2\u06b3\u06b4\7i\2\2\u06b4\u06b5\7i\2\2\u06b5\u06b6") - buf.write("\7t\2\2\u06b6\u06b7\7g\2\2\u06b7\u06b8\7i\2\2\u06b8\u06b9") - buf.write("\7c\2\2\u06b9\u06ba\7v\2\2\u06ba\u06bb\7g\2\2\u06bb\u06bc") - buf.write("\7u\2\2\u06bc\u0178\3\2\2\2\u06bd\u06be\7r\2\2\u06be\u06bf") - buf.write("\7q\2\2\u06bf\u06c0\7k\2\2\u06c0\u06c1\7p\2\2\u06c1\u06c2") - buf.write("\7v\2\2\u06c2\u06c3\7u\2\2\u06c3\u017a\3\2\2\2\u06c4\u06c5") - buf.write("\7r\2\2\u06c5\u06c6\7q\2\2\u06c6\u06c7\7k\2\2\u06c7\u06c8") - buf.write("\7p\2\2\u06c8\u06c9\7v\2\2\u06c9\u017c\3\2\2\2\u06ca\u06cb") - buf.write("\7v\2\2\u06cb\u06cc\7q\2\2\u06cc\u06cd\7v\2\2\u06cd\u06ce") - buf.write("\7c\2\2\u06ce\u06cf\7n\2\2\u06cf\u017e\3\2\2\2\u06d0\u06d1") - buf.write("\7r\2\2\u06d1\u06d2\7c\2\2\u06d2\u06d3\7t\2\2\u06d3\u06d4") - buf.write("\7v\2\2\u06d4\u06d5\7k\2\2\u06d5\u06d6\7c\2\2\u06d6\u06d7") - buf.write("\7n\2\2\u06d7\u0180\3\2\2\2\u06d8\u06d9\7c\2\2\u06d9\u06da") - buf.write("\7n\2\2\u06da\u06db\7y\2\2\u06db\u06dc\7c\2\2\u06dc\u06dd") - buf.write("\7{\2\2\u06dd\u06de\7u\2\2\u06de\u0182\3\2\2\2\u06df\u06e0") - buf.write("\7k\2\2\u06e0\u06e1\7p\2\2\u06e1\u06e2\7p\2\2\u06e2\u06e3") - buf.write("\7g\2\2\u06e3\u06e4\7t\2\2\u06e4\u06e5\7a\2\2\u06e5\u06e6") - buf.write("\7l\2\2\u06e6\u06e7\7q\2\2\u06e7\u06e8\7k\2\2\u06e8\u06e9") - buf.write("\7p\2\2\u06e9\u0184\3\2\2\2\u06ea\u06eb\7n\2\2\u06eb\u06ec") - buf.write("\7g\2\2\u06ec\u06ed\7h\2\2\u06ed\u06ee\7v\2\2\u06ee\u06ef") - buf.write("\7a\2\2\u06ef\u06f0\7l\2\2\u06f0\u06f1\7q\2\2\u06f1\u06f2") - buf.write("\7k\2\2\u06f2\u06f3\7p\2\2\u06f3\u0186\3\2\2\2\u06f4\u06f5") - buf.write("\7e\2\2\u06f5\u06f6\7t\2\2\u06f6\u06f7\7q\2\2\u06f7\u06f8") - buf.write("\7u\2\2\u06f8\u06f9\7u\2\2\u06f9\u06fa\7a\2\2\u06fa\u06fb") - buf.write("\7l\2\2\u06fb\u06fc\7q\2\2\u06fc\u06fd\7k\2\2\u06fd\u06fe") - 
buf.write("\7p\2\2\u06fe\u0188\3\2\2\2\u06ff\u0700\7h\2\2\u0700\u0701") - buf.write("\7w\2\2\u0701\u0702\7n\2\2\u0702\u0703\7n\2\2\u0703\u0704") - buf.write("\7a\2\2\u0704\u0705\7l\2\2\u0705\u0706\7q\2\2\u0706\u0707") - buf.write("\7k\2\2\u0707\u0708\7p\2\2\u0708\u018a\3\2\2\2\u0709\u070a") - buf.write("\7o\2\2\u070a\u070b\7c\2\2\u070b\u070c\7r\2\2\u070c\u070d") - buf.write("\7u\2\2\u070d\u070e\7a\2\2\u070e\u070f\7h\2\2\u070f\u0710") - buf.write("\7t\2\2\u0710\u0711\7q\2\2\u0711\u0712\7o\2\2\u0712\u018c") - buf.write("\3\2\2\2\u0713\u0714\7o\2\2\u0714\u0715\7c\2\2\u0715\u0716") - buf.write("\7r\2\2\u0716\u0717\7u\2\2\u0717\u0718\7a\2\2\u0718\u0719") - buf.write("\7v\2\2\u0719\u071a\7q\2\2\u071a\u018e\3\2\2\2\u071b\u071c") - buf.write("\7o\2\2\u071c\u071d\7c\2\2\u071d\u071e\7r\2\2\u071e\u071f") - buf.write("\7a\2\2\u071f\u0720\7v\2\2\u0720\u0721\7q\2\2\u0721\u0190") - buf.write("\3\2\2\2\u0722\u0723\7o\2\2\u0723\u0724\7c\2\2\u0724\u0725") - buf.write("\7r\2\2\u0725\u0726\7a\2\2\u0726\u0727\7h\2\2\u0727\u0728") - buf.write("\7t\2\2\u0728\u0729\7q\2\2\u0729\u072a\7o\2\2\u072a\u0192") - buf.write("\3\2\2\2\u072b\u072c\7t\2\2\u072c\u072d\7g\2\2\u072d\u072e") - buf.write("\7v\2\2\u072e\u072f\7w\2\2\u072f\u0730\7t\2\2\u0730\u0731") - buf.write("\7p\2\2\u0731\u0732\7u\2\2\u0732\u0194\3\2\2\2\u0733\u0734") - buf.write("\7r\2\2\u0734\u0735\7k\2\2\u0735\u0736\7x\2\2\u0736\u0737") - buf.write("\7q\2\2\u0737\u0738\7v\2\2\u0738\u0196\3\2\2\2\u0739\u073a") - buf.write("\7e\2\2\u073a\u073b\7w\2\2\u073b\u073c\7u\2\2\u073c\u073d") - buf.write("\7v\2\2\u073d\u073e\7q\2\2\u073e\u073f\7o\2\2\u073f\u0740") - buf.write("\7R\2\2\u0740\u0741\7k\2\2\u0741\u0742\7x\2\2\u0742\u0743") - buf.write("\7q\2\2\u0743\u0744\7v\2\2\u0744\u0198\3\2\2\2\u0745\u0746") - buf.write("\7w\2\2\u0746\u0747\7p\2\2\u0747\u0748\7r\2\2\u0748\u0749") - buf.write("\7k\2\2\u0749\u074a\7x\2\2\u074a\u074b\7q\2\2\u074b\u074c") - buf.write("\7v\2\2\u074c\u019a\3\2\2\2\u074d\u074e\7u\2\2\u074e\u074f") - 
buf.write("\7w\2\2\u074f\u0750\7d\2\2\u0750\u019c\3\2\2\2\u0751\u0752") - buf.write("\7c\2\2\u0752\u0753\7r\2\2\u0753\u0754\7r\2\2\u0754\u0755") - buf.write("\7n\2\2\u0755\u0756\7{\2\2\u0756\u019e\3\2\2\2\u0757\u0758") - buf.write("\7e\2\2\u0758\u0759\7q\2\2\u0759\u075a\7p\2\2\u075a\u075b") - buf.write("\7f\2\2\u075b\u075c\7k\2\2\u075c\u075d\7v\2\2\u075d\u075e") - buf.write("\7k\2\2\u075e\u075f\7q\2\2\u075f\u0760\7p\2\2\u0760\u0761") - buf.write("\7g\2\2\u0761\u0762\7f\2\2\u0762\u01a0\3\2\2\2\u0763\u0764") - buf.write("\7r\2\2\u0764\u0765\7g\2\2\u0765\u0766\7t\2\2\u0766\u0767") - buf.write("\7k\2\2\u0767\u0768\7q\2\2\u0768\u0769\7f\2\2\u0769\u076a") - buf.write("\7a\2\2\u076a\u076b\7k\2\2\u076b\u076c\7p\2\2\u076c\u076d") - buf.write("\7f\2\2\u076d\u076e\7k\2\2\u076e\u076f\7e\2\2\u076f\u0770") - buf.write("\7c\2\2\u0770\u0771\7v\2\2\u0771\u0772\7q\2\2\u0772\u0773") - buf.write("\7t\2\2\u0773\u01a2\3\2\2\2\u0774\u0775\7u\2\2\u0775\u0776") - buf.write("\7k\2\2\u0776\u0777\7p\2\2\u0777\u0778\7i\2\2\u0778\u0779") - buf.write("\7n\2\2\u0779\u077a\7g\2\2\u077a\u01a4\3\2\2\2\u077b\u077c") - buf.write("\7f\2\2\u077c\u077d\7w\2\2\u077d\u077e\7t\2\2\u077e\u077f") - buf.write("\7c\2\2\u077f\u0780\7v\2\2\u0780\u0781\7k\2\2\u0781\u0782") - buf.write("\7q\2\2\u0782\u0783\7p\2\2\u0783\u01a6\3\2\2\2\u0784\u0785") - buf.write("\7v\2\2\u0785\u0786\7k\2\2\u0786\u0787\7o\2\2\u0787\u0788") - buf.write("\7g\2\2\u0788\u0789\7a\2\2\u0789\u078a\7c\2\2\u078a\u078b") - buf.write("\7i\2\2\u078b\u078c\7i\2\2\u078c\u01a8\3\2\2\2\u078d\u078e") - buf.write("\7w\2\2\u078e\u078f\7p\2\2\u078f\u0790\7k\2\2\u0790\u0791") - buf.write("\7v\2\2\u0791\u01aa\3\2\2\2\u0792\u0793\7X\2\2\u0793\u0794") - buf.write("\7c\2\2\u0794\u0795\7n\2\2\u0795\u0796\7w\2\2\u0796\u0797") - buf.write("\7g\2\2\u0797\u01ac\3\2\2\2\u0798\u0799\7x\2\2\u0799\u079a") + buf.write("\7m\2\2\u0608\u0146\3\2\2\2\u0609\u060a\7u\2\2\u060a\u060b") + buf.write("\7v\2\2\u060b\u060c\7q\2\2\u060c\u060d\7e\2\2\u060d\u060e") + 
buf.write("\7m\2\2\u060e\u060f\7a\2\2\u060f\u0610\7v\2\2\u0610\u0611") + buf.write("\7q\2\2\u0611\u0612\7a\2\2\u0612\u0613\7h\2\2\u0613\u0614") + buf.write("\7n\2\2\u0614\u0615\7q\2\2\u0615\u0616\7y\2\2\u0616\u0148") + buf.write("\3\2\2\2\u0617\u0618\7v\2\2\u0618\u0619\7k\2\2\u0619\u061a") + buf.write("\7o\2\2\u061a\u061b\7g\2\2\u061b\u061c\7u\2\2\u061c\u061d") + buf.write("\7j\2\2\u061d\u061e\7k\2\2\u061e\u061f\7h\2\2\u061f\u0620") + buf.write("\7v\2\2\u0620\u014a\3\2\2\2\u0621\u0622\7o\2\2\u0622\u0623") + buf.write("\7g\2\2\u0623\u0624\7c\2\2\u0624\u0625\7u\2\2\u0625\u0626") + buf.write("\7w\2\2\u0626\u0627\7t\2\2\u0627\u0628\7g\2\2\u0628\u0629") + buf.write("\7u\2\2\u0629\u014c\3\2\2\2\u062a\u062b\7p\2\2\u062b\u062c") + buf.write("\7q\2\2\u062c\u062d\7a\2\2\u062d\u062e\7o\2\2\u062e\u062f") + buf.write("\7g\2\2\u062f\u0630\7c\2\2\u0630\u0631\7u\2\2\u0631\u0632") + buf.write("\7w\2\2\u0632\u0633\7t\2\2\u0633\u0634\7g\2\2\u0634\u0635") + buf.write("\7u\2\2\u0635\u014e\3\2\2\2\u0636\u0637\7e\2\2\u0637\u0638") + buf.write("\7q\2\2\u0638\u0639\7p\2\2\u0639\u063a\7f\2\2\u063a\u063b") + buf.write("\7k\2\2\u063b\u063c\7v\2\2\u063c\u063d\7k\2\2\u063d\u063e") + buf.write("\7q\2\2\u063e\u063f\7p\2\2\u063f\u0150\3\2\2\2\u0640\u0641") + buf.write("\7d\2\2\u0641\u0642\7q\2\2\u0642\u0643\7q\2\2\u0643\u0644") + buf.write("\7n\2\2\u0644\u0645\7g\2\2\u0645\u0646\7c\2\2\u0646\u0647") + buf.write("\7p\2\2\u0647\u0152\3\2\2\2\u0648\u0649\7f\2\2\u0649\u064a") + buf.write("\7c\2\2\u064a\u064b\7v\2\2\u064b\u064c\7g\2\2\u064c\u0154") + buf.write("\3\2\2\2\u064d\u064e\7v\2\2\u064e\u064f\7k\2\2\u064f\u0650") + buf.write("\7o\2\2\u0650\u0651\7g\2\2\u0651\u0652\7a\2\2\u0652\u0653") + buf.write("\7r\2\2\u0653\u0654\7g\2\2\u0654\u0655\7t\2\2\u0655\u0656") + buf.write("\7k\2\2\u0656\u0657\7q\2\2\u0657\u0658\7f\2\2\u0658\u0156") + buf.write("\3\2\2\2\u0659\u065a\7p\2\2\u065a\u065b\7w\2\2\u065b\u065c") + buf.write("\7o\2\2\u065c\u065d\7d\2\2\u065d\u065e\7g\2\2\u065e\u065f") + 
buf.write("\7t\2\2\u065f\u0158\3\2\2\2\u0660\u0661\7u\2\2\u0661\u0662") + buf.write("\7v\2\2\u0662\u0663\7t\2\2\u0663\u0664\7k\2\2\u0664\u0665") + buf.write("\7p\2\2\u0665\u0666\7i\2\2\u0666\u015a\3\2\2\2\u0667\u0668") + buf.write("\7v\2\2\u0668\u0669\7k\2\2\u0669\u066a\7o\2\2\u066a\u066b") + buf.write("\7g\2\2\u066b\u015c\3\2\2\2\u066c\u066d\7k\2\2\u066d\u066e") + buf.write("\7p\2\2\u066e\u066f\7v\2\2\u066f\u0670\7g\2\2\u0670\u0671") + buf.write("\7i\2\2\u0671\u0672\7g\2\2\u0672\u0673\7t\2\2\u0673\u015e") + buf.write("\3\2\2\2\u0674\u0675\7h\2\2\u0675\u0676\7n\2\2\u0676\u0677") + buf.write("\7q\2\2\u0677\u0678\7c\2\2\u0678\u0679\7v\2\2\u0679\u0160") + buf.write("\3\2\2\2\u067a\u067b\7n\2\2\u067b\u067c\7k\2\2\u067c\u067d") + buf.write("\7u\2\2\u067d\u067e\7v\2\2\u067e\u0162\3\2\2\2\u067f\u0680") + buf.write("\7t\2\2\u0680\u0681\7g\2\2\u0681\u0682\7e\2\2\u0682\u0683") + buf.write("\7q\2\2\u0683\u0684\7t\2\2\u0684\u0685\7f\2\2\u0685\u0164") + buf.write("\3\2\2\2\u0686\u0687\7t\2\2\u0687\u0688\7g\2\2\u0688\u0689") + buf.write("\7u\2\2\u0689\u068a\7v\2\2\u068a\u068b\7t\2\2\u068b\u068c") + buf.write("\7k\2\2\u068c\u068d\7e\2\2\u068d\u068e\7v\2\2\u068e\u0166") + buf.write("\3\2\2\2\u068f\u0690\7{\2\2\u0690\u0691\7{\2\2\u0691\u0692") + buf.write("\7{\2\2\u0692\u0693\7{\2\2\u0693\u0168\3\2\2\2\u0694\u0695") + buf.write("\7o\2\2\u0695\u0696\7o\2\2\u0696\u016a\3\2\2\2\u0697\u0698") + buf.write("\7f\2\2\u0698\u0699\7f\2\2\u0699\u016c\3\2\2\2\u069a\u069b") + buf.write("\7o\2\2\u069b\u069c\7c\2\2\u069c\u069d\7z\2\2\u069d\u069e") + buf.write("\7N\2\2\u069e\u069f\7g\2\2\u069f\u06a0\7p\2\2\u06a0\u06a1") + buf.write("\7i\2\2\u06a1\u06a2\7v\2\2\u06a2\u06a3\7j\2\2\u06a3\u016e") + buf.write("\3\2\2\2\u06a4\u06a5\7t\2\2\u06a5\u06a6\7g\2\2\u06a6\u06a7") + buf.write("\7i\2\2\u06a7\u06a8\7g\2\2\u06a8\u06a9\7z\2\2\u06a9\u06aa") + buf.write("\7r\2\2\u06aa\u0170\3\2\2\2\u06ab\u06ac\7k\2\2\u06ac\u06ad") + buf.write("\7u\2\2\u06ad\u0172\3\2\2\2\u06ae\u06af\7y\2\2\u06af\u06b0") + 
buf.write("\7j\2\2\u06b0\u06b1\7g\2\2\u06b1\u06b2\7p\2\2\u06b2\u0174") + buf.write("\3\2\2\2\u06b3\u06b4\7h\2\2\u06b4\u06b5\7t\2\2\u06b5\u06b6") + buf.write("\7q\2\2\u06b6\u06b7\7o\2\2\u06b7\u0176\3\2\2\2\u06b8\u06b9") + buf.write("\7c\2\2\u06b9\u06ba\7i\2\2\u06ba\u06bb\7i\2\2\u06bb\u06bc") + buf.write("\7t\2\2\u06bc\u06bd\7g\2\2\u06bd\u06be\7i\2\2\u06be\u06bf") + buf.write("\7c\2\2\u06bf\u06c0\7v\2\2\u06c0\u06c1\7g\2\2\u06c1\u06c2") + buf.write("\7u\2\2\u06c2\u0178\3\2\2\2\u06c3\u06c4\7r\2\2\u06c4\u06c5") + buf.write("\7q\2\2\u06c5\u06c6\7k\2\2\u06c6\u06c7\7p\2\2\u06c7\u06c8") + buf.write("\7v\2\2\u06c8\u06c9\7u\2\2\u06c9\u017a\3\2\2\2\u06ca\u06cb") + buf.write("\7r\2\2\u06cb\u06cc\7q\2\2\u06cc\u06cd\7k\2\2\u06cd\u06ce") + buf.write("\7p\2\2\u06ce\u06cf\7v\2\2\u06cf\u017c\3\2\2\2\u06d0\u06d1") + buf.write("\7v\2\2\u06d1\u06d2\7q\2\2\u06d2\u06d3\7v\2\2\u06d3\u06d4") + buf.write("\7c\2\2\u06d4\u06d5\7n\2\2\u06d5\u017e\3\2\2\2\u06d6\u06d7") + buf.write("\7r\2\2\u06d7\u06d8\7c\2\2\u06d8\u06d9\7t\2\2\u06d9\u06da") + buf.write("\7v\2\2\u06da\u06db\7k\2\2\u06db\u06dc\7c\2\2\u06dc\u06dd") + buf.write("\7n\2\2\u06dd\u0180\3\2\2\2\u06de\u06df\7c\2\2\u06df\u06e0") + buf.write("\7n\2\2\u06e0\u06e1\7y\2\2\u06e1\u06e2\7c\2\2\u06e2\u06e3") + buf.write("\7{\2\2\u06e3\u06e4\7u\2\2\u06e4\u0182\3\2\2\2\u06e5\u06e6") + buf.write("\7k\2\2\u06e6\u06e7\7p\2\2\u06e7\u06e8\7p\2\2\u06e8\u06e9") + buf.write("\7g\2\2\u06e9\u06ea\7t\2\2\u06ea\u06eb\7a\2\2\u06eb\u06ec") + buf.write("\7l\2\2\u06ec\u06ed\7q\2\2\u06ed\u06ee\7k\2\2\u06ee\u06ef") + buf.write("\7p\2\2\u06ef\u0184\3\2\2\2\u06f0\u06f1\7n\2\2\u06f1\u06f2") + buf.write("\7g\2\2\u06f2\u06f3\7h\2\2\u06f3\u06f4\7v\2\2\u06f4\u06f5") + buf.write("\7a\2\2\u06f5\u06f6\7l\2\2\u06f6\u06f7\7q\2\2\u06f7\u06f8") + buf.write("\7k\2\2\u06f8\u06f9\7p\2\2\u06f9\u0186\3\2\2\2\u06fa\u06fb") + buf.write("\7e\2\2\u06fb\u06fc\7t\2\2\u06fc\u06fd\7q\2\2\u06fd\u06fe") + buf.write("\7u\2\2\u06fe\u06ff\7u\2\2\u06ff\u0700\7a\2\2\u0700\u0701") + 
buf.write("\7l\2\2\u0701\u0702\7q\2\2\u0702\u0703\7k\2\2\u0703\u0704") + buf.write("\7p\2\2\u0704\u0188\3\2\2\2\u0705\u0706\7h\2\2\u0706\u0707") + buf.write("\7w\2\2\u0707\u0708\7n\2\2\u0708\u0709\7n\2\2\u0709\u070a") + buf.write("\7a\2\2\u070a\u070b\7l\2\2\u070b\u070c\7q\2\2\u070c\u070d") + buf.write("\7k\2\2\u070d\u070e\7p\2\2\u070e\u018a\3\2\2\2\u070f\u0710") + buf.write("\7o\2\2\u0710\u0711\7c\2\2\u0711\u0712\7r\2\2\u0712\u0713") + buf.write("\7u\2\2\u0713\u0714\7a\2\2\u0714\u0715\7h\2\2\u0715\u0716") + buf.write("\7t\2\2\u0716\u0717\7q\2\2\u0717\u0718\7o\2\2\u0718\u018c") + buf.write("\3\2\2\2\u0719\u071a\7o\2\2\u071a\u071b\7c\2\2\u071b\u071c") + buf.write("\7r\2\2\u071c\u071d\7u\2\2\u071d\u071e\7a\2\2\u071e\u071f") + buf.write("\7v\2\2\u071f\u0720\7q\2\2\u0720\u018e\3\2\2\2\u0721\u0722") + buf.write("\7o\2\2\u0722\u0723\7c\2\2\u0723\u0724\7r\2\2\u0724\u0725") + buf.write("\7a\2\2\u0725\u0726\7v\2\2\u0726\u0727\7q\2\2\u0727\u0190") + buf.write("\3\2\2\2\u0728\u0729\7o\2\2\u0729\u072a\7c\2\2\u072a\u072b") + buf.write("\7r\2\2\u072b\u072c\7a\2\2\u072c\u072d\7h\2\2\u072d\u072e") + buf.write("\7t\2\2\u072e\u072f\7q\2\2\u072f\u0730\7o\2\2\u0730\u0192") + buf.write("\3\2\2\2\u0731\u0732\7t\2\2\u0732\u0733\7g\2\2\u0733\u0734") + buf.write("\7v\2\2\u0734\u0735\7w\2\2\u0735\u0736\7t\2\2\u0736\u0737") + buf.write("\7p\2\2\u0737\u0738\7u\2\2\u0738\u0194\3\2\2\2\u0739\u073a") + buf.write("\7r\2\2\u073a\u073b\7k\2\2\u073b\u073c\7x\2\2\u073c\u073d") + buf.write("\7q\2\2\u073d\u073e\7v\2\2\u073e\u0196\3\2\2\2\u073f\u0740") + buf.write("\7e\2\2\u0740\u0741\7w\2\2\u0741\u0742\7u\2\2\u0742\u0743") + buf.write("\7v\2\2\u0743\u0744\7q\2\2\u0744\u0745\7o\2\2\u0745\u0746") + buf.write("\7R\2\2\u0746\u0747\7k\2\2\u0747\u0748\7x\2\2\u0748\u0749") + buf.write("\7q\2\2\u0749\u074a\7v\2\2\u074a\u0198\3\2\2\2\u074b\u074c") + buf.write("\7w\2\2\u074c\u074d\7p\2\2\u074d\u074e\7r\2\2\u074e\u074f") + buf.write("\7k\2\2\u074f\u0750\7x\2\2\u0750\u0751\7q\2\2\u0751\u0752") + 
buf.write("\7v\2\2\u0752\u019a\3\2\2\2\u0753\u0754\7u\2\2\u0754\u0755") + buf.write("\7w\2\2\u0755\u0756\7d\2\2\u0756\u019c\3\2\2\2\u0757\u0758") + buf.write("\7c\2\2\u0758\u0759\7r\2\2\u0759\u075a\7r\2\2\u075a\u075b") + buf.write("\7n\2\2\u075b\u075c\7{\2\2\u075c\u019e\3\2\2\2\u075d\u075e") + buf.write("\7e\2\2\u075e\u075f\7q\2\2\u075f\u0760\7p\2\2\u0760\u0761") + buf.write("\7f\2\2\u0761\u0762\7k\2\2\u0762\u0763\7v\2\2\u0763\u0764") + buf.write("\7k\2\2\u0764\u0765\7q\2\2\u0765\u0766\7p\2\2\u0766\u0767") + buf.write("\7g\2\2\u0767\u0768\7f\2\2\u0768\u01a0\3\2\2\2\u0769\u076a") + buf.write("\7r\2\2\u076a\u076b\7g\2\2\u076b\u076c\7t\2\2\u076c\u076d") + buf.write("\7k\2\2\u076d\u076e\7q\2\2\u076e\u076f\7f\2\2\u076f\u0770") + buf.write("\7a\2\2\u0770\u0771\7k\2\2\u0771\u0772\7p\2\2\u0772\u0773") + buf.write("\7f\2\2\u0773\u0774\7k\2\2\u0774\u0775\7e\2\2\u0775\u0776") + buf.write("\7c\2\2\u0776\u0777\7v\2\2\u0777\u0778\7q\2\2\u0778\u0779") + buf.write("\7t\2\2\u0779\u01a2\3\2\2\2\u077a\u077b\7u\2\2\u077b\u077c") + buf.write("\7k\2\2\u077c\u077d\7p\2\2\u077d\u077e\7i\2\2\u077e\u077f") + buf.write("\7n\2\2\u077f\u0780\7g\2\2\u0780\u01a4\3\2\2\2\u0781\u0782") + buf.write("\7f\2\2\u0782\u0783\7w\2\2\u0783\u0784\7t\2\2\u0784\u0785") + buf.write("\7c\2\2\u0785\u0786\7v\2\2\u0786\u0787\7k\2\2\u0787\u0788") + buf.write("\7q\2\2\u0788\u0789\7p\2\2\u0789\u01a6\3\2\2\2\u078a\u078b") + buf.write("\7v\2\2\u078b\u078c\7k\2\2\u078c\u078d\7o\2\2\u078d\u078e") + buf.write("\7g\2\2\u078e\u078f\7a\2\2\u078f\u0790\7c\2\2\u0790\u0791") + buf.write("\7i\2\2\u0791\u0792\7i\2\2\u0792\u01a8\3\2\2\2\u0793\u0794") + buf.write("\7w\2\2\u0794\u0795\7p\2\2\u0795\u0796\7k\2\2\u0796\u0797") + buf.write("\7v\2\2\u0797\u01aa\3\2\2\2\u0798\u0799\7X\2\2\u0799\u079a") buf.write("\7c\2\2\u079a\u079b\7n\2\2\u079b\u079c\7w\2\2\u079c\u079d") - buf.write("\7g\2\2\u079d\u079e\7f\2\2\u079e\u079f\7q\2\2\u079f\u07a0") - buf.write("\7o\2\2\u07a0\u07a1\7c\2\2\u07a1\u07a2\7k\2\2\u07a2\u07a3") - 
buf.write("\7p\2\2\u07a3\u07a4\7u\2\2\u07a4\u01ae\3\2\2\2\u07a5\u07a6") - buf.write("\7x\2\2\u07a6\u07a7\7c\2\2\u07a7\u07a8\7t\2\2\u07a8\u07a9") - buf.write("\7k\2\2\u07a9\u07aa\7c\2\2\u07aa\u07ab\7d\2\2\u07ab\u07ac") - buf.write("\7n\2\2\u07ac\u07ad\7g\2\2\u07ad\u07ae\7u\2\2\u07ae\u01b0") - buf.write("\3\2\2\2\u07af\u07b0\7k\2\2\u07b0\u07b1\7p\2\2\u07b1\u07b2") - buf.write("\7r\2\2\u07b2\u07b3\7w\2\2\u07b3\u07b4\7v\2\2\u07b4\u01b2") - buf.write("\3\2\2\2\u07b5\u07b6\7q\2\2\u07b6\u07b7\7w\2\2\u07b7\u07b8") - buf.write("\7v\2\2\u07b8\u07b9\7r\2\2\u07b9\u07ba\7w\2\2\u07ba\u07bb") - buf.write("\7v\2\2\u07bb\u01b4\3\2\2\2\u07bc\u07bd\7e\2\2\u07bd\u07be") - buf.write("\7c\2\2\u07be\u07bf\7u\2\2\u07bf\u07c0\7v\2\2\u07c0\u01b6") - buf.write("\3\2\2\2\u07c1\u07c2\7t\2\2\u07c2\u07c3\7w\2\2\u07c3\u07c4") - buf.write("\7n\2\2\u07c4\u07c5\7g\2\2\u07c5\u07c6\7a\2\2\u07c6\u07c7") - buf.write("\7r\2\2\u07c7\u07c8\7t\2\2\u07c8\u07c9\7k\2\2\u07c9\u07ca") - buf.write("\7q\2\2\u07ca\u07cb\7t\2\2\u07cb\u07cc\7k\2\2\u07cc\u07cd") - buf.write("\7v\2\2\u07cd\u07ce\7{\2\2\u07ce\u01b8\3\2\2\2\u07cf\u07d0") - buf.write("\7f\2\2\u07d0\u07d1\7c\2\2\u07d1\u07d2\7v\2\2\u07d2\u07d3") - buf.write("\7c\2\2\u07d3\u07d4\7u\2\2\u07d4\u07d5\7g\2\2\u07d5\u07d6") - buf.write("\7v\2\2\u07d6\u07d7\7a\2\2\u07d7\u07d8\7r\2\2\u07d8\u07d9") - buf.write("\7t\2\2\u07d9\u07da\7k\2\2\u07da\u07db\7q\2\2\u07db\u07dc") - buf.write("\7t\2\2\u07dc\u07dd\7k\2\2\u07dd\u07de\7v\2\2\u07de\u07df") - buf.write("\7{\2\2\u07df\u01ba\3\2\2\2\u07e0\u07e1\7f\2\2\u07e1\u07e2") - buf.write("\7g\2\2\u07e2\u07e3\7h\2\2\u07e3\u07e4\7c\2\2\u07e4\u07e5") - buf.write("\7w\2\2\u07e5\u07e6\7n\2\2\u07e6\u07e7\7v\2\2\u07e7\u01bc") - buf.write("\3\2\2\2\u07e8\u07e9\7e\2\2\u07e9\u07ea\7j\2\2\u07ea\u07eb") - buf.write("\7g\2\2\u07eb\u07ec\7e\2\2\u07ec\u07ed\7m\2\2\u07ed\u07ee") - buf.write("\7a\2\2\u07ee\u07ef\7f\2\2\u07ef\u07f0\7c\2\2\u07f0\u07f1") - buf.write("\7v\2\2\u07f1\u07f2\7c\2\2\u07f2\u07f3\7r\2\2\u07f3\u07f4") - 
buf.write("\7q\2\2\u07f4\u07f5\7k\2\2\u07f5\u07f6\7p\2\2\u07f6\u07f7") - buf.write("\7v\2\2\u07f7\u01be\3\2\2\2\u07f8\u07f9\7e\2\2\u07f9\u07fa") - buf.write("\7j\2\2\u07fa\u07fb\7g\2\2\u07fb\u07fc\7e\2\2\u07fc\u07fd") - buf.write("\7m\2\2\u07fd\u07fe\7a\2\2\u07fe\u07ff\7j\2\2\u07ff\u0800") - buf.write("\7k\2\2\u0800\u0801\7g\2\2\u0801\u0802\7t\2\2\u0802\u0803") - buf.write("\7c\2\2\u0803\u0804\7t\2\2\u0804\u0805\7e\2\2\u0805\u0806") - buf.write("\7j\2\2\u0806\u0807\7{\2\2\u0807\u01c0\3\2\2\2\u0808\u0809") - buf.write("\7e\2\2\u0809\u080a\7q\2\2\u080a\u080b\7o\2\2\u080b\u080c") - buf.write("\7r\2\2\u080c\u080d\7w\2\2\u080d\u080e\7v\2\2\u080e\u080f") - buf.write("\7g\2\2\u080f\u0810\7f\2\2\u0810\u01c2\3\2\2\2\u0811\u0812") - buf.write("\7p\2\2\u0812\u0813\7q\2\2\u0813\u0814\7p\2\2\u0814\u0815") - buf.write("\7a\2\2\u0815\u0816\7p\2\2\u0816\u0817\7w\2\2\u0817\u0818") - buf.write("\7n\2\2\u0818\u0819\7n\2\2\u0819\u01c4\3\2\2\2\u081a\u081b") - buf.write("\7p\2\2\u081b\u081c\7q\2\2\u081c\u081d\7p\2\2\u081d\u081e") - buf.write("\7a\2\2\u081e\u081f\7|\2\2\u081f\u0820\7g\2\2\u0820\u0821") - buf.write("\7t\2\2\u0821\u0822\7q\2\2\u0822\u01c6\3\2\2\2\u0823\u0824") - buf.write("\7r\2\2\u0824\u0825\7c\2\2\u0825\u0826\7t\2\2\u0826\u0827") - buf.write("\7v\2\2\u0827\u0828\7k\2\2\u0828\u0829\7c\2\2\u0829\u082a") - buf.write("\7n\2\2\u082a\u082b\7a\2\2\u082b\u082c\7p\2\2\u082c\u082d") - buf.write("\7w\2\2\u082d\u082e\7n\2\2\u082e\u082f\7n\2\2\u082f\u01c8") - buf.write("\3\2\2\2\u0830\u0831\7r\2\2\u0831\u0832\7c\2\2\u0832\u0833") - buf.write("\7t\2\2\u0833\u0834\7v\2\2\u0834\u0835\7k\2\2\u0835\u0836") - buf.write("\7c\2\2\u0836\u0837\7n\2\2\u0837\u0838\7a\2\2\u0838\u0839") - buf.write("\7|\2\2\u0839\u083a\7g\2\2\u083a\u083b\7t\2\2\u083b\u083c") - buf.write("\7q\2\2\u083c\u01ca\3\2\2\2\u083d\u083e\7c\2\2\u083e\u083f") - buf.write("\7n\2\2\u083f\u0840\7y\2\2\u0840\u0841\7c\2\2\u0841\u0842") - buf.write("\7{\2\2\u0842\u0843\7u\2\2\u0843\u0844\7a\2\2\u0844\u0845") - 
buf.write("\7p\2\2\u0845\u0846\7w\2\2\u0846\u0847\7n\2\2\u0847\u0848") - buf.write("\7n\2\2\u0848\u01cc\3\2\2\2\u0849\u084a\7c\2\2\u084a\u084b") - buf.write("\7n\2\2\u084b\u084c\7y\2\2\u084c\u084d\7c\2\2\u084d\u084e") - buf.write("\7{\2\2\u084e\u084f\7u\2\2\u084f\u0850\7a\2\2\u0850\u0851") - buf.write("\7|\2\2\u0851\u0852\7g\2\2\u0852\u0853\7t\2\2\u0853\u0854") - buf.write("\7q\2\2\u0854\u01ce\3\2\2\2\u0855\u0856\7e\2\2\u0856\u0857") - buf.write("\7q\2\2\u0857\u0858\7o\2\2\u0858\u0859\7r\2\2\u0859\u085a") - buf.write("\7q\2\2\u085a\u085b\7p\2\2\u085b\u085c\7g\2\2\u085c\u085d") - buf.write("\7p\2\2\u085d\u085e\7v\2\2\u085e\u085f\7u\2\2\u085f\u01d0") - buf.write("\3\2\2\2\u0860\u0861\7c\2\2\u0861\u0862\7n\2\2\u0862\u0863") - buf.write("\7n\2\2\u0863\u0864\7a\2\2\u0864\u0865\7o\2\2\u0865\u0866") - buf.write("\7g\2\2\u0866\u0867\7c\2\2\u0867\u0868\7u\2\2\u0868\u0869") - buf.write("\7w\2\2\u0869\u086a\7t\2\2\u086a\u086b\7g\2\2\u086b\u086c") - buf.write("\7u\2\2\u086c\u01d2\3\2\2\2\u086d\u086e\7u\2\2\u086e\u086f") - buf.write("\7e\2\2\u086f\u0870\7c\2\2\u0870\u0871\7n\2\2\u0871\u0872") - buf.write("\7c\2\2\u0872\u0873\7t\2\2\u0873\u01d4\3\2\2\2\u0874\u0875") - buf.write("\7e\2\2\u0875\u0876\7q\2\2\u0876\u0877\7o\2\2\u0877\u0878") - buf.write("\7r\2\2\u0878\u0879\7q\2\2\u0879\u087a\7p\2\2\u087a\u087b") - buf.write("\7g\2\2\u087b\u087c\7p\2\2\u087c\u087d\7v\2\2\u087d\u01d6") - buf.write("\3\2\2\2\u087e\u087f\7f\2\2\u087f\u0880\7c\2\2\u0880\u0881") - buf.write("\7v\2\2\u0881\u0882\7c\2\2\u0882\u0883\7r\2\2\u0883\u0884") - buf.write("\7q\2\2\u0884\u0885\7k\2\2\u0885\u0886\7p\2\2\u0886\u0887") - buf.write("\7v\2\2\u0887\u0888\7a\2\2\u0888\u0889\7q\2\2\u0889\u088a") - buf.write("\7p\2\2\u088a\u088b\7a\2\2\u088b\u088c\7x\2\2\u088c\u088d") - buf.write("\7c\2\2\u088d\u088e\7n\2\2\u088e\u088f\7w\2\2\u088f\u0890") - buf.write("\7g\2\2\u0890\u0891\7f\2\2\u0891\u0892\7q\2\2\u0892\u0893") - buf.write("\7o\2\2\u0893\u0894\7c\2\2\u0894\u0895\7k\2\2\u0895\u0896") - 
buf.write("\7p\2\2\u0896\u0897\7u\2\2\u0897\u01d8\3\2\2\2\u0898\u0899") - buf.write("\7f\2\2\u0899\u089a\7c\2\2\u089a\u089b\7v\2\2\u089b\u089c") - buf.write("\7c\2\2\u089c\u089d\7r\2\2\u089d\u089e\7q\2\2\u089e\u089f") - buf.write("\7k\2\2\u089f\u08a0\7p\2\2\u08a0\u08a1\7v\2\2\u08a1\u08a2") - buf.write("\7a\2\2\u08a2\u08a3\7q\2\2\u08a3\u08a4\7p\2\2\u08a4\u08a5") - buf.write("\7a\2\2\u08a5\u08a6\7x\2\2\u08a6\u08a7\7c\2\2\u08a7\u08a8") - buf.write("\7t\2\2\u08a8\u08a9\7k\2\2\u08a9\u08aa\7c\2\2\u08aa\u08ab") - buf.write("\7d\2\2\u08ab\u08ac\7n\2\2\u08ac\u08ad\7g\2\2\u08ad\u08ae") - buf.write("\7u\2\2\u08ae\u01da\3\2\2\2\u08af\u08b0\7j\2\2\u08b0\u08b1") - buf.write("\7k\2\2\u08b1\u08b2\7g\2\2\u08b2\u08b3\7t\2\2\u08b3\u08b4") - buf.write("\7c\2\2\u08b4\u08b5\7t\2\2\u08b5\u08b6\7e\2\2\u08b6\u08b7") - buf.write("\7j\2\2\u08b7\u08b8\7k\2\2\u08b8\u08b9\7e\2\2\u08b9\u08ba") - buf.write("\7c\2\2\u08ba\u08bb\7n\2\2\u08bb\u08bc\7a\2\2\u08bc\u08bd") - buf.write("\7q\2\2\u08bd\u08be\7p\2\2\u08be\u08bf\7a\2\2\u08bf\u08c0") - buf.write("\7x\2\2\u08c0\u08c1\7c\2\2\u08c1\u08c2\7n\2\2\u08c2\u08c3") - buf.write("\7w\2\2\u08c3\u08c4\7g\2\2\u08c4\u08c5\7f\2\2\u08c5\u08c6") - buf.write("\7q\2\2\u08c6\u08c7\7o\2\2\u08c7\u08c8\7c\2\2\u08c8\u08c9") - buf.write("\7k\2\2\u08c9\u08ca\7p\2\2\u08ca\u08cb\7u\2\2\u08cb\u01dc") - buf.write("\3\2\2\2\u08cc\u08cd\7j\2\2\u08cd\u08ce\7k\2\2\u08ce\u08cf") - buf.write("\7g\2\2\u08cf\u08d0\7t\2\2\u08d0\u08d1\7c\2\2\u08d1\u08d2") - buf.write("\7t\2\2\u08d2\u08d3\7e\2\2\u08d3\u08d4\7j\2\2\u08d4\u08d5") - buf.write("\7k\2\2\u08d5\u08d6\7e\2\2\u08d6\u08d7\7c\2\2\u08d7\u08d8") - buf.write("\7n\2\2\u08d8\u08d9\7a\2\2\u08d9\u08da\7q\2\2\u08da\u08db") - buf.write("\7p\2\2\u08db\u08dc\7a\2\2\u08dc\u08dd\7x\2\2\u08dd\u08de") - buf.write("\7c\2\2\u08de\u08df\7t\2\2\u08df\u08e0\7k\2\2\u08e0\u08e1") - buf.write("\7c\2\2\u08e1\u08e2\7d\2\2\u08e2\u08e3\7n\2\2\u08e3\u08e4") - buf.write("\7g\2\2\u08e4\u08e5\7u\2\2\u08e5\u01de\3\2\2\2\u08e6\u08e7") - 
buf.write("\7u\2\2\u08e7\u08e8\7g\2\2\u08e8\u08e9\7v\2\2\u08e9\u01e0") - buf.write("\3\2\2\2\u08ea\u08eb\7n\2\2\u08eb\u08ec\7c\2\2\u08ec\u08ed") - buf.write("\7p\2\2\u08ed\u08ee\7i\2\2\u08ee\u08ef\7w\2\2\u08ef\u08f0") - buf.write("\7c\2\2\u08f0\u08f1\7i\2\2\u08f1\u08f2\7g\2\2\u08f2\u01e2") - buf.write("\3\2\2\2\u08f3\u08f4\t\2\2\2\u08f4\u01e4\3\2\2\2\u08f5") - buf.write("\u08f6\4\62;\2\u08f6\u01e6\3\2\2\2\u08f7\u08f9\5\35\17") - buf.write("\2\u08f8\u08f7\3\2\2\2\u08f8\u08f9\3\2\2\2\u08f9\u08fb") - buf.write("\3\2\2\2\u08fa\u08fc\5\u01e5\u00f3\2\u08fb\u08fa\3\2\2") - buf.write("\2\u08fc\u08fd\3\2\2\2\u08fd\u08fb\3\2\2\2\u08fd\u08fe") - buf.write("\3\2\2\2\u08fe\u01e8\3\2\2\2\u08ff\u0900\5\u01e7\u00f4") - buf.write("\2\u0900\u0904\7\60\2\2\u0901\u0903\5\u01e7\u00f4\2\u0902") - buf.write("\u0901\3\2\2\2\u0903\u0906\3\2\2\2\u0904\u0902\3\2\2\2") - buf.write("\u0904\u0905\3\2\2\2\u0905\u01ea\3\2\2\2\u0906\u0904\3") - buf.write("\2\2\2\u0907\u0908\7v\2\2\u0908\u0909\7t\2\2\u0909\u090a") - buf.write("\7w\2\2\u090a\u0911\7g\2\2\u090b\u090c\7h\2\2\u090c\u090d") - buf.write("\7c\2\2\u090d\u090e\7n\2\2\u090e\u090f\7u\2\2\u090f\u0911") - buf.write("\7g\2\2\u0910\u0907\3\2\2\2\u0910\u090b\3\2\2\2\u0911") - buf.write("\u01ec\3\2\2\2\u0912\u0916\7$\2\2\u0913\u0915\n\3\2\2") - buf.write("\u0914\u0913\3\2\2\2\u0915\u0918\3\2\2\2\u0916\u0914\3") - buf.write("\2\2\2\u0916\u0917\3\2\2\2\u0917\u0919\3\2\2\2\u0918\u0916") - buf.write("\3\2\2\2\u0919\u091a\7$\2\2\u091a\u01ee\3\2\2\2\u091b") - buf.write("\u091f\5\u01e3\u00f2\2\u091c\u091e\t\4\2\2\u091d\u091c") - buf.write("\3\2\2\2\u091e\u0921\3\2\2\2\u091f\u091d\3\2\2\2\u091f") - buf.write("\u0920\3\2\2\2\u0920\u0931\3\2\2\2\u0921\u091f\3\2\2\2") - buf.write("\u0922\u0924\5\u01e5\u00f3\2\u0923\u0925\t\4\2\2\u0924") - buf.write("\u0923\3\2\2\2\u0925\u0926\3\2\2\2\u0926\u0924\3\2\2\2") - buf.write("\u0926\u0927\3\2\2\2\u0927\u0931\3\2\2\2\u0928\u092c\7") - buf.write(")\2\2\u0929\u092b\13\2\2\2\u092a\u0929\3\2\2\2\u092b\u092e") - 
buf.write("\3\2\2\2\u092c\u092d\3\2\2\2\u092c\u092a\3\2\2\2\u092d") - buf.write("\u092f\3\2\2\2\u092e\u092c\3\2\2\2\u092f\u0931\7)\2\2") - buf.write("\u0930\u091b\3\2\2\2\u0930\u0922\3\2\2\2\u0930\u0928\3") - buf.write("\2\2\2\u0931\u01f0\3\2\2\2\u0932\u0934\t\5\2\2\u0933\u0932") - buf.write("\3\2\2\2\u0934\u0935\3\2\2\2\u0935\u0933\3\2\2\2\u0935") - buf.write("\u0936\3\2\2\2\u0936\u0937\3\2\2\2\u0937\u0938\b\u00f9") - buf.write("\2\2\u0938\u01f2\3\2\2\2\u0939\u093a\7=\2\2\u093a\u01f4") - buf.write("\3\2\2\2\u093b\u093c\7\61\2\2\u093c\u093d\7,\2\2\u093d") - buf.write("\u0941\3\2\2\2\u093e\u0940\13\2\2\2\u093f\u093e\3\2\2") - buf.write("\2\u0940\u0943\3\2\2\2\u0941\u0942\3\2\2\2\u0941\u093f") - buf.write("\3\2\2\2\u0942\u0944\3\2\2\2\u0943\u0941\3\2\2\2\u0944") - buf.write("\u0945\7,\2\2\u0945\u0946\7\61\2\2\u0946\u0947\3\2\2\2") - buf.write("\u0947\u0948\b\u00fb\3\2\u0948\u01f6\3\2\2\2\u0949\u094a") - buf.write("\7\61\2\2\u094a\u094b\7\61\2\2\u094b\u094f\3\2\2\2\u094c") - buf.write("\u094e\13\2\2\2\u094d\u094c\3\2\2\2\u094e\u0951\3\2\2") - buf.write("\2\u094f\u0950\3\2\2\2\u094f\u094d\3\2\2\2\u0950\u0952") - buf.write("\3\2\2\2\u0951\u094f\3\2\2\2\u0952\u0953\7\f\2\2\u0953") - buf.write("\u0954\3\2\2\2\u0954\u0955\b\u00fc\3\2\u0955\u01f8\3\2") - buf.write("\2\2\17\2\u08f8\u08fd\u0904\u0910\u0916\u091f\u0926\u092c") - buf.write("\u0930\u0935\u0941\u094f\4\2\3\2\2\4\2") + buf.write("\7g\2\2\u079d\u01ac\3\2\2\2\u079e\u079f\7x\2\2\u079f\u07a0") + buf.write("\7c\2\2\u07a0\u07a1\7n\2\2\u07a1\u07a2\7w\2\2\u07a2\u07a3") + buf.write("\7g\2\2\u07a3\u07a4\7f\2\2\u07a4\u07a5\7q\2\2\u07a5\u07a6") + buf.write("\7o\2\2\u07a6\u07a7\7c\2\2\u07a7\u07a8\7k\2\2\u07a8\u07a9") + buf.write("\7p\2\2\u07a9\u07aa\7u\2\2\u07aa\u01ae\3\2\2\2\u07ab\u07ac") + buf.write("\7x\2\2\u07ac\u07ad\7c\2\2\u07ad\u07ae\7t\2\2\u07ae\u07af") + buf.write("\7k\2\2\u07af\u07b0\7c\2\2\u07b0\u07b1\7d\2\2\u07b1\u07b2") + buf.write("\7n\2\2\u07b2\u07b3\7g\2\2\u07b3\u07b4\7u\2\2\u07b4\u01b0") + 
buf.write("\3\2\2\2\u07b5\u07b6\7k\2\2\u07b6\u07b7\7p\2\2\u07b7\u07b8") + buf.write("\7r\2\2\u07b8\u07b9\7w\2\2\u07b9\u07ba\7v\2\2\u07ba\u01b2") + buf.write("\3\2\2\2\u07bb\u07bc\7q\2\2\u07bc\u07bd\7w\2\2\u07bd\u07be") + buf.write("\7v\2\2\u07be\u07bf\7r\2\2\u07bf\u07c0\7w\2\2\u07c0\u07c1") + buf.write("\7v\2\2\u07c1\u01b4\3\2\2\2\u07c2\u07c3\7e\2\2\u07c3\u07c4") + buf.write("\7c\2\2\u07c4\u07c5\7u\2\2\u07c5\u07c6\7v\2\2\u07c6\u01b6") + buf.write("\3\2\2\2\u07c7\u07c8\7t\2\2\u07c8\u07c9\7w\2\2\u07c9\u07ca") + buf.write("\7n\2\2\u07ca\u07cb\7g\2\2\u07cb\u07cc\7a\2\2\u07cc\u07cd") + buf.write("\7r\2\2\u07cd\u07ce\7t\2\2\u07ce\u07cf\7k\2\2\u07cf\u07d0") + buf.write("\7q\2\2\u07d0\u07d1\7t\2\2\u07d1\u07d2\7k\2\2\u07d2\u07d3") + buf.write("\7v\2\2\u07d3\u07d4\7{\2\2\u07d4\u01b8\3\2\2\2\u07d5\u07d6") + buf.write("\7f\2\2\u07d6\u07d7\7c\2\2\u07d7\u07d8\7v\2\2\u07d8\u07d9") + buf.write("\7c\2\2\u07d9\u07da\7u\2\2\u07da\u07db\7g\2\2\u07db\u07dc") + buf.write("\7v\2\2\u07dc\u07dd\7a\2\2\u07dd\u07de\7r\2\2\u07de\u07df") + buf.write("\7t\2\2\u07df\u07e0\7k\2\2\u07e0\u07e1\7q\2\2\u07e1\u07e2") + buf.write("\7t\2\2\u07e2\u07e3\7k\2\2\u07e3\u07e4\7v\2\2\u07e4\u07e5") + buf.write("\7{\2\2\u07e5\u01ba\3\2\2\2\u07e6\u07e7\7f\2\2\u07e7\u07e8") + buf.write("\7g\2\2\u07e8\u07e9\7h\2\2\u07e9\u07ea\7c\2\2\u07ea\u07eb") + buf.write("\7w\2\2\u07eb\u07ec\7n\2\2\u07ec\u07ed\7v\2\2\u07ed\u01bc") + buf.write("\3\2\2\2\u07ee\u07ef\7e\2\2\u07ef\u07f0\7j\2\2\u07f0\u07f1") + buf.write("\7g\2\2\u07f1\u07f2\7e\2\2\u07f2\u07f3\7m\2\2\u07f3\u07f4") + buf.write("\7a\2\2\u07f4\u07f5\7f\2\2\u07f5\u07f6\7c\2\2\u07f6\u07f7") + buf.write("\7v\2\2\u07f7\u07f8\7c\2\2\u07f8\u07f9\7r\2\2\u07f9\u07fa") + buf.write("\7q\2\2\u07fa\u07fb\7k\2\2\u07fb\u07fc\7p\2\2\u07fc\u07fd") + buf.write("\7v\2\2\u07fd\u01be\3\2\2\2\u07fe\u07ff\7e\2\2\u07ff\u0800") + buf.write("\7j\2\2\u0800\u0801\7g\2\2\u0801\u0802\7e\2\2\u0802\u0803") + buf.write("\7m\2\2\u0803\u0804\7a\2\2\u0804\u0805\7j\2\2\u0805\u0806") + 
buf.write("\7k\2\2\u0806\u0807\7g\2\2\u0807\u0808\7t\2\2\u0808\u0809") + buf.write("\7c\2\2\u0809\u080a\7t\2\2\u080a\u080b\7e\2\2\u080b\u080c") + buf.write("\7j\2\2\u080c\u080d\7{\2\2\u080d\u01c0\3\2\2\2\u080e\u080f") + buf.write("\7e\2\2\u080f\u0810\7q\2\2\u0810\u0811\7o\2\2\u0811\u0812") + buf.write("\7r\2\2\u0812\u0813\7w\2\2\u0813\u0814\7v\2\2\u0814\u0815") + buf.write("\7g\2\2\u0815\u0816\7f\2\2\u0816\u01c2\3\2\2\2\u0817\u0818") + buf.write("\7p\2\2\u0818\u0819\7q\2\2\u0819\u081a\7p\2\2\u081a\u081b") + buf.write("\7a\2\2\u081b\u081c\7p\2\2\u081c\u081d\7w\2\2\u081d\u081e") + buf.write("\7n\2\2\u081e\u081f\7n\2\2\u081f\u01c4\3\2\2\2\u0820\u0821") + buf.write("\7p\2\2\u0821\u0822\7q\2\2\u0822\u0823\7p\2\2\u0823\u0824") + buf.write("\7a\2\2\u0824\u0825\7|\2\2\u0825\u0826\7g\2\2\u0826\u0827") + buf.write("\7t\2\2\u0827\u0828\7q\2\2\u0828\u01c6\3\2\2\2\u0829\u082a") + buf.write("\7r\2\2\u082a\u082b\7c\2\2\u082b\u082c\7t\2\2\u082c\u082d") + buf.write("\7v\2\2\u082d\u082e\7k\2\2\u082e\u082f\7c\2\2\u082f\u0830") + buf.write("\7n\2\2\u0830\u0831\7a\2\2\u0831\u0832\7p\2\2\u0832\u0833") + buf.write("\7w\2\2\u0833\u0834\7n\2\2\u0834\u0835\7n\2\2\u0835\u01c8") + buf.write("\3\2\2\2\u0836\u0837\7r\2\2\u0837\u0838\7c\2\2\u0838\u0839") + buf.write("\7t\2\2\u0839\u083a\7v\2\2\u083a\u083b\7k\2\2\u083b\u083c") + buf.write("\7c\2\2\u083c\u083d\7n\2\2\u083d\u083e\7a\2\2\u083e\u083f") + buf.write("\7|\2\2\u083f\u0840\7g\2\2\u0840\u0841\7t\2\2\u0841\u0842") + buf.write("\7q\2\2\u0842\u01ca\3\2\2\2\u0843\u0844\7c\2\2\u0844\u0845") + buf.write("\7n\2\2\u0845\u0846\7y\2\2\u0846\u0847\7c\2\2\u0847\u0848") + buf.write("\7{\2\2\u0848\u0849\7u\2\2\u0849\u084a\7a\2\2\u084a\u084b") + buf.write("\7p\2\2\u084b\u084c\7w\2\2\u084c\u084d\7n\2\2\u084d\u084e") + buf.write("\7n\2\2\u084e\u01cc\3\2\2\2\u084f\u0850\7c\2\2\u0850\u0851") + buf.write("\7n\2\2\u0851\u0852\7y\2\2\u0852\u0853\7c\2\2\u0853\u0854") + buf.write("\7{\2\2\u0854\u0855\7u\2\2\u0855\u0856\7a\2\2\u0856\u0857") + 
buf.write("\7|\2\2\u0857\u0858\7g\2\2\u0858\u0859\7t\2\2\u0859\u085a") + buf.write("\7q\2\2\u085a\u01ce\3\2\2\2\u085b\u085c\7e\2\2\u085c\u085d") + buf.write("\7q\2\2\u085d\u085e\7o\2\2\u085e\u085f\7r\2\2\u085f\u0860") + buf.write("\7q\2\2\u0860\u0861\7p\2\2\u0861\u0862\7g\2\2\u0862\u0863") + buf.write("\7p\2\2\u0863\u0864\7v\2\2\u0864\u0865\7u\2\2\u0865\u01d0") + buf.write("\3\2\2\2\u0866\u0867\7c\2\2\u0867\u0868\7n\2\2\u0868\u0869") + buf.write("\7n\2\2\u0869\u086a\7a\2\2\u086a\u086b\7o\2\2\u086b\u086c") + buf.write("\7g\2\2\u086c\u086d\7c\2\2\u086d\u086e\7u\2\2\u086e\u086f") + buf.write("\7w\2\2\u086f\u0870\7t\2\2\u0870\u0871\7g\2\2\u0871\u0872") + buf.write("\7u\2\2\u0872\u01d2\3\2\2\2\u0873\u0874\7u\2\2\u0874\u0875") + buf.write("\7e\2\2\u0875\u0876\7c\2\2\u0876\u0877\7n\2\2\u0877\u0878") + buf.write("\7c\2\2\u0878\u0879\7t\2\2\u0879\u01d4\3\2\2\2\u087a\u087b") + buf.write("\7e\2\2\u087b\u087c\7q\2\2\u087c\u087d\7o\2\2\u087d\u087e") + buf.write("\7r\2\2\u087e\u087f\7q\2\2\u087f\u0880\7p\2\2\u0880\u0881") + buf.write("\7g\2\2\u0881\u0882\7p\2\2\u0882\u0883\7v\2\2\u0883\u01d6") + buf.write("\3\2\2\2\u0884\u0885\7f\2\2\u0885\u0886\7c\2\2\u0886\u0887") + buf.write("\7v\2\2\u0887\u0888\7c\2\2\u0888\u0889\7r\2\2\u0889\u088a") + buf.write("\7q\2\2\u088a\u088b\7k\2\2\u088b\u088c\7p\2\2\u088c\u088d") + buf.write("\7v\2\2\u088d\u088e\7a\2\2\u088e\u088f\7q\2\2\u088f\u0890") + buf.write("\7p\2\2\u0890\u0891\7a\2\2\u0891\u0892\7x\2\2\u0892\u0893") + buf.write("\7c\2\2\u0893\u0894\7n\2\2\u0894\u0895\7w\2\2\u0895\u0896") + buf.write("\7g\2\2\u0896\u0897\7f\2\2\u0897\u0898\7q\2\2\u0898\u0899") + buf.write("\7o\2\2\u0899\u089a\7c\2\2\u089a\u089b\7k\2\2\u089b\u089c") + buf.write("\7p\2\2\u089c\u089d\7u\2\2\u089d\u01d8\3\2\2\2\u089e\u089f") + buf.write("\7f\2\2\u089f\u08a0\7c\2\2\u08a0\u08a1\7v\2\2\u08a1\u08a2") + buf.write("\7c\2\2\u08a2\u08a3\7r\2\2\u08a3\u08a4\7q\2\2\u08a4\u08a5") + buf.write("\7k\2\2\u08a5\u08a6\7p\2\2\u08a6\u08a7\7v\2\2\u08a7\u08a8") + 
buf.write("\7a\2\2\u08a8\u08a9\7q\2\2\u08a9\u08aa\7p\2\2\u08aa\u08ab") + buf.write("\7a\2\2\u08ab\u08ac\7x\2\2\u08ac\u08ad\7c\2\2\u08ad\u08ae") + buf.write("\7t\2\2\u08ae\u08af\7k\2\2\u08af\u08b0\7c\2\2\u08b0\u08b1") + buf.write("\7d\2\2\u08b1\u08b2\7n\2\2\u08b2\u08b3\7g\2\2\u08b3\u08b4") + buf.write("\7u\2\2\u08b4\u01da\3\2\2\2\u08b5\u08b6\7j\2\2\u08b6\u08b7") + buf.write("\7k\2\2\u08b7\u08b8\7g\2\2\u08b8\u08b9\7t\2\2\u08b9\u08ba") + buf.write("\7c\2\2\u08ba\u08bb\7t\2\2\u08bb\u08bc\7e\2\2\u08bc\u08bd") + buf.write("\7j\2\2\u08bd\u08be\7k\2\2\u08be\u08bf\7e\2\2\u08bf\u08c0") + buf.write("\7c\2\2\u08c0\u08c1\7n\2\2\u08c1\u08c2\7a\2\2\u08c2\u08c3") + buf.write("\7q\2\2\u08c3\u08c4\7p\2\2\u08c4\u08c5\7a\2\2\u08c5\u08c6") + buf.write("\7x\2\2\u08c6\u08c7\7c\2\2\u08c7\u08c8\7n\2\2\u08c8\u08c9") + buf.write("\7w\2\2\u08c9\u08ca\7g\2\2\u08ca\u08cb\7f\2\2\u08cb\u08cc") + buf.write("\7q\2\2\u08cc\u08cd\7o\2\2\u08cd\u08ce\7c\2\2\u08ce\u08cf") + buf.write("\7k\2\2\u08cf\u08d0\7p\2\2\u08d0\u08d1\7u\2\2\u08d1\u01dc") + buf.write("\3\2\2\2\u08d2\u08d3\7j\2\2\u08d3\u08d4\7k\2\2\u08d4\u08d5") + buf.write("\7g\2\2\u08d5\u08d6\7t\2\2\u08d6\u08d7\7c\2\2\u08d7\u08d8") + buf.write("\7t\2\2\u08d8\u08d9\7e\2\2\u08d9\u08da\7j\2\2\u08da\u08db") + buf.write("\7k\2\2\u08db\u08dc\7e\2\2\u08dc\u08dd\7c\2\2\u08dd\u08de") + buf.write("\7n\2\2\u08de\u08df\7a\2\2\u08df\u08e0\7q\2\2\u08e0\u08e1") + buf.write("\7p\2\2\u08e1\u08e2\7a\2\2\u08e2\u08e3\7x\2\2\u08e3\u08e4") + buf.write("\7c\2\2\u08e4\u08e5\7t\2\2\u08e5\u08e6\7k\2\2\u08e6\u08e7") + buf.write("\7c\2\2\u08e7\u08e8\7d\2\2\u08e8\u08e9\7n\2\2\u08e9\u08ea") + buf.write("\7g\2\2\u08ea\u08eb\7u\2\2\u08eb\u01de\3\2\2\2\u08ec\u08ed") + buf.write("\7u\2\2\u08ed\u08ee\7g\2\2\u08ee\u08ef\7v\2\2\u08ef\u01e0") + buf.write("\3\2\2\2\u08f0\u08f1\7n\2\2\u08f1\u08f2\7c\2\2\u08f2\u08f3") + buf.write("\7p\2\2\u08f3\u08f4\7i\2\2\u08f4\u08f5\7w\2\2\u08f5\u08f6") + buf.write("\7c\2\2\u08f6\u08f7\7i\2\2\u08f7\u08f8\7g\2\2\u08f8\u01e2") + 
buf.write("\3\2\2\2\u08f9\u08fa\t\2\2\2\u08fa\u01e4\3\2\2\2\u08fb") + buf.write("\u08fc\4\62;\2\u08fc\u01e6\3\2\2\2\u08fd\u08ff\5\u01e5") + buf.write("\u00f3\2\u08fe\u08fd\3\2\2\2\u08ff\u0900\3\2\2\2\u0900") + buf.write("\u08fe\3\2\2\2\u0900\u0901\3\2\2\2\u0901\u01e8\3\2\2\2") + buf.write("\u0902\u0903\5\u01e7\u00f4\2\u0903\u0904\7\60\2\2\u0904") + buf.write("\u0905\5\u01e7\u00f4\2\u0905\u01ea\3\2\2\2\u0906\u0907") + buf.write("\7v\2\2\u0907\u0908\7t\2\2\u0908\u0909\7w\2\2\u0909\u0910") + buf.write("\7g\2\2\u090a\u090b\7h\2\2\u090b\u090c\7c\2\2\u090c\u090d") + buf.write("\7n\2\2\u090d\u090e\7u\2\2\u090e\u0910\7g\2\2\u090f\u0906") + buf.write("\3\2\2\2\u090f\u090a\3\2\2\2\u0910\u01ec\3\2\2\2\u0911") + buf.write("\u0915\7$\2\2\u0912\u0914\n\3\2\2\u0913\u0912\3\2\2\2") + buf.write("\u0914\u0917\3\2\2\2\u0915\u0913\3\2\2\2\u0915\u0916\3") + buf.write("\2\2\2\u0916\u0918\3\2\2\2\u0917\u0915\3\2\2\2\u0918\u0919") + buf.write("\7$\2\2\u0919\u01ee\3\2\2\2\u091a\u091e\5\u01e3\u00f2") + buf.write("\2\u091b\u091d\t\4\2\2\u091c\u091b\3\2\2\2\u091d\u0920") + buf.write("\3\2\2\2\u091e\u091c\3\2\2\2\u091e\u091f\3\2\2\2\u091f") + buf.write("\u0930\3\2\2\2\u0920\u091e\3\2\2\2\u0921\u0923\5\u01e5") + buf.write("\u00f3\2\u0922\u0924\t\4\2\2\u0923\u0922\3\2\2\2\u0924") + buf.write("\u0925\3\2\2\2\u0925\u0923\3\2\2\2\u0925\u0926\3\2\2\2") + buf.write("\u0926\u0930\3\2\2\2\u0927\u092b\7)\2\2\u0928\u092a\13") + buf.write("\2\2\2\u0929\u0928\3\2\2\2\u092a\u092d\3\2\2\2\u092b\u092c") + buf.write("\3\2\2\2\u092b\u0929\3\2\2\2\u092c\u092e\3\2\2\2\u092d") + buf.write("\u092b\3\2\2\2\u092e\u0930\7)\2\2\u092f\u091a\3\2\2\2") + buf.write("\u092f\u0921\3\2\2\2\u092f\u0927\3\2\2\2\u0930\u01f0\3") + buf.write("\2\2\2\u0931\u0933\t\5\2\2\u0932\u0931\3\2\2\2\u0933\u0934") + buf.write("\3\2\2\2\u0934\u0932\3\2\2\2\u0934\u0935\3\2\2\2\u0935") + buf.write("\u0936\3\2\2\2\u0936\u0937\b\u00f9\2\2\u0937\u01f2\3\2") + buf.write("\2\2\u0938\u0939\7=\2\2\u0939\u01f4\3\2\2\2\u093a\u093b") + 
buf.write("\7\61\2\2\u093b\u093c\7,\2\2\u093c\u0940\3\2\2\2\u093d") + buf.write("\u093f\13\2\2\2\u093e\u093d\3\2\2\2\u093f\u0942\3\2\2") + buf.write("\2\u0940\u0941\3\2\2\2\u0940\u093e\3\2\2\2\u0941\u0943") + buf.write("\3\2\2\2\u0942\u0940\3\2\2\2\u0943\u0944\7,\2\2\u0944") + buf.write("\u0945\7\61\2\2\u0945\u0946\3\2\2\2\u0946\u0947\b\u00fb") + buf.write("\3\2\u0947\u01f6\3\2\2\2\u0948\u0949\7\61\2\2\u0949\u094a") + buf.write("\7\61\2\2\u094a\u094e\3\2\2\2\u094b\u094d\n\6\2\2\u094c") + buf.write("\u094b\3\2\2\2\u094d\u0950\3\2\2\2\u094e\u094c\3\2\2\2") + buf.write("\u094e\u094f\3\2\2\2\u094f\u0951\3\2\2\2\u0950\u094e\3") + buf.write("\2\2\2\u0951\u0952\b\u00fc\3\2\u0952\u01f8\3\2\2\2\r\2") + buf.write("\u0900\u090f\u0915\u091e\u0925\u092b\u092f\u0934\u0940") + buf.write("\u094e\4\2\3\2\2\4\2") return buf.getvalue() @@ -1409,8 +1406,8 @@ class Lexer(ANTLRLexer): "'current_date'", "'datediff'", "'dateadd'", - "'year'", - "'month'", + "'getyear'", + "'getmonth'", "'dayofmonth'", "'dayofyear'", "'daytoyear'", @@ -2132,6 +2129,7 @@ class Lexer(ANTLRLexer): def __init__(self, input=None, output: TextIO = sys.stdout): super().__init__(input, output) + self.checkVersion("4.9.2") self._interp = LexerATNSimulator( self, self.atn, self.decisionsToDFA, PredictionContextCache() ) diff --git a/src/vtlengine/AST/Grammar/parser.py b/src/vtlengine/AST/Grammar/parser.py index 5522db123..ab5a61b9a 100644 --- a/src/vtlengine/AST/Grammar/parser.py +++ b/src/vtlengine/AST/Grammar/parser.py @@ -13,7 +13,7 @@ def serializedATN(): with StringIO() as buf: buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\u00fb") - buf.write("\u073e\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\u076b\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23") buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31") @@ -27,713 +27,716 @@ 
def serializedATN(): buf.write("M\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\tU\4V\t") buf.write("V\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4") buf.write("_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4") - buf.write("h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\3\2\3\2") - buf.write("\3\2\7\2\u00e2\n\2\f\2\16\2\u00e5\13\2\3\2\3\2\3\3\3\3") - buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3\u00f2\n\3\3\4\3\4\3\4") - buf.write("\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3") - buf.write("\4\3\4\3\4\3\4\3\4\6\4\u0109\n\4\r\4\16\4\u010a\3\4\3") - buf.write("\4\3\4\3\4\3\4\5\4\u0112\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3") + buf.write("h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\3") + buf.write("\2\3\2\3\2\7\2\u00e4\n\2\f\2\16\2\u00e7\13\2\3\2\3\2\3") + buf.write("\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\5\3\u00f4\n\3\3\4\3") buf.write("\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4") - buf.write("\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u0130\n\4\7\4\u0132") - buf.write("\n\4\f\4\16\4\u0135\13\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3") + buf.write("\3\4\3\4\3\4\3\4\3\4\3\4\6\4\u010b\n\4\r\4\16\4\u010c") + buf.write("\3\4\3\4\3\4\3\4\3\4\5\4\u0114\n\4\3\4\3\4\3\4\3\4\3\4") + buf.write("\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3") + buf.write("\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u0132\n\4\7") + buf.write("\4\u0134\n\4\f\4\16\4\u0137\13\4\3\5\3\5\3\5\3\5\3\5\3") buf.write("\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5") - buf.write("\6\5\u014c\n\5\r\5\16\5\u014d\3\5\3\5\3\5\3\5\3\5\5\5") - buf.write("\u0155\n\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5") - buf.write("\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5\u016b\n\5\7\5") - buf.write("\u016d\n\5\f\5\16\5\u0170\13\5\3\6\3\6\3\6\3\6\3\6\3\6") - buf.write("\3\6\3\6\5\6\u017a\n\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7") - buf.write("\3\7\3\7\3\7\3\7\5\7\u0188\n\7\3\b\3\b\3\b\3\b\3\b\3\b") - buf.write("\3\b\5\b\u0191\n\b\3\t\3\t\3\t\3\t\7\t\u0197\n\t\f\t\16") - 
buf.write("\t\u019a\13\t\3\n\3\n\3\n\3\n\5\n\u01a0\n\n\5\n\u01a2") - buf.write("\n\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\7\f\u01ab\n\f\f\f") - buf.write("\16\f\u01ae\13\f\3\r\3\r\3\r\3\r\7\r\u01b4\n\r\f\r\16") - buf.write("\r\u01b7\13\r\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17") - buf.write("\3\17\3\17\3\17\3\17\3\17\7\17\u01c6\n\17\f\17\16\17\u01c9") - buf.write("\13\17\3\20\3\20\3\20\3\20\7\20\u01cf\n\20\f\20\16\20") - buf.write("\u01d2\13\20\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3") - buf.write("\21\3\21\3\21\3\21\5\21\u01e0\n\21\3\22\3\22\3\22\3\22") - buf.write("\3\22\3\22\3\22\7\22\u01e9\n\22\f\22\16\22\u01ec\13\22") - buf.write("\5\22\u01ee\n\22\3\22\3\22\3\22\5\22\u01f3\n\22\3\22\3") - buf.write("\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22") + buf.write("\3\5\3\5\6\5\u014e\n\5\r\5\16\5\u014f\3\5\3\5\3\5\3\5") + buf.write("\3\5\5\5\u0157\n\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5") + buf.write("\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5\u016d") + buf.write("\n\5\7\5\u016f\n\5\f\5\16\5\u0172\13\5\3\6\3\6\3\6\3\6") + buf.write("\3\6\3\6\3\6\3\6\5\6\u017c\n\6\3\7\3\7\3\7\3\7\3\7\3\7") + buf.write("\3\7\3\7\3\7\3\7\3\7\3\7\5\7\u018a\n\7\3\b\3\b\3\b\3\b") + buf.write("\3\b\3\b\3\b\5\b\u0193\n\b\3\t\3\t\3\t\3\t\7\t\u0199\n") + buf.write("\t\f\t\16\t\u019c\13\t\3\n\3\n\3\n\3\n\5\n\u01a2\n\n\5") + buf.write("\n\u01a4\n\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\7\f\u01ad") + buf.write("\n\f\f\f\16\f\u01b0\13\f\3\r\3\r\3\r\3\r\7\r\u01b6\n\r") + buf.write("\f\r\16\r\u01b9\13\r\3\16\3\16\3\16\3\16\3\16\3\17\3\17") + buf.write("\3\17\3\17\3\17\3\17\3\17\3\17\7\17\u01c8\n\17\f\17\16") + buf.write("\17\u01cb\13\17\3\20\3\20\3\20\3\20\7\20\u01d1\n\20\f") + buf.write("\20\16\20\u01d4\13\20\3\21\3\21\3\21\3\21\3\21\3\21\3") + buf.write("\21\3\21\3\21\3\21\3\21\3\21\5\21\u01e2\n\21\3\22\3\22") + buf.write("\3\22\3\22\3\22\3\22\3\22\7\22\u01eb\n\22\f\22\16\22\u01ee") + buf.write("\13\22\5\22\u01f0\n\22\3\22\3\22\3\22\5\22\u01f5\n\22") 
buf.write("\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22") - buf.write("\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\5\22\u0214\n") - buf.write("\22\3\23\3\23\3\23\3\23\3\23\7\23\u021b\n\23\f\23\16\23") - buf.write("\u021e\13\23\5\23\u0220\n\23\3\23\3\23\3\23\3\23\3\23") - buf.write("\3\23\3\23\3\23\5\23\u022a\n\23\3\23\3\23\3\23\5\23\u022f") - buf.write("\n\23\7\23\u0231\n\23\f\23\16\23\u0234\13\23\3\23\3\23") - buf.write("\3\23\5\23\u0239\n\23\3\23\3\23\5\23\u023d\n\23\3\23\3") - buf.write("\23\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u0247\n\23\3\23") - buf.write("\3\23\5\23\u024b\n\23\3\23\3\23\5\23\u024f\n\23\3\24\3") - buf.write("\24\3\24\3\24\3\24\7\24\u0256\n\24\f\24\16\24\u0259\13") - buf.write("\24\5\24\u025b\n\24\3\24\3\24\3\24\3\24\3\24\3\24\3\24") - buf.write("\3\24\5\24\u0265\n\24\3\24\3\24\5\24\u0269\n\24\3\24\3") - buf.write("\24\3\24\3\24\3\24\3\24\3\24\3\24\5\24\u0273\n\24\3\24") - buf.write("\3\24\3\24\5\24\u0278\n\24\7\24\u027a\n\24\f\24\16\24") - buf.write("\u027d\13\24\3\24\3\24\3\24\5\24\u0282\n\24\3\24\3\24") - buf.write("\5\24\u0286\n\24\3\24\3\24\5\24\u028a\n\24\3\25\3\25\5") - buf.write("\25\u028e\n\25\3\26\3\26\5\26\u0292\n\26\3\27\3\27\3\27") - buf.write("\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27") - buf.write("\5\27\u02a2\n\27\3\27\3\27\5\27\u02a6\n\27\3\27\3\27\3") - buf.write("\27\3\27\3\27\3\27\3\27\3\27\3\27\5\27\u02b1\n\27\3\27") - buf.write("\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\5\27\u02bc\n") - buf.write("\27\3\27\3\27\5\27\u02c0\n\27\3\27\3\27\5\27\u02c4\n\27") - buf.write("\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30") - buf.write("\3\30\3\30\3\30\5\30\u02d4\n\30\3\30\3\30\5\30\u02d8\n") - buf.write("\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\5\30") - buf.write("\u02e3\n\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3") - buf.write("\30\5\30\u02ee\n\30\3\30\3\30\5\30\u02f2\n\30\3\30\3\30") - buf.write("\5\30\u02f6\n\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3") - 
buf.write("\31\3\31\3\31\5\31\u0302\n\31\3\31\3\31\3\31\3\31\3\31") - buf.write("\3\31\3\31\3\31\3\31\5\31\u030d\n\31\3\32\3\32\3\32\3") - buf.write("\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u0319\n\32\3\32") - buf.write("\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u0324\n") + buf.write("\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22") + buf.write("\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\5\22\u0216") + buf.write("\n\22\3\23\3\23\3\23\3\23\3\23\7\23\u021d\n\23\f\23\16") + buf.write("\23\u0220\13\23\5\23\u0222\n\23\3\23\3\23\3\23\3\23\3") + buf.write("\23\3\23\3\23\3\23\5\23\u022c\n\23\3\23\3\23\3\23\5\23") + buf.write("\u0231\n\23\7\23\u0233\n\23\f\23\16\23\u0236\13\23\3\23") + buf.write("\3\23\3\23\5\23\u023b\n\23\3\23\3\23\5\23\u023f\n\23\3") + buf.write("\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\5\23\u0249\n\23") + buf.write("\3\23\3\23\5\23\u024d\n\23\3\23\3\23\5\23\u0251\n\23\3") + buf.write("\24\3\24\3\24\3\24\3\24\7\24\u0258\n\24\f\24\16\24\u025b") + buf.write("\13\24\5\24\u025d\n\24\3\24\3\24\3\24\3\24\3\24\3\24\3") + buf.write("\24\3\24\5\24\u0267\n\24\3\24\3\24\5\24\u026b\n\24\3\24") + buf.write("\3\24\3\24\3\24\3\24\3\24\3\24\3\24\5\24\u0275\n\24\3") + buf.write("\24\3\24\3\24\5\24\u027a\n\24\7\24\u027c\n\24\f\24\16") + buf.write("\24\u027f\13\24\3\24\3\24\3\24\5\24\u0284\n\24\3\24\3") + buf.write("\24\5\24\u0288\n\24\3\24\3\24\5\24\u028c\n\24\3\25\3\25") + buf.write("\5\25\u0290\n\25\3\26\3\26\5\26\u0294\n\26\3\27\3\27\3") + buf.write("\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27") + buf.write("\3\27\5\27\u02a4\n\27\3\27\3\27\5\27\u02a8\n\27\3\27\3") + buf.write("\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\5\27\u02b3\n\27") + buf.write("\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\5\27\u02be") + buf.write("\n\27\3\27\3\27\5\27\u02c2\n\27\3\27\3\27\5\27\u02c6\n") + buf.write("\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30") + buf.write("\3\30\3\30\3\30\3\30\5\30\u02d6\n\30\3\30\3\30\5\30\u02da") + 
buf.write("\n\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\5\30") + buf.write("\u02e5\n\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3") + buf.write("\30\5\30\u02f0\n\30\3\30\3\30\5\30\u02f4\n\30\3\30\3\30") + buf.write("\5\30\u02f8\n\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3") + buf.write("\31\3\31\3\31\5\31\u0304\n\31\3\31\3\31\3\31\3\31\3\31") + buf.write("\3\31\3\31\3\31\3\31\5\31\u030f\n\31\3\32\3\32\3\32\3") + buf.write("\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u031b\n\32\3\32") + buf.write("\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u0326\n") buf.write("\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33") buf.write("\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33") - buf.write("\3\33\3\33\3\33\3\33\3\33\3\33\3\33\5\33\u0342\n\33\3") - buf.write("\33\3\33\5\33\u0346\n\33\3\34\3\34\3\34\3\34\3\34\3\34") + buf.write("\3\33\3\33\3\33\3\33\3\33\3\33\3\33\5\33\u0344\n\33\3") + buf.write("\33\3\33\5\33\u0348\n\33\3\34\3\34\3\34\3\34\3\34\3\34") buf.write("\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34") - buf.write("\3\34\3\34\3\34\3\34\5\34\u035d\n\34\3\35\3\35\3\35\5") - buf.write("\35\u0362\n\35\3\35\3\35\3\35\3\35\3\35\3\35\5\35\u036a") + buf.write("\3\34\3\34\3\34\3\34\5\34\u035f\n\34\3\35\3\35\3\35\5") + buf.write("\35\u0364\n\35\3\35\3\35\3\35\3\35\3\35\3\35\5\35\u036c") buf.write("\n\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35") - buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\5\35\u037f") - buf.write("\n\35\3\35\3\35\5\35\u0383\n\35\3\35\3\35\5\35\u0387\n") + buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\5\35\u0381") + buf.write("\n\35\3\35\3\35\5\35\u0385\n\35\3\35\3\35\5\35\u0389\n") buf.write("\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35") buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35") buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35") buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35") 
buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35") - buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\5\35\u03c5\n\35\3\36\3") - buf.write("\36\3\36\5\36\u03ca\n\36\3\36\3\36\3\36\3\36\3\36\3\36") - buf.write("\5\36\u03d2\n\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3") + buf.write("\3\35\3\35\3\35\3\35\3\35\3\35\5\35\u03c7\n\35\3\36\3") + buf.write("\36\3\36\5\36\u03cc\n\36\3\36\3\36\3\36\3\36\3\36\3\36") + buf.write("\5\36\u03d4\n\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3") buf.write("\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36") - buf.write("\3\36\5\36\u03e7\n\36\3\36\3\36\5\36\u03eb\n\36\3\36\3") - buf.write("\36\5\36\u03ef\n\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36") + buf.write("\3\36\5\36\u03e9\n\36\3\36\3\36\5\36\u03ed\n\36\3\36\3") + buf.write("\36\5\36\u03f1\n\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36") buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36") buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36") buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36") buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36") - buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u042d") - buf.write("\n\36\3\37\3\37\3\37\3\37\3\37\6\37\u0434\n\37\r\37\16") - buf.write("\37\u0435\3\37\3\37\3\37\3\37\3\37\3\37\3\37\6\37\u043f") - buf.write("\n\37\r\37\16\37\u0440\3\37\3\37\3\37\3\37\3\37\3\37\3") - buf.write("\37\3\37\3\37\5\37\u044c\n\37\3 \3 \3 \3 \3 \3 \5 \u0454") - buf.write("\n \3 \3 \5 \u0458\n \3 \5 \u045b\n \3 \5 \u045e\n \3") - buf.write(" \5 \u0461\n \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\7!\u046e") - buf.write("\n!\f!\16!\u0471\13!\5!\u0473\n!\3!\5!\u0476\n!\3!\3!") - buf.write("\3!\3!\3!\3!\3!\3!\5!\u0480\n!\3!\3!\5!\u0484\n!\3!\5") - buf.write("!\u0487\n!\3!\5!\u048a\n!\3!\5!\u048d\n!\3!\3!\3!\3!\3") - buf.write("!\3!\5!\u0495\n!\3!\5!\u0498\n!\3!\5!\u049b\n!\3!\5!\u049e") - buf.write('\n!\3!\3!\5!\u04a2\n!\3"\3"\3"\3"\3"\3"\3"\3#\3') - 
buf.write("#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3$\5$\u04ba\n$\3") - buf.write("%\3%\3%\3%\3%\5%\u04c1\n%\5%\u04c3\n%\3%\3%\3&\3&\3&\3") - buf.write("&\3&\3&\5&\u04cd\n&\3&\5&\u04d0\n&\3&\5&\u04d3\n&\3&\3") - buf.write("&\3&\3&\3&\3&\3&\3&\3&\3&\5&\u04df\n&\5&\u04e1\n&\3&\3") - buf.write("&\3&\5&\u04e6\n&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3") - buf.write("&\3&\5&\u04f6\n&\3'\3'\3'\3'\3'\3'\5'\u04fe\n'") - buf.write("\3'\5'\u0501\n'\3'\5'\u0504\n'\3'\3'\3'\3'\3") - buf.write("'\3'\3'\3'\3'\5'\u050f\n'\5'\u0511\n'\3'\3'") - buf.write("\3'\5'\u0516\n'\3'\3'\3'\3'\3'\3'\3'\3'\3'") - buf.write("\3'\5'\u0522\n'\3'\3'\3'\3'\3'\3'\3'\3'\3'") - buf.write("\3'\3'\3'\3'\3'\5'\u0532\n'\3(\3(\3(\3(\3)\3)\3") - buf.write(")\7)\u053b\n)\f)\16)\u053e\13)\3*\5*\u0541\n*\3*\3*\3") - buf.write("*\3*\3+\5+\u0548\n+\3+\3+\3+\3+\3,\3,\3,\3,\5,\u0552\n") - buf.write(",\3-\3-\3-\3-\3-\3-\3-\3-\5-\u055c\n-\3-\3-\5-\u0560\n") - buf.write("-\3.\3.\3.\7.\u0565\n.\f.\16.\u0568\13.\3/\3/\3/\7/\u056d") - buf.write("\n/\f/\16/\u0570\13/\3/\3/\3/\3/\7/\u0576\n/\f/\16/\u0579") - buf.write("\13/\5/\u057b\n/\3\60\3\60\3\60\5\60\u0580\n\60\3\61\5") - buf.write("\61\u0583\n\61\3\61\3\61\3\61\5\61\u0588\n\61\3\61\5\61") - buf.write("\u058b\n\61\3\61\5\61\u058e\n\61\3\62\3\62\3\62\3\63\3") - buf.write("\63\3\63\3\63\3\63\7\63\u0598\n\63\f\63\16\63\u059b\13") - buf.write("\63\3\64\3\64\3\64\3\64\3\64\7\64\u05a2\n\64\f\64\16\64") - buf.write("\u05a5\13\64\3\65\3\65\5\65\u05a9\n\65\3\66\3\66\3\66") - buf.write("\5\66\u05ae\n\66\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3") - buf.write("8\38\38\38\38\38\38\38\38\38\38\58\u05c2\n8\39\39\39\3") - buf.write("9\39\79\u05c9\n9\f9\169\u05cc\139\39\39\39\59\u05d1\n") - buf.write("9\3:\3:\3:\3;\3;\3;\3;\5;\u05da\n;\3<\3<\3<\5<\u05df\n") - buf.write("<\3=\3=\5=\u05e3\n=\3>\3>\3>\3>\3>\5>\u05ea\n>\3?\3?\3") - buf.write("?\5?\u05ef\n?\3@\3@\5@\u05f3\n@\3@\5@\u05f6\n@\3@\5@\u05f9") - buf.write("\n@\3@\5@\u05fc\n@\3A\3A\3A\3A\3A\5A\u0603\nA\3B\3B\3") - 
buf.write("B\3B\3B\7B\u060a\nB\fB\16B\u060d\13B\3B\3B\5B\u0611\n") - buf.write("B\3C\3C\5C\u0615\nC\3D\3D\3D\3D\3D\5D\u061c\nD\3E\3E\3") - buf.write("E\3E\3E\3E\7E\u0624\nE\fE\16E\u0627\13E\3E\3E\5E\u062b") - buf.write("\nE\3E\3E\3E\3E\3E\7E\u0632\nE\fE\16E\u0635\13E\3E\3E") - buf.write("\5E\u0639\nE\5E\u063b\nE\3F\3F\3F\3F\3F\3F\3F\3F\7F\u0645") - buf.write("\nF\fF\16F\u0648\13F\3F\3F\5F\u064c\nF\3F\5F\u064f\nF") - buf.write("\3F\3F\3F\3F\3F\3F\3F\7F\u0658\nF\fF\16F\u065b\13F\3F") - buf.write("\3F\5F\u065f\nF\3F\3F\5F\u0663\nF\5F\u0665\nF\3G\3G\3") - buf.write("H\3H\3I\3I\3I\3I\7I\u066f\nI\fI\16I\u0672\13I\3J\3J\3") - buf.write("J\5J\u0677\nJ\3K\3K\3K\7K\u067c\nK\fK\16K\u067f\13K\3") - buf.write("L\3L\5L\u0683\nL\3L\3L\3L\3L\5L\u0689\nL\3L\3L\5L\u068d") - buf.write("\nL\3L\5L\u0690\nL\3M\3M\3M\7M\u0695\nM\fM\16M\u0698\13") - buf.write("M\3N\3N\5N\u069c\nN\3N\3N\5N\u06a0\nN\3N\5N\u06a3\nN\3") - buf.write("O\3O\3O\5O\u06a8\nO\3O\3O\3O\3P\3P\3P\7P\u06b0\nP\fP\16") - buf.write("P\u06b3\13P\3Q\3Q\3Q\3Q\5Q\u06b9\nQ\3Q\3Q\5Q\u06bd\nQ") - buf.write("\3Q\3Q\7Q\u06c1\nQ\fQ\16Q\u06c4\13Q\3R\5R\u06c7\nR\3R") - buf.write("\3R\3R\3R\3R\5R\u06ce\nR\3S\3S\3T\3T\3T\3T\3T\3T\3T\3") - buf.write("T\7T\u06da\nT\fT\16T\u06dd\13T\3T\3T\5T\u06e1\nT\3U\3") - buf.write("U\3U\5U\u06e6\nU\3V\3V\5V\u06ea\nV\3W\3W\3X\3X\3Y\3Y\3") - buf.write("Y\3Y\7Y\u06f4\nY\fY\16Y\u06f7\13Y\3Z\3Z\3[\3[\3[\3\\\3") - buf.write("\\\3]\3]\3^\3^\3_\3_\3`\3`\3a\3a\3a\5a\u070b\na\3b\3b") - buf.write("\3b\3b\7b\u0711\nb\fb\16b\u0714\13b\3b\3b\3c\3c\3c\3d") - buf.write("\3d\3d\3e\3e\3f\3f\5f\u0722\nf\3g\3g\5g\u0726\ng\3h\3") - buf.write("h\3h\3h\3h\5h\u072d\nh\3i\3i\3i\3j\3j\3k\3k\3l\3l\3m\3") - buf.write("m\3n\3n\3o\3o\3o\2\4\6\bp\2\4\6\b\n\f\16\20\22\24\26\30") - buf.write('\32\34\36 "$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`b') - buf.write("dfhjlnprtvxz|~\u0080\u0082\u0084\u0086\u0088\u008a\u008c") - buf.write("\u008e\u0090\u0092\u0094\u0096\u0098\u009a\u009c\u009e") - 
buf.write("\u00a0\u00a2\u00a4\u00a6\u00a8\u00aa\u00ac\u00ae\u00b0") - buf.write("\u00b2\u00b4\u00b6\u00b8\u00ba\u00bc\u00be\u00c0\u00c2") - buf.write("\u00c4\u00c6\u00c8\u00ca\u00cc\u00ce\u00d0\u00d2\u00d4") - buf.write("\u00d6\u00d8\u00da\u00dc\2'\4\2\17\20\64\64\3\2\21\22") - buf.write("\4\2\17\20``\3\2\62\63\3\2\66\67\3\2+,\4\2\u00cc\u00cc") - buf.write("\u00ce\u00ce\3\2\u00c3\u00c4\3\2\u00c5\u00c6\5\2__ac\u0085") - buf.write("\u0086\6\2WWYYnn\u0089\u008b\3\2[\\\5\2??ZZ]^\4\2JJ\u00d3") - buf.write("\u00d3\3\2\u00a4\u00a5\4\2uu\u00f6\u00f6\3\2TU\4\2==\u008d") - buf.write("\u008d\4\2JJvv\5\2RSeh\u008e\u0091\6\2RSeh\u008e\u0091") - buf.write("\u0095\u0096\3\2\u0097\u0098\3\2PQ\4\2NN\u0093\u0093\3") - buf.write("\2wx\3\2\17\20\4\2\u00f3\u00f4\u00f7\u00f7\4\2\17\17\21") - buf.write("\21\5\2JJvv\u00ea\u00ea\3\2\u00e3\u00e8\4\2{{\u00de\u00de") - buf.write("\5\2{{\u0082\u0082\u00dd\u00dd\4\2JJ\u00e2\u00e2\3\2\t") - buf.write("\16\4\288\u00f3\u00f6\5\2\u00aa\u00b0\u00d4\u00d4\u00eb") - buf.write("\u00eb\4\2JJ\u00f5\u00f5\2\u07fa\2\u00e3\3\2\2\2\4\u00f1") - buf.write("\3\2\2\2\6\u0111\3\2\2\2\b\u0154\3\2\2\2\n\u0179\3\2\2") - buf.write("\2\f\u0187\3\2\2\2\16\u0190\3\2\2\2\20\u0192\3\2\2\2\22") - buf.write("\u019b\3\2\2\2\24\u01a3\3\2\2\2\26\u01a6\3\2\2\2\30\u01af") - buf.write("\3\2\2\2\32\u01b8\3\2\2\2\34\u01bd\3\2\2\2\36\u01ca\3") - buf.write('\2\2\2 \u01df\3\2\2\2"\u0213\3\2\2\2$\u024e\3\2\2\2&') - buf.write("\u0289\3\2\2\2(\u028d\3\2\2\2*\u0291\3\2\2\2,\u02c3\3") - buf.write("\2\2\2.\u02f5\3\2\2\2\60\u030c\3\2\2\2\62\u0323\3\2\2") - buf.write("\2\64\u0345\3\2\2\2\66\u035c\3\2\2\28\u03c4\3\2\2\2:\u042c") - buf.write("\3\2\2\2<\u044b\3\2\2\2>\u044d\3\2\2\2@\u04a1\3\2\2\2") - buf.write("B\u04a3\3\2\2\2D\u04aa\3\2\2\2F\u04b9\3\2\2\2H\u04bb\3") - buf.write("\2\2\2J\u04f5\3\2\2\2L\u0531\3\2\2\2N\u0533\3\2\2\2P\u0537") - buf.write("\3\2\2\2R\u0540\3\2\2\2T\u0547\3\2\2\2V\u054d\3\2\2\2") - buf.write("X\u055f\3\2\2\2Z\u0561\3\2\2\2\\\u0569\3\2\2\2^\u057c") - 
buf.write("\3\2\2\2`\u0582\3\2\2\2b\u058f\3\2\2\2d\u0592\3\2\2\2") - buf.write("f\u059c\3\2\2\2h\u05a6\3\2\2\2j\u05ad\3\2\2\2l\u05b4\3") - buf.write("\2\2\2n\u05c1\3\2\2\2p\u05d0\3\2\2\2r\u05d2\3\2\2\2t\u05d5") - buf.write("\3\2\2\2v\u05de\3\2\2\2x\u05e2\3\2\2\2z\u05e9\3\2\2\2") - buf.write("|\u05ee\3\2\2\2~\u05f2\3\2\2\2\u0080\u05fd\3\2\2\2\u0082") - buf.write("\u0604\3\2\2\2\u0084\u0614\3\2\2\2\u0086\u0616\3\2\2\2") - buf.write("\u0088\u063a\3\2\2\2\u008a\u0664\3\2\2\2\u008c\u0666\3") - buf.write("\2\2\2\u008e\u0668\3\2\2\2\u0090\u066a\3\2\2\2\u0092\u0673") - buf.write("\3\2\2\2\u0094\u0678\3\2\2\2\u0096\u0682\3\2\2\2\u0098") - buf.write("\u0691\3\2\2\2\u009a\u069b\3\2\2\2\u009c\u06a4\3\2\2\2") - buf.write("\u009e\u06ac\3\2\2\2\u00a0\u06b8\3\2\2\2\u00a2\u06c6\3") - buf.write("\2\2\2\u00a4\u06cf\3\2\2\2\u00a6\u06e0\3\2\2\2\u00a8\u06e2") - buf.write("\3\2\2\2\u00aa\u06e7\3\2\2\2\u00ac\u06eb\3\2\2\2\u00ae") - buf.write("\u06ed\3\2\2\2\u00b0\u06ef\3\2\2\2\u00b2\u06f8\3\2\2\2") - buf.write("\u00b4\u06fa\3\2\2\2\u00b6\u06fd\3\2\2\2\u00b8\u06ff\3") - buf.write("\2\2\2\u00ba\u0701\3\2\2\2\u00bc\u0703\3\2\2\2\u00be\u0705") - buf.write("\3\2\2\2\u00c0\u0707\3\2\2\2\u00c2\u070c\3\2\2\2\u00c4") - buf.write("\u0717\3\2\2\2\u00c6\u071a\3\2\2\2\u00c8\u071d\3\2\2\2") - buf.write("\u00ca\u0721\3\2\2\2\u00cc\u0725\3\2\2\2\u00ce\u072c\3") - buf.write("\2\2\2\u00d0\u072e\3\2\2\2\u00d2\u0731\3\2\2\2\u00d4\u0733") - buf.write("\3\2\2\2\u00d6\u0735\3\2\2\2\u00d8\u0737\3\2\2\2\u00da") - buf.write("\u0739\3\2\2\2\u00dc\u073b\3\2\2\2\u00de\u00df\5\4\3\2") - buf.write("\u00df\u00e0\7\u00f9\2\2\u00e0\u00e2\3\2\2\2\u00e1\u00de") - buf.write("\3\2\2\2\u00e2\u00e5\3\2\2\2\u00e3\u00e1\3\2\2\2\u00e3") - buf.write("\u00e4\3\2\2\2\u00e4\u00e6\3\2\2\2\u00e5\u00e3\3\2\2\2") - buf.write("\u00e6\u00e7\7\2\2\3\u00e7\3\3\2\2\2\u00e8\u00e9\5\u00bc") - buf.write("_\2\u00e9\u00ea\7\26\2\2\u00ea\u00eb\5\6\4\2\u00eb\u00f2") - buf.write("\3\2\2\2\u00ec\u00ed\5\u00bc_\2\u00ed\u00ee\7~\2\2\u00ee") - 
buf.write('\u00ef\5\6\4\2\u00ef\u00f2\3\2\2\2\u00f0\u00f2\5"\22') - buf.write("\2\u00f1\u00e8\3\2\2\2\u00f1\u00ec\3\2\2\2\u00f1\u00f0") - buf.write("\3\2\2\2\u00f2\5\3\2\2\2\u00f3\u00f4\b\4\1\2\u00f4\u00f5") - buf.write("\7\3\2\2\u00f5\u00f6\5\6\4\2\u00f6\u00f7\7\4\2\2\u00f7") - buf.write("\u0112\3\2\2\2\u00f8\u0112\5\f\7\2\u00f9\u00fa\t\2\2\2") - buf.write("\u00fa\u0112\5\6\4\r\u00fb\u00fc\7\31\2\2\u00fc\u00fd") - buf.write("\5\6\4\2\u00fd\u00fe\7\33\2\2\u00fe\u00ff\5\6\4\2\u00ff") - buf.write("\u0100\7\34\2\2\u0100\u0101\5\6\4\6\u0101\u0112\3\2\2") - buf.write("\2\u0102\u0108\7\32\2\2\u0103\u0104\7\u00bb\2\2\u0104") - buf.write("\u0105\5\6\4\2\u0105\u0106\7\33\2\2\u0106\u0107\5\6\4") - buf.write("\2\u0107\u0109\3\2\2\2\u0108\u0103\3\2\2\2\u0109\u010a") - buf.write("\3\2\2\2\u010a\u0108\3\2\2\2\u010a\u010b\3\2\2\2\u010b") - buf.write("\u010c\3\2\2\2\u010c\u010d\7\34\2\2\u010d\u010e\5\6\4") - buf.write("\5\u010e\u0112\3\2\2\2\u010f\u0112\5\u00d8m\2\u0110\u0112") - buf.write("\5\u00bc_\2\u0111\u00f3\3\2\2\2\u0111\u00f8\3\2\2\2\u0111") - buf.write("\u00f9\3\2\2\2\u0111\u00fb\3\2\2\2\u0111\u0102\3\2\2\2") - buf.write("\u0111\u010f\3\2\2\2\u0111\u0110\3\2\2\2\u0112\u0133\3") - buf.write("\2\2\2\u0113\u0114\f\f\2\2\u0114\u0115\t\3\2\2\u0115\u0132") - buf.write("\5\6\4\r\u0116\u0117\f\13\2\2\u0117\u0118\t\4\2\2\u0118") - buf.write("\u0132\5\6\4\f\u0119\u011a\f\n\2\2\u011a\u011b\5\u00c8") - buf.write("e\2\u011b\u011c\5\6\4\13\u011c\u0132\3\2\2\2\u011d\u011e") - buf.write("\f\b\2\2\u011e\u011f\7\61\2\2\u011f\u0132\5\6\4\t\u0120") - buf.write("\u0121\f\7\2\2\u0121\u0122\t\5\2\2\u0122\u0132\5\6\4\b") - buf.write("\u0123\u0124\f\17\2\2\u0124\u0125\7\5\2\2\u0125\u0126") - buf.write("\5\16\b\2\u0126\u0127\7\6\2\2\u0127\u0132\3\2\2\2\u0128") - buf.write("\u0129\f\16\2\2\u0129\u012a\7\27\2\2\u012a\u0132\5\u00be") - buf.write("`\2\u012b\u012c\f\t\2\2\u012c\u012f\t\6\2\2\u012d\u0130") - buf.write("\5\u00c2b\2\u012e\u0130\5\u00d2j\2\u012f\u012d\3\2\2\2") - 
buf.write("\u012f\u012e\3\2\2\2\u0130\u0132\3\2\2\2\u0131\u0113\3") - buf.write("\2\2\2\u0131\u0116\3\2\2\2\u0131\u0119\3\2\2\2\u0131\u011d") - buf.write("\3\2\2\2\u0131\u0120\3\2\2\2\u0131\u0123\3\2\2\2\u0131") - buf.write("\u0128\3\2\2\2\u0131\u012b\3\2\2\2\u0132\u0135\3\2\2\2") - buf.write("\u0133\u0131\3\2\2\2\u0133\u0134\3\2\2\2\u0134\7\3\2\2") - buf.write("\2\u0135\u0133\3\2\2\2\u0136\u0137\b\5\1\2\u0137\u0138") - buf.write("\7\3\2\2\u0138\u0139\5\b\5\2\u0139\u013a\7\4\2\2\u013a") - buf.write("\u0155\3\2\2\2\u013b\u0155\5\n\6\2\u013c\u013d\t\2\2\2") - buf.write("\u013d\u0155\5\b\5\r\u013e\u013f\7\31\2\2\u013f\u0140") - buf.write("\5\b\5\2\u0140\u0141\7\33\2\2\u0141\u0142\5\b\5\2\u0142") - buf.write("\u0143\7\34\2\2\u0143\u0144\5\b\5\6\u0144\u0155\3\2\2") - buf.write("\2\u0145\u014b\7\32\2\2\u0146\u0147\7\u00bb\2\2\u0147") - buf.write("\u0148\5\b\5\2\u0148\u0149\7\33\2\2\u0149\u014a\5\b\5") - buf.write("\2\u014a\u014c\3\2\2\2\u014b\u0146\3\2\2\2\u014c\u014d") - buf.write("\3\2\2\2\u014d\u014b\3\2\2\2\u014d\u014e\3\2\2\2\u014e") - buf.write("\u014f\3\2\2\2\u014f\u0150\7\34\2\2\u0150\u0151\5\b\5") - buf.write("\5\u0151\u0155\3\2\2\2\u0152\u0155\5\u00d8m\2\u0153\u0155") - buf.write("\5\u00c0a\2\u0154\u0136\3\2\2\2\u0154\u013b\3\2\2\2\u0154") - buf.write("\u013c\3\2\2\2\u0154\u013e\3\2\2\2\u0154\u0145\3\2\2\2") - buf.write("\u0154\u0152\3\2\2\2\u0154\u0153\3\2\2\2\u0155\u016e\3") - buf.write("\2\2\2\u0156\u0157\f\f\2\2\u0157\u0158\t\3\2\2\u0158\u016d") - buf.write("\5\b\5\r\u0159\u015a\f\13\2\2\u015a\u015b\t\4\2\2\u015b") - buf.write("\u016d\5\b\5\f\u015c\u015d\f\n\2\2\u015d\u015e\5\u00c8") - buf.write("e\2\u015e\u015f\5\b\5\13\u015f\u016d\3\2\2\2\u0160\u0161") - buf.write("\f\b\2\2\u0161\u0162\7\61\2\2\u0162\u016d\5\b\5\t\u0163") - buf.write("\u0164\f\7\2\2\u0164\u0165\t\5\2\2\u0165\u016d\5\b\5\b") - buf.write("\u0166\u0167\f\t\2\2\u0167\u016a\t\6\2\2\u0168\u016b\5") - buf.write("\u00c2b\2\u0169\u016b\5\u00d2j\2\u016a\u0168\3\2\2\2\u016a") - 
buf.write("\u0169\3\2\2\2\u016b\u016d\3\2\2\2\u016c\u0156\3\2\2\2") - buf.write("\u016c\u0159\3\2\2\2\u016c\u015c\3\2\2\2\u016c\u0160\3") - buf.write("\2\2\2\u016c\u0163\3\2\2\2\u016c\u0166\3\2\2\2\u016d\u0170") - buf.write("\3\2\2\2\u016e\u016c\3\2\2\2\u016e\u016f\3\2\2\2\u016f") - buf.write("\t\3\2\2\2\u0170\u016e\3\2\2\2\u0171\u017a\5&\24\2\u0172") - buf.write("\u017a\5.\30\2\u0173\u017a\5\62\32\2\u0174\u017a\5\66") - buf.write("\34\2\u0175\u017a\5:\36\2\u0176\u017a\5D#\2\u0177\u017a") - buf.write("\5F$\2\u0178\u017a\5L'\2\u0179\u0171\3\2\2\2\u0179\u0172") - buf.write("\3\2\2\2\u0179\u0173\3\2\2\2\u0179\u0174\3\2\2\2\u0179") - buf.write("\u0175\3\2\2\2\u0179\u0176\3\2\2\2\u0179\u0177\3\2\2\2") - buf.write("\u0179\u0178\3\2\2\2\u017a\13\3\2\2\2\u017b\u0188\5 \21") - buf.write("\2\u017c\u0188\5$\23\2\u017d\u0188\5,\27\2\u017e\u0188") - buf.write("\5\60\31\2\u017f\u0188\5\64\33\2\u0180\u0188\58\35\2\u0181") - buf.write("\u0188\5<\37\2\u0182\u0188\5> \2\u0183\u0188\5@!\2\u0184") - buf.write('\u0188\5B"\2\u0185\u0188\5H%\2\u0186\u0188\5J&\2\u0187') - buf.write("\u017b\3\2\2\2\u0187\u017c\3\2\2\2\u0187\u017d\3\2\2\2") - buf.write("\u0187\u017e\3\2\2\2\u0187\u017f\3\2\2\2\u0187\u0180\3") - buf.write("\2\2\2\u0187\u0181\3\2\2\2\u0187\u0182\3\2\2\2\u0187\u0183") - buf.write("\3\2\2\2\u0187\u0184\3\2\2\2\u0187\u0185\3\2\2\2\u0187") - buf.write("\u0186\3\2\2\2\u0188\r\3\2\2\2\u0189\u0191\5\20\t\2\u018a") - buf.write("\u0191\5\22\n\2\u018b\u0191\5\24\13\2\u018c\u0191\5\26") - buf.write("\f\2\u018d\u0191\5\30\r\2\u018e\u0191\5\32\16\2\u018f") - buf.write("\u0191\5\36\20\2\u0190\u0189\3\2\2\2\u0190\u018a\3\2\2") - buf.write("\2\u0190\u018b\3\2\2\2\u0190\u018c\3\2\2\2\u0190\u018d") - buf.write("\3\2\2\2\u0190\u018e\3\2\2\2\u0190\u018f\3\2\2\2\u0191") - buf.write("\17\3\2\2\2\u0192\u0193\7/\2\2\u0193\u0198\5N(\2\u0194") - buf.write("\u0195\7\23\2\2\u0195\u0197\5N(\2\u0196\u0194\3\2\2\2") - buf.write("\u0197\u019a\3\2\2\2\u0198\u0196\3\2\2\2\u0198\u0199\3") - 
buf.write("\2\2\2\u0199\21\3\2\2\2\u019a\u0198\3\2\2\2\u019b\u019c") - buf.write("\7K\2\2\u019c\u01a1\5P)\2\u019d\u019f\5p9\2\u019e\u01a0") - buf.write("\5r:\2\u019f\u019e\3\2\2\2\u019f\u01a0\3\2\2\2\u01a0\u01a2") - buf.write("\3\2\2\2\u01a1\u019d\3\2\2\2\u01a1\u01a2\3\2\2\2\u01a2") - buf.write("\23\3\2\2\2\u01a3\u01a4\7l\2\2\u01a4\u01a5\5\b\5\2\u01a5") - buf.write("\25\3\2\2\2\u01a6\u01a7\7-\2\2\u01a7\u01ac\5T+\2\u01a8") - buf.write("\u01a9\7\23\2\2\u01a9\u01ab\5T+\2\u01aa\u01a8\3\2\2\2") - buf.write("\u01ab\u01ae\3\2\2\2\u01ac\u01aa\3\2\2\2\u01ac\u01ad\3") - buf.write("\2\2\2\u01ad\27\3\2\2\2\u01ae\u01ac\3\2\2\2\u01af\u01b0") - buf.write("\t\7\2\2\u01b0\u01b5\5\u00c0a\2\u01b1\u01b2\7\23\2\2\u01b2") - buf.write("\u01b4\5\u00c0a\2\u01b3\u01b1\3\2\2\2\u01b4\u01b7\3\2") - buf.write("\2\2\u01b5\u01b3\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6\31") - buf.write("\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8\u01b9\t\b\2\2\u01b9") - buf.write("\u01ba\5\u00c0a\2\u01ba\u01bb\7\23\2\2\u01bb\u01bc\5\u00c0") - buf.write("a\2\u01bc\33\3\2\2\2\u01bd\u01be\7\u00cd\2\2\u01be\u01bf") - buf.write("\5\u00c0a\2\u01bf\u01c0\7\23\2\2\u01c0\u01c1\5\u00c0a") - buf.write("\2\u01c1\u01c2\7\66\2\2\u01c2\u01c7\5\u00d8m\2\u01c3\u01c4") - buf.write("\7\23\2\2\u01c4\u01c6\5\u00d8m\2\u01c5\u01c3\3\2\2\2\u01c6") - buf.write("\u01c9\3\2\2\2\u01c7\u01c5\3\2\2\2\u01c7\u01c8\3\2\2\2") - buf.write("\u01c8\35\3\2\2\2\u01c9\u01c7\3\2\2\2\u01ca\u01cb\7\u00cf") - buf.write("\2\2\u01cb\u01d0\5V,\2\u01cc\u01cd\7\23\2\2\u01cd\u01cf") - buf.write("\5V,\2\u01ce\u01cc\3\2\2\2\u01cf\u01d2\3\2\2\2\u01d0\u01ce") - buf.write("\3\2\2\2\u01d0\u01d1\3\2\2\2\u01d1\37\3\2\2\2\u01d2\u01d0") - buf.write("\3\2\2\2\u01d3\u01d4\t\t\2\2\u01d4\u01d5\7\3\2\2\u01d5") - buf.write("\u01d6\5\\/\2\u01d6\u01d7\5`\61\2\u01d7\u01d8\7\4\2\2") - buf.write("\u01d8\u01e0\3\2\2\2\u01d9\u01da\t\n\2\2\u01da\u01db\7") - buf.write("\3\2\2\u01db\u01dc\5Z.\2\u01dc\u01dd\5`\61\2\u01dd\u01de") - buf.write("\7\4\2\2\u01de\u01e0\3\2\2\2\u01df\u01d3\3\2\2\2\u01df") - 
buf.write("\u01d9\3\2\2\2\u01e0!\3\2\2\2\u01e1\u01e2\7}\2\2\u01e2") - buf.write("\u01e3\7|\2\2\u01e3\u01e4\5\u00d4k\2\u01e4\u01ed\7\3\2") - buf.write("\2\u01e5\u01ea\5t;\2\u01e6\u01e7\7\23\2\2\u01e7\u01e9") - buf.write("\5t;\2\u01e8\u01e6\3\2\2\2\u01e9\u01ec\3\2\2\2\u01ea\u01e8") - buf.write("\3\2\2\2\u01ea\u01eb\3\2\2\2\u01eb\u01ee\3\2\2\2\u01ec") - buf.write("\u01ea\3\2\2\2\u01ed\u01e5\3\2\2\2\u01ed\u01ee\3\2\2\2") - buf.write("\u01ee\u01ef\3\2\2\2\u01ef\u01f2\7\4\2\2\u01f0\u01f1\7") - buf.write("\u00cb\2\2\u01f1\u01f3\5v<\2\u01f2\u01f0\3\2\2\2\u01f2") - buf.write("\u01f3\3\2\2\2\u01f3\u01f4\3\2\2\2\u01f4\u01f5\7\u00ba") - buf.write("\2\2\u01f5\u01f6\5\6\4\2\u01f6\u01f7\7\u0083\2\2\u01f7") - buf.write("\u01f8\7|\2\2\u01f8\u0214\3\2\2\2\u01f9\u01fa\7}\2\2\u01fa") - buf.write("\u01fb\7\177\2\2\u01fb\u01fc\7\u0081\2\2\u01fc\u01fd\5") - buf.write("\u008eH\2\u01fd\u01fe\7\3\2\2\u01fe\u01ff\5\u0090I\2\u01ff") - buf.write("\u0200\7\4\2\2\u0200\u0201\7\u00ba\2\2\u0201\u0202\5\u0094") - buf.write("K\2\u0202\u0203\7\u0083\2\2\u0203\u0204\7\177\2\2\u0204") - buf.write("\u0205\7\u0081\2\2\u0205\u0214\3\2\2\2\u0206\u0207\7}") - buf.write("\2\2\u0207\u0208\7\u0080\2\2\u0208\u0209\7\u0081\2\2\u0209") - buf.write("\u020a\5\u008eH\2\u020a\u020b\7\3\2\2\u020b\u020c\5\u009c") - buf.write("O\2\u020c\u020d\7\4\2\2\u020d\u020e\7\u00ba\2\2\u020e") - buf.write("\u020f\5\u0098M\2\u020f\u0210\7\u0083\2\2\u0210\u0211") - buf.write("\7\u0080\2\2\u0211\u0212\7\u0081\2\2\u0212\u0214\3\2\2") - buf.write("\2\u0213\u01e1\3\2\2\2\u0213\u01f9\3\2\2\2\u0213\u0206") - buf.write("\3\2\2\2\u0214#\3\2\2\2\u0215\u0216\5\u00d4k\2\u0216\u021f") - buf.write("\7\3\2\2\u0217\u021c\5*\26\2\u0218\u0219\7\23\2\2\u0219") - buf.write("\u021b\5*\26\2\u021a\u0218\3\2\2\2\u021b\u021e\3\2\2\2") - buf.write("\u021c\u021a\3\2\2\2\u021c\u021d\3\2\2\2\u021d\u0220\3") - buf.write("\2\2\2\u021e\u021c\3\2\2\2\u021f\u0217\3\2\2\2\u021f\u0220") - buf.write("\3\2\2\2\u0220\u0221\3\2\2\2\u0221\u0222\7\4\2\2\u0222") - 
buf.write("\u024f\3\2\2\2\u0223\u0224\7\30\2\2\u0224\u0225\7\3\2") - buf.write("\2\u0225\u0226\5\u00d6l\2\u0226\u0229\7\3\2\2\u0227\u022a") - buf.write("\5\u00bc_\2\u0228\u022a\5X-\2\u0229\u0227\3\2\2\2\u0229") - buf.write("\u0228\3\2\2\2\u0229\u022a\3\2\2\2\u022a\u0232\3\2\2\2") - buf.write("\u022b\u022e\7\23\2\2\u022c\u022f\5\u00bc_\2\u022d\u022f") - buf.write("\5X-\2\u022e\u022c\3\2\2\2\u022e\u022d\3\2\2\2\u022f\u0231") - buf.write("\3\2\2\2\u0230\u022b\3\2\2\2\u0231\u0234\3\2\2\2\u0232") - buf.write("\u0230\3\2\2\2\u0232\u0233\3\2\2\2\u0233\u0235\3\2\2\2") - buf.write("\u0234\u0232\3\2\2\2\u0235\u0238\7\4\2\2\u0236\u0237\7") - buf.write("\u00f2\2\2\u0237\u0239\7\u00f6\2\2\u0238\u0236\3\2\2\2") - buf.write("\u0238\u0239\3\2\2\2\u0239\u023c\3\2\2\2\u023a\u023b\7") - buf.write("\u00cb\2\2\u023b\u023d\5\u0084C\2\u023c\u023a\3\2\2\2") - buf.write("\u023c\u023d\3\2\2\2\u023d\u023e\3\2\2\2\u023e\u023f\7") - buf.write("\4\2\2\u023f\u024f\3\2\2\2\u0240\u0241\7\u00dc\2\2\u0241") - buf.write("\u0242\7\3\2\2\u0242\u0243\5\6\4\2\u0243\u0246\7\23\2") - buf.write("\2\u0244\u0247\5\u00dan\2\u0245\u0247\5\u008cG\2\u0246") - buf.write("\u0244\3\2\2\2\u0246\u0245\3\2\2\2\u0247\u024a\3\2\2\2") - buf.write("\u0248\u0249\7\23\2\2\u0249\u024b\7\u00f6\2\2\u024a\u0248") - buf.write("\3\2\2\2\u024a\u024b\3\2\2\2\u024b\u024c\3\2\2\2\u024c") - buf.write("\u024d\7\4\2\2\u024d\u024f\3\2\2\2\u024e\u0215\3\2\2\2") - buf.write("\u024e\u0223\3\2\2\2\u024e\u0240\3\2\2\2\u024f%\3\2\2") - buf.write("\2\u0250\u0251\5\u00d4k\2\u0251\u025a\7\3\2\2\u0252\u0257") - buf.write("\5(\25\2\u0253\u0254\7\23\2\2\u0254\u0256\5(\25\2\u0255") - buf.write("\u0253\3\2\2\2\u0256\u0259\3\2\2\2\u0257\u0255\3\2\2\2") - buf.write("\u0257\u0258\3\2\2\2\u0258\u025b\3\2\2\2\u0259\u0257\3") - buf.write("\2\2\2\u025a\u0252\3\2\2\2\u025a\u025b\3\2\2\2\u025b\u025c") - buf.write("\3\2\2\2\u025c\u025d\7\4\2\2\u025d\u028a\3\2\2\2\u025e") - buf.write("\u025f\7\u00dc\2\2\u025f\u0260\7\3\2\2\u0260\u0261\5\b") - 
buf.write("\5\2\u0261\u0264\7\23\2\2\u0262\u0265\5\u00dan\2\u0263") - buf.write("\u0265\5\u008cG\2\u0264\u0262\3\2\2\2\u0264\u0263\3\2") - buf.write("\2\2\u0265\u0268\3\2\2\2\u0266\u0267\7\23\2\2\u0267\u0269") - buf.write("\7\u00f6\2\2\u0268\u0266\3\2\2\2\u0268\u0269\3\2\2\2\u0269") - buf.write("\u026a\3\2\2\2\u026a\u026b\7\4\2\2\u026b\u028a\3\2\2\2") - buf.write("\u026c\u026d\7\30\2\2\u026d\u026e\7\3\2\2\u026e\u026f") - buf.write("\5\u00d6l\2\u026f\u0272\7\3\2\2\u0270\u0273\5\u00c0a\2") - buf.write("\u0271\u0273\5X-\2\u0272\u0270\3\2\2\2\u0272\u0271\3\2") - buf.write("\2\2\u0272\u0273\3\2\2\2\u0273\u027b\3\2\2\2\u0274\u0277") - buf.write("\7\23\2\2\u0275\u0278\5\u00c0a\2\u0276\u0278\5X-\2\u0277") - buf.write("\u0275\3\2\2\2\u0277\u0276\3\2\2\2\u0278\u027a\3\2\2\2") - buf.write("\u0279\u0274\3\2\2\2\u027a\u027d\3\2\2\2\u027b\u0279\3") - buf.write("\2\2\2\u027b\u027c\3\2\2\2\u027c\u027e\3\2\2\2\u027d\u027b") - buf.write("\3\2\2\2\u027e\u0281\7\4\2\2\u027f\u0280\7\u00f2\2\2\u0280") - buf.write("\u0282\7\u00f6\2\2\u0281\u027f\3\2\2\2\u0281\u0282\3\2") - buf.write("\2\2\u0282\u0285\3\2\2\2\u0283\u0284\7\u00cb\2\2\u0284") - buf.write("\u0286\5x=\2\u0285\u0283\3\2\2\2\u0285\u0286\3\2\2\2\u0286") - buf.write("\u0287\3\2\2\2\u0287\u0288\7\4\2\2\u0288\u028a\3\2\2\2") - buf.write("\u0289\u0250\3\2\2\2\u0289\u025e\3\2\2\2\u0289\u026c\3") - buf.write("\2\2\2\u028a'\3\2\2\2\u028b\u028e\5\b\5\2\u028c\u028e") - buf.write("\7u\2\2\u028d\u028b\3\2\2\2\u028d\u028c\3\2\2\2\u028e") - buf.write(")\3\2\2\2\u028f\u0292\5\6\4\2\u0290\u0292\7u\2\2\u0291") - buf.write("\u028f\3\2\2\2\u0291\u0290\3\2\2\2\u0292+\3\2\2\2\u0293") - buf.write("\u0294\t\13\2\2\u0294\u0295\7\3\2\2\u0295\u0296\5\6\4") - buf.write("\2\u0296\u0297\7\4\2\2\u0297\u02c4\3\2\2\2\u0298\u0299") - buf.write("\7d\2\2\u0299\u029a\7\3\2\2\u029a\u02a5\5\6\4\2\u029b") - buf.write("\u029c\7\23\2\2\u029c\u029d\5\u00caf\2\u029d\u029e\3\2") - buf.write("\2\2\u029e\u029f\7\23\2\2\u029f\u02a0\5\u00caf\2\u02a0") - 
buf.write("\u02a2\3\2\2\2\u02a1\u029b\3\2\2\2\u02a1\u02a2\3\2\2\2") - buf.write("\u02a2\u02a6\3\2\2\2\u02a3\u02a4\7\23\2\2\u02a4\u02a6") - buf.write("\5\u00caf\2\u02a5\u02a1\3\2\2\2\u02a5\u02a3\3\2\2\2\u02a6") - buf.write("\u02a7\3\2\2\2\u02a7\u02a8\7\4\2\2\u02a8\u02c4\3\2\2\2") - buf.write("\u02a9\u02aa\7\u0088\2\2\u02aa\u02ab\7\3\2\2\u02ab\u02ac") - buf.write("\5\6\4\2\u02ac\u02ad\7\23\2\2\u02ad\u02b0\5\6\4\2\u02ae") - buf.write("\u02af\7\23\2\2\u02af\u02b1\5\u00caf\2\u02b0\u02ae\3\2") - buf.write("\2\2\u02b0\u02b1\3\2\2\2\u02b1\u02b2\3\2\2\2\u02b2\u02b3") - buf.write("\7\4\2\2\u02b3\u02c4\3\2\2\2\u02b4\u02b5\7\u0087\2\2\u02b5") - buf.write("\u02b6\7\3\2\2\u02b6\u02b7\5\6\4\2\u02b7\u02b8\7\23\2") - buf.write("\2\u02b8\u02bb\5\6\4\2\u02b9\u02ba\7\23\2\2\u02ba\u02bc") - buf.write("\5\u00caf\2\u02bb\u02b9\3\2\2\2\u02bb\u02bc\3\2\2\2\u02bc") - buf.write("\u02bf\3\2\2\2\u02bd\u02be\7\23\2\2\u02be\u02c0\5\u00ca") - buf.write("f\2\u02bf\u02bd\3\2\2\2\u02bf\u02c0\3\2\2\2\u02c0\u02c1") - buf.write("\3\2\2\2\u02c1\u02c2\7\4\2\2\u02c2\u02c4\3\2\2\2\u02c3") - buf.write("\u0293\3\2\2\2\u02c3\u0298\3\2\2\2\u02c3\u02a9\3\2\2\2") - buf.write("\u02c3\u02b4\3\2\2\2\u02c4-\3\2\2\2\u02c5\u02c6\t\13\2") - buf.write("\2\u02c6\u02c7\7\3\2\2\u02c7\u02c8\5\b\5\2\u02c8\u02c9") - buf.write("\7\4\2\2\u02c9\u02f6\3\2\2\2\u02ca\u02cb\7d\2\2\u02cb") - buf.write("\u02cc\7\3\2\2\u02cc\u02d7\5\b\5\2\u02cd\u02ce\7\23\2") - buf.write("\2\u02ce\u02cf\5\u00ccg\2\u02cf\u02d0\3\2\2\2\u02d0\u02d1") - buf.write("\7\23\2\2\u02d1\u02d2\5\u00ccg\2\u02d2\u02d4\3\2\2\2\u02d3") - buf.write("\u02cd\3\2\2\2\u02d3\u02d4\3\2\2\2\u02d4\u02d8\3\2\2\2") - buf.write("\u02d5\u02d6\7\23\2\2\u02d6\u02d8\5\u00ccg\2\u02d7\u02d3") - buf.write("\3\2\2\2\u02d7\u02d5\3\2\2\2\u02d8\u02d9\3\2\2\2\u02d9") - buf.write("\u02da\7\4\2\2\u02da\u02f6\3\2\2\2\u02db\u02dc\7\u0088") - buf.write("\2\2\u02dc\u02dd\7\3\2\2\u02dd\u02de\5\b\5\2\u02de\u02df") - buf.write("\7\23\2\2\u02df\u02e2\5\b\5\2\u02e0\u02e1\7\23\2\2\u02e1") - 
buf.write("\u02e3\5\u00ccg\2\u02e2\u02e0\3\2\2\2\u02e2\u02e3\3\2") - buf.write("\2\2\u02e3\u02e4\3\2\2\2\u02e4\u02e5\7\4\2\2\u02e5\u02f6") - buf.write("\3\2\2\2\u02e6\u02e7\7\u0087\2\2\u02e7\u02e8\7\3\2\2\u02e8") - buf.write("\u02e9\5\b\5\2\u02e9\u02ea\7\23\2\2\u02ea\u02ed\5\b\5") - buf.write("\2\u02eb\u02ec\7\23\2\2\u02ec\u02ee\5\u00ccg\2\u02ed\u02eb") - buf.write("\3\2\2\2\u02ed\u02ee\3\2\2\2\u02ee\u02f1\3\2\2\2\u02ef") - buf.write("\u02f0\7\23\2\2\u02f0\u02f2\5\u00ccg\2\u02f1\u02ef\3\2") - buf.write("\2\2\u02f1\u02f2\3\2\2\2\u02f2\u02f3\3\2\2\2\u02f3\u02f4") - buf.write("\7\4\2\2\u02f4\u02f6\3\2\2\2\u02f5\u02c5\3\2\2\2\u02f5") - buf.write("\u02ca\3\2\2\2\u02f5\u02db\3\2\2\2\u02f5\u02e6\3\2\2\2") - buf.write("\u02f6/\3\2\2\2\u02f7\u02f8\t\f\2\2\u02f8\u02f9\7\3\2") - buf.write("\2\u02f9\u02fa\5\6\4\2\u02fa\u02fb\7\4\2\2\u02fb\u030d") - buf.write("\3\2\2\2\u02fc\u02fd\t\r\2\2\u02fd\u02fe\7\3\2\2\u02fe") - buf.write("\u0301\5\6\4\2\u02ff\u0300\7\23\2\2\u0300\u0302\5\u00ca") - buf.write("f\2\u0301\u02ff\3\2\2\2\u0301\u0302\3\2\2\2\u0302\u0303") - buf.write("\3\2\2\2\u0303\u0304\7\4\2\2\u0304\u030d\3\2\2\2\u0305") - buf.write("\u0306\t\16\2\2\u0306\u0307\7\3\2\2\u0307\u0308\5\6\4") - buf.write("\2\u0308\u0309\7\23\2\2\u0309\u030a\5\6\4\2\u030a\u030b") - buf.write("\7\4\2\2\u030b\u030d\3\2\2\2\u030c\u02f7\3\2\2\2\u030c") - buf.write("\u02fc\3\2\2\2\u030c\u0305\3\2\2\2\u030d\61\3\2\2\2\u030e") - buf.write("\u030f\t\f\2\2\u030f\u0310\7\3\2\2\u0310\u0311\5\b\5\2") - buf.write("\u0311\u0312\7\4\2\2\u0312\u0324\3\2\2\2\u0313\u0314\t") - buf.write("\r\2\2\u0314\u0315\7\3\2\2\u0315\u0318\5\b\5\2\u0316\u0317") - buf.write("\7\23\2\2\u0317\u0319\5\u00ccg\2\u0318\u0316\3\2\2\2\u0318") - buf.write("\u0319\3\2\2\2\u0319\u031a\3\2\2\2\u031a\u031b\7\4\2\2") - buf.write("\u031b\u0324\3\2\2\2\u031c\u031d\t\16\2\2\u031d\u031e") - buf.write("\7\3\2\2\u031e\u031f\5\b\5\2\u031f\u0320\7\23\2\2\u0320") - buf.write("\u0321\5\b\5\2\u0321\u0322\7\4\2\2\u0322\u0324\3\2\2\2") - 
buf.write("\u0323\u030e\3\2\2\2\u0323\u0313\3\2\2\2\u0323\u031c\3") - buf.write("\2\2\2\u0324\63\3\2\2\2\u0325\u0326\7\65\2\2\u0326\u0327") - buf.write("\7\3\2\2\u0327\u0328\5\6\4\2\u0328\u0329\7\23\2\2\u0329") - buf.write("\u032a\5\6\4\2\u032a\u032b\7\23\2\2\u032b\u032c\5\6\4") - buf.write("\2\u032c\u032d\7\4\2\2\u032d\u0346\3\2\2\2\u032e\u032f") - buf.write("\7q\2\2\u032f\u0330\7\3\2\2\u0330\u0331\5\6\4\2\u0331") - buf.write("\u0332\7\23\2\2\u0332\u0333\5\6\4\2\u0333\u0334\7\4\2") - buf.write("\2\u0334\u0346\3\2\2\2\u0335\u0336\79\2\2\u0336\u0337") - buf.write("\7\3\2\2\u0337\u0338\5\6\4\2\u0338\u0339\7\4\2\2\u0339") - buf.write("\u0346\3\2\2\2\u033a\u033b\7E\2\2\u033b\u033c\7\3\2\2") - buf.write("\u033c\u033d\5\6\4\2\u033d\u033e\7\23\2\2\u033e\u0341") - buf.write("\5\6\4\2\u033f\u0340\7\23\2\2\u0340\u0342\5\u00dco\2\u0341") - buf.write("\u033f\3\2\2\2\u0341\u0342\3\2\2\2\u0342\u0343\3\2\2\2") - buf.write("\u0343\u0344\7\4\2\2\u0344\u0346\3\2\2\2\u0345\u0325\3") - buf.write("\2\2\2\u0345\u032e\3\2\2\2\u0345\u0335\3\2\2\2\u0345\u033a") - buf.write("\3\2\2\2\u0346\65\3\2\2\2\u0347\u0348\7\65\2\2\u0348\u0349") - buf.write("\7\3\2\2\u0349\u034a\5\b\5\2\u034a\u034b\7\23\2\2\u034b") - buf.write("\u034c\5\b\5\2\u034c\u034d\7\23\2\2\u034d\u034e\5\b\5") - buf.write("\2\u034e\u034f\7\4\2\2\u034f\u035d\3\2\2\2\u0350\u0351") - buf.write("\7q\2\2\u0351\u0352\7\3\2\2\u0352\u0353\5\b\5\2\u0353") - buf.write("\u0354\7\23\2\2\u0354\u0355\5\b\5\2\u0355\u0356\7\4\2") - buf.write("\2\u0356\u035d\3\2\2\2\u0357\u0358\79\2\2\u0358\u0359") - buf.write("\7\3\2\2\u0359\u035a\5\b\5\2\u035a\u035b\7\4\2\2\u035b") - buf.write("\u035d\3\2\2\2\u035c\u0347\3\2\2\2\u035c\u0350\3\2\2\2") - buf.write("\u035c\u0357\3\2\2\2\u035d\67\3\2\2\2\u035e\u035f\7\u00d2") - buf.write("\2\2\u035f\u0361\7\3\2\2\u0360\u0362\5\6\4\2\u0361\u0360") - buf.write("\3\2\2\2\u0361\u0362\3\2\2\2\u0362\u0363\3\2\2\2\u0363") - buf.write("\u03c5\7\4\2\2\u0364\u0365\7\u00a3\2\2\u0365\u0366\7\3") - 
buf.write("\2\2\u0366\u0369\5\6\4\2\u0367\u0368\7\23\2\2\u0368\u036a") - buf.write("\t\17\2\2\u0369\u0367\3\2\2\2\u0369\u036a\3\2\2\2\u036a") - buf.write("\u036b\3\2\2\2\u036b\u036c\7\4\2\2\u036c\u03c5\3\2\2\2") - buf.write("\u036d\u036e\t\20\2\2\u036e\u036f\7\3\2\2\u036f\u0370") - buf.write("\5\6\4\2\u0370\u0371\7\4\2\2\u0371\u03c5\3\2\2\2\u0372") - buf.write("\u0373\7\u00a6\2\2\u0373\u0374\7\3\2\2\u0374\u0375\5\6") - buf.write("\4\2\u0375\u0376\7\23\2\2\u0376\u0377\5l\67\2\u0377\u0378") - buf.write("\7\4\2\2\u0378\u03c5\3\2\2\2\u0379\u037a\7\u00d5\2\2\u037a") - buf.write("\u037b\7\3\2\2\u037b\u037e\7\u00f6\2\2\u037c\u037d\7\23") - buf.write("\2\2\u037d\u037f\t\21\2\2\u037e\u037c\3\2\2\2\u037e\u037f") - buf.write("\3\2\2\2\u037f\u0382\3\2\2\2\u0380\u0381\7\23\2\2\u0381") - buf.write("\u0383\5\u00caf\2\u0382\u0380\3\2\2\2\u0382\u0383\3\2") - buf.write("\2\2\u0383\u0386\3\2\2\2\u0384\u0385\7\23\2\2\u0385\u0387") - buf.write("\t\22\2\2\u0386\u0384\3\2\2\2\u0386\u0387\3\2\2\2\u0387") - buf.write("\u0388\3\2\2\2\u0388\u03c5\7\4\2\2\u0389\u038a\7\37\2") - buf.write("\2\u038a\u038b\7\3\2\2\u038b\u03c5\7\4\2\2\u038c\u038d") - buf.write("\7 \2\2\u038d\u038e\7\3\2\2\u038e\u038f\5\6\4\2\u038f") - buf.write("\u0390\7\23\2\2\u0390\u0391\5\6\4\2\u0391\u0392\7\4\2") - buf.write("\2\u0392\u03c5\3\2\2\2\u0393\u0394\7!\2\2\u0394\u0395") - buf.write("\7\3\2\2\u0395\u0396\5\6\4\2\u0396\u0397\7\23\2\2\u0397") - buf.write("\u0398\5\6\4\2\u0398\u0399\7\23\2\2\u0399\u039a\5\6\4") - buf.write("\2\u039a\u039b\7\4\2\2\u039b\u03c5\3\2\2\2\u039c\u039d") - buf.write('\7"\2\2\u039d\u039e\7\3\2\2\u039e\u039f\5\6\4\2\u039f') - buf.write("\u03a0\7\4\2\2\u03a0\u03c5\3\2\2\2\u03a1\u03a2\7#\2\2") - buf.write("\u03a2\u03a3\7\3\2\2\u03a3\u03a4\5\6\4\2\u03a4\u03a5\7") - buf.write("\4\2\2\u03a5\u03c5\3\2\2\2\u03a6\u03a7\7$\2\2\u03a7\u03a8") - buf.write("\7\3\2\2\u03a8\u03a9\5\6\4\2\u03a9\u03aa\7\4\2\2\u03aa") - buf.write("\u03c5\3\2\2\2\u03ab\u03ac\7%\2\2\u03ac\u03ad\7\3\2\2") - 
buf.write("\u03ad\u03ae\5\6\4\2\u03ae\u03af\7\4\2\2\u03af\u03c5\3") - buf.write("\2\2\2\u03b0\u03b1\7&\2\2\u03b1\u03b2\7\3\2\2\u03b2\u03b3") - buf.write("\5\6\4\2\u03b3\u03b4\7\4\2\2\u03b4\u03c5\3\2\2\2\u03b5") - buf.write("\u03b6\7'\2\2\u03b6\u03b7\7\3\2\2\u03b7\u03b8\5\6\4\2") - buf.write("\u03b8\u03b9\7\4\2\2\u03b9\u03c5\3\2\2\2\u03ba\u03bb\7") - buf.write("(\2\2\u03bb\u03bc\7\3\2\2\u03bc\u03bd\5\6\4\2\u03bd\u03be") - buf.write("\7\4\2\2\u03be\u03c5\3\2\2\2\u03bf\u03c0\7)\2\2\u03c0") - buf.write("\u03c1\7\3\2\2\u03c1\u03c2\5\6\4\2\u03c2\u03c3\7\4\2\2") - buf.write("\u03c3\u03c5\3\2\2\2\u03c4\u035e\3\2\2\2\u03c4\u0364\3") - buf.write("\2\2\2\u03c4\u036d\3\2\2\2\u03c4\u0372\3\2\2\2\u03c4\u0379") - buf.write("\3\2\2\2\u03c4\u0389\3\2\2\2\u03c4\u038c\3\2\2\2\u03c4") - buf.write("\u0393\3\2\2\2\u03c4\u039c\3\2\2\2\u03c4\u03a1\3\2\2\2") - buf.write("\u03c4\u03a6\3\2\2\2\u03c4\u03ab\3\2\2\2\u03c4\u03b0\3") - buf.write("\2\2\2\u03c4\u03b5\3\2\2\2\u03c4\u03ba\3\2\2\2\u03c4\u03bf") - buf.write("\3\2\2\2\u03c59\3\2\2\2\u03c6\u03c7\7\u00d2\2\2\u03c7") - buf.write("\u03c9\7\3\2\2\u03c8\u03ca\5\b\5\2\u03c9\u03c8\3\2\2\2") - buf.write("\u03c9\u03ca\3\2\2\2\u03ca\u03cb\3\2\2\2\u03cb\u042d\7") - buf.write("\4\2\2\u03cc\u03cd\7\u00a3\2\2\u03cd\u03ce\7\3\2\2\u03ce") - buf.write("\u03d1\5\b\5\2\u03cf\u03d0\7\23\2\2\u03d0\u03d2\t\17\2") - buf.write("\2\u03d1\u03cf\3\2\2\2\u03d1\u03d2\3\2\2\2\u03d2\u03d3") - buf.write("\3\2\2\2\u03d3\u03d4\7\4\2\2\u03d4\u042d\3\2\2\2\u03d5") - buf.write("\u03d6\t\20\2\2\u03d6\u03d7\7\3\2\2\u03d7\u03d8\5\b\5") - buf.write("\2\u03d8\u03d9\7\4\2\2\u03d9\u042d\3\2\2\2\u03da\u03db") - buf.write("\7\u00a6\2\2\u03db\u03dc\7\3\2\2\u03dc\u03dd\5\b\5\2\u03dd") - buf.write("\u03de\7\23\2\2\u03de\u03df\5l\67\2\u03df\u03e0\7\4\2") - buf.write("\2\u03e0\u042d\3\2\2\2\u03e1\u03e2\7\u00d5\2\2\u03e2\u03e3") - buf.write("\7\3\2\2\u03e3\u03e6\7\u00f6\2\2\u03e4\u03e5\7\23\2\2") - buf.write("\u03e5\u03e7\t\21\2\2\u03e6\u03e4\3\2\2\2\u03e6\u03e7") - 
buf.write("\3\2\2\2\u03e7\u03ea\3\2\2\2\u03e8\u03e9\7\23\2\2\u03e9") - buf.write("\u03eb\5\u00ccg\2\u03ea\u03e8\3\2\2\2\u03ea\u03eb\3\2") - buf.write("\2\2\u03eb\u03ee\3\2\2\2\u03ec\u03ed\7\23\2\2\u03ed\u03ef") - buf.write("\t\22\2\2\u03ee\u03ec\3\2\2\2\u03ee\u03ef\3\2\2\2\u03ef") - buf.write("\u03f0\3\2\2\2\u03f0\u042d\7\4\2\2\u03f1\u03f2\7\37\2") - buf.write("\2\u03f2\u03f3\7\3\2\2\u03f3\u042d\7\4\2\2\u03f4\u03f5") - buf.write("\7 \2\2\u03f5\u03f6\7\3\2\2\u03f6\u03f7\5\b\5\2\u03f7") - buf.write("\u03f8\7\23\2\2\u03f8\u03f9\5\b\5\2\u03f9\u03fa\7\4\2") - buf.write("\2\u03fa\u042d\3\2\2\2\u03fb\u03fc\7!\2\2\u03fc\u03fd") - buf.write("\7\3\2\2\u03fd\u03fe\5\b\5\2\u03fe\u03ff\7\23\2\2\u03ff") - buf.write("\u0400\5\b\5\2\u0400\u0401\7\23\2\2\u0401\u0402\5\b\5") - buf.write("\2\u0402\u0403\7\4\2\2\u0403\u042d\3\2\2\2\u0404\u0405") - buf.write('\7"\2\2\u0405\u0406\7\3\2\2\u0406\u0407\5\b\5\2\u0407') - buf.write("\u0408\7\4\2\2\u0408\u042d\3\2\2\2\u0409\u040a\7#\2\2") - buf.write("\u040a\u040b\7\3\2\2\u040b\u040c\5\b\5\2\u040c\u040d\7") - buf.write("\4\2\2\u040d\u042d\3\2\2\2\u040e\u040f\7$\2\2\u040f\u0410") - buf.write("\7\3\2\2\u0410\u0411\5\b\5\2\u0411\u0412\7\4\2\2\u0412") - buf.write("\u042d\3\2\2\2\u0413\u0414\7%\2\2\u0414\u0415\7\3\2\2") - buf.write("\u0415\u0416\5\b\5\2\u0416\u0417\7\4\2\2\u0417\u042d\3") - buf.write("\2\2\2\u0418\u0419\7&\2\2\u0419\u041a\7\3\2\2\u041a\u041b") - buf.write("\5\b\5\2\u041b\u041c\7\4\2\2\u041c\u042d\3\2\2\2\u041d") - buf.write("\u041e\7'\2\2\u041e\u041f\7\3\2\2\u041f\u0420\5\b\5\2") - buf.write("\u0420\u0421\7\4\2\2\u0421\u042d\3\2\2\2\u0422\u0423\7") - buf.write("(\2\2\u0423\u0424\7\3\2\2\u0424\u0425\5\b\5\2\u0425\u0426") - buf.write("\7\4\2\2\u0426\u042d\3\2\2\2\u0427\u0428\7)\2\2\u0428") - buf.write("\u0429\7\3\2\2\u0429\u042a\5\b\5\2\u042a\u042b\7\4\2\2") - buf.write("\u042b\u042d\3\2\2\2\u042c\u03c6\3\2\2\2\u042c\u03cc\3") - buf.write("\2\2\2\u042c\u03d5\3\2\2\2\u042c\u03da\3\2\2\2\u042c\u03e1") - 
buf.write("\3\2\2\2\u042c\u03f1\3\2\2\2\u042c\u03f4\3\2\2\2\u042c") - buf.write("\u03fb\3\2\2\2\u042c\u0404\3\2\2\2\u042c\u0409\3\2\2\2") - buf.write("\u042c\u040e\3\2\2\2\u042c\u0413\3\2\2\2\u042c\u0418\3") - buf.write("\2\2\2\u042c\u041d\3\2\2\2\u042c\u0422\3\2\2\2\u042c\u0427") - buf.write("\3\2\2\2\u042d;\3\2\2\2\u042e\u042f\7;\2\2\u042f\u0430") - buf.write("\7\3\2\2\u0430\u0433\5\6\4\2\u0431\u0432\7\23\2\2\u0432") - buf.write("\u0434\5\6\4\2\u0433\u0431\3\2\2\2\u0434\u0435\3\2\2\2") - buf.write("\u0435\u0433\3\2\2\2\u0435\u0436\3\2\2\2\u0436\u0437\3") - buf.write("\2\2\2\u0437\u0438\7\4\2\2\u0438\u044c\3\2\2\2\u0439\u043a") - buf.write("\7>\2\2\u043a\u043b\7\3\2\2\u043b\u043e\5\6\4\2\u043c") - buf.write("\u043d\7\23\2\2\u043d\u043f\5\6\4\2\u043e\u043c\3\2\2") - buf.write("\2\u043f\u0440\3\2\2\2\u0440\u043e\3\2\2\2\u0440\u0441") - buf.write("\3\2\2\2\u0441\u0442\3\2\2\2\u0442\u0443\7\4\2\2\u0443") - buf.write("\u044c\3\2\2\2\u0444\u0445\t\23\2\2\u0445\u0446\7\3\2") - buf.write("\2\u0446\u0447\5\6\4\2\u0447\u0448\7\23\2\2\u0448\u0449") - buf.write("\5\6\4\2\u0449\u044a\7\4\2\2\u044a\u044c\3\2\2\2\u044b") - buf.write("\u042e\3\2\2\2\u044b\u0439\3\2\2\2\u044b\u0444\3\2\2\2") - buf.write("\u044c=\3\2\2\2\u044d\u044e\7t\2\2\u044e\u044f\7\3\2\2") - buf.write("\u044f\u0450\5\6\4\2\u0450\u0451\7\23\2\2\u0451\u0453") - buf.write("\7\u00f7\2\2\u0452\u0454\5\u00b0Y\2\u0453\u0452\3\2\2") - buf.write("\2\u0453\u0454\3\2\2\2\u0454\u0457\3\2\2\2\u0455\u0456") - buf.write("\7\u0082\2\2\u0456\u0458\5\u00c0a\2\u0457\u0455\3\2\2") - buf.write("\2\u0457\u0458\3\2\2\2\u0458\u045a\3\2\2\2\u0459\u045b") - buf.write("\5\u00aeX\2\u045a\u0459\3\2\2\2\u045a\u045b\3\2\2\2\u045b") - buf.write("\u045d\3\2\2\2\u045c\u045e\5\u00b6\\\2\u045d\u045c\3\2") - buf.write("\2\2\u045d\u045e\3\2\2\2\u045e\u0460\3\2\2\2\u045f\u0461") - buf.write("\5\u00b8]\2\u0460\u045f\3\2\2\2\u0460\u0461\3\2\2\2\u0461") - buf.write("\u0462\3\2\2\2\u0462\u0463\7\4\2\2\u0463?\3\2\2\2\u0464") - 
buf.write("\u0465\7\u00e0\2\2\u0465\u0466\7\3\2\2\u0466\u0467\5\6") - buf.write("\4\2\u0467\u0468\7\23\2\2\u0468\u0472\7\u00f7\2\2\u0469") - buf.write("\u046a\7\u00e9\2\2\u046a\u046f\5\u00c0a\2\u046b\u046c") - buf.write("\7\23\2\2\u046c\u046e\5\u00c0a\2\u046d\u046b\3\2\2\2\u046e") - buf.write("\u0471\3\2\2\2\u046f\u046d\3\2\2\2\u046f\u0470\3\2\2\2") - buf.write("\u0470\u0473\3\2\2\2\u0471\u046f\3\2\2\2\u0472\u0469\3") - buf.write("\2\2\2\u0472\u0473\3\2\2\2\u0473\u0475\3\2\2\2\u0474\u0476") - buf.write("\5\u00acW\2\u0475\u0474\3\2\2\2\u0475\u0476\3\2\2\2\u0476") - buf.write("\u0477\3\2\2\2\u0477\u0478\7\4\2\2\u0478\u04a2\3\2\2\2") - buf.write("\u0479\u047a\7\u00e1\2\2\u047a\u047b\7\3\2\2\u047b\u047c") - buf.write("\5\6\4\2\u047c\u047d\7\23\2\2\u047d\u047f\7\u00f7\2\2") - buf.write("\u047e\u0480\5\u00b0Y\2\u047f\u047e\3\2\2\2\u047f\u0480") - buf.write("\3\2\2\2\u0480\u0483\3\2\2\2\u0481\u0482\7\u0082\2\2\u0482") - buf.write("\u0484\5\u00c0a\2\u0483\u0481\3\2\2\2\u0483\u0484\3\2") - buf.write("\2\2\u0484\u0486\3\2\2\2\u0485\u0487\5\u00aeX\2\u0486") - buf.write("\u0485\3\2\2\2\u0486\u0487\3\2\2\2\u0487\u0489\3\2\2\2") - buf.write("\u0488\u048a\5\u00b2Z\2\u0489\u0488\3\2\2\2\u0489\u048a") - buf.write("\3\2\2\2\u048a\u048c\3\2\2\2\u048b\u048d\5\u00acW\2\u048c") - buf.write("\u048b\3\2\2\2\u048c\u048d\3\2\2\2\u048d\u048e\3\2\2\2") - buf.write("\u048e\u048f\7\4\2\2\u048f\u04a2\3\2\2\2\u0490\u0491\7") - buf.write("D\2\2\u0491\u0492\7\3\2\2\u0492\u0494\5\6\4\2\u0493\u0495") - buf.write("\5\u00c4c\2\u0494\u0493\3\2\2\2\u0494\u0495\3\2\2\2\u0495") - buf.write("\u0497\3\2\2\2\u0496\u0498\5\u00c6d\2\u0497\u0496\3\2") - buf.write("\2\2\u0497\u0498\3\2\2\2\u0498\u049a\3\2\2\2\u0499\u049b") - buf.write("\5\u00b4[\2\u049a\u0499\3\2\2\2\u049a\u049b\3\2\2\2\u049b") - buf.write("\u049d\3\2\2\2\u049c\u049e\t\24\2\2\u049d\u049c\3\2\2") - buf.write("\2\u049d\u049e\3\2\2\2\u049e\u049f\3\2\2\2\u049f\u04a0") - buf.write("\7\4\2\2\u04a0\u04a2\3\2\2\2\u04a1\u0464\3\2\2\2\u04a1") - 
buf.write("\u0479\3\2\2\2\u04a1\u0490\3\2\2\2\u04a2A\3\2\2\2\u04a3") - buf.write("\u04a4\7s\2\2\u04a4\u04a5\7\3\2\2\u04a5\u04a6\5\6\4\2") - buf.write("\u04a6\u04a7\7\23\2\2\u04a7\u04a8\5\6\4\2\u04a8\u04a9") - buf.write("\7\4\2\2\u04a9C\3\2\2\2\u04aa\u04ab\7s\2\2\u04ab\u04ac") - buf.write("\7\3\2\2\u04ac\u04ad\5\b\5\2\u04ad\u04ae\7\23\2\2\u04ae") - buf.write("\u04af\5\b\5\2\u04af\u04b0\7\4\2\2\u04b0E\3\2\2\2\u04b1") - buf.write("\u04b2\t\25\2\2\u04b2\u04b3\7\3\2\2\u04b3\u04b4\5\b\5") - buf.write("\2\u04b4\u04b5\7\4\2\2\u04b5\u04ba\3\2\2\2\u04b6\u04b7") - buf.write("\7h\2\2\u04b7\u04b8\7\3\2\2\u04b8\u04ba\7\4\2\2\u04b9") - buf.write("\u04b1\3\2\2\2\u04b9\u04b6\3\2\2\2\u04baG\3\2\2\2\u04bb") - buf.write("\u04bc\t\25\2\2\u04bc\u04bd\7\3\2\2\u04bd\u04c2\5\6\4") - buf.write("\2\u04be\u04c0\5p9\2\u04bf\u04c1\5r:\2\u04c0\u04bf\3\2") - buf.write("\2\2\u04c0\u04c1\3\2\2\2\u04c1\u04c3\3\2\2\2\u04c2\u04be") - buf.write("\3\2\2\2\u04c2\u04c3\3\2\2\2\u04c3\u04c4\3\2\2\2\u04c4") - buf.write("\u04c5\7\4\2\2\u04c5I\3\2\2\2\u04c6\u04c7\t\26\2\2\u04c7") - buf.write("\u04c8\7\3\2\2\u04c8\u04c9\5\6\4\2\u04c9\u04ca\7\u009a") - buf.write("\2\2\u04ca\u04cc\7\3\2\2\u04cb\u04cd\5d\63\2\u04cc\u04cb") - buf.write("\3\2\2\2\u04cc\u04cd\3\2\2\2\u04cd\u04cf\3\2\2\2\u04ce") - buf.write("\u04d0\5f\64\2\u04cf\u04ce\3\2\2\2\u04cf\u04d0\3\2\2\2") - buf.write("\u04d0\u04d2\3\2\2\2\u04d1\u04d3\5j\66\2\u04d2\u04d1\3") - buf.write("\2\2\2\u04d2\u04d3\3\2\2\2\u04d3\u04d4\3\2\2\2\u04d4\u04d5") - buf.write("\7\4\2\2\u04d5\u04d6\7\4\2\2\u04d6\u04f6\3\2\2\2\u04d7") - buf.write("\u04d8\t\27\2\2\u04d8\u04d9\7\3\2\2\u04d9\u04e0\5\6\4") - buf.write("\2\u04da\u04db\7\23\2\2\u04db\u04de\5l\67\2\u04dc\u04dd") - buf.write("\7\23\2\2\u04dd\u04df\5X-\2\u04de\u04dc\3\2\2\2\u04de") - buf.write("\u04df\3\2\2\2\u04df\u04e1\3\2\2\2\u04e0\u04da\3\2\2\2") - buf.write("\u04e0\u04e1\3\2\2\2\u04e1\u04e2\3\2\2\2\u04e2\u04e3\7") - buf.write("\u009a\2\2\u04e3\u04e5\7\3\2\2\u04e4\u04e6\5d\63\2\u04e5") - 
buf.write("\u04e4\3\2\2\2\u04e5\u04e6\3\2\2\2\u04e6\u04e7\3\2\2\2") - buf.write("\u04e7\u04e8\5f\64\2\u04e8\u04e9\3\2\2\2\u04e9\u04ea\7") - buf.write("\4\2\2\u04ea\u04eb\7\4\2\2\u04eb\u04f6\3\2\2\2\u04ec\u04ed") - buf.write("\7\u0099\2\2\u04ed\u04ee\7\3\2\2\u04ee\u04ef\5\6\4\2\u04ef") - buf.write("\u04f0\7\u009a\2\2\u04f0\u04f1\7\3\2\2\u04f1\u04f2\5d") - buf.write("\63\2\u04f2\u04f3\7\4\2\2\u04f3\u04f4\7\4\2\2\u04f4\u04f6") - buf.write("\3\2\2\2\u04f5\u04c6\3\2\2\2\u04f5\u04d7\3\2\2\2\u04f5") - buf.write("\u04ec\3\2\2\2\u04f6K\3\2\2\2\u04f7\u04f8\t\26\2\2\u04f8") - buf.write("\u04f9\7\3\2\2\u04f9\u04fa\5\b\5\2\u04fa\u04fb\7\u009a") - buf.write("\2\2\u04fb\u04fd\7\3\2\2\u04fc\u04fe\5d\63\2\u04fd\u04fc") - buf.write("\3\2\2\2\u04fd\u04fe\3\2\2\2\u04fe\u0500\3\2\2\2\u04ff") - buf.write("\u0501\5f\64\2\u0500\u04ff\3\2\2\2\u0500\u0501\3\2\2\2") - buf.write("\u0501\u0503\3\2\2\2\u0502\u0504\5j\66\2\u0503\u0502\3") - buf.write("\2\2\2\u0503\u0504\3\2\2\2\u0504\u0505\3\2\2\2\u0505\u0506") - buf.write("\7\4\2\2\u0506\u0507\7\4\2\2\u0507\u0532\3\2\2\2\u0508") - buf.write("\u0509\t\27\2\2\u0509\u050a\7\3\2\2\u050a\u0510\5\b\5") - buf.write("\2\u050b\u050c\7\23\2\2\u050c\u050e\5l\67\2\u050d\u050f") - buf.write("\5X-\2\u050e\u050d\3\2\2\2\u050e\u050f\3\2\2\2\u050f\u0511") - buf.write("\3\2\2\2\u0510\u050b\3\2\2\2\u0510\u0511\3\2\2\2\u0511") - buf.write("\u0512\3\2\2\2\u0512\u0513\7\u009a\2\2\u0513\u0515\7\3") - buf.write("\2\2\u0514\u0516\5d\63\2\u0515\u0514\3\2\2\2\u0515\u0516") - buf.write("\3\2\2\2\u0516\u0517\3\2\2\2\u0517\u0518\5f\64\2\u0518") - buf.write("\u0519\3\2\2\2\u0519\u051a\7\4\2\2\u051a\u051b\7\4\2\2") - buf.write("\u051b\u0532\3\2\2\2\u051c\u051d\7O\2\2\u051d\u051e\7") - buf.write("\3\2\2\u051e\u051f\7\u009a\2\2\u051f\u0521\7\3\2\2\u0520") - buf.write("\u0522\5d\63\2\u0521\u0520\3\2\2\2\u0521\u0522\3\2\2\2") - buf.write("\u0522\u0523\3\2\2\2\u0523\u0524\5f\64\2\u0524\u0525\3") - buf.write("\2\2\2\u0525\u0526\7\4\2\2\u0526\u0527\7\4\2\2\u0527\u0532") - 
buf.write("\3\2\2\2\u0528\u0529\7\u0099\2\2\u0529\u052a\7\3\2\2\u052a") - buf.write("\u052b\5\b\5\2\u052b\u052c\7\u009a\2\2\u052c\u052d\7\3") - buf.write("\2\2\u052d\u052e\5d\63\2\u052e\u052f\7\4\2\2\u052f\u0530") - buf.write("\7\4\2\2\u0530\u0532\3\2\2\2\u0531\u04f7\3\2\2\2\u0531") - buf.write("\u0508\3\2\2\2\u0531\u051c\3\2\2\2\u0531\u0528\3\2\2\2") - buf.write("\u0532M\3\2\2\2\u0533\u0534\5\u00c0a\2\u0534\u0535\7F") - buf.write("\2\2\u0535\u0536\5\u00c0a\2\u0536O\3\2\2\2\u0537\u053c") - buf.write("\5R*\2\u0538\u0539\7\23\2\2\u0539\u053b\5R*\2\u053a\u0538") - buf.write("\3\2\2\2\u053b\u053e\3\2\2\2\u053c\u053a\3\2\2\2\u053c") - buf.write("\u053d\3\2\2\2\u053dQ\3\2\2\2\u053e\u053c\3\2\2\2\u053f") - buf.write("\u0541\5\u00ceh\2\u0540\u053f\3\2\2\2\u0540\u0541\3\2") - buf.write("\2\2\u0541\u0542\3\2\2\2\u0542\u0543\5\u00c0a\2\u0543") - buf.write("\u0544\7\26\2\2\u0544\u0545\5F$\2\u0545S\3\2\2\2\u0546") - buf.write("\u0548\5\u00ceh\2\u0547\u0546\3\2\2\2\u0547\u0548\3\2") - buf.write("\2\2\u0548\u0549\3\2\2\2\u0549\u054a\5\u00c0a\2\u054a") - buf.write("\u054b\7\26\2\2\u054b\u054c\5\b\5\2\u054cU\3\2\2\2\u054d") - buf.write("\u054e\5\u00c0a\2\u054e\u0551\7\t\2\2\u054f\u0552\5X-") - buf.write("\2\u0550\u0552\5\u00bc_\2\u0551\u054f\3\2\2\2\u0551\u0550") - buf.write("\3\2\2\2\u0552W\3\2\2\2\u0553\u0560\5\u00d8m\2\u0554\u0555") - buf.write("\7\u00dc\2\2\u0555\u0556\7\3\2\2\u0556\u0557\5\u00d8m") - buf.write("\2\u0557\u0558\7\23\2\2\u0558\u055b\5\u00dan\2\u0559\u055a") + buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\5\36\u042f") + buf.write("\n\36\3\37\3\37\3\37\3\37\3\37\6\37\u0436\n\37\r\37\16") + buf.write("\37\u0437\3\37\3\37\3\37\3\37\3\37\3\37\3\37\6\37\u0441") + buf.write("\n\37\r\37\16\37\u0442\3\37\3\37\3\37\3\37\3\37\3\37\3") + buf.write("\37\3\37\3\37\5\37\u044e\n\37\3 \3 \3 \3 \3 \3 \5 \u0456") + buf.write("\n \3 \3 \5 \u045a\n \3 \5 \u045d\n \3 \5 \u0460\n \3") + buf.write(" \5 \u0463\n \3 \3 \3!\3!\3!\3!\3!\3!\3!\3!\3!\7!\u0470") + 
buf.write("\n!\f!\16!\u0473\13!\5!\u0475\n!\3!\5!\u0478\n!\3!\3!") + buf.write("\3!\3!\3!\3!\3!\3!\5!\u0482\n!\3!\3!\5!\u0486\n!\3!\5") + buf.write("!\u0489\n!\3!\5!\u048c\n!\3!\5!\u048f\n!\3!\3!\3!\3!\3") + buf.write("!\3!\5!\u0497\n!\3!\5!\u049a\n!\3!\5!\u049d\n!\3!\5!\u04a0") + buf.write('\n!\3!\3!\5!\u04a4\n!\3"\3"\3"\3"\3"\3"\3"\3#\3') + buf.write("#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3$\5$\u04bc\n$\3") + buf.write("%\3%\3%\3%\3%\5%\u04c3\n%\5%\u04c5\n%\3%\3%\3&\3&\3&\3") + buf.write("&\3&\3&\5&\u04cf\n&\3&\5&\u04d2\n&\3&\5&\u04d5\n&\3&\3") + buf.write("&\3&\3&\3&\3&\3&\3&\3&\3&\5&\u04e1\n&\5&\u04e3\n&\3&\3") + buf.write("&\3&\5&\u04e8\n&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3&\3") + buf.write("&\3&\5&\u04f8\n&\3'\3'\3'\3'\3'\3'\5'\u0500\n'") + buf.write("\3'\5'\u0503\n'\3'\5'\u0506\n'\3'\3'\3'\3'\3") + buf.write("'\3'\3'\3'\3'\5'\u0511\n'\5'\u0513\n'\3'\3'") + buf.write("\3'\5'\u0518\n'\3'\3'\3'\3'\3'\3'\3'\3'\3'") + buf.write("\3'\5'\u0524\n'\3'\3'\3'\3'\3'\3'\3'\3'\3'") + buf.write("\3'\3'\3'\3'\3'\5'\u0534\n'\3(\3(\3(\3(\3)\3)\3") + buf.write(")\7)\u053d\n)\f)\16)\u0540\13)\3*\5*\u0543\n*\3*\3*\3") + buf.write("*\3*\3+\5+\u054a\n+\3+\3+\3+\3+\3,\3,\3,\3,\3-\3-\3-\3") + buf.write("-\3-\3-\3-\3-\5-\u055c\n-\3-\3-\5-\u0560\n-\3.\3.\3.\7") + buf.write(".\u0565\n.\f.\16.\u0568\13.\3/\3/\3/\7/\u056d\n/\f/\16") + buf.write("/\u0570\13/\3/\3/\3/\3/\7/\u0576\n/\f/\16/\u0579\13/\5") + buf.write("/\u057b\n/\3\60\3\60\3\60\5\60\u0580\n\60\3\61\5\61\u0583") + buf.write("\n\61\3\61\3\61\3\61\5\61\u0588\n\61\3\61\5\61\u058b\n") + buf.write("\61\3\61\5\61\u058e\n\61\3\62\3\62\3\62\3\63\3\63\3\63") + buf.write("\3\63\3\63\7\63\u0598\n\63\f\63\16\63\u059b\13\63\3\64") + buf.write("\3\64\3\64\3\64\3\64\7\64\u05a2\n\64\f\64\16\64\u05a5") + buf.write("\13\64\3\65\3\65\5\65\u05a9\n\65\3\66\3\66\3\66\5\66\u05ae") + buf.write("\n\66\3\66\3\66\3\66\3\66\3\66\3\67\5\67\u05b6\n\67\3") + buf.write("\67\3\67\38\58\u05bb\n8\38\38\39\39\39\39\39\39\39\39") + 
buf.write("\39\39\39\39\39\59\u05cc\n9\3:\3:\3:\3:\3:\7:\u05d3\n") + buf.write(":\f:\16:\u05d6\13:\3:\3:\3:\3:\3:\5:\u05dd\n:\3:\5:\u05e0") + buf.write("\n:\3:\3:\3:\3:\3:\3:\3:\5:\u05e9\n:\3:\3:\5:\u05ed\n") + buf.write(":\3:\3:\5:\u05f1\n:\3:\5:\u05f4\n:\5:\u05f6\n:\3;\3;\3") + buf.write(";\3<\3<\3<\3<\5<\u05ff\n<\3=\3=\3=\5=\u0604\n=\3>\3>\5") + buf.write(">\u0608\n>\3?\3?\3?\3?\3?\5?\u060f\n?\3@\3@\3@\5@\u0614") + buf.write("\n@\3A\3A\5A\u0618\nA\3A\5A\u061b\nA\3A\5A\u061e\nA\3") + buf.write("A\5A\u0621\nA\3B\3B\3B\3B\3B\5B\u0628\nB\3C\3C\3C\3C\3") + buf.write("C\7C\u062f\nC\fC\16C\u0632\13C\3C\3C\5C\u0636\nC\3D\3") + buf.write("D\5D\u063a\nD\3E\3E\3E\3E\3E\5E\u0641\nE\3F\3F\3F\3F\3") + buf.write("F\3F\7F\u0649\nF\fF\16F\u064c\13F\3F\3F\5F\u0650\nF\3") + buf.write("F\3F\3F\3F\3F\7F\u0657\nF\fF\16F\u065a\13F\3F\3F\5F\u065e") + buf.write("\nF\5F\u0660\nF\3G\3G\3G\3G\3G\3G\3G\3G\7G\u066a\nG\f") + buf.write("G\16G\u066d\13G\3G\3G\5G\u0671\nG\3G\5G\u0674\nG\3G\3") + buf.write("G\3G\3G\3G\3G\3G\7G\u067d\nG\fG\16G\u0680\13G\3G\3G\5") + buf.write("G\u0684\nG\3G\3G\5G\u0688\nG\5G\u068a\nG\3H\3H\3I\3I\3") + buf.write("J\3J\3J\3J\7J\u0694\nJ\fJ\16J\u0697\13J\3K\3K\3K\5K\u069c") + buf.write("\nK\3L\3L\3L\7L\u06a1\nL\fL\16L\u06a4\13L\3M\3M\5M\u06a8") + buf.write("\nM\3M\3M\3M\3M\5M\u06ae\nM\3M\3M\5M\u06b2\nM\3M\5M\u06b5") + buf.write("\nM\3N\3N\3N\7N\u06ba\nN\fN\16N\u06bd\13N\3O\3O\5O\u06c1") + buf.write("\nO\3O\3O\5O\u06c5\nO\3O\5O\u06c8\nO\3P\3P\3P\5P\u06cd") + buf.write("\nP\3P\3P\3P\3Q\3Q\3Q\7Q\u06d5\nQ\fQ\16Q\u06d8\13Q\3R") + buf.write("\3R\3R\3R\5R\u06de\nR\3R\3R\5R\u06e2\nR\3R\3R\7R\u06e6") + buf.write("\nR\fR\16R\u06e9\13R\3S\5S\u06ec\nS\3S\3S\3S\3S\3S\5S") + buf.write("\u06f3\nS\3T\3T\3T\5T\u06f8\nT\3U\3U\3U\3U\3U\3U\3U\3") + buf.write("U\7U\u0702\nU\fU\16U\u0705\13U\3U\3U\5U\u0709\nU\3V\3") + buf.write("V\3V\5V\u070e\nV\3W\3W\5W\u0712\nW\3X\3X\3Y\3Y\3Z\3Z\3") + buf.write("Z\3Z\7Z\u071c\nZ\fZ\16Z\u071f\13Z\3[\3[\3\\\3\\\3\\\3") + 
buf.write("]\3]\3^\3^\3_\3_\3`\3`\3a\3a\3b\3b\3b\5b\u0733\nb\3c\3") + buf.write("c\3c\3c\7c\u0739\nc\fc\16c\u073c\13c\3c\3c\3d\3d\3d\3") + buf.write("e\3e\3e\3f\3f\3g\3g\5g\u074a\ng\3h\3h\5h\u074e\nh\3i\3") + buf.write("i\3i\3i\3i\5i\u0755\ni\3j\3j\3j\3k\3k\3l\3l\3m\3m\3n\3") + buf.write("n\3n\3n\3n\5n\u0765\nn\3o\3o\3p\3p\3p\2\4\6\bq\2\4\6\b") + buf.write('\n\f\16\20\22\24\26\30\32\34\36 "$&(*,.\60\62\64\668') + buf.write(":<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084") + buf.write("\u0086\u0088\u008a\u008c\u008e\u0090\u0092\u0094\u0096") + buf.write("\u0098\u009a\u009c\u009e\u00a0\u00a2\u00a4\u00a6\u00a8") + buf.write("\u00aa\u00ac\u00ae\u00b0\u00b2\u00b4\u00b6\u00b8\u00ba") + buf.write("\u00bc\u00be\u00c0\u00c2\u00c4\u00c6\u00c8\u00ca\u00cc") + buf.write("\u00ce\u00d0\u00d2\u00d4\u00d6\u00d8\u00da\u00dc\u00de") + buf.write("\2%\4\2\17\20\64\64\3\2\21\22\4\2\17\20``\3\2\62\63\3") + buf.write("\2\66\67\3\2+,\4\2\u00cc\u00cc\u00ce\u00ce\3\2\u00c3\u00c4") + buf.write("\3\2\u00c5\u00c6\5\2__ac\u0085\u0086\6\2WWYYnn\u0089\u008b") + buf.write("\3\2[\\\5\2??ZZ]^\4\2JJ\u00d3\u00d3\3\2\u00a4\u00a5\4") + buf.write("\2uu\u00f6\u00f6\3\2TU\4\2==\u008d\u008d\4\2JJvv\5\2R") + buf.write("Seh\u008e\u0091\6\2RSeh\u008e\u0091\u0095\u0096\3\2\u0097") + buf.write("\u0098\3\2PQ\3\2\17\20\4\2NN\u0093\u0093\3\2wx\4\2\17") + buf.write("\17\21\21\5\2JJvv\u00ea\u00ea\3\2\u00e3\u00e8\4\2{{\u00de") + buf.write("\u00de\5\2{{\u0082\u0082\u00dd\u00dd\4\2JJ\u00e2\u00e2") + buf.write("\3\2\t\16\5\2\u00aa\u00b0\u00d4\u00d4\u00eb\u00eb\4\2") + buf.write("JJ\u00f5\u00f5\2\u0833\2\u00e5\3\2\2\2\4\u00f3\3\2\2\2") + buf.write("\6\u0113\3\2\2\2\b\u0156\3\2\2\2\n\u017b\3\2\2\2\f\u0189") + buf.write("\3\2\2\2\16\u0192\3\2\2\2\20\u0194\3\2\2\2\22\u019d\3") + buf.write("\2\2\2\24\u01a5\3\2\2\2\26\u01a8\3\2\2\2\30\u01b1\3\2") + buf.write("\2\2\32\u01ba\3\2\2\2\34\u01bf\3\2\2\2\36\u01cc\3\2\2") + buf.write('\2 \u01e1\3\2\2\2"\u0215\3\2\2\2$\u0250\3\2\2\2&\u028b') + 
buf.write("\3\2\2\2(\u028f\3\2\2\2*\u0293\3\2\2\2,\u02c5\3\2\2\2") + buf.write(".\u02f7\3\2\2\2\60\u030e\3\2\2\2\62\u0325\3\2\2\2\64\u0347") + buf.write("\3\2\2\2\66\u035e\3\2\2\28\u03c6\3\2\2\2:\u042e\3\2\2") + buf.write("\2<\u044d\3\2\2\2>\u044f\3\2\2\2@\u04a3\3\2\2\2B\u04a5") + buf.write("\3\2\2\2D\u04ac\3\2\2\2F\u04bb\3\2\2\2H\u04bd\3\2\2\2") + buf.write("J\u04f7\3\2\2\2L\u0533\3\2\2\2N\u0535\3\2\2\2P\u0539\3") + buf.write("\2\2\2R\u0542\3\2\2\2T\u0549\3\2\2\2V\u054f\3\2\2\2X\u055f") + buf.write("\3\2\2\2Z\u0561\3\2\2\2\\\u0569\3\2\2\2^\u057c\3\2\2\2") + buf.write("`\u0582\3\2\2\2b\u058f\3\2\2\2d\u0592\3\2\2\2f\u059c\3") + buf.write("\2\2\2h\u05a6\3\2\2\2j\u05ad\3\2\2\2l\u05b5\3\2\2\2n\u05ba") + buf.write("\3\2\2\2p\u05cb\3\2\2\2r\u05f5\3\2\2\2t\u05f7\3\2\2\2") + buf.write("v\u05fa\3\2\2\2x\u0603\3\2\2\2z\u0607\3\2\2\2|\u060e\3") + buf.write("\2\2\2~\u0613\3\2\2\2\u0080\u0617\3\2\2\2\u0082\u0622") + buf.write("\3\2\2\2\u0084\u0629\3\2\2\2\u0086\u0639\3\2\2\2\u0088") + buf.write("\u063b\3\2\2\2\u008a\u065f\3\2\2\2\u008c\u0689\3\2\2\2") + buf.write("\u008e\u068b\3\2\2\2\u0090\u068d\3\2\2\2\u0092\u068f\3") + buf.write("\2\2\2\u0094\u0698\3\2\2\2\u0096\u069d\3\2\2\2\u0098\u06a7") + buf.write("\3\2\2\2\u009a\u06b6\3\2\2\2\u009c\u06c0\3\2\2\2\u009e") + buf.write("\u06c9\3\2\2\2\u00a0\u06d1\3\2\2\2\u00a2\u06dd\3\2\2\2") + buf.write("\u00a4\u06eb\3\2\2\2\u00a6\u06f7\3\2\2\2\u00a8\u0708\3") + buf.write("\2\2\2\u00aa\u070a\3\2\2\2\u00ac\u070f\3\2\2\2\u00ae\u0713") + buf.write("\3\2\2\2\u00b0\u0715\3\2\2\2\u00b2\u0717\3\2\2\2\u00b4") + buf.write("\u0720\3\2\2\2\u00b6\u0722\3\2\2\2\u00b8\u0725\3\2\2\2") + buf.write("\u00ba\u0727\3\2\2\2\u00bc\u0729\3\2\2\2\u00be\u072b\3") + buf.write("\2\2\2\u00c0\u072d\3\2\2\2\u00c2\u072f\3\2\2\2\u00c4\u0734") + buf.write("\3\2\2\2\u00c6\u073f\3\2\2\2\u00c8\u0742\3\2\2\2\u00ca") + buf.write("\u0745\3\2\2\2\u00cc\u0749\3\2\2\2\u00ce\u074d\3\2\2\2") + buf.write("\u00d0\u0754\3\2\2\2\u00d2\u0756\3\2\2\2\u00d4\u0759\3") + 
buf.write("\2\2\2\u00d6\u075b\3\2\2\2\u00d8\u075d\3\2\2\2\u00da\u0764") + buf.write("\3\2\2\2\u00dc\u0766\3\2\2\2\u00de\u0768\3\2\2\2\u00e0") + buf.write("\u00e1\5\4\3\2\u00e1\u00e2\7\u00f9\2\2\u00e2\u00e4\3\2") + buf.write("\2\2\u00e3\u00e0\3\2\2\2\u00e4\u00e7\3\2\2\2\u00e5\u00e3") + buf.write("\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00e8\3\2\2\2\u00e7") + buf.write("\u00e5\3\2\2\2\u00e8\u00e9\7\2\2\3\u00e9\3\3\2\2\2\u00ea") + buf.write("\u00eb\5\u00be`\2\u00eb\u00ec\7\26\2\2\u00ec\u00ed\5\6") + buf.write("\4\2\u00ed\u00f4\3\2\2\2\u00ee\u00ef\5\u00be`\2\u00ef") + buf.write("\u00f0\7~\2\2\u00f0\u00f1\5\6\4\2\u00f1\u00f4\3\2\2\2") + buf.write('\u00f2\u00f4\5"\22\2\u00f3\u00ea\3\2\2\2\u00f3\u00ee') + buf.write("\3\2\2\2\u00f3\u00f2\3\2\2\2\u00f4\5\3\2\2\2\u00f5\u00f6") + buf.write("\b\4\1\2\u00f6\u00f7\7\3\2\2\u00f7\u00f8\5\6\4\2\u00f8") + buf.write("\u00f9\7\4\2\2\u00f9\u0114\3\2\2\2\u00fa\u0114\5\f\7\2") + buf.write("\u00fb\u00fc\t\2\2\2\u00fc\u0114\5\6\4\r\u00fd\u00fe\7") + buf.write("\31\2\2\u00fe\u00ff\5\6\4\2\u00ff\u0100\7\33\2\2\u0100") + buf.write("\u0101\5\6\4\2\u0101\u0102\7\34\2\2\u0102\u0103\5\6\4") + buf.write("\6\u0103\u0114\3\2\2\2\u0104\u010a\7\32\2\2\u0105\u0106") + buf.write("\7\u00bb\2\2\u0106\u0107\5\6\4\2\u0107\u0108\7\33\2\2") + buf.write("\u0108\u0109\5\6\4\2\u0109\u010b\3\2\2\2\u010a\u0105\3") + buf.write("\2\2\2\u010b\u010c\3\2\2\2\u010c\u010a\3\2\2\2\u010c\u010d") + buf.write("\3\2\2\2\u010d\u010e\3\2\2\2\u010e\u010f\7\34\2\2\u010f") + buf.write("\u0110\5\6\4\5\u0110\u0114\3\2\2\2\u0111\u0114\5\u00da") + buf.write("n\2\u0112\u0114\5\u00be`\2\u0113\u00f5\3\2\2\2\u0113\u00fa") + buf.write("\3\2\2\2\u0113\u00fb\3\2\2\2\u0113\u00fd\3\2\2\2\u0113") + buf.write("\u0104\3\2\2\2\u0113\u0111\3\2\2\2\u0113\u0112\3\2\2\2") + buf.write("\u0114\u0135\3\2\2\2\u0115\u0116\f\f\2\2\u0116\u0117\t") + buf.write("\3\2\2\u0117\u0134\5\6\4\r\u0118\u0119\f\13\2\2\u0119") + buf.write("\u011a\t\4\2\2\u011a\u0134\5\6\4\f\u011b\u011c\f\n\2\2") + 
buf.write("\u011c\u011d\5\u00caf\2\u011d\u011e\5\6\4\13\u011e\u0134") + buf.write("\3\2\2\2\u011f\u0120\f\b\2\2\u0120\u0121\7\61\2\2\u0121") + buf.write("\u0134\5\6\4\t\u0122\u0123\f\7\2\2\u0123\u0124\t\5\2\2") + buf.write("\u0124\u0134\5\6\4\b\u0125\u0126\f\17\2\2\u0126\u0127") + buf.write("\7\5\2\2\u0127\u0128\5\16\b\2\u0128\u0129\7\6\2\2\u0129") + buf.write("\u0134\3\2\2\2\u012a\u012b\f\16\2\2\u012b\u012c\7\27\2") + buf.write("\2\u012c\u0134\5\u00c0a\2\u012d\u012e\f\t\2\2\u012e\u0131") + buf.write("\t\6\2\2\u012f\u0132\5\u00c4c\2\u0130\u0132\5\u00d4k\2") + buf.write("\u0131\u012f\3\2\2\2\u0131\u0130\3\2\2\2\u0132\u0134\3") + buf.write("\2\2\2\u0133\u0115\3\2\2\2\u0133\u0118\3\2\2\2\u0133\u011b") + buf.write("\3\2\2\2\u0133\u011f\3\2\2\2\u0133\u0122\3\2\2\2\u0133") + buf.write("\u0125\3\2\2\2\u0133\u012a\3\2\2\2\u0133\u012d\3\2\2\2") + buf.write("\u0134\u0137\3\2\2\2\u0135\u0133\3\2\2\2\u0135\u0136\3") + buf.write("\2\2\2\u0136\7\3\2\2\2\u0137\u0135\3\2\2\2\u0138\u0139") + buf.write("\b\5\1\2\u0139\u013a\7\3\2\2\u013a\u013b\5\b\5\2\u013b") + buf.write("\u013c\7\4\2\2\u013c\u0157\3\2\2\2\u013d\u0157\5\n\6\2") + buf.write("\u013e\u013f\t\2\2\2\u013f\u0157\5\b\5\r\u0140\u0141\7") + buf.write("\31\2\2\u0141\u0142\5\b\5\2\u0142\u0143\7\33\2\2\u0143") + buf.write("\u0144\5\b\5\2\u0144\u0145\7\34\2\2\u0145\u0146\5\b\5") + buf.write("\6\u0146\u0157\3\2\2\2\u0147\u014d\7\32\2\2\u0148\u0149") + buf.write("\7\u00bb\2\2\u0149\u014a\5\b\5\2\u014a\u014b\7\33\2\2") + buf.write("\u014b\u014c\5\b\5\2\u014c\u014e\3\2\2\2\u014d\u0148\3") + buf.write("\2\2\2\u014e\u014f\3\2\2\2\u014f\u014d\3\2\2\2\u014f\u0150") + buf.write("\3\2\2\2\u0150\u0151\3\2\2\2\u0151\u0152\7\34\2\2\u0152") + buf.write("\u0153\5\b\5\5\u0153\u0157\3\2\2\2\u0154\u0157\5\u00da") + buf.write("n\2\u0155\u0157\5\u00c2b\2\u0156\u0138\3\2\2\2\u0156\u013d") + buf.write("\3\2\2\2\u0156\u013e\3\2\2\2\u0156\u0140\3\2\2\2\u0156") + buf.write("\u0147\3\2\2\2\u0156\u0154\3\2\2\2\u0156\u0155\3\2\2\2") + 
buf.write("\u0157\u0170\3\2\2\2\u0158\u0159\f\f\2\2\u0159\u015a\t") + buf.write("\3\2\2\u015a\u016f\5\b\5\r\u015b\u015c\f\13\2\2\u015c") + buf.write("\u015d\t\4\2\2\u015d\u016f\5\b\5\f\u015e\u015f\f\n\2\2") + buf.write("\u015f\u0160\5\u00caf\2\u0160\u0161\5\b\5\13\u0161\u016f") + buf.write("\3\2\2\2\u0162\u0163\f\b\2\2\u0163\u0164\7\61\2\2\u0164") + buf.write("\u016f\5\b\5\t\u0165\u0166\f\7\2\2\u0166\u0167\t\5\2\2") + buf.write("\u0167\u016f\5\b\5\b\u0168\u0169\f\t\2\2\u0169\u016c\t") + buf.write("\6\2\2\u016a\u016d\5\u00c4c\2\u016b\u016d\5\u00d4k\2\u016c") + buf.write("\u016a\3\2\2\2\u016c\u016b\3\2\2\2\u016d\u016f\3\2\2\2") + buf.write("\u016e\u0158\3\2\2\2\u016e\u015b\3\2\2\2\u016e\u015e\3") + buf.write("\2\2\2\u016e\u0162\3\2\2\2\u016e\u0165\3\2\2\2\u016e\u0168") + buf.write("\3\2\2\2\u016f\u0172\3\2\2\2\u0170\u016e\3\2\2\2\u0170") + buf.write("\u0171\3\2\2\2\u0171\t\3\2\2\2\u0172\u0170\3\2\2\2\u0173") + buf.write("\u017c\5&\24\2\u0174\u017c\5.\30\2\u0175\u017c\5\62\32") + buf.write("\2\u0176\u017c\5\66\34\2\u0177\u017c\5:\36\2\u0178\u017c") + buf.write("\5D#\2\u0179\u017c\5F$\2\u017a\u017c\5L'\2\u017b\u0173") + buf.write("\3\2\2\2\u017b\u0174\3\2\2\2\u017b\u0175\3\2\2\2\u017b") + buf.write("\u0176\3\2\2\2\u017b\u0177\3\2\2\2\u017b\u0178\3\2\2\2") + buf.write("\u017b\u0179\3\2\2\2\u017b\u017a\3\2\2\2\u017c\13\3\2") + buf.write("\2\2\u017d\u018a\5 \21\2\u017e\u018a\5$\23\2\u017f\u018a") + buf.write("\5,\27\2\u0180\u018a\5\60\31\2\u0181\u018a\5\64\33\2\u0182") + buf.write("\u018a\58\35\2\u0183\u018a\5<\37\2\u0184\u018a\5> \2\u0185") + buf.write('\u018a\5@!\2\u0186\u018a\5B"\2\u0187\u018a\5H%\2\u0188') + buf.write("\u018a\5J&\2\u0189\u017d\3\2\2\2\u0189\u017e\3\2\2\2\u0189") + buf.write("\u017f\3\2\2\2\u0189\u0180\3\2\2\2\u0189\u0181\3\2\2\2") + buf.write("\u0189\u0182\3\2\2\2\u0189\u0183\3\2\2\2\u0189\u0184\3") + buf.write("\2\2\2\u0189\u0185\3\2\2\2\u0189\u0186\3\2\2\2\u0189\u0187") + buf.write("\3\2\2\2\u0189\u0188\3\2\2\2\u018a\r\3\2\2\2\u018b\u0193") + 
buf.write("\5\20\t\2\u018c\u0193\5\22\n\2\u018d\u0193\5\24\13\2\u018e") + buf.write("\u0193\5\26\f\2\u018f\u0193\5\30\r\2\u0190\u0193\5\32") + buf.write("\16\2\u0191\u0193\5\36\20\2\u0192\u018b\3\2\2\2\u0192") + buf.write("\u018c\3\2\2\2\u0192\u018d\3\2\2\2\u0192\u018e\3\2\2\2") + buf.write("\u0192\u018f\3\2\2\2\u0192\u0190\3\2\2\2\u0192\u0191\3") + buf.write("\2\2\2\u0193\17\3\2\2\2\u0194\u0195\7/\2\2\u0195\u019a") + buf.write("\5N(\2\u0196\u0197\7\23\2\2\u0197\u0199\5N(\2\u0198\u0196") + buf.write("\3\2\2\2\u0199\u019c\3\2\2\2\u019a\u0198\3\2\2\2\u019a") + buf.write("\u019b\3\2\2\2\u019b\21\3\2\2\2\u019c\u019a\3\2\2\2\u019d") + buf.write("\u019e\7K\2\2\u019e\u01a3\5P)\2\u019f\u01a1\5r:\2\u01a0") + buf.write("\u01a2\5t;\2\u01a1\u01a0\3\2\2\2\u01a1\u01a2\3\2\2\2\u01a2") + buf.write("\u01a4\3\2\2\2\u01a3\u019f\3\2\2\2\u01a3\u01a4\3\2\2\2") + buf.write("\u01a4\23\3\2\2\2\u01a5\u01a6\7l\2\2\u01a6\u01a7\5\b\5") + buf.write("\2\u01a7\25\3\2\2\2\u01a8\u01a9\7-\2\2\u01a9\u01ae\5T") + buf.write("+\2\u01aa\u01ab\7\23\2\2\u01ab\u01ad\5T+\2\u01ac\u01aa") + buf.write("\3\2\2\2\u01ad\u01b0\3\2\2\2\u01ae\u01ac\3\2\2\2\u01ae") + buf.write("\u01af\3\2\2\2\u01af\27\3\2\2\2\u01b0\u01ae\3\2\2\2\u01b1") + buf.write("\u01b2\t\7\2\2\u01b2\u01b7\5\u00c2b\2\u01b3\u01b4\7\23") + buf.write("\2\2\u01b4\u01b6\5\u00c2b\2\u01b5\u01b3\3\2\2\2\u01b6") + buf.write("\u01b9\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b7\u01b8\3\2\2\2") + buf.write("\u01b8\31\3\2\2\2\u01b9\u01b7\3\2\2\2\u01ba\u01bb\t\b") + buf.write("\2\2\u01bb\u01bc\5\u00c2b\2\u01bc\u01bd\7\23\2\2\u01bd") + buf.write("\u01be\5\u00c2b\2\u01be\33\3\2\2\2\u01bf\u01c0\7\u00cd") + buf.write("\2\2\u01c0\u01c1\5\u00c2b\2\u01c1\u01c2\7\23\2\2\u01c2") + buf.write("\u01c3\5\u00c2b\2\u01c3\u01c4\7\66\2\2\u01c4\u01c9\5\u00da") + buf.write("n\2\u01c5\u01c6\7\23\2\2\u01c6\u01c8\5\u00dan\2\u01c7") + buf.write("\u01c5\3\2\2\2\u01c8\u01cb\3\2\2\2\u01c9\u01c7\3\2\2\2") + buf.write("\u01c9\u01ca\3\2\2\2\u01ca\35\3\2\2\2\u01cb\u01c9\3\2") + 
buf.write("\2\2\u01cc\u01cd\7\u00cf\2\2\u01cd\u01d2\5V,\2\u01ce\u01cf") + buf.write("\7\23\2\2\u01cf\u01d1\5V,\2\u01d0\u01ce\3\2\2\2\u01d1") + buf.write("\u01d4\3\2\2\2\u01d2\u01d0\3\2\2\2\u01d2\u01d3\3\2\2\2") + buf.write("\u01d3\37\3\2\2\2\u01d4\u01d2\3\2\2\2\u01d5\u01d6\t\t") + buf.write("\2\2\u01d6\u01d7\7\3\2\2\u01d7\u01d8\5\\/\2\u01d8\u01d9") + buf.write("\5`\61\2\u01d9\u01da\7\4\2\2\u01da\u01e2\3\2\2\2\u01db") + buf.write("\u01dc\t\n\2\2\u01dc\u01dd\7\3\2\2\u01dd\u01de\5Z.\2\u01de") + buf.write("\u01df\5`\61\2\u01df\u01e0\7\4\2\2\u01e0\u01e2\3\2\2\2") + buf.write("\u01e1\u01d5\3\2\2\2\u01e1\u01db\3\2\2\2\u01e2!\3\2\2") + buf.write("\2\u01e3\u01e4\7}\2\2\u01e4\u01e5\7|\2\2\u01e5\u01e6\5") + buf.write("\u00d6l\2\u01e6\u01ef\7\3\2\2\u01e7\u01ec\5v<\2\u01e8") + buf.write("\u01e9\7\23\2\2\u01e9\u01eb\5v<\2\u01ea\u01e8\3\2\2\2") + buf.write("\u01eb\u01ee\3\2\2\2\u01ec\u01ea\3\2\2\2\u01ec\u01ed\3") + buf.write("\2\2\2\u01ed\u01f0\3\2\2\2\u01ee\u01ec\3\2\2\2\u01ef\u01e7") + buf.write("\3\2\2\2\u01ef\u01f0\3\2\2\2\u01f0\u01f1\3\2\2\2\u01f1") + buf.write("\u01f4\7\4\2\2\u01f2\u01f3\7\u00cb\2\2\u01f3\u01f5\5x") + buf.write("=\2\u01f4\u01f2\3\2\2\2\u01f4\u01f5\3\2\2\2\u01f5\u01f6") + buf.write("\3\2\2\2\u01f6\u01f7\7\u00ba\2\2\u01f7\u01f8\5\6\4\2\u01f8") + buf.write("\u01f9\7\u0083\2\2\u01f9\u01fa\7|\2\2\u01fa\u0216\3\2") + buf.write("\2\2\u01fb\u01fc\7}\2\2\u01fc\u01fd\7\177\2\2\u01fd\u01fe") + buf.write("\7\u0081\2\2\u01fe\u01ff\5\u0090I\2\u01ff\u0200\7\3\2") + buf.write("\2\u0200\u0201\5\u0092J\2\u0201\u0202\7\4\2\2\u0202\u0203") + buf.write("\7\u00ba\2\2\u0203\u0204\5\u0096L\2\u0204\u0205\7\u0083") + buf.write("\2\2\u0205\u0206\7\177\2\2\u0206\u0207\7\u0081\2\2\u0207") + buf.write("\u0216\3\2\2\2\u0208\u0209\7}\2\2\u0209\u020a\7\u0080") + buf.write("\2\2\u020a\u020b\7\u0081\2\2\u020b\u020c\5\u0090I\2\u020c") + buf.write("\u020d\7\3\2\2\u020d\u020e\5\u009eP\2\u020e\u020f\7\4") + buf.write("\2\2\u020f\u0210\7\u00ba\2\2\u0210\u0211\5\u009aN\2\u0211") + 
buf.write("\u0212\7\u0083\2\2\u0212\u0213\7\u0080\2\2\u0213\u0214") + buf.write("\7\u0081\2\2\u0214\u0216\3\2\2\2\u0215\u01e3\3\2\2\2\u0215") + buf.write("\u01fb\3\2\2\2\u0215\u0208\3\2\2\2\u0216#\3\2\2\2\u0217") + buf.write("\u0218\5\u00d6l\2\u0218\u0221\7\3\2\2\u0219\u021e\5*\26") + buf.write("\2\u021a\u021b\7\23\2\2\u021b\u021d\5*\26\2\u021c\u021a") + buf.write("\3\2\2\2\u021d\u0220\3\2\2\2\u021e\u021c\3\2\2\2\u021e") + buf.write("\u021f\3\2\2\2\u021f\u0222\3\2\2\2\u0220\u021e\3\2\2\2") + buf.write("\u0221\u0219\3\2\2\2\u0221\u0222\3\2\2\2\u0222\u0223\3") + buf.write("\2\2\2\u0223\u0224\7\4\2\2\u0224\u0251\3\2\2\2\u0225\u0226") + buf.write("\7\30\2\2\u0226\u0227\7\3\2\2\u0227\u0228\5\u00d8m\2\u0228") + buf.write("\u022b\7\3\2\2\u0229\u022c\5\u00be`\2\u022a\u022c\5X-") + buf.write("\2\u022b\u0229\3\2\2\2\u022b\u022a\3\2\2\2\u022b\u022c") + buf.write("\3\2\2\2\u022c\u0234\3\2\2\2\u022d\u0230\7\23\2\2\u022e") + buf.write("\u0231\5\u00be`\2\u022f\u0231\5X-\2\u0230\u022e\3\2\2") + buf.write("\2\u0230\u022f\3\2\2\2\u0231\u0233\3\2\2\2\u0232\u022d") + buf.write("\3\2\2\2\u0233\u0236\3\2\2\2\u0234\u0232\3\2\2\2\u0234") + buf.write("\u0235\3\2\2\2\u0235\u0237\3\2\2\2\u0236\u0234\3\2\2\2") + buf.write("\u0237\u023a\7\4\2\2\u0238\u0239\7\u00f2\2\2\u0239\u023b") + buf.write("\7\u00f6\2\2\u023a\u0238\3\2\2\2\u023a\u023b\3\2\2\2\u023b") + buf.write("\u023e\3\2\2\2\u023c\u023d\7\u00cb\2\2\u023d\u023f\5\u0086") + buf.write("D\2\u023e\u023c\3\2\2\2\u023e\u023f\3\2\2\2\u023f\u0240") + buf.write("\3\2\2\2\u0240\u0241\7\4\2\2\u0241\u0251\3\2\2\2\u0242") + buf.write("\u0243\7\u00dc\2\2\u0243\u0244\7\3\2\2\u0244\u0245\5\6") + buf.write("\4\2\u0245\u0248\7\23\2\2\u0246\u0249\5\u00dco\2\u0247") + buf.write("\u0249\5\u008eH\2\u0248\u0246\3\2\2\2\u0248\u0247\3\2") + buf.write("\2\2\u0249\u024c\3\2\2\2\u024a\u024b\7\23\2\2\u024b\u024d") + buf.write("\7\u00f6\2\2\u024c\u024a\3\2\2\2\u024c\u024d\3\2\2\2\u024d") + buf.write("\u024e\3\2\2\2\u024e\u024f\7\4\2\2\u024f\u0251\3\2\2\2") + 
buf.write("\u0250\u0217\3\2\2\2\u0250\u0225\3\2\2\2\u0250\u0242\3") + buf.write("\2\2\2\u0251%\3\2\2\2\u0252\u0253\5\u00d6l\2\u0253\u025c") + buf.write("\7\3\2\2\u0254\u0259\5(\25\2\u0255\u0256\7\23\2\2\u0256") + buf.write("\u0258\5(\25\2\u0257\u0255\3\2\2\2\u0258\u025b\3\2\2\2") + buf.write("\u0259\u0257\3\2\2\2\u0259\u025a\3\2\2\2\u025a\u025d\3") + buf.write("\2\2\2\u025b\u0259\3\2\2\2\u025c\u0254\3\2\2\2\u025c\u025d") + buf.write("\3\2\2\2\u025d\u025e\3\2\2\2\u025e\u025f\7\4\2\2\u025f") + buf.write("\u028c\3\2\2\2\u0260\u0261\7\u00dc\2\2\u0261\u0262\7\3") + buf.write("\2\2\u0262\u0263\5\b\5\2\u0263\u0266\7\23\2\2\u0264\u0267") + buf.write("\5\u00dco\2\u0265\u0267\5\u008eH\2\u0266\u0264\3\2\2\2") + buf.write("\u0266\u0265\3\2\2\2\u0267\u026a\3\2\2\2\u0268\u0269\7") + buf.write("\23\2\2\u0269\u026b\7\u00f6\2\2\u026a\u0268\3\2\2\2\u026a") + buf.write("\u026b\3\2\2\2\u026b\u026c\3\2\2\2\u026c\u026d\7\4\2\2") + buf.write("\u026d\u028c\3\2\2\2\u026e\u026f\7\30\2\2\u026f\u0270") + buf.write("\7\3\2\2\u0270\u0271\5\u00d8m\2\u0271\u0274\7\3\2\2\u0272") + buf.write("\u0275\5\u00c2b\2\u0273\u0275\5X-\2\u0274\u0272\3\2\2") + buf.write("\2\u0274\u0273\3\2\2\2\u0274\u0275\3\2\2\2\u0275\u027d") + buf.write("\3\2\2\2\u0276\u0279\7\23\2\2\u0277\u027a\5\u00c2b\2\u0278") + buf.write("\u027a\5X-\2\u0279\u0277\3\2\2\2\u0279\u0278\3\2\2\2\u027a") + buf.write("\u027c\3\2\2\2\u027b\u0276\3\2\2\2\u027c\u027f\3\2\2\2") + buf.write("\u027d\u027b\3\2\2\2\u027d\u027e\3\2\2\2\u027e\u0280\3") + buf.write("\2\2\2\u027f\u027d\3\2\2\2\u0280\u0283\7\4\2\2\u0281\u0282") + buf.write("\7\u00f2\2\2\u0282\u0284\7\u00f6\2\2\u0283\u0281\3\2\2") + buf.write("\2\u0283\u0284\3\2\2\2\u0284\u0287\3\2\2\2\u0285\u0286") + buf.write("\7\u00cb\2\2\u0286\u0288\5z>\2\u0287\u0285\3\2\2\2\u0287") + buf.write("\u0288\3\2\2\2\u0288\u0289\3\2\2\2\u0289\u028a\7\4\2\2") + buf.write("\u028a\u028c\3\2\2\2\u028b\u0252\3\2\2\2\u028b\u0260\3") + buf.write("\2\2\2\u028b\u026e\3\2\2\2\u028c'\3\2\2\2\u028d\u0290") + 
buf.write("\5\b\5\2\u028e\u0290\7u\2\2\u028f\u028d\3\2\2\2\u028f") + buf.write("\u028e\3\2\2\2\u0290)\3\2\2\2\u0291\u0294\5\6\4\2\u0292") + buf.write("\u0294\7u\2\2\u0293\u0291\3\2\2\2\u0293\u0292\3\2\2\2") + buf.write("\u0294+\3\2\2\2\u0295\u0296\t\13\2\2\u0296\u0297\7\3\2") + buf.write("\2\u0297\u0298\5\6\4\2\u0298\u0299\7\4\2\2\u0299\u02c6") + buf.write("\3\2\2\2\u029a\u029b\7d\2\2\u029b\u029c\7\3\2\2\u029c") + buf.write("\u02a7\5\6\4\2\u029d\u029e\7\23\2\2\u029e\u029f\5\u00cc") + buf.write("g\2\u029f\u02a0\3\2\2\2\u02a0\u02a1\7\23\2\2\u02a1\u02a2") + buf.write("\5\u00ccg\2\u02a2\u02a4\3\2\2\2\u02a3\u029d\3\2\2\2\u02a3") + buf.write("\u02a4\3\2\2\2\u02a4\u02a8\3\2\2\2\u02a5\u02a6\7\23\2") + buf.write("\2\u02a6\u02a8\5\u00ccg\2\u02a7\u02a3\3\2\2\2\u02a7\u02a5") + buf.write("\3\2\2\2\u02a8\u02a9\3\2\2\2\u02a9\u02aa\7\4\2\2\u02aa") + buf.write("\u02c6\3\2\2\2\u02ab\u02ac\7\u0088\2\2\u02ac\u02ad\7\3") + buf.write("\2\2\u02ad\u02ae\5\6\4\2\u02ae\u02af\7\23\2\2\u02af\u02b2") + buf.write("\5\6\4\2\u02b0\u02b1\7\23\2\2\u02b1\u02b3\5\u00ccg\2\u02b2") + buf.write("\u02b0\3\2\2\2\u02b2\u02b3\3\2\2\2\u02b3\u02b4\3\2\2\2") + buf.write("\u02b4\u02b5\7\4\2\2\u02b5\u02c6\3\2\2\2\u02b6\u02b7\7") + buf.write("\u0087\2\2\u02b7\u02b8\7\3\2\2\u02b8\u02b9\5\6\4\2\u02b9") + buf.write("\u02ba\7\23\2\2\u02ba\u02bd\5\6\4\2\u02bb\u02bc\7\23\2") + buf.write("\2\u02bc\u02be\5\u00ccg\2\u02bd\u02bb\3\2\2\2\u02bd\u02be") + buf.write("\3\2\2\2\u02be\u02c1\3\2\2\2\u02bf\u02c0\7\23\2\2\u02c0") + buf.write("\u02c2\5\u00ccg\2\u02c1\u02bf\3\2\2\2\u02c1\u02c2\3\2") + buf.write("\2\2\u02c2\u02c3\3\2\2\2\u02c3\u02c4\7\4\2\2\u02c4\u02c6") + buf.write("\3\2\2\2\u02c5\u0295\3\2\2\2\u02c5\u029a\3\2\2\2\u02c5") + buf.write("\u02ab\3\2\2\2\u02c5\u02b6\3\2\2\2\u02c6-\3\2\2\2\u02c7") + buf.write("\u02c8\t\13\2\2\u02c8\u02c9\7\3\2\2\u02c9\u02ca\5\b\5") + buf.write("\2\u02ca\u02cb\7\4\2\2\u02cb\u02f8\3\2\2\2\u02cc\u02cd") + buf.write("\7d\2\2\u02cd\u02ce\7\3\2\2\u02ce\u02d9\5\b\5\2\u02cf") + 
buf.write("\u02d0\7\23\2\2\u02d0\u02d1\5\u00ceh\2\u02d1\u02d2\3\2") + buf.write("\2\2\u02d2\u02d3\7\23\2\2\u02d3\u02d4\5\u00ceh\2\u02d4") + buf.write("\u02d6\3\2\2\2\u02d5\u02cf\3\2\2\2\u02d5\u02d6\3\2\2\2") + buf.write("\u02d6\u02da\3\2\2\2\u02d7\u02d8\7\23\2\2\u02d8\u02da") + buf.write("\5\u00ceh\2\u02d9\u02d5\3\2\2\2\u02d9\u02d7\3\2\2\2\u02da") + buf.write("\u02db\3\2\2\2\u02db\u02dc\7\4\2\2\u02dc\u02f8\3\2\2\2") + buf.write("\u02dd\u02de\7\u0088\2\2\u02de\u02df\7\3\2\2\u02df\u02e0") + buf.write("\5\b\5\2\u02e0\u02e1\7\23\2\2\u02e1\u02e4\5\b\5\2\u02e2") + buf.write("\u02e3\7\23\2\2\u02e3\u02e5\5\u00ceh\2\u02e4\u02e2\3\2") + buf.write("\2\2\u02e4\u02e5\3\2\2\2\u02e5\u02e6\3\2\2\2\u02e6\u02e7") + buf.write("\7\4\2\2\u02e7\u02f8\3\2\2\2\u02e8\u02e9\7\u0087\2\2\u02e9") + buf.write("\u02ea\7\3\2\2\u02ea\u02eb\5\b\5\2\u02eb\u02ec\7\23\2") + buf.write("\2\u02ec\u02ef\5\b\5\2\u02ed\u02ee\7\23\2\2\u02ee\u02f0") + buf.write("\5\u00ceh\2\u02ef\u02ed\3\2\2\2\u02ef\u02f0\3\2\2\2\u02f0") + buf.write("\u02f3\3\2\2\2\u02f1\u02f2\7\23\2\2\u02f2\u02f4\5\u00ce") + buf.write("h\2\u02f3\u02f1\3\2\2\2\u02f3\u02f4\3\2\2\2\u02f4\u02f5") + buf.write("\3\2\2\2\u02f5\u02f6\7\4\2\2\u02f6\u02f8\3\2\2\2\u02f7") + buf.write("\u02c7\3\2\2\2\u02f7\u02cc\3\2\2\2\u02f7\u02dd\3\2\2\2") + buf.write("\u02f7\u02e8\3\2\2\2\u02f8/\3\2\2\2\u02f9\u02fa\t\f\2") + buf.write("\2\u02fa\u02fb\7\3\2\2\u02fb\u02fc\5\6\4\2\u02fc\u02fd") + buf.write("\7\4\2\2\u02fd\u030f\3\2\2\2\u02fe\u02ff\t\r\2\2\u02ff") + buf.write("\u0300\7\3\2\2\u0300\u0303\5\6\4\2\u0301\u0302\7\23\2") + buf.write("\2\u0302\u0304\5\u00ccg\2\u0303\u0301\3\2\2\2\u0303\u0304") + buf.write("\3\2\2\2\u0304\u0305\3\2\2\2\u0305\u0306\7\4\2\2\u0306") + buf.write("\u030f\3\2\2\2\u0307\u0308\t\16\2\2\u0308\u0309\7\3\2") + buf.write("\2\u0309\u030a\5\6\4\2\u030a\u030b\7\23\2\2\u030b\u030c") + buf.write("\5\6\4\2\u030c\u030d\7\4\2\2\u030d\u030f\3\2\2\2\u030e") + buf.write("\u02f9\3\2\2\2\u030e\u02fe\3\2\2\2\u030e\u0307\3\2\2\2") + 
buf.write("\u030f\61\3\2\2\2\u0310\u0311\t\f\2\2\u0311\u0312\7\3") + buf.write("\2\2\u0312\u0313\5\b\5\2\u0313\u0314\7\4\2\2\u0314\u0326") + buf.write("\3\2\2\2\u0315\u0316\t\r\2\2\u0316\u0317\7\3\2\2\u0317") + buf.write("\u031a\5\b\5\2\u0318\u0319\7\23\2\2\u0319\u031b\5\u00ce") + buf.write("h\2\u031a\u0318\3\2\2\2\u031a\u031b\3\2\2\2\u031b\u031c") + buf.write("\3\2\2\2\u031c\u031d\7\4\2\2\u031d\u0326\3\2\2\2\u031e") + buf.write("\u031f\t\16\2\2\u031f\u0320\7\3\2\2\u0320\u0321\5\b\5") + buf.write("\2\u0321\u0322\7\23\2\2\u0322\u0323\5\b\5\2\u0323\u0324") + buf.write("\7\4\2\2\u0324\u0326\3\2\2\2\u0325\u0310\3\2\2\2\u0325") + buf.write("\u0315\3\2\2\2\u0325\u031e\3\2\2\2\u0326\63\3\2\2\2\u0327") + buf.write("\u0328\7\65\2\2\u0328\u0329\7\3\2\2\u0329\u032a\5\6\4") + buf.write("\2\u032a\u032b\7\23\2\2\u032b\u032c\5\6\4\2\u032c\u032d") + buf.write("\7\23\2\2\u032d\u032e\5\6\4\2\u032e\u032f\7\4\2\2\u032f") + buf.write("\u0348\3\2\2\2\u0330\u0331\7q\2\2\u0331\u0332\7\3\2\2") + buf.write("\u0332\u0333\5\6\4\2\u0333\u0334\7\23\2\2\u0334\u0335") + buf.write("\5\6\4\2\u0335\u0336\7\4\2\2\u0336\u0348\3\2\2\2\u0337") + buf.write("\u0338\79\2\2\u0338\u0339\7\3\2\2\u0339\u033a\5\6\4\2") + buf.write("\u033a\u033b\7\4\2\2\u033b\u0348\3\2\2\2\u033c\u033d\7") + buf.write("E\2\2\u033d\u033e\7\3\2\2\u033e\u033f\5\6\4\2\u033f\u0340") + buf.write("\7\23\2\2\u0340\u0343\5\6\4\2\u0341\u0342\7\23\2\2\u0342") + buf.write("\u0344\5\u00dep\2\u0343\u0341\3\2\2\2\u0343\u0344\3\2") + buf.write("\2\2\u0344\u0345\3\2\2\2\u0345\u0346\7\4\2\2\u0346\u0348") + buf.write("\3\2\2\2\u0347\u0327\3\2\2\2\u0347\u0330\3\2\2\2\u0347") + buf.write("\u0337\3\2\2\2\u0347\u033c\3\2\2\2\u0348\65\3\2\2\2\u0349") + buf.write("\u034a\7\65\2\2\u034a\u034b\7\3\2\2\u034b\u034c\5\b\5") + buf.write("\2\u034c\u034d\7\23\2\2\u034d\u034e\5\b\5\2\u034e\u034f") + buf.write("\7\23\2\2\u034f\u0350\5\b\5\2\u0350\u0351\7\4\2\2\u0351") + buf.write("\u035f\3\2\2\2\u0352\u0353\7q\2\2\u0353\u0354\7\3\2\2") + 
buf.write("\u0354\u0355\5\b\5\2\u0355\u0356\7\23\2\2\u0356\u0357") + buf.write("\5\b\5\2\u0357\u0358\7\4\2\2\u0358\u035f\3\2\2\2\u0359") + buf.write("\u035a\79\2\2\u035a\u035b\7\3\2\2\u035b\u035c\5\b\5\2") + buf.write("\u035c\u035d\7\4\2\2\u035d\u035f\3\2\2\2\u035e\u0349\3") + buf.write("\2\2\2\u035e\u0352\3\2\2\2\u035e\u0359\3\2\2\2\u035f\67") + buf.write("\3\2\2\2\u0360\u0361\7\u00d2\2\2\u0361\u0363\7\3\2\2\u0362") + buf.write("\u0364\5\6\4\2\u0363\u0362\3\2\2\2\u0363\u0364\3\2\2\2") + buf.write("\u0364\u0365\3\2\2\2\u0365\u03c7\7\4\2\2\u0366\u0367\7") + buf.write("\u00a3\2\2\u0367\u0368\7\3\2\2\u0368\u036b\5\6\4\2\u0369") + buf.write("\u036a\7\23\2\2\u036a\u036c\t\17\2\2\u036b\u0369\3\2\2") + buf.write("\2\u036b\u036c\3\2\2\2\u036c\u036d\3\2\2\2\u036d\u036e") + buf.write("\7\4\2\2\u036e\u03c7\3\2\2\2\u036f\u0370\t\20\2\2\u0370") + buf.write("\u0371\7\3\2\2\u0371\u0372\5\6\4\2\u0372\u0373\7\4\2\2") + buf.write("\u0373\u03c7\3\2\2\2\u0374\u0375\7\u00a6\2\2\u0375\u0376") + buf.write("\7\3\2\2\u0376\u0377\5\6\4\2\u0377\u0378\7\23\2\2\u0378") + buf.write("\u0379\5l\67\2\u0379\u037a\7\4\2\2\u037a\u03c7\3\2\2\2") + buf.write("\u037b\u037c\7\u00d5\2\2\u037c\u037d\7\3\2\2\u037d\u0380") + buf.write("\7\u00f6\2\2\u037e\u037f\7\23\2\2\u037f\u0381\t\21\2\2") + buf.write("\u0380\u037e\3\2\2\2\u0380\u0381\3\2\2\2\u0381\u0384\3") + buf.write("\2\2\2\u0382\u0383\7\23\2\2\u0383\u0385\5\u00ccg\2\u0384") + buf.write("\u0382\3\2\2\2\u0384\u0385\3\2\2\2\u0385\u0388\3\2\2\2") + buf.write("\u0386\u0387\7\23\2\2\u0387\u0389\t\22\2\2\u0388\u0386") + buf.write("\3\2\2\2\u0388\u0389\3\2\2\2\u0389\u038a\3\2\2\2\u038a") + buf.write("\u03c7\7\4\2\2\u038b\u038c\7\37\2\2\u038c\u038d\7\3\2") + buf.write("\2\u038d\u03c7\7\4\2\2\u038e\u038f\7 \2\2\u038f\u0390") + buf.write("\7\3\2\2\u0390\u0391\5\6\4\2\u0391\u0392\7\23\2\2\u0392") + buf.write("\u0393\5\6\4\2\u0393\u0394\7\4\2\2\u0394\u03c7\3\2\2\2") + buf.write("\u0395\u0396\7!\2\2\u0396\u0397\7\3\2\2\u0397\u0398\5") + 
buf.write("\6\4\2\u0398\u0399\7\23\2\2\u0399\u039a\5\6\4\2\u039a") + buf.write("\u039b\7\23\2\2\u039b\u039c\5\6\4\2\u039c\u039d\7\4\2") + buf.write('\2\u039d\u03c7\3\2\2\2\u039e\u039f\7"\2\2\u039f\u03a0') + buf.write("\7\3\2\2\u03a0\u03a1\5\6\4\2\u03a1\u03a2\7\4\2\2\u03a2") + buf.write("\u03c7\3\2\2\2\u03a3\u03a4\7#\2\2\u03a4\u03a5\7\3\2\2") + buf.write("\u03a5\u03a6\5\6\4\2\u03a6\u03a7\7\4\2\2\u03a7\u03c7\3") + buf.write("\2\2\2\u03a8\u03a9\7$\2\2\u03a9\u03aa\7\3\2\2\u03aa\u03ab") + buf.write("\5\6\4\2\u03ab\u03ac\7\4\2\2\u03ac\u03c7\3\2\2\2\u03ad") + buf.write("\u03ae\7%\2\2\u03ae\u03af\7\3\2\2\u03af\u03b0\5\6\4\2") + buf.write("\u03b0\u03b1\7\4\2\2\u03b1\u03c7\3\2\2\2\u03b2\u03b3\7") + buf.write("&\2\2\u03b3\u03b4\7\3\2\2\u03b4\u03b5\5\6\4\2\u03b5\u03b6") + buf.write("\7\4\2\2\u03b6\u03c7\3\2\2\2\u03b7\u03b8\7'\2\2\u03b8") + buf.write("\u03b9\7\3\2\2\u03b9\u03ba\5\6\4\2\u03ba\u03bb\7\4\2\2") + buf.write("\u03bb\u03c7\3\2\2\2\u03bc\u03bd\7(\2\2\u03bd\u03be\7") + buf.write("\3\2\2\u03be\u03bf\5\6\4\2\u03bf\u03c0\7\4\2\2\u03c0\u03c7") + buf.write("\3\2\2\2\u03c1\u03c2\7)\2\2\u03c2\u03c3\7\3\2\2\u03c3") + buf.write("\u03c4\5\6\4\2\u03c4\u03c5\7\4\2\2\u03c5\u03c7\3\2\2\2") + buf.write("\u03c6\u0360\3\2\2\2\u03c6\u0366\3\2\2\2\u03c6\u036f\3") + buf.write("\2\2\2\u03c6\u0374\3\2\2\2\u03c6\u037b\3\2\2\2\u03c6\u038b") + buf.write("\3\2\2\2\u03c6\u038e\3\2\2\2\u03c6\u0395\3\2\2\2\u03c6") + buf.write("\u039e\3\2\2\2\u03c6\u03a3\3\2\2\2\u03c6\u03a8\3\2\2\2") + buf.write("\u03c6\u03ad\3\2\2\2\u03c6\u03b2\3\2\2\2\u03c6\u03b7\3") + buf.write("\2\2\2\u03c6\u03bc\3\2\2\2\u03c6\u03c1\3\2\2\2\u03c79") + buf.write("\3\2\2\2\u03c8\u03c9\7\u00d2\2\2\u03c9\u03cb\7\3\2\2\u03ca") + buf.write("\u03cc\5\b\5\2\u03cb\u03ca\3\2\2\2\u03cb\u03cc\3\2\2\2") + buf.write("\u03cc\u03cd\3\2\2\2\u03cd\u042f\7\4\2\2\u03ce\u03cf\7") + buf.write("\u00a3\2\2\u03cf\u03d0\7\3\2\2\u03d0\u03d3\5\b\5\2\u03d1") + buf.write("\u03d2\7\23\2\2\u03d2\u03d4\t\17\2\2\u03d3\u03d1\3\2\2") + 
buf.write("\2\u03d3\u03d4\3\2\2\2\u03d4\u03d5\3\2\2\2\u03d5\u03d6") + buf.write("\7\4\2\2\u03d6\u042f\3\2\2\2\u03d7\u03d8\t\20\2\2\u03d8") + buf.write("\u03d9\7\3\2\2\u03d9\u03da\5\b\5\2\u03da\u03db\7\4\2\2") + buf.write("\u03db\u042f\3\2\2\2\u03dc\u03dd\7\u00a6\2\2\u03dd\u03de") + buf.write("\7\3\2\2\u03de\u03df\5\b\5\2\u03df\u03e0\7\23\2\2\u03e0") + buf.write("\u03e1\5l\67\2\u03e1\u03e2\7\4\2\2\u03e2\u042f\3\2\2\2") + buf.write("\u03e3\u03e4\7\u00d5\2\2\u03e4\u03e5\7\3\2\2\u03e5\u03e8") + buf.write("\7\u00f6\2\2\u03e6\u03e7\7\23\2\2\u03e7\u03e9\t\21\2\2") + buf.write("\u03e8\u03e6\3\2\2\2\u03e8\u03e9\3\2\2\2\u03e9\u03ec\3") + buf.write("\2\2\2\u03ea\u03eb\7\23\2\2\u03eb\u03ed\5\u00ceh\2\u03ec") + buf.write("\u03ea\3\2\2\2\u03ec\u03ed\3\2\2\2\u03ed\u03f0\3\2\2\2") + buf.write("\u03ee\u03ef\7\23\2\2\u03ef\u03f1\t\22\2\2\u03f0\u03ee") + buf.write("\3\2\2\2\u03f0\u03f1\3\2\2\2\u03f1\u03f2\3\2\2\2\u03f2") + buf.write("\u042f\7\4\2\2\u03f3\u03f4\7\37\2\2\u03f4\u03f5\7\3\2") + buf.write("\2\u03f5\u042f\7\4\2\2\u03f6\u03f7\7 \2\2\u03f7\u03f8") + buf.write("\7\3\2\2\u03f8\u03f9\5\b\5\2\u03f9\u03fa\7\23\2\2\u03fa") + buf.write("\u03fb\5\6\4\2\u03fb\u03fc\7\4\2\2\u03fc\u042f\3\2\2\2") + buf.write("\u03fd\u03fe\7!\2\2\u03fe\u03ff\7\3\2\2\u03ff\u0400\5") + buf.write("\b\5\2\u0400\u0401\7\23\2\2\u0401\u0402\5\b\5\2\u0402") + buf.write("\u0403\7\23\2\2\u0403\u0404\5\b\5\2\u0404\u0405\7\4\2") + buf.write('\2\u0405\u042f\3\2\2\2\u0406\u0407\7"\2\2\u0407\u0408') + buf.write("\7\3\2\2\u0408\u0409\5\b\5\2\u0409\u040a\7\4\2\2\u040a") + buf.write("\u042f\3\2\2\2\u040b\u040c\7#\2\2\u040c\u040d\7\3\2\2") + buf.write("\u040d\u040e\5\b\5\2\u040e\u040f\7\4\2\2\u040f\u042f\3") + buf.write("\2\2\2\u0410\u0411\7$\2\2\u0411\u0412\7\3\2\2\u0412\u0413") + buf.write("\5\b\5\2\u0413\u0414\7\4\2\2\u0414\u042f\3\2\2\2\u0415") + buf.write("\u0416\7%\2\2\u0416\u0417\7\3\2\2\u0417\u0418\5\b\5\2") + buf.write("\u0418\u0419\7\4\2\2\u0419\u042f\3\2\2\2\u041a\u041b\7") + 
buf.write("&\2\2\u041b\u041c\7\3\2\2\u041c\u041d\5\b\5\2\u041d\u041e") + buf.write("\7\4\2\2\u041e\u042f\3\2\2\2\u041f\u0420\7'\2\2\u0420") + buf.write("\u0421\7\3\2\2\u0421\u0422\5\b\5\2\u0422\u0423\7\4\2\2") + buf.write("\u0423\u042f\3\2\2\2\u0424\u0425\7(\2\2\u0425\u0426\7") + buf.write("\3\2\2\u0426\u0427\5\b\5\2\u0427\u0428\7\4\2\2\u0428\u042f") + buf.write("\3\2\2\2\u0429\u042a\7)\2\2\u042a\u042b\7\3\2\2\u042b") + buf.write("\u042c\5\b\5\2\u042c\u042d\7\4\2\2\u042d\u042f\3\2\2\2") + buf.write("\u042e\u03c8\3\2\2\2\u042e\u03ce\3\2\2\2\u042e\u03d7\3") + buf.write("\2\2\2\u042e\u03dc\3\2\2\2\u042e\u03e3\3\2\2\2\u042e\u03f3") + buf.write("\3\2\2\2\u042e\u03f6\3\2\2\2\u042e\u03fd\3\2\2\2\u042e") + buf.write("\u0406\3\2\2\2\u042e\u040b\3\2\2\2\u042e\u0410\3\2\2\2") + buf.write("\u042e\u0415\3\2\2\2\u042e\u041a\3\2\2\2\u042e\u041f\3") + buf.write("\2\2\2\u042e\u0424\3\2\2\2\u042e\u0429\3\2\2\2\u042f;") + buf.write("\3\2\2\2\u0430\u0431\7;\2\2\u0431\u0432\7\3\2\2\u0432") + buf.write("\u0435\5\6\4\2\u0433\u0434\7\23\2\2\u0434\u0436\5\6\4") + buf.write("\2\u0435\u0433\3\2\2\2\u0436\u0437\3\2\2\2\u0437\u0435") + buf.write("\3\2\2\2\u0437\u0438\3\2\2\2\u0438\u0439\3\2\2\2\u0439") + buf.write("\u043a\7\4\2\2\u043a\u044e\3\2\2\2\u043b\u043c\7>\2\2") + buf.write("\u043c\u043d\7\3\2\2\u043d\u0440\5\6\4\2\u043e\u043f\7") + buf.write("\23\2\2\u043f\u0441\5\6\4\2\u0440\u043e\3\2\2\2\u0441") + buf.write("\u0442\3\2\2\2\u0442\u0440\3\2\2\2\u0442\u0443\3\2\2\2") + buf.write("\u0443\u0444\3\2\2\2\u0444\u0445\7\4\2\2\u0445\u044e\3") + buf.write("\2\2\2\u0446\u0447\t\23\2\2\u0447\u0448\7\3\2\2\u0448") + buf.write("\u0449\5\6\4\2\u0449\u044a\7\23\2\2\u044a\u044b\5\6\4") + buf.write("\2\u044b\u044c\7\4\2\2\u044c\u044e\3\2\2\2\u044d\u0430") + buf.write("\3\2\2\2\u044d\u043b\3\2\2\2\u044d\u0446\3\2\2\2\u044e") + buf.write("=\3\2\2\2\u044f\u0450\7t\2\2\u0450\u0451\7\3\2\2\u0451") + buf.write("\u0452\5\6\4\2\u0452\u0453\7\23\2\2\u0453\u0455\7\u00f7") + 
buf.write("\2\2\u0454\u0456\5\u00b2Z\2\u0455\u0454\3\2\2\2\u0455") + buf.write("\u0456\3\2\2\2\u0456\u0459\3\2\2\2\u0457\u0458\7\u0082") + buf.write("\2\2\u0458\u045a\5\u00c2b\2\u0459\u0457\3\2\2\2\u0459") + buf.write("\u045a\3\2\2\2\u045a\u045c\3\2\2\2\u045b\u045d\5\u00b0") + buf.write("Y\2\u045c\u045b\3\2\2\2\u045c\u045d\3\2\2\2\u045d\u045f") + buf.write("\3\2\2\2\u045e\u0460\5\u00b8]\2\u045f\u045e\3\2\2\2\u045f") + buf.write("\u0460\3\2\2\2\u0460\u0462\3\2\2\2\u0461\u0463\5\u00ba") + buf.write("^\2\u0462\u0461\3\2\2\2\u0462\u0463\3\2\2\2\u0463\u0464") + buf.write("\3\2\2\2\u0464\u0465\7\4\2\2\u0465?\3\2\2\2\u0466\u0467") + buf.write("\7\u00e0\2\2\u0467\u0468\7\3\2\2\u0468\u0469\5\6\4\2\u0469") + buf.write("\u046a\7\23\2\2\u046a\u0474\7\u00f7\2\2\u046b\u046c\7") + buf.write("\u00e9\2\2\u046c\u0471\5\u00c2b\2\u046d\u046e\7\23\2\2") + buf.write("\u046e\u0470\5\u00c2b\2\u046f\u046d\3\2\2\2\u0470\u0473") + buf.write("\3\2\2\2\u0471\u046f\3\2\2\2\u0471\u0472\3\2\2\2\u0472") + buf.write("\u0475\3\2\2\2\u0473\u0471\3\2\2\2\u0474\u046b\3\2\2\2") + buf.write("\u0474\u0475\3\2\2\2\u0475\u0477\3\2\2\2\u0476\u0478\5") + buf.write("\u00aeX\2\u0477\u0476\3\2\2\2\u0477\u0478\3\2\2\2\u0478") + buf.write("\u0479\3\2\2\2\u0479\u047a\7\4\2\2\u047a\u04a4\3\2\2\2") + buf.write("\u047b\u047c\7\u00e1\2\2\u047c\u047d\7\3\2\2\u047d\u047e") + buf.write("\5\6\4\2\u047e\u047f\7\23\2\2\u047f\u0481\7\u00f7\2\2") + buf.write("\u0480\u0482\5\u00b2Z\2\u0481\u0480\3\2\2\2\u0481\u0482") + buf.write("\3\2\2\2\u0482\u0485\3\2\2\2\u0483\u0484\7\u0082\2\2\u0484") + buf.write("\u0486\5\u00c2b\2\u0485\u0483\3\2\2\2\u0485\u0486\3\2") + buf.write("\2\2\u0486\u0488\3\2\2\2\u0487\u0489\5\u00b0Y\2\u0488") + buf.write("\u0487\3\2\2\2\u0488\u0489\3\2\2\2\u0489\u048b\3\2\2\2") + buf.write("\u048a\u048c\5\u00b4[\2\u048b\u048a\3\2\2\2\u048b\u048c") + buf.write("\3\2\2\2\u048c\u048e\3\2\2\2\u048d\u048f\5\u00aeX\2\u048e") + buf.write("\u048d\3\2\2\2\u048e\u048f\3\2\2\2\u048f\u0490\3\2\2\2") + 
buf.write("\u0490\u0491\7\4\2\2\u0491\u04a4\3\2\2\2\u0492\u0493\7") + buf.write("D\2\2\u0493\u0494\7\3\2\2\u0494\u0496\5\6\4\2\u0495\u0497") + buf.write("\5\u00c6d\2\u0496\u0495\3\2\2\2\u0496\u0497\3\2\2\2\u0497") + buf.write("\u0499\3\2\2\2\u0498\u049a\5\u00c8e\2\u0499\u0498\3\2") + buf.write("\2\2\u0499\u049a\3\2\2\2\u049a\u049c\3\2\2\2\u049b\u049d") + buf.write("\5\u00b6\\\2\u049c\u049b\3\2\2\2\u049c\u049d\3\2\2\2\u049d") + buf.write("\u049f\3\2\2\2\u049e\u04a0\t\24\2\2\u049f\u049e\3\2\2") + buf.write("\2\u049f\u04a0\3\2\2\2\u04a0\u04a1\3\2\2\2\u04a1\u04a2") + buf.write("\7\4\2\2\u04a2\u04a4\3\2\2\2\u04a3\u0466\3\2\2\2\u04a3") + buf.write("\u047b\3\2\2\2\u04a3\u0492\3\2\2\2\u04a4A\3\2\2\2\u04a5") + buf.write("\u04a6\7s\2\2\u04a6\u04a7\7\3\2\2\u04a7\u04a8\5\6\4\2") + buf.write("\u04a8\u04a9\7\23\2\2\u04a9\u04aa\5\6\4\2\u04aa\u04ab") + buf.write("\7\4\2\2\u04abC\3\2\2\2\u04ac\u04ad\7s\2\2\u04ad\u04ae") + buf.write("\7\3\2\2\u04ae\u04af\5\b\5\2\u04af\u04b0\7\23\2\2\u04b0") + buf.write("\u04b1\5\b\5\2\u04b1\u04b2\7\4\2\2\u04b2E\3\2\2\2\u04b3") + buf.write("\u04b4\t\25\2\2\u04b4\u04b5\7\3\2\2\u04b5\u04b6\5\b\5") + buf.write("\2\u04b6\u04b7\7\4\2\2\u04b7\u04bc\3\2\2\2\u04b8\u04b9") + buf.write("\7h\2\2\u04b9\u04ba\7\3\2\2\u04ba\u04bc\7\4\2\2\u04bb") + buf.write("\u04b3\3\2\2\2\u04bb\u04b8\3\2\2\2\u04bcG\3\2\2\2\u04bd") + buf.write("\u04be\t\25\2\2\u04be\u04bf\7\3\2\2\u04bf\u04c4\5\6\4") + buf.write("\2\u04c0\u04c2\5r:\2\u04c1\u04c3\5t;\2\u04c2\u04c1\3\2") + buf.write("\2\2\u04c2\u04c3\3\2\2\2\u04c3\u04c5\3\2\2\2\u04c4\u04c0") + buf.write("\3\2\2\2\u04c4\u04c5\3\2\2\2\u04c5\u04c6\3\2\2\2\u04c6") + buf.write("\u04c7\7\4\2\2\u04c7I\3\2\2\2\u04c8\u04c9\t\26\2\2\u04c9") + buf.write("\u04ca\7\3\2\2\u04ca\u04cb\5\6\4\2\u04cb\u04cc\7\u009a") + buf.write("\2\2\u04cc\u04ce\7\3\2\2\u04cd\u04cf\5d\63\2\u04ce\u04cd") + buf.write("\3\2\2\2\u04ce\u04cf\3\2\2\2\u04cf\u04d1\3\2\2\2\u04d0") + buf.write("\u04d2\5f\64\2\u04d1\u04d0\3\2\2\2\u04d1\u04d2\3\2\2\2") + 
buf.write("\u04d2\u04d4\3\2\2\2\u04d3\u04d5\5j\66\2\u04d4\u04d3\3") + buf.write("\2\2\2\u04d4\u04d5\3\2\2\2\u04d5\u04d6\3\2\2\2\u04d6\u04d7") + buf.write("\7\4\2\2\u04d7\u04d8\7\4\2\2\u04d8\u04f8\3\2\2\2\u04d9") + buf.write("\u04da\t\27\2\2\u04da\u04db\7\3\2\2\u04db\u04e2\5\6\4") + buf.write("\2\u04dc\u04dd\7\23\2\2\u04dd\u04e0\5l\67\2\u04de\u04df") + buf.write("\7\23\2\2\u04df\u04e1\5X-\2\u04e0\u04de\3\2\2\2\u04e0") + buf.write("\u04e1\3\2\2\2\u04e1\u04e3\3\2\2\2\u04e2\u04dc\3\2\2\2") + buf.write("\u04e2\u04e3\3\2\2\2\u04e3\u04e4\3\2\2\2\u04e4\u04e5\7") + buf.write("\u009a\2\2\u04e5\u04e7\7\3\2\2\u04e6\u04e8\5d\63\2\u04e7") + buf.write("\u04e6\3\2\2\2\u04e7\u04e8\3\2\2\2\u04e8\u04e9\3\2\2\2") + buf.write("\u04e9\u04ea\5f\64\2\u04ea\u04eb\3\2\2\2\u04eb\u04ec\7") + buf.write("\4\2\2\u04ec\u04ed\7\4\2\2\u04ed\u04f8\3\2\2\2\u04ee\u04ef") + buf.write("\7\u0099\2\2\u04ef\u04f0\7\3\2\2\u04f0\u04f1\5\6\4\2\u04f1") + buf.write("\u04f2\7\u009a\2\2\u04f2\u04f3\7\3\2\2\u04f3\u04f4\5d") + buf.write("\63\2\u04f4\u04f5\7\4\2\2\u04f5\u04f6\7\4\2\2\u04f6\u04f8") + buf.write("\3\2\2\2\u04f7\u04c8\3\2\2\2\u04f7\u04d9\3\2\2\2\u04f7") + buf.write("\u04ee\3\2\2\2\u04f8K\3\2\2\2\u04f9\u04fa\t\26\2\2\u04fa") + buf.write("\u04fb\7\3\2\2\u04fb\u04fc\5\b\5\2\u04fc\u04fd\7\u009a") + buf.write("\2\2\u04fd\u04ff\7\3\2\2\u04fe\u0500\5d\63\2\u04ff\u04fe") + buf.write("\3\2\2\2\u04ff\u0500\3\2\2\2\u0500\u0502\3\2\2\2\u0501") + buf.write("\u0503\5f\64\2\u0502\u0501\3\2\2\2\u0502\u0503\3\2\2\2") + buf.write("\u0503\u0505\3\2\2\2\u0504\u0506\5j\66\2\u0505\u0504\3") + buf.write("\2\2\2\u0505\u0506\3\2\2\2\u0506\u0507\3\2\2\2\u0507\u0508") + buf.write("\7\4\2\2\u0508\u0509\7\4\2\2\u0509\u0534\3\2\2\2\u050a") + buf.write("\u050b\t\27\2\2\u050b\u050c\7\3\2\2\u050c\u0512\5\b\5") + buf.write("\2\u050d\u050e\7\23\2\2\u050e\u0510\5l\67\2\u050f\u0511") + buf.write("\5X-\2\u0510\u050f\3\2\2\2\u0510\u0511\3\2\2\2\u0511\u0513") + buf.write("\3\2\2\2\u0512\u050d\3\2\2\2\u0512\u0513\3\2\2\2\u0513") + 
buf.write("\u0514\3\2\2\2\u0514\u0515\7\u009a\2\2\u0515\u0517\7\3") + buf.write("\2\2\u0516\u0518\5d\63\2\u0517\u0516\3\2\2\2\u0517\u0518") + buf.write("\3\2\2\2\u0518\u0519\3\2\2\2\u0519\u051a\5f\64\2\u051a") + buf.write("\u051b\3\2\2\2\u051b\u051c\7\4\2\2\u051c\u051d\7\4\2\2") + buf.write("\u051d\u0534\3\2\2\2\u051e\u051f\7O\2\2\u051f\u0520\7") + buf.write("\3\2\2\u0520\u0521\7\u009a\2\2\u0521\u0523\7\3\2\2\u0522") + buf.write("\u0524\5d\63\2\u0523\u0522\3\2\2\2\u0523\u0524\3\2\2\2") + buf.write("\u0524\u0525\3\2\2\2\u0525\u0526\5f\64\2\u0526\u0527\3") + buf.write("\2\2\2\u0527\u0528\7\4\2\2\u0528\u0529\7\4\2\2\u0529\u0534") + buf.write("\3\2\2\2\u052a\u052b\7\u0099\2\2\u052b\u052c\7\3\2\2\u052c") + buf.write("\u052d\5\b\5\2\u052d\u052e\7\u009a\2\2\u052e\u052f\7\3") + buf.write("\2\2\u052f\u0530\5d\63\2\u0530\u0531\7\4\2\2\u0531\u0532") + buf.write("\7\4\2\2\u0532\u0534\3\2\2\2\u0533\u04f9\3\2\2\2\u0533") + buf.write("\u050a\3\2\2\2\u0533\u051e\3\2\2\2\u0533\u052a\3\2\2\2") + buf.write("\u0534M\3\2\2\2\u0535\u0536\5\u00c2b\2\u0536\u0537\7F") + buf.write("\2\2\u0537\u0538\5\u00c2b\2\u0538O\3\2\2\2\u0539\u053e") + buf.write("\5R*\2\u053a\u053b\7\23\2\2\u053b\u053d\5R*\2\u053c\u053a") + buf.write("\3\2\2\2\u053d\u0540\3\2\2\2\u053e\u053c\3\2\2\2\u053e") + buf.write("\u053f\3\2\2\2\u053fQ\3\2\2\2\u0540\u053e\3\2\2\2\u0541") + buf.write("\u0543\5\u00d0i\2\u0542\u0541\3\2\2\2\u0542\u0543\3\2") + buf.write("\2\2\u0543\u0544\3\2\2\2\u0544\u0545\5\u00c2b\2\u0545") + buf.write("\u0546\7\26\2\2\u0546\u0547\5F$\2\u0547S\3\2\2\2\u0548") + buf.write("\u054a\5\u00d0i\2\u0549\u0548\3\2\2\2\u0549\u054a\3\2") + buf.write("\2\2\u054a\u054b\3\2\2\2\u054b\u054c\5\u00c2b\2\u054c") + buf.write("\u054d\7\26\2\2\u054d\u054e\5\b\5\2\u054eU\3\2\2\2\u054f") + buf.write("\u0550\5\u00c2b\2\u0550\u0551\7\t\2\2\u0551\u0552\5X-") + buf.write("\2\u0552W\3\2\2\2\u0553\u0560\5\u00dan\2\u0554\u0555\7") + buf.write("\u00dc\2\2\u0555\u0556\7\3\2\2\u0556\u0557\5\u00dan\2") + 
buf.write("\u0557\u0558\7\23\2\2\u0558\u055b\5\u00dco\2\u0559\u055a") buf.write("\7\23\2\2\u055a\u055c\7\u00f6\2\2\u055b\u0559\3\2\2\2") buf.write("\u055b\u055c\3\2\2\2\u055c\u055d\3\2\2\2\u055d\u055e\7") buf.write("\4\2\2\u055e\u0560\3\2\2\2\u055f\u0553\3\2\2\2\u055f\u0554") @@ -744,12 +747,12 @@ def serializedATN(): buf.write("\2\u056a\u056b\7\23\2\2\u056b\u056d\5^\60\2\u056c\u056a") buf.write("\3\2\2\2\u056d\u0570\3\2\2\2\u056e\u056c\3\2\2\2\u056e") buf.write("\u056f\3\2\2\2\u056f\u057a\3\2\2\2\u0570\u056e\3\2\2\2") - buf.write("\u0571\u0572\7\35\2\2\u0572\u0577\5\u00c0a\2\u0573\u0574") - buf.write("\7\23\2\2\u0574\u0576\5\u00c0a\2\u0575\u0573\3\2\2\2\u0576") + buf.write("\u0571\u0572\7\35\2\2\u0572\u0577\5\u00c2b\2\u0573\u0574") + buf.write("\7\23\2\2\u0574\u0576\5\u00c2b\2\u0575\u0573\3\2\2\2\u0576") buf.write("\u0579\3\2\2\2\u0577\u0575\3\2\2\2\u0577\u0578\3\2\2\2") buf.write("\u0578\u057b\3\2\2\2\u0579\u0577\3\2\2\2\u057a\u0571\3") buf.write("\2\2\2\u057a\u057b\3\2\2\2\u057b]\3\2\2\2\u057c\u057f") - buf.write("\5\6\4\2\u057d\u057e\7\60\2\2\u057e\u0580\5\u00ba^\2\u057f") + buf.write("\5\6\4\2\u057d\u057e\7\60\2\2\u057e\u0580\5\u00bc_\2\u057f") buf.write("\u057d\3\2\2\2\u057f\u0580\3\2\2\2\u0580_\3\2\2\2\u0581") buf.write("\u0583\5\24\13\2\u0582\u0581\3\2\2\2\u0582\u0583\3\2\2") buf.write("\2\u0583\u0587\3\2\2\2\u0584\u0588\5\26\f\2\u0585\u0588") @@ -760,216 +763,239 @@ def serializedATN(): buf.write("\u058e\5\20\t\2\u058d\u058c\3\2\2\2\u058d\u058e\3\2\2") buf.write("\2\u058ea\3\2\2\2\u058f\u0590\7\u00d0\2\2\u0590\u0591") buf.write("\5\6\4\2\u0591c\3\2\2\2\u0592\u0593\7\u009e\2\2\u0593") - buf.write("\u0594\7N\2\2\u0594\u0599\5\u00c0a\2\u0595\u0596\7\23") - buf.write("\2\2\u0596\u0598\5\u00c0a\2\u0597\u0595\3\2\2\2\u0598") + buf.write("\u0594\7N\2\2\u0594\u0599\5\u00c2b\2\u0595\u0596\7\23") + buf.write("\2\2\u0596\u0598\5\u00c2b\2\u0597\u0595\3\2\2\2\u0598") buf.write("\u059b\3\2\2\2\u0599\u0597\3\2\2\2\u0599\u059a\3\2\2\2") 
buf.write("\u059ae\3\2\2\2\u059b\u0599\3\2\2\2\u059c\u059d\7M\2\2") buf.write("\u059d\u059e\7N\2\2\u059e\u05a3\5h\65\2\u059f\u05a0\7") buf.write("\23\2\2\u05a0\u05a2\5h\65\2\u05a1\u059f\3\2\2\2\u05a2") buf.write("\u05a5\3\2\2\2\u05a3\u05a1\3\2\2\2\u05a3\u05a4\3\2\2\2") - buf.write("\u05a4g\3\2\2\2\u05a5\u05a3\3\2\2\2\u05a6\u05a8\5\u00c0") - buf.write("a\2\u05a7\u05a9\t\30\2\2\u05a8\u05a7\3\2\2\2\u05a8\u05a9") + buf.write("\u05a4g\3\2\2\2\u05a5\u05a3\3\2\2\2\u05a6\u05a8\5\u00c2") + buf.write("b\2\u05a7\u05a9\t\30\2\2\u05a8\u05a7\3\2\2\2\u05a8\u05a9") buf.write("\3\2\2\2\u05a9i\3\2\2\2\u05aa\u05ab\7y\2\2\u05ab\u05ae") buf.write("\7\u00be\2\2\u05ac\u05ae\7\u00a0\2\2\u05ad\u05aa\3\2\2") buf.write("\2\u05ad\u05ac\3\2\2\2\u05ae\u05af\3\2\2\2\u05af\u05b0") - buf.write("\7\65\2\2\u05b0\u05b1\5n8\2\u05b1\u05b2\7\61\2\2\u05b2") - buf.write("\u05b3\5n8\2\u05b3k\3\2\2\2\u05b4\u05b5\7\u00f3\2\2\u05b5") - buf.write("m\3\2\2\2\u05b6\u05b7\7\u00f3\2\2\u05b7\u05c2\7\u009b") - buf.write("\2\2\u05b8\u05b9\7\u00f3\2\2\u05b9\u05c2\7\u009c\2\2\u05ba") - buf.write("\u05bb\7\u00a1\2\2\u05bb\u05bc\7y\2\2\u05bc\u05c2\7\u00bf") - buf.write("\2\2\u05bd\u05be\7\u009d\2\2\u05be\u05c2\7\u009b\2\2\u05bf") - buf.write("\u05c0\7\u009d\2\2\u05c0\u05c2\7\u009c\2\2\u05c1\u05b6") - buf.write("\3\2\2\2\u05c1\u05b8\3\2\2\2\u05c1\u05ba\3\2\2\2\u05c1") - buf.write("\u05bd\3\2\2\2\u05c1\u05bf\3\2\2\2\u05c2o\3\2\2\2\u05c3") - buf.write("\u05c4\7\u0092\2\2\u05c4\u05c5\t\31\2\2\u05c5\u05ca\5") - buf.write("\u00c0a\2\u05c6\u05c7\7\23\2\2\u05c7\u05c9\5\u00c0a\2") - buf.write("\u05c8\u05c6\3\2\2\2\u05c9\u05cc\3\2\2\2\u05ca\u05c8\3") - buf.write("\2\2\2\u05ca\u05cb\3\2\2\2\u05cb\u05d1\3\2\2\2\u05cc\u05ca") - buf.write("\3\2\2\2\u05cd\u05ce\7\u0092\2\2\u05ce\u05cf\7J\2\2\u05cf") - buf.write("\u05d1\5\b\5\2\u05d0\u05c3\3\2\2\2\u05d0\u05cd\3\2\2\2") - buf.write("\u05d1q\3\2\2\2\u05d2\u05d3\7\u0094\2\2\u05d3\u05d4\5") - buf.write("\b\5\2\u05d4s\3\2\2\2\u05d5\u05d6\5\u00bc_\2\u05d6\u05d9") - 
buf.write("\5z>\2\u05d7\u05d8\7\u00df\2\2\u05d8\u05da\5X-\2\u05d9") - buf.write("\u05d7\3\2\2\2\u05d9\u05da\3\2\2\2\u05dau\3\2\2\2\u05db") - buf.write("\u05df\5~@\2\u05dc\u05df\5\u0082B\2\u05dd\u05df\5\u0080") - buf.write("A\2\u05de\u05db\3\2\2\2\u05de\u05dc\3\2\2\2\u05de\u05dd") - buf.write("\3\2\2\2\u05dfw\3\2\2\2\u05e0\u05e3\5\u0080A\2\u05e1\u05e3") - buf.write("\5~@\2\u05e2\u05e0\3\2\2\2\u05e2\u05e1\3\2\2\2\u05e3y") - buf.write("\3\2\2\2\u05e4\u05ea\5~@\2\u05e5\u05ea\5\u0082B\2\u05e6") - buf.write("\u05ea\5\u0086D\2\u05e7\u05ea\5|?\2\u05e8\u05ea\5\u0080") - buf.write("A\2\u05e9\u05e4\3\2\2\2\u05e9\u05e5\3\2\2\2\u05e9\u05e6") - buf.write("\3\2\2\2\u05e9\u05e7\3\2\2\2\u05e9\u05e8\3\2\2\2\u05ea") - buf.write("{\3\2\2\2\u05eb\u05ef\7\u0081\2\2\u05ec\u05ef\5\u0088") - buf.write("E\2\u05ed\u05ef\5\u008aF\2\u05ee\u05eb\3\2\2\2\u05ee\u05ec") - buf.write("\3\2\2\2\u05ee\u05ed\3\2\2\2\u05ef}\3\2\2\2\u05f0\u05f3") - buf.write("\5\u00dan\2\u05f1\u05f3\5\u008cG\2\u05f2\u05f0\3\2\2\2") - buf.write("\u05f2\u05f1\3\2\2\2\u05f3\u05f5\3\2\2\2\u05f4\u05f6\5") - buf.write("\u00a6T\2\u05f5\u05f4\3\2\2\2\u05f5\u05f6\3\2\2\2\u05f6") - buf.write("\u05fb\3\2\2\2\u05f7\u05f9\7\64\2\2\u05f8\u05f7\3\2\2") - buf.write("\2\u05f8\u05f9\3\2\2\2\u05f9\u05fa\3\2\2\2\u05fa\u05fc") - buf.write("\78\2\2\u05fb\u05f8\3\2\2\2\u05fb\u05fc\3\2\2\2\u05fc") - buf.write("\177\3\2\2\2\u05fd\u0602\5\u00ceh\2\u05fe\u05ff\7\n\2") - buf.write("\2\u05ff\u0600\5~@\2\u0600\u0601\7\13\2\2\u0601\u0603") - buf.write("\3\2\2\2\u0602\u05fe\3\2\2\2\u0602\u0603\3\2\2\2\u0603") - buf.write("\u0081\3\2\2\2\u0604\u0610\7{\2\2\u0605\u0606\7\7\2\2") - buf.write("\u0606\u060b\5\u00a8U\2\u0607\u0608\7\23\2\2\u0608\u060a") - buf.write("\5\u00a8U\2\u0609\u0607\3\2\2\2\u060a\u060d\3\2\2\2\u060b") - buf.write("\u0609\3\2\2\2\u060b\u060c\3\2\2\2\u060c\u060e\3\2\2\2") - buf.write("\u060d\u060b\3\2\2\2\u060e\u060f\7\b\2\2\u060f\u0611\3") - buf.write("\2\2\2\u0610\u0605\3\2\2\2\u0610\u0611\3\2\2\2\u0611\u0083") - 
buf.write("\3\2\2\2\u0612\u0615\5\u0082B\2\u0613\u0615\5~@\2\u0614") - buf.write("\u0612\3\2\2\2\u0614\u0613\3\2\2\2\u0615\u0085\3\2\2\2") - buf.write("\u0616\u061b\7\u00f1\2\2\u0617\u0618\7\n\2\2\u0618\u0619") - buf.write("\5~@\2\u0619\u061a\7\13\2\2\u061a\u061c\3\2\2\2\u061b") - buf.write("\u0617\3\2\2\2\u061b\u061c\3\2\2\2\u061c\u0087\3\2\2\2") - buf.write("\u061d\u063b\7\177\2\2\u061e\u062a\7\u00ed\2\2\u061f\u0620") - buf.write("\7\7\2\2\u0620\u0625\5\u008cG\2\u0621\u0622\7\21\2\2\u0622") - buf.write("\u0624\5\u008cG\2\u0623\u0621\3\2\2\2\u0624\u0627\3\2") - buf.write("\2\2\u0625\u0623\3\2\2\2\u0625\u0626\3\2\2\2\u0626\u0628") - buf.write("\3\2\2\2\u0627\u0625\3\2\2\2\u0628\u0629\7\b\2\2\u0629") - buf.write("\u062b\3\2\2\2\u062a\u061f\3\2\2\2\u062a\u062b\3\2\2\2") - buf.write("\u062b\u063b\3\2\2\2\u062c\u0638\7\u00ee\2\2\u062d\u062e") - buf.write("\7\7\2\2\u062e\u0633\5\u00bc_\2\u062f\u0630\7\21\2\2\u0630") - buf.write("\u0632\5\u00bc_\2\u0631\u062f\3\2\2\2\u0632\u0635\3\2") - buf.write("\2\2\u0633\u0631\3\2\2\2\u0633\u0634\3\2\2\2\u0634\u0636") - buf.write("\3\2\2\2\u0635\u0633\3\2\2\2\u0636\u0637\7\b\2\2\u0637") - buf.write("\u0639\3\2\2\2\u0638\u062d\3\2\2\2\u0638\u0639\3\2\2\2") - buf.write("\u0639\u063b\3\2\2\2\u063a\u061d\3\2\2\2\u063a\u061e\3") - buf.write("\2\2\2\u063a\u062c\3\2\2\2\u063b\u0089\3\2\2\2\u063c\u0665") - buf.write("\7\u0080\2\2\u063d\u064e\7\u00ef\2\2\u063e\u063f\7\7\2") - buf.write("\2\u063f\u064b\7\u00f7\2\2\u0640\u0641\7\3\2\2\u0641\u0646") - buf.write("\5\u008cG\2\u0642\u0643\7\21\2\2\u0643\u0645\5\u008cG") - buf.write("\2\u0644\u0642\3\2\2\2\u0645\u0648\3\2\2\2\u0646\u0644") - buf.write("\3\2\2\2\u0646\u0647\3\2\2\2\u0647\u0649\3\2\2\2\u0648") - buf.write("\u0646\3\2\2\2\u0649\u064a\7\4\2\2\u064a\u064c\3\2\2\2") - buf.write("\u064b\u0640\3\2\2\2\u064b\u064c\3\2\2\2\u064c\u064d\3") - buf.write("\2\2\2\u064d\u064f\7\b\2\2\u064e\u063e\3\2\2\2\u064e\u064f") - buf.write("\3\2\2\2\u064f\u0665\3\2\2\2\u0650\u0662\7\u00f0\2\2\u0651") - 
buf.write("\u0652\7\7\2\2\u0652\u065e\5\u00bc_\2\u0653\u0654\7\3") - buf.write("\2\2\u0654\u0659\5\u00bc_\2\u0655\u0656\7\21\2\2\u0656") - buf.write("\u0658\5\u00bc_\2\u0657\u0655\3\2\2\2\u0658\u065b\3\2") - buf.write("\2\2\u0659\u0657\3\2\2\2\u0659\u065a\3\2\2\2\u065a\u065c") - buf.write("\3\2\2\2\u065b\u0659\3\2\2\2\u065c\u065d\7\4\2\2\u065d") - buf.write("\u065f\3\2\2\2\u065e\u0653\3\2\2\2\u065e\u065f\3\2\2\2") - buf.write("\u065f\u0660\3\2\2\2\u0660\u0661\7\b\2\2\u0661\u0663\3") - buf.write("\2\2\2\u0662\u0651\3\2\2\2\u0662\u0663\3\2\2\2\u0663\u0665") - buf.write("\3\2\2\2\u0664\u063c\3\2\2\2\u0664\u063d\3\2\2\2\u0664") - buf.write("\u0650\3\2\2\2\u0665\u008b\3\2\2\2\u0666\u0667\7\u00f7") - buf.write("\2\2\u0667\u008d\3\2\2\2\u0668\u0669\7\u00f7\2\2\u0669") - buf.write("\u008f\3\2\2\2\u066a\u066b\t\32\2\2\u066b\u0670\5\u0092") - buf.write("J\2\u066c\u066d\7\23\2\2\u066d\u066f\5\u0092J\2\u066e") - buf.write("\u066c\3\2\2\2\u066f\u0672\3\2\2\2\u0670\u066e\3\2\2\2") - buf.write("\u0670\u0671\3\2\2\2\u0671\u0091\3\2\2\2\u0672\u0670\3") - buf.write("\2\2\2\u0673\u0676\5\u00bc_\2\u0674\u0675\7\60\2\2\u0675") - buf.write("\u0677\5\u00ba^\2\u0676\u0674\3\2\2\2\u0676\u0677\3\2") - buf.write("\2\2\u0677\u0093\3\2\2\2\u0678\u067d\5\u0096L\2\u0679") - buf.write("\u067a\7\u00f9\2\2\u067a\u067c\5\u0096L\2\u067b\u0679") - buf.write("\3\2\2\2\u067c\u067f\3\2\2\2\u067d\u067b\3\2\2\2\u067d") - buf.write("\u067e\3\2\2\2\u067e\u0095\3\2\2\2\u067f\u067d\3\2\2\2") - buf.write("\u0680\u0681\7\u00f7\2\2\u0681\u0683\7\25\2\2\u0682\u0680") - buf.write("\3\2\2\2\u0682\u0683\3\2\2\2\u0683\u0688\3\2\2\2\u0684") - buf.write("\u0685\7\u00bb\2\2\u0685\u0686\5\b\5\2\u0686\u0687\7\33") - buf.write("\2\2\u0687\u0689\3\2\2\2\u0688\u0684\3\2\2\2\u0688\u0689") - buf.write("\3\2\2\2\u0689\u068a\3\2\2\2\u068a\u068c\5\b\5\2\u068b") - buf.write("\u068d\5\u00c4c\2\u068c\u068b\3\2\2\2\u068c\u068d\3\2") - buf.write("\2\2\u068d\u068f\3\2\2\2\u068e\u0690\5\u00c6d\2\u068f") - 
buf.write("\u068e\3\2\2\2\u068f\u0690\3\2\2\2\u0690\u0097\3\2\2\2") - buf.write("\u0691\u0696\5\u009aN\2\u0692\u0693\7\u00f9\2\2\u0693") - buf.write("\u0695\5\u009aN\2\u0694\u0692\3\2\2\2\u0695\u0698\3\2") - buf.write("\2\2\u0696\u0694\3\2\2\2\u0696\u0697\3\2\2\2\u0697\u0099") - buf.write("\3\2\2\2\u0698\u0696\3\2\2\2\u0699\u069a\7\u00f7\2\2\u069a") - buf.write("\u069c\7\25\2\2\u069b\u0699\3\2\2\2\u069b\u069c\3\2\2") - buf.write("\2\u069c\u069d\3\2\2\2\u069d\u069f\5\u00a0Q\2\u069e\u06a0") - buf.write("\5\u00c4c\2\u069f\u069e\3\2\2\2\u069f\u06a0\3\2\2\2\u06a0") - buf.write("\u06a2\3\2\2\2\u06a1\u06a3\5\u00c6d\2\u06a2\u06a1\3\2") - buf.write("\2\2\u06a2\u06a3\3\2\2\2\u06a3\u009b\3\2\2\2\u06a4\u06a7") - buf.write("\t\32\2\2\u06a5\u06a6\7\u00a9\2\2\u06a6\u06a8\5\u009e") - buf.write("P\2\u06a7\u06a5\3\2\2\2\u06a7\u06a8\3\2\2\2\u06a8\u06a9") - buf.write("\3\2\2\2\u06a9\u06aa\7\u0082\2\2\u06aa\u06ab\7\u00f7\2") - buf.write("\2\u06ab\u009d\3\2\2\2\u06ac\u06b1\5\u0092J\2\u06ad\u06ae") - buf.write("\7\23\2\2\u06ae\u06b0\5\u0092J\2\u06af\u06ad\3\2\2\2\u06b0") - buf.write("\u06b3\3\2\2\2\u06b1\u06af\3\2\2\2\u06b1\u06b2\3\2\2\2") - buf.write("\u06b2\u009f\3\2\2\2\u06b3\u06b1\3\2\2\2\u06b4\u06b5\7") - buf.write("\u00bb\2\2\u06b5\u06b6\5\b\5\2\u06b6\u06b7\7\33\2\2\u06b7") - buf.write("\u06b9\3\2\2\2\u06b8\u06b4\3\2\2\2\u06b8\u06b9\3\2\2\2") - buf.write("\u06b9\u06ba\3\2\2\2\u06ba\u06bc\5\u00a4S\2\u06bb\u06bd") - buf.write("\5\u00c8e\2\u06bc\u06bb\3\2\2\2\u06bc\u06bd\3\2\2\2\u06bd") - buf.write("\u06be\3\2\2\2\u06be\u06c2\5\u00a2R\2\u06bf\u06c1\5\u00a2") - buf.write("R\2\u06c0\u06bf\3\2\2\2\u06c1\u06c4\3\2\2\2\u06c2\u06c0") - buf.write("\3\2\2\2\u06c2\u06c3\3\2\2\2\u06c3\u00a1\3\2\2\2\u06c4") - buf.write("\u06c2\3\2\2\2\u06c5\u06c7\t\33\2\2\u06c6\u06c5\3\2\2") - buf.write("\2\u06c6\u06c7\3\2\2\2\u06c7\u06c8\3\2\2\2\u06c8\u06cd") - buf.write("\5\u00a4S\2\u06c9\u06ca\7\5\2\2\u06ca\u06cb\5\b\5\2\u06cb") - buf.write("\u06cc\7\6\2\2\u06cc\u06ce\3\2\2\2\u06cd\u06c9\3\2\2\2") - 
buf.write("\u06cd\u06ce\3\2\2\2\u06ce\u00a3\3\2\2\2\u06cf\u06d0\t") - buf.write("\34\2\2\u06d0\u00a5\3\2\2\2\u06d1\u06d2\7\5\2\2\u06d2") - buf.write("\u06d3\5\b\5\2\u06d3\u06d4\7\6\2\2\u06d4\u06e1\3\2\2\2") - buf.write("\u06d5\u06d6\7\7\2\2\u06d6\u06db\5X-\2\u06d7\u06d8\7\23") - buf.write("\2\2\u06d8\u06da\5X-\2\u06d9\u06d7\3\2\2\2\u06da\u06dd") - buf.write("\3\2\2\2\u06db\u06d9\3\2\2\2\u06db\u06dc\3\2\2\2\u06dc") - buf.write("\u06de\3\2\2\2\u06dd\u06db\3\2\2\2\u06de\u06df\7\b\2\2") - buf.write("\u06df\u06e1\3\2\2\2\u06e0\u06d1\3\2\2\2\u06e0\u06d5\3") - buf.write("\2\2\2\u06e1\u00a7\3\2\2\2\u06e2\u06e5\5\u0080A\2\u06e3") - buf.write("\u06e6\5\u00c0a\2\u06e4\u06e6\5\u00aaV\2\u06e5\u06e3\3") - buf.write("\2\2\2\u06e5\u06e4\3\2\2\2\u06e6\u00a9\3\2\2\2\u06e7\u06e9") - buf.write("\7u\2\2\u06e8\u06ea\t\35\2\2\u06e9\u06e8\3\2\2\2\u06e9") - buf.write("\u06ea\3\2\2\2\u06ea\u00ab\3\2\2\2\u06eb\u06ec\t\36\2") - buf.write("\2\u06ec\u00ad\3\2\2\2\u06ed\u06ee\t\37\2\2\u06ee\u00af") - buf.write("\3\2\2\2\u06ef\u06f0\7\u00a9\2\2\u06f0\u06f5\5\u00c0a") - buf.write("\2\u06f1\u06f2\7\23\2\2\u06f2\u06f4\5\u00c0a\2\u06f3\u06f1") - buf.write("\3\2\2\2\u06f4\u06f7\3\2\2\2\u06f5\u06f3\3\2\2\2\u06f5") - buf.write("\u06f6\3\2\2\2\u06f6\u00b1\3\2\2\2\u06f7\u06f5\3\2\2\2") - buf.write("\u06f8\u06f9\t \2\2\u06f9\u00b3\3\2\2\2\u06fa\u06fb\7") - buf.write("H\2\2\u06fb\u06fc\5\6\4\2\u06fc\u00b5\3\2\2\2\u06fd\u06fe") - buf.write('\t!\2\2\u06fe\u00b7\3\2\2\2\u06ff\u0700\t"\2\2\u0700') - buf.write("\u00b9\3\2\2\2\u0701\u0702\7\u00f7\2\2\u0702\u00bb\3\2") - buf.write("\2\2\u0703\u0704\7\u00f7\2\2\u0704\u00bd\3\2\2\2\u0705") - buf.write("\u0706\7\u00f7\2\2\u0706\u00bf\3\2\2\2\u0707\u070a\7\u00f7") - buf.write("\2\2\u0708\u0709\7\27\2\2\u0709\u070b\7\u00f7\2\2\u070a") - buf.write("\u0708\3\2\2\2\u070a\u070b\3\2\2\2\u070b\u00c1\3\2\2\2") - buf.write("\u070c\u070d\7\7\2\2\u070d\u0712\5X-\2\u070e\u070f\7\23") - buf.write("\2\2\u070f\u0711\5X-\2\u0710\u070e\3\2\2\2\u0711\u0714") - 
buf.write("\3\2\2\2\u0712\u0710\3\2\2\2\u0712\u0713\3\2\2\2\u0713") - buf.write("\u0715\3\2\2\2\u0714\u0712\3\2\2\2\u0715\u0716\7\b\2\2") - buf.write("\u0716\u00c3\3\2\2\2\u0717\u0718\7I\2\2\u0718\u0719\5") - buf.write("\u00d8m\2\u0719\u00c5\3\2\2\2\u071a\u071b\7L\2\2\u071b") - buf.write("\u071c\5\u00d8m\2\u071c\u00c7\3\2\2\2\u071d\u071e\t#\2") - buf.write("\2\u071e\u00c9\3\2\2\2\u071f\u0722\5\6\4\2\u0720\u0722") - buf.write("\7u\2\2\u0721\u071f\3\2\2\2\u0721\u0720\3\2\2\2\u0722") - buf.write("\u00cb\3\2\2\2\u0723\u0726\5\b\5\2\u0724\u0726\7u\2\2") - buf.write("\u0725\u0723\3\2\2\2\u0725\u0724\3\2\2\2\u0726\u00cd\3") - buf.write("\2\2\2\u0727\u072d\7j\2\2\u0728\u072d\7\u00ec\2\2\u0729") - buf.write("\u072d\7i\2\2\u072a\u072d\7k\2\2\u072b\u072d\5\u00d0i") - buf.write("\2\u072c\u0727\3\2\2\2\u072c\u0728\3\2\2\2\u072c\u0729") - buf.write("\3\2\2\2\u072c\u072a\3\2\2\2\u072c\u072b\3\2\2\2\u072d") - buf.write("\u00cf\3\2\2\2\u072e\u072f\7p\2\2\u072f\u0730\7k\2\2\u0730") - buf.write("\u00d1\3\2\2\2\u0731\u0732\7\u00f7\2\2\u0732\u00d3\3\2") - buf.write("\2\2\u0733\u0734\7\u00f7\2\2\u0734\u00d5\3\2\2\2\u0735") - buf.write("\u0736\7\u00f7\2\2\u0736\u00d7\3\2\2\2\u0737\u0738\t$") - buf.write("\2\2\u0738\u00d9\3\2\2\2\u0739\u073a\t%\2\2\u073a\u00db") - buf.write("\3\2\2\2\u073b\u073c\t&\2\2\u073c\u00dd\3\2\2\2\u00c5") - buf.write("\u00e3\u00f1\u010a\u0111\u012f\u0131\u0133\u014d\u0154") - buf.write("\u016a\u016c\u016e\u0179\u0187\u0190\u0198\u019f\u01a1") - buf.write("\u01ac\u01b5\u01c7\u01d0\u01df\u01ea\u01ed\u01f2\u0213") - buf.write("\u021c\u021f\u0229\u022e\u0232\u0238\u023c\u0246\u024a") - buf.write("\u024e\u0257\u025a\u0264\u0268\u0272\u0277\u027b\u0281") - buf.write("\u0285\u0289\u028d\u0291\u02a1\u02a5\u02b0\u02bb\u02bf") - buf.write("\u02c3\u02d3\u02d7\u02e2\u02ed\u02f1\u02f5\u0301\u030c") - buf.write("\u0318\u0323\u0341\u0345\u035c\u0361\u0369\u037e\u0382") - buf.write("\u0386\u03c4\u03c9\u03d1\u03e6\u03ea\u03ee\u042c\u0435") - 
buf.write("\u0440\u044b\u0453\u0457\u045a\u045d\u0460\u046f\u0472") - buf.write("\u0475\u047f\u0483\u0486\u0489\u048c\u0494\u0497\u049a") - buf.write("\u049d\u04a1\u04b9\u04c0\u04c2\u04cc\u04cf\u04d2\u04de") - buf.write("\u04e0\u04e5\u04f5\u04fd\u0500\u0503\u050e\u0510\u0515") - buf.write("\u0521\u0531\u053c\u0540\u0547\u0551\u055b\u055f\u0566") - buf.write("\u056e\u0577\u057a\u057f\u0582\u0587\u058a\u058d\u0599") - buf.write("\u05a3\u05a8\u05ad\u05c1\u05ca\u05d0\u05d9\u05de\u05e2") - buf.write("\u05e9\u05ee\u05f2\u05f5\u05f8\u05fb\u0602\u060b\u0610") - buf.write("\u0614\u061b\u0625\u062a\u0633\u0638\u063a\u0646\u064b") - buf.write("\u064e\u0659\u065e\u0662\u0664\u0670\u0676\u067d\u0682") - buf.write("\u0688\u068c\u068f\u0696\u069b\u069f\u06a2\u06a7\u06b1") - buf.write("\u06b8\u06bc\u06c2\u06c6\u06cd\u06db\u06e0\u06e5\u06e9") - buf.write("\u06f5\u070a\u0712\u0721\u0725\u072c") + buf.write("\7\65\2\2\u05b0\u05b1\5p9\2\u05b1\u05b2\7\61\2\2\u05b2") + buf.write("\u05b3\5p9\2\u05b3k\3\2\2\2\u05b4\u05b6\t\31\2\2\u05b5") + buf.write("\u05b4\3\2\2\2\u05b5\u05b6\3\2\2\2\u05b6\u05b7\3\2\2\2") + buf.write("\u05b7\u05b8\7\u00f3\2\2\u05b8m\3\2\2\2\u05b9\u05bb\t") + buf.write("\31\2\2\u05ba\u05b9\3\2\2\2\u05ba\u05bb\3\2\2\2\u05bb") + buf.write("\u05bc\3\2\2\2\u05bc\u05bd\7\u00f4\2\2\u05bdo\3\2\2\2") + buf.write("\u05be\u05bf\5l\67\2\u05bf\u05c0\7\u009b\2\2\u05c0\u05cc") + buf.write("\3\2\2\2\u05c1\u05c2\5l\67\2\u05c2\u05c3\7\u009c\2\2\u05c3") + buf.write("\u05cc\3\2\2\2\u05c4\u05c5\7\u00a1\2\2\u05c5\u05c6\7y") + buf.write("\2\2\u05c6\u05cc\7\u00bf\2\2\u05c7\u05c8\7\u009d\2\2\u05c8") + buf.write("\u05cc\7\u009b\2\2\u05c9\u05ca\7\u009d\2\2\u05ca\u05cc") + buf.write("\7\u009c\2\2\u05cb\u05be\3\2\2\2\u05cb\u05c1\3\2\2\2\u05cb") + buf.write("\u05c4\3\2\2\2\u05cb\u05c7\3\2\2\2\u05cb\u05c9\3\2\2\2") + buf.write("\u05ccq\3\2\2\2\u05cd\u05ce\7\u0092\2\2\u05ce\u05cf\t") + buf.write("\32\2\2\u05cf\u05d4\5\u00c2b\2\u05d0\u05d1\7\23\2\2\u05d1") + 
buf.write("\u05d3\5\u00c2b\2\u05d2\u05d0\3\2\2\2\u05d3\u05d6\3\2") + buf.write("\2\2\u05d4\u05d2\3\2\2\2\u05d4\u05d5\3\2\2\2\u05d5\u05df") + buf.write("\3\2\2\2\u05d6\u05d4\3\2\2\2\u05d7\u05d8\7\u00d5\2\2\u05d8") + buf.write("\u05d9\7\3\2\2\u05d9\u05dc\7\u00f6\2\2\u05da\u05db\7\23") + buf.write("\2\2\u05db\u05dd\t\22\2\2\u05dc\u05da\3\2\2\2\u05dc\u05dd") + buf.write("\3\2\2\2\u05dd\u05de\3\2\2\2\u05de\u05e0\7\4\2\2\u05df") + buf.write("\u05d7\3\2\2\2\u05df\u05e0\3\2\2\2\u05e0\u05f6\3\2\2\2") + buf.write("\u05e1\u05e2\7\u0092\2\2\u05e2\u05f3\7J\2\2\u05e3\u05e4") + buf.write("\7\u00d5\2\2\u05e4\u05e5\7\3\2\2\u05e5\u05e8\7\u00f6\2") + buf.write("\2\u05e6\u05e7\7\23\2\2\u05e7\u05e9\t\21\2\2\u05e8\u05e6") + buf.write("\3\2\2\2\u05e8\u05e9\3\2\2\2\u05e9\u05ec\3\2\2\2\u05ea") + buf.write("\u05eb\7\23\2\2\u05eb\u05ed\5\u00ccg\2\u05ec\u05ea\3\2") + buf.write("\2\2\u05ec\u05ed\3\2\2\2\u05ed\u05f0\3\2\2\2\u05ee\u05ef") + buf.write("\7\23\2\2\u05ef\u05f1\t\22\2\2\u05f0\u05ee\3\2\2\2\u05f0") + buf.write("\u05f1\3\2\2\2\u05f1\u05f2\3\2\2\2\u05f2\u05f4\7\4\2\2") + buf.write("\u05f3\u05e3\3\2\2\2\u05f3\u05f4\3\2\2\2\u05f4\u05f6\3") + buf.write("\2\2\2\u05f5\u05cd\3\2\2\2\u05f5\u05e1\3\2\2\2\u05f6s") + buf.write("\3\2\2\2\u05f7\u05f8\7\u0094\2\2\u05f8\u05f9\5\b\5\2\u05f9") + buf.write("u\3\2\2\2\u05fa\u05fb\5\u00be`\2\u05fb\u05fe\5|?\2\u05fc") + buf.write("\u05fd\7\u00df\2\2\u05fd\u05ff\5X-\2\u05fe\u05fc\3\2\2") + buf.write("\2\u05fe\u05ff\3\2\2\2\u05ffw\3\2\2\2\u0600\u0604\5\u0080") + buf.write("A\2\u0601\u0604\5\u0084C\2\u0602\u0604\5\u0082B\2\u0603") + buf.write("\u0600\3\2\2\2\u0603\u0601\3\2\2\2\u0603\u0602\3\2\2\2") + buf.write("\u0604y\3\2\2\2\u0605\u0608\5\u0082B\2\u0606\u0608\5\u0080") + buf.write("A\2\u0607\u0605\3\2\2\2\u0607\u0606\3\2\2\2\u0608{\3\2") + buf.write("\2\2\u0609\u060f\5\u0080A\2\u060a\u060f\5\u0084C\2\u060b") + buf.write("\u060f\5\u0088E\2\u060c\u060f\5~@\2\u060d\u060f\5\u0082") + buf.write("B\2\u060e\u0609\3\2\2\2\u060e\u060a\3\2\2\2\u060e\u060b") + 
buf.write("\3\2\2\2\u060e\u060c\3\2\2\2\u060e\u060d\3\2\2\2\u060f") + buf.write("}\3\2\2\2\u0610\u0614\7\u0081\2\2\u0611\u0614\5\u008a") + buf.write("F\2\u0612\u0614\5\u008cG\2\u0613\u0610\3\2\2\2\u0613\u0611") + buf.write("\3\2\2\2\u0613\u0612\3\2\2\2\u0614\177\3\2\2\2\u0615\u0618") + buf.write("\5\u00dco\2\u0616\u0618\5\u008eH\2\u0617\u0615\3\2\2\2") + buf.write("\u0617\u0616\3\2\2\2\u0618\u061a\3\2\2\2\u0619\u061b\5") + buf.write("\u00a8U\2\u061a\u0619\3\2\2\2\u061a\u061b\3\2\2\2\u061b") + buf.write("\u0620\3\2\2\2\u061c\u061e\7\64\2\2\u061d\u061c\3\2\2") + buf.write("\2\u061d\u061e\3\2\2\2\u061e\u061f\3\2\2\2\u061f\u0621") + buf.write("\78\2\2\u0620\u061d\3\2\2\2\u0620\u0621\3\2\2\2\u0621") + buf.write("\u0081\3\2\2\2\u0622\u0627\5\u00d0i\2\u0623\u0624\7\n") + buf.write("\2\2\u0624\u0625\5\u0080A\2\u0625\u0626\7\13\2\2\u0626") + buf.write("\u0628\3\2\2\2\u0627\u0623\3\2\2\2\u0627\u0628\3\2\2\2") + buf.write("\u0628\u0083\3\2\2\2\u0629\u0635\7{\2\2\u062a\u062b\7") + buf.write("\7\2\2\u062b\u0630\5\u00aaV\2\u062c\u062d\7\23\2\2\u062d") + buf.write("\u062f\5\u00aaV\2\u062e\u062c\3\2\2\2\u062f\u0632\3\2") + buf.write("\2\2\u0630\u062e\3\2\2\2\u0630\u0631\3\2\2\2\u0631\u0633") + buf.write("\3\2\2\2\u0632\u0630\3\2\2\2\u0633\u0634\7\b\2\2\u0634") + buf.write("\u0636\3\2\2\2\u0635\u062a\3\2\2\2\u0635\u0636\3\2\2\2") + buf.write("\u0636\u0085\3\2\2\2\u0637\u063a\5\u0084C\2\u0638\u063a") + buf.write("\5\u0080A\2\u0639\u0637\3\2\2\2\u0639\u0638\3\2\2\2\u063a") + buf.write("\u0087\3\2\2\2\u063b\u0640\7\u00f1\2\2\u063c\u063d\7\n") + buf.write("\2\2\u063d\u063e\5\u0080A\2\u063e\u063f\7\13\2\2\u063f") + buf.write("\u0641\3\2\2\2\u0640\u063c\3\2\2\2\u0640\u0641\3\2\2\2") + buf.write("\u0641\u0089\3\2\2\2\u0642\u0660\7\177\2\2\u0643\u064f") + buf.write("\7\u00ed\2\2\u0644\u0645\7\7\2\2\u0645\u064a\5\u008eH") + buf.write("\2\u0646\u0647\7\21\2\2\u0647\u0649\5\u008eH\2\u0648\u0646") + buf.write("\3\2\2\2\u0649\u064c\3\2\2\2\u064a\u0648\3\2\2\2\u064a") + 
buf.write("\u064b\3\2\2\2\u064b\u064d\3\2\2\2\u064c\u064a\3\2\2\2") + buf.write("\u064d\u064e\7\b\2\2\u064e\u0650\3\2\2\2\u064f\u0644\3") + buf.write("\2\2\2\u064f\u0650\3\2\2\2\u0650\u0660\3\2\2\2\u0651\u065d") + buf.write("\7\u00ee\2\2\u0652\u0653\7\7\2\2\u0653\u0658\5\u00be`") + buf.write("\2\u0654\u0655\7\21\2\2\u0655\u0657\5\u00be`\2\u0656\u0654") + buf.write("\3\2\2\2\u0657\u065a\3\2\2\2\u0658\u0656\3\2\2\2\u0658") + buf.write("\u0659\3\2\2\2\u0659\u065b\3\2\2\2\u065a\u0658\3\2\2\2") + buf.write("\u065b\u065c\7\b\2\2\u065c\u065e\3\2\2\2\u065d\u0652\3") + buf.write("\2\2\2\u065d\u065e\3\2\2\2\u065e\u0660\3\2\2\2\u065f\u0642") + buf.write("\3\2\2\2\u065f\u0643\3\2\2\2\u065f\u0651\3\2\2\2\u0660") + buf.write("\u008b\3\2\2\2\u0661\u068a\7\u0080\2\2\u0662\u0673\7\u00ef") + buf.write("\2\2\u0663\u0664\7\7\2\2\u0664\u0670\7\u00f7\2\2\u0665") + buf.write("\u0666\7\3\2\2\u0666\u066b\5\u008eH\2\u0667\u0668\7\21") + buf.write("\2\2\u0668\u066a\5\u008eH\2\u0669\u0667\3\2\2\2\u066a") + buf.write("\u066d\3\2\2\2\u066b\u0669\3\2\2\2\u066b\u066c\3\2\2\2") + buf.write("\u066c\u066e\3\2\2\2\u066d\u066b\3\2\2\2\u066e\u066f\7") + buf.write("\4\2\2\u066f\u0671\3\2\2\2\u0670\u0665\3\2\2\2\u0670\u0671") + buf.write("\3\2\2\2\u0671\u0672\3\2\2\2\u0672\u0674\7\b\2\2\u0673") + buf.write("\u0663\3\2\2\2\u0673\u0674\3\2\2\2\u0674\u068a\3\2\2\2") + buf.write("\u0675\u0687\7\u00f0\2\2\u0676\u0677\7\7\2\2\u0677\u0683") + buf.write("\5\u00be`\2\u0678\u0679\7\3\2\2\u0679\u067e\5\u00be`\2") + buf.write("\u067a\u067b\7\21\2\2\u067b\u067d\5\u00be`\2\u067c\u067a") + buf.write("\3\2\2\2\u067d\u0680\3\2\2\2\u067e\u067c\3\2\2\2\u067e") + buf.write("\u067f\3\2\2\2\u067f\u0681\3\2\2\2\u0680\u067e\3\2\2\2") + buf.write("\u0681\u0682\7\4\2\2\u0682\u0684\3\2\2\2\u0683\u0678\3") + buf.write("\2\2\2\u0683\u0684\3\2\2\2\u0684\u0685\3\2\2\2\u0685\u0686") + buf.write("\7\b\2\2\u0686\u0688\3\2\2\2\u0687\u0676\3\2\2\2\u0687") + buf.write("\u0688\3\2\2\2\u0688\u068a\3\2\2\2\u0689\u0661\3\2\2\2") + 
buf.write("\u0689\u0662\3\2\2\2\u0689\u0675\3\2\2\2\u068a\u008d\3") + buf.write("\2\2\2\u068b\u068c\7\u00f7\2\2\u068c\u008f\3\2\2\2\u068d") + buf.write("\u068e\7\u00f7\2\2\u068e\u0091\3\2\2\2\u068f\u0690\t\33") + buf.write("\2\2\u0690\u0695\5\u0094K\2\u0691\u0692\7\23\2\2\u0692") + buf.write("\u0694\5\u0094K\2\u0693\u0691\3\2\2\2\u0694\u0697\3\2") + buf.write("\2\2\u0695\u0693\3\2\2\2\u0695\u0696\3\2\2\2\u0696\u0093") + buf.write("\3\2\2\2\u0697\u0695\3\2\2\2\u0698\u069b\5\u00be`\2\u0699") + buf.write("\u069a\7\60\2\2\u069a\u069c\5\u00bc_\2\u069b\u0699\3\2") + buf.write("\2\2\u069b\u069c\3\2\2\2\u069c\u0095\3\2\2\2\u069d\u06a2") + buf.write("\5\u0098M\2\u069e\u069f\7\u00f9\2\2\u069f\u06a1\5\u0098") + buf.write("M\2\u06a0\u069e\3\2\2\2\u06a1\u06a4\3\2\2\2\u06a2\u06a0") + buf.write("\3\2\2\2\u06a2\u06a3\3\2\2\2\u06a3\u0097\3\2\2\2\u06a4") + buf.write("\u06a2\3\2\2\2\u06a5\u06a6\7\u00f7\2\2\u06a6\u06a8\7\25") + buf.write("\2\2\u06a7\u06a5\3\2\2\2\u06a7\u06a8\3\2\2\2\u06a8\u06ad") + buf.write("\3\2\2\2\u06a9\u06aa\7\u00bb\2\2\u06aa\u06ab\5\b\5\2\u06ab") + buf.write("\u06ac\7\33\2\2\u06ac\u06ae\3\2\2\2\u06ad\u06a9\3\2\2") + buf.write("\2\u06ad\u06ae\3\2\2\2\u06ae\u06af\3\2\2\2\u06af\u06b1") + buf.write("\5\b\5\2\u06b0\u06b2\5\u00c6d\2\u06b1\u06b0\3\2\2\2\u06b1") + buf.write("\u06b2\3\2\2\2\u06b2\u06b4\3\2\2\2\u06b3\u06b5\5\u00c8") + buf.write("e\2\u06b4\u06b3\3\2\2\2\u06b4\u06b5\3\2\2\2\u06b5\u0099") + buf.write("\3\2\2\2\u06b6\u06bb\5\u009cO\2\u06b7\u06b8\7\u00f9\2") + buf.write("\2\u06b8\u06ba\5\u009cO\2\u06b9\u06b7\3\2\2\2\u06ba\u06bd") + buf.write("\3\2\2\2\u06bb\u06b9\3\2\2\2\u06bb\u06bc\3\2\2\2\u06bc") + buf.write("\u009b\3\2\2\2\u06bd\u06bb\3\2\2\2\u06be\u06bf\7\u00f7") + buf.write("\2\2\u06bf\u06c1\7\25\2\2\u06c0\u06be\3\2\2\2\u06c0\u06c1") + buf.write("\3\2\2\2\u06c1\u06c2\3\2\2\2\u06c2\u06c4\5\u00a2R\2\u06c3") + buf.write("\u06c5\5\u00c6d\2\u06c4\u06c3\3\2\2\2\u06c4\u06c5\3\2") + buf.write("\2\2\u06c5\u06c7\3\2\2\2\u06c6\u06c8\5\u00c8e\2\u06c7") + 
buf.write("\u06c6\3\2\2\2\u06c7\u06c8\3\2\2\2\u06c8\u009d\3\2\2\2") + buf.write("\u06c9\u06cc\t\33\2\2\u06ca\u06cb\7\u00a9\2\2\u06cb\u06cd") + buf.write("\5\u00a0Q\2\u06cc\u06ca\3\2\2\2\u06cc\u06cd\3\2\2\2\u06cd") + buf.write("\u06ce\3\2\2\2\u06ce\u06cf\7\u0082\2\2\u06cf\u06d0\7\u00f7") + buf.write("\2\2\u06d0\u009f\3\2\2\2\u06d1\u06d6\5\u0094K\2\u06d2") + buf.write("\u06d3\7\23\2\2\u06d3\u06d5\5\u0094K\2\u06d4\u06d2\3\2") + buf.write("\2\2\u06d5\u06d8\3\2\2\2\u06d6\u06d4\3\2\2\2\u06d6\u06d7") + buf.write("\3\2\2\2\u06d7\u00a1\3\2\2\2\u06d8\u06d6\3\2\2\2\u06d9") + buf.write("\u06da\7\u00bb\2\2\u06da\u06db\5\b\5\2\u06db\u06dc\7\33") + buf.write("\2\2\u06dc\u06de\3\2\2\2\u06dd\u06d9\3\2\2\2\u06dd\u06de") + buf.write("\3\2\2\2\u06de\u06df\3\2\2\2\u06df\u06e1\5\u00a6T\2\u06e0") + buf.write("\u06e2\5\u00caf\2\u06e1\u06e0\3\2\2\2\u06e1\u06e2\3\2") + buf.write("\2\2\u06e2\u06e3\3\2\2\2\u06e3\u06e7\5\u00a4S\2\u06e4") + buf.write("\u06e6\5\u00a4S\2\u06e5\u06e4\3\2\2\2\u06e6\u06e9\3\2") + buf.write("\2\2\u06e7\u06e5\3\2\2\2\u06e7\u06e8\3\2\2\2\u06e8\u00a3") + buf.write("\3\2\2\2\u06e9\u06e7\3\2\2\2\u06ea\u06ec\t\31\2\2\u06eb") + buf.write("\u06ea\3\2\2\2\u06eb\u06ec\3\2\2\2\u06ec\u06ed\3\2\2\2") + buf.write("\u06ed\u06f2\5\u00a6T\2\u06ee\u06ef\7\5\2\2\u06ef\u06f0") + buf.write("\5\b\5\2\u06f0\u06f1\7\6\2\2\u06f1\u06f3\3\2\2\2\u06f2") + buf.write("\u06ee\3\2\2\2\u06f2\u06f3\3\2\2\2\u06f3\u00a5\3\2\2\2") + buf.write("\u06f4\u06f8\7\u00f7\2\2\u06f5\u06f8\5l\67\2\u06f6\u06f8") + buf.write("\5n8\2\u06f7\u06f4\3\2\2\2\u06f7\u06f5\3\2\2\2\u06f7\u06f6") + buf.write("\3\2\2\2\u06f8\u00a7\3\2\2\2\u06f9\u06fa\7\5\2\2\u06fa") + buf.write("\u06fb\5\b\5\2\u06fb\u06fc\7\6\2\2\u06fc\u0709\3\2\2\2") + buf.write("\u06fd\u06fe\7\7\2\2\u06fe\u0703\5X-\2\u06ff\u0700\7\23") + buf.write("\2\2\u0700\u0702\5X-\2\u0701\u06ff\3\2\2\2\u0702\u0705") + buf.write("\3\2\2\2\u0703\u0701\3\2\2\2\u0703\u0704\3\2\2\2\u0704") + buf.write("\u0706\3\2\2\2\u0705\u0703\3\2\2\2\u0706\u0707\7\b\2\2") + 
buf.write("\u0707\u0709\3\2\2\2\u0708\u06f9\3\2\2\2\u0708\u06fd\3") + buf.write("\2\2\2\u0709\u00a9\3\2\2\2\u070a\u070d\5\u0082B\2\u070b") + buf.write("\u070e\5\u00c2b\2\u070c\u070e\5\u00acW\2\u070d\u070b\3") + buf.write("\2\2\2\u070d\u070c\3\2\2\2\u070e\u00ab\3\2\2\2\u070f\u0711") + buf.write("\7u\2\2\u0710\u0712\t\34\2\2\u0711\u0710\3\2\2\2\u0711") + buf.write("\u0712\3\2\2\2\u0712\u00ad\3\2\2\2\u0713\u0714\t\35\2") + buf.write("\2\u0714\u00af\3\2\2\2\u0715\u0716\t\36\2\2\u0716\u00b1") + buf.write("\3\2\2\2\u0717\u0718\7\u00a9\2\2\u0718\u071d\5\u00c2b") + buf.write("\2\u0719\u071a\7\23\2\2\u071a\u071c\5\u00c2b\2\u071b\u0719") + buf.write("\3\2\2\2\u071c\u071f\3\2\2\2\u071d\u071b\3\2\2\2\u071d") + buf.write("\u071e\3\2\2\2\u071e\u00b3\3\2\2\2\u071f\u071d\3\2\2\2") + buf.write("\u0720\u0721\t\37\2\2\u0721\u00b5\3\2\2\2\u0722\u0723") + buf.write("\7H\2\2\u0723\u0724\5\6\4\2\u0724\u00b7\3\2\2\2\u0725") + buf.write("\u0726\t \2\2\u0726\u00b9\3\2\2\2\u0727\u0728\t!\2\2\u0728") + buf.write("\u00bb\3\2\2\2\u0729\u072a\7\u00f7\2\2\u072a\u00bd\3\2") + buf.write("\2\2\u072b\u072c\7\u00f7\2\2\u072c\u00bf\3\2\2\2\u072d") + buf.write("\u072e\7\u00f7\2\2\u072e\u00c1\3\2\2\2\u072f\u0732\7\u00f7") + buf.write("\2\2\u0730\u0731\7\27\2\2\u0731\u0733\7\u00f7\2\2\u0732") + buf.write("\u0730\3\2\2\2\u0732\u0733\3\2\2\2\u0733\u00c3\3\2\2\2") + buf.write("\u0734\u0735\7\7\2\2\u0735\u073a\5X-\2\u0736\u0737\7\23") + buf.write("\2\2\u0737\u0739\5X-\2\u0738\u0736\3\2\2\2\u0739\u073c") + buf.write("\3\2\2\2\u073a\u0738\3\2\2\2\u073a\u073b\3\2\2\2\u073b") + buf.write("\u073d\3\2\2\2\u073c\u073a\3\2\2\2\u073d\u073e\7\b\2\2") + buf.write("\u073e\u00c5\3\2\2\2\u073f\u0740\7I\2\2\u0740\u0741\5") + buf.write("\u00dan\2\u0741\u00c7\3\2\2\2\u0742\u0743\7L\2\2\u0743") + buf.write('\u0744\5\u00dan\2\u0744\u00c9\3\2\2\2\u0745\u0746\t"') + buf.write("\2\2\u0746\u00cb\3\2\2\2\u0747\u074a\5\6\4\2\u0748\u074a") + buf.write("\7u\2\2\u0749\u0747\3\2\2\2\u0749\u0748\3\2\2\2\u074a") + 
buf.write("\u00cd\3\2\2\2\u074b\u074e\5\b\5\2\u074c\u074e\7u\2\2") + buf.write("\u074d\u074b\3\2\2\2\u074d\u074c\3\2\2\2\u074e\u00cf\3") + buf.write("\2\2\2\u074f\u0755\7j\2\2\u0750\u0755\7\u00ec\2\2\u0751") + buf.write("\u0755\7i\2\2\u0752\u0755\7k\2\2\u0753\u0755\5\u00d2j") + buf.write("\2\u0754\u074f\3\2\2\2\u0754\u0750\3\2\2\2\u0754\u0751") + buf.write("\3\2\2\2\u0754\u0752\3\2\2\2\u0754\u0753\3\2\2\2\u0755") + buf.write("\u00d1\3\2\2\2\u0756\u0757\7p\2\2\u0757\u0758\7k\2\2\u0758") + buf.write("\u00d3\3\2\2\2\u0759\u075a\7\u00f7\2\2\u075a\u00d5\3\2") + buf.write("\2\2\u075b\u075c\7\u00f7\2\2\u075c\u00d7\3\2\2\2\u075d") + buf.write("\u075e\7\u00f7\2\2\u075e\u00d9\3\2\2\2\u075f\u0765\5l") + buf.write("\67\2\u0760\u0765\5n8\2\u0761\u0765\7\u00f5\2\2\u0762") + buf.write("\u0765\7\u00f6\2\2\u0763\u0765\78\2\2\u0764\u075f\3\2") + buf.write("\2\2\u0764\u0760\3\2\2\2\u0764\u0761\3\2\2\2\u0764\u0762") + buf.write("\3\2\2\2\u0764\u0763\3\2\2\2\u0765\u00db\3\2\2\2\u0766") + buf.write("\u0767\t#\2\2\u0767\u00dd\3\2\2\2\u0768\u0769\t$\2\2\u0769") + buf.write("\u00df\3\2\2\2\u00ce\u00e5\u00f3\u010c\u0113\u0131\u0133") + buf.write("\u0135\u014f\u0156\u016c\u016e\u0170\u017b\u0189\u0192") + buf.write("\u019a\u01a1\u01a3\u01ae\u01b7\u01c9\u01d2\u01e1\u01ec") + buf.write("\u01ef\u01f4\u0215\u021e\u0221\u022b\u0230\u0234\u023a") + buf.write("\u023e\u0248\u024c\u0250\u0259\u025c\u0266\u026a\u0274") + buf.write("\u0279\u027d\u0283\u0287\u028b\u028f\u0293\u02a3\u02a7") + buf.write("\u02b2\u02bd\u02c1\u02c5\u02d5\u02d9\u02e4\u02ef\u02f3") + buf.write("\u02f7\u0303\u030e\u031a\u0325\u0343\u0347\u035e\u0363") + buf.write("\u036b\u0380\u0384\u0388\u03c6\u03cb\u03d3\u03e8\u03ec") + buf.write("\u03f0\u042e\u0437\u0442\u044d\u0455\u0459\u045c\u045f") + buf.write("\u0462\u0471\u0474\u0477\u0481\u0485\u0488\u048b\u048e") + buf.write("\u0496\u0499\u049c\u049f\u04a3\u04bb\u04c2\u04c4\u04ce") + buf.write("\u04d1\u04d4\u04e0\u04e2\u04e7\u04f7\u04ff\u0502\u0505") + 
buf.write("\u0510\u0512\u0517\u0523\u0533\u053e\u0542\u0549\u055b") + buf.write("\u055f\u0566\u056e\u0577\u057a\u057f\u0582\u0587\u058a") + buf.write("\u058d\u0599\u05a3\u05a8\u05ad\u05b5\u05ba\u05cb\u05d4") + buf.write("\u05dc\u05df\u05e8\u05ec\u05f0\u05f3\u05f5\u05fe\u0603") + buf.write("\u0607\u060e\u0613\u0617\u061a\u061d\u0620\u0627\u0630") + buf.write("\u0635\u0639\u0640\u064a\u064f\u0658\u065d\u065f\u066b") + buf.write("\u0670\u0673\u067e\u0683\u0687\u0689\u0695\u069b\u06a2") + buf.write("\u06a7\u06ad\u06b1\u06b4\u06bb\u06c0\u06c4\u06c7\u06cc") + buf.write("\u06d6\u06dd\u06e1\u06e7\u06eb\u06f2\u06f7\u0703\u0708") + buf.write("\u070d\u0711\u071d\u0732\u073a\u0749\u074d\u0754\u0764") return buf.getvalue() @@ -1015,8 +1041,8 @@ class Parser(ANTLRParser): "'current_date'", "'datediff'", "'dateadd'", - "'year'", - "'month'", + "'getyear'", + "'getmonth'", "'dayofmonth'", "'dayofyear'", "'daytoyear'", @@ -1540,62 +1566,63 @@ class Parser(ANTLRParser): RULE_orderByItem = 51 RULE_windowingClause = 52 RULE_signedInteger = 53 - RULE_limitClauseItem = 54 - RULE_groupingClause = 55 - RULE_havingClause = 56 - RULE_parameterItem = 57 - RULE_outputParameterType = 58 - RULE_outputParameterTypeComponent = 59 - RULE_inputParameterType = 60 - RULE_rulesetType = 61 - RULE_scalarType = 62 - RULE_componentType = 63 - RULE_datasetType = 64 - RULE_evalDatasetType = 65 - RULE_scalarSetType = 66 - RULE_dpRuleset = 67 - RULE_hrRuleset = 68 - RULE_valueDomainName = 69 - RULE_rulesetID = 70 - RULE_rulesetSignature = 71 - RULE_signature = 72 - RULE_ruleClauseDatapoint = 73 - RULE_ruleItemDatapoint = 74 - RULE_ruleClauseHierarchical = 75 - RULE_ruleItemHierarchical = 76 - RULE_hierRuleSignature = 77 - RULE_valueDomainSignature = 78 - RULE_codeItemRelation = 79 - RULE_codeItemRelationClause = 80 - RULE_valueDomainValue = 81 - RULE_scalarTypeConstraint = 82 - RULE_compConstraint = 83 - RULE_multModifier = 84 - RULE_validationOutput = 85 - RULE_validationMode = 86 - RULE_conditionClause = 87 
- RULE_inputMode = 88 - RULE_imbalanceExpr = 89 - RULE_inputModeHierarchy = 90 - RULE_outputModeHierarchy = 91 - RULE_alias = 92 - RULE_varID = 93 - RULE_simpleComponentId = 94 - RULE_componentID = 95 - RULE_lists = 96 - RULE_erCode = 97 - RULE_erLevel = 98 - RULE_comparisonOperand = 99 - RULE_optionalExpr = 100 - RULE_optionalExprComponent = 101 - RULE_componentRole = 102 - RULE_viralAttribute = 103 - RULE_valueDomainID = 104 - RULE_operatorID = 105 - RULE_routineName = 106 - RULE_constant = 107 - RULE_basicScalarType = 108 - RULE_retainType = 109 + RULE_signedNumber = 54 + RULE_limitClauseItem = 55 + RULE_groupingClause = 56 + RULE_havingClause = 57 + RULE_parameterItem = 58 + RULE_outputParameterType = 59 + RULE_outputParameterTypeComponent = 60 + RULE_inputParameterType = 61 + RULE_rulesetType = 62 + RULE_scalarType = 63 + RULE_componentType = 64 + RULE_datasetType = 65 + RULE_evalDatasetType = 66 + RULE_scalarSetType = 67 + RULE_dpRuleset = 68 + RULE_hrRuleset = 69 + RULE_valueDomainName = 70 + RULE_rulesetID = 71 + RULE_rulesetSignature = 72 + RULE_signature = 73 + RULE_ruleClauseDatapoint = 74 + RULE_ruleItemDatapoint = 75 + RULE_ruleClauseHierarchical = 76 + RULE_ruleItemHierarchical = 77 + RULE_hierRuleSignature = 78 + RULE_valueDomainSignature = 79 + RULE_codeItemRelation = 80 + RULE_codeItemRelationClause = 81 + RULE_valueDomainValue = 82 + RULE_scalarTypeConstraint = 83 + RULE_compConstraint = 84 + RULE_multModifier = 85 + RULE_validationOutput = 86 + RULE_validationMode = 87 + RULE_conditionClause = 88 + RULE_inputMode = 89 + RULE_imbalanceExpr = 90 + RULE_inputModeHierarchy = 91 + RULE_outputModeHierarchy = 92 + RULE_alias = 93 + RULE_varID = 94 + RULE_simpleComponentId = 95 + RULE_componentID = 96 + RULE_lists = 97 + RULE_erCode = 98 + RULE_erLevel = 99 + RULE_comparisonOperand = 100 + RULE_optionalExpr = 101 + RULE_optionalExprComponent = 102 + RULE_componentRole = 103 + RULE_viralAttribute = 104 + RULE_valueDomainID = 105 + RULE_operatorID = 106 + 
RULE_routineName = 107 + RULE_constant = 108 + RULE_basicScalarType = 109 + RULE_retainType = 110 ruleNames = [ "start", @@ -1652,6 +1679,7 @@ class Parser(ANTLRParser): "orderByItem", "windowingClause", "signedInteger", + "signedNumber", "limitClauseItem", "groupingClause", "havingClause", @@ -1963,17 +1991,18 @@ class Parser(ANTLRParser): def __init__(self, input: TokenStream, output: TextIO = sys.stdout): super().__init__(input, output) + self.checkVersion("4.9.2") self._interp = ParserATNSimulator( self, self.atn, self.decisionsToDFA, self.sharedContextCache ) self._predicates = None class StartContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def EOF(self): return self.getToken(Parser.EOF, 0) @@ -2001,25 +2030,32 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitStart"): listener.exitStart(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitStart"): + return visitor.visitStart(self) + else: + return visitor.visitChildren(self) + def start(self) -> Any: + localctx = Parser.StartContext(self, self._ctx, self.state) self.enterRule(localctx, 0, self.RULE_start) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 225 + self.state = 227 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.DEFINE or _la == Parser.IDENTIFIER: - self.state = 220 + self.state = 222 self.statement() - self.state = 221 + self.state = 223 self.match(Parser.EOL) - self.state = 227 + self.state = 229 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 228 + self.state = 230 self.match(Parser.EOF) except RecognitionException as re: localctx.exception = re @@ -2030,11 +2066,11 
@@ def start(self) -> Any: return localctx class StatementContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_statement @@ -2043,10 +2079,8 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class DefineExpressionContext(StatementContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.StatementContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.StatementContext + super().__init__(parser) self.copyFrom(ctx) def defOperators(self): @@ -2060,11 +2094,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDefineExpression"): listener.exitDefineExpression(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDefineExpression"): + return visitor.visitDefineExpression(self) + else: + return visitor.visitChildren(self) + class TemporaryAssignmentContext(StatementContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.StatementContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.StatementContext + super().__init__(parser) self.copyFrom(ctx) def varID(self): @@ -2084,11 +2122,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitTemporaryAssignment"): listener.exitTemporaryAssignment(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitTemporaryAssignment"): + return visitor.visitTemporaryAssignment(self) + else: + return visitor.visitChildren(self) + class PersistAssignmentContext(StatementContext): - def 
__init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.StatementContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.StatementContext + super().__init__(parser) self.copyFrom(ctx) def varID(self): @@ -2108,39 +2150,46 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitPersistAssignment"): listener.exitPersistAssignment(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitPersistAssignment"): + return visitor.visitPersistAssignment(self) + else: + return visitor.visitChildren(self) + def statement(self): + localctx = Parser.StatementContext(self, self._ctx, self.state) self.enterRule(localctx, 2, self.RULE_statement) try: - self.state = 239 + self.state = 241 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 1, self._ctx) if la_ == 1: localctx = Parser.TemporaryAssignmentContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 230 + self.state = 232 self.varID() - self.state = 231 + self.state = 233 self.match(Parser.ASSIGN) - self.state = 232 + self.state = 234 self.expr(0) pass elif la_ == 2: localctx = Parser.PersistAssignmentContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 234 + self.state = 236 self.varID() - self.state = 235 + self.state = 237 self.match(Parser.PUT_SYMBOL) - self.state = 236 + self.state = 238 self.expr(0) pass elif la_ == 3: localctx = Parser.DefineExpressionContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 238 + self.state = 240 self.defOperators() pass @@ -2153,11 +2202,11 @@ def statement(self): return localctx class ExprContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - 
self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_expr @@ -2166,8 +2215,8 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class VarIdExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.copyFrom(ctx) def varID(self): @@ -2181,9 +2230,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitVarIdExpr"): listener.exitVarIdExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitVarIdExpr"): + return visitor.visitVarIdExpr(self) + else: + return visitor.visitChildren(self) + class MembershipExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.copyFrom(ctx) def expr(self): @@ -2203,9 +2258,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitMembershipExpr"): listener.exitMembershipExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitMembershipExpr"): + return visitor.visitMembershipExpr(self) + else: + return visitor.visitChildren(self) + class InNotInExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.left = None # ExprContext self.op = None # Token self.copyFrom(ctx) @@ -2233,9 +2294,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitInNotInExpr"): listener.exitInNotInExpr(self) + def accept(self, visitor: ParseTreeVisitor): + 
if hasattr(visitor, "visitInNotInExpr"): + return visitor.visitInNotInExpr(self) + else: + return visitor.visitChildren(self) + class BooleanExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.left = None # ExprContext self.op = None # Token self.right = None # ExprContext @@ -2264,9 +2331,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitBooleanExpr"): listener.exitBooleanExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitBooleanExpr"): + return visitor.visitBooleanExpr(self) + else: + return visitor.visitChildren(self) + class ComparisonExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.left = None # ExprContext self.op = None # ComparisonOperandContext self.right = None # ExprContext @@ -2289,9 +2362,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitComparisonExpr"): listener.exitComparisonExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitComparisonExpr"): + return visitor.visitComparisonExpr(self) + else: + return visitor.visitChildren(self) + class UnaryExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.op = None # Token self.right = None # ExprContext self.copyFrom(ctx) @@ -2316,9 +2395,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitUnaryExpr"): 
listener.exitUnaryExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitUnaryExpr"): + return visitor.visitUnaryExpr(self) + else: + return visitor.visitChildren(self) + class FunctionsExpressionContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.copyFrom(ctx) def functions(self): @@ -2332,9 +2417,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitFunctionsExpression"): listener.exitFunctionsExpression(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitFunctionsExpression"): + return visitor.visitFunctionsExpression(self) + else: + return visitor.visitChildren(self) + class IfExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.conditionalExpr = None # ExprContext self.thenExpr = None # ExprContext self.elseExpr = None # ExprContext @@ -2363,9 +2454,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitIfExpr"): listener.exitIfExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitIfExpr"): + return visitor.visitIfExpr(self) + else: + return visitor.visitChildren(self) + class ClauseExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.dataset = None # ExprContext self.clause = None # DatasetClauseContext self.copyFrom(ctx) @@ -2390,9 +2487,19 @@ def exitRule(self, listener: 
ParseTreeListener): if hasattr(listener, "exitClauseExpr"): listener.exitClauseExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitClauseExpr"): + return visitor.visitClauseExpr(self) + else: + return visitor.visitChildren(self) + class CaseExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) + self._expr = None # ExprContext + self.condExpr = list() # of ExprContexts + self.thenExpr = list() # of ExprContexts + self.elseExpr = None # ExprContext self.copyFrom(ctx) def CASE(self): @@ -2427,9 +2534,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCaseExpr"): listener.exitCaseExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCaseExpr"): + return visitor.visitCaseExpr(self) + else: + return visitor.visitChildren(self) + class ArithmeticExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.left = None # ExprContext self.op = None # Token self.right = None # ExprContext @@ -2455,9 +2568,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitArithmeticExpr"): listener.exitArithmeticExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitArithmeticExpr"): + return visitor.visitArithmeticExpr(self) + else: + return visitor.visitChildren(self) + class ParenthesisExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + 
super().__init__(parser) self.copyFrom(ctx) def LPAREN(self): @@ -2477,9 +2596,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitParenthesisExpr"): listener.exitParenthesisExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitParenthesisExpr"): + return visitor.visitParenthesisExpr(self) + else: + return visitor.visitChildren(self) + class ConstantExprContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.copyFrom(ctx) def constant(self): @@ -2493,9 +2618,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitConstantExpr"): listener.exitConstantExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitConstantExpr"): + return visitor.visitConstantExpr(self) + else: + return visitor.visitChildren(self) + class ArithmeticExprOrConcatContext(ExprContext): - def __init__(self, ANTLRParser, ctx: ParserRuleContext): # actually a Parser.ExprContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext + super().__init__(parser) self.left = None # ExprContext self.op = None # Token self.right = None # ExprContext @@ -2524,6 +2655,12 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitArithmeticExprOrConcat"): listener.exitArithmeticExprOrConcat(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitArithmeticExprOrConcat"): + return visitor.visitArithmeticExprOrConcat(self) + else: + return visitor.visitChildren(self) + def expr(self, _p: int = 0): _parentctx = self._ctx _parentState = self.state @@ -2534,7 +2671,7 @@ def expr(self, _p: int = 0): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 271 + self.state 
= 273 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 3, self._ctx) if la_ == 1: @@ -2542,11 +2679,11 @@ def expr(self, _p: int = 0): self._ctx = localctx _prevctx = localctx - self.state = 242 + self.state = 244 self.match(Parser.LPAREN) - self.state = 243 + self.state = 245 self.expr(0) - self.state = 244 + self.state = 246 self.match(Parser.RPAREN) pass @@ -2554,7 +2691,7 @@ def expr(self, _p: int = 0): localctx = Parser.FunctionsExpressionContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 246 + self.state = 248 self.functions() pass @@ -2562,7 +2699,7 @@ def expr(self, _p: int = 0): localctx = Parser.UnaryExprContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 247 + self.state = 249 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -2576,7 +2713,7 @@ def expr(self, _p: int = 0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 248 + self.state = 250 localctx.right = self.expr(11) pass @@ -2584,17 +2721,17 @@ def expr(self, _p: int = 0): localctx = Parser.IfExprContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 249 + self.state = 251 self.match(Parser.IF) - self.state = 250 + self.state = 252 localctx.conditionalExpr = self.expr(0) - self.state = 251 + self.state = 253 self.match(Parser.THEN) - self.state = 252 + self.state = 254 localctx.thenExpr = self.expr(0) - self.state = 253 + self.state = 255 self.match(Parser.ELSE) - self.state = 254 + self.state = 256 localctx.elseExpr = self.expr(4) pass @@ -2602,37 +2739,39 @@ def expr(self, _p: int = 0): localctx = Parser.CaseExprContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 256 + self.state = 258 self.match(Parser.CASE) - self.state = 262 + self.state = 264 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 257 - self.match(Parser.WHEN) - self.state = 258 - self.expr(0) self.state = 259 - 
self.match(Parser.THEN) + self.match(Parser.WHEN) self.state = 260 - self.expr(0) - self.state = 264 + localctx._expr = self.expr(0) + localctx.condExpr.append(localctx._expr) + self.state = 261 + self.match(Parser.THEN) + self.state = 262 + localctx._expr = self.expr(0) + localctx.thenExpr.append(localctx._expr) + self.state = 266 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la == Parser.WHEN): break - self.state = 266 + self.state = 268 self.match(Parser.ELSE) - self.state = 267 - self.expr(3) + self.state = 269 + localctx.elseExpr = self.expr(3) pass elif la_ == 6: localctx = Parser.ConstantExprContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 269 + self.state = 271 self.constant() pass @@ -2640,12 +2779,12 @@ def expr(self, _p: int = 0): localctx = Parser.VarIdExprContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 270 + self.state = 272 self.varID() pass self._ctx.stop = self._input.LT(-1) - self.state = 305 + self.state = 307 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input, 6, self._ctx) while _alt != 2 and _alt != ATN.INVALID_ALT_NUMBER: @@ -2653,7 +2792,7 @@ def expr(self, _p: int = 0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 303 + self.state = 305 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 5, self._ctx) if la_ == 1: @@ -2662,12 +2801,12 @@ def expr(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 273 + self.state = 275 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 274 + self.state = 276 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.MUL or _la == Parser.DIV): @@ -2675,7 +2814,7 @@ def expr(self, _p: int = 0): else: 
self._errHandler.reportMatch(self) self.consume() - self.state = 275 + self.state = 277 localctx.right = self.expr(11) pass @@ -2685,12 +2824,12 @@ def expr(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 276 + self.state = 278 if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 277 + self.state = 279 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.PLUS or _la == Parser.MINUS or _la == Parser.CONCAT): @@ -2698,7 +2837,7 @@ def expr(self, _p: int = 0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 278 + self.state = 280 localctx.right = self.expr(10) pass @@ -2708,14 +2847,14 @@ def expr(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 279 + self.state = 281 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 280 + self.state = 282 localctx.op = self.comparisonOperand() - self.state = 281 + self.state = 283 localctx.right = self.expr(9) pass @@ -2725,14 +2864,14 @@ def expr(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 283 + self.state = 285 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 284 + self.state = 286 localctx.op = self.match(Parser.AND) - self.state = 285 + self.state = 287 localctx.right = self.expr(7) pass @@ -2742,12 +2881,12 @@ def expr(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 286 + self.state = 288 
if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 287 + self.state = 289 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.OR or _la == Parser.XOR): @@ -2755,7 +2894,7 @@ def expr(self, _p: int = 0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 288 + self.state = 290 localctx.right = self.expr(6) pass @@ -2765,16 +2904,16 @@ def expr(self, _p: int = 0): ) localctx.dataset = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 289 + self.state = 291 if not self.precpred(self._ctx, 13): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 13)") - self.state = 290 + self.state = 292 self.match(Parser.QLPAREN) - self.state = 291 + self.state = 293 localctx.clause = self.datasetClause() - self.state = 292 + self.state = 294 self.match(Parser.QRPAREN) pass @@ -2783,14 +2922,14 @@ def expr(self, _p: int = 0): self, Parser.ExprContext(self, _parentctx, _parentState) ) self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 294 + self.state = 296 if not self.precpred(self._ctx, 12): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 12)") - self.state = 295 + self.state = 297 self.match(Parser.MEMBERSHIP) - self.state = 296 + self.state = 298 self.simpleComponentId() pass @@ -2800,12 +2939,12 @@ def expr(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) - self.state = 297 + self.state = 299 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 298 + self.state = 300 localctx.op = self._input.LT(1) _la = 
self._input.LA(1) if not (_la == Parser.IN or _la == Parser.NOT_IN): @@ -2813,15 +2952,15 @@ def expr(self, _p: int = 0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 301 + self.state = 303 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.GLPAREN]: - self.state = 299 + self.state = 301 self.lists() pass elif token in [Parser.IDENTIFIER]: - self.state = 300 + self.state = 302 self.valueDomainID() pass else: @@ -2829,7 +2968,7 @@ def expr(self, _p: int = 0): pass - self.state = 307 + self.state = 309 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input, 6, self._ctx) @@ -2842,11 +2981,11 @@ def expr(self, _p: int = 0): return localctx class ExprComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_exprComponent @@ -2856,9 +2995,9 @@ def copyFrom(self, ctx: ParserRuleContext): class ArithmeticExprCompContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.left = None # ExprComponentContext self.op = None # Token self.right = None # ExprComponentContext @@ -2884,11 +3023,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitArithmeticExprComp"): listener.exitArithmeticExprComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitArithmeticExprComp"): + return visitor.visitArithmeticExprComp(self) + else: + return visitor.visitChildren(self) + class IfExprCompContext(ExprComponentContext): def __init__( - self, 
ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.conditionalExpr = None # ExprComponentContext self.thenExpr = None # ExprComponentContext self.elseExpr = None # ExprComponentContext @@ -2917,11 +3062,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitIfExprComp"): listener.exitIfExprComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitIfExprComp"): + return visitor.visitIfExprComp(self) + else: + return visitor.visitChildren(self) + class ComparisonExprCompContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.left = None # ExprComponentContext self.right = None # ExprComponentContext self.copyFrom(ctx) @@ -2943,11 +3094,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitComparisonExprComp"): listener.exitComparisonExprComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitComparisonExprComp"): + return visitor.visitComparisonExprComp(self) + else: + return visitor.visitChildren(self) + class FunctionsExpressionCompContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def functionsComponents(self): @@ -2961,11 +3118,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitFunctionsExpressionComp"): listener.exitFunctionsExpressionComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitFunctionsExpressionComp"): + return visitor.visitFunctionsExpressionComp(self) + else: + return 
visitor.visitChildren(self) + class CompIdContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def componentID(self): @@ -2979,11 +3142,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCompId"): listener.exitCompId(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCompId"): + return visitor.visitCompId(self) + else: + return visitor.visitChildren(self) + class ConstantExprCompContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def constant(self): @@ -2997,11 +3166,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitConstantExprComp"): listener.exitConstantExprComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitConstantExprComp"): + return visitor.visitConstantExprComp(self) + else: + return visitor.visitChildren(self) + class ArithmeticExprOrConcatCompContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.left = None # ExprComponentContext self.op = None # Token self.right = None # ExprComponentContext @@ -3030,11 +3205,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitArithmeticExprOrConcatComp"): listener.exitArithmeticExprOrConcatComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitArithmeticExprOrConcatComp"): + return visitor.visitArithmeticExprOrConcatComp(self) + else: + return visitor.visitChildren(self) + 
class ParenthesisExprCompContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def LPAREN(self): @@ -3054,11 +3235,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitParenthesisExprComp"): listener.exitParenthesisExprComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitParenthesisExprComp"): + return visitor.visitParenthesisExprComp(self) + else: + return visitor.visitChildren(self) + class InNotInExprCompContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.left = None # ExprComponentContext self.op = None # Token self.copyFrom(ctx) @@ -3086,11 +3273,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitInNotInExprComp"): listener.exitInNotInExprComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitInNotInExprComp"): + return visitor.visitInNotInExprComp(self) + else: + return visitor.visitChildren(self) + class UnaryExprCompContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.right = None # ExprComponentContext self.copyFrom(ctx) @@ -3115,11 +3308,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitUnaryExprComp"): listener.exitUnaryExprComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitUnaryExprComp"): + return visitor.visitUnaryExprComp(self) + else: + return visitor.visitChildren(self) + class 
CaseExprCompContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) + self._exprComponent = None # ExprComponentContext + self.condExpr = list() # of ExprComponentContexts + self.thenExpr = list() # of ExprComponentContexts + self.elseExpr = None # ExprComponentContext self.copyFrom(ctx) def CASE(self): @@ -3154,11 +3357,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCaseExprComp"): listener.exitCaseExprComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCaseExprComp"): + return visitor.visitCaseExprComp(self) + else: + return visitor.visitChildren(self) + class BooleanExprCompContext(ExprComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ExprComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.left = None # ExprComponentContext self.op = None # Token self.right = None # ExprComponentContext @@ -3187,6 +3396,12 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitBooleanExprComp"): listener.exitBooleanExprComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitBooleanExprComp"): + return visitor.visitBooleanExprComp(self) + else: + return visitor.visitChildren(self) + def exprComponent(self, _p: int = 0): _parentctx = self._ctx _parentState = self.state @@ -3197,7 +3412,7 @@ def exprComponent(self, _p: int = 0): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 338 + self.state = 340 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 8, self._ctx) if la_ == 1: @@ -3205,11 +3420,11 @@ def exprComponent(self, _p: int = 0): self._ctx = localctx _prevctx = localctx - self.state = 309 + self.state = 311 
self.match(Parser.LPAREN) - self.state = 310 + self.state = 312 self.exprComponent(0) - self.state = 311 + self.state = 313 self.match(Parser.RPAREN) pass @@ -3217,7 +3432,7 @@ def exprComponent(self, _p: int = 0): localctx = Parser.FunctionsExpressionCompContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 313 + self.state = 315 self.functionsComponents() pass @@ -3225,7 +3440,7 @@ def exprComponent(self, _p: int = 0): localctx = Parser.UnaryExprCompContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 314 + self.state = 316 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -3239,7 +3454,7 @@ def exprComponent(self, _p: int = 0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 315 + self.state = 317 localctx.right = self.exprComponent(11) pass @@ -3247,17 +3462,17 @@ def exprComponent(self, _p: int = 0): localctx = Parser.IfExprCompContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 316 + self.state = 318 self.match(Parser.IF) - self.state = 317 + self.state = 319 localctx.conditionalExpr = self.exprComponent(0) - self.state = 318 + self.state = 320 self.match(Parser.THEN) - self.state = 319 + self.state = 321 localctx.thenExpr = self.exprComponent(0) - self.state = 320 + self.state = 322 self.match(Parser.ELSE) - self.state = 321 + self.state = 323 localctx.elseExpr = self.exprComponent(4) pass @@ -3265,37 +3480,39 @@ def exprComponent(self, _p: int = 0): localctx = Parser.CaseExprCompContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 323 + self.state = 325 self.match(Parser.CASE) - self.state = 329 + self.state = 331 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 324 - self.match(Parser.WHEN) - self.state = 325 - self.exprComponent(0) self.state = 326 - self.match(Parser.THEN) + self.match(Parser.WHEN) self.state = 327 - self.exprComponent(0) - self.state = 331 + 
localctx._exprComponent = self.exprComponent(0) + localctx.condExpr.append(localctx._exprComponent) + self.state = 328 + self.match(Parser.THEN) + self.state = 329 + localctx._exprComponent = self.exprComponent(0) + localctx.thenExpr.append(localctx._exprComponent) + self.state = 333 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la == Parser.WHEN): break - self.state = 333 + self.state = 335 self.match(Parser.ELSE) - self.state = 334 - self.exprComponent(3) + self.state = 336 + localctx.elseExpr = self.exprComponent(3) pass elif la_ == 6: localctx = Parser.ConstantExprCompContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 336 + self.state = 338 self.constant() pass @@ -3303,12 +3520,12 @@ def exprComponent(self, _p: int = 0): localctx = Parser.CompIdContext(self, localctx) self._ctx = localctx _prevctx = localctx - self.state = 337 + self.state = 339 self.componentID() pass self._ctx.stop = self._input.LT(-1) - self.state = 364 + self.state = 366 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input, 11, self._ctx) while _alt != 2 and _alt != ATN.INVALID_ALT_NUMBER: @@ -3316,7 +3533,7 @@ def exprComponent(self, _p: int = 0): if self._parseListeners is not None: self.triggerExitRuleEvent() _prevctx = localctx - self.state = 362 + self.state = 364 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 10, self._ctx) if la_ == 1: @@ -3325,12 +3542,12 @@ def exprComponent(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) - self.state = 340 + self.state = 342 if not self.precpred(self._ctx, 10): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 10)") - self.state = 341 + self.state = 343 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.MUL or _la == Parser.DIV): @@ -3338,7 +3555,7 @@ def exprComponent(self, _p: int 
= 0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 342 + self.state = 344 localctx.right = self.exprComponent(11) pass @@ -3348,12 +3565,12 @@ def exprComponent(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) - self.state = 343 + self.state = 345 if not self.precpred(self._ctx, 9): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 9)") - self.state = 344 + self.state = 346 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.PLUS or _la == Parser.MINUS or _la == Parser.CONCAT): @@ -3361,7 +3578,7 @@ def exprComponent(self, _p: int = 0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 345 + self.state = 347 localctx.right = self.exprComponent(10) pass @@ -3371,14 +3588,14 @@ def exprComponent(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) - self.state = 346 + self.state = 348 if not self.precpred(self._ctx, 8): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 8)") - self.state = 347 + self.state = 349 self.comparisonOperand() - self.state = 348 + self.state = 350 localctx.right = self.exprComponent(9) pass @@ -3388,14 +3605,14 @@ def exprComponent(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) - self.state = 350 + self.state = 352 if not self.precpred(self._ctx, 6): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 6)") - self.state = 351 + self.state = 353 localctx.op = self.match(Parser.AND) - self.state = 352 + self.state = 354 localctx.right = self.exprComponent(7) pass @@ -3405,12 +3622,12 @@ def exprComponent(self, _p: int = 0): ) localctx.left = _prevctx 
self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) - self.state = 353 + self.state = 355 if not self.precpred(self._ctx, 5): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 5)") - self.state = 354 + self.state = 356 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.OR or _la == Parser.XOR): @@ -3418,7 +3635,7 @@ def exprComponent(self, _p: int = 0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 355 + self.state = 357 localctx.right = self.exprComponent(6) pass @@ -3428,12 +3645,12 @@ def exprComponent(self, _p: int = 0): ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) - self.state = 356 + self.state = 358 if not self.precpred(self._ctx, 7): from antlr4.error.Errors import FailedPredicateException raise FailedPredicateException(self, "self.precpred(self._ctx, 7)") - self.state = 357 + self.state = 359 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.IN or _la == Parser.NOT_IN): @@ -3441,15 +3658,15 @@ def exprComponent(self, _p: int = 0): else: self._errHandler.reportMatch(self) self.consume() - self.state = 360 + self.state = 362 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.GLPAREN]: - self.state = 358 + self.state = 360 self.lists() pass elif token in [Parser.IDENTIFIER]: - self.state = 359 + self.state = 361 self.valueDomainID() pass else: @@ -3457,7 +3674,7 @@ def exprComponent(self, _p: int = 0): pass - self.state = 366 + self.state = 368 self._errHandler.sync(self) _alt = self._interp.adaptivePredict(self._input, 11, self._ctx) @@ -3470,11 +3687,11 @@ def exprComponent(self, _p: int = 0): return localctx class FunctionsComponentsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + 
def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_functionsComponents @@ -3484,9 +3701,9 @@ def copyFrom(self, ctx: ParserRuleContext): class NumericFunctionsComponentsContext(FunctionsComponentsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.FunctionsComponentsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def numericOperatorsComponent(self): @@ -3500,11 +3717,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitNumericFunctionsComponents"): listener.exitNumericFunctionsComponents(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitNumericFunctionsComponents"): + return visitor.visitNumericFunctionsComponents(self) + else: + return visitor.visitChildren(self) + class StringFunctionsComponentsContext(FunctionsComponentsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.FunctionsComponentsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def stringOperatorsComponent(self): @@ -3518,11 +3741,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitStringFunctionsComponents"): listener.exitStringFunctionsComponents(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitStringFunctionsComponents"): + return visitor.visitStringFunctionsComponents(self) + else: + return visitor.visitChildren(self) + class ComparisonFunctionsComponentsContext(FunctionsComponentsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.FunctionsComponentsContext - super().__init__(ANTLRParser) + 
super().__init__(parser) self.copyFrom(ctx) def comparisonOperatorsComponent(self): @@ -3536,11 +3765,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitComparisonFunctionsComponents"): listener.exitComparisonFunctionsComponents(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitComparisonFunctionsComponents"): + return visitor.visitComparisonFunctionsComponents(self) + else: + return visitor.visitChildren(self) + class TimeFunctionsComponentsContext(FunctionsComponentsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.FunctionsComponentsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def timeOperatorsComponent(self): @@ -3554,11 +3789,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitTimeFunctionsComponents"): listener.exitTimeFunctionsComponents(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitTimeFunctionsComponents"): + return visitor.visitTimeFunctionsComponents(self) + else: + return visitor.visitChildren(self) + class GenericFunctionsComponentsContext(FunctionsComponentsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.FunctionsComponentsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def genericOperatorsComponent(self): @@ -3572,11 +3813,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitGenericFunctionsComponents"): listener.exitGenericFunctionsComponents(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitGenericFunctionsComponents"): + return visitor.visitGenericFunctionsComponents(self) + else: + return visitor.visitChildren(self) + class AnalyticFunctionsComponentsContext(FunctionsComponentsContext): def __init__( - self, ANTLRParser, ctx: 
ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.FunctionsComponentsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def anFunctionComponent(self): @@ -3590,11 +3837,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAnalyticFunctionsComponents"): listener.exitAnalyticFunctionsComponents(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAnalyticFunctionsComponents"): + return visitor.visitAnalyticFunctionsComponents(self) + else: + return visitor.visitChildren(self) + class ConditionalFunctionsComponentsContext(FunctionsComponentsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.FunctionsComponentsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def conditionalOperatorsComponent(self): @@ -3608,11 +3861,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitConditionalFunctionsComponents"): listener.exitConditionalFunctionsComponents(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitConditionalFunctionsComponents"): + return visitor.visitConditionalFunctionsComponents(self) + else: + return visitor.visitChildren(self) + class AggregateFunctionsComponentsContext(FunctionsComponentsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.FunctionsComponentsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def aggrOperators(self): @@ -3626,66 +3885,73 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAggregateFunctionsComponents"): listener.exitAggregateFunctionsComponents(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAggregateFunctionsComponents"): + return 
visitor.visitAggregateFunctionsComponents(self) + else: + return visitor.visitChildren(self) + def functionsComponents(self): + localctx = Parser.FunctionsComponentsContext(self, self._ctx, self.state) self.enterRule(localctx, 8, self.RULE_functionsComponents) try: - self.state = 375 + self.state = 377 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 12, self._ctx) if la_ == 1: localctx = Parser.GenericFunctionsComponentsContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 367 + self.state = 369 self.genericOperatorsComponent() pass elif la_ == 2: localctx = Parser.StringFunctionsComponentsContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 368 + self.state = 370 self.stringOperatorsComponent() pass elif la_ == 3: localctx = Parser.NumericFunctionsComponentsContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 369 + self.state = 371 self.numericOperatorsComponent() pass elif la_ == 4: localctx = Parser.ComparisonFunctionsComponentsContext(self, localctx) self.enterOuterAlt(localctx, 4) - self.state = 370 + self.state = 372 self.comparisonOperatorsComponent() pass elif la_ == 5: localctx = Parser.TimeFunctionsComponentsContext(self, localctx) self.enterOuterAlt(localctx, 5) - self.state = 371 + self.state = 373 self.timeOperatorsComponent() pass elif la_ == 6: localctx = Parser.ConditionalFunctionsComponentsContext(self, localctx) self.enterOuterAlt(localctx, 6) - self.state = 372 + self.state = 374 self.conditionalOperatorsComponent() pass elif la_ == 7: localctx = Parser.AggregateFunctionsComponentsContext(self, localctx) self.enterOuterAlt(localctx, 7) - self.state = 373 + self.state = 375 self.aggrOperators() pass elif la_ == 8: localctx = Parser.AnalyticFunctionsComponentsContext(self, localctx) self.enterOuterAlt(localctx, 8) - self.state = 374 + self.state = 376 self.anFunctionComponent() pass @@ -3698,11 +3964,11 @@ def functionsComponents(self): return localctx class 
FunctionsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_functions @@ -3711,10 +3977,8 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class HierarchyFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def hierarchyOperators(self): @@ -3728,11 +3992,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitHierarchyFunctions"): listener.exitHierarchyFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitHierarchyFunctions"): + return visitor.visitHierarchyFunctions(self) + else: + return visitor.visitChildren(self) + class StringFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def stringOperators(self): @@ -3746,11 +4014,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitStringFunctions"): listener.exitStringFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitStringFunctions"): + return visitor.visitStringFunctions(self) + else: + return visitor.visitChildren(self) + class ValidationFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: 
ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def validationOperators(self): @@ -3764,11 +4036,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitValidationFunctions"): listener.exitValidationFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitValidationFunctions"): + return visitor.visitValidationFunctions(self) + else: + return visitor.visitChildren(self) + class GenericFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def genericOperators(self): @@ -3782,11 +4058,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitGenericFunctions"): listener.exitGenericFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitGenericFunctions"): + return visitor.visitGenericFunctions(self) + else: + return visitor.visitChildren(self) + class ConditionalFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def conditionalOperators(self): @@ -3800,11 +4080,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitConditionalFunctions"): listener.exitConditionalFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitConditionalFunctions"): + return visitor.visitConditionalFunctions(self) + else: + return 
visitor.visitChildren(self) + class AggregateFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def aggrOperatorsGrouping(self): @@ -3818,11 +4102,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAggregateFunctions"): listener.exitAggregateFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAggregateFunctions"): + return visitor.visitAggregateFunctions(self) + else: + return visitor.visitChildren(self) + class JoinFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def joinOperators(self): @@ -3836,11 +4124,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitJoinFunctions"): listener.exitJoinFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitJoinFunctions"): + return visitor.visitJoinFunctions(self) + else: + return visitor.visitChildren(self) + class ComparisonFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def comparisonOperators(self): @@ -3854,11 +4146,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitComparisonFunctions"): listener.exitComparisonFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, 
"visitComparisonFunctions"): + return visitor.visitComparisonFunctions(self) + else: + return visitor.visitChildren(self) + class NumericFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def numericOperators(self): @@ -3872,11 +4168,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitNumericFunctions"): listener.exitNumericFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitNumericFunctions"): + return visitor.visitNumericFunctions(self) + else: + return visitor.visitChildren(self) + class TimeFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def timeOperators(self): @@ -3890,11 +4190,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitTimeFunctions"): listener.exitTimeFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitTimeFunctions"): + return visitor.visitTimeFunctions(self) + else: + return visitor.visitChildren(self) + class SetFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def setOperators(self): @@ -3908,11 +4212,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitSetFunctions"): listener.exitSetFunctions(self) + def accept(self, 
visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSetFunctions"): + return visitor.visitSetFunctions(self) + else: + return visitor.visitChildren(self) + class AnalyticFunctionsContext(FunctionsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.FunctionsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.FunctionsContext + super().__init__(parser) self.copyFrom(ctx) def anFunction(self): @@ -3926,94 +4234,101 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAnalyticFunctions"): listener.exitAnalyticFunctions(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAnalyticFunctions"): + return visitor.visitAnalyticFunctions(self) + else: + return visitor.visitChildren(self) + def functions(self): + localctx = Parser.FunctionsContext(self, self._ctx, self.state) self.enterRule(localctx, 10, self.RULE_functions) try: - self.state = 389 + self.state = 391 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 13, self._ctx) if la_ == 1: localctx = Parser.JoinFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 377 + self.state = 379 self.joinOperators() pass elif la_ == 2: localctx = Parser.GenericFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 378 + self.state = 380 self.genericOperators() pass elif la_ == 3: localctx = Parser.StringFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 379 + self.state = 381 self.stringOperators() pass elif la_ == 4: localctx = Parser.NumericFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 4) - self.state = 380 + self.state = 382 self.numericOperators() pass elif la_ == 5: localctx = Parser.ComparisonFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 5) - self.state = 381 + self.state = 383 self.comparisonOperators() pass elif la_ == 6: 
localctx = Parser.TimeFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 6) - self.state = 382 + self.state = 384 self.timeOperators() pass elif la_ == 7: localctx = Parser.SetFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 7) - self.state = 383 + self.state = 385 self.setOperators() pass elif la_ == 8: localctx = Parser.HierarchyFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 8) - self.state = 384 + self.state = 386 self.hierarchyOperators() pass elif la_ == 9: localctx = Parser.ValidationFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 9) - self.state = 385 + self.state = 387 self.validationOperators() pass elif la_ == 10: localctx = Parser.ConditionalFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 10) - self.state = 386 + self.state = 388 self.conditionalOperators() pass elif la_ == 11: localctx = Parser.AggregateFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 11) - self.state = 387 + self.state = 389 self.aggrOperatorsGrouping() pass elif la_ == 12: localctx = Parser.AnalyticFunctionsContext(self, localctx) self.enterOuterAlt(localctx, 12) - self.state = 388 + self.state = 390 self.anFunction() pass @@ -4026,11 +4341,11 @@ def functions(self): return localctx class DatasetClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def renameClause(self): return self.getTypedRuleContext(Parser.RenameClauseContext, 0) @@ -4064,46 +4379,53 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDatasetClause"): listener.exitDatasetClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDatasetClause"): + return 
visitor.visitDatasetClause(self) + else: + return visitor.visitChildren(self) + def datasetClause(self): + localctx = Parser.DatasetClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 12, self.RULE_datasetClause) try: - self.state = 398 + self.state = 400 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.RENAME]: self.enterOuterAlt(localctx, 1) - self.state = 391 + self.state = 393 self.renameClause() pass elif token in [Parser.AGGREGATE]: self.enterOuterAlt(localctx, 2) - self.state = 392 + self.state = 394 self.aggrClause() pass elif token in [Parser.FILTER]: self.enterOuterAlt(localctx, 3) - self.state = 393 + self.state = 395 self.filterClause() pass elif token in [Parser.CALC]: self.enterOuterAlt(localctx, 4) - self.state = 394 + self.state = 396 self.calcClause() pass elif token in [Parser.DROP, Parser.KEEP]: self.enterOuterAlt(localctx, 5) - self.state = 395 + self.state = 397 self.keepOrDropClause() pass elif token in [Parser.PIVOT, Parser.UNPIVOT]: self.enterOuterAlt(localctx, 6) - self.state = 396 + self.state = 398 self.pivotOrUnpivotClause() pass elif token in [Parser.SUBSPACE]: self.enterOuterAlt(localctx, 7) - self.state = 397 + self.state = 399 self.subspaceClause() pass else: @@ -4118,11 +4440,11 @@ def datasetClause(self): return localctx class RenameClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def RENAME(self): return self.getToken(Parser.RENAME, 0) @@ -4150,25 +4472,32 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRenameClause"): listener.exitRenameClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRenameClause"): + return 
visitor.visitRenameClause(self) + else: + return visitor.visitChildren(self) + def renameClause(self): + localctx = Parser.RenameClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 14, self.RULE_renameClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 400 + self.state = 402 self.match(Parser.RENAME) - self.state = 401 + self.state = 403 self.renameClauseItem() - self.state = 406 + self.state = 408 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 402 + self.state = 404 self.match(Parser.COMMA) - self.state = 403 + self.state = 405 self.renameClauseItem() - self.state = 408 + self.state = 410 self._errHandler.sync(self) _la = self._input.LA(1) @@ -4181,11 +4510,11 @@ def renameClause(self): return localctx class AggrClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def AGGREGATE(self): return self.getToken(Parser.AGGREGATE, 0) @@ -4210,27 +4539,34 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAggrClause"): listener.exitAggrClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAggrClause"): + return visitor.visitAggrClause(self) + else: + return visitor.visitChildren(self) + def aggrClause(self): + localctx = Parser.AggrClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 16, self.RULE_aggrClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 409 + self.state = 411 self.match(Parser.AGGREGATE) - self.state = 410 + self.state = 412 self.aggregateClause() - self.state = 415 + self.state = 417 self._errHandler.sync(self) _la = self._input.LA(1) if _la == 
Parser.GROUP: - self.state = 411 - self.groupingClause() self.state = 413 + self.groupingClause() + self.state = 415 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.HAVING: - self.state = 412 + self.state = 414 self.havingClause() except RecognitionException as re: @@ -4242,11 +4578,11 @@ def aggrClause(self): return localctx class FilterClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def FILTER(self): return self.getToken(Parser.FILTER, 0) @@ -4265,14 +4601,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitFilterClause"): listener.exitFilterClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitFilterClause"): + return visitor.visitFilterClause(self) + else: + return visitor.visitChildren(self) + def filterClause(self): + localctx = Parser.FilterClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 18, self.RULE_filterClause) try: self.enterOuterAlt(localctx, 1) - self.state = 417 + self.state = 419 self.match(Parser.FILTER) - self.state = 418 + self.state = 420 self.exprComponent(0) except RecognitionException as re: localctx.exception = re @@ -4283,11 +4626,11 @@ def filterClause(self): return localctx class CalcClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def CALC(self): return self.getToken(Parser.CALC, 0) @@ -4315,25 +4658,32 @@ def 
exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCalcClause"): listener.exitCalcClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCalcClause"): + return visitor.visitCalcClause(self) + else: + return visitor.visitChildren(self) + def calcClause(self): + localctx = Parser.CalcClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 20, self.RULE_calcClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 420 + self.state = 422 self.match(Parser.CALC) - self.state = 421 + self.state = 423 self.calcClauseItem() - self.state = 426 + self.state = 428 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 422 + self.state = 424 self.match(Parser.COMMA) - self.state = 423 + self.state = 425 self.calcClauseItem() - self.state = 428 + self.state = 430 self._errHandler.sync(self) _la = self._input.LA(1) @@ -4346,11 +4696,11 @@ def calcClause(self): return localctx class KeepOrDropClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser self.op = None # Token def componentID(self, i: int = None): @@ -4382,13 +4732,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitKeepOrDropClause"): listener.exitKeepOrDropClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitKeepOrDropClause"): + return visitor.visitKeepOrDropClause(self) + else: + return visitor.visitChildren(self) + def keepOrDropClause(self): + localctx = Parser.KeepOrDropClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 22, self.RULE_keepOrDropClause) self._la = 0 # Token type try: 
self.enterOuterAlt(localctx, 1) - self.state = 429 + self.state = 431 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.DROP or _la == Parser.KEEP): @@ -4396,17 +4753,17 @@ def keepOrDropClause(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 430 + self.state = 432 self.componentID() - self.state = 435 + self.state = 437 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 431 + self.state = 433 self.match(Parser.COMMA) - self.state = 432 + self.state = 434 self.componentID() - self.state = 437 + self.state = 439 self._errHandler.sync(self) _la = self._input.LA(1) @@ -4419,11 +4776,11 @@ def keepOrDropClause(self): return localctx class PivotOrUnpivotClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser self.op = None # Token self.id_ = None # ComponentIDContext self.mea = None # ComponentIDContext @@ -4454,13 +4811,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitPivotOrUnpivotClause"): listener.exitPivotOrUnpivotClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitPivotOrUnpivotClause"): + return visitor.visitPivotOrUnpivotClause(self) + else: + return visitor.visitChildren(self) + def pivotOrUnpivotClause(self): + localctx = Parser.PivotOrUnpivotClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 24, self.RULE_pivotOrUnpivotClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 438 + self.state = 440 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.PIVOT or _la == Parser.UNPIVOT): @@ -4468,11 +4832,11 @@ def 
pivotOrUnpivotClause(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 439 + self.state = 441 localctx.id_ = self.componentID() - self.state = 440 + self.state = 442 self.match(Parser.COMMA) - self.state = 441 + self.state = 443 localctx.mea = self.componentID() except RecognitionException as re: localctx.exception = re @@ -4483,11 +4847,11 @@ def pivotOrUnpivotClause(self): return localctx class CustomPivotClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser self.id_ = None # ComponentIDContext self.mea = None # ComponentIDContext @@ -4526,33 +4890,40 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCustomPivotClause"): listener.exitCustomPivotClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCustomPivotClause"): + return visitor.visitCustomPivotClause(self) + else: + return visitor.visitChildren(self) + def customPivotClause(self): + localctx = Parser.CustomPivotClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 26, self.RULE_customPivotClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 443 + self.state = 445 self.match(Parser.CUSTOMPIVOT) - self.state = 444 + self.state = 446 localctx.id_ = self.componentID() - self.state = 445 + self.state = 447 self.match(Parser.COMMA) - self.state = 446 + self.state = 448 localctx.mea = self.componentID() - self.state = 447 + self.state = 449 self.match(Parser.IN) - self.state = 448 + self.state = 450 self.constant() - self.state = 453 + self.state = 455 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 449 + self.state = 451 
self.match(Parser.COMMA) - self.state = 450 + self.state = 452 self.constant() - self.state = 455 + self.state = 457 self._errHandler.sync(self) _la = self._input.LA(1) @@ -4565,11 +4936,11 @@ def customPivotClause(self): return localctx class SubspaceClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def SUBSPACE(self): return self.getToken(Parser.SUBSPACE, 0) @@ -4597,25 +4968,32 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitSubspaceClause"): listener.exitSubspaceClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSubspaceClause"): + return visitor.visitSubspaceClause(self) + else: + return visitor.visitChildren(self) + def subspaceClause(self): + localctx = Parser.SubspaceClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 28, self.RULE_subspaceClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 456 + self.state = 458 self.match(Parser.SUBSPACE) - self.state = 457 + self.state = 459 self.subspaceClauseItem() - self.state = 462 + self.state = 464 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 458 + self.state = 460 self.match(Parser.COMMA) - self.state = 459 + self.state = 461 self.subspaceClauseItem() - self.state = 464 + self.state = 466 self._errHandler.sync(self) _la = self._input.LA(1) @@ -4628,11 +5006,11 @@ def subspaceClause(self): return localctx class JoinOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: 
ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_joinOperators @@ -4642,9 +5020,9 @@ def copyFrom(self, ctx: ParserRuleContext): class JoinExprContext(JoinOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.JoinOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.joinKeyword = None # Token self.copyFrom(ctx) @@ -4683,18 +5061,25 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitJoinExpr"): listener.exitJoinExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitJoinExpr"): + return visitor.visitJoinExpr(self) + else: + return visitor.visitChildren(self) + def joinOperators(self): + localctx = Parser.JoinOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_joinOperators) self._la = 0 # Token type try: - self.state = 477 + self.state = 479 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.INNER_JOIN, Parser.LEFT_JOIN]: localctx = Parser.JoinExprContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 465 + self.state = 467 localctx.joinKeyword = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.INNER_JOIN or _la == Parser.LEFT_JOIN): @@ -4702,19 +5087,19 @@ def joinOperators(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 466 + self.state = 468 self.match(Parser.LPAREN) - self.state = 467 + self.state = 469 self.joinClause() - self.state = 468 + self.state = 470 self.joinBody() - self.state = 469 + self.state = 471 self.match(Parser.RPAREN) pass elif token in [Parser.CROSS_JOIN, Parser.FULL_JOIN]: localctx = Parser.JoinExprContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 471 + self.state = 473 localctx.joinKeyword = 
self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.CROSS_JOIN or _la == Parser.FULL_JOIN): @@ -4722,13 +5107,13 @@ def joinOperators(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 472 + self.state = 474 self.match(Parser.LPAREN) - self.state = 473 + self.state = 475 self.joinClauseWithoutUsing() - self.state = 474 + self.state = 476 self.joinBody() - self.state = 475 + self.state = 477 self.match(Parser.RPAREN) pass else: @@ -4743,11 +5128,11 @@ def joinOperators(self): return localctx class DefOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_defOperators @@ -4756,10 +5141,8 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class DefOperatorContext(DefOperatorsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.DefOperatorsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.DefOperatorsContext + super().__init__(parser) self.copyFrom(ctx) def DEFINE(self): @@ -4815,11 +5198,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDefOperator"): listener.exitDefOperator(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDefOperator"): + return visitor.visitDefOperator(self) + else: + return visitor.visitChildren(self) + class DefHierarchicalContext(DefOperatorsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.DefOperatorsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a 
Parser.DefOperatorsContext + super().__init__(parser) self.copyFrom(ctx) def DEFINE(self): @@ -4866,11 +5253,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDefHierarchical"): listener.exitDefHierarchical(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDefHierarchical"): + return visitor.visitDefHierarchical(self) + else: + return visitor.visitChildren(self) + class DefDatapointRulesetContext(DefOperatorsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.DefOperatorsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.DefOperatorsContext + super().__init__(parser) self.copyFrom(ctx) def DEFINE(self): @@ -4917,120 +5308,127 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDefDatapointRuleset"): listener.exitDefDatapointRuleset(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDefDatapointRuleset"): + return visitor.visitDefDatapointRuleset(self) + else: + return visitor.visitChildren(self) + def defOperators(self): + localctx = Parser.DefOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_defOperators) self._la = 0 # Token type try: - self.state = 529 + self.state = 531 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 26, self._ctx) if la_ == 1: localctx = Parser.DefOperatorContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 479 + self.state = 481 self.match(Parser.DEFINE) - self.state = 480 + self.state = 482 self.match(Parser.OPERATOR) - self.state = 481 + self.state = 483 self.operatorID() - self.state = 482 + self.state = 484 self.match(Parser.LPAREN) - self.state = 491 + self.state = 493 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.IDENTIFIER: - self.state = 483 + self.state = 485 self.parameterItem() - self.state = 488 + self.state 
= 490 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 484 + self.state = 486 self.match(Parser.COMMA) - self.state = 485 + self.state = 487 self.parameterItem() - self.state = 490 + self.state = 492 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 493 + self.state = 495 self.match(Parser.RPAREN) - self.state = 496 + self.state = 498 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.RETURNS: - self.state = 494 + self.state = 496 self.match(Parser.RETURNS) - self.state = 495 + self.state = 497 self.outputParameterType() - self.state = 498 + self.state = 500 self.match(Parser.IS) - self.state = 499 + self.state = 501 self.expr(0) - self.state = 500 + self.state = 502 self.match(Parser.END) - self.state = 501 + self.state = 503 self.match(Parser.OPERATOR) pass elif la_ == 2: localctx = Parser.DefDatapointRulesetContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 503 + self.state = 505 self.match(Parser.DEFINE) - self.state = 504 + self.state = 506 self.match(Parser.DATAPOINT) - self.state = 505 + self.state = 507 self.match(Parser.RULESET) - self.state = 506 + self.state = 508 self.rulesetID() - self.state = 507 + self.state = 509 self.match(Parser.LPAREN) - self.state = 508 + self.state = 510 self.rulesetSignature() - self.state = 509 + self.state = 511 self.match(Parser.RPAREN) - self.state = 510 + self.state = 512 self.match(Parser.IS) - self.state = 511 + self.state = 513 self.ruleClauseDatapoint() - self.state = 512 + self.state = 514 self.match(Parser.END) - self.state = 513 + self.state = 515 self.match(Parser.DATAPOINT) - self.state = 514 + self.state = 516 self.match(Parser.RULESET) pass elif la_ == 3: localctx = Parser.DefHierarchicalContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 516 + self.state = 518 self.match(Parser.DEFINE) - self.state = 517 + self.state = 519 self.match(Parser.HIERARCHICAL) - self.state = 518 + self.state = 520 
self.match(Parser.RULESET) - self.state = 519 + self.state = 521 self.rulesetID() - self.state = 520 + self.state = 522 self.match(Parser.LPAREN) - self.state = 521 + self.state = 523 self.hierRuleSignature() - self.state = 522 + self.state = 524 self.match(Parser.RPAREN) - self.state = 523 + self.state = 525 self.match(Parser.IS) - self.state = 524 + self.state = 526 self.ruleClauseHierarchical() - self.state = 525 + self.state = 527 self.match(Parser.END) - self.state = 526 + self.state = 528 self.match(Parser.HIERARCHICAL) - self.state = 527 + self.state = 529 self.match(Parser.RULESET) pass @@ -5043,11 +5441,11 @@ def defOperators(self): return localctx class GenericOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_genericOperators @@ -5057,9 +5455,9 @@ def copyFrom(self, ctx: ParserRuleContext): class EvalAtomContext(GenericOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.GenericOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def EVAL(self): @@ -5118,11 +5516,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitEvalAtom"): listener.exitEvalAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitEvalAtom"): + return visitor.visitEvalAtom(self) + else: + return visitor.visitChildren(self) + class CastExprDatasetContext(GenericOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.GenericOperatorsContext - super().__init__(ANTLRParser) 
+ super().__init__(parser) self.copyFrom(ctx) def CAST(self): @@ -5160,11 +5564,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCastExprDataset"): listener.exitCastExprDataset(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCastExprDataset"): + return visitor.visitCastExprDataset(self) + else: + return visitor.visitChildren(self) + class CallDatasetContext(GenericOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.GenericOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def operatorID(self): @@ -5196,22 +5606,29 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCallDataset"): listener.exitCallDataset(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCallDataset"): + return visitor.visitCallDataset(self) + else: + return visitor.visitChildren(self) + def genericOperators(self): + localctx = Parser.GenericOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 34, self.RULE_genericOperators) self._la = 0 # Token type try: - self.state = 588 + self.state = 590 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.IDENTIFIER]: localctx = Parser.CallDatasetContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 531 + self.state = 533 self.operatorID() - self.state = 532 + self.state = 534 self.match(Parser.LPAREN) - self.state = 541 + self.state = 543 self._errHandler.sync(self) _la = self._input.LA(1) if ( @@ -5337,42 +5754,44 @@ def genericOperators(self): != 0 ) ): - self.state = 533 + self.state = 535 self.parameter() - self.state = 538 + self.state = 540 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 534 + self.state = 536 self.match(Parser.COMMA) - self.state = 535 + self.state = 537 self.parameter() - self.state = 
540 + self.state = 542 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 543 + self.state = 545 self.match(Parser.RPAREN) pass elif token in [Parser.EVAL]: localctx = Parser.EvalAtomContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 545 + self.state = 547 self.match(Parser.EVAL) - self.state = 546 + self.state = 548 self.match(Parser.LPAREN) - self.state = 547 + self.state = 549 self.routineName() - self.state = 548 + self.state = 550 self.match(Parser.LPAREN) - self.state = 551 + self.state = 553 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.IDENTIFIER]: - self.state = 549 + self.state = 551 self.varID() pass elif token in [ + Parser.PLUS, + Parser.MINUS, Parser.NULL_CONSTANT, Parser.CAST, Parser.INTEGER_CONSTANT, @@ -5380,27 +5799,29 @@ def genericOperators(self): Parser.BOOLEAN_CONSTANT, Parser.STRING_CONSTANT, ]: - self.state = 550 + self.state = 552 self.scalarItem() pass elif token in [Parser.RPAREN, Parser.COMMA]: pass else: pass - self.state = 560 + self.state = 562 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 553 + self.state = 555 self.match(Parser.COMMA) - self.state = 556 + self.state = 558 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.IDENTIFIER]: - self.state = 554 + self.state = 556 self.varID() pass elif token in [ + Parser.PLUS, + Parser.MINUS, Parser.NULL_CONSTANT, Parser.CAST, Parser.INTEGER_CONSTANT, @@ -5408,51 +5829,51 @@ def genericOperators(self): Parser.BOOLEAN_CONSTANT, Parser.STRING_CONSTANT, ]: - self.state = 555 + self.state = 557 self.scalarItem() pass else: raise NoViableAltException(self) - self.state = 562 + self.state = 564 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 563 + self.state = 565 self.match(Parser.RPAREN) - self.state = 566 + self.state = 568 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.LANGUAGE: - self.state = 564 + self.state = 566 
self.match(Parser.LANGUAGE) - self.state = 565 + self.state = 567 self.match(Parser.STRING_CONSTANT) - self.state = 570 + self.state = 572 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.RETURNS: - self.state = 568 + self.state = 570 self.match(Parser.RETURNS) - self.state = 569 + self.state = 571 self.evalDatasetType() - self.state = 572 + self.state = 574 self.match(Parser.RPAREN) pass elif token in [Parser.CAST]: localctx = Parser.CastExprDatasetContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 574 + self.state = 576 self.match(Parser.CAST) - self.state = 575 + self.state = 577 self.match(Parser.LPAREN) - self.state = 576 + self.state = 578 self.expr(0) - self.state = 577 + self.state = 579 self.match(Parser.COMMA) - self.state = 580 + self.state = 582 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -5466,26 +5887,26 @@ def genericOperators(self): Parser.DURATION, Parser.SCALAR, ]: - self.state = 578 + self.state = 580 self.basicScalarType() pass elif token in [Parser.IDENTIFIER]: - self.state = 579 + self.state = 581 self.valueDomainName() pass else: raise NoViableAltException(self) - self.state = 584 + self.state = 586 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 582 + self.state = 584 self.match(Parser.COMMA) - self.state = 583 + self.state = 585 self.match(Parser.STRING_CONSTANT) - self.state = 586 + self.state = 588 self.match(Parser.RPAREN) pass else: @@ -5500,11 +5921,11 @@ def genericOperators(self): return localctx class GenericOperatorsComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return 
Parser.RULE_genericOperatorsComponent @@ -5514,9 +5935,9 @@ def copyFrom(self, ctx: ParserRuleContext): class EvalAtomComponentContext(GenericOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.GenericOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def EVAL(self): @@ -5575,11 +5996,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitEvalAtomComponent"): listener.exitEvalAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitEvalAtomComponent"): + return visitor.visitEvalAtomComponent(self) + else: + return visitor.visitChildren(self) + class CastExprComponentContext(GenericOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.GenericOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def CAST(self): @@ -5617,11 +6044,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCastExprComponent"): listener.exitCastExprComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCastExprComponent"): + return visitor.visitCastExprComponent(self) + else: + return visitor.visitChildren(self) + class CallComponentContext(GenericOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.GenericOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def operatorID(self): @@ -5653,22 +6086,29 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCallComponent"): listener.exitCallComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCallComponent"): + return 
visitor.visitCallComponent(self) + else: + return visitor.visitChildren(self) + def genericOperatorsComponent(self): + localctx = Parser.GenericOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 36, self.RULE_genericOperatorsComponent) self._la = 0 # Token type try: - self.state = 647 + self.state = 649 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.IDENTIFIER]: localctx = Parser.CallComponentContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 590 + self.state = 592 self.operatorID() - self.state = 591 + self.state = 593 self.match(Parser.LPAREN) - self.state = 600 + self.state = 602 self._errHandler.sync(self) _la = self._input.LA(1) if ( @@ -5782,35 +6222,35 @@ def genericOperatorsComponent(self): != 0 ) ): - self.state = 592 + self.state = 594 self.parameterComponent() - self.state = 597 + self.state = 599 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 593 + self.state = 595 self.match(Parser.COMMA) - self.state = 594 + self.state = 596 self.parameterComponent() - self.state = 599 + self.state = 601 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 602 + self.state = 604 self.match(Parser.RPAREN) pass elif token in [Parser.CAST]: localctx = Parser.CastExprComponentContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 604 + self.state = 606 self.match(Parser.CAST) - self.state = 605 + self.state = 607 self.match(Parser.LPAREN) - self.state = 606 + self.state = 608 self.exprComponent(0) - self.state = 607 + self.state = 609 self.match(Parser.COMMA) - self.state = 610 + self.state = 612 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -5824,47 +6264,49 @@ def genericOperatorsComponent(self): Parser.DURATION, Parser.SCALAR, ]: - self.state = 608 + self.state = 610 self.basicScalarType() pass elif token in [Parser.IDENTIFIER]: - self.state = 609 + self.state = 611 self.valueDomainName() pass 
else: raise NoViableAltException(self) - self.state = 614 + self.state = 616 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 612 + self.state = 614 self.match(Parser.COMMA) - self.state = 613 + self.state = 615 self.match(Parser.STRING_CONSTANT) - self.state = 616 + self.state = 618 self.match(Parser.RPAREN) pass elif token in [Parser.EVAL]: localctx = Parser.EvalAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 618 + self.state = 620 self.match(Parser.EVAL) - self.state = 619 + self.state = 621 self.match(Parser.LPAREN) - self.state = 620 + self.state = 622 self.routineName() - self.state = 621 + self.state = 623 self.match(Parser.LPAREN) - self.state = 624 + self.state = 626 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.IDENTIFIER]: - self.state = 622 + self.state = 624 self.componentID() pass elif token in [ + Parser.PLUS, + Parser.MINUS, Parser.NULL_CONSTANT, Parser.CAST, Parser.INTEGER_CONSTANT, @@ -5872,27 +6314,29 @@ def genericOperatorsComponent(self): Parser.BOOLEAN_CONSTANT, Parser.STRING_CONSTANT, ]: - self.state = 623 + self.state = 625 self.scalarItem() pass elif token in [Parser.RPAREN, Parser.COMMA]: pass else: pass - self.state = 633 + self.state = 635 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 626 + self.state = 628 self.match(Parser.COMMA) - self.state = 629 + self.state = 631 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.IDENTIFIER]: - self.state = 627 + self.state = 629 self.componentID() pass elif token in [ + Parser.PLUS, + Parser.MINUS, Parser.NULL_CONSTANT, Parser.CAST, Parser.INTEGER_CONSTANT, @@ -5900,37 +6344,37 @@ def genericOperatorsComponent(self): Parser.BOOLEAN_CONSTANT, Parser.STRING_CONSTANT, ]: - self.state = 628 + self.state = 630 self.scalarItem() pass else: raise NoViableAltException(self) - self.state = 635 + self.state = 637 
self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 636 + self.state = 638 self.match(Parser.RPAREN) - self.state = 639 + self.state = 641 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.LANGUAGE: - self.state = 637 + self.state = 639 self.match(Parser.LANGUAGE) - self.state = 638 + self.state = 640 self.match(Parser.STRING_CONSTANT) - self.state = 643 + self.state = 645 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.RETURNS: - self.state = 641 + self.state = 643 self.match(Parser.RETURNS) - self.state = 642 + self.state = 644 self.outputParameterTypeComponent() - self.state = 645 + self.state = 647 self.match(Parser.RPAREN) pass else: @@ -5945,11 +6389,11 @@ def genericOperatorsComponent(self): return localctx class ParameterComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def exprComponent(self): return self.getTypedRuleContext(Parser.ExprComponentContext, 0) @@ -5968,11 +6412,18 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitParameterComponent"): listener.exitParameterComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitParameterComponent"): + return visitor.visitParameterComponent(self) + else: + return visitor.visitChildren(self) + def parameterComponent(self): + localctx = Parser.ParameterComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 38, self.RULE_parameterComponent) try: - self.state = 651 + self.state = 653 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -6050,12 +6501,12 @@ def parameterComponent(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 1) - self.state = 
649 + self.state = 651 self.exprComponent(0) pass elif token in [Parser.OPTIONAL]: self.enterOuterAlt(localctx, 2) - self.state = 650 + self.state = 652 self.match(Parser.OPTIONAL) pass else: @@ -6070,11 +6521,11 @@ def parameterComponent(self): return localctx class ParameterContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def expr(self): return self.getTypedRuleContext(Parser.ExprContext, 0) @@ -6093,11 +6544,18 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitParameter"): listener.exitParameter(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitParameter"): + return visitor.visitParameter(self) + else: + return visitor.visitChildren(self) + def parameter(self): + localctx = Parser.ParameterContext(self, self._ctx, self.state) self.enterRule(localctx, 40, self.RULE_parameter) try: - self.state = 655 + self.state = 657 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -6187,12 +6645,12 @@ def parameter(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 1) - self.state = 653 + self.state = 655 self.expr(0) pass elif token in [Parser.OPTIONAL]: self.enterOuterAlt(localctx, 2) - self.state = 654 + self.state = 656 self.match(Parser.OPTIONAL) pass else: @@ -6207,11 +6665,11 @@ def parameter(self): return localctx class StringOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = 
parser def getRuleIndex(self): return Parser.RULE_stringOperators @@ -6221,9 +6679,9 @@ def copyFrom(self, ctx: ParserRuleContext): class InstrAtomContext(StringOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.StringOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.pattern = None # ExprContext self.startParameter = None # OptionalExprContext self.occurrenceParameter = None # OptionalExprContext @@ -6264,11 +6722,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitInstrAtom"): listener.exitInstrAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitInstrAtom"): + return visitor.visitInstrAtom(self) + else: + return visitor.visitChildren(self) + class UnaryStringFunctionContext(StringOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.StringOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.copyFrom(ctx) @@ -6307,11 +6771,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitUnaryStringFunction"): listener.exitUnaryStringFunction(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitUnaryStringFunction"): + return visitor.visitUnaryStringFunction(self) + else: + return visitor.visitChildren(self) + class SubstrAtomContext(StringOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.StringOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.startParameter = None # OptionalExprContext self.endParameter = None # OptionalExprContext self.copyFrom(ctx) @@ -6348,11 +6818,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitSubstrAtom"): listener.exitSubstrAtom(self) + 
def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSubstrAtom"): + return visitor.visitSubstrAtom(self) + else: + return visitor.visitChildren(self) + class ReplaceAtomContext(StringOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.StringOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.param = None # ExprContext self.copyFrom(ctx) @@ -6388,12 +6864,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitReplaceAtom"): listener.exitReplaceAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitReplaceAtom"): + return visitor.visitReplaceAtom(self) + else: + return visitor.visitChildren(self) + def stringOperators(self): + localctx = Parser.StringOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 42, self.RULE_stringOperators) self._la = 0 # Token type try: - self.state = 705 + self.state = 707 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -6406,7 +6889,7 @@ def stringOperators(self): ]: localctx = Parser.UnaryStringFunctionContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 657 + self.state = 659 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -6428,109 +6911,109 @@ def stringOperators(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 658 + self.state = 660 self.match(Parser.LPAREN) - self.state = 659 + self.state = 661 self.expr(0) - self.state = 660 + self.state = 662 self.match(Parser.RPAREN) pass elif token in [Parser.SUBSTR]: localctx = Parser.SubstrAtomContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 662 + self.state = 664 self.match(Parser.SUBSTR) - self.state = 663 + self.state = 665 self.match(Parser.LPAREN) - self.state = 664 + self.state = 666 self.expr(0) - self.state = 675 + self.state = 677 self._errHandler.sync(self) la_ = 
self._interp.adaptivePredict(self._input, 50, self._ctx) if la_ == 1: - self.state = 671 + self.state = 673 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 665 + self.state = 667 self.match(Parser.COMMA) - self.state = 666 + self.state = 668 localctx.startParameter = self.optionalExpr() - self.state = 668 + self.state = 670 self.match(Parser.COMMA) - self.state = 669 + self.state = 671 localctx.endParameter = self.optionalExpr() pass elif la_ == 2: - self.state = 673 + self.state = 675 self.match(Parser.COMMA) - self.state = 674 + self.state = 676 localctx.startParameter = self.optionalExpr() pass - self.state = 677 + self.state = 679 self.match(Parser.RPAREN) pass elif token in [Parser.REPLACE]: localctx = Parser.ReplaceAtomContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 679 + self.state = 681 self.match(Parser.REPLACE) - self.state = 680 + self.state = 682 self.match(Parser.LPAREN) - self.state = 681 + self.state = 683 self.expr(0) - self.state = 682 + self.state = 684 self.match(Parser.COMMA) - self.state = 683 + self.state = 685 localctx.param = self.expr(0) - self.state = 686 + self.state = 688 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 684 + self.state = 686 self.match(Parser.COMMA) - self.state = 685 + self.state = 687 self.optionalExpr() - self.state = 688 + self.state = 690 self.match(Parser.RPAREN) pass elif token in [Parser.INSTR]: localctx = Parser.InstrAtomContext(self, localctx) self.enterOuterAlt(localctx, 4) - self.state = 690 + self.state = 692 self.match(Parser.INSTR) - self.state = 691 + self.state = 693 self.match(Parser.LPAREN) - self.state = 692 + self.state = 694 self.expr(0) - self.state = 693 + self.state = 695 self.match(Parser.COMMA) - self.state = 694 + self.state = 696 localctx.pattern = self.expr(0) - self.state = 697 + self.state = 699 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 52, 
self._ctx) if la_ == 1: - self.state = 695 + self.state = 697 self.match(Parser.COMMA) - self.state = 696 + self.state = 698 localctx.startParameter = self.optionalExpr() - self.state = 701 + self.state = 703 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 699 + self.state = 701 self.match(Parser.COMMA) - self.state = 700 + self.state = 702 localctx.occurrenceParameter = self.optionalExpr() - self.state = 703 + self.state = 705 self.match(Parser.RPAREN) pass else: @@ -6545,11 +7028,11 @@ def stringOperators(self): return localctx class StringOperatorsComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_stringOperatorsComponent @@ -6559,9 +7042,9 @@ def copyFrom(self, ctx: ParserRuleContext): class ReplaceAtomComponentContext(StringOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.StringOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.param = None # ExprComponentContext self.copyFrom(ctx) @@ -6597,11 +7080,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitReplaceAtomComponent"): listener.exitReplaceAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitReplaceAtomComponent"): + return visitor.visitReplaceAtomComponent(self) + else: + return visitor.visitChildren(self) + class UnaryStringFunctionComponentContext(StringOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a 
Parser.StringOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.copyFrom(ctx) @@ -6640,11 +7129,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitUnaryStringFunctionComponent"): listener.exitUnaryStringFunctionComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitUnaryStringFunctionComponent"): + return visitor.visitUnaryStringFunctionComponent(self) + else: + return visitor.visitChildren(self) + class SubstrAtomComponentContext(StringOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.StringOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.startParameter = None # OptionalExprComponentContext self.endParameter = None # OptionalExprComponentContext self.copyFrom(ctx) @@ -6681,11 +7176,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitSubstrAtomComponent"): listener.exitSubstrAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSubstrAtomComponent"): + return visitor.visitSubstrAtomComponent(self) + else: + return visitor.visitChildren(self) + class InstrAtomComponentContext(StringOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.StringOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.pattern = None # ExprComponentContext self.startParameter = None # OptionalExprComponentContext self.occurrenceParameter = None # OptionalExprComponentContext @@ -6726,12 +7227,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitInstrAtomComponent"): listener.exitInstrAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitInstrAtomComponent"): + 
return visitor.visitInstrAtomComponent(self) + else: + return visitor.visitChildren(self) + def stringOperatorsComponent(self): + localctx = Parser.StringOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 44, self.RULE_stringOperatorsComponent) self._la = 0 # Token type try: - self.state = 755 + self.state = 757 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -6744,7 +7252,7 @@ def stringOperatorsComponent(self): ]: localctx = Parser.UnaryStringFunctionComponentContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 707 + self.state = 709 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -6766,109 +7274,109 @@ def stringOperatorsComponent(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 708 + self.state = 710 self.match(Parser.LPAREN) - self.state = 709 + self.state = 711 self.exprComponent(0) - self.state = 710 + self.state = 712 self.match(Parser.RPAREN) pass elif token in [Parser.SUBSTR]: localctx = Parser.SubstrAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 712 + self.state = 714 self.match(Parser.SUBSTR) - self.state = 713 + self.state = 715 self.match(Parser.LPAREN) - self.state = 714 + self.state = 716 self.exprComponent(0) - self.state = 725 + self.state = 727 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 56, self._ctx) if la_ == 1: - self.state = 721 + self.state = 723 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 715 + self.state = 717 self.match(Parser.COMMA) - self.state = 716 + self.state = 718 localctx.startParameter = self.optionalExprComponent() - self.state = 718 + self.state = 720 self.match(Parser.COMMA) - self.state = 719 + self.state = 721 localctx.endParameter = self.optionalExprComponent() pass elif la_ == 2: - self.state = 723 + self.state = 725 self.match(Parser.COMMA) - self.state = 724 + self.state = 726 
localctx.startParameter = self.optionalExprComponent() pass - self.state = 727 + self.state = 729 self.match(Parser.RPAREN) pass elif token in [Parser.REPLACE]: localctx = Parser.ReplaceAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 729 + self.state = 731 self.match(Parser.REPLACE) - self.state = 730 + self.state = 732 self.match(Parser.LPAREN) - self.state = 731 + self.state = 733 self.exprComponent(0) - self.state = 732 + self.state = 734 self.match(Parser.COMMA) - self.state = 733 + self.state = 735 localctx.param = self.exprComponent(0) - self.state = 736 + self.state = 738 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 734 + self.state = 736 self.match(Parser.COMMA) - self.state = 735 + self.state = 737 self.optionalExprComponent() - self.state = 738 + self.state = 740 self.match(Parser.RPAREN) pass elif token in [Parser.INSTR]: localctx = Parser.InstrAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 4) - self.state = 740 + self.state = 742 self.match(Parser.INSTR) - self.state = 741 + self.state = 743 self.match(Parser.LPAREN) - self.state = 742 + self.state = 744 self.exprComponent(0) - self.state = 743 + self.state = 745 self.match(Parser.COMMA) - self.state = 744 + self.state = 746 localctx.pattern = self.exprComponent(0) - self.state = 747 + self.state = 749 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 58, self._ctx) if la_ == 1: - self.state = 745 + self.state = 747 self.match(Parser.COMMA) - self.state = 746 + self.state = 748 localctx.startParameter = self.optionalExprComponent() - self.state = 751 + self.state = 753 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 749 + self.state = 751 self.match(Parser.COMMA) - self.state = 750 + self.state = 752 localctx.occurrenceParameter = self.optionalExprComponent() - self.state = 753 + self.state = 755 self.match(Parser.RPAREN) pass else: @@ 
-6883,11 +7391,11 @@ def stringOperatorsComponent(self): return localctx class NumericOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_numericOperators @@ -6897,9 +7405,9 @@ def copyFrom(self, ctx: ParserRuleContext): class UnaryNumericContext(NumericOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.NumericOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.copyFrom(ctx) @@ -6938,11 +7446,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitUnaryNumeric"): listener.exitUnaryNumeric(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitUnaryNumeric"): + return visitor.visitUnaryNumeric(self) + else: + return visitor.visitChildren(self) + class UnaryWithOptionalNumericContext(NumericOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.NumericOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.copyFrom(ctx) @@ -6975,11 +7489,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitUnaryWithOptionalNumeric"): listener.exitUnaryWithOptionalNumeric(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitUnaryWithOptionalNumeric"): + return visitor.visitUnaryWithOptionalNumeric(self) + else: + return visitor.visitChildren(self) + class BinaryNumericContext(NumericOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + 
self, parser, ctx: ParserRuleContext ): # actually a Parser.NumericOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.left = None # ExprContext self.right = None # ExprContext @@ -7020,18 +7540,25 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitBinaryNumeric"): listener.exitBinaryNumeric(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitBinaryNumeric"): + return visitor.visitBinaryNumeric(self) + else: + return visitor.visitChildren(self) + def numericOperators(self): + localctx = Parser.NumericOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 46, self.RULE_numericOperators) self._la = 0 # Token type try: - self.state = 778 + self.state = 780 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.ABS, Parser.LN, Parser.EXP, Parser.CEIL, Parser.FLOOR, Parser.SQRT]: localctx = Parser.UnaryNumericContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 757 + self.state = 759 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -7053,17 +7580,17 @@ def numericOperators(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 758 + self.state = 760 self.match(Parser.LPAREN) - self.state = 759 + self.state = 761 self.expr(0) - self.state = 760 + self.state = 762 self.match(Parser.RPAREN) pass elif token in [Parser.TRUNC, Parser.ROUND]: localctx = Parser.UnaryWithOptionalNumericContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 762 + self.state = 764 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.TRUNC or _la == Parser.ROUND): @@ -7071,26 +7598,26 @@ def numericOperators(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 763 + self.state = 765 self.match(Parser.LPAREN) - self.state = 764 + self.state = 766 self.expr(0) - self.state = 767 + self.state = 769 self._errHandler.sync(self) _la = 
self._input.LA(1) if _la == Parser.COMMA: - self.state = 765 + self.state = 767 self.match(Parser.COMMA) - self.state = 766 + self.state = 768 self.optionalExpr() - self.state = 769 + self.state = 771 self.match(Parser.RPAREN) pass elif token in [Parser.RANDOM, Parser.LOG, Parser.POWER, Parser.MOD]: localctx = Parser.BinaryNumericContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 771 + self.state = 773 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -7110,15 +7637,15 @@ def numericOperators(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 772 + self.state = 774 self.match(Parser.LPAREN) - self.state = 773 + self.state = 775 localctx.left = self.expr(0) - self.state = 774 + self.state = 776 self.match(Parser.COMMA) - self.state = 775 + self.state = 777 localctx.right = self.expr(0) - self.state = 776 + self.state = 778 self.match(Parser.RPAREN) pass else: @@ -7133,11 +7660,11 @@ def numericOperators(self): return localctx class NumericOperatorsComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_numericOperatorsComponent @@ -7147,9 +7674,9 @@ def copyFrom(self, ctx: ParserRuleContext): class UnaryNumericComponentContext(NumericOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.NumericOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.copyFrom(ctx) @@ -7188,11 +7715,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitUnaryNumericComponent"): 
listener.exitUnaryNumericComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitUnaryNumericComponent"): + return visitor.visitUnaryNumericComponent(self) + else: + return visitor.visitChildren(self) + class BinaryNumericComponentContext(NumericOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.NumericOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.left = None # ExprComponentContext self.right = None # ExprComponentContext @@ -7233,11 +7766,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitBinaryNumericComponent"): listener.exitBinaryNumericComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitBinaryNumericComponent"): + return visitor.visitBinaryNumericComponent(self) + else: + return visitor.visitChildren(self) + class UnaryWithOptionalNumericComponentContext(NumericOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.NumericOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.copyFrom(ctx) @@ -7270,18 +7809,25 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitUnaryWithOptionalNumericComponent"): listener.exitUnaryWithOptionalNumericComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitUnaryWithOptionalNumericComponent"): + return visitor.visitUnaryWithOptionalNumericComponent(self) + else: + return visitor.visitChildren(self) + def numericOperatorsComponent(self): + localctx = Parser.NumericOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 48, self.RULE_numericOperatorsComponent) self._la = 0 # Token type try: - self.state = 801 + self.state = 803 
self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.ABS, Parser.LN, Parser.EXP, Parser.CEIL, Parser.FLOOR, Parser.SQRT]: localctx = Parser.UnaryNumericComponentContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 780 + self.state = 782 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -7303,17 +7849,17 @@ def numericOperatorsComponent(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 781 + self.state = 783 self.match(Parser.LPAREN) - self.state = 782 + self.state = 784 self.exprComponent(0) - self.state = 783 + self.state = 785 self.match(Parser.RPAREN) pass elif token in [Parser.TRUNC, Parser.ROUND]: localctx = Parser.UnaryWithOptionalNumericComponentContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 785 + self.state = 787 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.TRUNC or _la == Parser.ROUND): @@ -7321,26 +7867,26 @@ def numericOperatorsComponent(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 786 + self.state = 788 self.match(Parser.LPAREN) - self.state = 787 + self.state = 789 self.exprComponent(0) - self.state = 790 + self.state = 792 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 788 + self.state = 790 self.match(Parser.COMMA) - self.state = 789 + self.state = 791 self.optionalExprComponent() - self.state = 792 + self.state = 794 self.match(Parser.RPAREN) pass elif token in [Parser.RANDOM, Parser.LOG, Parser.POWER, Parser.MOD]: localctx = Parser.BinaryNumericComponentContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 794 + self.state = 796 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -7360,15 +7906,15 @@ def numericOperatorsComponent(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 795 + self.state = 797 self.match(Parser.LPAREN) - self.state = 796 + self.state = 798 
localctx.left = self.exprComponent(0) - self.state = 797 + self.state = 799 self.match(Parser.COMMA) - self.state = 798 + self.state = 800 localctx.right = self.exprComponent(0) - self.state = 799 + self.state = 801 self.match(Parser.RPAREN) pass else: @@ -7383,11 +7929,11 @@ def numericOperatorsComponent(self): return localctx class ComparisonOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_comparisonOperators @@ -7397,9 +7943,9 @@ def copyFrom(self, ctx: ParserRuleContext): class BetweenAtomContext(ComparisonOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ComparisonOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # ExprContext self.from_ = None # ExprContext self.to_ = None # ExprContext @@ -7434,11 +7980,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitBetweenAtom"): listener.exitBetweenAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitBetweenAtom"): + return visitor.visitBetweenAtom(self) + else: + return visitor.visitChildren(self) + class CharsetMatchAtomContext(ComparisonOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ComparisonOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # ExprContext self.pattern = None # ExprContext self.copyFrom(ctx) @@ -7469,11 +8021,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCharsetMatchAtom"): 
listener.exitCharsetMatchAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCharsetMatchAtom"): + return visitor.visitCharsetMatchAtom(self) + else: + return visitor.visitChildren(self) + class IsNullAtomContext(ComparisonOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ComparisonOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def ISNULL(self): @@ -7496,11 +8054,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitIsNullAtom"): listener.exitIsNullAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitIsNullAtom"): + return visitor.visitIsNullAtom(self) + else: + return visitor.visitChildren(self) + class ExistInAtomContext(ComparisonOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ComparisonOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.left = None # ExprContext self.right = None # ExprContext self.copyFrom(ctx) @@ -7537,85 +8101,92 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitExistInAtom"): listener.exitExistInAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitExistInAtom"): + return visitor.visitExistInAtom(self) + else: + return visitor.visitChildren(self) + def comparisonOperators(self): + localctx = Parser.ComparisonOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 50, self.RULE_comparisonOperators) self._la = 0 # Token type try: - self.state = 835 + self.state = 837 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.BETWEEN]: localctx = Parser.BetweenAtomContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 803 - self.match(Parser.BETWEEN) - self.state = 804 - 
self.match(Parser.LPAREN) self.state = 805 - localctx.op = self.expr(0) + self.match(Parser.BETWEEN) self.state = 806 - self.match(Parser.COMMA) + self.match(Parser.LPAREN) self.state = 807 - localctx.from_ = self.expr(0) + localctx.op = self.expr(0) self.state = 808 self.match(Parser.COMMA) self.state = 809 - localctx.to_ = self.expr(0) + localctx.from_ = self.expr(0) self.state = 810 + self.match(Parser.COMMA) + self.state = 811 + localctx.to_ = self.expr(0) + self.state = 812 self.match(Parser.RPAREN) pass elif token in [Parser.CHARSET_MATCH]: localctx = Parser.CharsetMatchAtomContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 812 + self.state = 814 self.match(Parser.CHARSET_MATCH) - self.state = 813 + self.state = 815 self.match(Parser.LPAREN) - self.state = 814 + self.state = 816 localctx.op = self.expr(0) - self.state = 815 + self.state = 817 self.match(Parser.COMMA) - self.state = 816 + self.state = 818 localctx.pattern = self.expr(0) - self.state = 817 + self.state = 819 self.match(Parser.RPAREN) pass elif token in [Parser.ISNULL]: localctx = Parser.IsNullAtomContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 819 + self.state = 821 self.match(Parser.ISNULL) - self.state = 820 + self.state = 822 self.match(Parser.LPAREN) - self.state = 821 + self.state = 823 self.expr(0) - self.state = 822 + self.state = 824 self.match(Parser.RPAREN) pass elif token in [Parser.EXISTS_IN]: localctx = Parser.ExistInAtomContext(self, localctx) self.enterOuterAlt(localctx, 4) - self.state = 824 + self.state = 826 self.match(Parser.EXISTS_IN) - self.state = 825 + self.state = 827 self.match(Parser.LPAREN) - self.state = 826 + self.state = 828 localctx.left = self.expr(0) - self.state = 827 + self.state = 829 self.match(Parser.COMMA) - self.state = 828 + self.state = 830 localctx.right = self.expr(0) - self.state = 831 + self.state = 833 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 829 + 
self.state = 831 self.match(Parser.COMMA) - self.state = 830 + self.state = 832 self.retainType() - self.state = 833 + self.state = 835 self.match(Parser.RPAREN) pass else: @@ -7630,11 +8201,11 @@ def comparisonOperators(self): return localctx class ComparisonOperatorsComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_comparisonOperatorsComponent @@ -7644,9 +8215,9 @@ def copyFrom(self, ctx: ParserRuleContext): class IsNullAtomComponentContext(ComparisonOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ComparisonOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def ISNULL(self): @@ -7669,11 +8240,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitIsNullAtomComponent"): listener.exitIsNullAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitIsNullAtomComponent"): + return visitor.visitIsNullAtomComponent(self) + else: + return visitor.visitChildren(self) + class CharsetMatchAtomComponentContext(ComparisonOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ComparisonOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # ExprComponentContext self.pattern = None # ExprComponentContext self.copyFrom(ctx) @@ -7704,11 +8281,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCharsetMatchAtomComponent"): 
listener.exitCharsetMatchAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCharsetMatchAtomComponent"): + return visitor.visitCharsetMatchAtomComponent(self) + else: + return visitor.visitChildren(self) + class BetweenAtomComponentContext(ComparisonOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ComparisonOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # ExprComponentContext self.from_ = None # ExprComponentContext self.to_ = None # ExprComponentContext @@ -7743,59 +8326,66 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitBetweenAtomComponent"): listener.exitBetweenAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitBetweenAtomComponent"): + return visitor.visitBetweenAtomComponent(self) + else: + return visitor.visitChildren(self) + def comparisonOperatorsComponent(self): + localctx = Parser.ComparisonOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 52, self.RULE_comparisonOperatorsComponent) try: - self.state = 858 + self.state = 860 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.BETWEEN]: localctx = Parser.BetweenAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 837 - self.match(Parser.BETWEEN) - self.state = 838 - self.match(Parser.LPAREN) self.state = 839 - localctx.op = self.exprComponent(0) + self.match(Parser.BETWEEN) self.state = 840 - self.match(Parser.COMMA) + self.match(Parser.LPAREN) self.state = 841 - localctx.from_ = self.exprComponent(0) + localctx.op = self.exprComponent(0) self.state = 842 self.match(Parser.COMMA) self.state = 843 - localctx.to_ = self.exprComponent(0) + localctx.from_ = self.exprComponent(0) self.state = 844 + self.match(Parser.COMMA) + self.state = 845 + localctx.to_ = 
self.exprComponent(0) + self.state = 846 self.match(Parser.RPAREN) pass elif token in [Parser.CHARSET_MATCH]: localctx = Parser.CharsetMatchAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 846 + self.state = 848 self.match(Parser.CHARSET_MATCH) - self.state = 847 + self.state = 849 self.match(Parser.LPAREN) - self.state = 848 + self.state = 850 localctx.op = self.exprComponent(0) - self.state = 849 + self.state = 851 self.match(Parser.COMMA) - self.state = 850 + self.state = 852 localctx.pattern = self.exprComponent(0) - self.state = 851 + self.state = 853 self.match(Parser.RPAREN) pass elif token in [Parser.ISNULL]: localctx = Parser.IsNullAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 853 + self.state = 855 self.match(Parser.ISNULL) - self.state = 854 + self.state = 856 self.match(Parser.LPAREN) - self.state = 855 + self.state = 857 self.exprComponent(0) - self.state = 856 + self.state = 858 self.match(Parser.RPAREN) pass else: @@ -7810,11 +8400,11 @@ def comparisonOperatorsComponent(self): return localctx class TimeOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_timeOperators @@ -7824,9 +8414,9 @@ def copyFrom(self, ctx: ParserRuleContext): class DayToYearAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def DAYTOYEAR(self): @@ -7849,11 +8439,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, 
"exitDayToYearAtom"): listener.exitDayToYearAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDayToYearAtom"): + return visitor.visitDayToYearAtom(self) + else: + return visitor.visitChildren(self) + class YearAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def YEAR_OP(self): @@ -7876,15 +8472,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitYearAtom"): listener.exitYearAtom(self) - class MonthToDayAtomContext(TimeOperatorsContext): + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitYearAtom"): + return visitor.visitYearAtom(self) + else: + return visitor.visitChildren(self) + + class YearTodayAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) - def MONTHTODAY(self): - return self.getToken(Parser.MONTHTODAY, 0) + def YEARTODAY(self): + return self.getToken(Parser.YEARTODAY, 0) def LPAREN(self): return self.getToken(Parser.LPAREN, 0) @@ -7896,18 +8498,24 @@ def RPAREN(self): return self.getToken(Parser.RPAREN, 0) def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterMonthToDayAtom"): - listener.enterMonthToDayAtom(self) + if hasattr(listener, "enterYearTodayAtom"): + listener.enterYearTodayAtom(self) def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitMonthToDayAtom"): - listener.exitMonthToDayAtom(self) + if hasattr(listener, "exitYearTodayAtom"): + listener.exitYearTodayAtom(self) + + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitYearTodayAtom"): + return visitor.visitYearTodayAtom(self) + else: + 
return visitor.visitChildren(self) class DayToMonthAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def DAYTOMONTH(self): @@ -7930,11 +8538,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDayToMonthAtom"): listener.exitDayToMonthAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDayToMonthAtom"): + return visitor.visitDayToMonthAtom(self) + else: + return visitor.visitChildren(self) + class PeriodAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def PERIOD_INDICATOR(self): @@ -7957,11 +8571,51 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitPeriodAtom"): listener.exitPeriodAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitPeriodAtom"): + return visitor.visitPeriodAtom(self) + else: + return visitor.visitChildren(self) + + class MonthTodayAtomContext(TimeOperatorsContext): + def __init__( + self, parser, ctx: ParserRuleContext + ): # actually a Parser.TimeOperatorsContext + super().__init__(parser) + self.copyFrom(ctx) + + def MONTHTODAY(self): + return self.getToken(Parser.MONTHTODAY, 0) + + def LPAREN(self): + return self.getToken(Parser.LPAREN, 0) + + def expr(self): + return self.getTypedRuleContext(Parser.ExprContext, 0) + + def RPAREN(self): + return self.getToken(Parser.RPAREN, 0) + + def enterRule(self, listener: ParseTreeListener): + if hasattr(listener, "enterMonthTodayAtom"): + listener.enterMonthTodayAtom(self) + + def exitRule(self, listener: ParseTreeListener): + if hasattr(listener, "exitMonthTodayAtom"): + 
listener.exitMonthTodayAtom(self) + + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitMonthTodayAtom"): + return visitor.visitMonthTodayAtom(self) + else: + return visitor.visitChildren(self) + class FillTimeAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) + self.op = None # Token self.copyFrom(ctx) def FILL_TIME_SERIES(self): @@ -7993,11 +8647,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitFillTimeAtom"): listener.exitFillTimeAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitFillTimeAtom"): + return visitor.visitFillTimeAtom(self) + else: + return visitor.visitChildren(self) + class MonthAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def MONTH_OP(self): @@ -8020,11 +8680,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitMonthAtom"): listener.exitMonthAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitMonthAtom"): + return visitor.visitMonthAtom(self) + else: + return visitor.visitChildren(self) + class DayOfYearAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def DAYOFYEAR(self): @@ -8047,11 +8713,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDayOfYearAtom"): listener.exitDayOfYearAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDayOfYearAtom"): + 
return visitor.visitDayOfYearAtom(self) + else: + return visitor.visitChildren(self) + class FlowAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.copyFrom(ctx) @@ -8078,11 +8750,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitFlowAtom"): listener.exitFlowAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitFlowAtom"): + return visitor.visitFlowAtom(self) + else: + return visitor.visitChildren(self) + class TimeShiftAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def TIMESHIFT(self): @@ -8111,14 +8789,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitTimeShiftAtom"): listener.exitTimeShiftAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitTimeShiftAtom"): + return visitor.visitTimeShiftAtom(self) + else: + return visitor.visitChildren(self) + class TimeAggAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.periodIndTo = None # Token self.periodIndFrom = None # Token self.op = None # OptionalExprContext + self.delim = None # Token self.copyFrom(ctx) def TIME_AGG(self): @@ -8145,15 +8830,15 @@ def COMMA(self, i: int = None): def optionalExpr(self): return self.getTypedRuleContext(Parser.OptionalExprContext, 0) + def OPTIONAL(self): + return self.getToken(Parser.OPTIONAL, 0) + def FIRST(self): return self.getToken(Parser.FIRST, 0) def 
LAST(self): return self.getToken(Parser.LAST, 0) - def OPTIONAL(self): - return self.getToken(Parser.OPTIONAL, 0) - def enterRule(self, listener: ParseTreeListener): if hasattr(listener, "enterTimeAggAtom"): listener.enterTimeAggAtom(self) @@ -8162,11 +8847,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitTimeAggAtom"): listener.exitTimeAggAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitTimeAggAtom"): + return visitor.visitTimeAggAtom(self) + else: + return visitor.visitChildren(self) + class DateDiffAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.dateFrom = None # ExprContext self.dateTo = None # ExprContext self.copyFrom(ctx) @@ -8197,11 +8888,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDateDiffAtom"): listener.exitDateDiffAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDateDiffAtom"): + return visitor.visitDateDiffAtom(self) + else: + return visitor.visitChildren(self) + class DateAddAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # ExprContext self.shiftNumber = None # ExprContext self.periodInd = None # ExprContext @@ -8236,38 +8933,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDateAddAtom"): listener.exitDateAddAtom(self) - class YearToDayAtomContext(TimeOperatorsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) - self.copyFrom(ctx) - - def YEARTODAY(self): - return self.getToken(Parser.YEARTODAY, 0) 
- - def LPAREN(self): - return self.getToken(Parser.LPAREN, 0) - - def expr(self): - return self.getTypedRuleContext(Parser.ExprContext, 0) - - def RPAREN(self): - return self.getToken(Parser.RPAREN, 0) - - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterYearToDayAtom"): - listener.enterYearToDayAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitYearToDayAtom"): - listener.exitYearToDayAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDateAddAtom"): + return visitor.visitDateAddAtom(self) + else: + return visitor.visitChildren(self) class DayOfMonthAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def DAYOFMONTH(self): @@ -8290,11 +8966,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDayOfMonthAtom"): listener.exitDayOfMonthAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDayOfMonthAtom"): + return visitor.visitDayOfMonthAtom(self) + else: + return visitor.visitChildren(self) + class CurrentDateAtomContext(TimeOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def CURRENT_DATE(self): @@ -8314,22 +8996,29 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCurrentDateAtom"): listener.exitCurrentDateAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCurrentDateAtom"): + return visitor.visitCurrentDateAtom(self) + else: + return visitor.visitChildren(self) + def timeOperators(self): + localctx = Parser.TimeOperatorsContext(self, self._ctx, 
self.state) self.enterRule(localctx, 54, self.RULE_timeOperators) self._la = 0 # Token type try: - self.state = 962 + self.state = 964 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.PERIOD_INDICATOR]: localctx = Parser.PeriodAtomContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 860 + self.state = 862 self.match(Parser.PERIOD_INDICATOR) - self.state = 861 - self.match(Parser.LPAREN) self.state = 863 + self.match(Parser.LPAREN) + self.state = 865 self._errHandler.sync(self) _la = self._input.LA(1) if ( @@ -8454,42 +9143,43 @@ def timeOperators(self): != 0 ) ): - self.state = 862 + self.state = 864 self.expr(0) - self.state = 865 + self.state = 867 self.match(Parser.RPAREN) pass elif token in [Parser.FILL_TIME_SERIES]: localctx = Parser.FillTimeAtomContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 866 + self.state = 868 self.match(Parser.FILL_TIME_SERIES) - self.state = 867 + self.state = 869 self.match(Parser.LPAREN) - self.state = 868 + self.state = 870 self.expr(0) - self.state = 871 + self.state = 873 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 869 + self.state = 871 self.match(Parser.COMMA) - self.state = 870 + self.state = 872 + localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.ALL or _la == Parser.SINGLE): - self._errHandler.recoverInline(self) + localctx.op = self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 873 + self.state = 875 self.match(Parser.RPAREN) pass elif token in [Parser.FLOW_TO_STOCK, Parser.STOCK_TO_FLOW]: localctx = Parser.FlowAtomContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 875 + self.state = 877 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.FLOW_TO_STOCK or _la == Parser.STOCK_TO_FLOW): @@ -8497,45 +9187,45 @@ def timeOperators(self): else: self._errHandler.reportMatch(self) 
self.consume() - self.state = 876 + self.state = 878 self.match(Parser.LPAREN) - self.state = 877 + self.state = 879 self.expr(0) - self.state = 878 + self.state = 880 self.match(Parser.RPAREN) pass elif token in [Parser.TIMESHIFT]: localctx = Parser.TimeShiftAtomContext(self, localctx) self.enterOuterAlt(localctx, 4) - self.state = 880 + self.state = 882 self.match(Parser.TIMESHIFT) - self.state = 881 + self.state = 883 self.match(Parser.LPAREN) - self.state = 882 + self.state = 884 self.expr(0) - self.state = 883 + self.state = 885 self.match(Parser.COMMA) - self.state = 884 + self.state = 886 self.signedInteger() - self.state = 885 + self.state = 887 self.match(Parser.RPAREN) pass elif token in [Parser.TIME_AGG]: localctx = Parser.TimeAggAtomContext(self, localctx) self.enterOuterAlt(localctx, 5) - self.state = 887 + self.state = 889 self.match(Parser.TIME_AGG) - self.state = 888 + self.state = 890 self.match(Parser.LPAREN) - self.state = 889 + self.state = 891 localctx.periodIndTo = self.match(Parser.STRING_CONSTANT) - self.state = 892 + self.state = 894 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 70, self._ctx) if la_ == 1: - self.state = 890 + self.state = 892 self.match(Parser.COMMA) - self.state = 891 + self.state = 893 localctx.periodIndFrom = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.OPTIONAL or _la == Parser.STRING_CONSTANT): @@ -8544,172 +9234,173 @@ def timeOperators(self): self._errHandler.reportMatch(self) self.consume() - self.state = 896 + self.state = 898 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 71, self._ctx) if la_ == 1: - self.state = 894 + self.state = 896 self.match(Parser.COMMA) - self.state = 895 + self.state = 897 localctx.op = self.optionalExpr() - self.state = 900 + self.state = 902 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 898 + self.state = 900 self.match(Parser.COMMA) - self.state = 899 + self.state 
= 901 + localctx.delim = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.FIRST or _la == Parser.LAST): - self._errHandler.recoverInline(self) + localctx.delim = self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 902 + self.state = 904 self.match(Parser.RPAREN) pass elif token in [Parser.CURRENT_DATE]: localctx = Parser.CurrentDateAtomContext(self, localctx) self.enterOuterAlt(localctx, 6) - self.state = 903 + self.state = 905 self.match(Parser.CURRENT_DATE) - self.state = 904 + self.state = 906 self.match(Parser.LPAREN) - self.state = 905 + self.state = 907 self.match(Parser.RPAREN) pass elif token in [Parser.DATEDIFF]: localctx = Parser.DateDiffAtomContext(self, localctx) self.enterOuterAlt(localctx, 7) - self.state = 906 + self.state = 908 self.match(Parser.DATEDIFF) - self.state = 907 + self.state = 909 self.match(Parser.LPAREN) - self.state = 908 + self.state = 910 localctx.dateFrom = self.expr(0) - self.state = 909 + self.state = 911 self.match(Parser.COMMA) - self.state = 910 + self.state = 912 localctx.dateTo = self.expr(0) - self.state = 911 + self.state = 913 self.match(Parser.RPAREN) pass elif token in [Parser.DATEADD]: localctx = Parser.DateAddAtomContext(self, localctx) self.enterOuterAlt(localctx, 8) - self.state = 913 - self.match(Parser.DATEADD) - self.state = 914 - self.match(Parser.LPAREN) self.state = 915 - localctx.op = self.expr(0) + self.match(Parser.DATEADD) self.state = 916 - self.match(Parser.COMMA) + self.match(Parser.LPAREN) self.state = 917 - localctx.shiftNumber = self.expr(0) + localctx.op = self.expr(0) self.state = 918 self.match(Parser.COMMA) self.state = 919 - localctx.periodInd = self.expr(0) + localctx.shiftNumber = self.expr(0) self.state = 920 + self.match(Parser.COMMA) + self.state = 921 + localctx.periodInd = self.expr(0) + self.state = 922 self.match(Parser.RPAREN) pass elif token in [Parser.YEAR_OP]: localctx = Parser.YearAtomContext(self, localctx) 
self.enterOuterAlt(localctx, 9) - self.state = 922 + self.state = 924 self.match(Parser.YEAR_OP) - self.state = 923 + self.state = 925 self.match(Parser.LPAREN) - self.state = 924 + self.state = 926 self.expr(0) - self.state = 925 + self.state = 927 self.match(Parser.RPAREN) pass elif token in [Parser.MONTH_OP]: localctx = Parser.MonthAtomContext(self, localctx) self.enterOuterAlt(localctx, 10) - self.state = 927 + self.state = 929 self.match(Parser.MONTH_OP) - self.state = 928 + self.state = 930 self.match(Parser.LPAREN) - self.state = 929 + self.state = 931 self.expr(0) - self.state = 930 + self.state = 932 self.match(Parser.RPAREN) pass elif token in [Parser.DAYOFMONTH]: localctx = Parser.DayOfMonthAtomContext(self, localctx) self.enterOuterAlt(localctx, 11) - self.state = 932 + self.state = 934 self.match(Parser.DAYOFMONTH) - self.state = 933 + self.state = 935 self.match(Parser.LPAREN) - self.state = 934 + self.state = 936 self.expr(0) - self.state = 935 + self.state = 937 self.match(Parser.RPAREN) pass elif token in [Parser.DAYOFYEAR]: localctx = Parser.DayOfYearAtomContext(self, localctx) self.enterOuterAlt(localctx, 12) - self.state = 937 + self.state = 939 self.match(Parser.DAYOFYEAR) - self.state = 938 + self.state = 940 self.match(Parser.LPAREN) - self.state = 939 + self.state = 941 self.expr(0) - self.state = 940 + self.state = 942 self.match(Parser.RPAREN) pass elif token in [Parser.DAYTOYEAR]: localctx = Parser.DayToYearAtomContext(self, localctx) self.enterOuterAlt(localctx, 13) - self.state = 942 + self.state = 944 self.match(Parser.DAYTOYEAR) - self.state = 943 + self.state = 945 self.match(Parser.LPAREN) - self.state = 944 + self.state = 946 self.expr(0) - self.state = 945 + self.state = 947 self.match(Parser.RPAREN) pass elif token in [Parser.DAYTOMONTH]: localctx = Parser.DayToMonthAtomContext(self, localctx) self.enterOuterAlt(localctx, 14) - self.state = 947 + self.state = 949 self.match(Parser.DAYTOMONTH) - self.state = 948 + self.state = 950 
self.match(Parser.LPAREN) - self.state = 949 + self.state = 951 self.expr(0) - self.state = 950 + self.state = 952 self.match(Parser.RPAREN) pass elif token in [Parser.YEARTODAY]: - localctx = Parser.YearToDayAtomContext(self, localctx) + localctx = Parser.YearTodayAtomContext(self, localctx) self.enterOuterAlt(localctx, 15) - self.state = 952 + self.state = 954 self.match(Parser.YEARTODAY) - self.state = 953 + self.state = 955 self.match(Parser.LPAREN) - self.state = 954 + self.state = 956 self.expr(0) - self.state = 955 + self.state = 957 self.match(Parser.RPAREN) pass elif token in [Parser.MONTHTODAY]: - localctx = Parser.MonthToDayAtomContext(self, localctx) + localctx = Parser.MonthTodayAtomContext(self, localctx) self.enterOuterAlt(localctx, 16) - self.state = 957 + self.state = 959 self.match(Parser.MONTHTODAY) - self.state = 958 + self.state = 960 self.match(Parser.LPAREN) - self.state = 959 + self.state = 961 self.expr(0) - self.state = 960 + self.state = 962 self.match(Parser.RPAREN) pass else: @@ -8724,11 +9415,11 @@ def timeOperators(self): return localctx class TimeOperatorsComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_timeOperatorsComponent @@ -8738,9 +9429,9 @@ def copyFrom(self, ctx: ParserRuleContext): class PeriodAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def PERIOD_INDICATOR(self): @@ -8763,11 +9454,17 @@ def exitRule(self, listener: ParseTreeListener): if 
hasattr(listener, "exitPeriodAtomComponent"): listener.exitPeriodAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitPeriodAtomComponent"): + return visitor.visitPeriodAtomComponent(self) + else: + return visitor.visitChildren(self) + class TimeShiftAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def TIMESHIFT(self): @@ -8796,14 +9493,54 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitTimeShiftAtomComponent"): listener.exitTimeShiftAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitTimeShiftAtomComponent"): + return visitor.visitTimeShiftAtomComponent(self) + else: + return visitor.visitChildren(self) + + class MonthTodayAtomComponentContext(TimeOperatorsComponentContext): + def __init__( + self, parser, ctx: ParserRuleContext + ): # actually a Parser.TimeOperatorsComponentContext + super().__init__(parser) + self.copyFrom(ctx) + + def MONTHTODAY(self): + return self.getToken(Parser.MONTHTODAY, 0) + + def LPAREN(self): + return self.getToken(Parser.LPAREN, 0) + + def exprComponent(self): + return self.getTypedRuleContext(Parser.ExprComponentContext, 0) + + def RPAREN(self): + return self.getToken(Parser.RPAREN, 0) + + def enterRule(self, listener: ParseTreeListener): + if hasattr(listener, "enterMonthTodayAtomComponent"): + listener.enterMonthTodayAtomComponent(self) + + def exitRule(self, listener: ParseTreeListener): + if hasattr(listener, "exitMonthTodayAtomComponent"): + listener.exitMonthTodayAtomComponent(self) + + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitMonthTodayAtomComponent"): + return visitor.visitMonthTodayAtomComponent(self) + else: + return visitor.visitChildren(self) + class 
TimeAggAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.periodIndTo = None # Token self.periodIndFrom = None # Token self.op = None # OptionalExprComponentContext + self.delim = None # Token self.copyFrom(ctx) def TIME_AGG(self): @@ -8830,15 +9567,15 @@ def COMMA(self, i: int = None): def optionalExprComponent(self): return self.getTypedRuleContext(Parser.OptionalExprComponentContext, 0) + def OPTIONAL(self): + return self.getToken(Parser.OPTIONAL, 0) + def FIRST(self): return self.getToken(Parser.FIRST, 0) def LAST(self): return self.getToken(Parser.LAST, 0) - def OPTIONAL(self): - return self.getToken(Parser.OPTIONAL, 0) - def enterRule(self, listener: ParseTreeListener): if hasattr(listener, "enterTimeAggAtomComponent"): listener.enterTimeAggAtomComponent(self) @@ -8847,11 +9584,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitTimeAggAtomComponent"): listener.exitTimeAggAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitTimeAggAtomComponent"): + return visitor.visitTimeAggAtomComponent(self) + else: + return visitor.visitChildren(self) + class DayToMonthAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def DAYTOMONTH(self): @@ -8874,11 +9617,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDayToMonthAtomComponent"): listener.exitDayToMonthAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDayToMonthAtomComponent"): + return visitor.visitDayToMonthAtomComponent(self) + 
else: + return visitor.visitChildren(self) + class DateAddAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # ExprComponentContext self.shiftNumber = None # ExprComponentContext self.periodInd = None # ExprComponentContext @@ -8913,15 +9662,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDateAddAtomComponent"): listener.exitDateAddAtomComponent(self) - class MonthToDayAtomComponentContext(TimeOperatorsComponentContext): + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDateAddAtomComponent"): + return visitor.visitDateAddAtomComponent(self) + else: + return visitor.visitChildren(self) + + class YearTodayAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) - def MONTHTODAY(self): - return self.getToken(Parser.MONTHTODAY, 0) + def YEARTODAY(self): + return self.getToken(Parser.YEARTODAY, 0) def LPAREN(self): return self.getToken(Parser.LPAREN, 0) @@ -8933,18 +9688,24 @@ def RPAREN(self): return self.getToken(Parser.RPAREN, 0) def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterMonthToDayAtomComponent"): - listener.enterMonthToDayAtomComponent(self) + if hasattr(listener, "enterYearTodayAtomComponent"): + listener.enterYearTodayAtomComponent(self) def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitMonthToDayAtomComponent"): - listener.exitMonthToDayAtomComponent(self) + if hasattr(listener, "exitYearTodayAtomComponent"): + listener.exitYearTodayAtomComponent(self) + + def accept(self, visitor: ParseTreeVisitor): 
+ if hasattr(visitor, "visitYearTodayAtomComponent"): + return visitor.visitYearTodayAtomComponent(self) + else: + return visitor.visitChildren(self) class DayOfMonthAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def DAYOFMONTH(self): @@ -8967,38 +9728,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDayOfMonthAtomComponent"): listener.exitDayOfMonthAtomComponent(self) - class DayOfYearAtomComponentContext(TimeOperatorsComponentContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) - self.copyFrom(ctx) - - def DAYOFYEAR(self): - return self.getToken(Parser.DAYOFYEAR, 0) - - def LPAREN(self): - return self.getToken(Parser.LPAREN, 0) - - def exprComponent(self): - return self.getTypedRuleContext(Parser.ExprComponentContext, 0) - - def RPAREN(self): - return self.getToken(Parser.RPAREN, 0) - - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDayOfYearAtomComponent"): - listener.enterDayOfYearAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDayOfYearAtomComponent"): - listener.exitDayOfYearAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDayOfMonthAtomComponent"): + return visitor.visitDayOfMonthAtomComponent(self) + else: + return visitor.visitChildren(self) class MonthAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def MONTH_OP(self): @@ -9021,15 
+9761,22 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitMonthAtomComponent"): listener.exitMonthAtomComponent(self) - class YearToDayAtomComponentContext(TimeOperatorsComponentContext): + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitMonthAtomComponent"): + return visitor.visitMonthAtomComponent(self) + else: + return visitor.visitChildren(self) + + class FillTimeAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) + self.op = None # Token self.copyFrom(ctx) - def YEARTODAY(self): - return self.getToken(Parser.YEARTODAY, 0) + def FILL_TIME_SERIES(self): + return self.getToken(Parser.FILL_TIME_SERIES, 0) def LPAREN(self): return self.getToken(Parser.LPAREN, 0) @@ -9040,23 +9787,38 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) + def COMMA(self): + return self.getToken(Parser.COMMA, 0) + + def SINGLE(self): + return self.getToken(Parser.SINGLE, 0) + + def ALL(self): + return self.getToken(Parser.ALL, 0) + def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterYearToDayAtomComponent"): - listener.enterYearToDayAtomComponent(self) + if hasattr(listener, "enterFillTimeAtomComponent"): + listener.enterFillTimeAtomComponent(self) def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitYearToDayAtomComponent"): - listener.exitYearToDayAtomComponent(self) + if hasattr(listener, "exitFillTimeAtomComponent"): + listener.exitFillTimeAtomComponent(self) - class FillTimeAtomComponentContext(TimeOperatorsComponentContext): + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitFillTimeAtomComponent"): + return visitor.visitFillTimeAtomComponent(self) + else: + return visitor.visitChildren(self) + + class 
DatOfYearAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) - def FILL_TIME_SERIES(self): - return self.getToken(Parser.FILL_TIME_SERIES, 0) + def DAYOFYEAR(self): + return self.getToken(Parser.DAYOFYEAR, 0) def LPAREN(self): return self.getToken(Parser.LPAREN, 0) @@ -9067,28 +9829,25 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def COMMA(self): - return self.getToken(Parser.COMMA, 0) - - def SINGLE(self): - return self.getToken(Parser.SINGLE, 0) - - def ALL(self): - return self.getToken(Parser.ALL, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterFillTimeAtomComponent"): - listener.enterFillTimeAtomComponent(self) + if hasattr(listener, "enterDatOfYearAtomComponent"): + listener.enterDatOfYearAtomComponent(self) def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitFillTimeAtomComponent"): - listener.exitFillTimeAtomComponent(self) + if hasattr(listener, "exitDatOfYearAtomComponent"): + listener.exitDatOfYearAtomComponent(self) + + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDatOfYearAtomComponent"): + return visitor.visitDatOfYearAtomComponent(self) + else: + return visitor.visitChildren(self) class DayToYearAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def DAYTOYEAR(self): @@ -9111,11 +9870,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDayToYearAtomComponent"): listener.exitDayToYearAtomComponent(self) + def accept(self, visitor: 
ParseTreeVisitor): + if hasattr(visitor, "visitDayToYearAtomComponent"): + return visitor.visitDayToYearAtomComponent(self) + else: + return visitor.visitChildren(self) + class CurrentDateAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def CURRENT_DATE(self): @@ -9135,11 +9900,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCurrentDateAtomComponent"): listener.exitCurrentDateAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCurrentDateAtomComponent"): + return visitor.visitCurrentDateAtomComponent(self) + else: + return visitor.visitChildren(self) + class FlowAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.copyFrom(ctx) @@ -9166,13 +9937,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitFlowAtomComponent"): listener.exitFlowAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitFlowAtomComponent"): + return visitor.visitFlowAtomComponent(self) + else: + return visitor.visitChildren(self) + class DateDiffAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.dateFrom = None # ExprComponentContext - self.dateTo = None # ExprComponentContext + self.dateTo = None # ExprContext self.copyFrom(ctx) def DATEDIFF(self): @@ -9187,11 +9964,11 @@ def 
COMMA(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def exprComponent(self, i: int = None): - if i is None: - return self.getTypedRuleContexts(Parser.ExprComponentContext) - else: - return self.getTypedRuleContext(Parser.ExprComponentContext, i) + def exprComponent(self): + return self.getTypedRuleContext(Parser.ExprComponentContext, 0) + + def expr(self): + return self.getTypedRuleContext(Parser.ExprContext, 0) def enterRule(self, listener: ParseTreeListener): if hasattr(listener, "enterDateDiffAtomComponent"): @@ -9201,11 +9978,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDateDiffAtomComponent"): listener.exitDateDiffAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDateDiffAtomComponent"): + return visitor.visitDateDiffAtomComponent(self) + else: + return visitor.visitChildren(self) + class YearAtomComponentContext(TimeOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.TimeOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def YEAR_OP(self): @@ -9228,22 +10011,29 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitYearAtomComponent"): listener.exitYearAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitYearAtomComponent"): + return visitor.visitYearAtomComponent(self) + else: + return visitor.visitChildren(self) + def timeOperatorsComponent(self): + localctx = Parser.TimeOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 56, self.RULE_timeOperatorsComponent) self._la = 0 # Token type try: - self.state = 1066 + self.state = 1068 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.PERIOD_INDICATOR]: localctx = Parser.PeriodAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 1) - 
self.state = 964 + self.state = 966 self.match(Parser.PERIOD_INDICATOR) - self.state = 965 - self.match(Parser.LPAREN) self.state = 967 + self.match(Parser.LPAREN) + self.state = 969 self._errHandler.sync(self) _la = self._input.LA(1) if ( @@ -9356,42 +10146,43 @@ def timeOperatorsComponent(self): != 0 ) ): - self.state = 966 + self.state = 968 self.exprComponent(0) - self.state = 969 + self.state = 971 self.match(Parser.RPAREN) pass elif token in [Parser.FILL_TIME_SERIES]: localctx = Parser.FillTimeAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 970 + self.state = 972 self.match(Parser.FILL_TIME_SERIES) - self.state = 971 + self.state = 973 self.match(Parser.LPAREN) - self.state = 972 + self.state = 974 self.exprComponent(0) - self.state = 975 + self.state = 977 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 973 + self.state = 975 self.match(Parser.COMMA) - self.state = 974 + self.state = 976 + localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.ALL or _la == Parser.SINGLE): - self._errHandler.recoverInline(self) + localctx.op = self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 977 + self.state = 979 self.match(Parser.RPAREN) pass elif token in [Parser.FLOW_TO_STOCK, Parser.STOCK_TO_FLOW]: localctx = Parser.FlowAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 979 + self.state = 981 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.FLOW_TO_STOCK or _la == Parser.STOCK_TO_FLOW): @@ -9399,45 +10190,45 @@ def timeOperatorsComponent(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 980 + self.state = 982 self.match(Parser.LPAREN) - self.state = 981 + self.state = 983 self.exprComponent(0) - self.state = 982 + self.state = 984 self.match(Parser.RPAREN) pass elif token in [Parser.TIMESHIFT]: localctx = 
Parser.TimeShiftAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 4) - self.state = 984 + self.state = 986 self.match(Parser.TIMESHIFT) - self.state = 985 + self.state = 987 self.match(Parser.LPAREN) - self.state = 986 + self.state = 988 self.exprComponent(0) - self.state = 987 + self.state = 989 self.match(Parser.COMMA) - self.state = 988 + self.state = 990 self.signedInteger() - self.state = 989 + self.state = 991 self.match(Parser.RPAREN) pass elif token in [Parser.TIME_AGG]: localctx = Parser.TimeAggAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 5) - self.state = 991 + self.state = 993 self.match(Parser.TIME_AGG) - self.state = 992 + self.state = 994 self.match(Parser.LPAREN) - self.state = 993 + self.state = 995 localctx.periodIndTo = self.match(Parser.STRING_CONSTANT) - self.state = 996 + self.state = 998 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 76, self._ctx) if la_ == 1: - self.state = 994 + self.state = 996 self.match(Parser.COMMA) - self.state = 995 + self.state = 997 localctx.periodIndFrom = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.OPTIONAL or _la == Parser.STRING_CONSTANT): @@ -9446,172 +10237,173 @@ def timeOperatorsComponent(self): self._errHandler.reportMatch(self) self.consume() - self.state = 1000 + self.state = 1002 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 77, self._ctx) if la_ == 1: - self.state = 998 + self.state = 1000 self.match(Parser.COMMA) - self.state = 999 + self.state = 1001 localctx.op = self.optionalExprComponent() - self.state = 1004 + self.state = 1006 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 1002 + self.state = 1004 self.match(Parser.COMMA) - self.state = 1003 + self.state = 1005 + localctx.delim = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.FIRST or _la == Parser.LAST): - self._errHandler.recoverInline(self) + localctx.delim = 
self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 1006 + self.state = 1008 self.match(Parser.RPAREN) pass elif token in [Parser.CURRENT_DATE]: localctx = Parser.CurrentDateAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 6) - self.state = 1007 + self.state = 1009 self.match(Parser.CURRENT_DATE) - self.state = 1008 + self.state = 1010 self.match(Parser.LPAREN) - self.state = 1009 + self.state = 1011 self.match(Parser.RPAREN) pass elif token in [Parser.DATEDIFF]: localctx = Parser.DateDiffAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 7) - self.state = 1010 - self.match(Parser.DATEDIFF) - self.state = 1011 - self.match(Parser.LPAREN) self.state = 1012 - localctx.dateFrom = self.exprComponent(0) + self.match(Parser.DATEDIFF) self.state = 1013 - self.match(Parser.COMMA) + self.match(Parser.LPAREN) self.state = 1014 - localctx.dateTo = self.exprComponent(0) + localctx.dateFrom = self.exprComponent(0) self.state = 1015 + self.match(Parser.COMMA) + self.state = 1016 + localctx.dateTo = self.expr(0) + self.state = 1017 self.match(Parser.RPAREN) pass elif token in [Parser.DATEADD]: localctx = Parser.DateAddAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 8) - self.state = 1017 - self.match(Parser.DATEADD) - self.state = 1018 - self.match(Parser.LPAREN) self.state = 1019 - localctx.op = self.exprComponent(0) + self.match(Parser.DATEADD) self.state = 1020 - self.match(Parser.COMMA) + self.match(Parser.LPAREN) self.state = 1021 - localctx.shiftNumber = self.exprComponent(0) + localctx.op = self.exprComponent(0) self.state = 1022 self.match(Parser.COMMA) self.state = 1023 - localctx.periodInd = self.exprComponent(0) + localctx.shiftNumber = self.exprComponent(0) self.state = 1024 + self.match(Parser.COMMA) + self.state = 1025 + localctx.periodInd = self.exprComponent(0) + self.state = 1026 self.match(Parser.RPAREN) pass elif token in [Parser.YEAR_OP]: localctx = 
Parser.YearAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 9) - self.state = 1026 + self.state = 1028 self.match(Parser.YEAR_OP) - self.state = 1027 + self.state = 1029 self.match(Parser.LPAREN) - self.state = 1028 + self.state = 1030 self.exprComponent(0) - self.state = 1029 + self.state = 1031 self.match(Parser.RPAREN) pass elif token in [Parser.MONTH_OP]: localctx = Parser.MonthAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 10) - self.state = 1031 + self.state = 1033 self.match(Parser.MONTH_OP) - self.state = 1032 + self.state = 1034 self.match(Parser.LPAREN) - self.state = 1033 + self.state = 1035 self.exprComponent(0) - self.state = 1034 + self.state = 1036 self.match(Parser.RPAREN) pass elif token in [Parser.DAYOFMONTH]: localctx = Parser.DayOfMonthAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 11) - self.state = 1036 + self.state = 1038 self.match(Parser.DAYOFMONTH) - self.state = 1037 + self.state = 1039 self.match(Parser.LPAREN) - self.state = 1038 + self.state = 1040 self.exprComponent(0) - self.state = 1039 + self.state = 1041 self.match(Parser.RPAREN) pass elif token in [Parser.DAYOFYEAR]: - localctx = Parser.DayOfYearAtomComponentContext(self, localctx) + localctx = Parser.DatOfYearAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 12) - self.state = 1041 + self.state = 1043 self.match(Parser.DAYOFYEAR) - self.state = 1042 + self.state = 1044 self.match(Parser.LPAREN) - self.state = 1043 + self.state = 1045 self.exprComponent(0) - self.state = 1044 + self.state = 1046 self.match(Parser.RPAREN) pass elif token in [Parser.DAYTOYEAR]: localctx = Parser.DayToYearAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 13) - self.state = 1046 + self.state = 1048 self.match(Parser.DAYTOYEAR) - self.state = 1047 + self.state = 1049 self.match(Parser.LPAREN) - self.state = 1048 + self.state = 1050 self.exprComponent(0) - self.state = 1049 + self.state = 1051 
self.match(Parser.RPAREN) pass elif token in [Parser.DAYTOMONTH]: localctx = Parser.DayToMonthAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 14) - self.state = 1051 + self.state = 1053 self.match(Parser.DAYTOMONTH) - self.state = 1052 + self.state = 1054 self.match(Parser.LPAREN) - self.state = 1053 + self.state = 1055 self.exprComponent(0) - self.state = 1054 + self.state = 1056 self.match(Parser.RPAREN) pass elif token in [Parser.YEARTODAY]: - localctx = Parser.YearToDayAtomComponentContext(self, localctx) + localctx = Parser.YearTodayAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 15) - self.state = 1056 + self.state = 1058 self.match(Parser.YEARTODAY) - self.state = 1057 + self.state = 1059 self.match(Parser.LPAREN) - self.state = 1058 + self.state = 1060 self.exprComponent(0) - self.state = 1059 + self.state = 1061 self.match(Parser.RPAREN) pass elif token in [Parser.MONTHTODAY]: - localctx = Parser.MonthToDayAtomComponentContext(self, localctx) + localctx = Parser.MonthTodayAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 16) - self.state = 1061 + self.state = 1063 self.match(Parser.MONTHTODAY) - self.state = 1062 + self.state = 1064 self.match(Parser.LPAREN) - self.state = 1063 + self.state = 1065 self.exprComponent(0) - self.state = 1064 + self.state = 1066 self.match(Parser.RPAREN) pass else: @@ -9626,11 +10418,11 @@ def timeOperatorsComponent(self): return localctx class SetOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_setOperators @@ -9639,10 +10431,8 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class 
SetOrSYmDiffAtomContext(SetOperatorsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.SetOperatorsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.SetOperatorsContext + super().__init__(parser) self.op = None # Token self.left = None # ExprContext self.right = None # ExprContext @@ -9677,11 +10467,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitSetOrSYmDiffAtom"): listener.exitSetOrSYmDiffAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSetOrSYmDiffAtom"): + return visitor.visitSetOrSYmDiffAtom(self) + else: + return visitor.visitChildren(self) + class IntersectAtomContext(SetOperatorsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.SetOperatorsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.SetOperatorsContext + super().__init__(parser) self.left = None # ExprContext self.copyFrom(ctx) @@ -9714,11 +10508,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitIntersectAtom"): listener.exitIntersectAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitIntersectAtom"): + return visitor.visitIntersectAtom(self) + else: + return visitor.visitChildren(self) + class UnionAtomContext(SetOperatorsContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.SetOperatorsContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.SetOperatorsContext + super().__init__(parser) self.left = None # ExprContext self.copyFrom(ctx) @@ -9751,70 +10549,77 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitUnionAtom"): listener.exitUnionAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, 
"visitUnionAtom"): + return visitor.visitUnionAtom(self) + else: + return visitor.visitChildren(self) + def setOperators(self): + localctx = Parser.SetOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 58, self.RULE_setOperators) self._la = 0 # Token type try: - self.state = 1097 + self.state = 1099 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.UNION]: localctx = Parser.UnionAtomContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1068 + self.state = 1070 self.match(Parser.UNION) - self.state = 1069 + self.state = 1071 self.match(Parser.LPAREN) - self.state = 1070 + self.state = 1072 localctx.left = self.expr(0) - self.state = 1073 + self.state = 1075 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 1071 + self.state = 1073 self.match(Parser.COMMA) - self.state = 1072 + self.state = 1074 self.expr(0) - self.state = 1075 + self.state = 1077 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la == Parser.COMMA): break - self.state = 1077 + self.state = 1079 self.match(Parser.RPAREN) pass elif token in [Parser.INTERSECT]: localctx = Parser.IntersectAtomContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1079 + self.state = 1081 self.match(Parser.INTERSECT) - self.state = 1080 + self.state = 1082 self.match(Parser.LPAREN) - self.state = 1081 + self.state = 1083 localctx.left = self.expr(0) - self.state = 1084 + self.state = 1086 self._errHandler.sync(self) _la = self._input.LA(1) while True: - self.state = 1082 + self.state = 1084 self.match(Parser.COMMA) - self.state = 1083 + self.state = 1085 self.expr(0) - self.state = 1086 + self.state = 1088 self._errHandler.sync(self) _la = self._input.LA(1) if not (_la == Parser.COMMA): break - self.state = 1088 + self.state = 1090 self.match(Parser.RPAREN) pass elif token in [Parser.SYMDIFF, Parser.SETDIFF]: localctx = Parser.SetOrSYmDiffAtomContext(self, localctx) self.enterOuterAlt(localctx, 3) - 
self.state = 1090 + self.state = 1092 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.SYMDIFF or _la == Parser.SETDIFF): @@ -9822,15 +10627,15 @@ def setOperators(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 1091 + self.state = 1093 self.match(Parser.LPAREN) - self.state = 1092 + self.state = 1094 localctx.left = self.expr(0) - self.state = 1093 + self.state = 1095 self.match(Parser.COMMA) - self.state = 1094 + self.state = 1096 localctx.right = self.expr(0) - self.state = 1095 + self.state = 1097 self.match(Parser.RPAREN) pass else: @@ -9845,11 +10650,11 @@ def setOperators(self): return localctx class HierarchyOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser self.op = None # ExprContext self.hrName = None # Token self.ruleComponent = None # ComponentIDContext @@ -9901,39 +10706,46 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitHierarchyOperators"): listener.exitHierarchyOperators(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitHierarchyOperators"): + return visitor.visitHierarchyOperators(self) + else: + return visitor.visitChildren(self) + def hierarchyOperators(self): + localctx = Parser.HierarchyOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 60, self.RULE_hierarchyOperators) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1099 + self.state = 1101 self.match(Parser.HIERARCHY) - self.state = 1100 + self.state = 1102 self.match(Parser.LPAREN) - self.state = 1101 + self.state = 1103 localctx.op = self.expr(0) - self.state = 1102 + self.state = 1104 self.match(Parser.COMMA) - 
self.state = 1103 - localctx.hrName = self.match(Parser.IDENTIFIER) self.state = 1105 + localctx.hrName = self.match(Parser.IDENTIFIER) + self.state = 1107 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.CONDITION: - self.state = 1104 + self.state = 1106 self.conditionClause() - self.state = 1109 + self.state = 1111 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 84, self._ctx) if la_ == 1: - self.state = 1107 + self.state = 1109 self.match(Parser.RULE) - self.state = 1108 + self.state = 1110 localctx.ruleComponent = self.componentID() - self.state = 1112 + self.state = 1114 self._errHandler.sync(self) _la = self._input.LA(1) if ((_la - 225) & ~0x3F) == 0 and ( @@ -9947,24 +10759,24 @@ def hierarchyOperators(self): | (1 << (Parser.ALWAYS_ZERO - 225)) ) ) != 0: - self.state = 1111 + self.state = 1113 self.validationMode() - self.state = 1115 + self.state = 1117 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.DATASET or _la == Parser.RULE or _la == Parser.RULE_PRIORITY: - self.state = 1114 + self.state = 1116 self.inputModeHierarchy() - self.state = 1118 + self.state = 1120 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ALL or _la == Parser.COMPUTED: - self.state = 1117 + self.state = 1119 self.outputModeHierarchy() - self.state = 1120 + self.state = 1122 self.match(Parser.RPAREN) except RecognitionException as re: localctx.exception = re @@ -9975,11 +10787,11 @@ def hierarchyOperators(self): return localctx class ValidationOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_validationOperators @@ -9989,9 +10801,9 @@ def 
copyFrom(self, ctx: ParserRuleContext): class ValidateHRrulesetContext(ValidationOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ValidationOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # ExprContext self.hrName = None # Token self.copyFrom(ctx) @@ -10040,11 +10852,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitValidateHRruleset"): listener.exitValidateHRruleset(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitValidateHRruleset"): + return visitor.visitValidateHRruleset(self) + else: + return visitor.visitChildren(self) + class ValidateDPrulesetContext(ValidationOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ValidationOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # ExprContext self.dpName = None # Token self.copyFrom(ctx) @@ -10090,11 +10908,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitValidateDPruleset"): listener.exitValidateDPruleset(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitValidateDPruleset"): + return visitor.visitValidateDPruleset(self) + else: + return visitor.visitChildren(self) + class ValidationSimpleContext(ValidationOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ValidationOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # ExprContext self.codeErr = None # ErCodeContext self.levelCode = None # ErLevelContext @@ -10136,87 +10960,94 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitValidationSimple"): listener.exitValidationSimple(self) + def accept(self, visitor: ParseTreeVisitor): + if 
hasattr(visitor, "visitValidationSimple"): + return visitor.visitValidationSimple(self) + else: + return visitor.visitChildren(self) + def validationOperators(self): + localctx = Parser.ValidationOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 62, self.RULE_validationOperators) self._la = 0 # Token type try: - self.state = 1183 + self.state = 1185 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.CHECK_DATAPOINT]: localctx = Parser.ValidateDPrulesetContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1122 + self.state = 1124 self.match(Parser.CHECK_DATAPOINT) - self.state = 1123 + self.state = 1125 self.match(Parser.LPAREN) - self.state = 1124 + self.state = 1126 localctx.op = self.expr(0) - self.state = 1125 + self.state = 1127 self.match(Parser.COMMA) - self.state = 1126 + self.state = 1128 localctx.dpName = self.match(Parser.IDENTIFIER) - self.state = 1136 + self.state = 1138 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMPONENTS: - self.state = 1127 + self.state = 1129 self.match(Parser.COMPONENTS) - self.state = 1128 + self.state = 1130 self.componentID() - self.state = 1133 + self.state = 1135 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1129 + self.state = 1131 self.match(Parser.COMMA) - self.state = 1130 + self.state = 1132 self.componentID() - self.state = 1135 + self.state = 1137 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1139 + self.state = 1141 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ALL or _la == Parser.INVALID or _la == Parser.ALL_MEASURES: - self.state = 1138 + self.state = 1140 self.validationOutput() - self.state = 1141 + self.state = 1143 self.match(Parser.RPAREN) pass elif token in [Parser.CHECK_HIERARCHY]: localctx = Parser.ValidateHRrulesetContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1143 + self.state = 1145 
self.match(Parser.CHECK_HIERARCHY) - self.state = 1144 + self.state = 1146 self.match(Parser.LPAREN) - self.state = 1145 + self.state = 1147 localctx.op = self.expr(0) - self.state = 1146 + self.state = 1148 self.match(Parser.COMMA) - self.state = 1147 - localctx.hrName = self.match(Parser.IDENTIFIER) self.state = 1149 + localctx.hrName = self.match(Parser.IDENTIFIER) + self.state = 1151 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.CONDITION: - self.state = 1148 + self.state = 1150 self.conditionClause() - self.state = 1153 + self.state = 1155 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.RULE: - self.state = 1151 + self.state = 1153 self.match(Parser.RULE) - self.state = 1152 + self.state = 1154 self.componentID() - self.state = 1156 + self.state = 1158 self._errHandler.sync(self) _la = self._input.LA(1) if ((_la - 225) & ~0x3F) == 0 and ( @@ -10230,61 +11061,61 @@ def validationOperators(self): | (1 << (Parser.ALWAYS_ZERO - 225)) ) ) != 0: - self.state = 1155 + self.state = 1157 self.validationMode() - self.state = 1159 + self.state = 1161 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.DATASET or _la == Parser.DATASET_PRIORITY: - self.state = 1158 + self.state = 1160 self.inputMode() - self.state = 1162 + self.state = 1164 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ALL or _la == Parser.INVALID or _la == Parser.ALL_MEASURES: - self.state = 1161 + self.state = 1163 self.validationOutput() - self.state = 1164 + self.state = 1166 self.match(Parser.RPAREN) pass elif token in [Parser.CHECK]: localctx = Parser.ValidationSimpleContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 1166 + self.state = 1168 self.match(Parser.CHECK) - self.state = 1167 + self.state = 1169 self.match(Parser.LPAREN) - self.state = 1168 - localctx.op = self.expr(0) self.state = 1170 + localctx.op = self.expr(0) + self.state = 1172 self._errHandler.sync(self) _la = self._input.LA(1) if 
_la == Parser.ERRORCODE: - self.state = 1169 + self.state = 1171 localctx.codeErr = self.erCode() - self.state = 1173 + self.state = 1175 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ERRORLEVEL: - self.state = 1172 + self.state = 1174 localctx.levelCode = self.erLevel() - self.state = 1176 + self.state = 1178 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.IMBALANCE: - self.state = 1175 + self.state = 1177 self.imbalanceExpr() - self.state = 1179 + self.state = 1181 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ALL or _la == Parser.INVALID: - self.state = 1178 + self.state = 1180 localctx.output = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.ALL or _la == Parser.INVALID): @@ -10293,7 +11124,7 @@ def validationOperators(self): self._errHandler.reportMatch(self) self.consume() - self.state = 1181 + self.state = 1183 self.match(Parser.RPAREN) pass else: @@ -10308,11 +11139,11 @@ def validationOperators(self): return localctx class ConditionalOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_conditionalOperators @@ -10322,9 +11153,9 @@ def copyFrom(self, ctx: ParserRuleContext): class NvlAtomContext(ConditionalOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ConditionalOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.left = None # ExprContext self.right = None # ExprContext self.copyFrom(ctx) @@ -10355,23 +11186,30 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitNvlAtom"): 
listener.exitNvlAtom(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitNvlAtom"): + return visitor.visitNvlAtom(self) + else: + return visitor.visitChildren(self) + def conditionalOperators(self): + localctx = Parser.ConditionalOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 64, self.RULE_conditionalOperators) try: localctx = Parser.NvlAtomContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1185 + self.state = 1187 self.match(Parser.NVL) - self.state = 1186 + self.state = 1188 self.match(Parser.LPAREN) - self.state = 1187 + self.state = 1189 localctx.left = self.expr(0) - self.state = 1188 + self.state = 1190 self.match(Parser.COMMA) - self.state = 1189 + self.state = 1191 localctx.right = self.expr(0) - self.state = 1190 + self.state = 1192 self.match(Parser.RPAREN) except RecognitionException as re: localctx.exception = re @@ -10382,11 +11220,11 @@ def conditionalOperators(self): return localctx class ConditionalOperatorsComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_conditionalOperatorsComponent @@ -10396,9 +11234,9 @@ def copyFrom(self, ctx: ParserRuleContext): class NvlAtomComponentContext(ConditionalOperatorsComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ConditionalOperatorsComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.left = None # ExprComponentContext self.right = None # ExprComponentContext self.copyFrom(ctx) @@ -10429,23 +11267,30 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, 
"exitNvlAtomComponent"): listener.exitNvlAtomComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitNvlAtomComponent"): + return visitor.visitNvlAtomComponent(self) + else: + return visitor.visitChildren(self) + def conditionalOperatorsComponent(self): + localctx = Parser.ConditionalOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 66, self.RULE_conditionalOperatorsComponent) try: localctx = Parser.NvlAtomComponentContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1192 + self.state = 1194 self.match(Parser.NVL) - self.state = 1193 + self.state = 1195 self.match(Parser.LPAREN) - self.state = 1194 + self.state = 1196 localctx.left = self.exprComponent(0) - self.state = 1195 + self.state = 1197 self.match(Parser.COMMA) - self.state = 1196 + self.state = 1198 localctx.right = self.exprComponent(0) - self.state = 1197 + self.state = 1199 self.match(Parser.RPAREN) except RecognitionException as re: localctx.exception = re @@ -10456,11 +11301,11 @@ def conditionalOperatorsComponent(self): return localctx class AggrOperatorsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_aggrOperators @@ -10470,9 +11315,9 @@ def copyFrom(self, ctx: ParserRuleContext): class AggrCompContext(AggrOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.AggrOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.copyFrom(ctx) @@ -10523,11 +11368,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, 
"exitAggrComp"): listener.exitAggrComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAggrComp"): + return visitor.visitAggrComp(self) + else: + return visitor.visitChildren(self) + class CountAggrCompContext(AggrOperatorsContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.AggrOperatorsContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def COUNT(self): @@ -10547,18 +11398,25 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCountAggrComp"): listener.exitCountAggrComp(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCountAggrComp"): + return visitor.visitCountAggrComp(self) + else: + return visitor.visitChildren(self) + def aggrOperators(self): + localctx = Parser.AggrOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 68, self.RULE_aggrOperators) self._la = 0 # Token type try: - self.state = 1207 + self.state = 1209 self._errHandler.sync(self) la_ = self._interp.adaptivePredict(self._input, 101, self._ctx) if la_ == 1: localctx = Parser.AggrCompContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1199 + self.state = 1201 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -10584,22 +11442,22 @@ def aggrOperators(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 1200 + self.state = 1202 self.match(Parser.LPAREN) - self.state = 1201 + self.state = 1203 self.exprComponent(0) - self.state = 1202 + self.state = 1204 self.match(Parser.RPAREN) pass elif la_ == 2: localctx = Parser.CountAggrCompContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1204 + self.state = 1206 self.match(Parser.COUNT) - self.state = 1205 + self.state = 1207 self.match(Parser.LPAREN) - self.state = 1206 + self.state = 1208 self.match(Parser.RPAREN) pass @@ -10612,11 +11470,11 @@ def 
aggrOperators(self): return localctx class AggrOperatorsGroupingContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_aggrOperatorsGrouping @@ -10626,9 +11484,9 @@ def copyFrom(self, ctx: ParserRuleContext): class AggrDatasetContext(AggrOperatorsGroupingContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.AggrOperatorsGroupingContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.copyFrom(ctx) @@ -10685,14 +11543,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAggrDataset"): listener.exitAggrDataset(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAggrDataset"): + return visitor.visitAggrDataset(self) + else: + return visitor.visitChildren(self) + def aggrOperatorsGrouping(self): + localctx = Parser.AggrOperatorsGroupingContext(self, self._ctx, self.state) self.enterRule(localctx, 70, self.RULE_aggrOperatorsGrouping) self._la = 0 # Token type try: localctx = Parser.AggrDatasetContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1209 + self.state = 1211 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -10718,24 +11583,24 @@ def aggrOperatorsGrouping(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 1210 + self.state = 1212 self.match(Parser.LPAREN) - self.state = 1211 + self.state = 1213 self.expr(0) - self.state = 1216 + self.state = 1218 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GROUP: - self.state = 1212 - self.groupingClause() self.state = 1214 + 
self.groupingClause() + self.state = 1216 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.HAVING: - self.state = 1213 + self.state = 1215 self.havingClause() - self.state = 1218 + self.state = 1220 self.match(Parser.RPAREN) except RecognitionException as re: localctx.exception = re @@ -10746,11 +11611,11 @@ def aggrOperatorsGrouping(self): return localctx class AnFunctionContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_anFunction @@ -10759,10 +11624,8 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class LagOrLeadAnContext(AnFunctionContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.AnFunctionContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.AnFunctionContext + super().__init__(parser) self.op = None # Token self.offset = None # SignedIntegerContext self.defaultValue = None # ScalarItemContext @@ -10820,11 +11683,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitLagOrLeadAn"): listener.exitLagOrLeadAn(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitLagOrLeadAn"): + return visitor.visitLagOrLeadAn(self) + else: + return visitor.visitChildren(self) + class RatioToReportAnContext(AnFunctionContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.AnFunctionContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.AnFunctionContext + super().__init__(parser) self.op = None # Token self.partition = None # 
PartitionByClauseContext self.copyFrom(ctx) @@ -10861,11 +11728,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRatioToReportAn"): listener.exitRatioToReportAn(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRatioToReportAn"): + return visitor.visitRatioToReportAn(self) + else: + return visitor.visitChildren(self) + class AnSimpleFunctionContext(AnFunctionContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.AnFunctionContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.AnFunctionContext + super().__init__(parser) self.op = None # Token self.partition = None # PartitionByClauseContext self.orderBy = None # OrderByClauseContext @@ -10943,12 +11814,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAnSimpleFunction"): listener.exitAnSimpleFunction(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAnSimpleFunction"): + return visitor.visitAnSimpleFunction(self) + else: + return visitor.visitChildren(self) + def anFunction(self): + localctx = Parser.AnFunctionContext(self, self._ctx, self.state) self.enterRule(localctx, 72, self.RULE_anFunction) self._la = 0 # Token type try: - self.state = 1267 + self.state = 1269 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -10967,7 +11845,7 @@ def anFunction(self): ]: localctx = Parser.AnSimpleFunctionContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1220 + self.state = 1222 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -10997,45 +11875,45 @@ def anFunction(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 1221 + self.state = 1223 self.match(Parser.LPAREN) - self.state = 1222 + self.state = 1224 self.expr(0) - self.state = 1223 + self.state = 1225 self.match(Parser.OVER) - self.state = 1224 + self.state = 1226 
self.match(Parser.LPAREN) - self.state = 1226 + self.state = 1228 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.PARTITION: - self.state = 1225 + self.state = 1227 localctx.partition = self.partitionByClause() - self.state = 1229 + self.state = 1231 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ORDER: - self.state = 1228 + self.state = 1230 localctx.orderBy = self.orderByClause() - self.state = 1232 + self.state = 1234 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.DATA or _la == Parser.RANGE: - self.state = 1231 + self.state = 1233 localctx.windowing = self.windowingClause() - self.state = 1234 + self.state = 1236 self.match(Parser.RPAREN) - self.state = 1235 + self.state = 1237 self.match(Parser.RPAREN) pass elif token in [Parser.LAG, Parser.LEAD]: localctx = Parser.LagOrLeadAnContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1237 + self.state = 1239 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.LAG or _la == Parser.LEAD): @@ -11043,65 +11921,65 @@ def anFunction(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 1238 + self.state = 1240 self.match(Parser.LPAREN) - self.state = 1239 + self.state = 1241 self.expr(0) - self.state = 1246 + self.state = 1248 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 1240 + self.state = 1242 self.match(Parser.COMMA) - self.state = 1241 + self.state = 1243 localctx.offset = self.signedInteger() - self.state = 1244 + self.state = 1246 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 1242 + self.state = 1244 self.match(Parser.COMMA) - self.state = 1243 + self.state = 1245 localctx.defaultValue = self.scalarItem() - self.state = 1248 + self.state = 1250 self.match(Parser.OVER) - self.state = 1249 + self.state = 1251 self.match(Parser.LPAREN) - self.state = 1251 + self.state = 1253 
self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.PARTITION: - self.state = 1250 + self.state = 1252 localctx.partition = self.partitionByClause() - self.state = 1253 - localctx.orderBy = self.orderByClause() self.state = 1255 + localctx.orderBy = self.orderByClause() + self.state = 1257 self.match(Parser.RPAREN) - self.state = 1256 + self.state = 1258 self.match(Parser.RPAREN) pass elif token in [Parser.RATIO_TO_REPORT]: localctx = Parser.RatioToReportAnContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 1258 + self.state = 1260 localctx.op = self.match(Parser.RATIO_TO_REPORT) - self.state = 1259 + self.state = 1261 self.match(Parser.LPAREN) - self.state = 1260 + self.state = 1262 self.expr(0) - self.state = 1261 + self.state = 1263 self.match(Parser.OVER) - self.state = 1262 + self.state = 1264 self.match(Parser.LPAREN) - self.state = 1263 + self.state = 1265 localctx.partition = self.partitionByClause() - self.state = 1264 + self.state = 1266 self.match(Parser.RPAREN) - self.state = 1265 + self.state = 1267 self.match(Parser.RPAREN) pass else: @@ -11116,11 +11994,11 @@ def anFunction(self): return localctx class AnFunctionComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_anFunctionComponent @@ -11130,9 +12008,9 @@ def copyFrom(self, ctx: ParserRuleContext): class AnSimpleFunctionComponentContext(AnFunctionComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.AnFunctionComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.partition = None # 
PartitionByClauseContext self.orderBy = None # OrderByClauseContext @@ -11210,13 +12088,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAnSimpleFunctionComponent"): listener.exitAnSimpleFunctionComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAnSimpleFunctionComponent"): + return visitor.visitAnSimpleFunctionComponent(self) + else: + return visitor.visitChildren(self) + class LagOrLeadAnComponentContext(AnFunctionComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.AnFunctionComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token - self.offet = None # SignedIntegerContext + self.offset = None # SignedIntegerContext self.defaultValue = None # ScalarItemContext self.partition = None # PartitionByClauseContext self.orderBy = None # OrderByClauseContext @@ -11269,11 +12153,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitLagOrLeadAnComponent"): listener.exitLagOrLeadAnComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitLagOrLeadAnComponent"): + return visitor.visitLagOrLeadAnComponent(self) + else: + return visitor.visitChildren(self) + class RankAnComponentContext(AnFunctionComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.AnFunctionComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.partition = None # PartitionByClauseContext self.orderBy = None # OrderByClauseContext @@ -11311,11 +12201,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRankAnComponent"): listener.exitRankAnComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRankAnComponent"): + return visitor.visitRankAnComponent(self) + 
else: + return visitor.visitChildren(self) + class RatioToReportAnComponentContext(AnFunctionComponentContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.AnFunctionComponentContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token self.partition = None # PartitionByClauseContext self.copyFrom(ctx) @@ -11352,12 +12248,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRatioToReportAnComponent"): listener.exitRatioToReportAnComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRatioToReportAnComponent"): + return visitor.visitRatioToReportAnComponent(self) + else: + return visitor.visitChildren(self) + def anFunctionComponent(self): + localctx = Parser.AnFunctionComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 74, self.RULE_anFunctionComponent) self._la = 0 # Token type try: - self.state = 1327 + self.state = 1329 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -11376,7 +12279,7 @@ def anFunctionComponent(self): ]: localctx = Parser.AnSimpleFunctionComponentContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1269 + self.state = 1271 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( @@ -11406,45 +12309,45 @@ def anFunctionComponent(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 1270 + self.state = 1272 self.match(Parser.LPAREN) - self.state = 1271 + self.state = 1273 self.exprComponent(0) - self.state = 1272 + self.state = 1274 self.match(Parser.OVER) - self.state = 1273 + self.state = 1275 self.match(Parser.LPAREN) - self.state = 1275 + self.state = 1277 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.PARTITION: - self.state = 1274 + self.state = 1276 localctx.partition = self.partitionByClause() - self.state = 1278 + self.state = 1280 self._errHandler.sync(self) 
_la = self._input.LA(1) if _la == Parser.ORDER: - self.state = 1277 + self.state = 1279 localctx.orderBy = self.orderByClause() - self.state = 1281 + self.state = 1283 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.DATA or _la == Parser.RANGE: - self.state = 1280 + self.state = 1282 localctx.windowing = self.windowingClause() - self.state = 1283 + self.state = 1285 self.match(Parser.RPAREN) - self.state = 1284 + self.state = 1286 self.match(Parser.RPAREN) pass elif token in [Parser.LAG, Parser.LEAD]: localctx = Parser.LagOrLeadAnComponentContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1286 + self.state = 1288 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.LAG or _la == Parser.LEAD): @@ -11452,22 +12355,33 @@ def anFunctionComponent(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 1287 + self.state = 1289 self.match(Parser.LPAREN) - self.state = 1288 + self.state = 1290 self.exprComponent(0) - self.state = 1294 + self.state = 1296 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 1289 + self.state = 1291 self.match(Parser.COMMA) - self.state = 1290 - localctx.offet = self.signedInteger() self.state = 1292 + localctx.offset = self.signedInteger() + self.state = 1294 self._errHandler.sync(self) _la = self._input.LA(1) - if _la == Parser.NULL_CONSTANT or ( + if ( + ((_la) & ~0x3F) == 0 + and ( + (1 << _la) + & ( + (1 << Parser.PLUS) + | (1 << Parser.MINUS) + | (1 << Parser.NULL_CONSTANT) + ) + ) + != 0 + ) or ( ((_la - 218) & ~0x3F) == 0 and ( (1 << (_la - 218)) @@ -11481,73 +12395,73 @@ def anFunctionComponent(self): ) != 0 ): - self.state = 1291 + self.state = 1293 localctx.defaultValue = self.scalarItem() - self.state = 1296 + self.state = 1298 self.match(Parser.OVER) - self.state = 1297 + self.state = 1299 self.match(Parser.LPAREN) - self.state = 1299 + self.state = 1301 self._errHandler.sync(self) _la = self._input.LA(1) 
if _la == Parser.PARTITION: - self.state = 1298 + self.state = 1300 localctx.partition = self.partitionByClause() - self.state = 1301 - localctx.orderBy = self.orderByClause() self.state = 1303 + localctx.orderBy = self.orderByClause() + self.state = 1305 self.match(Parser.RPAREN) - self.state = 1304 + self.state = 1306 self.match(Parser.RPAREN) pass elif token in [Parser.RANK]: localctx = Parser.RankAnComponentContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 1306 + self.state = 1308 localctx.op = self.match(Parser.RANK) - self.state = 1307 + self.state = 1309 self.match(Parser.LPAREN) - self.state = 1308 + self.state = 1310 self.match(Parser.OVER) - self.state = 1309 + self.state = 1311 self.match(Parser.LPAREN) - self.state = 1311 + self.state = 1313 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.PARTITION: - self.state = 1310 + self.state = 1312 localctx.partition = self.partitionByClause() - self.state = 1313 - localctx.orderBy = self.orderByClause() self.state = 1315 + localctx.orderBy = self.orderByClause() + self.state = 1317 self.match(Parser.RPAREN) - self.state = 1316 + self.state = 1318 self.match(Parser.RPAREN) pass elif token in [Parser.RATIO_TO_REPORT]: localctx = Parser.RatioToReportAnComponentContext(self, localctx) self.enterOuterAlt(localctx, 4) - self.state = 1318 + self.state = 1320 localctx.op = self.match(Parser.RATIO_TO_REPORT) - self.state = 1319 + self.state = 1321 self.match(Parser.LPAREN) - self.state = 1320 + self.state = 1322 self.exprComponent(0) - self.state = 1321 + self.state = 1323 self.match(Parser.OVER) - self.state = 1322 + self.state = 1324 self.match(Parser.LPAREN) - self.state = 1323 + self.state = 1325 localctx.partition = self.partitionByClause() - self.state = 1324 + self.state = 1326 self.match(Parser.RPAREN) - self.state = 1325 + self.state = 1327 self.match(Parser.RPAREN) pass else: @@ -11562,11 +12476,11 @@ def anFunctionComponent(self): return localctx class 
RenameClauseItemContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser self.fromName = None # ComponentIDContext self.toName = None # ComponentIDContext @@ -11590,16 +12504,23 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRenameClauseItem"): listener.exitRenameClauseItem(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRenameClauseItem"): + return visitor.visitRenameClauseItem(self) + else: + return visitor.visitChildren(self) + def renameClauseItem(self): + localctx = Parser.RenameClauseItemContext(self, self._ctx, self.state) self.enterRule(localctx, 76, self.RULE_renameClauseItem) try: self.enterOuterAlt(localctx, 1) - self.state = 1329 + self.state = 1331 localctx.fromName = self.componentID() - self.state = 1330 + self.state = 1332 self.match(Parser.TO) - self.state = 1331 + self.state = 1333 localctx.toName = self.componentID() except RecognitionException as re: localctx.exception = re @@ -11610,11 +12531,11 @@ def renameClauseItem(self): return localctx class AggregateClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def aggrFunctionClause(self, i: int = None): if i is None: @@ -11639,23 +12560,30 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAggregateClause"): listener.exitAggregateClause(self) + def accept(self, visitor: ParseTreeVisitor): + if 
hasattr(visitor, "visitAggregateClause"): + return visitor.visitAggregateClause(self) + else: + return visitor.visitChildren(self) + def aggregateClause(self): + localctx = Parser.AggregateClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 78, self.RULE_aggregateClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1333 + self.state = 1335 self.aggrFunctionClause() - self.state = 1338 + self.state = 1340 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1334 + self.state = 1336 self.match(Parser.COMMA) - self.state = 1335 + self.state = 1337 self.aggrFunctionClause() - self.state = 1340 + self.state = 1342 self._errHandler.sync(self) _la = self._input.LA(1) @@ -11668,11 +12596,11 @@ def aggregateClause(self): return localctx class AggrFunctionClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def componentID(self): return self.getTypedRuleContext(Parser.ComponentIDContext, 0) @@ -11697,13 +12625,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAggrFunctionClause"): listener.exitAggrFunctionClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAggrFunctionClause"): + return visitor.visitAggrFunctionClause(self) + else: + return visitor.visitChildren(self) + def aggrFunctionClause(self): + localctx = Parser.AggrFunctionClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 80, self.RULE_aggrFunctionClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1342 + self.state = 1344 self._errHandler.sync(self) _la = self._input.LA(1) if ( @@ -11719,14 +12654,14 @@ 
def aggrFunctionClause(self): ) != 0 ) or _la == Parser.COMPONENT: - self.state = 1341 + self.state = 1343 self.componentRole() - self.state = 1344 + self.state = 1346 self.componentID() - self.state = 1345 + self.state = 1347 self.match(Parser.ASSIGN) - self.state = 1346 + self.state = 1348 self.aggrOperators() except RecognitionException as re: localctx.exception = re @@ -11737,11 +12672,11 @@ def aggrFunctionClause(self): return localctx class CalcClauseItemContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def componentID(self): return self.getTypedRuleContext(Parser.ComponentIDContext, 0) @@ -11766,13 +12701,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCalcClauseItem"): listener.exitCalcClauseItem(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCalcClauseItem"): + return visitor.visitCalcClauseItem(self) + else: + return visitor.visitChildren(self) + def calcClauseItem(self): + localctx = Parser.CalcClauseItemContext(self, self._ctx, self.state) self.enterRule(localctx, 82, self.RULE_calcClauseItem) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1349 + self.state = 1351 self._errHandler.sync(self) _la = self._input.LA(1) if ( @@ -11788,14 +12730,14 @@ def calcClauseItem(self): ) != 0 ) or _la == Parser.COMPONENT: - self.state = 1348 + self.state = 1350 self.componentRole() - self.state = 1351 + self.state = 1353 self.componentID() - self.state = 1352 + self.state = 1354 self.match(Parser.ASSIGN) - self.state = 1353 + self.state = 1355 self.exprComponent(0) except RecognitionException as re: localctx.exception = re @@ -11806,11 +12748,11 @@ def 
calcClauseItem(self): return localctx class SubspaceClauseItemContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def componentID(self): return self.getTypedRuleContext(Parser.ComponentIDContext, 0) @@ -11821,9 +12763,6 @@ def EQ(self): def scalarItem(self): return self.getTypedRuleContext(Parser.ScalarItemContext, 0) - def varID(self): - return self.getTypedRuleContext(Parser.VarIDContext, 0) - def getRuleIndex(self): return Parser.RULE_subspaceClauseItem @@ -11835,36 +12774,24 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitSubspaceClauseItem"): listener.exitSubspaceClauseItem(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSubspaceClauseItem"): + return visitor.visitSubspaceClauseItem(self) + else: + return visitor.visitChildren(self) + def subspaceClauseItem(self): + localctx = Parser.SubspaceClauseItemContext(self, self._ctx, self.state) self.enterRule(localctx, 84, self.RULE_subspaceClauseItem) try: self.enterOuterAlt(localctx, 1) - self.state = 1355 + self.state = 1357 self.componentID() - self.state = 1356 + self.state = 1358 self.match(Parser.EQ) self.state = 1359 - self._errHandler.sync(self) - token = self._input.LA(1) - if token in [ - Parser.NULL_CONSTANT, - Parser.CAST, - Parser.INTEGER_CONSTANT, - Parser.NUMBER_CONSTANT, - Parser.BOOLEAN_CONSTANT, - Parser.STRING_CONSTANT, - ]: - self.state = 1357 - self.scalarItem() - pass - elif token in [Parser.IDENTIFIER]: - self.state = 1358 - self.varID() - pass - else: - raise NoViableAltException(self) - + self.scalarItem() except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -11874,11 +12801,11 @@ 
def subspaceClauseItem(self): return localctx class ScalarItemContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_scalarItem @@ -11887,10 +12814,8 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class ScalarWithCastContext(ScalarItemContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.ScalarItemContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ScalarItemContext + super().__init__(parser) self.copyFrom(ctx) def CAST(self): @@ -11925,11 +12850,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitScalarWithCast"): listener.exitScalarWithCast(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitScalarWithCast"): + return visitor.visitScalarWithCast(self) + else: + return visitor.visitChildren(self) + class SimpleScalarContext(ScalarItemContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.ScalarItemContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ScalarItemContext + super().__init__(parser) self.copyFrom(ctx) def constant(self): @@ -11943,7 +12872,14 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitSimpleScalar"): listener.exitSimpleScalar(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSimpleScalar"): + return visitor.visitSimpleScalar(self) + else: + return visitor.visitChildren(self) + def scalarItem(self): + localctx = Parser.ScalarItemContext(self, self._ctx, self.state) 
self.enterRule(localctx, 86, self.RULE_scalarItem) self._la = 0 # Token type @@ -11952,6 +12888,8 @@ def scalarItem(self): self._errHandler.sync(self) token = self._input.LA(1) if token in [ + Parser.PLUS, + Parser.MINUS, Parser.NULL_CONSTANT, Parser.INTEGER_CONSTANT, Parser.NUMBER_CONSTANT, @@ -12001,11 +12939,11 @@ def scalarItem(self): return localctx class JoinClauseWithoutUsingContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def joinClauseItem(self, i: int = None): if i is None: @@ -12030,7 +12968,14 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitJoinClauseWithoutUsing"): listener.exitJoinClauseWithoutUsing(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitJoinClauseWithoutUsing"): + return visitor.visitJoinClauseWithoutUsing(self) + else: + return visitor.visitChildren(self) + def joinClauseWithoutUsing(self): + localctx = Parser.JoinClauseWithoutUsingContext(self, self._ctx, self.state) self.enterRule(localctx, 88, self.RULE_joinClauseWithoutUsing) self._la = 0 # Token type @@ -12059,11 +13004,11 @@ def joinClauseWithoutUsing(self): return localctx class JoinClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def joinClauseItem(self, i: int = None): if i is None: @@ -12097,7 +13042,14 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitJoinClause"): 
listener.exitJoinClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitJoinClause"): + return visitor.visitJoinClause(self) + else: + return visitor.visitChildren(self) + def joinClause(self): + localctx = Parser.JoinClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 90, self.RULE_joinClause) self._la = 0 # Token type @@ -12146,11 +13098,11 @@ def joinClause(self): return localctx class JoinClauseItemContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def expr(self): return self.getTypedRuleContext(Parser.ExprContext, 0) @@ -12172,7 +13124,14 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitJoinClauseItem"): listener.exitJoinClauseItem(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitJoinClauseItem"): + return visitor.visitJoinClauseItem(self) + else: + return visitor.visitChildren(self) + def joinClauseItem(self): + localctx = Parser.JoinClauseItemContext(self, self._ctx, self.state) self.enterRule(localctx, 92, self.RULE_joinClauseItem) self._la = 0 # Token type @@ -12198,11 +13157,11 @@ def joinClauseItem(self): return localctx class JoinBodyContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def filterClause(self): return self.getTypedRuleContext(Parser.FilterClauseContext, 0) @@ -12233,7 +13192,14 @@ def exitRule(self, listener: ParseTreeListener): 
if hasattr(listener, "exitJoinBody"): listener.exitJoinBody(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitJoinBody"): + return visitor.visitJoinBody(self) + else: + return visitor.visitChildren(self) + def joinBody(self): + localctx = Parser.JoinBodyContext(self, self._ctx, self.state) self.enterRule(localctx, 94, self.RULE_joinBody) self._la = 0 # Token type @@ -12288,11 +13254,11 @@ def joinBody(self): return localctx class JoinApplyClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def APPLY(self): return self.getToken(Parser.APPLY, 0) @@ -12311,7 +13277,14 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitJoinApplyClause"): listener.exitJoinApplyClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitJoinApplyClause"): + return visitor.visitJoinApplyClause(self) + else: + return visitor.visitChildren(self) + def joinApplyClause(self): + localctx = Parser.JoinApplyClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 96, self.RULE_joinApplyClause) try: @@ -12329,11 +13302,11 @@ def joinApplyClause(self): return localctx class PartitionByClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def PARTITION(self): return self.getToken(Parser.PARTITION, 0) @@ -12364,7 +13337,14 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, 
"exitPartitionByClause"): listener.exitPartitionByClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitPartitionByClause"): + return visitor.visitPartitionByClause(self) + else: + return visitor.visitChildren(self) + def partitionByClause(self): + localctx = Parser.PartitionByClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 98, self.RULE_partitionByClause) self._la = 0 # Token type @@ -12397,11 +13377,11 @@ def partitionByClause(self): return localctx class OrderByClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def ORDER(self): return self.getToken(Parser.ORDER, 0) @@ -12432,7 +13412,14 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitOrderByClause"): listener.exitOrderByClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitOrderByClause"): + return visitor.visitOrderByClause(self) + else: + return visitor.visitChildren(self) + def orderByClause(self): + localctx = Parser.OrderByClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 100, self.RULE_orderByClause) self._la = 0 # Token type @@ -12465,11 +13452,11 @@ def orderByClause(self): return localctx class OrderByItemContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def componentID(self): return self.getTypedRuleContext(Parser.ComponentIDContext, 0) @@ -12491,7 +13478,14 
@@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitOrderByItem"): listener.exitOrderByItem(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitOrderByItem"): + return visitor.visitOrderByItem(self) + else: + return visitor.visitChildren(self) + def orderByItem(self): + localctx = Parser.OrderByItemContext(self, self._ctx, self.state) self.enterRule(localctx, 102, self.RULE_orderByItem) self._la = 0 # Token type @@ -12520,11 +13514,11 @@ def orderByItem(self): return localctx class WindowingClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser self.from_ = None # LimitClauseItemContext self.to_ = None # LimitClauseItemContext @@ -12560,7 +13554,14 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitWindowingClause"): listener.exitWindowingClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitWindowingClause"): + return visitor.visitWindowingClause(self) + else: + return visitor.visitChildren(self) + def windowingClause(self): + localctx = Parser.WindowingClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 104, self.RULE_windowingClause) try: @@ -12598,15 +13599,21 @@ def windowingClause(self): return localctx class SignedIntegerContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def INTEGER_CONSTANT(self): return 
self.getToken(Parser.INTEGER_CONSTANT, 0) + def MINUS(self): + return self.getToken(Parser.MINUS, 0) + + def PLUS(self): + return self.getToken(Parser.PLUS, 0) + def getRuleIndex(self): return Parser.RULE_signedInteger @@ -12618,13 +13625,95 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitSignedInteger"): listener.exitSignedInteger(self) - def signedInteger(self): - localctx = Parser.SignedIntegerContext(self, self._ctx, self.state) - self.enterRule(localctx, 106, self.RULE_signedInteger) - try: - self.enterOuterAlt(localctx, 1) - self.state = 1458 - self.match(Parser.INTEGER_CONSTANT) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSignedInteger"): + return visitor.visitSignedInteger(self) + else: + return visitor.visitChildren(self) + + def signedInteger(self): + + localctx = Parser.SignedIntegerContext(self, self._ctx, self.state) + self.enterRule(localctx, 106, self.RULE_signedInteger) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 1459 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la == Parser.PLUS or _la == Parser.MINUS: + self.state = 1458 + _la = self._input.LA(1) + if not (_la == Parser.PLUS or _la == Parser.MINUS): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + + self.state = 1461 + self.match(Parser.INTEGER_CONSTANT) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + class SignedNumberContext(ParserRuleContext): + __slots__ = "parser" + + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): + super().__init__(parent, invokingState) + self.parser = parser + + def NUMBER_CONSTANT(self): + return self.getToken(Parser.NUMBER_CONSTANT, 0) + + def MINUS(self): + return self.getToken(Parser.MINUS, 0) + + def PLUS(self): + 
return self.getToken(Parser.PLUS, 0) + + def getRuleIndex(self): + return Parser.RULE_signedNumber + + def enterRule(self, listener: ParseTreeListener): + if hasattr(listener, "enterSignedNumber"): + listener.enterSignedNumber(self) + + def exitRule(self, listener: ParseTreeListener): + if hasattr(listener, "exitSignedNumber"): + listener.exitSignedNumber(self) + + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSignedNumber"): + return visitor.visitSignedNumber(self) + else: + return visitor.visitChildren(self) + + def signedNumber(self): + + localctx = Parser.SignedNumberContext(self, self._ctx, self.state) + self.enterRule(localctx, 108, self.RULE_signedNumber) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 1464 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la == Parser.PLUS or _la == Parser.MINUS: + self.state = 1463 + _la = self._input.LA(1) + if not (_la == Parser.PLUS or _la == Parser.MINUS): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + + self.state = 1466 + self.match(Parser.NUMBER_CONSTANT) except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -12634,14 +13723,15 @@ def signedInteger(self): return localctx class LimitClauseItemContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser + self.limitDir = None # Token - def INTEGER_CONSTANT(self): - return self.getToken(Parser.INTEGER_CONSTANT, 0) + def signedInteger(self): + return self.getTypedRuleContext(Parser.SignedIntegerContext, 0) def PRECEDING(self): return self.getToken(Parser.PRECEDING, 0) @@ -12672,53 +13762,60 @@ def 
exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitLimitClauseItem"): listener.exitLimitClauseItem(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitLimitClauseItem"): + return visitor.visitLimitClauseItem(self) + else: + return visitor.visitChildren(self) + def limitClauseItem(self): + localctx = Parser.LimitClauseItemContext(self, self._ctx, self.state) - self.enterRule(localctx, 108, self.RULE_limitClauseItem) + self.enterRule(localctx, 110, self.RULE_limitClauseItem) try: - self.state = 1471 + self.state = 1481 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 138, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 139, self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 1460 - self.match(Parser.INTEGER_CONSTANT) - self.state = 1461 - self.match(Parser.PRECEDING) + self.state = 1468 + self.signedInteger() + self.state = 1469 + localctx.limitDir = self.match(Parser.PRECEDING) pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 1462 - self.match(Parser.INTEGER_CONSTANT) - self.state = 1463 - self.match(Parser.FOLLOWING) + self.state = 1471 + self.signedInteger() + self.state = 1472 + localctx.limitDir = self.match(Parser.FOLLOWING) pass elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 1464 + self.state = 1474 self.match(Parser.CURRENT) - self.state = 1465 + self.state = 1475 self.match(Parser.DATA) - self.state = 1466 + self.state = 1476 self.match(Parser.POINT) pass elif la_ == 4: self.enterOuterAlt(localctx, 4) - self.state = 1467 + self.state = 1477 self.match(Parser.UNBOUNDED) - self.state = 1468 - self.match(Parser.PRECEDING) + self.state = 1478 + localctx.limitDir = self.match(Parser.PRECEDING) pass elif la_ == 5: self.enterOuterAlt(localctx, 5) - self.state = 1469 + self.state = 1479 self.match(Parser.UNBOUNDED) - self.state = 1470 - self.match(Parser.FOLLOWING) + self.state = 1480 + localctx.limitDir = 
self.match(Parser.FOLLOWING) pass except RecognitionException as re: @@ -12730,11 +13827,11 @@ def limitClauseItem(self): return localctx class GroupingClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_groupingClause @@ -12744,9 +13841,10 @@ def copyFrom(self, ctx: ParserRuleContext): class GroupAllContext(GroupingClauseContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.GroupingClauseContext - super().__init__(ANTLRParser) + super().__init__(parser) + self.delim = None # Token self.copyFrom(ctx) def GROUP(self): @@ -12755,8 +13853,38 @@ def GROUP(self): def ALL(self): return self.getToken(Parser.ALL, 0) - def exprComponent(self): - return self.getTypedRuleContext(Parser.ExprComponentContext, 0) + def TIME_AGG(self): + return self.getToken(Parser.TIME_AGG, 0) + + def LPAREN(self): + return self.getToken(Parser.LPAREN, 0) + + def STRING_CONSTANT(self, i: int = None): + if i is None: + return self.getTokens(Parser.STRING_CONSTANT) + else: + return self.getToken(Parser.STRING_CONSTANT, i) + + def RPAREN(self): + return self.getToken(Parser.RPAREN, 0) + + def COMMA(self, i: int = None): + if i is None: + return self.getTokens(Parser.COMMA) + else: + return self.getToken(Parser.COMMA, i) + + def optionalExpr(self): + return self.getTypedRuleContext(Parser.OptionalExprContext, 0) + + def OPTIONAL(self): + return self.getToken(Parser.OPTIONAL, 0) + + def FIRST(self): + return self.getToken(Parser.FIRST, 0) + + def LAST(self): + return self.getToken(Parser.LAST, 0) def enterRule(self, listener: ParseTreeListener): if hasattr(listener, 
"enterGroupAll"): @@ -12766,12 +13894,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitGroupAll"): listener.exitGroupAll(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitGroupAll"): + return visitor.visitGroupAll(self) + else: + return visitor.visitChildren(self) + class GroupByOrExceptContext(GroupingClauseContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.GroupingClauseContext - super().__init__(ANTLRParser) + super().__init__(parser) self.op = None # Token + self.delim = None # Token self.copyFrom(ctx) def GROUP(self): @@ -12795,6 +13930,24 @@ def COMMA(self, i: int = None): else: return self.getToken(Parser.COMMA, i) + def TIME_AGG(self): + return self.getToken(Parser.TIME_AGG, 0) + + def LPAREN(self): + return self.getToken(Parser.LPAREN, 0) + + def STRING_CONSTANT(self): + return self.getToken(Parser.STRING_CONSTANT, 0) + + def RPAREN(self): + return self.getToken(Parser.RPAREN, 0) + + def FIRST(self): + return self.getToken(Parser.FIRST, 0) + + def LAST(self): + return self.getToken(Parser.LAST, 0) + def enterRule(self, listener: ParseTreeListener): if hasattr(listener, "enterGroupByOrExcept"): listener.enterGroupByOrExcept(self) @@ -12803,20 +13956,27 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitGroupByOrExcept"): listener.exitGroupByOrExcept(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitGroupByOrExcept"): + return visitor.visitGroupByOrExcept(self) + else: + return visitor.visitChildren(self) + def groupingClause(self): + localctx = Parser.GroupingClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 110, self.RULE_groupingClause) + self.enterRule(localctx, 112, self.RULE_groupingClause) self._la = 0 # Token type try: - self.state = 1486 + self.state = 1523 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 
140, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 147, self._ctx) if la_ == 1: localctx = Parser.GroupByOrExceptContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1473 + self.state = 1483 self.match(Parser.GROUP) - self.state = 1474 + self.state = 1484 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.BY or _la == Parser.EXCEPT): @@ -12824,31 +13984,108 @@ def groupingClause(self): else: self._errHandler.reportMatch(self) self.consume() - self.state = 1475 + self.state = 1485 self.componentID() - self.state = 1480 + self.state = 1490 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1476 + self.state = 1486 self.match(Parser.COMMA) - self.state = 1477 + self.state = 1487 self.componentID() - self.state = 1482 + self.state = 1492 + self._errHandler.sync(self) + _la = self._input.LA(1) + + self.state = 1501 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la == Parser.TIME_AGG: + self.state = 1493 + self.match(Parser.TIME_AGG) + self.state = 1494 + self.match(Parser.LPAREN) + self.state = 1495 + self.match(Parser.STRING_CONSTANT) + self.state = 1498 self._errHandler.sync(self) _la = self._input.LA(1) + if _la == Parser.COMMA: + self.state = 1496 + self.match(Parser.COMMA) + self.state = 1497 + localctx.delim = self._input.LT(1) + _la = self._input.LA(1) + if not (_la == Parser.FIRST or _la == Parser.LAST): + localctx.delim = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + + self.state = 1500 + self.match(Parser.RPAREN) pass elif la_ == 2: localctx = Parser.GroupAllContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1483 + self.state = 1503 self.match(Parser.GROUP) - self.state = 1484 + self.state = 1504 self.match(Parser.ALL) - self.state = 1485 - self.exprComponent(0) + self.state = 1521 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la == Parser.TIME_AGG: + 
self.state = 1505 + self.match(Parser.TIME_AGG) + self.state = 1506 + self.match(Parser.LPAREN) + self.state = 1507 + self.match(Parser.STRING_CONSTANT) + self.state = 1510 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input, 143, self._ctx) + if la_ == 1: + self.state = 1508 + self.match(Parser.COMMA) + self.state = 1509 + _la = self._input.LA(1) + if not (_la == Parser.OPTIONAL or _la == Parser.STRING_CONSTANT): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + + self.state = 1514 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input, 144, self._ctx) + if la_ == 1: + self.state = 1512 + self.match(Parser.COMMA) + self.state = 1513 + self.optionalExpr() + + self.state = 1518 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la == Parser.COMMA: + self.state = 1516 + self.match(Parser.COMMA) + self.state = 1517 + localctx.delim = self._input.LT(1) + _la = self._input.LA(1) + if not (_la == Parser.FIRST or _la == Parser.LAST): + localctx.delim = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + + self.state = 1520 + self.match(Parser.RPAREN) + pass except RecognitionException as re: @@ -12860,11 +14097,11 @@ def groupingClause(self): return localctx class HavingClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def HAVING(self): return self.getToken(Parser.HAVING, 0) @@ -12883,14 +14120,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitHavingClause"): listener.exitHavingClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, 
"visitHavingClause"): + return visitor.visitHavingClause(self) + else: + return visitor.visitChildren(self) + def havingClause(self): + localctx = Parser.HavingClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 112, self.RULE_havingClause) + self.enterRule(localctx, 114, self.RULE_havingClause) try: self.enterOuterAlt(localctx, 1) - self.state = 1488 + self.state = 1525 self.match(Parser.HAVING) - self.state = 1489 + self.state = 1526 self.exprComponent(0) except RecognitionException as re: localctx.exception = re @@ -12901,11 +14145,11 @@ def havingClause(self): return localctx class ParameterItemContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def varID(self): return self.getTypedRuleContext(Parser.VarIDContext, 0) @@ -12930,23 +14174,30 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitParameterItem"): listener.exitParameterItem(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitParameterItem"): + return visitor.visitParameterItem(self) + else: + return visitor.visitChildren(self) + def parameterItem(self): + localctx = Parser.ParameterItemContext(self, self._ctx, self.state) - self.enterRule(localctx, 114, self.RULE_parameterItem) + self.enterRule(localctx, 116, self.RULE_parameterItem) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1491 + self.state = 1528 self.varID() - self.state = 1492 + self.state = 1529 self.inputParameterType() - self.state = 1495 + self.state = 1532 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.DEFAULT: - self.state = 1493 + self.state = 1530 self.match(Parser.DEFAULT) - self.state = 1494 + self.state = 
1531 self.scalarItem() except RecognitionException as re: @@ -12958,11 +14209,11 @@ def parameterItem(self): return localctx class OutputParameterTypeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def scalarType(self): return self.getTypedRuleContext(Parser.ScalarTypeContext, 0) @@ -12984,11 +14235,18 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitOutputParameterType"): listener.exitOutputParameterType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitOutputParameterType"): + return visitor.visitOutputParameterType(self) + else: + return visitor.visitChildren(self) + def outputParameterType(self): + localctx = Parser.OutputParameterTypeContext(self, self._ctx, self.state) - self.enterRule(localctx, 116, self.RULE_outputParameterType) + self.enterRule(localctx, 118, self.RULE_outputParameterType) try: - self.state = 1500 + self.state = 1537 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -13004,12 +14262,12 @@ def outputParameterType(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 1) - self.state = 1497 + self.state = 1534 self.scalarType() pass elif token in [Parser.DATASET]: self.enterOuterAlt(localctx, 2) - self.state = 1498 + self.state = 1535 self.datasetType() pass elif token in [ @@ -13020,7 +14278,7 @@ def outputParameterType(self): Parser.COMPONENT, ]: self.enterOuterAlt(localctx, 3) - self.state = 1499 + self.state = 1536 self.componentType() pass else: @@ -13035,11 +14293,11 @@ def outputParameterType(self): return localctx class OutputParameterTypeComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, 
ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def componentType(self): return self.getTypedRuleContext(Parser.ComponentTypeContext, 0) @@ -13058,11 +14316,18 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitOutputParameterTypeComponent"): listener.exitOutputParameterTypeComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitOutputParameterTypeComponent"): + return visitor.visitOutputParameterTypeComponent(self) + else: + return visitor.visitChildren(self) + def outputParameterTypeComponent(self): + localctx = Parser.OutputParameterTypeComponentContext(self, self._ctx, self.state) - self.enterRule(localctx, 118, self.RULE_outputParameterTypeComponent) + self.enterRule(localctx, 120, self.RULE_outputParameterTypeComponent) try: - self.state = 1504 + self.state = 1541 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -13073,7 +14338,7 @@ def outputParameterTypeComponent(self): Parser.COMPONENT, ]: self.enterOuterAlt(localctx, 1) - self.state = 1502 + self.state = 1539 self.componentType() pass elif token in [ @@ -13089,7 +14354,7 @@ def outputParameterTypeComponent(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 2) - self.state = 1503 + self.state = 1540 self.scalarType() pass else: @@ -13104,11 +14369,11 @@ def outputParameterTypeComponent(self): return localctx class InputParameterTypeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def scalarType(self): return 
self.getTypedRuleContext(Parser.ScalarTypeContext, 0) @@ -13136,11 +14401,18 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitInputParameterType"): listener.exitInputParameterType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitInputParameterType"): + return visitor.visitInputParameterType(self) + else: + return visitor.visitChildren(self) + def inputParameterType(self): + localctx = Parser.InputParameterTypeContext(self, self._ctx, self.state) - self.enterRule(localctx, 120, self.RULE_inputParameterType) + self.enterRule(localctx, 122, self.RULE_inputParameterType) try: - self.state = 1511 + self.state = 1548 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -13156,17 +14428,17 @@ def inputParameterType(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 1) - self.state = 1506 + self.state = 1543 self.scalarType() pass elif token in [Parser.DATASET]: self.enterOuterAlt(localctx, 2) - self.state = 1507 + self.state = 1544 self.datasetType() pass elif token in [Parser.SET]: self.enterOuterAlt(localctx, 3) - self.state = 1508 + self.state = 1545 self.scalarSetType() pass elif token in [ @@ -13179,7 +14451,7 @@ def inputParameterType(self): Parser.HIERARCHICAL_ON_VAR, ]: self.enterOuterAlt(localctx, 4) - self.state = 1509 + self.state = 1546 self.rulesetType() pass elif token in [ @@ -13190,7 +14462,7 @@ def inputParameterType(self): Parser.COMPONENT, ]: self.enterOuterAlt(localctx, 5) - self.state = 1510 + self.state = 1547 self.componentType() pass else: @@ -13205,11 +14477,11 @@ def inputParameterType(self): return localctx class RulesetTypeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + 
self.parser = parser def RULESET(self): return self.getToken(Parser.RULESET, 0) @@ -13231,21 +14503,28 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRulesetType"): listener.exitRulesetType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRulesetType"): + return visitor.visitRulesetType(self) + else: + return visitor.visitChildren(self) + def rulesetType(self): + localctx = Parser.RulesetTypeContext(self, self._ctx, self.state) - self.enterRule(localctx, 122, self.RULE_rulesetType) + self.enterRule(localctx, 124, self.RULE_rulesetType) try: - self.state = 1516 + self.state = 1553 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.RULESET]: self.enterOuterAlt(localctx, 1) - self.state = 1513 + self.state = 1550 self.match(Parser.RULESET) pass elif token in [Parser.DATAPOINT, Parser.DATAPOINT_ON_VD, Parser.DATAPOINT_ON_VAR]: self.enterOuterAlt(localctx, 2) - self.state = 1514 + self.state = 1551 self.dpRuleset() pass elif token in [ @@ -13254,7 +14533,7 @@ def rulesetType(self): Parser.HIERARCHICAL_ON_VAR, ]: self.enterOuterAlt(localctx, 3) - self.state = 1515 + self.state = 1552 self.hrRuleset() pass else: @@ -13269,11 +14548,11 @@ def rulesetType(self): return localctx class ScalarTypeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def basicScalarType(self): return self.getTypedRuleContext(Parser.BasicScalarTypeContext, 0) @@ -13301,13 +14580,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitScalarType"): listener.exitScalarType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitScalarType"): + return 
visitor.visitScalarType(self) + else: + return visitor.visitChildren(self) + def scalarType(self): + localctx = Parser.ScalarTypeContext(self, self._ctx, self.state) - self.enterRule(localctx, 124, self.RULE_scalarType) + self.enterRule(localctx, 126, self.RULE_scalarType) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1520 + self.state = 1557 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -13321,35 +14607,35 @@ def scalarType(self): Parser.DURATION, Parser.SCALAR, ]: - self.state = 1518 + self.state = 1555 self.basicScalarType() pass elif token in [Parser.IDENTIFIER]: - self.state = 1519 + self.state = 1556 self.valueDomainName() pass else: raise NoViableAltException(self) - self.state = 1523 + self.state = 1560 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.QLPAREN or _la == Parser.GLPAREN: - self.state = 1522 + self.state = 1559 self.scalarTypeConstraint() - self.state = 1529 + self.state = 1566 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.NOT or _la == Parser.NULL_CONSTANT: - self.state = 1526 + self.state = 1563 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.NOT: - self.state = 1525 + self.state = 1562 self.match(Parser.NOT) - self.state = 1528 + self.state = 1565 self.match(Parser.NULL_CONSTANT) except RecognitionException as re: @@ -13361,11 +14647,11 @@ def scalarType(self): return localctx class ComponentTypeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def componentRole(self): return self.getTypedRuleContext(Parser.ComponentRoleContext, 0) @@ -13390,23 +14676,30 @@ def exitRule(self, listener: ParseTreeListener): if 
hasattr(listener, "exitComponentType"): listener.exitComponentType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitComponentType"): + return visitor.visitComponentType(self) + else: + return visitor.visitChildren(self) + def componentType(self): + localctx = Parser.ComponentTypeContext(self, self._ctx, self.state) - self.enterRule(localctx, 126, self.RULE_componentType) + self.enterRule(localctx, 128, self.RULE_componentType) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1531 + self.state = 1568 self.componentRole() - self.state = 1536 + self.state = 1573 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.LT: - self.state = 1532 + self.state = 1569 self.match(Parser.LT) - self.state = 1533 + self.state = 1570 self.scalarType() - self.state = 1534 + self.state = 1571 self.match(Parser.MT) except RecognitionException as re: @@ -13418,11 +14711,11 @@ def componentType(self): return localctx class DatasetTypeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def DATASET(self): return self.getToken(Parser.DATASET, 0) @@ -13456,35 +14749,42 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDatasetType"): listener.exitDatasetType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDatasetType"): + return visitor.visitDatasetType(self) + else: + return visitor.visitChildren(self) + def datasetType(self): + localctx = Parser.DatasetTypeContext(self, self._ctx, self.state) - self.enterRule(localctx, 128, self.RULE_datasetType) + self.enterRule(localctx, 130, self.RULE_datasetType) self._la = 0 # Token type try: 
self.enterOuterAlt(localctx, 1) - self.state = 1538 + self.state = 1575 self.match(Parser.DATASET) - self.state = 1550 + self.state = 1587 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GLPAREN: - self.state = 1539 + self.state = 1576 self.match(Parser.GLPAREN) - self.state = 1540 + self.state = 1577 self.compConstraint() - self.state = 1545 + self.state = 1582 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1541 + self.state = 1578 self.match(Parser.COMMA) - self.state = 1542 + self.state = 1579 self.compConstraint() - self.state = 1547 + self.state = 1584 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1548 + self.state = 1585 self.match(Parser.GRPAREN) except RecognitionException as re: @@ -13496,11 +14796,11 @@ def datasetType(self): return localctx class EvalDatasetTypeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def datasetType(self): return self.getTypedRuleContext(Parser.DatasetTypeContext, 0) @@ -13519,16 +14819,23 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitEvalDatasetType"): listener.exitEvalDatasetType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitEvalDatasetType"): + return visitor.visitEvalDatasetType(self) + else: + return visitor.visitChildren(self) + def evalDatasetType(self): + localctx = Parser.EvalDatasetTypeContext(self, self._ctx, self.state) - self.enterRule(localctx, 130, self.RULE_evalDatasetType) + self.enterRule(localctx, 132, self.RULE_evalDatasetType) try: - self.state = 1554 + self.state = 1591 self._errHandler.sync(self) token = self._input.LA(1) if token in 
[Parser.DATASET]: self.enterOuterAlt(localctx, 1) - self.state = 1552 + self.state = 1589 self.datasetType() pass elif token in [ @@ -13544,7 +14851,7 @@ def evalDatasetType(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 2) - self.state = 1553 + self.state = 1590 self.scalarType() pass else: @@ -13559,11 +14866,11 @@ def evalDatasetType(self): return localctx class ScalarSetTypeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def SET(self): return self.getToken(Parser.SET, 0) @@ -13588,23 +14895,30 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitScalarSetType"): listener.exitScalarSetType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitScalarSetType"): + return visitor.visitScalarSetType(self) + else: + return visitor.visitChildren(self) + def scalarSetType(self): + localctx = Parser.ScalarSetTypeContext(self, self._ctx, self.state) - self.enterRule(localctx, 132, self.RULE_scalarSetType) + self.enterRule(localctx, 134, self.RULE_scalarSetType) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1556 + self.state = 1593 self.match(Parser.SET) - self.state = 1561 + self.state = 1598 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.LT: - self.state = 1557 + self.state = 1594 self.match(Parser.LT) - self.state = 1558 + self.state = 1595 self.scalarType() - self.state = 1559 + self.state = 1596 self.match(Parser.MT) except RecognitionException as re: @@ -13616,11 +14930,11 @@ def scalarSetType(self): return localctx class DpRulesetContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, 
parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_dpRuleset @@ -13629,10 +14943,8 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class DataPointVdContext(DpRulesetContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.DpRulesetContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.DpRulesetContext + super().__init__(parser) self.copyFrom(ctx) def DATAPOINT_ON_VD(self): @@ -13664,11 +14976,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDataPointVd"): listener.exitDataPointVd(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDataPointVd"): + return visitor.visitDataPointVd(self) + else: + return visitor.visitChildren(self) + class DataPointVarContext(DpRulesetContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.DpRulesetContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.DpRulesetContext + super().__init__(parser) self.copyFrom(ctx) def DATAPOINT_ON_VAR(self): @@ -13700,11 +15016,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDataPointVar"): listener.exitDataPointVar(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDataPointVar"): + return visitor.visitDataPointVar(self) + else: + return visitor.visitChildren(self) + class DataPointContext(DpRulesetContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.DpRulesetContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.DpRulesetContext + 
super().__init__(parser) self.copyFrom(ctx) def DATAPOINT(self): @@ -13718,75 +15038,82 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitDataPoint"): listener.exitDataPoint(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitDataPoint"): + return visitor.visitDataPoint(self) + else: + return visitor.visitChildren(self) + def dpRuleset(self): + localctx = Parser.DpRulesetContext(self, self._ctx, self.state) - self.enterRule(localctx, 134, self.RULE_dpRuleset) + self.enterRule(localctx, 136, self.RULE_dpRuleset) self._la = 0 # Token type try: - self.state = 1592 + self.state = 1629 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.DATAPOINT]: localctx = Parser.DataPointContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1563 + self.state = 1600 self.match(Parser.DATAPOINT) pass elif token in [Parser.DATAPOINT_ON_VD]: localctx = Parser.DataPointVdContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1564 + self.state = 1601 self.match(Parser.DATAPOINT_ON_VD) - self.state = 1576 + self.state = 1613 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GLPAREN: - self.state = 1565 + self.state = 1602 self.match(Parser.GLPAREN) - self.state = 1566 + self.state = 1603 self.valueDomainName() - self.state = 1571 + self.state = 1608 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.MUL: - self.state = 1567 + self.state = 1604 self.match(Parser.MUL) - self.state = 1568 + self.state = 1605 self.valueDomainName() - self.state = 1573 + self.state = 1610 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1574 + self.state = 1611 self.match(Parser.GRPAREN) pass elif token in [Parser.DATAPOINT_ON_VAR]: localctx = Parser.DataPointVarContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 1578 + self.state = 1615 self.match(Parser.DATAPOINT_ON_VAR) - self.state = 1590 + self.state = 1627 
self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GLPAREN: - self.state = 1579 + self.state = 1616 self.match(Parser.GLPAREN) - self.state = 1580 + self.state = 1617 self.varID() - self.state = 1585 + self.state = 1622 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.MUL: - self.state = 1581 + self.state = 1618 self.match(Parser.MUL) - self.state = 1582 + self.state = 1619 self.varID() - self.state = 1587 + self.state = 1624 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1588 + self.state = 1625 self.match(Parser.GRPAREN) pass @@ -13802,11 +15129,11 @@ def dpRuleset(self): return localctx class HrRulesetContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_hrRuleset @@ -13815,10 +15142,8 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class HrRulesetVdTypeContext(HrRulesetContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.HrRulesetContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.HrRulesetContext + super().__init__(parser) self.vdName = None # Token self.copyFrom(ctx) @@ -13860,11 +15185,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitHrRulesetVdType"): listener.exitHrRulesetVdType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitHrRulesetVdType"): + return visitor.visitHrRulesetVdType(self) + else: + return visitor.visitChildren(self) + class HrRulesetVarTypeContext(HrRulesetContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a 
Parser.HrRulesetContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.HrRulesetContext + super().__init__(parser) self.varName = None # VarIDContext self.copyFrom(ctx) @@ -13903,11 +15232,15 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitHrRulesetVarType"): listener.exitHrRulesetVarType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitHrRulesetVarType"): + return visitor.visitHrRulesetVarType(self) + else: + return visitor.visitChildren(self) + class HrRulesetTypeContext(HrRulesetContext): - def __init__( - self, ANTLRParser, ctx: ParserRuleContext - ): # actually a Parser.HrRulesetContext - super().__init__(ANTLRParser) + def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.HrRulesetContext + super().__init__(parser) self.copyFrom(ctx) def HIERARCHICAL(self): @@ -13921,97 +15254,104 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitHrRulesetType"): listener.exitHrRulesetType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitHrRulesetType"): + return visitor.visitHrRulesetType(self) + else: + return visitor.visitChildren(self) + def hrRuleset(self): + localctx = Parser.HrRulesetContext(self, self._ctx, self.state) - self.enterRule(localctx, 136, self.RULE_hrRuleset) + self.enterRule(localctx, 138, self.RULE_hrRuleset) self._la = 0 # Token type try: - self.state = 1634 + self.state = 1671 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.HIERARCHICAL]: localctx = Parser.HrRulesetTypeContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1594 + self.state = 1631 self.match(Parser.HIERARCHICAL) pass elif token in [Parser.HIERARCHICAL_ON_VD]: localctx = Parser.HrRulesetVdTypeContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1595 + self.state = 1632 self.match(Parser.HIERARCHICAL_ON_VD) - self.state = 1612 + 
self.state = 1649 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GLPAREN: - self.state = 1596 + self.state = 1633 self.match(Parser.GLPAREN) - self.state = 1597 + self.state = 1634 localctx.vdName = self.match(Parser.IDENTIFIER) - self.state = 1609 + self.state = 1646 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.LPAREN: - self.state = 1598 + self.state = 1635 self.match(Parser.LPAREN) - self.state = 1599 + self.state = 1636 self.valueDomainName() - self.state = 1604 + self.state = 1641 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.MUL: - self.state = 1600 + self.state = 1637 self.match(Parser.MUL) - self.state = 1601 + self.state = 1638 self.valueDomainName() - self.state = 1606 + self.state = 1643 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1607 + self.state = 1644 self.match(Parser.RPAREN) - self.state = 1611 + self.state = 1648 self.match(Parser.GRPAREN) pass elif token in [Parser.HIERARCHICAL_ON_VAR]: localctx = Parser.HrRulesetVarTypeContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 1614 + self.state = 1651 self.match(Parser.HIERARCHICAL_ON_VAR) - self.state = 1632 + self.state = 1669 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GLPAREN: - self.state = 1615 + self.state = 1652 self.match(Parser.GLPAREN) - self.state = 1616 + self.state = 1653 localctx.varName = self.varID() - self.state = 1628 + self.state = 1665 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.LPAREN: - self.state = 1617 + self.state = 1654 self.match(Parser.LPAREN) - self.state = 1618 + self.state = 1655 self.varID() - self.state = 1623 + self.state = 1660 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.MUL: - self.state = 1619 + self.state = 1656 self.match(Parser.MUL) - self.state = 1620 + self.state = 1657 self.varID() - self.state = 1625 + self.state = 1662 self._errHandler.sync(self) _la = 
self._input.LA(1) - self.state = 1626 + self.state = 1663 self.match(Parser.RPAREN) - self.state = 1630 + self.state = 1667 self.match(Parser.GRPAREN) pass @@ -14027,11 +15367,11 @@ def hrRuleset(self): return localctx class ValueDomainNameContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IDENTIFIER(self): return self.getToken(Parser.IDENTIFIER, 0) @@ -14047,12 +15387,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitValueDomainName"): listener.exitValueDomainName(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitValueDomainName"): + return visitor.visitValueDomainName(self) + else: + return visitor.visitChildren(self) + def valueDomainName(self): + localctx = Parser.ValueDomainNameContext(self, self._ctx, self.state) - self.enterRule(localctx, 138, self.RULE_valueDomainName) + self.enterRule(localctx, 140, self.RULE_valueDomainName) try: self.enterOuterAlt(localctx, 1) - self.state = 1636 + self.state = 1673 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -14063,11 +15410,11 @@ def valueDomainName(self): return localctx class RulesetIDContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IDENTIFIER(self): return self.getToken(Parser.IDENTIFIER, 0) @@ -14083,12 +15430,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, 
"exitRulesetID"): listener.exitRulesetID(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRulesetID"): + return visitor.visitRulesetID(self) + else: + return visitor.visitChildren(self) + def rulesetID(self): + localctx = Parser.RulesetIDContext(self, self._ctx, self.state) - self.enterRule(localctx, 140, self.RULE_rulesetID) + self.enterRule(localctx, 142, self.RULE_rulesetID) try: self.enterOuterAlt(localctx, 1) - self.state = 1638 + self.state = 1675 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -14099,11 +15453,11 @@ def rulesetID(self): return localctx class RulesetSignatureContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def signature(self, i: int = None): if i is None: @@ -14134,30 +15488,37 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRulesetSignature"): listener.exitRulesetSignature(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRulesetSignature"): + return visitor.visitRulesetSignature(self) + else: + return visitor.visitChildren(self) + def rulesetSignature(self): + localctx = Parser.RulesetSignatureContext(self, self._ctx, self.state) - self.enterRule(localctx, 142, self.RULE_rulesetSignature) + self.enterRule(localctx, 144, self.RULE_rulesetSignature) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1640 + self.state = 1677 _la = self._input.LA(1) if not (_la == Parser.VALUE_DOMAIN or _la == Parser.VARIABLE): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 1641 + self.state = 1678 self.signature() - self.state = 1646 + 
self.state = 1683 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1642 + self.state = 1679 self.match(Parser.COMMA) - self.state = 1643 + self.state = 1680 self.signature() - self.state = 1648 + self.state = 1685 self._errHandler.sync(self) _la = self._input.LA(1) @@ -14170,11 +15531,11 @@ def rulesetSignature(self): return localctx class SignatureContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def varID(self): return self.getTypedRuleContext(Parser.VarIDContext, 0) @@ -14196,21 +15557,28 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitSignature"): listener.exitSignature(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSignature"): + return visitor.visitSignature(self) + else: + return visitor.visitChildren(self) + def signature(self): + localctx = Parser.SignatureContext(self, self._ctx, self.state) - self.enterRule(localctx, 144, self.RULE_signature) + self.enterRule(localctx, 146, self.RULE_signature) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1649 + self.state = 1686 self.varID() - self.state = 1652 + self.state = 1689 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.AS: - self.state = 1650 + self.state = 1687 self.match(Parser.AS) - self.state = 1651 + self.state = 1688 self.alias() except RecognitionException as re: @@ -14222,11 +15590,11 @@ def signature(self): return localctx class RuleClauseDatapointContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, 
parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def ruleItemDatapoint(self, i: int = None): if i is None: @@ -14251,23 +15619,30 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRuleClauseDatapoint"): listener.exitRuleClauseDatapoint(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRuleClauseDatapoint"): + return visitor.visitRuleClauseDatapoint(self) + else: + return visitor.visitChildren(self) + def ruleClauseDatapoint(self): + localctx = Parser.RuleClauseDatapointContext(self, self._ctx, self.state) - self.enterRule(localctx, 146, self.RULE_ruleClauseDatapoint) + self.enterRule(localctx, 148, self.RULE_ruleClauseDatapoint) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1654 + self.state = 1691 self.ruleItemDatapoint() - self.state = 1659 + self.state = 1696 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.EOL: - self.state = 1655 + self.state = 1692 self.match(Parser.EOL) - self.state = 1656 + self.state = 1693 self.ruleItemDatapoint() - self.state = 1661 + self.state = 1698 self._errHandler.sync(self) _la = self._input.LA(1) @@ -14280,11 +15655,11 @@ def ruleClauseDatapoint(self): return localctx class RuleItemDatapointContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser self.ruleName = None # Token self.antecedentContiditon = None # ExprComponentContext self.consequentCondition = None # ExprComponentContext @@ -14324,46 +15699,53 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRuleItemDatapoint"): 
listener.exitRuleItemDatapoint(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRuleItemDatapoint"): + return visitor.visitRuleItemDatapoint(self) + else: + return visitor.visitChildren(self) + def ruleItemDatapoint(self): + localctx = Parser.RuleItemDatapointContext(self, self._ctx, self.state) - self.enterRule(localctx, 148, self.RULE_ruleItemDatapoint) + self.enterRule(localctx, 150, self.RULE_ruleItemDatapoint) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1664 + self.state = 1701 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 170, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 177, self._ctx) if la_ == 1: - self.state = 1662 + self.state = 1699 localctx.ruleName = self.match(Parser.IDENTIFIER) - self.state = 1663 + self.state = 1700 self.match(Parser.COLON) - self.state = 1670 + self.state = 1707 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.WHEN: - self.state = 1666 + self.state = 1703 self.match(Parser.WHEN) - self.state = 1667 + self.state = 1704 localctx.antecedentContiditon = self.exprComponent(0) - self.state = 1668 + self.state = 1705 self.match(Parser.THEN) - self.state = 1672 + self.state = 1709 localctx.consequentCondition = self.exprComponent(0) - self.state = 1674 + self.state = 1711 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ERRORCODE: - self.state = 1673 + self.state = 1710 self.erCode() - self.state = 1677 + self.state = 1714 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ERRORLEVEL: - self.state = 1676 + self.state = 1713 self.erLevel() except RecognitionException as re: @@ -14375,11 +15757,11 @@ def ruleItemDatapoint(self): return localctx class RuleClauseHierarchicalContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, 
parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def ruleItemHierarchical(self, i: int = None): if i is None: @@ -14404,23 +15786,30 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRuleClauseHierarchical"): listener.exitRuleClauseHierarchical(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRuleClauseHierarchical"): + return visitor.visitRuleClauseHierarchical(self) + else: + return visitor.visitChildren(self) + def ruleClauseHierarchical(self): + localctx = Parser.RuleClauseHierarchicalContext(self, self._ctx, self.state) - self.enterRule(localctx, 150, self.RULE_ruleClauseHierarchical) + self.enterRule(localctx, 152, self.RULE_ruleClauseHierarchical) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1679 + self.state = 1716 self.ruleItemHierarchical() - self.state = 1684 + self.state = 1721 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.EOL: - self.state = 1680 + self.state = 1717 self.match(Parser.EOL) - self.state = 1681 + self.state = 1718 self.ruleItemHierarchical() - self.state = 1686 + self.state = 1723 self._errHandler.sync(self) _la = self._input.LA(1) @@ -14433,11 +15822,11 @@ def ruleClauseHierarchical(self): return localctx class RuleItemHierarchicalContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser self.ruleName = None # Token def codeItemRelation(self): @@ -14466,35 +15855,42 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRuleItemHierarchical"): listener.exitRuleItemHierarchical(self) + def 
accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRuleItemHierarchical"): + return visitor.visitRuleItemHierarchical(self) + else: + return visitor.visitChildren(self) + def ruleItemHierarchical(self): + localctx = Parser.RuleItemHierarchicalContext(self, self._ctx, self.state) - self.enterRule(localctx, 152, self.RULE_ruleItemHierarchical) + self.enterRule(localctx, 154, self.RULE_ruleItemHierarchical) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1689 + self.state = 1726 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 175, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 182, self._ctx) if la_ == 1: - self.state = 1687 + self.state = 1724 localctx.ruleName = self.match(Parser.IDENTIFIER) - self.state = 1688 + self.state = 1725 self.match(Parser.COLON) - self.state = 1691 + self.state = 1728 self.codeItemRelation() - self.state = 1693 + self.state = 1730 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ERRORCODE: - self.state = 1692 + self.state = 1729 self.erCode() - self.state = 1696 + self.state = 1733 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ERRORLEVEL: - self.state = 1695 + self.state = 1732 self.erLevel() except RecognitionException as re: @@ -14506,11 +15902,11 @@ def ruleItemHierarchical(self): return localctx class HierRuleSignatureContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def RULE(self): return self.getToken(Parser.RULE, 0) @@ -14541,31 +15937,38 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitHierRuleSignature"): listener.exitHierRuleSignature(self) + def accept(self, 
visitor: ParseTreeVisitor): + if hasattr(visitor, "visitHierRuleSignature"): + return visitor.visitHierRuleSignature(self) + else: + return visitor.visitChildren(self) + def hierRuleSignature(self): + localctx = Parser.HierRuleSignatureContext(self, self._ctx, self.state) - self.enterRule(localctx, 154, self.RULE_hierRuleSignature) + self.enterRule(localctx, 156, self.RULE_hierRuleSignature) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1698 + self.state = 1735 _la = self._input.LA(1) if not (_la == Parser.VALUE_DOMAIN or _la == Parser.VARIABLE): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 1701 + self.state = 1738 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.CONDITION: - self.state = 1699 + self.state = 1736 self.match(Parser.CONDITION) - self.state = 1700 + self.state = 1737 self.valueDomainSignature() - self.state = 1703 + self.state = 1740 self.match(Parser.RULE) - self.state = 1704 + self.state = 1741 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -14576,11 +15979,11 @@ def hierRuleSignature(self): return localctx class ValueDomainSignatureContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def signature(self, i: int = None): if i is None: @@ -14605,23 +16008,30 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitValueDomainSignature"): listener.exitValueDomainSignature(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitValueDomainSignature"): + return visitor.visitValueDomainSignature(self) + else: + return visitor.visitChildren(self) + def 
valueDomainSignature(self): + localctx = Parser.ValueDomainSignatureContext(self, self._ctx, self.state) - self.enterRule(localctx, 156, self.RULE_valueDomainSignature) + self.enterRule(localctx, 158, self.RULE_valueDomainSignature) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1706 + self.state = 1743 self.signature() - self.state = 1711 + self.state = 1748 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1707 + self.state = 1744 self.match(Parser.COMMA) - self.state = 1708 + self.state = 1745 self.signature() - self.state = 1713 + self.state = 1750 self._errHandler.sync(self) _la = self._input.LA(1) @@ -14634,11 +16044,11 @@ def valueDomainSignature(self): return localctx class CodeItemRelationContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser self.codetemRef = None # ValueDomainValueContext def codeItemRelationClause(self, i: int = None): @@ -14673,26 +16083,33 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCodeItemRelation"): listener.exitCodeItemRelation(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCodeItemRelation"): + return visitor.visitCodeItemRelation(self) + else: + return visitor.visitChildren(self) + def codeItemRelation(self): + localctx = Parser.CodeItemRelationContext(self, self._ctx, self.state) - self.enterRule(localctx, 158, self.RULE_codeItemRelation) + self.enterRule(localctx, 160, self.RULE_codeItemRelation) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1718 + self.state = 1755 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.WHEN: - self.state = 
1714 + self.state = 1751 self.match(Parser.WHEN) - self.state = 1715 + self.state = 1752 self.exprComponent(0) - self.state = 1716 + self.state = 1753 self.match(Parser.THEN) - self.state = 1720 + self.state = 1757 localctx.codetemRef = self.valueDomainValue() - self.state = 1722 + self.state = 1759 self._errHandler.sync(self) _la = self._input.LA(1) if ((_la) & ~0x3F) == 0 and ( @@ -14706,12 +16123,12 @@ def codeItemRelation(self): | (1 << Parser.LE) ) ) != 0: - self.state = 1721 + self.state = 1758 self.comparisonOperand() - self.state = 1724 + self.state = 1761 self.codeItemRelationClause() - self.state = 1728 + self.state = 1765 self._errHandler.sync(self) _la = self._input.LA(1) while ( @@ -14730,9 +16147,9 @@ def codeItemRelation(self): != 0 ) ): - self.state = 1725 + self.state = 1762 self.codeItemRelationClause() - self.state = 1730 + self.state = 1767 self._errHandler.sync(self) _la = self._input.LA(1) @@ -14745,11 +16162,11 @@ def codeItemRelation(self): return localctx class CodeItemRelationClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser self.opAdd = None # Token self.rightCodeItem = None # ValueDomainValueContext self.rightCondition = None # ExprComponentContext @@ -14783,17 +16200,24 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCodeItemRelationClause"): listener.exitCodeItemRelationClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCodeItemRelationClause"): + return visitor.visitCodeItemRelationClause(self) + else: + return visitor.visitChildren(self) + def codeItemRelationClause(self): + localctx = Parser.CodeItemRelationClauseContext(self, self._ctx, self.state) - 
self.enterRule(localctx, 160, self.RULE_codeItemRelationClause) + self.enterRule(localctx, 162, self.RULE_codeItemRelationClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1732 + self.state = 1769 self._errHandler.sync(self) - _la = self._input.LA(1) - if _la == Parser.PLUS or _la == Parser.MINUS: - self.state = 1731 + la_ = self._interp.adaptivePredict(self._input, 190, self._ctx) + if la_ == 1: + self.state = 1768 localctx.opAdd = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.PLUS or _la == Parser.MINUS): @@ -14802,17 +16226,17 @@ def codeItemRelationClause(self): self._errHandler.reportMatch(self) self.consume() - self.state = 1734 + self.state = 1771 localctx.rightCodeItem = self.valueDomainValue() - self.state = 1739 + self.state = 1776 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.QLPAREN: - self.state = 1735 + self.state = 1772 self.match(Parser.QLPAREN) - self.state = 1736 + self.state = 1773 localctx.rightCondition = self.exprComponent(0) - self.state = 1737 + self.state = 1774 self.match(Parser.QRPAREN) except RecognitionException as re: @@ -14824,20 +16248,20 @@ def codeItemRelationClause(self): return localctx class ValueDomainValueContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IDENTIFIER(self): return self.getToken(Parser.IDENTIFIER, 0) - def INTEGER_CONSTANT(self): - return self.getToken(Parser.INTEGER_CONSTANT, 0) + def signedInteger(self): + return self.getTypedRuleContext(Parser.SignedIntegerContext, 0) - def NUMBER_CONSTANT(self): - return self.getToken(Parser.NUMBER_CONSTANT, 0) + def signedNumber(self): + return 
self.getTypedRuleContext(Parser.SignedNumberContext, 0) def getRuleIndex(self): return Parser.RULE_valueDomainValue @@ -14850,30 +16274,38 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitValueDomainValue"): listener.exitValueDomainValue(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitValueDomainValue"): + return visitor.visitValueDomainValue(self) + else: + return visitor.visitChildren(self) + def valueDomainValue(self): + localctx = Parser.ValueDomainValueContext(self, self._ctx, self.state) - self.enterRule(localctx, 162, self.RULE_valueDomainValue) - self._la = 0 # Token type + self.enterRule(localctx, 164, self.RULE_valueDomainValue) try: - self.enterOuterAlt(localctx, 1) - self.state = 1741 - _la = self._input.LA(1) - if not ( - ((_la - 241) & ~0x3F) == 0 - and ( - (1 << (_la - 241)) - & ( - (1 << (Parser.INTEGER_CONSTANT - 241)) - | (1 << (Parser.NUMBER_CONSTANT - 241)) - | (1 << (Parser.IDENTIFIER - 241)) - ) - ) - != 0 - ): - self._errHandler.recoverInline(self) - else: - self._errHandler.reportMatch(self) - self.consume() + self.state = 1781 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input, 192, self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 1778 + self.match(Parser.IDENTIFIER) + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 1779 + self.signedInteger() + pass + + elif la_ == 3: + self.enterOuterAlt(localctx, 3) + self.state = 1780 + self.signedNumber() + pass + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, re) @@ -14883,11 +16315,11 @@ def valueDomainValue(self): return localctx class ScalarTypeConstraintContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): 
super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def getRuleIndex(self): return Parser.RULE_scalarTypeConstraint @@ -14897,9 +16329,9 @@ def copyFrom(self, ctx: ParserRuleContext): class RangeConstraintContext(ScalarTypeConstraintContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ScalarTypeConstraintContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def GLPAREN(self): @@ -14928,11 +16360,17 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRangeConstraint"): listener.exitRangeConstraint(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRangeConstraint"): + return visitor.visitRangeConstraint(self) + else: + return visitor.visitChildren(self) + class ConditionConstraintContext(ScalarTypeConstraintContext): def __init__( - self, ANTLRParser, ctx: ParserRuleContext + self, parser, ctx: ParserRuleContext ): # actually a Parser.ScalarTypeConstraintContext - super().__init__(ANTLRParser) + super().__init__(parser) self.copyFrom(ctx) def QLPAREN(self): @@ -14952,44 +16390,51 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitConditionConstraint"): listener.exitConditionConstraint(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitConditionConstraint"): + return visitor.visitConditionConstraint(self) + else: + return visitor.visitChildren(self) + def scalarTypeConstraint(self): + localctx = Parser.ScalarTypeConstraintContext(self, self._ctx, self.state) - self.enterRule(localctx, 164, self.RULE_scalarTypeConstraint) + self.enterRule(localctx, 166, self.RULE_scalarTypeConstraint) self._la = 0 # Token type try: - self.state = 1758 + self.state = 1798 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.QLPAREN]: localctx = Parser.ConditionConstraintContext(self, localctx) 
self.enterOuterAlt(localctx, 1) - self.state = 1743 + self.state = 1783 self.match(Parser.QLPAREN) - self.state = 1744 + self.state = 1784 self.exprComponent(0) - self.state = 1745 + self.state = 1785 self.match(Parser.QRPAREN) pass elif token in [Parser.GLPAREN]: localctx = Parser.RangeConstraintContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1747 + self.state = 1787 self.match(Parser.GLPAREN) - self.state = 1748 + self.state = 1788 self.scalarItem() - self.state = 1753 + self.state = 1793 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1749 + self.state = 1789 self.match(Parser.COMMA) - self.state = 1750 + self.state = 1790 self.scalarItem() - self.state = 1755 + self.state = 1795 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1756 + self.state = 1796 self.match(Parser.GRPAREN) pass else: @@ -15004,11 +16449,11 @@ def scalarTypeConstraint(self): return localctx class CompConstraintContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def componentType(self): return self.getTypedRuleContext(Parser.ComponentTypeContext, 0) @@ -15030,22 +16475,29 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitCompConstraint"): listener.exitCompConstraint(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitCompConstraint"): + return visitor.visitCompConstraint(self) + else: + return visitor.visitChildren(self) + def compConstraint(self): + localctx = Parser.CompConstraintContext(self, self._ctx, self.state) - self.enterRule(localctx, 166, self.RULE_compConstraint) + self.enterRule(localctx, 168, self.RULE_compConstraint) try: 
self.enterOuterAlt(localctx, 1) - self.state = 1760 + self.state = 1800 self.componentType() - self.state = 1763 + self.state = 1803 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.IDENTIFIER]: - self.state = 1761 + self.state = 1801 self.componentID() pass elif token in [Parser.OPTIONAL]: - self.state = 1762 + self.state = 1802 self.multModifier() pass else: @@ -15060,11 +16512,11 @@ def compConstraint(self): return localctx class MultModifierContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def OPTIONAL(self): return self.getToken(Parser.OPTIONAL, 0) @@ -15086,19 +16538,26 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitMultModifier"): listener.exitMultModifier(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitMultModifier"): + return visitor.visitMultModifier(self) + else: + return visitor.visitChildren(self) + def multModifier(self): + localctx = Parser.MultModifierContext(self, self._ctx, self.state) - self.enterRule(localctx, 168, self.RULE_multModifier) + self.enterRule(localctx, 170, self.RULE_multModifier) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1765 + self.state = 1805 self.match(Parser.OPTIONAL) - self.state = 1767 + self.state = 1807 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.PLUS or _la == Parser.MUL: - self.state = 1766 + self.state = 1806 _la = self._input.LA(1) if not (_la == Parser.PLUS or _la == Parser.MUL): self._errHandler.recoverInline(self) @@ -15115,11 +16574,11 @@ def multModifier(self): return localctx class ValidationOutputContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ 
= "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def INVALID(self): return self.getToken(Parser.INVALID, 0) @@ -15141,13 +16600,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitValidationOutput"): listener.exitValidationOutput(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitValidationOutput"): + return visitor.visitValidationOutput(self) + else: + return visitor.visitChildren(self) + def validationOutput(self): + localctx = Parser.ValidationOutputContext(self, self._ctx, self.state) - self.enterRule(localctx, 170, self.RULE_validationOutput) + self.enterRule(localctx, 172, self.RULE_validationOutput) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1769 + self.state = 1809 _la = self._input.LA(1) if not (_la == Parser.ALL or _la == Parser.INVALID or _la == Parser.ALL_MEASURES): self._errHandler.recoverInline(self) @@ -15163,11 +16629,11 @@ def validationOutput(self): return localctx class ValidationModeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def NON_NULL(self): return self.getToken(Parser.NON_NULL, 0) @@ -15198,13 +16664,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitValidationMode"): listener.exitValidationMode(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitValidationMode"): + return visitor.visitValidationMode(self) + else: + return 
visitor.visitChildren(self) + def validationMode(self): + localctx = Parser.ValidationModeContext(self, self._ctx, self.state) - self.enterRule(localctx, 172, self.RULE_validationMode) + self.enterRule(localctx, 174, self.RULE_validationMode) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1771 + self.state = 1811 _la = self._input.LA(1) if not ( ((_la - 225) & ~0x3F) == 0 @@ -15234,11 +16707,11 @@ def validationMode(self): return localctx class ConditionClauseContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def CONDITION(self): return self.getToken(Parser.CONDITION, 0) @@ -15266,25 +16739,32 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitConditionClause"): listener.exitConditionClause(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitConditionClause"): + return visitor.visitConditionClause(self) + else: + return visitor.visitChildren(self) + def conditionClause(self): + localctx = Parser.ConditionClauseContext(self, self._ctx, self.state) - self.enterRule(localctx, 174, self.RULE_conditionClause) + self.enterRule(localctx, 176, self.RULE_conditionClause) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1773 + self.state = 1813 self.match(Parser.CONDITION) - self.state = 1774 + self.state = 1814 self.componentID() - self.state = 1779 + self.state = 1819 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1775 + self.state = 1815 self.match(Parser.COMMA) - self.state = 1776 + self.state = 1816 self.componentID() - self.state = 1781 + self.state = 1821 self._errHandler.sync(self) _la = self._input.LA(1) 
@@ -15297,11 +16777,11 @@ def conditionClause(self): return localctx class InputModeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def DATASET(self): return self.getToken(Parser.DATASET, 0) @@ -15320,13 +16800,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitInputMode"): listener.exitInputMode(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitInputMode"): + return visitor.visitInputMode(self) + else: + return visitor.visitChildren(self) + def inputMode(self): + localctx = Parser.InputModeContext(self, self._ctx, self.state) - self.enterRule(localctx, 176, self.RULE_inputMode) + self.enterRule(localctx, 178, self.RULE_inputMode) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1782 + self.state = 1822 _la = self._input.LA(1) if not (_la == Parser.DATASET or _la == Parser.DATASET_PRIORITY): self._errHandler.recoverInline(self) @@ -15342,11 +16829,11 @@ def inputMode(self): return localctx class ImbalanceExprContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IMBALANCE(self): return self.getToken(Parser.IMBALANCE, 0) @@ -15365,14 +16852,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitImbalanceExpr"): listener.exitImbalanceExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitImbalanceExpr"): + return 
visitor.visitImbalanceExpr(self) + else: + return visitor.visitChildren(self) + def imbalanceExpr(self): + localctx = Parser.ImbalanceExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 178, self.RULE_imbalanceExpr) + self.enterRule(localctx, 180, self.RULE_imbalanceExpr) try: self.enterOuterAlt(localctx, 1) - self.state = 1784 + self.state = 1824 self.match(Parser.IMBALANCE) - self.state = 1785 + self.state = 1825 self.expr(0) except RecognitionException as re: localctx.exception = re @@ -15383,11 +16877,11 @@ def imbalanceExpr(self): return localctx class InputModeHierarchyContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def RULE(self): return self.getToken(Parser.RULE, 0) @@ -15409,13 +16903,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitInputModeHierarchy"): listener.exitInputModeHierarchy(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitInputModeHierarchy"): + return visitor.visitInputModeHierarchy(self) + else: + return visitor.visitChildren(self) + def inputModeHierarchy(self): + localctx = Parser.InputModeHierarchyContext(self, self._ctx, self.state) - self.enterRule(localctx, 180, self.RULE_inputModeHierarchy) + self.enterRule(localctx, 182, self.RULE_inputModeHierarchy) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1787 + self.state = 1827 _la = self._input.LA(1) if not (_la == Parser.DATASET or _la == Parser.RULE or _la == Parser.RULE_PRIORITY): self._errHandler.recoverInline(self) @@ -15431,11 +16932,11 @@ def inputModeHierarchy(self): return localctx class OutputModeHierarchyContext(ParserRuleContext): - __slots__ = "ANTLRParser" + 
__slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def COMPUTED(self): return self.getToken(Parser.COMPUTED, 0) @@ -15454,13 +16955,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitOutputModeHierarchy"): listener.exitOutputModeHierarchy(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitOutputModeHierarchy"): + return visitor.visitOutputModeHierarchy(self) + else: + return visitor.visitChildren(self) + def outputModeHierarchy(self): + localctx = Parser.OutputModeHierarchyContext(self, self._ctx, self.state) - self.enterRule(localctx, 182, self.RULE_outputModeHierarchy) + self.enterRule(localctx, 184, self.RULE_outputModeHierarchy) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1789 + self.state = 1829 _la = self._input.LA(1) if not (_la == Parser.ALL or _la == Parser.COMPUTED): self._errHandler.recoverInline(self) @@ -15476,11 +16984,11 @@ def outputModeHierarchy(self): return localctx class AliasContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IDENTIFIER(self): return self.getToken(Parser.IDENTIFIER, 0) @@ -15496,12 +17004,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitAlias"): listener.exitAlias(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitAlias"): + return visitor.visitAlias(self) + else: + return visitor.visitChildren(self) + def alias(self): + 
localctx = Parser.AliasContext(self, self._ctx, self.state) - self.enterRule(localctx, 184, self.RULE_alias) + self.enterRule(localctx, 186, self.RULE_alias) try: self.enterOuterAlt(localctx, 1) - self.state = 1791 + self.state = 1831 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -15512,11 +17027,11 @@ def alias(self): return localctx class VarIDContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IDENTIFIER(self): return self.getToken(Parser.IDENTIFIER, 0) @@ -15532,12 +17047,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitVarID"): listener.exitVarID(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitVarID"): + return visitor.visitVarID(self) + else: + return visitor.visitChildren(self) + def varID(self): + localctx = Parser.VarIDContext(self, self._ctx, self.state) - self.enterRule(localctx, 186, self.RULE_varID) + self.enterRule(localctx, 188, self.RULE_varID) try: self.enterOuterAlt(localctx, 1) - self.state = 1793 + self.state = 1833 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -15548,11 +17070,11 @@ def varID(self): return localctx class SimpleComponentIdContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IDENTIFIER(self): return self.getToken(Parser.IDENTIFIER, 0) @@ -15568,12 +17090,19 @@ def 
exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitSimpleComponentId"): listener.exitSimpleComponentId(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitSimpleComponentId"): + return visitor.visitSimpleComponentId(self) + else: + return visitor.visitChildren(self) + def simpleComponentId(self): + localctx = Parser.SimpleComponentIdContext(self, self._ctx, self.state) - self.enterRule(localctx, 188, self.RULE_simpleComponentId) + self.enterRule(localctx, 190, self.RULE_simpleComponentId) try: self.enterOuterAlt(localctx, 1) - self.state = 1795 + self.state = 1835 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -15584,11 +17113,11 @@ def simpleComponentId(self): return localctx class ComponentIDContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IDENTIFIER(self, i: int = None): if i is None: @@ -15610,20 +17139,27 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitComponentID"): listener.exitComponentID(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitComponentID"): + return visitor.visitComponentID(self) + else: + return visitor.visitChildren(self) + def componentID(self): + localctx = Parser.ComponentIDContext(self, self._ctx, self.state) - self.enterRule(localctx, 190, self.RULE_componentID) + self.enterRule(localctx, 192, self.RULE_componentID) try: self.enterOuterAlt(localctx, 1) - self.state = 1797 + self.state = 1837 self.match(Parser.IDENTIFIER) - self.state = 1800 + self.state = 1840 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 190, self._ctx) + la_ = 
self._interp.adaptivePredict(self._input, 198, self._ctx) if la_ == 1: - self.state = 1798 + self.state = 1838 self.match(Parser.MEMBERSHIP) - self.state = 1799 + self.state = 1839 self.match(Parser.IDENTIFIER) except RecognitionException as re: @@ -15635,11 +17171,11 @@ def componentID(self): return localctx class ListsContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def GLPAREN(self): return self.getToken(Parser.GLPAREN, 0) @@ -15670,29 +17206,36 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitLists"): listener.exitLists(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitLists"): + return visitor.visitLists(self) + else: + return visitor.visitChildren(self) + def lists(self): + localctx = Parser.ListsContext(self, self._ctx, self.state) - self.enterRule(localctx, 192, self.RULE_lists) + self.enterRule(localctx, 194, self.RULE_lists) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1802 + self.state = 1842 self.match(Parser.GLPAREN) - self.state = 1803 + self.state = 1843 self.scalarItem() - self.state = 1808 + self.state = 1848 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1804 + self.state = 1844 self.match(Parser.COMMA) - self.state = 1805 + self.state = 1845 self.scalarItem() - self.state = 1810 + self.state = 1850 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1811 + self.state = 1851 self.match(Parser.GRPAREN) except RecognitionException as re: localctx.exception = re @@ -15703,11 +17246,11 @@ def lists(self): return localctx class ErCodeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + 
__slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def ERRORCODE(self): return self.getToken(Parser.ERRORCODE, 0) @@ -15726,14 +17269,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitErCode"): listener.exitErCode(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitErCode"): + return visitor.visitErCode(self) + else: + return visitor.visitChildren(self) + def erCode(self): + localctx = Parser.ErCodeContext(self, self._ctx, self.state) - self.enterRule(localctx, 194, self.RULE_erCode) + self.enterRule(localctx, 196, self.RULE_erCode) try: self.enterOuterAlt(localctx, 1) - self.state = 1813 + self.state = 1853 self.match(Parser.ERRORCODE) - self.state = 1814 + self.state = 1854 self.constant() except RecognitionException as re: localctx.exception = re @@ -15744,11 +17294,11 @@ def erCode(self): return localctx class ErLevelContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def ERRORLEVEL(self): return self.getToken(Parser.ERRORLEVEL, 0) @@ -15767,14 +17317,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitErLevel"): listener.exitErLevel(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitErLevel"): + return visitor.visitErLevel(self) + else: + return visitor.visitChildren(self) + def erLevel(self): + localctx = Parser.ErLevelContext(self, self._ctx, self.state) - self.enterRule(localctx, 196, 
self.RULE_erLevel) + self.enterRule(localctx, 198, self.RULE_erLevel) try: self.enterOuterAlt(localctx, 1) - self.state = 1816 + self.state = 1856 self.match(Parser.ERRORLEVEL) - self.state = 1817 + self.state = 1857 self.constant() except RecognitionException as re: localctx.exception = re @@ -15785,11 +17342,11 @@ def erLevel(self): return localctx class ComparisonOperandContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def MT(self): return self.getToken(Parser.MT, 0) @@ -15820,13 +17377,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitComparisonOperand"): listener.exitComparisonOperand(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitComparisonOperand"): + return visitor.visitComparisonOperand(self) + else: + return visitor.visitChildren(self) + def comparisonOperand(self): + localctx = Parser.ComparisonOperandContext(self, self._ctx, self.state) - self.enterRule(localctx, 198, self.RULE_comparisonOperand) + self.enterRule(localctx, 200, self.RULE_comparisonOperand) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1819 + self.state = 1859 _la = self._input.LA(1) if not ( ((_la) & ~0x3F) == 0 @@ -15856,11 +17420,11 @@ def comparisonOperand(self): return localctx class OptionalExprContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def expr(self): return 
self.getTypedRuleContext(Parser.ExprContext, 0) @@ -15879,11 +17443,18 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitOptionalExpr"): listener.exitOptionalExpr(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitOptionalExpr"): + return visitor.visitOptionalExpr(self) + else: + return visitor.visitChildren(self) + def optionalExpr(self): + localctx = Parser.OptionalExprContext(self, self._ctx, self.state) - self.enterRule(localctx, 200, self.RULE_optionalExpr) + self.enterRule(localctx, 202, self.RULE_optionalExpr) try: - self.state = 1823 + self.state = 1863 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -15973,12 +17544,12 @@ def optionalExpr(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 1) - self.state = 1821 + self.state = 1861 self.expr(0) pass elif token in [Parser.OPTIONAL]: self.enterOuterAlt(localctx, 2) - self.state = 1822 + self.state = 1862 self.match(Parser.OPTIONAL) pass else: @@ -15993,11 +17564,11 @@ def optionalExpr(self): return localctx class OptionalExprComponentContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def exprComponent(self): return self.getTypedRuleContext(Parser.ExprComponentContext, 0) @@ -16016,11 +17587,18 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitOptionalExprComponent"): listener.exitOptionalExprComponent(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitOptionalExprComponent"): + return visitor.visitOptionalExprComponent(self) + else: + return visitor.visitChildren(self) + def optionalExprComponent(self): + localctx = Parser.OptionalExprComponentContext(self, 
self._ctx, self.state) - self.enterRule(localctx, 202, self.RULE_optionalExprComponent) + self.enterRule(localctx, 204, self.RULE_optionalExprComponent) try: - self.state = 1827 + self.state = 1867 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -16098,12 +17676,12 @@ def optionalExprComponent(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 1) - self.state = 1825 + self.state = 1865 self.exprComponent(0) pass elif token in [Parser.OPTIONAL]: self.enterOuterAlt(localctx, 2) - self.state = 1826 + self.state = 1866 self.match(Parser.OPTIONAL) pass else: @@ -16118,11 +17696,11 @@ def optionalExprComponent(self): return localctx class ComponentRoleContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def MEASURE(self): return self.getToken(Parser.MEASURE, 0) @@ -16150,36 +17728,43 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitComponentRole"): listener.exitComponentRole(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitComponentRole"): + return visitor.visitComponentRole(self) + else: + return visitor.visitChildren(self) + def componentRole(self): + localctx = Parser.ComponentRoleContext(self, self._ctx, self.state) - self.enterRule(localctx, 204, self.RULE_componentRole) + self.enterRule(localctx, 206, self.RULE_componentRole) try: - self.state = 1834 + self.state = 1874 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.MEASURE]: self.enterOuterAlt(localctx, 1) - self.state = 1829 + self.state = 1869 self.match(Parser.MEASURE) pass elif token in [Parser.COMPONENT]: self.enterOuterAlt(localctx, 2) - self.state = 1830 + self.state = 1870 
self.match(Parser.COMPONENT) pass elif token in [Parser.DIMENSION]: self.enterOuterAlt(localctx, 3) - self.state = 1831 + self.state = 1871 self.match(Parser.DIMENSION) pass elif token in [Parser.ATTRIBUTE]: self.enterOuterAlt(localctx, 4) - self.state = 1832 + self.state = 1872 self.match(Parser.ATTRIBUTE) pass elif token in [Parser.VIRAL]: self.enterOuterAlt(localctx, 5) - self.state = 1833 + self.state = 1873 self.viralAttribute() pass else: @@ -16194,11 +17779,11 @@ def componentRole(self): return localctx class ViralAttributeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def VIRAL(self): return self.getToken(Parser.VIRAL, 0) @@ -16217,14 +17802,21 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitViralAttribute"): listener.exitViralAttribute(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitViralAttribute"): + return visitor.visitViralAttribute(self) + else: + return visitor.visitChildren(self) + def viralAttribute(self): + localctx = Parser.ViralAttributeContext(self, self._ctx, self.state) - self.enterRule(localctx, 206, self.RULE_viralAttribute) + self.enterRule(localctx, 208, self.RULE_viralAttribute) try: self.enterOuterAlt(localctx, 1) - self.state = 1836 + self.state = 1876 self.match(Parser.VIRAL) - self.state = 1837 + self.state = 1877 self.match(Parser.ATTRIBUTE) except RecognitionException as re: localctx.exception = re @@ -16235,11 +17827,11 @@ def viralAttribute(self): return localctx class ValueDomainIDContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def 
__init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IDENTIFIER(self): return self.getToken(Parser.IDENTIFIER, 0) @@ -16255,12 +17847,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitValueDomainID"): listener.exitValueDomainID(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitValueDomainID"): + return visitor.visitValueDomainID(self) + else: + return visitor.visitChildren(self) + def valueDomainID(self): + localctx = Parser.ValueDomainIDContext(self, self._ctx, self.state) - self.enterRule(localctx, 208, self.RULE_valueDomainID) + self.enterRule(localctx, 210, self.RULE_valueDomainID) try: self.enterOuterAlt(localctx, 1) - self.state = 1839 + self.state = 1879 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -16271,11 +17870,11 @@ def valueDomainID(self): return localctx class OperatorIDContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IDENTIFIER(self): return self.getToken(Parser.IDENTIFIER, 0) @@ -16291,12 +17890,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitOperatorID"): listener.exitOperatorID(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitOperatorID"): + return visitor.visitOperatorID(self) + else: + return visitor.visitChildren(self) + def operatorID(self): + localctx = Parser.OperatorIDContext(self, self._ctx, self.state) - self.enterRule(localctx, 210, self.RULE_operatorID) + self.enterRule(localctx, 212, self.RULE_operatorID) try: 
self.enterOuterAlt(localctx, 1) - self.state = 1841 + self.state = 1881 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -16307,11 +17913,11 @@ def operatorID(self): return localctx class RoutineNameContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def IDENTIFIER(self): return self.getToken(Parser.IDENTIFIER, 0) @@ -16327,12 +17933,19 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRoutineName"): listener.exitRoutineName(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRoutineName"): + return visitor.visitRoutineName(self) + else: + return visitor.visitChildren(self) + def routineName(self): + localctx = Parser.RoutineNameContext(self, self._ctx, self.state) - self.enterRule(localctx, 212, self.RULE_routineName) + self.enterRule(localctx, 214, self.RULE_routineName) try: self.enterOuterAlt(localctx, 1) - self.state = 1843 + self.state = 1883 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -16343,17 +17956,17 @@ def routineName(self): return localctx class ConstantContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser - def INTEGER_CONSTANT(self): - return self.getToken(Parser.INTEGER_CONSTANT, 0) + def signedInteger(self): + return self.getTypedRuleContext(Parser.SignedIntegerContext, 0) - def NUMBER_CONSTANT(self): - 
return self.getToken(Parser.NUMBER_CONSTANT, 0) + def signedNumber(self): + return self.getTypedRuleContext(Parser.SignedNumberContext, 0) def BOOLEAN_CONSTANT(self): return self.getToken(Parser.BOOLEAN_CONSTANT, 0) @@ -16375,34 +17988,50 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitConstant"): listener.exitConstant(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitConstant"): + return visitor.visitConstant(self) + else: + return visitor.visitChildren(self) + def constant(self): + localctx = Parser.ConstantContext(self, self._ctx, self.state) - self.enterRule(localctx, 214, self.RULE_constant) - self._la = 0 # Token type + self.enterRule(localctx, 216, self.RULE_constant) try: - self.enterOuterAlt(localctx, 1) - self.state = 1845 - _la = self._input.LA(1) - if not ( - _la == Parser.NULL_CONSTANT - or ( - ((_la - 241) & ~0x3F) == 0 - and ( - (1 << (_la - 241)) - & ( - (1 << (Parser.INTEGER_CONSTANT - 241)) - | (1 << (Parser.NUMBER_CONSTANT - 241)) - | (1 << (Parser.BOOLEAN_CONSTANT - 241)) - | (1 << (Parser.STRING_CONSTANT - 241)) - ) - ) - != 0 - ) - ): - self._errHandler.recoverInline(self) - else: - self._errHandler.reportMatch(self) - self.consume() + self.state = 1890 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input, 203, self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 1885 + self.signedInteger() + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 1886 + self.signedNumber() + pass + + elif la_ == 3: + self.enterOuterAlt(localctx, 3) + self.state = 1887 + self.match(Parser.BOOLEAN_CONSTANT) + pass + + elif la_ == 4: + self.enterOuterAlt(localctx, 4) + self.state = 1888 + self.match(Parser.STRING_CONSTANT) + pass + + elif la_ == 5: + self.enterOuterAlt(localctx, 5) + self.state = 1889 + self.match(Parser.NULL_CONSTANT) + pass + except RecognitionException as re: localctx.exception = re self._errHandler.reportError(self, 
re) @@ -16412,11 +18041,11 @@ def constant(self): return localctx class BasicScalarTypeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def STRING(self): return self.getToken(Parser.STRING, 0) @@ -16456,13 +18085,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitBasicScalarType"): listener.exitBasicScalarType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitBasicScalarType"): + return visitor.visitBasicScalarType(self) + else: + return visitor.visitChildren(self) + def basicScalarType(self): + localctx = Parser.BasicScalarTypeContext(self, self._ctx, self.state) - self.enterRule(localctx, 216, self.RULE_basicScalarType) + self.enterRule(localctx, 218, self.RULE_basicScalarType) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1847 + self.state = 1892 _la = self._input.LA(1) if not ( ( @@ -16497,11 +18133,11 @@ def basicScalarType(self): return localctx class RetainTypeContext(ParserRuleContext): - __slots__ = "ANTLRParser" + __slots__ = "parser" - def __init__(self, ANTLRParser, parent: ParserRuleContext = None, invokingState: int = -1): + def __init__(self, parser, parent: ParserRuleContext = None, invokingState: int = -1): super().__init__(parent, invokingState) - self.ANTLRParser = ANTLRParser + self.parser = parser def BOOLEAN_CONSTANT(self): return self.getToken(Parser.BOOLEAN_CONSTANT, 0) @@ -16520,13 +18156,20 @@ def exitRule(self, listener: ParseTreeListener): if hasattr(listener, "exitRetainType"): listener.exitRetainType(self) + def accept(self, visitor: ParseTreeVisitor): + if hasattr(visitor, "visitRetainType"): + return visitor.visitRetainType(self) + else: + 
return visitor.visitChildren(self) + def retainType(self): + localctx = Parser.RetainTypeContext(self, self._ctx, self.state) - self.enterRule(localctx, 218, self.RULE_retainType) + self.enterRule(localctx, 220, self.RULE_retainType) self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1849 + self.state = 1894 _la = self._input.LA(1) if not (_la == Parser.ALL or _la == Parser.BOOLEAN_CONSTANT): self._errHandler.recoverInline(self) diff --git a/src/vtlengine/AST/Grammar/tokens.py b/src/vtlengine/AST/Grammar/tokens.py index ec44db9c8..94a7979ae 100644 --- a/src/vtlengine/AST/Grammar/tokens.py +++ b/src/vtlengine/AST/Grammar/tokens.py @@ -93,8 +93,8 @@ CURRENT_DATE = "current_date" DATEDIFF = "datediff" DATE_ADD = "dateadd" -YEAR = "year" -MONTH = "month" +YEAR = "getyear" +MONTH = "getmonth" DAYOFMONTH = "dayofmonth" DAYOFYEAR = "dayofyear" DAYTOYEAR = "daytoyear" diff --git a/src/vtlengine/Operators/Numeric.py b/src/vtlengine/Operators/Numeric.py index a8c1f6847..e748ef969 100644 --- a/src/vtlengine/Operators/Numeric.py +++ b/src/vtlengine/Operators/Numeric.py @@ -47,7 +47,7 @@ def apply_operation_component(cls, series: Any) -> Any: if cls.pc_func is not None: arr = series.values._pa_array return pd.Series( - pd.arrays.ArrowExtensionArray(cls.pc_func(arr)), # type: ignore[attr-defined] + pd.arrays.ArrowExtensionArray(cls.pc_func(arr)), # type: ignore[attr-defined,unused-ignore] index=series.index, ) return super().apply_operation_component(series) diff --git a/tests/API/test_api.py b/tests/API/test_api.py index 98857b9ca..d27632614 100644 --- a/tests/API/test_api.py +++ b/tests/API/test_api.py @@ -1569,7 +1569,7 @@ def test_check_script_with_transformation_scheme(transformation_scheme, result_s def test_run_with_scalars(data_structures, datapoints, tmp_path): script = """ DS_r <- DS_3[filter Me_1 = sc_1]; - DS_r2 <- DS_3[sub Id_1 = sc_1]; + DS_r2 <- DS_3[sub Id_1 = 20]; Sc_r <- sc_1 + sc_2 + 3 + sc_3; Sc_r2 <- sc_1 - sc_2; Sc_r3 <- null; @@ 
-1643,7 +1643,7 @@ def test_run_with_scalars(data_structures, datapoints, tmp_path): def test_run_with_scalar_being_none(data_structures, datapoints, tmp_path): script = """ DS_r <- DS_3[filter Me_1 = sc_1]; - DS_r2 <- DS_3[sub Id_1 = sc_1]; + DS_r2 <- DS_3[sub Id_1 = 20]; Sc_r <- sc_1 + sc_2 + 3 + sc_3; """ scalars = {"sc_1": 20, "sc_2": 5, "sc_3": None} diff --git a/tests/AST/data/prettier/complete_grammar.vtl b/tests/AST/data/prettier/complete_grammar.vtl index 5d553ace1..c28cedcf4 100644 --- a/tests/AST/data/prettier/complete_grammar.vtl +++ b/tests/AST/data/prettier/complete_grammar.vtl @@ -274,8 +274,8 @@ rtrim_ds := rtrim(DS_1#Me_str); TIME *************************************************/ current_date_sc := current_date(); -year_sc := year(cast("2000-01-01", date)); -month_sc := month(cast("2012-12-25", date)); +year_sc := getyear(cast("2000-01-01", date)); +month_sc := getmonth(cast("2012-12-25", date)); dayofyear_sc := dayofyear(cast("2020-12-31", date)); dayofmonth_sc := dayofmonth(cast("2025-03-07", date)); daytoyear_sc := daytoyear(730); diff --git a/tests/AST/data/prettier/reference_complete_grammar.vtl b/tests/AST/data/prettier/reference_complete_grammar.vtl index 540658af1..3e368218e 100644 --- a/tests/AST/data/prettier/reference_complete_grammar.vtl +++ b/tests/AST/data/prettier/reference_complete_grammar.vtl @@ -594,9 +594,9 @@ TIME current_date_sc := current_date(); year_sc := - year(cast("2000-01-01", date)); + getyear(cast("2000-01-01", date)); month_sc := - month(cast("2012-12-25", date)); + getmonth(cast("2012-12-25", date)); dayofyear_sc := dayofyear(cast("2020-12-31", date)); dayofmonth_sc := diff --git a/tests/AST/data/vtl/complete_grammar.vtl b/tests/AST/data/vtl/complete_grammar.vtl index 5d553ace1..c28cedcf4 100644 --- a/tests/AST/data/vtl/complete_grammar.vtl +++ b/tests/AST/data/vtl/complete_grammar.vtl @@ -274,8 +274,8 @@ rtrim_ds := rtrim(DS_1#Me_str); TIME *************************************************/ current_date_sc := 
current_date(); -year_sc := year(cast("2000-01-01", date)); -month_sc := month(cast("2012-12-25", date)); +year_sc := getyear(cast("2000-01-01", date)); +month_sc := getmonth(cast("2012-12-25", date)); dayofyear_sc := dayofyear(cast("2020-12-31", date)); dayofmonth_sc := dayofmonth(cast("2025-03-07", date)); daytoyear_sc := daytoyear(730); diff --git a/tests/AST/data/vtl/time.vtl b/tests/AST/data/vtl/time.vtl index f326d54ba..92f033f4a 100644 --- a/tests/AST/data/vtl/time.vtl +++ b/tests/AST/data/vtl/time.vtl @@ -1,6 +1,6 @@ current_date_sc := current_date(); -year_sc := year(cast("2000-01-01", date)); -month_sc := month(cast("2012-12-25", date)); +year_sc := getyear(cast("2000-01-01", date)); +month_sc := getmonth(cast("2012-12-25", date)); dayofyear_sc := dayofyear(cast("2020-12-31", date)); dayofmonth_sc := dayofmonth(cast("2025-03-07", date)); daytoyear_sc := daytoyear(730); diff --git a/tests/Aggregate/data/DataSet/output/GH_164_2-1.csv b/tests/Aggregate/data/DataSet/output/GH_164_2-1.csv index 56a6051ca..b39a24832 100644 --- a/tests/Aggregate/data/DataSet/output/GH_164_2-1.csv +++ b/tests/Aggregate/data/DataSet/output/GH_164_2-1.csv @@ -1 +1,2 @@ -1 \ No newline at end of file +Id_2,var1,var2 +A,100,200 \ No newline at end of file diff --git a/tests/Aggregate/data/DataSet/output/GH_164_2-2.csv b/tests/Aggregate/data/DataSet/output/GH_164_2-2.csv deleted file mode 100644 index b39a24832..000000000 --- a/tests/Aggregate/data/DataSet/output/GH_164_2-2.csv +++ /dev/null @@ -1,2 +0,0 @@ -Id_2,var1,var2 -A,100,200 \ No newline at end of file diff --git a/tests/Aggregate/data/DataStructure/output/GH_164_2-1.json b/tests/Aggregate/data/DataStructure/output/GH_164_2-1.json index bf4292731..b3f3a74a0 100644 --- a/tests/Aggregate/data/DataStructure/output/GH_164_2-1.json +++ b/tests/Aggregate/data/DataStructure/output/GH_164_2-1.json @@ -1,8 +1,27 @@ { - "scalars": [ + "datasets": [ { - "name": "sc_sub", - "type": "Integer" + "name": "DS_r_sub", + "DataStructure": [ + { 
+ "name": "Id_2", + "type": "String", + "nullable": false, + "role": "Identifier" + }, + { + "name": "var1", + "type": "Integer", + "nullable": true, + "role": "Measure" + }, + { + "name": "var2", + "type": "Integer", + "nullable": true, + "role": "Measure" + } + ] } ] } \ No newline at end of file diff --git a/tests/Aggregate/data/DataStructure/output/GH_164_2-2.json b/tests/Aggregate/data/DataStructure/output/GH_164_2-2.json deleted file mode 100644 index b3f3a74a0..000000000 --- a/tests/Aggregate/data/DataStructure/output/GH_164_2-2.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "datasets": [ - { - "name": "DS_r_sub", - "DataStructure": [ - { - "name": "Id_2", - "type": "String", - "nullable": false, - "role": "Identifier" - }, - { - "name": "var1", - "type": "Integer", - "nullable": true, - "role": "Measure" - }, - { - "name": "var2", - "type": "Integer", - "nullable": true, - "role": "Measure" - } - ] - } - ] -} \ No newline at end of file diff --git a/tests/Aggregate/data/vtl/GH_164_2.vtl b/tests/Aggregate/data/vtl/GH_164_2.vtl index 2f405bd69..da8424311 100644 --- a/tests/Aggregate/data/vtl/GH_164_2.vtl +++ b/tests/Aggregate/data/vtl/GH_164_2.vtl @@ -1,2 +1 @@ -sc_sub := 1; -DS_r_sub <- DS_1[sub Id_1 = sc_sub]; \ No newline at end of file +DS_r_sub <- DS_1[sub Id_1 = 1]; \ No newline at end of file diff --git a/tests/Aggregate/test_aggregate.py b/tests/Aggregate/test_aggregate.py index b38aa4e07..f7a51cfa6 100644 --- a/tests/Aggregate/test_aggregate.py +++ b/tests/Aggregate/test_aggregate.py @@ -1300,7 +1300,7 @@ def test_GH_164_2(self): """ code = "GH_164_2" number_inputs = 1 - references_names = ["1", "2"] + references_names = ["1"] self.BaseTest( code=code, diff --git a/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl b/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl index 83c19a048..ce1f9488d 100644 --- a/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl +++ b/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl @@ -274,8 +274,8 @@ rtrim_ds := 
rtrim(DS_1#Me_str); TIME *************************************************/ current_date_sc := current_date(); -year_sc := year(cast("2000-01-01", date)); -month_sc := month(cast("2012-12-25", date)); +year_sc := getyear(cast("2000-01-01", date)); +month_sc := getmonth(cast("2012-12-25", date)); dayofyear_sc := dayofyear(cast("2020-12-31", date)); dayofmonth_sc := dayofmonth(cast("2025-03-07", date)); daytoyear_sc := daytoyear(730); diff --git a/tests/DAG/data/vtl/9.vtl b/tests/DAG/data/vtl/9.vtl index a0649b5ee..ec4e92c94 100644 --- a/tests/DAG/data/vtl/9.vtl +++ b/tests/DAG/data/vtl/9.vtl @@ -620,7 +620,7 @@ SUPDEMM.housing_dataset_final := add_general_features(SUPDEMM.housing_dataset_04); OUTPUT.housing_output_generic_01 := SUPDEMM.housing_dataset_final - [sub county = sc_selected_county] + [sub county = "selected_county"] [calc identifier did := 1]; OUTPUT.housing_output_generic_02 := add_special_features(OUTPUT.housing_output_generic_01); diff --git a/tests/DateTime/test_datetime.py b/tests/DateTime/test_datetime.py index 335f6429a..24bb3c722 100644 --- a/tests/DateTime/test_datetime.py +++ b/tests/DateTime/test_datetime.py @@ -68,10 +68,10 @@ def _to_pylist(series: pd.Series) -> List[Any]: # type: ignore[type-arg] ] scalar_time_params = [ - ('year(cast("2023-01-12T10:30:00", date))', 2023), - ('year(cast("2023-01-12 10:30:00", date))', 2023), - ('month(cast("2023-06-15T08:00:00", date))', 6), - ('month(cast("2023-06-15 08:00:00", date))', 6), + ('getyear(cast("2023-01-12T10:30:00", date))', 2023), + ('getyear(cast("2023-01-12 10:30:00", date))', 2023), + ('getmonth(cast("2023-06-15T08:00:00", date))', 6), + ('getmonth(cast("2023-06-15 08:00:00", date))', 6), ('dayofmonth(cast("2023-01-12T15:45:00", date))', 12), ('dayofmonth(cast("2023-01-12 15:45:00", date))', 12), ('dayofyear(cast("2023-02-01T23:59:59", date))', 32), @@ -169,13 +169,13 @@ def _to_pylist(series: pd.Series) -> List[Any]: # type: ignore[type-arg] dataset_extraction_params = [ pytest.param( - 
"year", + "getyear", ["2023-01-12 10:30:00", "2024-06-15 08:00:00"], [2023, 2024], id="year_from_datetime", ), pytest.param( - "month", + "getmonth", ["2023-06-15 08:00:00", "2023-12-01 10:00:00"], [6, 12], id="month_from_datetime", diff --git a/tests/NewOperators/UnaryTime/test_time_operators.py b/tests/NewOperators/UnaryTime/test_time_operators.py index 499eb03e2..094b03199 100644 --- a/tests/NewOperators/UnaryTime/test_time_operators.py +++ b/tests/NewOperators/UnaryTime/test_time_operators.py @@ -12,8 +12,8 @@ pytestmark = mark.input_path(Path(__file__).parent / "data") ds_param = [ - ("1", "DS_r := DS_1 [calc Me_2 := year(Me_1)];"), - ("2", "DS_r := DS_1[calc Me_2 := month(Me_1)];"), + ("1", "DS_r := DS_1 [calc Me_2 := getyear(Me_1)];"), + ("2", "DS_r := DS_1[calc Me_2 := getmonth(Me_1)];"), ("3", "DS_r := DS_1[calc Me_2 := dayofmonth(Me_1)];"), ("4", "DS_r := DS_1[calc Me_2 := dayofyear(Me_1)];"), ("5", "DS_r := DS_1[calc Me_2 := daytomonth(Me_1)];"), @@ -25,17 +25,17 @@ error_param = [ ("9", "DS_r := DS_1[calc Me_2 := daytomonth(Me_1)];", RunTimeError, "2-1-19-16"), ("10", "DS_r := DS_1[calc Me_2 := daytoyear(Me_1)];", RunTimeError, "2-1-19-16"), - ("13", "DS_r := DS_1 [calc Me_2 := year(Me_1)];", SemanticError, "1-1-19-10"), - ("14", "DS_r := DS_1 [calc Me_2 := month(Me_1)];", SemanticError, "1-1-19-10"), + ("13", "DS_r := DS_1 [calc Me_2 := getyear(Me_1)];", SemanticError, "1-1-19-10"), + ("14", "DS_r := DS_1 [calc Me_2 := getmonth(Me_1)];", SemanticError, "1-1-19-10"), ("15", "DS_r := DS_1 [calc Me_2 := dayofmonth(Me_1)];", SemanticError, "1-1-19-10"), ("16", "DS_r := DS_1 [calc Me_2 := dayofyear(Me_1)];", SemanticError, "1-1-19-10"), ] scalar_time_params = [ - ('year(cast("2023-01-12", date))', 2023), - ('year(cast("2022Q1", time_period))', 2022), - ('month(cast("2023-01-12", date))', 1), - ('month(cast("2022Q1", time_period))', 1), + ('getyear(cast("2023-01-12", date))', 2023), + ('getyear(cast("2022Q1", time_period))', 2022), + 
('getmonth(cast("2023-01-12", date))', 1), + ('getmonth(cast("2022Q1", time_period))', 1), ('dayofmonth(cast("2023-01-12", date))', 12), ('dayofmonth(cast("2022Q1", time_period))', 31), ('dayofyear(cast("2023-01-12", date))', 12), @@ -43,12 +43,12 @@ ] scalar_time_error_params = [ - ('year(cast("2023-01-12/2024-01-03", date))', RunTimeError, "2-1-19-8"), - ('month(cast("2023-01-12/2024-02-15", date))', RunTimeError, "2-1-19-8"), + ('getyear(cast("2023-01-12/2024-01-03", date))', RunTimeError, "2-1-19-8"), + ('getmonth(cast("2023-01-12/2024-02-15", date))', RunTimeError, "2-1-19-8"), ('dayofmonth(cast("2023-01-12/2024-02-02", date))', RunTimeError, "2-1-19-8"), ('dayofyear(cast("2023-01-12/2024-03-06", date))', RunTimeError, "2-1-19-8"), - ('year(cast("2023-01-12/2024-01-31", time))', SemanticError, "1-1-19-10"), - ('month(cast("2023-01-12/2024-03-25", time))', SemanticError, "1-1-19-10"), + ('getyear(cast("2023-01-12/2024-01-31", time))', SemanticError, "1-1-19-10"), + ('getmonth(cast("2023-01-12/2024-03-25", time))', SemanticError, "1-1-19-10"), ('dayofmonth(cast("2023-01-12/2024-05-29", time))', SemanticError, "1-1-19-10"), ('dayofyear(cast("2023-01-12/2024-06-08", time))', SemanticError, "1-1-19-10"), ] diff --git a/tests/ReferenceManual/data/vtl/RM179.vtl b/tests/ReferenceManual/data/vtl/RM179.vtl index 709a03f7d..3ca0a045f 100644 --- a/tests/ReferenceManual/data/vtl/RM179.vtl +++ b/tests/ReferenceManual/data/vtl/RM179.vtl @@ -1 +1 @@ -DS_r := DS_1[ calc Me_2 := month (Me_1) ]; \ No newline at end of file +DS_r := DS_1[ calc Me_2 := getmonth (Me_1) ]; \ No newline at end of file diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM179.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM179.vtl index 7b2e4218a..9192334be 100644 --- a/tests/ReferenceManual/data/vtl_defined_operators/RM179.vtl +++ b/tests/ReferenceManual/data/vtl_defined_operators/RM179.vtl @@ -1,5 +1,5 @@ define operator Test179 (y component) returns component is - month(y) + 
getmonth(y) end operator; DS_r := DS_1[calc Me_2 := Test179(Me_1)]; \ No newline at end of file From 32b17668c7b646c92b1b784f3d2cc6cabf64d4dc Mon Sep 17 00:00:00 2001 From: Javier Hernandez Date: Wed, 4 Mar 2026 16:54:09 +0100 Subject: [PATCH 16/38] Trigger publish and docs workflows via repository_dispatch --- .github/workflows/create-release.yml | 9 +++++++++ .github/workflows/docs.yml | 3 +++ .github/workflows/release.yml | 2 ++ 3 files changed, 14 insertions(+) diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index dce90263e..d49428011 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -306,3 +306,12 @@ jobs: } core.info(`Deleted ${preReleases.length} pre-release(s)`); } + + // 13. Trigger downstream workflows via repository_dispatch + await github.rest.repos.createDispatchEvent({ + owner: context.repo.owner, + repo: context.repo.repo, + event_type: 'release-published', + client_payload: { tag_name: tagName }, + }); + core.info('Dispatched release-published event'); diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index ff8f20863..a9c96cca3 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -3,6 +3,8 @@ name: Publish documentation on: release: types: [ published ] + repository_dispatch: + types: [ release-published ] workflow_dispatch: @@ -86,6 +88,7 @@ jobs: always() && !cancelled() && ( github.event_name == 'release' || + github.event_name == 'repository_dispatch' || github.event_name == 'workflow_dispatch' || needs.check-docs-label.outputs.should_build == 'true' ) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6135f9760..3c9037499 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,6 +3,8 @@ name: Publish package on: release: types: [ published ] + repository_dispatch: + types: [ release-published ] permissions: contents: read From 
6b128dec4bbf11232b5f9feffc087774f4b98135 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Mon, 9 Mar 2026 10:14:27 +0100 Subject: [PATCH 17/38] Fix #575: Allow swap renames in rename clause (#576) The rename validation now excludes components being renamed away when checking for name conflicts, and builds result components atomically instead of sequentially to handle swaps correctly. --- src/vtlengine/Operators/Clause.py | 25 ++++++++++------- tests/Bugs/data/DataSet/input/GH_575-1.csv | 4 +++ tests/Bugs/data/DataSet/output/GH_575-1.csv | 4 +++ .../data/DataStructure/input/GH_575-1.json | 27 +++++++++++++++++++ .../data/DataStructure/output/GH_575-1.json | 27 +++++++++++++++++++ tests/Bugs/data/vtl/GH_575.vtl | 1 + tests/Bugs/test_bugs.py | 12 +++++++++ 7 files changed, 90 insertions(+), 10 deletions(-) create mode 100644 tests/Bugs/data/DataSet/input/GH_575-1.csv create mode 100644 tests/Bugs/data/DataSet/output/GH_575-1.csv create mode 100644 tests/Bugs/data/DataStructure/input/GH_575-1.json create mode 100644 tests/Bugs/data/DataStructure/output/GH_575-1.json create mode 100644 tests/Bugs/data/vtl/GH_575.vtl diff --git a/src/vtlengine/Operators/Clause.py b/src/vtlengine/Operators/Clause.py index 6e56bf0cb..8ed794a6e 100644 --- a/src/vtlengine/Operators/Clause.py +++ b/src/vtlengine/Operators/Clause.py @@ -212,6 +212,7 @@ def validate(cls, operands: List[RenameNode], dataset: Dataset) -> Dataset: duplicates = set([name for name in to_names if to_names.count(name) > 1]) raise SemanticError("1-2-1", alias=duplicates) + from_names_set = set(from_names) for operand in operands: if operand.old_name not in dataset.components: raise SemanticError( @@ -220,7 +221,7 @@ def validate(cls, operands: List[RenameNode], dataset: Dataset) -> Dataset: comp_name=operand.old_name, dataset_name=dataset_name, ) - if operand.new_name in dataset.components: + if operand.new_name in dataset.components and operand.new_name not in 
from_names_set: raise SemanticError( "1-1-6-8", op=cls.op, @@ -228,15 +229,19 @@ def validate(cls, operands: List[RenameNode], dataset: Dataset) -> Dataset: dataset_name=dataset_name, ) - result_components = {comp.name: comp for comp in dataset.components.values()} - for operand in operands: - result_components[operand.new_name] = Component( - name=operand.new_name, - data_type=result_components[operand.old_name].data_type, - role=result_components[operand.old_name].role, - nullable=result_components[operand.old_name].nullable, - ) - del result_components[operand.old_name] + rename_map = {op.old_name: op.new_name for op in operands} + result_components = {} + for comp in dataset.components.values(): + if comp.name in rename_map: + new_name = rename_map[comp.name] + result_components[new_name] = Component( + name=new_name, + data_type=comp.data_type, + role=comp.role, + nullable=comp.nullable, + ) + else: + result_components[comp.name] = comp return Dataset(name=dataset_name, components=result_components, data=None) @classmethod diff --git a/tests/Bugs/data/DataSet/input/GH_575-1.csv b/tests/Bugs/data/DataSet/input/GH_575-1.csv new file mode 100644 index 000000000..c24d2a191 --- /dev/null +++ b/tests/Bugs/data/DataSet/input/GH_575-1.csv @@ -0,0 +1,4 @@ +Id_1,Me_1,Me_2 +1,10.0,100.0 +2,20.0,200.0 +3,30.0,300.0 diff --git a/tests/Bugs/data/DataSet/output/GH_575-1.csv b/tests/Bugs/data/DataSet/output/GH_575-1.csv new file mode 100644 index 000000000..7e3daa520 --- /dev/null +++ b/tests/Bugs/data/DataSet/output/GH_575-1.csv @@ -0,0 +1,4 @@ +Id_1,Me_1,Me_2 +1,100.0,10.0 +2,200.0,20.0 +3,300.0,30.0 diff --git a/tests/Bugs/data/DataStructure/input/GH_575-1.json b/tests/Bugs/data/DataStructure/input/GH_575-1.json new file mode 100644 index 000000000..de86c1623 --- /dev/null +++ b/tests/Bugs/data/DataStructure/input/GH_575-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + 
"nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Bugs/data/DataStructure/output/GH_575-1.json b/tests/Bugs/data/DataStructure/output/GH_575-1.json new file mode 100644 index 000000000..aff3fd171 --- /dev/null +++ b/tests/Bugs/data/DataStructure/output/GH_575-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_A", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Bugs/data/vtl/GH_575.vtl b/tests/Bugs/data/vtl/GH_575.vtl new file mode 100644 index 000000000..baffd85cf --- /dev/null +++ b/tests/Bugs/data/vtl/GH_575.vtl @@ -0,0 +1 @@ +DS_A <- DS_1[rename Me_1 to Me_2, Me_2 to Me_1]; diff --git a/tests/Bugs/test_bugs.py b/tests/Bugs/test_bugs.py index 2b5c27d57..59f055647 100644 --- a/tests/Bugs/test_bugs.py +++ b/tests/Bugs/test_bugs.py @@ -2386,6 +2386,18 @@ def test_GL_444_2(self): code=code, number_inputs=number_inputs, exception_code=error_code ) + def test_GH_575(self): + """ + Description: Rename clause should allow swapping component names. + Git Branch: cr-575. + Goal: Check Result. 
+ """ + code = "GH_575" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + class DefinedBugs(BugHelper): """ """ From 901a67dcbc1f5e9732aaf50ba105bd9cec731de5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Mon, 9 Mar 2026 10:28:12 +0100 Subject: [PATCH 18/38] Validate that data_structures does not contain extra datasets not referenced by the script (#569) (#570) --- src/vtlengine/API/__init__.py | 16 +++++++++++ src/vtlengine/AST/DAG/__init__.py | 1 + src/vtlengine/AST/DAG/_models.py | 1 + src/vtlengine/Exceptions/messages.py | 6 +++++ tests/API/test_api.py | 30 +++++++++++++++++++++ tests/Additional/test_additional_scalars.py | 20 +------------- 6 files changed, 55 insertions(+), 19 deletions(-) diff --git a/src/vtlengine/API/__init__.py b/src/vtlengine/API/__init__.py index 33dce44f4..fc83a0604 100644 --- a/src/vtlengine/API/__init__.py +++ b/src/vtlengine/API/__init__.py @@ -25,6 +25,7 @@ from vtlengine.AST.ASTConstructor import ASTVisitor from vtlengine.AST.ASTString import ASTString from vtlengine.AST.DAG import DAGAnalyzer +from vtlengine.AST.DAG._models import DatasetSchedule from vtlengine.AST.Grammar.lexer import Lexer from vtlengine.AST.Grammar.parser import Parser from vtlengine.Exceptions import InputValidationException @@ -93,6 +94,14 @@ def _extract_input_datasets(script: Union[str, TransformationScheme, Path]) -> L return dag_inputs +def _validate_extra_datasets(datasets: Dict[str, Any], ds_analysis: DatasetSchedule) -> None: + """Raise if data_structures contains datasets not referenced by the script.""" + script_datasets = set(ds_analysis.global_inputs) | set(ds_analysis.all_outputs) + extra_datasets = set(datasets.keys()) - script_datasets + if extra_datasets: + raise InputValidationException(code="0-1-3-9", datasets=sorted(extra_datasets)) + + def prettify(script: Union[str, TransformationScheme, 
Path]) -> str: """ Function that prettifies the VTL script given. @@ -261,6 +270,10 @@ def semantic_analysis( # Loading datasets from file/dict/pysdmx objects/URLs datasets, scalars = load_datasets(data_structures, sdmx_mappings=mapping_dict) + # Validate that all provided datasets are required by the script + ds_analysis = DAGAnalyzer.ds_structure(ast) + _validate_extra_datasets(datasets, ds_analysis) + # Handling of library items vd = None if value_domains is not None: @@ -434,6 +447,9 @@ def run( # VTL Efficient analysis ds_analysis = DAGAnalyzer.ds_structure(ast) + # Validate that all provided datasets are required by the script + _validate_extra_datasets(datasets, ds_analysis) + # Checking the output path to be a Path object to a directory if output_folder is not None: _check_output_folder(output_folder) diff --git a/src/vtlengine/AST/DAG/__init__.py b/src/vtlengine/AST/DAG/__init__.py index aed9d5145..17deee8a3 100644 --- a/src/vtlengine/AST/DAG/__init__.py +++ b/src/vtlengine/AST/DAG/__init__.py @@ -110,6 +110,7 @@ def _ds_usage_analysis(self) -> DatasetSchedule: deletion=dict(deletion), global_inputs=global_inputs, persistent=persistent_datasets, + all_outputs=sorted(all_outputs), ) @classmethod diff --git a/src/vtlengine/AST/DAG/_models.py b/src/vtlengine/AST/DAG/_models.py index 359edc49e..200836d78 100644 --- a/src/vtlengine/AST/DAG/_models.py +++ b/src/vtlengine/AST/DAG/_models.py @@ -23,3 +23,4 @@ class DatasetSchedule: deletion: Dict[int, List[str]] = field(default_factory=dict) global_inputs: List[str] = field(default_factory=list) persistent: List[str] = field(default_factory=list) + all_outputs: List[str] = field(default_factory=list) diff --git a/src/vtlengine/Exceptions/messages.py b/src/vtlengine/Exceptions/messages.py index 7a27891ba..f2cc4a784 100644 --- a/src/vtlengine/Exceptions/messages.py +++ b/src/vtlengine/Exceptions/messages.py @@ -224,6 +224,12 @@ "description": "Raised when URL datapoints are provided but data_structures is not a " 
"file path or URL for fetching the SDMX structure definition.", }, + "0-1-3-9": { + "message": "Dataset(s) {datasets} defined in data structures " + "but not required by the script.", + "description": "Raised when the provided data structures contain datasets " + "that are not used as inputs in the VTL script.", + }, # ------------Operators------------- # General Semantic errors "1-1-1-1": { diff --git a/tests/API/test_api.py b/tests/API/test_api.py index d27632614..1d462018a 100644 --- a/tests/API/test_api.py +++ b/tests/API/test_api.py @@ -2084,3 +2084,33 @@ def test_validate_dataset(ds_input, dp_input, is_valid, message): else: with pytest.raises(Exception, match=message): validate_dataset(ds_data, dp_input) + + +def test_extra_dataset_in_data_structures(): + """run() and semantic_analysis() should fail when data_structures has unused datasets.""" + script = "DS_A <- DS_1 * 10;" + data_structures = { + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, + {"name": "Me_1", "type": "Number", "role": "Measure", "nullable": True}, + ], + }, + { + "name": "DS_2", + "DataStructure": [ + {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, + {"name": "Me_1", "type": "Number", "role": "Measure", "nullable": True}, + ], + }, + ] + } + datapoints = {"DS_1": pd.DataFrame({"Id_1": [1], "Me_1": [10]})} + + with pytest.raises(InputValidationException, match="0-1-3-9"): + semantic_analysis(script=script, data_structures=data_structures) + + with pytest.raises(InputValidationException, match="0-1-3-9"): + run(script=script, data_structures=data_structures, datapoints=datapoints) diff --git a/tests/Additional/test_additional_scalars.py b/tests/Additional/test_additional_scalars.py index 216d6ca3a..f33c4f500 100644 --- a/tests/Additional/test_additional_scalars.py +++ b/tests/Additional/test_additional_scalars.py @@ -397,16 +397,6 @@ def 
test_run_scalars_operations(script, reference, tmp_path): } data_structures = { - "datasets": [ - { - "name": "DS_3", - "DataStructure": [ - {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, - {"name": "Me_1", "type": "Number", "role": "Measure", "nullable": True}, - {"name": "Me_2", "type": "Duration", "role": "Measure", "nullable": True}, - ], - } - ], "scalars": [ {"name": "sc_1", "type": "Integer"}, {"name": "sc_2", "type": "Integer"}, @@ -420,15 +410,7 @@ def test_run_scalars_operations(script, reference, tmp_path): ], } - datapoints = { - "DS_3": pd.DataFrame( - { - "Id_1": [1, 2, 3], - "Me_1": [10.0, 20.5, 30.1], - "Me_2": ["A", "M", None], - } - ) - } + datapoints: dict = {} run_result = run( script=script, From 67321cdfa6113d956c6021cf989f09f8e36b7852 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Mon, 9 Mar 2026 14:22:17 +0100 Subject: [PATCH 19/38] Fix #574: Accept "" values as null on non String input cols and auto-detect other separators usage on input CSVs (#577) * Updated parser logic * Added related tests * Simplified delimiter detection logic * Fixed ruff errors * Fixed mypy errora * Fixed linting errors * Minor fix * Test commit sign * Remove commit sign --- src/vtlengine/files/parser/__init__.py | 55 +++++++++++++++++-- tests/API/test_S3.py | 9 ++- .../data/DataSet/input/DataLoad-14-1.csv | 3 + .../data/DataSet/input/DataLoad-15-1.csv | 3 + .../data/DataSet/input/DataLoad-16-1.csv | 3 + .../data/DataSet/output/DataLoad-14-DS_r.csv | 3 + .../data/DataSet/output/DataLoad-15-DS_r.csv | 3 + .../data/DataSet/output/DataLoad-16-DS_r.csv | 3 + .../DataStructure/input/DataLoad-14-1.json | 39 +++++++++++++ .../DataStructure/input/DataLoad-15-1.json | 27 +++++++++ .../DataStructure/input/DataLoad-16-1.json | 27 +++++++++ .../output/DataLoad-14-DS_r.json | 39 +++++++++++++ .../output/DataLoad-15-DS_r.json | 27 +++++++++ 
.../output/DataLoad-16-DS_r.json | 27 +++++++++ tests/DataLoad/data/vtl/DataLoad-14.vtl | 1 + tests/DataLoad/data/vtl/DataLoad-15.vtl | 1 + tests/DataLoad/data/vtl/DataLoad-16.vtl | 1 + tests/DataLoad/test_dataload.py | 39 +++++++++++++ 18 files changed, 301 insertions(+), 9 deletions(-) create mode 100644 tests/DataLoad/data/DataSet/input/DataLoad-14-1.csv create mode 100644 tests/DataLoad/data/DataSet/input/DataLoad-15-1.csv create mode 100644 tests/DataLoad/data/DataSet/input/DataLoad-16-1.csv create mode 100644 tests/DataLoad/data/DataSet/output/DataLoad-14-DS_r.csv create mode 100644 tests/DataLoad/data/DataSet/output/DataLoad-15-DS_r.csv create mode 100644 tests/DataLoad/data/DataSet/output/DataLoad-16-DS_r.csv create mode 100644 tests/DataLoad/data/DataStructure/input/DataLoad-14-1.json create mode 100644 tests/DataLoad/data/DataStructure/input/DataLoad-15-1.json create mode 100644 tests/DataLoad/data/DataStructure/input/DataLoad-16-1.json create mode 100644 tests/DataLoad/data/DataStructure/output/DataLoad-14-DS_r.json create mode 100644 tests/DataLoad/data/DataStructure/output/DataLoad-15-DS_r.json create mode 100644 tests/DataLoad/data/DataStructure/output/DataLoad-16-DS_r.json create mode 100644 tests/DataLoad/data/vtl/DataLoad-14.vtl create mode 100644 tests/DataLoad/data/vtl/DataLoad-15.vtl create mode 100644 tests/DataLoad/data/vtl/DataLoad-16.vtl diff --git a/src/vtlengine/files/parser/__init__.py b/src/vtlengine/files/parser/__init__.py index 9eab9a139..aa3e45a54 100644 --- a/src/vtlengine/files/parser/__init__.py +++ b/src/vtlengine/files/parser/__init__.py @@ -1,3 +1,4 @@ +import csv import warnings from csv import DictReader from pathlib import Path @@ -13,6 +14,7 @@ Integer, Number, ScalarType, + String, TimeInterval, TimePeriod, ) @@ -33,6 +35,26 @@ TimeInterval: check_time, } +SEPARATORS = "".join([",", ";", ":", "|", "\t"]) + + +def _detect_delimiter(file_path: Union[str, Path], num_bytes: int = 4096) -> str: + try: + if 
_is_remote_path(file_path): + import fsspec # type: ignore[import-untyped] + + reader = fsspec.open + else: + reader = open + + with reader(file_path, "r", encoding="utf-8", errors="replace") as f: + sample = f.read(num_bytes) + if sample: + return csv.Sniffer().sniff(sample, delimiters=SEPARATORS).delimiter + except Exception: + return "," + return "," + def _validate_csv_path(components: Dict[str, Component], csv_path: Path) -> None: # GE1 check if the file is empty @@ -42,8 +64,9 @@ def _validate_csv_path(components: Dict[str, Component], csv_path: Path) -> None raise DataLoadError(code="0-3-1-1", file=csv_path) register_rfc() try: + delimiter = _detect_delimiter(csv_path) with open(csv_path, "r", errors="replace", encoding="utf-8") as f: - reader = DictReader(f, dialect="rfc") + reader = DictReader(f, delimiter=delimiter) csv_columns = reader.fieldnames except InputValidationException as ie: raise InputValidationException("{}".format(str(ie))) from None @@ -106,15 +129,31 @@ def _sanitize_pandas_columns( return data +def _is_remote_path(csv_path: Union[str, Path]) -> bool: + return isinstance(csv_path, str) and "://" in csv_path + + def _pandas_load_csv(components: Dict[str, Component], csv_path: Union[str, Path]) -> pd.DataFrame: obj_dtypes = dict.fromkeys(components, "string[pyarrow]") - data = pd.read_csv( - csv_path, # type: ignore[call-overload, unused-ignore] + na_values: Union[Dict[str, List[str]], List[str]] + if components: + na_values = { + comp_name: ["", '""'] if comp.data_type != String else [""] + for comp_name, comp in components.items() + } + else: + na_values = {} + + sep = _detect_delimiter(csv_path) + + data = pd.read_csv( # type: ignore[call-overload, unused-ignore] + csv_path, dtype=obj_dtypes, engine="c", + sep=sep, keep_default_na=False, - na_values=[""], + na_values=na_values, encoding_errors="replace", ) @@ -150,10 +189,14 @@ def _validate_pandas( if len(id_names) == 0 and len(data) > 1: raise DataLoadError("0-3-1-4", name=dataset_name) 
+ # Treat empty strings as null for non-String columns + for comp_name, comp in components.items(): + if comp.data_type != String and comp_name in data.columns: + data[comp_name] = data[comp_name].replace("", pd.NA) + data = data.fillna(value=pd.NA) # Checking data types on all data types comp_name = "" - comp = None try: for comp_name, comp in components.items(): if comp.data_type in (Date, TimePeriod, TimeInterval): @@ -202,7 +245,7 @@ def _validate_pandas( data[comp_name] = data[comp_name].astype(comp.data_type.dtype()) # type: ignore[call-overload] except (ValueError, InputValidationException) as e: - str_comp = SCALAR_TYPES_CLASS_REVERSE[comp.data_type] if comp else "Null" + str_comp = SCALAR_TYPES_CLASS_REVERSE.get(comp.data_type, "Null") error = e.args[0] if isinstance(e, InputValidationException) else str(e) raise DataLoadError( "0-3-1-6", name=dataset_name, column=comp_name, type=str_comp, error=error diff --git a/tests/API/test_S3.py b/tests/API/test_S3.py index b52077007..32246d5c0 100644 --- a/tests/API/test_S3.py +++ b/tests/API/test_S3.py @@ -144,8 +144,9 @@ def test_load_datapoints_s3(mock_read_csv): input_path, dtype={}, engine="c", + sep=",", keep_default_na=False, - na_values=[""], + na_values={}, encoding_errors="replace", ) @@ -166,8 +167,9 @@ def test_run_s3(mock_read_csv): input_path, dtype=dtypes, engine="c", + sep=",", keep_default_na=False, - na_values=[""], + na_values={"Id_1": ["", '""'], "Id_2": [""], "Me_1": ["", '""']}, encoding_errors="replace", ) @@ -188,7 +190,8 @@ def test_validate_dataset_s3(mock_read_csv): input_path, dtype=dtypes, engine="c", + sep=",", keep_default_na=False, - na_values=[""], + na_values={"Id_1": ["", '""'], "Id_2": [""], "Me_1": ["", '""']}, encoding_errors="replace", ) diff --git a/tests/DataLoad/data/DataSet/input/DataLoad-14-1.csv b/tests/DataLoad/data/DataSet/input/DataLoad-14-1.csv new file mode 100644 index 000000000..62b002988 --- /dev/null +++ b/tests/DataLoad/data/DataSet/input/DataLoad-14-1.csv @@ 
-0,0 +1,3 @@ +Id_1,Me_1,Me_2,Me_3,Me_4 +1,10,1.5,TRUE,hello +2,"","","","" diff --git a/tests/DataLoad/data/DataSet/input/DataLoad-15-1.csv b/tests/DataLoad/data/DataSet/input/DataLoad-15-1.csv new file mode 100644 index 000000000..5ff70911b --- /dev/null +++ b/tests/DataLoad/data/DataSet/input/DataLoad-15-1.csv @@ -0,0 +1,3 @@ +Id_1;Me_1;Me_2 +1;10.5;hello +2;20.3;world diff --git a/tests/DataLoad/data/DataSet/input/DataLoad-16-1.csv b/tests/DataLoad/data/DataSet/input/DataLoad-16-1.csv new file mode 100644 index 000000000..51c13633a --- /dev/null +++ b/tests/DataLoad/data/DataSet/input/DataLoad-16-1.csv @@ -0,0 +1,3 @@ +Id_1 Me_1 Me_2 +1 10.5 hello +2 20.3 world diff --git a/tests/DataLoad/data/DataSet/output/DataLoad-14-DS_r.csv b/tests/DataLoad/data/DataSet/output/DataLoad-14-DS_r.csv new file mode 100644 index 000000000..2d5902b2b --- /dev/null +++ b/tests/DataLoad/data/DataSet/output/DataLoad-14-DS_r.csv @@ -0,0 +1,3 @@ +Id_1,Me_1,Me_2,Me_3,Me_4 +1,10,1.5,TRUE,hello +2,,,, diff --git a/tests/DataLoad/data/DataSet/output/DataLoad-15-DS_r.csv b/tests/DataLoad/data/DataSet/output/DataLoad-15-DS_r.csv new file mode 100644 index 000000000..ad649998a --- /dev/null +++ b/tests/DataLoad/data/DataSet/output/DataLoad-15-DS_r.csv @@ -0,0 +1,3 @@ +Id_1,Me_1,Me_2 +1,10.5,hello +2,20.3,world diff --git a/tests/DataLoad/data/DataSet/output/DataLoad-16-DS_r.csv b/tests/DataLoad/data/DataSet/output/DataLoad-16-DS_r.csv new file mode 100644 index 000000000..ad649998a --- /dev/null +++ b/tests/DataLoad/data/DataSet/output/DataLoad-16-DS_r.csv @@ -0,0 +1,3 @@ +Id_1,Me_1,Me_2 +1,10.5,hello +2,20.3,world diff --git a/tests/DataLoad/data/DataStructure/input/DataLoad-14-1.json b/tests/DataLoad/data/DataStructure/input/DataLoad-14-1.json new file mode 100644 index 000000000..c3a0decba --- /dev/null +++ b/tests/DataLoad/data/DataStructure/input/DataLoad-14-1.json @@ -0,0 +1,39 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": 
"Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "Me_3", + "role": "Measure", + "type": "Boolean", + "nullable": true + }, + { + "name": "Me_4", + "role": "Measure", + "type": "String", + "nullable": true + } + ] + } + ] +} diff --git a/tests/DataLoad/data/DataStructure/input/DataLoad-15-1.json b/tests/DataLoad/data/DataStructure/input/DataLoad-15-1.json new file mode 100644 index 000000000..4a81cca42 --- /dev/null +++ b/tests/DataLoad/data/DataStructure/input/DataLoad-15-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "String", + "nullable": true + } + ] + } + ] +} diff --git a/tests/DataLoad/data/DataStructure/input/DataLoad-16-1.json b/tests/DataLoad/data/DataStructure/input/DataLoad-16-1.json new file mode 100644 index 000000000..4a81cca42 --- /dev/null +++ b/tests/DataLoad/data/DataStructure/input/DataLoad-16-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "String", + "nullable": true + } + ] + } + ] +} diff --git a/tests/DataLoad/data/DataStructure/output/DataLoad-14-DS_r.json b/tests/DataLoad/data/DataStructure/output/DataLoad-14-DS_r.json new file mode 100644 index 000000000..dc1f80007 --- /dev/null +++ b/tests/DataLoad/data/DataStructure/output/DataLoad-14-DS_r.json @@ -0,0 +1,39 @@ +{ + "datasets": [ + { + "name": "DS_r", + 
"DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "Me_3", + "role": "Measure", + "type": "Boolean", + "nullable": true + }, + { + "name": "Me_4", + "role": "Measure", + "type": "String", + "nullable": true + } + ] + } + ] +} diff --git a/tests/DataLoad/data/DataStructure/output/DataLoad-15-DS_r.json b/tests/DataLoad/data/DataStructure/output/DataLoad-15-DS_r.json new file mode 100644 index 000000000..ab773e0c0 --- /dev/null +++ b/tests/DataLoad/data/DataStructure/output/DataLoad-15-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "String", + "nullable": true + } + ] + } + ] +} diff --git a/tests/DataLoad/data/DataStructure/output/DataLoad-16-DS_r.json b/tests/DataLoad/data/DataStructure/output/DataLoad-16-DS_r.json new file mode 100644 index 000000000..ab773e0c0 --- /dev/null +++ b/tests/DataLoad/data/DataStructure/output/DataLoad-16-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "String", + "nullable": true + } + ] + } + ] +} diff --git a/tests/DataLoad/data/vtl/DataLoad-14.vtl b/tests/DataLoad/data/vtl/DataLoad-14.vtl new file mode 100644 index 000000000..6724578ed --- /dev/null +++ b/tests/DataLoad/data/vtl/DataLoad-14.vtl @@ -0,0 +1 @@ +DS_r := DS_1; \ No newline at end of file 
diff --git a/tests/DataLoad/data/vtl/DataLoad-15.vtl b/tests/DataLoad/data/vtl/DataLoad-15.vtl new file mode 100644 index 000000000..6724578ed --- /dev/null +++ b/tests/DataLoad/data/vtl/DataLoad-15.vtl @@ -0,0 +1 @@ +DS_r := DS_1; \ No newline at end of file diff --git a/tests/DataLoad/data/vtl/DataLoad-16.vtl b/tests/DataLoad/data/vtl/DataLoad-16.vtl new file mode 100644 index 000000000..6724578ed --- /dev/null +++ b/tests/DataLoad/data/vtl/DataLoad-16.vtl @@ -0,0 +1 @@ +DS_r := DS_1; \ No newline at end of file diff --git a/tests/DataLoad/test_dataload.py b/tests/DataLoad/test_dataload.py index 547e31e61..26c4e7161 100644 --- a/tests/DataLoad/test_dataload.py +++ b/tests/DataLoad/test_dataload.py @@ -820,6 +820,45 @@ def test_GL_492_2(self): self.DataLoadTest(code=code, number_inputs=number_inputs, references_names=references_names) + def test_51(self): + """ + Status: OK + Expression: DS_r := DS_1; + Description: Data Load with "" (quoted empty string) as null for non-String columns. + Goal: Check Result. + """ + code = "DataLoad-14" + number_inputs = 1 + references_names = ["DS_r"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_52(self): + """ + Status: OK + Expression: DS_r := DS_1; + Description: Data Load with semicolon separator auto-detection. + Goal: Check Result. + """ + code = "DataLoad-15" + number_inputs = 1 + references_names = ["DS_r"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_53(self): + """ + Status: OK + Expression: DS_r := DS_1; + Description: Data Load with tab separator auto-detection. + Goal: Check Result. 
+ """ + code = "DataLoad-16" + number_inputs = 1 + references_names = ["DS_r"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + def test_infer_keys_1(self): """ """ code = "IK-1" From 3a945d2c42ec445b29b4e5680014dded65586337 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Mon, 9 Mar 2026 14:48:45 +0100 Subject: [PATCH 20/38] Bump version to 1.6.0rc5 (#580) --- pyproject.toml | 2 +- src/vtlengine/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a75fc10d7..8a04a6cc3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "vtlengine" -version = "1.6.0rc4" +version = "1.6.0rc5" description = "Run and Validate VTL Scripts" license = "AGPL-3.0" readme = "README.md" diff --git a/src/vtlengine/__init__.py b/src/vtlengine/__init__.py index 57abc8915..ff8f11656 100644 --- a/src/vtlengine/__init__.py +++ b/src/vtlengine/__init__.py @@ -24,4 +24,4 @@ "validate_external_routine", ] -__version__ = "1.6.0rc4" +__version__ = "1.6.0rc5" From 5cafa575fcfcecbe75f317ef7a3d45abce7ea7cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Mon, 9 Mar 2026 16:00:06 +0100 Subject: [PATCH 21/38] Fix #578: Duration scalar-scalar comparison uses magnitude order (#579) * Fix #578: Duration scalar-scalar comparison uses magnitude order instead of alphabetical Apply PERIOD_IND_MAPPING conversion in scalar_evaluation before comparing Duration values, consistent with all other evaluation paths. Also replace raw Exception with .get() returning None for invalid durations. * Add duration scalar comparison tests in additional scalars Cover all six comparison operators (=, <>, <, >, <=, >=) with Duration cast values to verify magnitude-based ordering. 
* Add dataset, component-scalar, and component-component duration comparison tests Cover all Duration comparison evaluation paths: scalar-scalar, dataset-dataset, dataset-scalar, component-scalar, and component-component. * Add TimePeriod comparison tests across all evaluation paths Cover scalar-scalar, dataset-dataset, dataset-scalar, component-scalar, and component-component comparisons for TimePeriod type. --- src/vtlengine/Operators/__init__.py | 8 +- tests/Additional/test_additional_scalars.py | 13 + tests/TypeChecking/test_time_type_checking.py | 346 ++++++++++++++++++ 3 files changed, 363 insertions(+), 4 deletions(-) diff --git a/src/vtlengine/Operators/__init__.py b/src/vtlengine/Operators/__init__.py index 45063384e..5d35bf2d8 100644 --- a/src/vtlengine/Operators/__init__.py +++ b/src/vtlengine/Operators/__init__.py @@ -86,9 +86,7 @@ def cast_time_types_scalar(cls, data_type: Any, value: str) -> Any: elif data_type.__name__ == "TimePeriod": return TimePeriodHandler(value) elif data_type.__name__ == "Duration": - if value not in PERIOD_IND_MAPPING: - raise Exception(f"Duration {value} is not valid") - return PERIOD_IND_MAPPING[value] + return PERIOD_IND_MAPPING.get(value) return value @classmethod @@ -632,7 +630,9 @@ def _cast_y(v: Any, ft: Any = y_type) -> Any: @classmethod def scalar_evaluation(cls, left_operand: Scalar, right_operand: Scalar) -> Scalar: result_scalar = cls.scalar_validation(left_operand, right_operand) - result_scalar.value = cls.op_func(left_operand.value, right_operand.value) + left_value = cls.cast_time_types_scalar(left_operand.data_type, left_operand.value) + right_value = cls.cast_time_types_scalar(right_operand.data_type, right_operand.value) + result_scalar.value = cls.op_func(left_value, right_value) return result_scalar @classmethod diff --git a/tests/Additional/test_additional_scalars.py b/tests/Additional/test_additional_scalars.py index f33c4f500..cbab8d6ab 100644 --- a/tests/Additional/test_additional_scalars.py +++ 
b/tests/Additional/test_additional_scalars.py @@ -190,6 +190,19 @@ class AdditionalScalarsTests(TestHelper): ('between("z", "a", "c")', False), ("between(6, 1, 9)", True), ("between(12, 1, 9)", False), + # Duration scalar comparisons (magnitude order: A>S>Q>M>W>D) + ('cast("A", duration) > cast("M", duration)', True), + ('cast("A", duration) > cast("D", duration)', True), + ('cast("D", duration) < cast("A", duration)', True), + ('cast("S", duration) >= cast("Q", duration)', True), + ('cast("W", duration) < cast("M", duration)', True), + ('cast("A", duration) = cast("A", duration)', True), + ('cast("D", duration) > cast("W", duration)', False), + ('cast("M", duration) > cast("A", duration)', False), + ('cast("A", duration) <> cast("M", duration)', True), + ('cast("A", duration) <> cast("A", duration)', False), + ('cast("D", duration) <= cast("D", duration)', True), + ('cast("Q", duration) >= cast("S", duration)', False), ] string_exception_param = [ diff --git a/tests/TypeChecking/test_time_type_checking.py b/tests/TypeChecking/test_time_type_checking.py index a566de491..25270aa1a 100644 --- a/tests/TypeChecking/test_time_type_checking.py +++ b/tests/TypeChecking/test_time_type_checking.py @@ -20,6 +20,7 @@ binary_implicit_promotion, check_binary_implicit_promotion, ) +from vtlengine.Model import Dataset class TestDateTimePeriodImplicitPromotion: @@ -120,3 +121,348 @@ def test_comparison(self, script, date_vals, period_vals, expected): result = run(script=script, data_structures=DATA_STRUCTURES, datapoints=datapoints) assert "DS_r" in result assert list(result["DS_r"].data["bool_var"]) == expected + + +DURATION_DS = { + "name": "DS_1", + "DataStructure": [ + {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, + {"name": "Me_1", "type": "Duration", "role": "Measure", "nullable": True}, + ], +} + +DURATION_SINGLE_DS = {"datasets": [DURATION_DS]} + +DURATION_TWO_DS = { + "datasets": [ + DURATION_DS, + { + "name": "DS_2", + "DataStructure": [ + 
{"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, + {"name": "Me_1", "type": "Duration", "role": "Measure", "nullable": True}, + ], + }, + ] +} + + +class TestDurationComparison: + """Duration comparisons must use magnitude order (A>S>Q>M>W>D), not alphabetical.""" + + @pytest.mark.parametrize( + "script, expected", + [ + ('DS_r <- cast("A", duration) > cast("M", duration);', True), + ('DS_r <- cast("A", duration) > cast("D", duration);', True), + ('DS_r <- cast("D", duration) < cast("A", duration);', True), + ('DS_r <- cast("S", duration) >= cast("Q", duration);', True), + ('DS_r <- cast("W", duration) < cast("M", duration);', True), + ('DS_r <- cast("A", duration) = cast("A", duration);', True), + ('DS_r <- cast("D", duration) > cast("W", duration);', False), + ('DS_r <- cast("M", duration) > cast("A", duration);', False), + ], + ids=[ + "annual_gt_month", + "annual_gt_day", + "day_lt_annual", + "semester_gte_quarter", + "week_lt_month", + "annual_eq_annual", + "day_not_gt_week", + "month_not_gt_annual", + ], + ) + def test_scalar_comparison(self, script: str, expected: bool) -> None: + result = run(script=script, data_structures={"datasets": []}, datapoints={}) + scalar = result["DS_r"] + assert not isinstance(scalar, Dataset) + assert scalar.value == expected + + @pytest.mark.parametrize( + "script, expected", + [ + ("DS_r <- DS_1 > DS_2;", [True, False, False]), + ("DS_r <- DS_1 < DS_2;", [False, True, True]), + ("DS_r <- DS_1 >= DS_2;", [True, False, False]), + ("DS_r <- DS_1 <= DS_2;", [False, True, True]), + ("DS_r <- DS_1 = DS_2;", [False, False, False]), + ("DS_r <- DS_1 <> DS_2;", [True, True, True]), + ], + ids=["ds_gt", "ds_lt", "ds_gte", "ds_lte", "ds_eq", "ds_neq"], + ) + def test_dataset_comparison(self, script: str, expected: list[bool]) -> None: + datapoints = { + "DS_1": pd.DataFrame({"Id_1": [1, 2, 3], "Me_1": ["A", "M", "D"]}), + "DS_2": pd.DataFrame({"Id_1": [1, 2, 3], "Me_1": ["M", "A", "W"]}), + } + result = 
run(script=script, data_structures=DURATION_TWO_DS, datapoints=datapoints) + ds = result["DS_r"] + assert isinstance(ds, Dataset) + assert list(ds.data["bool_var"]) == expected + + @pytest.mark.parametrize( + "script, expected", + [ + ('DS_r <- DS_1 > cast("M", duration);', [True, True, False]), + ('DS_r <- DS_1 < cast("M", duration);', [False, False, True]), + ('DS_r <- DS_1 = cast("Q", duration);', [False, True, False]), + ], + ids=["ds_scalar_gt", "ds_scalar_lt", "ds_scalar_eq"], + ) + def test_dataset_scalar_comparison(self, script: str, expected: list[bool]) -> None: + datapoints = { + "DS_1": pd.DataFrame({"Id_1": [1, 2, 3], "Me_1": ["A", "Q", "D"]}), + } + result = run(script=script, data_structures=DURATION_SINGLE_DS, datapoints=datapoints) + ds = result["DS_r"] + assert isinstance(ds, Dataset) + assert list(ds.data["bool_var"]) == expected + + @pytest.mark.parametrize( + "script, expected", + [ + ( + 'DS_r <- DS_1[calc Me_2 := Me_1 > cast("M", duration)];', + [True, False, False], + ), + ( + 'DS_r <- DS_1[calc Me_2 := Me_1 < cast("Q", duration)];', + [False, True, True], + ), + ], + ids=["comp_scalar_gt", "comp_scalar_lt"], + ) + def test_component_scalar_comparison(self, script: str, expected: list[bool]) -> None: + datapoints = { + "DS_1": pd.DataFrame({"Id_1": [1, 2, 3], "Me_1": ["A", "M", "D"]}), + } + result = run(script=script, data_structures=DURATION_SINGLE_DS, datapoints=datapoints) + ds = result["DS_r"] + assert isinstance(ds, Dataset) + assert list(ds.data["Me_2"]) == expected + + @pytest.mark.parametrize( + "script, expected", + [ + ( + "DS_r <- DS_1[calc Me_3 := Me_1 > Me_2];", + [True, False, False], + ), + ( + "DS_r <- DS_1[calc Me_3 := Me_1 < Me_2];", + [False, True, True], + ), + ( + "DS_r <- DS_1[calc Me_3 := Me_1 = Me_2];", + [False, False, False], + ), + ], + ids=["comp_comp_gt", "comp_comp_lt", "comp_comp_eq"], + ) + def test_component_component_comparison(self, script: str, expected: list[bool]) -> None: + data_structures = { + 
"datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "Duration", + "role": "Measure", + "nullable": True, + }, + { + "name": "Me_2", + "type": "Duration", + "role": "Measure", + "nullable": True, + }, + ], + } + ] + } + datapoints = { + "DS_1": pd.DataFrame( + { + "Id_1": [1, 2, 3], + "Me_1": ["A", "M", "D"], + "Me_2": ["M", "A", "W"], + } + ), + } + result = run(script=script, data_structures=data_structures, datapoints=datapoints) + ds = result["DS_r"] + assert isinstance(ds, Dataset) + assert list(ds.data["Me_3"]) == expected + + +TIME_PERIOD_DS = { + "name": "DS_1", + "DataStructure": [ + {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, + {"name": "Me_1", "type": "Time_Period", "role": "Measure", "nullable": True}, + ], +} + +TIME_PERIOD_SINGLE_DS = {"datasets": [TIME_PERIOD_DS]} + +TIME_PERIOD_TWO_DS = { + "datasets": [ + TIME_PERIOD_DS, + { + "name": "DS_2", + "DataStructure": [ + {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, + {"name": "Me_1", "type": "Time_Period", "role": "Measure", "nullable": True}, + ], + }, + ] +} + + +class TestTimePeriodComparison: + """TimePeriod comparisons across all evaluation paths.""" + + @pytest.mark.parametrize( + "script, expected", + [ + ('DS_r <- cast("2020Q3", time_period) > cast("2020Q1", time_period);', True), + ('DS_r <- cast("2020Q1", time_period) > cast("2020Q3", time_period);', False), + ('DS_r <- cast("2021M01", time_period) > cast("2020M12", time_period);', True), + ('DS_r <- cast("2020Q1", time_period) = cast("2020Q1", time_period);', True), + ('DS_r <- cast("2020Q1", time_period) <> cast("2020Q3", time_period);', True), + ('DS_r <- cast("2020Q1", time_period) < cast("2021Q1", time_period);', True), + ], + ids=[ + "q3_gt_q1", + "q1_not_gt_q3", + "2021m01_gt_2020m12", + "q1_eq_q1", + "q1_neq_q3", + "2020q1_lt_2021q1", + ], + ) 
+ def test_scalar_comparison(self, script: str, expected: bool) -> None: + result = run(script=script, data_structures={"datasets": []}, datapoints={}) + scalar = result["DS_r"] + assert not isinstance(scalar, Dataset) + assert scalar.value == expected + + @pytest.mark.parametrize( + "script, expected", + [ + ("DS_r <- DS_1 > DS_2;", [False, True, False]), + ("DS_r <- DS_1 < DS_2;", [True, False, True]), + ("DS_r <- DS_1 = DS_2;", [False, False, False]), + ("DS_r <- DS_1 <> DS_2;", [True, True, True]), + ], + ids=["ds_gt", "ds_lt", "ds_eq", "ds_neq"], + ) + def test_dataset_comparison(self, script: str, expected: list[bool]) -> None: + datapoints = { + "DS_1": pd.DataFrame({"Id_1": [1, 2, 3], "Me_1": ["2020Q1", "2021M06", "2020-A1"]}), + "DS_2": pd.DataFrame({"Id_1": [1, 2, 3], "Me_1": ["2020Q3", "2020M12", "2021-A1"]}), + } + result = run(script=script, data_structures=TIME_PERIOD_TWO_DS, datapoints=datapoints) + ds = result["DS_r"] + assert isinstance(ds, Dataset) + assert list(ds.data["bool_var"]) == expected + + @pytest.mark.parametrize( + "script, expected", + [ + ('DS_r <- DS_1 > cast("2020Q2", time_period);', [False, True]), + ('DS_r <- DS_1 < cast("2020Q2", time_period);', [True, False]), + ('DS_r <- DS_1 = cast("2020Q1", time_period);', [True, False]), + ], + ids=["ds_scalar_gt", "ds_scalar_lt", "ds_scalar_eq"], + ) + def test_dataset_scalar_comparison(self, script: str, expected: list[bool]) -> None: + datapoints = { + "DS_1": pd.DataFrame({"Id_1": [1, 2], "Me_1": ["2020Q1", "2020Q3"]}), + } + result = run(script=script, data_structures=TIME_PERIOD_SINGLE_DS, datapoints=datapoints) + ds = result["DS_r"] + assert isinstance(ds, Dataset) + assert list(ds.data["bool_var"]) == expected + + @pytest.mark.parametrize( + "script, expected", + [ + ( + 'DS_r <- DS_1[calc Me_2 := Me_1 > cast("2020Q2", time_period)];', + [False, True], + ), + ( + 'DS_r <- DS_1[calc Me_2 := Me_1 < cast("2020Q2", time_period)];', + [True, False], + ), + ], + ids=["comp_scalar_gt", 
"comp_scalar_lt"], + ) + def test_component_scalar_comparison(self, script: str, expected: list[bool]) -> None: + datapoints = { + "DS_1": pd.DataFrame({"Id_1": [1, 2], "Me_1": ["2020Q1", "2020Q3"]}), + } + result = run(script=script, data_structures=TIME_PERIOD_SINGLE_DS, datapoints=datapoints) + ds = result["DS_r"] + assert isinstance(ds, Dataset) + assert list(ds.data["Me_2"]) == expected + + @pytest.mark.parametrize( + "script, expected", + [ + ("DS_r <- DS_1[calc Me_3 := Me_1 > Me_2];", [False, True, False]), + ("DS_r <- DS_1[calc Me_3 := Me_1 < Me_2];", [True, False, True]), + ("DS_r <- DS_1[calc Me_3 := Me_1 = Me_2];", [False, False, False]), + ], + ids=["comp_comp_gt", "comp_comp_lt", "comp_comp_eq"], + ) + def test_component_component_comparison(self, script: str, expected: list[bool]) -> None: + data_structures = { + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "Time_Period", + "role": "Measure", + "nullable": True, + }, + { + "name": "Me_2", + "type": "Time_Period", + "role": "Measure", + "nullable": True, + }, + ], + } + ] + } + datapoints = { + "DS_1": pd.DataFrame( + { + "Id_1": [1, 2, 3], + "Me_1": ["2020Q1", "2021M06", "2020-A1"], + "Me_2": ["2020Q3", "2020M12", "2021-A1"], + } + ), + } + result = run(script=script, data_structures=data_structures, datapoints=datapoints) + ds = result["DS_r"] + assert isinstance(ds, Dataset) + assert list(ds.data["Me_3"]) == expected From 2381cadd1cf6081973dc8bd1e9f2c3016629cbbd Mon Sep 17 00:00:00 2001 From: Javier Hernandez Date: Mon, 9 Mar 2026 16:08:07 +0100 Subject: [PATCH 22/38] Handle non-PR numbers in create release workflow GraphQL query Commit messages may reference issue numbers (e.g. (#569)) which cause the pullRequest GraphQL query to fail with NOT_FOUND. Catch partial errors and use the valid data instead of failing the entire workflow. 
--- .github/workflows/create-release.yml | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index d49428011..744ac755e 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -131,9 +131,22 @@ jobs: } `; - const graphqlResult = await github.graphql(query, { - headers: { 'GraphQL-Features': 'issue_types' }, - }); + // Some numbers extracted from commit messages may be issues, not PRs. + // The GraphQL query returns partial data with errors for non-PR numbers, + // so we catch the error and use the partial data. + let graphqlResult; + try { + graphqlResult = await github.graphql(query, { + headers: { 'GraphQL-Features': 'issue_types' }, + }); + } catch (e) { + if (e.data?.repository) { + graphqlResult = { repository: e.data.repository }; + core.info(`GraphQL partial errors (non-PR numbers skipped): ${e.errors?.map(err => err.message).join(', ')}`); + } else { + throw e; + } + } const prs = Object.values(graphqlResult.repository).filter(Boolean); From 5b6cd4f92cafb1317ddc6475ddd74d9c1248f150 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Mar 2026 10:26:01 +0100 Subject: [PATCH 23/38] Bump ruff from 0.15.4 to 0.15.5 (#583) Bumps [ruff](https://github.com/astral-sh/ruff) from 0.15.4 to 0.15.5. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.15.4...0.15.5) --- updated-dependencies: - dependency-name: ruff dependency-version: 0.15.5 dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index f695794a1..229240c95 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2912,30 +2912,30 @@ files = [ [[package]] name = "ruff" -version = "0.15.4" +version = "0.15.5" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "ruff-0.15.4-py3-none-linux_armv6l.whl", hash = "sha256:a1810931c41606c686bae8b5b9a8072adac2f611bb433c0ba476acba17a332e0"}, - {file = "ruff-0.15.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5a1632c66672b8b4d3e1d1782859e98d6e0b4e70829530666644286600a33992"}, - {file = "ruff-0.15.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4386ba2cd6c0f4ff75252845906acc7c7c8e1ac567b7bc3d373686ac8c222ba"}, - {file = "ruff-0.15.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2496488bdfd3732747558b6f95ae427ff066d1fcd054daf75f5a50674411e75"}, - {file = "ruff-0.15.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f1c4893841ff2d54cbda1b2860fa3260173df5ddd7b95d370186f8a5e66a4ac"}, - {file = "ruff-0.15.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:820b8766bd65503b6c30aaa6331e8ef3a6e564f7999c844e9a547c40179e440a"}, - {file = "ruff-0.15.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9fb74bab47139c1751f900f857fa503987253c3ef89129b24ed375e72873e85"}, - {file = "ruff-0.15.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f80c98765949c518142b3a50a5db89343aa90f2c2bf7799de9986498ae6176db"}, - {file = "ruff-0.15.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451a2e224151729b3b6c9ffb36aed9091b2996fe4bdbd11f47e27d8f2e8888ec"}, - {file = 
"ruff-0.15.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8f157f2e583c513c4f5f896163a93198297371f34c04220daf40d133fdd4f7f"}, - {file = "ruff-0.15.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:917cc68503357021f541e69b35361c99387cdbbf99bd0ea4aa6f28ca99ff5338"}, - {file = "ruff-0.15.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e9737c8161da79fd7cfec19f1e35620375bd8b2a50c3e77fa3d2c16f574105cc"}, - {file = "ruff-0.15.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:291258c917539e18f6ba40482fe31d6f5ac023994ee11d7bdafd716f2aab8a68"}, - {file = "ruff-0.15.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3f83c45911da6f2cd5936c436cf86b9f09f09165f033a99dcf7477e34041cbc3"}, - {file = "ruff-0.15.4-py3-none-win32.whl", hash = "sha256:65594a2d557d4ee9f02834fcdf0a28daa8b3b9f6cb2cb93846025a36db47ef22"}, - {file = "ruff-0.15.4-py3-none-win_amd64.whl", hash = "sha256:04196ad44f0df220c2ece5b0e959c2f37c777375ec744397d21d15b50a75264f"}, - {file = "ruff-0.15.4-py3-none-win_arm64.whl", hash = "sha256:60d5177e8cfc70e51b9c5fad936c634872a74209f934c1e79107d11787ad5453"}, - {file = "ruff-0.15.4.tar.gz", hash = "sha256:3412195319e42d634470cc97aa9803d07e9d5c9223b99bcb1518f0c725f26ae1"}, + {file = "ruff-0.15.5-py3-none-linux_armv6l.whl", hash = "sha256:4ae44c42281f42e3b06b988e442d344a5b9b72450ff3c892e30d11b29a96a57c"}, + {file = "ruff-0.15.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6edd3792d408ebcf61adabc01822da687579a1a023f297618ac27a5b51ef0080"}, + {file = "ruff-0.15.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:89f463f7c8205a9f8dea9d658d59eff49db05f88f89cc3047fb1a02d9f344010"}, + {file = "ruff-0.15.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba786a8295c6574c1116704cf0b9e6563de3432ac888d8f83685654fe528fd65"}, + {file = "ruff-0.15.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd4b801e57955fe9f02b31d20375ab3a5c4415f2e5105b79fb94cf2642c91440"}, + {file = 
"ruff-0.15.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391f7c73388f3d8c11b794dbbc2959a5b5afe66642c142a6effa90b45f6f5204"}, + {file = "ruff-0.15.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc18f30302e379fe1e998548b0f5e9f4dff907f52f73ad6da419ea9c19d66c8"}, + {file = "ruff-0.15.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc6e7f90087e2d27f98dc34ed1b3ab7c8f0d273cc5431415454e22c0bd2a681"}, + {file = "ruff-0.15.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cb7169f53c1ddb06e71a9aebd7e98fc0fea936b39afb36d8e86d36ecc2636a"}, + {file = "ruff-0.15.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9b037924500a31ee17389b5c8c4d88874cc6ea8e42f12e9c61a3d754ff72f1ca"}, + {file = "ruff-0.15.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:65bb414e5b4eadd95a8c1e4804f6772bbe8995889f203a01f77ddf2d790929dd"}, + {file = "ruff-0.15.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d20aa469ae3b57033519c559e9bc9cd9e782842e39be05b50e852c7c981fa01d"}, + {file = "ruff-0.15.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:15388dd28c9161cdb8eda68993533acc870aa4e646a0a277aa166de9ad5a8752"}, + {file = "ruff-0.15.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b30da330cbd03bed0c21420b6b953158f60c74c54c5f4c1dabbdf3a57bf355d2"}, + {file = "ruff-0.15.5-py3-none-win32.whl", hash = "sha256:732e5ee1f98ba5b3679029989a06ca39a950cced52143a0ea82a2102cb592b74"}, + {file = "ruff-0.15.5-py3-none-win_amd64.whl", hash = "sha256:821d41c5fa9e19117616c35eaa3f4b75046ec76c65e7ae20a333e9a8696bc7fe"}, + {file = "ruff-0.15.5-py3-none-win_arm64.whl", hash = "sha256:b498d1c60d2fe5c10c45ec3f698901065772730b411f164ae270bb6bfcc4740b"}, + {file = "ruff-0.15.5.tar.gz", hash = "sha256:7c3601d3b6d76dce18c5c824fc8d06f4eef33d6df0c21ec7799510cde0f159a2"}, ] [[package]] From 736d730025ee5fdb15f55750bd8e5049bf167783 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Tue, 10 Mar 2026 11:14:39 +0100 Subject: [PATCH 24/38] Add run-name to publish workflows to show release version (#581) --- .github/workflows/docs.yml | 2 ++ .github/workflows/release.yml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index a9c96cca3..d1cc42fba 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -1,5 +1,7 @@ name: Publish documentation +run-name: "Publish documentation ${{ github.event.client_payload.tag_name || github.event.release.tag_name || '' }}" + on: release: types: [ published ] diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3c9037499..10613383e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,5 +1,7 @@ name: Publish package +run-name: "Publish package ${{ github.event.client_payload.tag_name || github.event.release.tag_name || '' }}" + on: release: types: [ published ] From 81b856c189734e45223b7c69e9f39fbeea897cb2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Tue, 10 Mar 2026 13:42:30 +0100 Subject: [PATCH 25/38] Fix 567: Update DAG Analysis sorting on Hierarchical Rulesets (#572) * Removed Hierarchy AST rules validation and sorting from interpreter * Updated DAG to validate and sort Hierarchical roll-up rules * Added related tests * Updated related test * Minor fix * Fixed mypy errors * Removed outdated pysapark code * Added HRuleset rule sorting statement into DAGAnalyzer * Fixed related assertion tests * Updated cyclic graph detection * Fixed related tests * Added duplicated HR EQ rules error * Updated related tests * Fixed linting errors * Fixed related tests --- src/vtlengine/AST/DAG/__init__.py | 42 +- src/vtlengine/Exceptions/messages.py | 6 + src/vtlengine/Interpreter/__init__.py | 11 +- .../data/DataSet/output/11-25-DS_r.csv 
| 12 +- .../data/DataSet/output/11-26-DS_r.csv | 28 +- .../data/DataSet/output/11-27-DS_r.csv | 28 +- .../data/DataSet/output/11-28-DS_r.csv | 28 +- .../data/DataSet/output/11-29-DS_r.csv | 30 +- .../data/DataSet/output/11-30-DS_r.csv | 30 +- tests/Additional/test_additional.py | 9 +- .../data/DataSet/output/VALIDATIONS-1.csv | 2982 ++++++++--------- .../data/DataSet/output/VALIDATIONS-12.csv | 2 +- .../data/DataSet/output/VALIDATIONS-13.csv | 22 +- .../data/DataSet/output/VALIDATIONS-14.csv | 10 +- .../data/DataSet/output/VALIDATIONS-15.csv | 16 +- .../data/DataSet/output/VALIDATIONS-16.csv | 188 +- .../data/DataSet/output/VALIDATIONS-17.csv | 10 +- .../data/DataSet/output/VALIDATIONS-18.csv | 8 +- .../data/DataSet/output/VALIDATIONS-19.csv | 26 +- .../data/DataSet/output/VALIDATIONS-20.csv | 2 +- .../data/DataSet/output/VALIDATIONS-21.csv | 243 +- .../data/DataSet/output/VALIDATIONS-3.csv | 2 +- .../data/DataSet/input/GH_567_1-1.csv | 6 + .../data/DataSet/output/GH_567_1-1.csv | 4 + .../data/DataStructure/input/GH_567_1-1.json | 27 + .../data/DataStructure/input/GH_567_2-1.json | 27 + .../data/DataStructure/input/GH_567_3-1.json | 27 + .../data/DataStructure/input/GH_567_4-1.json | 27 + .../data/DataStructure/output/GH_567_1-1.json | 27 + tests/Hierarchical/data/vtl/GH_567_1.vtl | 8 + tests/Hierarchical/data/vtl/GH_567_2.vtl | 5 + tests/Hierarchical/data/vtl/GH_567_3.vtl | 8 + tests/Hierarchical/data/vtl/GH_567_4.vtl | 8 + tests/Hierarchical/test_hierarchical.py | 114 +- .../ReferenceManual/test_reference_manual.py | 33 +- 35 files changed, 2173 insertions(+), 1883 deletions(-) create mode 100644 tests/Hierarchical/data/DataSet/input/GH_567_1-1.csv create mode 100644 tests/Hierarchical/data/DataSet/output/GH_567_1-1.csv create mode 100644 tests/Hierarchical/data/DataStructure/input/GH_567_1-1.json create mode 100644 tests/Hierarchical/data/DataStructure/input/GH_567_2-1.json create mode 100644 tests/Hierarchical/data/DataStructure/input/GH_567_3-1.json create 
mode 100644 tests/Hierarchical/data/DataStructure/input/GH_567_4-1.json create mode 100644 tests/Hierarchical/data/DataStructure/output/GH_567_1-1.json create mode 100644 tests/Hierarchical/data/vtl/GH_567_1.vtl create mode 100644 tests/Hierarchical/data/vtl/GH_567_2.vtl create mode 100644 tests/Hierarchical/data/vtl/GH_567_3.vtl create mode 100644 tests/Hierarchical/data/vtl/GH_567_4.vtl diff --git a/src/vtlengine/AST/DAG/__init__.py b/src/vtlengine/AST/DAG/__init__.py index 17deee8a3..29d7cb819 100644 --- a/src/vtlengine/AST/DAG/__init__.py +++ b/src/vtlengine/AST/DAG/__init__.py @@ -38,7 +38,14 @@ ) from vtlengine.AST.ASTTemplate import ASTTemplate from vtlengine.AST.DAG._models import DatasetSchedule, StatementDeps -from vtlengine.AST.Grammar.tokens import AS, DROP, KEEP, MEMBERSHIP, RENAME, TO +from vtlengine.AST.Grammar.tokens import ( + AS, + DROP, + KEEP, + MEMBERSHIP, + RENAME, + TO, +) from vtlengine.Exceptions import SemanticError from vtlengine.Model import Component @@ -149,13 +156,15 @@ def _build_and_sort_graph(self, error_op: str) -> None: self.sorting = result except nx.NetworkXUnfeasible: error_keys: Dict[int, Any] = {} - for v in self.edges.values(): - aux_v0, aux_v1 = v[1], v[0] - for iv in self.edges.values(): - if aux_v0 == iv[0] and aux_v1 == iv[1]: - error_keys[aux_v0] = self.dependencies[aux_v0] - error_keys[aux_v1] = self.dependencies[aux_v1] - break + try: + cycle = nx.find_cycle(graph) + for u, v in cycle: + if u in self.dependencies: + error_keys[u] = self.dependencies[u] + if v in self.dependencies: + error_keys[v] = self.dependencies[v] + except nx.NetworkXNoCycle: + pass raise SemanticError("1-3-2-3", op=error_op, nodes=error_keys) from None def load_vertex(self) -> None: @@ -237,6 +246,8 @@ def visit_Start(self, node: Start) -> None: for ast_element in node.children: if isinstance(ast_element, Operator): udos[ast_element.op] = ast_element + elif isinstance(ast_element, HRuleset): + HRDAGAnalyzer.sort_hr_rules(ast_element) self.udos 
= udos for child in node.children: if isinstance(child, (Assignment, PersistentAssignment)): @@ -359,6 +370,21 @@ def visit_DPValidation(self, node: DPValidation) -> None: class HRDAGAnalyzer(DAGAnalyzer): + @classmethod + def sort_hr_rules(cls, node: HRuleset) -> None: + """Filter valid hierarchy rules (EQ comparison) and sort by dependency order. + + Modifies node.rules in place: removes rules whose comparison operator is not '=' + and re-sorts the remaining rules based on the dependency DAG. + """ + dag = cls() + dag.visit(node) + dag.load_vertex() + dag.load_edges() + if len(dag.edges) != 0: + dag._build_and_sort_graph("hierarchy") + node.rules = dag.sort_elements(node.rules) + def visit_HRuleset(self, node: HRuleset) -> None: """ HRuleset: (name, element, rules) diff --git a/src/vtlengine/Exceptions/messages.py b/src/vtlengine/Exceptions/messages.py index f2cc4a784..b147096c4 100644 --- a/src/vtlengine/Exceptions/messages.py +++ b/src/vtlengine/Exceptions/messages.py @@ -554,6 +554,12 @@ "description": "Raised when condComp and ruleComp in a ruleset signature do not " "match as required.", }, + "1-1-10-10": { + "message": "Duplicated EQ rules on the Hierarchy Roll-up {ruleset}. " + "Check the EQ rules: {rule}.", + "description": "Raised when there are no applicable rules in a Hierarchy Roll-up " + "due to missing '=' operators.", + }, # General Operators "2-1-12-1": { "message": "At op {op}: Create a null Measure without a Scalar type is not allowed. 
" diff --git a/src/vtlengine/Interpreter/__init__.py b/src/vtlengine/Interpreter/__init__.py index eda8789bc..656d22c46 100644 --- a/src/vtlengine/Interpreter/__init__.py +++ b/src/vtlengine/Interpreter/__init__.py @@ -1296,7 +1296,7 @@ def _get_hr_mode_values(self, node: AST.HROperation) -> Tuple[str, str, str]: output = node.output.value if node.output else "invalid" return mode, input_, output - def visit_HROperation(self, node: AST.HROperation) -> None: + def visit_HROperation(self, node: AST.HROperation) -> None: # noqa: C901 """Handle hierarchy and check_hierarchy operators.""" # Visit dataset and get component if present # Deep copy the dataset when there are conditions to avoid modifying the original @@ -1356,8 +1356,17 @@ def visit_HROperation(self, node: AST.HROperation) -> None: for rule in hr_info["rules"]: if rule.rule.op == EQ or rule.rule.op == WHEN and rule.rule.right.op == EQ: aux.append(rule) + if len(aux) == 0: raise SemanticError("1-1-10-5") + + left_parts = [] + for rule in aux: + left_part = rule.rule.left if rule.rule.op == EQ else rule.rule.right.left + if left_part in left_parts: + raise SemanticError("1-1-10-10", ruleset=hr_name, rule=left_part) + left_parts.append(left_part) + hr_info["rules"] = aux hierarchy_ast = AST.HRuleset( diff --git a/tests/Additional/data/DataSet/output/11-25-DS_r.csv b/tests/Additional/data/DataSet/output/11-25-DS_r.csv index d15ff873e..2c6553e9a 100644 --- a/tests/Additional/data/DataSet/output/11-25-DS_r.csv +++ b/tests/Additional/data/DataSet/output/11-25-DS_r.csv @@ -1,6 +1,6 @@ -Id1,Id2,bool_var,imbalance,ruleid,errorcode,errorlevel -1,A,False,-115.0,1,error,5.0 -1,A,False,-110.0,2,error2,5.0 -2,A,True,0.0,2,, -3,A,True,0.0,2,, -1,A,False,-5.0,3,error3,5.0 +Id1,Id2,ruleid,bool_var,errorcode,errorlevel,imbalance +1,A,2,False,error,5.0,-115.0 +1,A,3,False,error3,5.0,-5.0 +1,A,4,False,error2,5.0,-110.0 +2,A,4,True,,,0.0 +3,A,4,True,,,0.0 diff --git a/tests/Additional/data/DataSet/output/11-26-DS_r.csv 
b/tests/Additional/data/DataSet/output/11-26-DS_r.csv index ad1a08474..3e1ff38ee 100644 --- a/tests/Additional/data/DataSet/output/11-26-DS_r.csv +++ b/tests/Additional/data/DataSet/output/11-26-DS_r.csv @@ -1,14 +1,14 @@ -Id1,Id2,bool_var,imbalance,ruleid,errorcode,errorlevel -1,A,False,-115.0,1,error,5.0 -2,A,True,0.0,1,, -3,A,,,1,, -1,A,False,-110.0,2,error2,5.0 -2,A,True,0.0,2,, -3,A,True,0.0,2,, -1,A,False,-5.0,3,error3,5.0 -2,A,True,200.0,3,, -3,A,,,3,, -2,A,False,200.0,4,error4,5.0 -3,A,False,300.0,4,error4,5.0 -1,C,False,5.0,6,error6,5.0 -3,C,,,6,, +Id1,Id2,ruleid,bool_var,errorcode,errorlevel,imbalance +1,C,1,False,error6,5.0,5.0 +3,C,1,,,, +1,A,2,False,error,5.0,-115.0 +2,A,2,True,,,0.0 +3,A,2,,,, +1,A,3,False,error3,5.0,-5.0 +2,A,3,True,,,200.0 +3,A,3,,,, +1,A,4,False,error2,5.0,-110.0 +2,A,4,True,,,0.0 +3,A,4,True,,,0.0 +2,A,5,False,error4,5.0,200.0 +3,A,5,False,error4,5.0,300.0 diff --git a/tests/Additional/data/DataSet/output/11-27-DS_r.csv b/tests/Additional/data/DataSet/output/11-27-DS_r.csv index 9d040882b..1b0126efc 100644 --- a/tests/Additional/data/DataSet/output/11-27-DS_r.csv +++ b/tests/Additional/data/DataSet/output/11-27-DS_r.csv @@ -1,14 +1,14 @@ -Id1,Id2,bool_var,imbalance,ruleid,errorcode,errorlevel -1,A,False,-115.0,1,error,5.0 -2,A,,,1,, -3,A,,,1,, -1,A,False,-110.0,2,error2,5.0 -2,A,True,0.0,2,, -3,A,True,0.0,2,, -1,A,False,-5.0,3,error3,5.0 -2,A,,,3,, -3,A,,,3,, -1,A,,,4,, -2,A,,,4,, -3,A,,,4,, -1,C,,,6,, +Id1,Id2,ruleid,bool_var,errorcode,errorlevel,imbalance +1,C,1,,,, +1,A,2,False,error,5.0,-115.0 +2,A,2,,,, +3,A,2,,,, +1,A,3,False,error3,5.0,-5.0 +2,A,3,,,, +3,A,3,,,, +1,A,4,False,error2,5.0,-110.0 +2,A,4,True,,,0.0 +3,A,4,True,,,0.0 +1,A,5,,,, +2,A,5,,,, +3,A,5,,,, diff --git a/tests/Additional/data/DataSet/output/11-28-DS_r.csv b/tests/Additional/data/DataSet/output/11-28-DS_r.csv index db22e962d..27e11130b 100644 --- a/tests/Additional/data/DataSet/output/11-28-DS_r.csv +++ b/tests/Additional/data/DataSet/output/11-28-DS_r.csv 
@@ -1,14 +1,14 @@ -Id1,Id2,bool_var,imbalance,ruleid,errorcode,errorlevel -1,A,False,-115.0,1,error,5.0 -2,A,True,0.0,1,, -3,A,,,1,, -1,A,False,-110.0,2,error2,5.0 -2,A,True,0.0,2,, -3,A,True,0.0,2,, -1,A,False,-5.0,3,error3,5.0 -2,A,True,200.0,3,, -3,A,,,3,, -1,A,True,0.0,4,, -2,A,False,200.0,4,error4,5.0 -3,A,False,300.0,4,error4,5.0 -1,C,False,5.0,6,error6,5.0 +Id1,Id2,ruleid,bool_var,errorcode,errorlevel,imbalance +1,C,1,False,error6,5.0,5.0 +1,A,2,False,error,5.0,-115.0 +2,A,2,True,,,0.0 +3,A,2,,,, +1,A,3,False,error3,5.0,-5.0 +2,A,3,True,,,200.0 +3,A,3,,,, +1,A,4,False,error2,5.0,-110.0 +2,A,4,True,,,0.0 +3,A,4,True,,,0.0 +1,A,5,True,,,0.0 +2,A,5,False,error4,5.0,200.0 +3,A,5,False,error4,5.0,300.0 diff --git a/tests/Additional/data/DataSet/output/11-29-DS_r.csv b/tests/Additional/data/DataSet/output/11-29-DS_r.csv index 2b080dca2..7e4179986 100644 --- a/tests/Additional/data/DataSet/output/11-29-DS_r.csv +++ b/tests/Additional/data/DataSet/output/11-29-DS_r.csv @@ -1,15 +1,15 @@ -Id1,Id2,bool_var,imbalance,ruleid,errorcode,errorlevel -1,A,False,-115.0,1,error,5.0 -2,A,,,1,, -3,A,,,1,, -1,A,False,-110.0,2,error2,5.0 -2,A,True,0.0,2,, -3,A,True,0.0,2,, -1,A,False,-5.0,3,error3,5.0 -2,A,,,3,, -3,A,,,3,, -1,A,,,4,, -2,A,,,4,, -3,A,,,4,, -1,C,,,6,, -3,C,,,6,, +Id1,Id2,ruleid,bool_var,errorcode,errorlevel,imbalance +1,C,1,,,, +3,C,1,,,, +1,A,2,False,error,5.0,-115.0 +2,A,2,,,, +3,A,2,,,, +1,A,3,False,error3,5.0,-5.0 +2,A,3,,,, +3,A,3,,,, +1,A,4,False,error2,5.0,-110.0 +2,A,4,True,,,0.0 +3,A,4,True,,,0.0 +1,A,5,,,, +2,A,5,,,, +3,A,5,,,, diff --git a/tests/Additional/data/DataSet/output/11-30-DS_r.csv b/tests/Additional/data/DataSet/output/11-30-DS_r.csv index dba31de81..4fb50de66 100644 --- a/tests/Additional/data/DataSet/output/11-30-DS_r.csv +++ b/tests/Additional/data/DataSet/output/11-30-DS_r.csv @@ -1,15 +1,15 @@ -Id1,Id2,bool_var,imbalance,ruleid,errorcode,errorlevel -1,A,False,-115.0,1,error,5.0 -2,A,True,0.0,1,, -3,A,,,1,, -1,A,False,-110.0,2,error2,5.0 
-2,A,True,0.0,2,, -3,A,True,0.0,2,, -1,A,False,-5.0,3,error3,5.0 -2,A,True,200.0,3,, -3,A,,,3,, -1,A,True,0.0,4,, -2,A,False,200.0,4,error4,5.0 -3,A,False,300.0,4,error4,5.0 -1,C,False,5.0,6,error6,5.0 -3,C,,,6,, +Id1,Id2,ruleid,bool_var,errorcode,errorlevel,imbalance +1,C,1,False,error6,5.0,5.0 +3,C,1,,,, +1,A,2,False,error,5.0,-115.0 +2,A,2,True,,,0.0 +3,A,2,,,, +1,A,3,False,error3,5.0,-5.0 +2,A,3,True,,,200.0 +3,A,3,,,, +1,A,4,False,error2,5.0,-110.0 +2,A,4,True,,,0.0 +3,A,4,True,,,0.0 +1,A,5,True,,,0.0 +2,A,5,False,error4,5.0,200.0 +3,A,5,False,error4,5.0,300.0 diff --git a/tests/Additional/test_additional.py b/tests/Additional/test_additional.py index 826c4fdb6..73d5c73a6 100644 --- a/tests/Additional/test_additional.py +++ b/tests/Additional/test_additional.py @@ -2850,13 +2850,10 @@ def test_4(self): code = "11-4" number_inputs = 1 - references_names = ["DS_r"] + exception_code = "1-3-2-3" - self.BaseTest( - text=text, - code=code, - number_inputs=number_inputs, - references_names=references_names, + self.NewSemanticExceptionTest( + text=text, code=code, number_inputs=number_inputs, exception_code=exception_code ) def test_5(self): diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-1.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-1.csv index 630e18187..340f79dbb 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-1.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-1.csv @@ -1,1492 +1,1492 @@ ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE -A,N,N,1C,S121,XDR,T,M,R,FK,FA,_Z,BE,S121,2019-06-30,EUR,M,0 -C,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,765 -C,N,N,B5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,503 -C,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,44 
-C,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,146 -C,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,944 -C,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,1267 -C,N,N,B5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,690 -C,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,0 -C,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,16148 -C,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,803 -C,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,83 -C,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,6233 -D,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,227 -D,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,186 -D,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,126 -D,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,325 -D,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2722 -D,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,20263 -D,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,15838 -D,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1237 -D,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,29 -D,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,6732 -C,N,N,B5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,2 -C,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,763 -C,N,N,B6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,473 -C,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,44 -C,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,147 -C,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,489 -C,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,1235 -C,N,N,B6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,680 -C,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,20947 -C,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,14699 -C,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,684 -C,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,80 -C,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,5414 -D,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,223 
-D,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,186 -D,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,129 -D,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,170 -D,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2590 -D,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,18814 -D,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,14933 -D,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1033 -D,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,29 -D,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,5921 -C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,0 -C,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,71 -C,N,N,D5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,67 -C,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0 -C,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,3 -C,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,122 -C,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,290 -C,N,N,D5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,104 -C,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,8610 -C,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,6701 -C,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,164 -C,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,1 -C,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,2365 -D,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,13 -D,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0 -D,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,75 -D,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,403 -D,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,235 -D,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,11204 -D,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,6532 -D,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,378 -D,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,31 -D,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,2137 -C,N,N,D5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,24 
-C,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,73 -C,N,N,D6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,97 -C,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0 -C,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,2 -C,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,577 -C,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,323 -C,N,N,D6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,114 -C,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,10998 -C,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,8150 -C,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,283 -C,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,3 -C,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,3184 -D,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,17 -D,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0 -D,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,71 -D,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,558 -D,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,366 -D,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,12653 -D,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,7437 -D,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,582 -D,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,32 -D,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,2948 -C,N,N,D6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,26 -A,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,12694 -C,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,429 -C,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,62 -C,N,N,I8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,434 -C,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0 -C,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,150 -C,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,479 -C,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,1093 -C,N,N,I8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,678 -C,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,18611 
-C,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,13021 -C,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,539 -C,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,31 -C,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,4663 -D,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,219 -D,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,103 -D,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0 -D,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,119 -D,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,166 -D,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2400 -D,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,16927 -D,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,13444 -D,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,552 -D,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,28 -D,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,5286 -L,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,24606 -N,N,N,I8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-06-30,EUR,T,191 -A,N,N,I8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0 -A,N,N,I8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,0 -A,N,N,I8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0 -A,N,N,I8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-453 -A,N,N,I8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-138 -A,N,N,I8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,-10 -A,N,N,I8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0 -A,N,N,I8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,0 -A,N,N,I8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,277 -A,N,N,I8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-453 -A,N,N,I8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-138 -A,N,N,I8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,267 -A,N,N,I8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-289 -A,N,N,I8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-261 -A,N,N,I8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0 -A,N,N,I8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,713 
-A,N,N,I8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,397 -A,N,N,I8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,1178 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-06-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-06-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,-86 -A,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-06-30,EUR,N,-176 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-06-30,EUR,M,-486 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-06-30,EUR,M,25 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-06-30,EUR,M,0 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-06-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,37554 -L,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-06-30,EUR,N,0 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-374 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,150 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,11907 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-06-30,EUR,M,-729 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-06-30,EUR,M,-95 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-06-30,EUR,M,-28 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-529 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-4432 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-4432 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,64 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,-14796 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-06-30,EUR,M,-2 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-06-30,EUR,M,-53 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-06-30,EUR,M,0 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 
-L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-374 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,150 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,11907 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-06-30,EUR,M,-731 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-06-30,EUR,M,-148 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-06-30,EUR,M,-28 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-529 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-4432 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-4432 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,64 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-14796 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,365 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-06-30,EUR,M,-12 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-06-30,EUR,M,-8 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-06-30,EUR,M,-36 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,-1 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,-1044 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-12294 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-7702 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-7846 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-06-30,EUR,M,1199 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-06-30,EUR,M,129 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-06-30,EUR,M,1509 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-12355 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-8539 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-9689 
-L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-1936 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-11256 -A,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,32430 -C,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,407 -C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,-7 -C,N,N,J8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,136 -C,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,44 -C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,-1 -C,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,587 -C,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,464 -C,N,N,J8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,116 -C,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,13335 -C,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,9829 -C,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,428 -C,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,53 -C,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,3935 -D,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,21 -D,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,69 -D,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,186 -D,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,82 -D,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,561 -D,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,556 -D,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,14539 -D,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,8926 -D,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1064 -D,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,33 -D,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,3583 -L,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,-4542 -N,N,N,J8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-06-30,EUR,T,492 -A,N,N,J8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0 -A,N,N,J8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-130 -A,N,N,J8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0 -A,N,N,J8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-199 
-A,N,N,J8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,6474 -A,N,N,J8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,5 -A,N,N,J8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0 -A,N,N,J8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,0 -A,N,N,J8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0 -A,N,N,J8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-199 -A,N,N,J8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,6474 -A,N,N,J8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,5 -A,N,N,J8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,795 -A,N,N,J8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-8 -A,N,N,J8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0 -A,N,N,J8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,172 -A,N,N,J8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,376 -A,N,N,J8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,551 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-06-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-06-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,158 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-06-30,EUR,M,0 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-06-30,EUR,M,0 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-06-30,EUR,M,0 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-06-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,-291 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-561 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,-20 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,-1146 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-06-30,EUR,M,340 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-06-30,EUR,M,6735 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-06-30,EUR,M,0 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-515 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,7 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,7 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,-125 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,-4070 
-A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-06-30,EUR,M,1 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-06-30,EUR,M,-42 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-06-30,EUR,M,0 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-561 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-20 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-1146 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-06-30,EUR,M,341 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-06-30,EUR,M,6693 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-06-30,EUR,M,0 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-515 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,7 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,7 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-125 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-4070 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,41 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-06-30,EUR,M,1 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-06-30,EUR,M,0 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-06-30,EUR,M,0 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,-1 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,-4 
-A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,109 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,884 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,4712 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-06-30,EUR,M,426 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-06-30,EUR,M,19 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-06-30,EUR,M,555 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-518 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-625 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-662 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,1615 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,4331 -A,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,-1678 -A,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,-19808 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-06-30,EUR,_X,-13121 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-06-30,EUR,_X,-66870 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,8105 -A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,15937 -A,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-06-30,EUR,M,9448 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,738 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,6710 -A,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-06-30,EUR,M,7448 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,2000 -B,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,-501 -B,N,N,W1,S1,_T,T,M,_Z,_Z,CKA,_Z,BE,S1,2019-06-30,EUR,_X,-478 -B,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,479 -B,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,479 -B,N,N,W1,S1,_T,T,M,_Z,_Z,GS,_Z,BE,S1,2019-06-30,EUR,_X,208 -B,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,23 -C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,37457 -C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,836 -C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,54 -C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,570 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-06-30,EUR,_X,-13919 
-C,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,44 -C,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,4163 -C,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,2623 -C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,149 -C,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,1364 -C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,1066 -C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,1557 -C,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,794 -C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31946 -C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,22849 -C,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-06-30,EUR,_X,5042 -C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,967 -C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,84 -C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,8598 -D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,37959 -D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,240 -D,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,172 -D,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,597 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-06-30,EUR,_X,-13588 -D,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,186 -D,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,4678 -D,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,3684 -D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,200 -D,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,793 -D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,728 -D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2956 -D,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,196 -D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31466 -D,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,22370 -D,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-06-30,EUR,_X,5104 -D,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1615 -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,60 -D,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,8869 
-L,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,199 -L,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,-15157 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-06-30,EUR,_X,-14775 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-06-30,EUR,_X,-382 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,10424 -L,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,20064 -L,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-06-30,EUR,M,4933 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,5848 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-3414 -L,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-06-30,EUR,M,2434 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,2499 -N,N,N,W1,S1,_T,T,M,_Z,_Z,EO,_Z,BE,S1,2019-06-30,EUR,_X,-1400 -N,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,-1878 -N,N,N,W1,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-06-30,EUR,T,683 -A,N,N,W1,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0 -A,N,N,W1,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-130 -A,N,N,W1,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0 -A,N,N,W1,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-652 -A,N,N,W1,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,6336 -A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,-5 -A,N,N,W1,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0 -A,N,N,W1,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,0 -A,N,N,W1,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,278 -A,N,N,W1,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-652 -A,N,N,W1,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,6336 -A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,273 -A,N,N,W1,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,506 -A,N,N,W1,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-269 -A,N,N,W1,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0 -A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,884 -A,N,N,W1,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,773 -A,N,N,W1,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,1728 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-06-30,EUR,_X,0 
-A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-06-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,72 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-06-30,EUR,M,-486 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-06-30,EUR,M,25 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-06-30,EUR,M,0 -A,N,N,W1,S1,X1,T,M,R,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,-106 -A,N,N,W1,S1,XAU,T,M,R,F11,FA,_Z,BE,S121,2019-06-30,EUR,M,0 -A,N,N,W1,S1,X1,T,M,R,F2,FA,T,BE,S121,2019-06-30,EUR,N,-3 -A,N,N,W1,S1,X1,T,M,R,F3,FA,L,BE,S121,2019-06-30,EUR,M,-134 -A,N,N,W1,S1,X1,T,M,R,F3,FA,S,BE,S121,2019-06-30,EUR,M,41 -A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-06-30,EUR,M,-93 -A,N,N,W1,S1,X1,T,M,R,F5,FA,_Z,BE,S121,2019-06-30,EUR,M,0 -A,N,N,W1,S1,X1,T,M,R,FR1,FA,_Z,BE,S121,2019-06-30,EUR,M,-93 -A,N,N,W1,S1,X1,T,M,R,FR2,FA,_Z,BE,S121,2019-06-30,EUR,_X,-106 -A,N,N,W1,S1,X1,T,M,R,FR41,FA,_Z,BE,S121,2019-06-30,EUR,_X,-4 -C,N,N,W1,S1,X1,T,M,R,FLA,D41,T,BE,S121,2019-06-30,EUR,_X,26 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-06-30,EUR,_X,0 -C,N,N,W1,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,26 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,37263 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-06-30,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-06-30,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-06-30,EUR,M,5 -N,N,N,W1,S1,X1,T,M,R,F71,FA,T,BE,S121,2019-06-30,EUR,T,-6 -A,N,N,W1,S121,X1,T,M,R,F2,FA,T,BE,S121,2019-06-30,EUR,N,-3 -A,N,N,W1,S122,X1,T,M,R,F2,FA,T,BE,S121,2019-06-30,EUR,N,0 -A,N,N,W1,S12K,XAU,T,M,R,F11B,FA,_Z,BE,S121,2019-06-30,EUR,M,0 -A,N,N,W1,S1N,XDR,T,M,R,F12,FA,T,BE,S121,2019-06-30,EUR,M,0 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-935 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,130 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,10761 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-06-30,EUR,M,-389 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-06-30,EUR,M,6640 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-06-30,EUR,M,-28 
-C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-1044 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-4424 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-4424 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,-61 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,-18866 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-06-30,EUR,M,-116 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-06-30,EUR,M,-1159 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-06-30,EUR,M,0 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-06-30,EUR,M,-1 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-06-30,EUR,M,-95 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-06-30,EUR,M,0 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-06-30,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-06-30,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-06-30,EUR,M,-125 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-935 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,130 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,10761 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-06-30,EUR,M,-390 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-06-30,EUR,M,6545 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-06-30,EUR,M,-28 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-1044 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-4424 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-4424 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-61 
-L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-18866 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-06-30,EUR,M,-116 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-06-30,EUR,M,-1159 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-06-30,EUR,M,-125 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,405 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-06-30,EUR,M,-11 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-06-30,EUR,M,-8 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-06-30,EUR,M,-36 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,-2 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,-1048 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-06-30,EUR,M,5730 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-06-30,EUR,M,-1188 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-06-30,EUR,M,0 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-12186 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-6818 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-3134 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-06-30,EUR,M,1625 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-06-30,EUR,M,148 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-06-30,EUR,M,2064 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-12873 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-9164 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-10351 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-321 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-6925 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-06-30,EUR,M,234 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-06-30,EUR,M,-1066 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-06-30,EUR,M,2619 -A,N,N,W19,S1N,XAU,T,M,R,F11A,FA,_Z,BE,S121,2019-06-30,EUR,M,0 
-A,N,N,1C,S121,XDR,T,M,R,FK,FA,_Z,BE,S121,2019-05-31,EUR,M,0 -C,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,887 -C,N,N,B5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,519 -C,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,88 -C,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,118 -C,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,535 -C,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1267 -C,N,N,B5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1287 -C,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,24091 -C,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,17780 -C,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,844 -C,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,205 -C,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,5927 -D,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,388 -D,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,159 -D,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,128 -D,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,368 -D,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2722 -D,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,22860 -D,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,18387 -D,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,1181 -D,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,19 -D,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,6724 -C,N,N,B5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,2 -C,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,886 -C,N,N,B6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,488 -C,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,88 -C,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,119 -C,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,245 -C,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1235 -C,N,N,B6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1262 -C,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,21929 -C,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,16419 
-C,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,712 -C,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,205 -C,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,5255 -D,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,384 -D,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,159 -D,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,132 -D,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,211 -D,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2590 -D,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,21018 -D,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,17208 -D,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,972 -D,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,18 -D,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,5982 -C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,0 -C,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,78 -C,N,N,D5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,68 -C,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0 -C,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,3 -C,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,100 -C,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,290 -C,N,N,D5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,67 -C,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,9101 -C,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,6939 -C,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,177 -C,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,1 -C,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,2197 -D,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,13 -D,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0 -D,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,76 -D,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,402 -D,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,235 -D,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,10746 -D,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,6746 
-D,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,444 -D,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,31 -D,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,1794 -C,N,N,D5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,26 -C,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,79 -C,N,N,D6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,99 -C,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0 -C,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,2 -C,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,390 -C,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,323 -C,N,N,D6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,92 -C,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,11263 -C,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,8300 -C,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,310 -C,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,2 -C,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,2869 -D,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,17 -D,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0 -D,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,73 -D,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,559 -D,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,366 -D,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,12588 -D,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,7925 -D,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,653 -D,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,32 -D,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,2536 -C,N,N,D6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,27 -A,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-3674 -C,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,553 -C,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,33 -C,N,N,I8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,452 -C,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0 -C,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,123 
-C,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,240 -C,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1093 -C,N,N,I8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1250 -C,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,19436 -C,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,14535 -C,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,567 -C,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,155 -C,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,4533 -D,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,380 -D,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,105 -D,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0 -D,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,121 -D,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,209 -D,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2400 -D,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,18939 -D,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,15541 -D,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,607 -D,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,18 -D,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,5370 -L,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,3598 -N,N,N,I8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-05-31,EUR,T,152 -A,N,N,I8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,0 -A,N,N,I8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,0 -A,N,N,I8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0 -A,N,N,I8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,336 -A,N,N,I8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-152 -A,N,N,I8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-125 -A,N,N,I8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,0 -A,N,N,I8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,0 -A,N,N,I8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-342 -A,N,N,I8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,336 -A,N,N,I8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-152 -A,N,N,I8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-467 
-A,N,N,I8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,1150 -A,N,N,I8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,917 -A,N,N,I8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0 -A,N,N,I8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,248 -A,N,N,I8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,472 -A,N,N,I8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-1900 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-05-31,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-05-31,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,-450 -A,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-05-31,EUR,N,-449 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-05-31,EUR,M,-11 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-05-31,EUR,M,0 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-05-31,EUR,M,0 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-05-31,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,8005 -L,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-05-31,EUR,N,0 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,-13 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,529 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-1158 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-05-31,EUR,M,1058 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,563 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-05-31,EUR,M,10 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,19 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,124 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,124 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,7 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-3542 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-05-31,EUR,M,55 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,644 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-05-31,EUR,M,0 
-C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-13 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,529 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-1158 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,1113 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,1206 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-05-31,EUR,M,10 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,19 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,124 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,124 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,7 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-3542 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,-1 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,-50 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,-14 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-05-31,EUR,M,0 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-05-31,EUR,M,-3 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,0 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,1447 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1475 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-11049 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-5347 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-05-31,EUR,M,645 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-05-31,EUR,M,31 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-05-31,EUR,M,-2373 
-C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-281 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-5624 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-10209 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-4139 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1028 -A,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-1016 -C,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,413 -C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,-7 -C,N,N,J8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,135 -C,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,88 -C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,-1 -C,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,396 -C,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,464 -C,N,N,J8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,104 -C,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,13756 -C,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,10184 -C,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,455 -C,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,52 -C,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,3591 -D,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,21 -D,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,71 -D,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,159 -D,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,84 -D,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,562 -D,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,556 -D,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,14667 -D,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,9592 -D,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,1018 -D,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,33 -D,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,3148 -L,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-12219 -N,N,N,J8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-05-31,EUR,T,-139 -A,N,N,J8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,-1 
-A,N,N,J8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1 -A,N,N,J8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0 -A,N,N,J8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,244 -A,N,N,J8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1060 -A,N,N,J8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-72 -A,N,N,J8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,0 -A,N,N,J8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,0 -A,N,N,J8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0 -A,N,N,J8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,244 -A,N,N,J8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1060 -A,N,N,J8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-72 -A,N,N,J8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,-265 -A,N,N,J8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,28 -A,N,N,J8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0 -A,N,N,J8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,8 -A,N,N,J8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-28 -A,N,N,J8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-1572 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-05-31,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-05-31,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,19 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-05-31,EUR,M,-74 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-05-31,EUR,M,0 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-05-31,EUR,M,0 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-05-31,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,-183 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,83 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,28 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-2360 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-05-31,EUR,M,5 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,-798 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-05-31,EUR,M,-17 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,79 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,9 
-L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,9 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,194 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-12515 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-05-31,EUR,M,0 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,-237 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-05-31,EUR,M,0 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,83 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,28 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-2360 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,4 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,-1036 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-05-31,EUR,M,-17 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,79 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,9 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,9 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,194 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-12515 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,401 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,8 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-05-31,EUR,M,0 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-05-31,EUR,M,0 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,-1 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,0 
-L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,-861 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-49 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,6561 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-1308 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-05-31,EUR,M,47 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-05-31,EUR,M,-25 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-05-31,EUR,M,-1626 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-126 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,384 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,347 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,5950 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,632 -A,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-14845 -A,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-2434 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-05-31,EUR,_X,1498 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-05-31,EUR,_X,-3932 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-10253 -A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-4689 -A,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-05-31,EUR,M,-2112 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,1720 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,177 -A,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-05-31,EUR,M,1897 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-4009 -B,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-05-31,EUR,_X,-3507 -B,N,N,W1,S1,_T,T,M,_Z,_Z,CKA,_Z,BE,S1,2019-05-31,EUR,_X,-3351 -B,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,-414 -B,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,-414 -B,N,N,W1,S1,_T,T,M,_Z,_Z,GS,_Z,BE,S1,2019-05-31,EUR,_X,-808 -B,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,156 -C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-05-31,EUR,_X,39202 -C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,966 
-C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,26 -C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,587 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-05-31,EUR,_X,-310 -C,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,88 -C,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,4283 -C,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,2193 -C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,121 -C,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,1941 -C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,636 -C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1557 -C,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1354 -C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,33192 -C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,24719 -C,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-05-31,EUR,_X,5337 -C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,1022 -C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,207 -C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,8124 -D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-05-31,EUR,_X,42709 -D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,401 -D,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,176 -D,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,608 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-05-31,EUR,_X,-5107 -D,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,159 -D,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,6873 -D,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,3727 -D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,205 -D,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,2942 -D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,770 -D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2956 -D,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2334 -D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,33606 -D,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,25133 -D,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-05-31,EUR,_X,7434 
-D,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,1625 -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,51 -D,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,8518 -L,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-12464 -L,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-7717 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-05-31,EUR,_X,-9729 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-05-31,EUR,_X,2012 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-5989 -L,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-8621 -L,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-05-31,EUR,M,1243 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,2981 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1206 -L,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-05-31,EUR,M,1775 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-533 -N,N,N,W1,S1,_T,T,M,_Z,_Z,EO,_Z,BE,S1,2019-05-31,EUR,_X,970 -N,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-2381 -N,N,N,W1,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-05-31,EUR,T,13 -A,N,N,W1,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,-1 -A,N,N,W1,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1 -A,N,N,W1,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0 -A,N,N,W1,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,580 -A,N,N,W1,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1211 -A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-196 -A,N,N,W1,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,0 -A,N,N,W1,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,0 -A,N,N,W1,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-342 -A,N,N,W1,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,580 -A,N,N,W1,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1211 -A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-539 -A,N,N,W1,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,885 -A,N,N,W1,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,945 -A,N,N,W1,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0 -A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,255 
-A,N,N,W1,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,444 -A,N,N,W1,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-3472 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-05-31,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-05-31,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,-430 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-05-31,EUR,M,-85 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-05-31,EUR,M,0 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-05-31,EUR,M,0 -A,N,N,W1,S1,X1,T,M,R,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,-59 -A,N,N,W1,S1,XAU,T,M,R,F11,FA,_Z,BE,S121,2019-05-31,EUR,M,0 -A,N,N,W1,S1,X1,T,M,R,F2,FA,T,BE,S121,2019-05-31,EUR,N,-123 -A,N,N,W1,S1,X1,T,M,R,F3,FA,L,BE,S121,2019-05-31,EUR,M,-265 -A,N,N,W1,S1,X1,T,M,R,F3,FA,S,BE,S121,2019-05-31,EUR,M,339 -A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-05-31,EUR,M,73 -A,N,N,W1,S1,X1,T,M,R,F5,FA,_Z,BE,S121,2019-05-31,EUR,M,0 -A,N,N,W1,S1,X1,T,M,R,FR1,FA,_Z,BE,S121,2019-05-31,EUR,M,73 -A,N,N,W1,S1,X1,T,M,R,FR2,FA,_Z,BE,S121,2019-05-31,EUR,_X,-60 -A,N,N,W1,S1,X1,T,M,R,FR41,FA,_Z,BE,S121,2019-05-31,EUR,_X,0 -C,N,N,W1,S1,X1,T,M,R,FLA,D41,T,BE,S121,2019-05-31,EUR,_X,28 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-05-31,EUR,_X,0 -C,N,N,W1,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,28 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,7822 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-05-31,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-05-31,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-05-31,EUR,M,0 -N,N,N,W1,S1,X1,T,M,R,F71,FA,T,BE,S121,2019-05-31,EUR,T,-11 -A,N,N,W1,S121,X1,T,M,R,F2,FA,T,BE,S121,2019-05-31,EUR,N,-60 -A,N,N,W1,S122,X1,T,M,R,F2,FA,T,BE,S121,2019-05-31,EUR,N,-62 -A,N,N,W1,S12K,XAU,T,M,R,F11B,FA,_Z,BE,S121,2019-05-31,EUR,M,0 -A,N,N,W1,S1N,XDR,T,M,R,F12,FA,T,BE,S121,2019-05-31,EUR,M,1 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,71 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,557 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-3518 
-A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-05-31,EUR,M,1063 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,-236 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-05-31,EUR,M,-7 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,99 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,133 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,133 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,201 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-16057 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-05-31,EUR,M,68 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,96 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-05-31,EUR,M,0 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-05-31,EUR,M,55 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,406 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-05-31,EUR,M,0 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-05-31,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-05-31,EUR,M,227 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,71 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,557 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-3518 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,1118 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,171 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-05-31,EUR,M,-7 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,99 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,133 
-L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,133 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,201 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-16057 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,68 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,96 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-05-31,EUR,M,227 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,1 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,-1 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,351 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,-5 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-05-31,EUR,M,0 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-05-31,EUR,M,-3 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,-2 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,586 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,3693 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-05-31,EUR,M,-1620 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-05-31,EUR,M,0 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1426 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-4488 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-6655 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-05-31,EUR,M,692 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-05-31,EUR,M,6 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-05-31,EUR,M,-3999 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-407 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-5241 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-9863 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1811 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1660 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-05-31,EUR,M,-780 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-05-31,EUR,M,318 
-L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-05-31,EUR,M,-760 -A,N,N,W19,S1N,XAU,T,M,R,F11A,FA,_Z,BE,S121,2019-05-31,EUR,M,0 -A,N,N,1C,S121,XDR,T,M,R,FK,FA,_Z,BE,S121,2019-04-30,EUR,M,148 -C,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,749 -C,N,N,B5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,510 -C,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,45 -C,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,117 -C,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,457 -C,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,1267 -C,N,N,B5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,630 -C,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,23223 -C,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,16798 -C,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,786 -C,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,88 -C,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,6107 -D,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,212 -D,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,174 -D,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,127 -D,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,244 -D,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2722 -D,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,21892 -D,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,17316 -D,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,1091 -D,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,32 -D,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,6744 -C,N,N,B5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,1 -C,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,743 -C,N,N,B6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,479 -C,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,45 -C,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,119 -C,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,174 -C,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,1235 -C,N,N,B6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,624 
-C,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,20945 -C,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,15425 -C,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,663 -C,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,88 -C,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,5472 -D,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,210 -D,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,174 -D,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,130 -D,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,150 -D,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2590 -D,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,20312 -D,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,16321 -D,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,892 -D,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,31 -D,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,5846 -C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,0 -C,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,69 -C,N,N,D5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,65 -C,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0 -C,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,2 -C,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,87 -C,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,290 -C,N,N,D5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,92 -C,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,9510 -C,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,7363 -C,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,145 -C,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,1 -C,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,2285 -D,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,11 -D,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0 -D,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,77 -D,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,341 -D,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,235 
-D,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,10852 -D,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,6856 -D,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,289 -D,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,31 -D,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,1750 -C,N,N,D5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,25 -C,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,75 -C,N,N,D6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,95 -C,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0 -C,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,1 -C,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,371 -C,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,323 -C,N,N,D6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,98 -C,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,11788 -C,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,8735 -C,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,267 -C,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,2 -C,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,2920 -D,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,14 -D,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0 -D,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,74 -D,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,435 -D,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,366 -D,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,12432 -D,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,7851 -D,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,488 -D,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,32 -D,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,2648 -C,N,N,D6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,26 -A,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-10224 -C,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,410 -C,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,33 -C,N,N,I8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,438 
-C,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0 -C,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,122 -C,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,142 -C,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,1093 -C,N,N,I8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,612 -C,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,18493 -C,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,13607 -C,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,517 -C,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,38 -C,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,4670 -D,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,206 -D,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,103 -D,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0 -D,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,119 -D,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,146 -D,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2400 -D,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,18280 -D,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,14734 -D,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,502 -D,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,30 -D,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,5231 -L,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-12781 -N,N,N,I8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-04-30,EUR,T,1272 -A,N,N,I8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0 -A,N,N,I8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0 -A,N,N,I8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0 -A,N,N,I8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,4 -A,N,N,I8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-7 -A,N,N,I8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-15 -A,N,N,I8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0 -A,N,N,I8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0 -A,N,N,I8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-303 -A,N,N,I8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,4 
-A,N,N,I8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-7 -A,N,N,I8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-318 -A,N,N,I8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,12 -A,N,N,I8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-763 -A,N,N,I8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0 -A,N,N,I8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-171 -A,N,N,I8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-163 -A,N,N,I8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1006 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-04-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-04-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-104 -A,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-04-30,EUR,N,-60 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,-71 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-04-30,EUR,M,-5 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-04-30,EUR,M,0 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-04-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-29833 -L,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-04-30,EUR,N,0 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,53 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,255 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,-8188 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-04-30,EUR,M,230 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-04-30,EUR,M,-345 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,-168 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,19 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,124 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,124 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,-41 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,14974 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-04-30,EUR,M,-182 
-A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-04-30,EUR,M,-633 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-04-30,EUR,M,0 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,53 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,255 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-8188 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-04-30,EUR,M,48 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-04-30,EUR,M,-978 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,-168 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,19 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,124 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,124 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-41 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,14974 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,0 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,51 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,162 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,6 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-04-30,EUR,M,0 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-04-30,EUR,M,-6 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,0 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,0 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,-9214 -A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,537 -A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-4516 -A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-3576 -A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,-138 
-A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-04-30,EUR,M,51 -A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-04-30,EUR,M,863 -C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,817 -D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,-1563 -L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-1605 -L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-260 -L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,26 -A,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,5092 -C,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,408 -C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,-8 -C,N,N,J8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,137 -C,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,45 -C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,-3 -C,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,402 -C,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,464 -C,N,N,J8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,110 -C,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,14240 -C,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,10554 -C,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,413 -C,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,51 -C,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,3721 -D,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,18 -D,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,72 -D,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,174 -D,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,85 -D,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,439 -D,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,556 -D,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,14464 -D,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,9438 -D,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,878 -D,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,33 -D,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,3263 -L,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,11006 
-N,N,N,J8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-04-30,EUR,T,-459 -A,N,N,J8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0 -A,N,N,J8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0 -A,N,N,J8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0 -A,N,N,J8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,41 -A,N,N,J8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-5730 -A,N,N,J8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-56 -A,N,N,J8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0 -A,N,N,J8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0 -A,N,N,J8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0 -A,N,N,J8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,41 -A,N,N,J8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-5730 -A,N,N,J8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-56 -A,N,N,J8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-455 -A,N,N,J8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-1042 -A,N,N,J8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0 -A,N,N,J8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-99 -A,N,N,J8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-336 -A,N,N,J8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1151 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-04-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-04-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-178 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,-54 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-04-30,EUR,M,0 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-04-30,EUR,M,0 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-04-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,831 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,88 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,-102 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,5889 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-04-30,EUR,M,-419 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-04-30,EUR,M,-5879 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,11 
-C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,79 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,9 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,9 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,-37 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,10936 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-04-30,EUR,M,-4 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-04-30,EUR,M,-154 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-04-30,EUR,M,0 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,88 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-102 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,5889 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-04-30,EUR,M,-423 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-04-30,EUR,M,-6033 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,11 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,79 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,9 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,9 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-37 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,10936 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,0 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,1 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,-7 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-04-30,EUR,M,0 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-04-30,EUR,M,0 
-C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,-1 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,0 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,782 -A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-988 -A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,980 -A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,593 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,-29 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-04-30,EUR,M,-1075 -A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-04-30,EUR,M,1084 -C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,-1053 -D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,104 -L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,134 -L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-709 -L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-2085 -A,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-14882 -A,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-3641 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-04-30,EUR,_X,-310 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-04-30,EUR,_X,-3331 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-5401 -A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-5133 -A,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-04-30,EUR,M,-6924 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-668 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-8041 -A,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-04-30,EUR,M,-8708 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1784 -B,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-04-30,EUR,_X,-1218 -B,N,N,W1,S1,_T,T,M,_Z,_Z,CKA,_Z,BE,S1,2019-04-30,EUR,_X,-1192 -B,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,-11 -B,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,-11 -B,N,N,W1,S1,_T,T,M,_Z,_Z,GS,_Z,BE,S1,2019-04-30,EUR,_X,-114 -B,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,26 
-C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-04-30,EUR,_X,37889 -C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,818 -C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,24 -C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,574 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-04-30,EUR,_X,-139 -C,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,45 -C,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,3544 -C,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,2101 -C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,119 -C,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,1297 -C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,544 -C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,1557 -C,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,722 -C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,32733 -C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,24161 -C,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-04-30,EUR,_X,4406 -C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,930 -C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,89 -C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,8391 -D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-04-30,EUR,_X,39107 -D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,223 -D,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,175 -D,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,592 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-04-30,EUR,_X,-1325 -D,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,174 -D,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,4663 -D,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,3541 -D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,204 -D,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,918 -D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,585 -D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2956 -D,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,326 -D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,32744 
-D,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,24172 -D,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-04-30,EUR,_X,5061 -D,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,1380 -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,63 -D,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,8494 -L,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-12656 -L,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-2384 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-04-30,EUR,_X,-1338 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-04-30,EUR,_X,-1047 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-13582 -L,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-1774 -L,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-04-30,EUR,M,3310 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,1265 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,2263 -L,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-04-30,EUR,M,3528 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-218 -N,N,N,W1,S1,_T,T,M,_Z,_Z,EO,_Z,BE,S1,2019-04-30,EUR,_X,-1034 -N,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-2226 -N,N,N,W1,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-04-30,EUR,T,814 -A,N,N,W1,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0 -A,N,N,W1,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0 -A,N,N,W1,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0 -A,N,N,W1,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,44 -A,N,N,W1,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-5737 -A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-70 -A,N,N,W1,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0 -A,N,N,W1,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0 -A,N,N,W1,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-303 -A,N,N,W1,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,44 -A,N,N,W1,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-5737 -A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-374 -A,N,N,W1,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-443 -A,N,N,W1,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-1805 
-A,N,N,W1,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0 -A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-270 -A,N,N,W1,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-499 -A,N,N,W1,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,2157 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-04-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-04-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-281 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,-124 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-04-30,EUR,M,-5 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-04-30,EUR,M,0 -A,N,N,W1,S1,X1,T,M,R,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,271 -A,N,N,W1,S1,XAU,T,M,R,F11,FA,_Z,BE,S121,2019-04-30,EUR,M,0 -A,N,N,W1,S1,X1,T,M,R,F2,FA,T,BE,S121,2019-04-30,EUR,N,-446 -A,N,N,W1,S1,X1,T,M,R,F3,FA,L,BE,S121,2019-04-30,EUR,M,346 -A,N,N,W1,S1,X1,T,M,R,F3,FA,S,BE,S121,2019-04-30,EUR,M,244 -A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-04-30,EUR,M,591 -A,N,N,W1,S1,X1,T,M,R,F5,FA,_Z,BE,S121,2019-04-30,EUR,M,0 -A,N,N,W1,S1,X1,T,M,R,FR1,FA,_Z,BE,S121,2019-04-30,EUR,M,591 -A,N,N,W1,S1,X1,T,M,R,FR2,FA,_Z,BE,S121,2019-04-30,EUR,_X,123 -A,N,N,W1,S1,X1,T,M,R,FR41,FA,_Z,BE,S121,2019-04-30,EUR,_X,-1 -C,N,N,W1,S1,X1,T,M,R,FLA,D41,T,BE,S121,2019-04-30,EUR,_X,26 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-04-30,EUR,_X,0 -C,N,N,W1,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,26 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-29002 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-04-30,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-04-30,EUR,M,0 -N,N,N,W1,S1,X1,T,M,R,F71,FA,T,BE,S121,2019-04-30,EUR,T,-20 -A,N,N,W1,S121,X1,T,M,R,F2,FA,T,BE,S121,2019-04-30,EUR,N,-46 -A,N,N,W1,S122,X1,T,M,R,F2,FA,T,BE,S121,2019-04-30,EUR,N,-401 -A,N,N,W1,S12K,XAU,T,M,R,F11B,FA,_Z,BE,S121,2019-04-30,EUR,M,0 -A,N,N,W1,S1N,XDR,T,M,R,F12,FA,T,BE,S121,2019-04-30,EUR,M,0 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,140 
-A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,153 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,-2299 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-04-30,EUR,M,-189 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-04-30,EUR,M,-6224 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,-158 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,99 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,133 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,133 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,-78 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,25910 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-04-30,EUR,M,-188 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-04-30,EUR,M,-1048 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,0 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-04-30,EUR,M,-186 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-04-30,EUR,M,-787 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-04-30,EUR,M,0 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-04-30,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-04-30,EUR,M,0 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-04-30,EUR,M,-486 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,140 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,153 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-2299 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-04-30,EUR,M,-375 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-04-30,EUR,M,-7011 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,-158 
-C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,99 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,133 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,133 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-78 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,25910 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-04-30,EUR,M,-188 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-04-30,EUR,M,-1048 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,-486 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,1 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,51 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,163 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,-2 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-04-30,EUR,M,0 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-04-30,EUR,M,-6 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,-2 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,0 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,-8432 -L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,1323 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-04-30,EUR,M,1145 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-04-30,EUR,M,0 -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-451 -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-3535 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-2983 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,-166 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-04-30,EUR,M,-1024 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-04-30,EUR,M,1947 -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,-236 -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,-1459 -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-1471 -L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-969 -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-2058 
-L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,130 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-04-30,EUR,M,2166 -L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-04-30,EUR,M,268 -A,N,N,W19,S1N,XAU,T,M,R,F11A,FA,_Z,BE,S121,2019-04-30,EUR,M,0 +A,N,N,1C,S121,XDR,T,M,R,FK,FA,_Z,BE,S121,2019-06-30,EUR,M,0.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,765.0 +C,N,N,B5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,503.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,44.0 +C,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,146.0 +C,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,944.0 +C,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,1267.0 +C,N,N,B5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,690.0 +C,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,0.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,16148.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,803.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,83.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,6233.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,227.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,186.0 +D,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,126.0 +D,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,325.0 +D,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2722.0 +D,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,20263.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,15838.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1237.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,29.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,6732.0 +C,N,N,B5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,2.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,763.0 +C,N,N,B6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,473.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,44.0 +C,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,147.0 
+C,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,489.0 +C,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,1235.0 +C,N,N,B6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,680.0 +C,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,20947.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,14699.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,684.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,80.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,5414.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,223.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,186.0 +D,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,129.0 +D,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,170.0 +D,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2590.0 +D,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,18814.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,14933.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1033.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,29.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,5921.0 +C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,0.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,71.0 +C,N,N,D5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,67.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0.0 +C,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,3.0 +C,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,122.0 +C,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,290.0 +C,N,N,D5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,104.0 +C,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,8610.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,6701.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,164.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,1.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,2365.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,13.0 
+D,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0.0 +D,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,75.0 +D,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,403.0 +D,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,235.0 +D,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,11204.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,6532.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,378.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,31.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,2137.0 +C,N,N,D5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,24.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,73.0 +C,N,N,D6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,97.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0.0 +C,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,2.0 +C,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,577.0 +C,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,323.0 +C,N,N,D6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,114.0 +C,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,10998.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,8150.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,283.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,3.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,3184.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,17.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0.0 +D,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,71.0 +D,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,558.0 +D,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,366.0 +D,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,12653.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,7437.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,582.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,32.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,2948.0 
+C,N,N,D6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,26.0 +A,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,12694.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,429.0 +C,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,62.0 +C,N,N,I8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,434.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0.0 +C,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,150.0 +C,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,479.0 +C,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,1093.0 +C,N,N,I8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,678.0 +C,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,18611.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,13021.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,539.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,31.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,4663.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,219.0 +D,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,103.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,0.0 +D,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,119.0 +D,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,166.0 +D,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2400.0 +D,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,16927.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,13444.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,552.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,28.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,5286.0 +L,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,24606.0 +N,N,N,I8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-06-30,EUR,T,191.0 +A,N,N,I8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,I8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,I8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,I8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-453.0 
+A,N,N,I8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-138.0 +A,N,N,I8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,-10.0 +A,N,N,I8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,I8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,I8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,277.0 +A,N,N,I8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-453.0 +A,N,N,I8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-138.0 +A,N,N,I8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,267.0 +A,N,N,I8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-289.0 +A,N,N,I8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-261.0 +A,N,N,I8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,I8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,713.0 +A,N,N,I8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,397.0 +A,N,N,I8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,1178.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-06-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-06-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,-86.0 +A,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-06-30,EUR,N,-176.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-06-30,EUR,M,-486.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-06-30,EUR,M,25.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-06-30,EUR,M,0.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-06-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,37554.0 +L,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-06-30,EUR,N,0.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-374.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,150.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,11907.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-06-30,EUR,M,-729.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-06-30,EUR,M,-95.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-06-30,EUR,M,-28.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-529.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-4432.0 
+L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-4432.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,64.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,-14796.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-06-30,EUR,M,-2.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-06-30,EUR,M,-53.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-06-30,EUR,M,0.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-374.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,150.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,11907.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-06-30,EUR,M,-731.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-06-30,EUR,M,-148.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-06-30,EUR,M,-28.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-529.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-4432.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-4432.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,64.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-14796.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,365.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-06-30,EUR,M,-12.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-06-30,EUR,M,-8.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-06-30,EUR,M,-36.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,-1.0 
+D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,-1044.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-12294.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-7702.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-7846.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-06-30,EUR,M,1199.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-06-30,EUR,M,129.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-06-30,EUR,M,1509.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-12355.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-8539.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-9689.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-1936.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-11256.0 +A,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,32430.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,407.0 +C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,-7.0 +C,N,N,J8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,136.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,44.0 +C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,-1.0 +C,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,587.0 +C,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,464.0 +C,N,N,J8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,116.0 +C,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,13335.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,9829.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,428.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,53.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,3935.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,21.0 +D,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,69.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,186.0 
+D,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,82.0 +D,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,561.0 +D,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,556.0 +D,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,14539.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,8926.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1064.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,33.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,3583.0 +L,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,-4542.0 +N,N,N,J8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-06-30,EUR,T,492.0 +A,N,N,J8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,J8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-130.0 +A,N,N,J8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,J8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-199.0 +A,N,N,J8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,6474.0 +A,N,N,J8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,5.0 +A,N,N,J8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,J8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,J8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,J8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-199.0 +A,N,N,J8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,6474.0 +A,N,N,J8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,5.0 +A,N,N,J8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,795.0 +A,N,N,J8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-8.0 +A,N,N,J8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,J8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,172.0 +A,N,N,J8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,376.0 +A,N,N,J8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,551.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-06-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-06-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,158.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-06-30,EUR,M,0.0 
+A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-06-30,EUR,M,0.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-06-30,EUR,M,0.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-06-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,-291.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-561.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,-20.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,-1146.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-06-30,EUR,M,340.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-06-30,EUR,M,6735.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-06-30,EUR,M,0.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-515.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,7.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,7.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,-125.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,-4070.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-06-30,EUR,M,1.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-06-30,EUR,M,-42.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-06-30,EUR,M,0.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-561.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-20.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-1146.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-06-30,EUR,M,341.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-06-30,EUR,M,6693.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-06-30,EUR,M,0.0 
+C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-515.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,7.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,7.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-125.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-4070.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,41.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-06-30,EUR,M,1.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-06-30,EUR,M,0.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-06-30,EUR,M,0.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,-1.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,-4.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,109.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,884.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,4712.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-06-30,EUR,M,426.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-06-30,EUR,M,19.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-06-30,EUR,M,555.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-518.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-625.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-662.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,1615.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,4331.0 +A,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,-1678.0 +A,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,-19808.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-06-30,EUR,_X,-13121.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-06-30,EUR,_X,-66870.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,8105.0 
+A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,15937.0 +A,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-06-30,EUR,M,9448.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,738.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,6710.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-06-30,EUR,M,7448.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,2000.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,-501.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,CKA,_Z,BE,S1,2019-06-30,EUR,_X,-478.0 +B,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,479.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,479.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,GS,_Z,BE,S1,2019-06-30,EUR,_X,208.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,23.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,37457.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,836.0 +C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,54.0 +C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,570.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-06-30,EUR,_X,-13919.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,44.0 +C,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,4163.0 +C,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,2623.0 +C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,149.0 +C,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,1364.0 +C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,1066.0 +C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,1557.0 +C,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,794.0 +C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31946.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,22849.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-06-30,EUR,_X,5042.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,967.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,84.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,8598.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,37959.0 
+D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-06-30,EUR,_X,240.0 +D,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,172.0 +D,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-06-30,EUR,_X,597.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-06-30,EUR,_X,-13588.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-06-30,EUR,_X,186.0 +D,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,4678.0 +D,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,3684.0 +D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,200.0 +D,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,793.0 +D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,728.0 +D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2956.0 +D,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,196.0 +D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31466.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,22370.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-06-30,EUR,_X,5104.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1615.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,60.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-06-30,EUR,_X,8869.0 +L,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,199.0 +L,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,-15157.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-06-30,EUR,_X,-14775.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-06-30,EUR,_X,-382.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,10424.0 +L,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,20064.0 +L,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-06-30,EUR,M,4933.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,5848.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-3414.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-06-30,EUR,M,2434.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,2499.0 +N,N,N,W1,S1,_T,T,M,_Z,_Z,EO,_Z,BE,S1,2019-06-30,EUR,_X,-1400.0 +N,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,-1878.0 +N,N,N,W1,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-06-30,EUR,T,683.0 
+A,N,N,W1,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,W1,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-130.0 +A,N,N,W1,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,W1,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-652.0 +A,N,N,W1,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,6336.0 +A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,-5.0 +A,N,N,W1,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,W1,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,W1,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,278.0 +A,N,N,W1,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,-652.0 +A,N,N,W1,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,6336.0 +A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,273.0 +A,N,N,W1,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,506.0 +A,N,N,W1,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-269.0 +A,N,N,W1,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,0.0 +A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,884.0 +A,N,N,W1,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,773.0 +A,N,N,W1,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,1728.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-06-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-06-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,72.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-06-30,EUR,M,-486.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-06-30,EUR,M,25.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-06-30,EUR,M,0.0 +A,N,N,W1,S1,X1,T,M,R,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,-106.0 +A,N,N,W1,S1,XAU,T,M,R,F11,FA,_Z,BE,S121,2019-06-30,EUR,M,0.0 +A,N,N,W1,S1,X1,T,M,R,F2,FA,T,BE,S121,2019-06-30,EUR,N,-3.0 +A,N,N,W1,S1,X1,T,M,R,F3,FA,L,BE,S121,2019-06-30,EUR,M,-134.0 +A,N,N,W1,S1,X1,T,M,R,F3,FA,S,BE,S121,2019-06-30,EUR,M,41.0 +A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-06-30,EUR,M,-93.0 +A,N,N,W1,S1,X1,T,M,R,F5,FA,_Z,BE,S121,2019-06-30,EUR,M,0.0 +A,N,N,W1,S1,X1,T,M,R,FR1,FA,_Z,BE,S121,2019-06-30,EUR,M,-93.0 
+A,N,N,W1,S1,X1,T,M,R,FR2,FA,_Z,BE,S121,2019-06-30,EUR,_X,-106.0 +A,N,N,W1,S1,X1,T,M,R,FR41,FA,_Z,BE,S121,2019-06-30,EUR,_X,-4.0 +C,N,N,W1,S1,X1,T,M,R,FLA,D41,T,BE,S121,2019-06-30,EUR,_X,26.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-06-30,EUR,_X,0.0 +C,N,N,W1,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,26.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-06-30,EUR,_X,37263.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-06-30,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-06-30,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-06-30,EUR,M,5.0 +N,N,N,W1,S1,X1,T,M,R,F71,FA,T,BE,S121,2019-06-30,EUR,T,-6.0 +A,N,N,W1,S121,X1,T,M,R,F2,FA,T,BE,S121,2019-06-30,EUR,N,-3.0 +A,N,N,W1,S122,X1,T,M,R,F2,FA,T,BE,S121,2019-06-30,EUR,N,0.0 +A,N,N,W1,S12K,XAU,T,M,R,F11B,FA,_Z,BE,S121,2019-06-30,EUR,M,0.0 +A,N,N,W1,S1N,XDR,T,M,R,F12,FA,T,BE,S121,2019-06-30,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-935.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,130.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,10761.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-06-30,EUR,M,-389.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-06-30,EUR,M,6640.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-06-30,EUR,M,-28.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-1044.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-4424.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-4424.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-06-30,EUR,_X,-61.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-06-30,EUR,_X,-18866.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-06-30,EUR,M,-116.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-06-30,EUR,M,-1159.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-06-30,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 
+A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-06-30,EUR,M,-1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-06-30,EUR,M,-95.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-06-30,EUR,M,0.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-06-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-06-30,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-06-30,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-06-30,EUR,M,-125.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-935.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,130.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,10761.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-06-30,EUR,M,-390.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-06-30,EUR,M,6545.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-06-30,EUR,M,-28.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-1044.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-4424.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-4424.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-61.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-18866.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-06-30,EUR,M,-116.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-06-30,EUR,M,-1159.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-06-30,EUR,M,-125.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,405.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-06-30,EUR,M,-11.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-06-30,EUR,M,-8.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-06-30,EUR,M,-36.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,-2.0 
+D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-06-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,-1048.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-06-30,EUR,M,5730.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-06-30,EUR,M,-1188.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-06-30,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-12186.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-6818.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-3134.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-06-30,EUR,M,1625.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-06-30,EUR,M,148.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-06-30,EUR,M,2064.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-12873.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-06-30,EUR,_X,-9164.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-10351.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-321.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-6925.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-06-30,EUR,M,234.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-06-30,EUR,M,-1066.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-06-30,EUR,M,2619.0 +A,N,N,W19,S1N,XAU,T,M,R,F11A,FA,_Z,BE,S121,2019-06-30,EUR,M,0.0 +A,N,N,1C,S121,XDR,T,M,R,FK,FA,_Z,BE,S121,2019-05-31,EUR,M,0.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,887.0 +C,N,N,B5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,519.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,88.0 +C,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,118.0 +C,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,535.0 +C,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1267.0 +C,N,N,B5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1287.0 +C,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,24091.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,17780.0 
+C,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,844.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,205.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,5927.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,388.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,159.0 +D,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,128.0 +D,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,368.0 +D,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2722.0 +D,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,22860.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,18387.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,1181.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,19.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,6724.0 +C,N,N,B5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,2.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,886.0 +C,N,N,B6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,488.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,88.0 +C,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,119.0 +C,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,245.0 +C,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1235.0 +C,N,N,B6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1262.0 +C,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,21929.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,16419.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,712.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,205.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,5255.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,384.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,159.0 +D,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,132.0 +D,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,211.0 +D,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2590.0 +D,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,21018.0 
+D,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,17208.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,972.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,18.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,5982.0 +C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,78.0 +C,N,N,D5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,68.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0.0 +C,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,3.0 +C,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,100.0 +C,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,290.0 +C,N,N,D5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,67.0 +C,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,9101.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,6939.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,177.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,1.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,2197.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,13.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0.0 +D,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,76.0 +D,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,402.0 +D,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,235.0 +D,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,10746.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,6746.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,444.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,31.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,1794.0 +C,N,N,D5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,26.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,79.0 +C,N,N,D6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,99.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0.0 +C,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,2.0 
+C,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,390.0 +C,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,323.0 +C,N,N,D6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,92.0 +C,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,11263.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,8300.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,310.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,2.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,2869.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,17.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0.0 +D,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,73.0 +D,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,559.0 +D,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,366.0 +D,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,12588.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,7925.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,653.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,32.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,2536.0 +C,N,N,D6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,27.0 +A,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-3674.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,553.0 +C,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,33.0 +C,N,N,I8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,452.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0.0 +C,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,123.0 +C,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,240.0 +C,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1093.0 +C,N,N,I8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1250.0 +C,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,19436.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,14535.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,567.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,155.0 
+C,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,4533.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,380.0 +D,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,105.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,0.0 +D,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,121.0 +D,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,209.0 +D,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2400.0 +D,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,18939.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,15541.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,607.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,18.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,5370.0 +L,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,3598.0 +N,N,N,I8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-05-31,EUR,T,152.0 +A,N,N,I8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,I8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,I8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,I8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,336.0 +A,N,N,I8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-152.0 +A,N,N,I8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-125.0 +A,N,N,I8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,I8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,I8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-342.0 +A,N,N,I8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,336.0 +A,N,N,I8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-152.0 +A,N,N,I8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-467.0 +A,N,N,I8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,1150.0 +A,N,N,I8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,917.0 +A,N,N,I8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,I8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,248.0 +A,N,N,I8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,472.0 +A,N,N,I8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-1900.0 
+A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,-450.0 +A,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-05-31,EUR,N,-449.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-05-31,EUR,M,-11.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-05-31,EUR,M,0.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-05-31,EUR,M,0.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,8005.0 +L,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-05-31,EUR,N,0.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,-13.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,529.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-1158.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-05-31,EUR,M,1058.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,563.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-05-31,EUR,M,10.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,19.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,124.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,124.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,7.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-3542.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-05-31,EUR,M,55.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,644.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-05-31,EUR,M,0.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 
+A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-13.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,529.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-1158.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,1113.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,1206.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-05-31,EUR,M,10.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,19.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,124.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,124.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,7.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-3542.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,-1.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,-50.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,-14.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-05-31,EUR,M,0.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-05-31,EUR,M,-3.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,1447.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1475.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-11049.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-5347.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-05-31,EUR,M,645.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-05-31,EUR,M,31.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-05-31,EUR,M,-2373.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-281.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-5624.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-10209.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-4139.0 
+L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1028.0 +A,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-1016.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,413.0 +C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,-7.0 +C,N,N,J8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,135.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,88.0 +C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,-1.0 +C,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,396.0 +C,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,464.0 +C,N,N,J8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,104.0 +C,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,13756.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,10184.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,455.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,52.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,3591.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,21.0 +D,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,71.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,159.0 +D,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,84.0 +D,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,562.0 +D,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,556.0 +D,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,14667.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,9592.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,1018.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,33.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,3148.0 +L,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-12219.0 +N,N,N,J8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-05-31,EUR,T,-139.0 +A,N,N,J8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,-1.0 +A,N,N,J8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1.0 +A,N,N,J8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,J8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,244.0 
+A,N,N,J8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1060.0 +A,N,N,J8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-72.0 +A,N,N,J8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,J8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,J8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,J8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,244.0 +A,N,N,J8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1060.0 +A,N,N,J8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-72.0 +A,N,N,J8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,-265.0 +A,N,N,J8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,28.0 +A,N,N,J8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,J8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,8.0 +A,N,N,J8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-28.0 +A,N,N,J8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-1572.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,19.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-05-31,EUR,M,-74.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-05-31,EUR,M,0.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-05-31,EUR,M,0.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,-183.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,83.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,28.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-2360.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-05-31,EUR,M,5.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,-798.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-05-31,EUR,M,-17.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,79.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,9.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,9.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,194.0 
+L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-12515.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-05-31,EUR,M,0.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,-237.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-05-31,EUR,M,0.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,83.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,28.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-2360.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,4.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,-1036.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-05-31,EUR,M,-17.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,79.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,9.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,9.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,194.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-12515.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,401.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,8.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-05-31,EUR,M,0.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-05-31,EUR,M,0.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,-1.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,0.0 
+L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,-861.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-49.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,6561.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-1308.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-05-31,EUR,M,47.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-05-31,EUR,M,-25.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-05-31,EUR,M,-1626.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-126.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,384.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,347.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,5950.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,632.0 +A,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-14845.0 +A,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-2434.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-05-31,EUR,_X,1498.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-05-31,EUR,_X,-3932.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-10253.0 +A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-4689.0 +A,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-05-31,EUR,M,-2112.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,1720.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,177.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-05-31,EUR,M,1897.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-4009.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-05-31,EUR,_X,-3507.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,CKA,_Z,BE,S1,2019-05-31,EUR,_X,-3351.0 +B,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,-414.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,-414.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,GS,_Z,BE,S1,2019-05-31,EUR,_X,-808.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,156.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-05-31,EUR,_X,39202.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,966.0 
+C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,26.0 +C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,587.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-05-31,EUR,_X,-310.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,88.0 +C,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,4283.0 +C,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,2193.0 +C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,121.0 +C,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,1941.0 +C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,636.0 +C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1557.0 +C,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1354.0 +C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,33192.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,24719.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-05-31,EUR,_X,5337.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,1022.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,207.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,8124.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-05-31,EUR,_X,42709.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,401.0 +D,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,176.0 +D,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-05-31,EUR,_X,608.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-05-31,EUR,_X,-5107.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-05-31,EUR,_X,159.0 +D,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,6873.0 +D,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,3727.0 +D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,205.0 +D,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,2942.0 +D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,770.0 +D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2956.0 +D,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2334.0 +D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,33606.0 
+D,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-05-31,EUR,_X,25133.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-05-31,EUR,_X,7434.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,1625.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,51.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-05-31,EUR,_X,8518.0 +L,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-12464.0 +L,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-7717.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-05-31,EUR,_X,-9729.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-05-31,EUR,_X,2012.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-5989.0 +L,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-8621.0 +L,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-05-31,EUR,M,1243.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,2981.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1206.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-05-31,EUR,M,1775.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-533.0 +N,N,N,W1,S1,_T,T,M,_Z,_Z,EO,_Z,BE,S1,2019-05-31,EUR,_X,970.0 +N,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-2381.0 +N,N,N,W1,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-05-31,EUR,T,13.0 +A,N,N,W1,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,-1.0 +A,N,N,W1,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1.0 +A,N,N,W1,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,W1,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,580.0 +A,N,N,W1,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1211.0 +A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-196.0 +A,N,N,W1,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,W1,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,W1,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-342.0 +A,N,N,W1,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,580.0 +A,N,N,W1,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1211.0 +A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-539.0 +A,N,N,W1,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,885.0 
+A,N,N,W1,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,945.0 +A,N,N,W1,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,0.0 +A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,255.0 +A,N,N,W1,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,444.0 +A,N,N,W1,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-3472.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,-430.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-05-31,EUR,M,-85.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-05-31,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-05-31,EUR,M,0.0 +A,N,N,W1,S1,X1,T,M,R,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,-59.0 +A,N,N,W1,S1,XAU,T,M,R,F11,FA,_Z,BE,S121,2019-05-31,EUR,M,0.0 +A,N,N,W1,S1,X1,T,M,R,F2,FA,T,BE,S121,2019-05-31,EUR,N,-123.0 +A,N,N,W1,S1,X1,T,M,R,F3,FA,L,BE,S121,2019-05-31,EUR,M,-265.0 +A,N,N,W1,S1,X1,T,M,R,F3,FA,S,BE,S121,2019-05-31,EUR,M,339.0 +A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-05-31,EUR,M,73.0 +A,N,N,W1,S1,X1,T,M,R,F5,FA,_Z,BE,S121,2019-05-31,EUR,M,0.0 +A,N,N,W1,S1,X1,T,M,R,FR1,FA,_Z,BE,S121,2019-05-31,EUR,M,73.0 +A,N,N,W1,S1,X1,T,M,R,FR2,FA,_Z,BE,S121,2019-05-31,EUR,_X,-60.0 +A,N,N,W1,S1,X1,T,M,R,FR41,FA,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +C,N,N,W1,S1,X1,T,M,R,FLA,D41,T,BE,S121,2019-05-31,EUR,_X,28.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-05-31,EUR,_X,0.0 +C,N,N,W1,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,28.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,7822.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-05-31,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-05-31,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-05-31,EUR,M,0.0 +N,N,N,W1,S1,X1,T,M,R,F71,FA,T,BE,S121,2019-05-31,EUR,T,-11.0 +A,N,N,W1,S121,X1,T,M,R,F2,FA,T,BE,S121,2019-05-31,EUR,N,-60.0 +A,N,N,W1,S122,X1,T,M,R,F2,FA,T,BE,S121,2019-05-31,EUR,N,-62.0 +A,N,N,W1,S12K,XAU,T,M,R,F11B,FA,_Z,BE,S121,2019-05-31,EUR,M,0.0 
+A,N,N,W1,S1N,XDR,T,M,R,F12,FA,T,BE,S121,2019-05-31,EUR,M,1.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,71.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,557.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-3518.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-05-31,EUR,M,1063.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,-236.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-05-31,EUR,M,-7.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,99.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,133.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,133.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-05-31,EUR,_X,201.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-05-31,EUR,_X,-16057.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-05-31,EUR,M,68.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,96.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-05-31,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-05-31,EUR,M,55.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,406.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-05-31,EUR,M,0.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-05-31,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-05-31,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-05-31,EUR,M,227.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,71.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,557.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-3518.0 
+A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,1118.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,171.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-05-31,EUR,M,-7.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,99.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,133.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,133.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-05-31,EUR,_X,201.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-05-31,EUR,_X,-16057.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,68.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,96.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-05-31,EUR,M,227.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,1.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,-1.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,351.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,-5.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-05-31,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-05-31,EUR,M,-3.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,-2.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-05-31,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-05-31,EUR,_X,586.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,3693.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-05-31,EUR,M,-1620.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-05-31,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1426.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-4488.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-6655.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-05-31,EUR,M,692.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-05-31,EUR,M,6.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-05-31,EUR,M,-3999.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-407.0 
+D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-5241.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-9863.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1811.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1660.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-05-31,EUR,M,-780.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-05-31,EUR,M,318.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-05-31,EUR,M,-760.0 +A,N,N,W19,S1N,XAU,T,M,R,F11A,FA,_Z,BE,S121,2019-05-31,EUR,M,0.0 +A,N,N,1C,S121,XDR,T,M,R,FK,FA,_Z,BE,S121,2019-04-30,EUR,M,148.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,749.0 +C,N,N,B5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,510.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,45.0 +C,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,117.0 +C,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,457.0 +C,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,1267.0 +C,N,N,B5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,630.0 +C,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,23223.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,16798.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,786.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,88.0 +C,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,6107.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,212.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,174.0 +D,N,N,B5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,127.0 +D,N,N,B5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,244.0 +D,N,N,B5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2722.0 +D,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,21892.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,17316.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,1091.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,32.0 +D,N,N,B5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,6744.0 +C,N,N,B5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,1.0 
+C,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,743.0 +C,N,N,B6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,479.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,45.0 +C,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,119.0 +C,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,174.0 +C,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,1235.0 +C,N,N,B6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,624.0 +C,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,20945.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,15425.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,663.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,88.0 +C,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,5472.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,210.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,174.0 +D,N,N,B6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,130.0 +D,N,N,B6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,150.0 +D,N,N,B6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2590.0 +D,N,C,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,20312.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,16321.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,892.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,31.0 +D,N,N,B6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,5846.0 +C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,0.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,69.0 +C,N,N,D5,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,65.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0.0 +C,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,2.0 +C,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,87.0 +C,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,290.0 +C,N,N,D5,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,92.0 +C,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,9510.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,7363.0 
+C,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,145.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,1.0 +C,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,2285.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,11.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0.0 +D,N,N,D5,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,77.0 +D,N,N,D5,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,341.0 +D,N,N,D5,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,235.0 +D,N,C,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,10852.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,6856.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,289.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,31.0 +D,N,N,D5,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,1750.0 +C,N,N,D5,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,25.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,75.0 +C,N,N,D6,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,95.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0.0 +C,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,1.0 +C,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,371.0 +C,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,323.0 +C,N,N,D6,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,98.0 +C,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,11788.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,8735.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,267.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,2.0 +C,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,2920.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,14.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0.0 +D,N,N,D6,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,74.0 +D,N,N,D6,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,435.0 +D,N,N,D6,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,366.0 +D,N,C,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,12432.0 
+D,N,N,D6,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,7851.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,488.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,32.0 +D,N,N,D6,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,2648.0 +C,N,N,D6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,26.0 +A,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-10224.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,410.0 +C,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,33.0 +C,N,N,I8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,438.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0.0 +C,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,122.0 +C,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,142.0 +C,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,1093.0 +C,N,N,I8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,612.0 +C,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,18493.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,13607.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,517.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,38.0 +C,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,4670.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,206.0 +D,N,N,I8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,103.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,0.0 +D,N,N,I8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,119.0 +D,N,N,I8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,146.0 +D,N,N,I8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2400.0 +D,N,C,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,18280.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,14734.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,502.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,30.0 +D,N,N,I8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,5231.0 +L,N,N,I8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-12781.0 +N,N,N,I8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-04-30,EUR,T,1272.0 
+A,N,N,I8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,I8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,I8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,I8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,4.0 +A,N,N,I8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-7.0 +A,N,N,I8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-15.0 +A,N,N,I8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,I8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,I8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-303.0 +A,N,N,I8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,4.0 +A,N,N,I8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-7.0 +A,N,N,I8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-318.0 +A,N,N,I8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,12.0 +A,N,N,I8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-763.0 +A,N,N,I8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,I8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-171.0 +A,N,N,I8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-163.0 +A,N,N,I8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1006.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-04-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-04-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-104.0 +A,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-04-30,EUR,N,-60.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,-71.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-04-30,EUR,M,-5.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-04-30,EUR,M,0.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-04-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-29833.0 +L,N,N,I8,S1,EUR,T,M,O,F221C,FA,S,BE,S121,2019-04-30,EUR,N,0.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,53.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,255.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,-8188.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-04-30,EUR,M,230.0 
+A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-04-30,EUR,M,-345.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,-168.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,19.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,124.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,124.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,-41.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,14974.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-04-30,EUR,M,-182.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-04-30,EUR,M,-633.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-04-30,EUR,M,0.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,53.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,255.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-8188.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-04-30,EUR,M,48.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-04-30,EUR,M,-978.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,-168.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,19.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,124.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,124.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-41.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,14974.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,51.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,162.0 
+A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,6.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-04-30,EUR,M,0.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-04-30,EUR,M,-6.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,-9214.0 +A,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,537.0 +A,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-4516.0 +A,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-3576.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,-138.0 +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-04-30,EUR,M,51.0 +A,N,N,I8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-04-30,EUR,M,863.0 +C,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,817.0 +D,N,N,I8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,-1563.0 +L,N,N,I8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-1605.0 +L,N,N,I8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-260.0 +L,N,N,I8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,26.0 +A,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,5092.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,408.0 +C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,-8.0 +C,N,N,J8,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,137.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,45.0 +C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,-3.0 +C,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,402.0 +C,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,464.0 +C,N,N,J8,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,110.0 +C,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,14240.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,10554.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,413.0 +C,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,51.0 
+C,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,3721.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,18.0 +D,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,72.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,174.0 +D,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,85.0 +D,N,N,J8,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,439.0 +D,N,N,J8,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,556.0 +D,N,C,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,14464.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,9438.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,878.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,33.0 +D,N,N,J8,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,3263.0 +L,N,N,J8,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,11006.0 +N,N,N,J8,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-04-30,EUR,T,-459.0 +A,N,N,J8,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,J8,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,J8,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,J8,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,41.0 +A,N,N,J8,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-5730.0 +A,N,N,J8,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-56.0 +A,N,N,J8,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,J8,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,J8,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,J8,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,41.0 +A,N,N,J8,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-5730.0 +A,N,N,J8,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-56.0 +A,N,N,J8,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-455.0 +A,N,N,J8,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-1042.0 +A,N,N,J8,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,J8,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-99.0 +A,N,N,J8,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-336.0 +A,N,N,J8,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1151.0 
+A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-04-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-04-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-178.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,-54.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-04-30,EUR,M,0.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-04-30,EUR,M,0.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-04-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,831.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,88.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,-102.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,5889.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-04-30,EUR,M,-419.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-04-30,EUR,M,-5879.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,11.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,79.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,9.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,9.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,-37.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,10936.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-04-30,EUR,M,-4.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-04-30,EUR,M,-154.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-04-30,EUR,M,0.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,88.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-102.0 
+A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,5889.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-04-30,EUR,M,-423.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-04-30,EUR,M,-6033.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,11.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,79.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,9.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,9.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-37.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,10936.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,1.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,-7.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-04-30,EUR,M,0.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-04-30,EUR,M,0.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,-1.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,782.0 +A,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-988.0 +A,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,980.0 +A,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,593.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,-29.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-04-30,EUR,M,-1075.0 +A,N,N,J8,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-04-30,EUR,M,1084.0 +C,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,-1053.0 +D,N,N,J8,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,104.0 +L,N,N,J8,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,134.0 +L,N,N,J8,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-709.0 +L,N,N,J8,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-2085.0 +A,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-14882.0 
+A,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-3641.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-04-30,EUR,_X,-310.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-04-30,EUR,_X,-3331.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-5401.0 +A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-5133.0 +A,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-04-30,EUR,M,-6924.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-668.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-8041.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-04-30,EUR,M,-8708.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1784.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-04-30,EUR,_X,-1218.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,CKA,_Z,BE,S1,2019-04-30,EUR,_X,-1192.0 +B,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,-11.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,-11.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,GS,_Z,BE,S1,2019-04-30,EUR,_X,-114.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,26.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-04-30,EUR,_X,37889.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,818.0 +C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,24.0 +C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,574.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-04-30,EUR,_X,-139.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,45.0 +C,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,3544.0 +C,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,2101.0 +C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,119.0 +C,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,1297.0 +C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,544.0 +C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,1557.0 +C,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,722.0 +C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,32733.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,24161.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-04-30,EUR,_X,4406.0 
+C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,930.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,89.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,8391.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-04-30,EUR,_X,39107.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,223.0 +D,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,175.0 +D,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,592.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1,2019-04-30,EUR,_X,-1325.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,D4O,_Z,BE,S1,2019-04-30,EUR,_X,174.0 +D,N,N,W1,S1,_T,T,M,_T,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,4663.0 +D,N,N,W1,S1,_T,T,M,D,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,3541.0 +D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,204.0 +D,N,N,W1,S1,_T,T,M,P,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,918.0 +D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,585.0 +D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2956.0 +D,N,N,W1,S1,_T,T,M,P,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,326.0 +D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,32744.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,24172.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,IN1,_Z,BE,S1,2019-04-30,EUR,_X,5061.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,1380.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,63.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,8494.0 +L,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-12656.0 +L,N,N,W1,S1,_T,T,M,D,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-2384.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1,2019-04-30,EUR,_X,-1338.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-04-30,EUR,_X,-1047.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-13582.0 +L,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-1774.0 +L,N,N,W1,S1,_T,T,M,P,F,FA,_Z,BE,S1,2019-04-30,EUR,M,3310.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,1265.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,2263.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-04-30,EUR,M,3528.0 
+L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-218.0 +N,N,N,W1,S1,_T,T,M,_Z,_Z,EO,_Z,BE,S1,2019-04-30,EUR,_X,-1034.0 +N,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-2226.0 +N,N,N,W1,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-04-30,EUR,T,814.0 +A,N,N,W1,S121,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,W1,S121,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,W1,S121,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,W1,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,44.0 +A,N,N,W1,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-5737.0 +A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-70.0 +A,N,N,W1,S123,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,W1,S123,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,W1,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-303.0 +A,N,N,W1,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,44.0 +A,N,N,W1,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-5737.0 +A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-374.0 +A,N,N,W1,S13,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-443.0 +A,N,N,W1,S13,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-1805.0 +A,N,N,W1,S13,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,0.0 +A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-270.0 +A,N,N,W1,S1P,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-499.0 +A,N,N,W1,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,2157.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S121,2019-04-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S121,2019-04-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-281.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,-124.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-04-30,EUR,M,-5.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-04-30,EUR,M,0.0 +A,N,N,W1,S1,X1,T,M,R,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,271.0 +A,N,N,W1,S1,XAU,T,M,R,F11,FA,_Z,BE,S121,2019-04-30,EUR,M,0.0 +A,N,N,W1,S1,X1,T,M,R,F2,FA,T,BE,S121,2019-04-30,EUR,N,-446.0 +A,N,N,W1,S1,X1,T,M,R,F3,FA,L,BE,S121,2019-04-30,EUR,M,346.0 
+A,N,N,W1,S1,X1,T,M,R,F3,FA,S,BE,S121,2019-04-30,EUR,M,244.0 +A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-04-30,EUR,M,591.0 +A,N,N,W1,S1,X1,T,M,R,F5,FA,_Z,BE,S121,2019-04-30,EUR,M,0.0 +A,N,N,W1,S1,X1,T,M,R,FR1,FA,_Z,BE,S121,2019-04-30,EUR,M,591.0 +A,N,N,W1,S1,X1,T,M,R,FR2,FA,_Z,BE,S121,2019-04-30,EUR,_X,123.0 +A,N,N,W1,S1,X1,T,M,R,FR41,FA,_Z,BE,S121,2019-04-30,EUR,_X,-1.0 +C,N,N,W1,S1,X1,T,M,R,FLA,D41,T,BE,S121,2019-04-30,EUR,_X,26.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S121,2019-04-30,EUR,_X,0.0 +C,N,N,W1,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,26.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-29002.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S121,2019-04-30,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S121,2019-04-30,EUR,M,0.0 +N,N,N,W1,S1,X1,T,M,R,F71,FA,T,BE,S121,2019-04-30,EUR,T,-20.0 +A,N,N,W1,S121,X1,T,M,R,F2,FA,T,BE,S121,2019-04-30,EUR,N,-46.0 +A,N,N,W1,S122,X1,T,M,R,F2,FA,T,BE,S121,2019-04-30,EUR,N,-401.0 +A,N,N,W1,S12K,XAU,T,M,R,F11B,FA,_Z,BE,S121,2019-04-30,EUR,M,0.0 +A,N,N,W1,S1N,XDR,T,M,R,F12,FA,T,BE,S121,2019-04-30,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,140.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,153.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,-2299.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-04-30,EUR,M,-189.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-04-30,EUR,M,-6224.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,-158.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,99.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,133.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,133.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S122,2019-04-30,EUR,_X,-78.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S122,2019-04-30,EUR,_X,25910.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S122,2019-04-30,EUR,M,-188.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-04-30,EUR,M,-1048.0 
+L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-04-30,EUR,M,-186.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-04-30,EUR,M,-787.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-04-30,EUR,M,0.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S123,2019-04-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S123,2019-04-30,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-04-30,EUR,M,0.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S123,2019-04-30,EUR,M,-486.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,140.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,153.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-2299.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-04-30,EUR,M,-375.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-04-30,EUR,M,-7011.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,-158.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,99.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,133.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,133.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S12T,2019-04-30,EUR,_X,-78.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S12T,2019-04-30,EUR,_X,25910.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-04-30,EUR,M,-188.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-04-30,EUR,M,-1048.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,-486.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,1.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,51.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,163.0 
+A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,-2.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-04-30,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-04-30,EUR,M,-6.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,-2.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S13,2019-04-30,EUR,_X,0.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-04-30,EUR,_X,-8432.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,1323.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S13,2019-04-30,EUR,M,1145.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S13,2019-04-30,EUR,M,0.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-451.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-3535.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-2983.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,-166.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-04-30,EUR,M,-1024.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-04-30,EUR,M,1947.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,-236.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-04-30,EUR,_X,-1459.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-1471.0 +L,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-969.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-2058.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,130.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1P,2019-04-30,EUR,M,2166.0 +L,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1P,2019-04-30,EUR,M,268.0 +A,N,N,W19,S1N,XAU,T,M,R,F11A,FA,_Z,BE,S121,2019-04-30,EUR,M,0.0 diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-12.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-12.csv index 4b0e79dca..fcf301b98 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-12.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-12.csv @@ -1 +1 @@ 
-ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE,ruleid,errorcode,errorlevel +ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,ruleid,OBS_VALUE,errorcode,errorlevel diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-13.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-13.csv index 0cdce6f13..2b149299c 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-13.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-13.csv @@ -1,11 +1,11 @@ -ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE,ruleid,errorcode,errorlevel -C,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,0,sign1c,sign1c, -C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,0,sign8c,sign8c, -C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,-1,sign8c,sign8c, -C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,0,sign8c,sign8c, -C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,-1,sign8c,sign8c, -C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,0,sign8c,sign8c, -C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,-3,sign8c,sign8c, -C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,-7,sign12,sign12, -C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,-7,sign12,sign12, -C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,-8,sign12,sign12, 
+ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,ruleid,OBS_VALUE,errorcode,errorlevel +C,N,C,B5,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,sign1c,0.0,sign1c, +C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-06-30,EUR,_X,sign8c,0.0,sign8c, +C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,sign8c,-1.0,sign8c, +C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,sign8c,0.0,sign8c, +C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,sign8c,-1.0,sign8c, +C,N,N,B6,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-04-30,EUR,_X,sign8c,0.0,sign8c, +C,N,N,J8,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,sign8c,-3.0,sign8c, +C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,sign12,-7.0,sign12, +C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-05-31,EUR,_X,sign12,-7.0,sign12, +C,N,N,J8,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,sign12,-8.0,sign12, diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-14.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-14.csv index 5537dba15..ea7029fce 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-14.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-14.csv @@ -1,5 +1,5 @@ -ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE,imbalance,ruleid,errorcode,errorlevel -B,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,-501.0,1.0,1,Balance (credit-debet),4 -B,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,479.0,-1.0,1,Balance (credit-debet),4 -B,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,23.0,-1.0,1,Balance (credit-debet),4 -N,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,-1878.0,-1.0,2,Net (assets-liabilities),4 
+ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,ruleid,OBS_VALUE,errorcode,errorlevel,imbalance +B,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,1,-501.0,Balance (credit-debet),4.0,1.0 +B,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,1,479.0,Balance (credit-debet),4.0,-1.0 +B,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,1,23.0,Balance (credit-debet),4.0,-1.0 +N,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,2,-1878.0,Net (assets-liabilities),4.0,-1.0 diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-15.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-15.csv index 5d6ae8d20..020e3523a 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-15.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-15.csv @@ -1,8 +1,8 @@ -ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE,imbalance,ruleid,errorcode,errorlevel -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,2000.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,1720.0,1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-4009.0,2.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-668.0,1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1784.0,1.0,1,total economy,4.0 -A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-539.0,-1.0,2,Monetary financial institutions other than central bank,4.0 -A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-374.0,-1.0,2,Monetary financial institutions other than central bank,4.0 
+ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,ruleid,OBS_VALUE,errorcode,errorlevel,imbalance +A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,1,-539.0,Monetary financial institutions other than central bank,4.0,-1.0 +A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1,-374.0,Monetary financial institutions other than central bank,4.0,-1.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,3,2000.0,total economy,4.0,-1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,3,1720.0,total economy,4.0,1.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,3,-4009.0,total economy,4.0,2.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,3,-668.0,total economy,4.0,1.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,3,1784.0,total economy,4.0,1.0 diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-16.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-16.csv index 7075ae845..107ca5222 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-16.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-16.csv @@ -1,94 +1,94 @@ -ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE,imbalance,ruleid,errorcode,errorlevel -A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,15937.0,-29187.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,54.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,22849.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,200.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,728.0,1.0,1,Rest of the world, 
-D,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1615.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,60.0,-1.0,1,Rest of the world, -A,N,N,W1,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,278.0,1.0,1,Rest of the world, -A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,273.0,1.0,1,Rest of the world, -A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,884.0,-1.0,1,Rest of the world, -A,N,N,W1,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,1728.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-4424.0,1.0,1,Rest of the world, -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-4424.0,1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-4424.0,1.0,1,Rest of the world, -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-4424.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,405.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-12186.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-4689.0,1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,121.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,770.0,-1.0,1,Rest of the world, -A,N,N,W1,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1211.0,1.0,1,Rest of the world, -A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-196.0,1.0,1,Rest of the world, -A,N,N,W1,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1211.0,1.0,1,Rest of the world, -A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,255.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,-430.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,71.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,-236.0,-1.0,1,Rest of the world, 
-C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,99.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,406.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,71.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,1118.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,171.0,1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,99.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,1.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,-5.0,1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,-2.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-5241.0,-1.0,1,Rest of the world, -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-9863.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-5133.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,24.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,574.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,223.0,-1.0,1,Rest of the world, -L,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-1774.0,1.0,1,Rest of the world, -N,N,N,W1,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-04-30,EUR,T,814.0,1.0,1,Rest of the world, -A,N,N,W1,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,44.0,-1.0,1,Rest of the world, -A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-70.0,1.0,1,Rest of the world, -A,N,N,W1,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,44.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-281.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,-124.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,140.0,-1.0,1,Rest of the 
world, -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,-158.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,99.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,140.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,-158.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,99.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,1.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,-2.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,-2.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-3535.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,-166.0,1.0,1,Rest of the world, -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-2058.0,1.0,1,Rest of the world, -C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31946.0,23336.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,200.0,-1.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2956.0,-1.0,2,Rest of the world, -D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31466.0,-1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,966.0,1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,636.0,1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,1022.0,1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,207.0,1.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,205.0,1.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2956.0,-1.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,51.0,1.0,2,Rest of the world, 
-C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,574.0,-1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,930.0,-1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,8391.0,-1.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2956.0,-1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,1557.0,-1.0,3,Rest of the world, -C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31946.0,1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,84.0,1.0,3,Rest of the world, -D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31466.0,-1.0,3,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,60.0,-1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,966.0,1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,636.0,1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1557.0,-1.0,3,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,51.0,1.0,3,Rest of the world, -C,N,N,W1,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,28.0,1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,119.0,-1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,544.0,-1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,1557.0,-1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,24161.0,1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,89.0,-1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,8391.0,-1.0,3,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,223.0,-1.0,3,Rest of the world, 
+ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,ruleid,OBS_VALUE,errorcode,errorlevel,imbalance +C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,1,22849.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,1,200.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,1,728.0,Rest of the world,,1.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1,1615.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,1,60.0,Rest of the world,,-1.0 +A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,1,15937.0,Rest of the world,,-29187.0 +C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,1,54.0,Rest of the world,,-1.0 +A,N,N,W1,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,1,278.0,Rest of the world,,1.0 +A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,1,273.0,Rest of the world,,1.0 +A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,1,884.0,Rest of the world,,-1.0 +A,N,N,W1,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,1,1728.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,1,-4424.0,Rest of the world,,1.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,1,-4424.0,Rest of the world,,1.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,1,-4424.0,Rest of the world,,1.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,1,-4424.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,1,405.0,Rest of the world,,-1.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,1,-12186.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,1,121.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,1,770.0,Rest of the world,,-1.0 
+A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,1,-4689.0,Rest of the world,,1.0 +A,N,N,W1,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,1,-1211.0,Rest of the world,,1.0 +A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,1,-196.0,Rest of the world,,1.0 +A,N,N,W1,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,1,-1211.0,Rest of the world,,1.0 +A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,1,255.0,Rest of the world,,-1.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,1,-430.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,1,71.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,1,-236.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,1,99.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,1,406.0,Rest of the world,,-1.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,1,71.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,1,1118.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,1,171.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,1,99.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,1,1.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,1,-5.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,1,-2.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,1,-5241.0,Rest of the world,,-1.0 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,1,-9863.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,1,574.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,1,223.0,Rest of the world,,-1.0 +A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,1,-5133.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,1,24.0,Rest of 
the world,,-1.0 +L,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,1,-1774.0,Rest of the world,,1.0 +N,N,N,W1,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-04-30,EUR,T,1,814.0,Rest of the world,,1.0 +A,N,N,W1,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,1,44.0,Rest of the world,,-1.0 +A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1,-70.0,Rest of the world,,1.0 +A,N,N,W1,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,1,44.0,Rest of the world,,-1.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,1,-281.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,1,-124.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,1,140.0,Rest of the world,,-1.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,1,-158.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,1,99.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,1,140.0,Rest of the world,,-1.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,1,-158.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,1,99.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,1,1.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,1,-2.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,1,-2.0,Rest of the world,,-1.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,1,-3535.0,Rest of the world,,1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,1,-166.0,Rest of the world,,1.0 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,1,-2058.0,Rest of the world,,1.0 +C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,2,31946.0,Rest of the world,,23336.0 +D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,2,200.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2,2956.0,Rest of the world,,-1.0 
+D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,2,31466.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,2,966.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,2,636.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,2,1022.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,2,207.0,Rest of the world,,1.0 +D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,2,205.0,Rest of the world,,1.0 +D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2,2956.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,2,51.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,2,574.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,2,930.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,2,8391.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2,2956.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,3,1557.0,Rest of the world,,-1.0 +C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,3,31946.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,3,84.0,Rest of the world,,1.0 +D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,3,31466.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,3,60.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,3,966.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,3,636.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,3,1557.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,3,51.0,Rest of the world,,1.0 +C,N,N,W1,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,3,28.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,3,119.0,Rest of 
the world,,-1.0 +C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,3,544.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,3,1557.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,3,24161.0,Rest of the world,,1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,3,89.0,Rest of the world,,-1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,3,8391.0,Rest of the world,,-1.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,3,223.0,Rest of the world,,-1.0 diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-17.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-17.csv index 40d018622..3bdb734a1 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-17.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-17.csv @@ -1,5 +1,5 @@ -ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE,imbalance,ruleid,errorcode,errorlevel -C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,37457.0,1.0,1,Current account, -D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,37959.0,1.0,1,Current account, -D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-05-31,EUR,_X,42709.0,-1.0,1,Current account, -C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-04-30,EUR,_X,37889.0,1.0,1,Current account, +ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,ruleid,OBS_VALUE,errorcode,errorlevel,imbalance +C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,3,37457.0,Current account,,1.0 +D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,3,37959.0,Current account,,1.0 
+D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-05-31,EUR,_X,3,42709.0,Current account,,-1.0 +C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-04-30,EUR,_X,3,37889.0,Current account,,1.0 diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-18.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-18.csv index f15b96968..a7aca0711 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-18.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-18.csv @@ -1,4 +1,4 @@ -ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE,imbalance,ruleid,errorcode,errorlevel -A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-05-31,EUR,M,73.0,-1.0,1,All original maturities, -A,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-04-30,EUR,M,-8708.0,1.0,1,All original maturities, -A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-04-30,EUR,M,591.0,1.0,1,All original maturities, +ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,ruleid,OBS_VALUE,errorcode,errorlevel,imbalance +A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-05-31,EUR,M,1,73.0,All original maturities,,-1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-04-30,EUR,M,1,-8708.0,All original maturities,,1.0 +A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-04-30,EUR,M,1,591.0,All original maturities,,1.0 diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-19.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-19.csv index 9b1668b5b..b3180913b 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-19.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-19.csv @@ -1,13 +1,13 @@ 
-ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE,imbalance,ruleid,errorcode,errorlevel -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-06-30,EUR,_X,-66870.0,-60182.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,8105.0,1.0,1,total economy,4.0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-3414.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-10253.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-5401.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-668.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-8041.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1784.0,1.0,1,total economy,4.0 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,1206.0,-1.0,2,Monetary financial institutions other than central bank,4.0 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,4.0,-1.0,2,Monetary financial institutions other than central bank,4.0 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,-1036.0,-1.0,2,Monetary financial institutions other than central bank,4.0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,171.0,1.0,2,Monetary financial institutions other than central bank,4.0 +ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,ruleid,OBS_VALUE,errorcode,errorlevel,imbalance +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,1,1206.0,Monetary financial institutions other than central bank,4.0,-1.0 +A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,1,4.0,Monetary financial institutions other than central bank,4.0,-1.0 
+A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,1,-1036.0,Monetary financial institutions other than central bank,4.0,-1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,1,171.0,Monetary financial institutions other than central bank,4.0,1.0 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-06-30,EUR,_X,3,-66870.0,total economy,4.0,-60182.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,3,8105.0,total economy,4.0,1.0 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,3,-3414.0,total economy,4.0,-1.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,3,-10253.0,total economy,4.0,-1.0 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,3,-5401.0,total economy,4.0,-1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,3,-668.0,total economy,4.0,-1.0 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,3,-8041.0,total economy,4.0,-1.0 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,3,1784.0,total economy,4.0,1.0 diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-20.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-20.csv index 4198ba488..59931e98e 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-20.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-20.csv @@ -1 +1 @@ -ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE,imbalance,ruleid,errorcode,errorlevel +ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,ruleid,OBS_VALUE,errorcode,errorlevel,imbalance diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-21.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-21.csv index c76d25163..030f2d690 100644 --- 
a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-21.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-21.csv @@ -1,122 +1,121 @@ -ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,OBS_VALUE,imbalance,ruleid,errorcode,errorlevel -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1,2019-06-30,EUR,_X,-66870.0,-60182.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,8105.0,1.0,1,total economy,4.0 -L,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-06-30,EUR,M,-3414.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-05-31,EUR,_X,-10253.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1,2019-04-30,EUR,_X,-5401.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,-668.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S1,2019-04-30,EUR,M,-8041.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,1784.0,1.0,1,total economy,4.0 -A,N,N,I8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,1206.0,-1.0,2,Monetary financial institutions other than central bank,4.0 -A,N,N,J8,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,4.0,-1.0,2,Monetary financial institutions other than central bank,4.0 -A,N,N,J8,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,-1036.0,-1.0,2,Monetary financial institutions other than central bank,4.0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,171.0,1.0,2,Monetary financial institutions other than central bank,4.0 -A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-05-31,EUR,M,73.0,-1.0,1,All original maturities, -A,N,N,W1,S1,_T,T,M,P,F3,FA,T,BE,S1,2019-04-30,EUR,M,-8708.0,1.0,1,All original maturities, -A,N,N,W1,S1,X1,T,M,R,F3,FA,T,BE,S121,2019-04-30,EUR,M,591.0,1.0,1,All original maturities, -C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,37457.0,1.0,1,Current account, 
-D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,37959.0,1.0,1,Current account, -D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-05-31,EUR,_X,42709.0,-1.0,1,Current account, -C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-04-30,EUR,_X,37889.0,1.0,1,Current account, -A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-06-30,EUR,N,15937.0,-29187.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-06-30,EUR,_X,54.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,22849.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,200.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-06-30,EUR,_X,728.0,1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-06-30,EUR,_X,1615.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,60.0,-1.0,1,Rest of the world, -A,N,N,W1,S123,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,278.0,1.0,1,Rest of the world, -A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,273.0,1.0,1,Rest of the world, -A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-06-30,EUR,M,884.0,-1.0,1,Rest of the world, -A,N,N,W1,S1P,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,1728.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-06-30,EUR,_X,-4424.0,1.0,1,Rest of the world, -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-06-30,EUR,_X,-4424.0,1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-06-30,EUR,_X,-4424.0,1.0,1,Rest of the world, -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-06-30,EUR,_X,-4424.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S13,2019-06-30,EUR,_X,405.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-06-30,EUR,_X,-12186.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-05-31,EUR,N,-4689.0,1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,121.0,-1.0,1,Rest of the world, 
-D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,770.0,-1.0,1,Rest of the world, -A,N,N,W1,S122,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1211.0,1.0,1,Rest of the world, -A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-196.0,1.0,1,Rest of the world, -A,N,N,W1,S12T,_T,T,M,P,F3,FA,S,BE,S1,2019-05-31,EUR,M,-1211.0,1.0,1,Rest of the world, -A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,255.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-05-31,EUR,_X,-430.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-05-31,EUR,_X,71.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S122,2019-05-31,EUR,M,-236.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-05-31,EUR,_X,99.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S123,2019-05-31,EUR,M,406.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-05-31,EUR,_X,71.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S12T,2019-05-31,EUR,M,1118.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,S,BE,S12T,2019-05-31,EUR,M,171.0,1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-05-31,EUR,_X,99.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-05-31,EUR,_X,1.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-05-31,EUR,M,-5.0,1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-05-31,EUR,_X,-2.0,-1.0,1,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S1P,2019-05-31,EUR,_X,-5241.0,-1.0,1,Rest of the world, -L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S1P,2019-05-31,EUR,_X,-9863.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-5133.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,BE,S1,2019-04-30,EUR,_X,24.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,574.0,-1.0,1,Rest of the world, 
-D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,223.0,-1.0,1,Rest of the world, -L,N,N,W1,S1,_T,T,M,O,F2,FA,T,BE,S1,2019-04-30,EUR,N,-1774.0,1.0,1,Rest of the world, -N,N,N,W1,S1,_T,T,M,F,F7,FA,T,BE,S1,2019-04-30,EUR,T,814.0,1.0,1,Rest of the world, -A,N,N,W1,S122,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,44.0,-1.0,1,Rest of the world, -A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-70.0,1.0,1,Rest of the world, -A,N,N,W1,S12T,_T,T,M,P,F3,FA,L,BE,S1,2019-04-30,EUR,M,44.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S121,2019-04-30,EUR,_X,-281.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S121,2019-04-30,EUR,M,-124.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S122,2019-04-30,EUR,_X,140.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S122,2019-04-30,EUR,M,-158.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S122,2019-04-30,EUR,_X,99.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S12T,2019-04-30,EUR,_X,140.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S12T,2019-04-30,EUR,M,-158.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S12T,2019-04-30,EUR,_X,99.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,BE,S13,2019-04-30,EUR,_X,1.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S13,2019-04-30,EUR,M,-2.0,-1.0,1,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,BE,S13,2019-04-30,EUR,_X,-2.0,-1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-3535.0,1.0,1,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1P,2019-04-30,EUR,M,-166.0,1.0,1,Rest of the world, -L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,BE,S1P,2019-04-30,EUR,_X,-2058.0,1.0,1,Rest of the world, -C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31946.0,23336.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-06-30,EUR,_X,200.0,-1.0,2,Rest of the world, 
-D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,2956.0,-1.0,2,Rest of the world, -D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31466.0,-1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,966.0,1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,636.0,1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-05-31,EUR,_X,1022.0,1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,207.0,1.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-05-31,EUR,_X,205.0,1.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,2956.0,-1.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,51.0,1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,P,F3,D41,T,BE,S1,2019-04-30,EUR,_X,574.0,-1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,BE,S1,2019-04-30,EUR,_X,930.0,-1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,8391.0,-1.0,2,Rest of the world, -D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,2956.0,-1.0,2,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-06-30,EUR,_X,1557.0,-1.0,3,Rest of the world, -C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31946.0,1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,84.0,1.0,3,Rest of the world, -D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,31466.0,-1.0,3,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,60.0,-1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-05-31,EUR,_X,966.0,1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-05-31,EUR,_X,636.0,1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-05-31,EUR,_X,1557.0,-1.0,3,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-05-31,EUR,_X,51.0,1.0,3,Rest of the world, 
-C,N,N,W1,S1,X1,T,M,R,F,D4P,_Z,BE,S121,2019-05-31,EUR,_X,28.0,1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,BE,S1,2019-04-30,EUR,_X,119.0,-1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,BE,S1,2019-04-30,EUR,_X,544.0,-1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,BE,S1,2019-04-30,EUR,_X,1557.0,-1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-04-30,EUR,_X,24161.0,1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-04-30,EUR,_X,89.0,-1.0,3,Rest of the world, -C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,BE,S1,2019-04-30,EUR,_X,8391.0,-1.0,3,Rest of the world, -D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,BE,S1,2019-04-30,EUR,_X,223.0,-1.0,3,Rest of the world, -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-06-30,EUR,M,2000.0,-1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F3,FA,L,BE,S1,2019-05-31,EUR,M,1720.0,1.0,1,total economy,4.0 -A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-4009.0,2.0,1,total economy,4.0 -A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-05-31,EUR,M,-539.0,-1.0,2,Monetary financial institutions other than central bank,4.0 -A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,BE,S1,2019-04-30,EUR,M,-374.0,-1.0,2,Monetary financial institutions other than central bank,4.0 -B,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,BE,S1,2019-06-30,EUR,_X,-501.0,1.0,1,Balance (credit-debet),4.0 -B,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,BE,S1,2019-06-30,EUR,_X,479.0,-1.0,1,Balance (credit-debet),4.0 -B,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,BE,S1,2019-06-30,EUR,_X,23.0,-1.0,1,Balance (credit-debet),4.0 -N,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,BE,S1,2019-06-30,EUR,_X,-1878.0,-1.0,2,Net (assets-liabilities),4.0 +ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,FUNCTIONAL_CAT,INSTR_ASSET,INT_ACC_ITEM,MATURITY,OBS_VALUE,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,errorcode,errorlevel,imbalance,ruleid +A,N,N,I8,S1,_T,T,M,P,F3,FA,S,1206.0,BE,S12T,2019-05-31,EUR,M,Monetary financial institutions other than central bank,4.0,-1.0,1 
+A,N,N,J8,S1,_T,T,M,P,F3,FA,L,4.0,BE,S12T,2019-05-31,EUR,M,Monetary financial institutions other than central bank,4.0,-1.0,1 +A,N,N,J8,S1,_T,T,M,P,F3,FA,S,-1036.0,BE,S12T,2019-05-31,EUR,M,Monetary financial institutions other than central bank,4.0,-1.0,1 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,171.0,BE,S12T,2019-05-31,EUR,M,Monetary financial institutions other than central bank,4.0,1.0,1 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,-66870.0,BE,S1,2019-06-30,EUR,_X,total economy,4.0,-60182.0,3 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,8105.0,BE,S1,2019-06-30,EUR,_X,total economy,4.0,1.0,3 +L,N,N,W1,S1,_T,T,M,P,F3,FA,S,-3414.0,BE,S1,2019-06-30,EUR,M,total economy,4.0,-1.0,3 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,-10253.0,BE,S1,2019-05-31,EUR,_X,total economy,4.0,-1.0,3 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,-5401.0,BE,S1,2019-04-30,EUR,_X,total economy,4.0,-1.0,3 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,-668.0,BE,S1,2019-04-30,EUR,M,total economy,4.0,-1.0,3 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,-8041.0,BE,S1,2019-04-30,EUR,M,total economy,4.0,-1.0,3 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,1784.0,BE,S1,2019-04-30,EUR,M,total economy,4.0,1.0,3 +A,N,N,W1,S1,X1,T,M,R,F3,FA,T,73.0,BE,S121,2019-05-31,EUR,M,All original maturities,,-1.0,1 +A,N,N,W1,S1,_T,T,M,P,F3,FA,T,-8708.0,BE,S1,2019-04-30,EUR,M,All original maturities,,1.0,1 +A,N,N,W1,S1,X1,T,M,R,F3,FA,T,591.0,BE,S121,2019-04-30,EUR,M,All original maturities,,1.0,1 +C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,37457.0,BE,S1,2019-06-30,EUR,_X,Current account,,1.0,3 +D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,37959.0,BE,S1,2019-06-30,EUR,_X,Current account,,1.0,3 +D,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,42709.0,BE,S1,2019-05-31,EUR,_X,Current account,,-1.0,3 +C,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,37889.0,BE,S1,2019-04-30,EUR,_X,Current account,,1.0,3 +C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,22849.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,1 +D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,200.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,1 +D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,728.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,1.0,1 
+D,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,1615.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,1 +D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,60.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,1 +A,N,N,W1,S1,_T,T,M,O,F2,FA,T,15937.0,BE,S1,2019-06-30,EUR,N,Rest of the world,,-29187.0,1 +C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,54.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,1 +A,N,N,W1,S123,_T,T,M,P,F5,FA,_Z,278.0,BE,S1,2019-06-30,EUR,M,Rest of the world,,1.0,1 +A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,273.0,BE,S1,2019-06-30,EUR,M,Rest of the world,,1.0,1 +A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,884.0,BE,S1,2019-06-30,EUR,M,Rest of the world,,-1.0,1 +A,N,N,W1,S1P,_T,T,M,P,F5,FA,_Z,1728.0,BE,S1,2019-06-30,EUR,M,Rest of the world,,-1.0,1 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,-4424.0,BE,S122,2019-06-30,EUR,_X,Rest of the world,,1.0,1 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,-4424.0,BE,S122,2019-06-30,EUR,_X,Rest of the world,,1.0,1 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,-4424.0,BE,S12T,2019-06-30,EUR,_X,Rest of the world,,1.0,1 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,-4424.0,BE,S12T,2019-06-30,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,405.0,BE,S13,2019-06-30,EUR,_X,Rest of the world,,-1.0,1 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,-12186.0,BE,S1P,2019-06-30,EUR,_X,Rest of the world,,-1.0,1 +C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,121.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,-1.0,1 +D,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,770.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,-1.0,1 +A,N,N,W1,S1,_T,T,M,O,F2,FA,T,-4689.0,BE,S1,2019-05-31,EUR,N,Rest of the world,,1.0,1 +A,N,N,W1,S122,_T,T,M,P,F3,FA,S,-1211.0,BE,S1,2019-05-31,EUR,M,Rest of the world,,1.0,1 +A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,-196.0,BE,S1,2019-05-31,EUR,M,Rest of the world,,1.0,1 +A,N,N,W1,S12T,_T,T,M,P,F3,FA,S,-1211.0,BE,S1,2019-05-31,EUR,M,Rest of the world,,1.0,1 +A,N,N,W1,S1P,_T,T,M,P,F3,FA,L,255.0,BE,S1,2019-05-31,EUR,M,Rest of the world,,-1.0,1 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,-430.0,BE,S121,2019-05-31,EUR,_X,Rest of the world,,1.0,1 
+A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,71.0,BE,S122,2019-05-31,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,-236.0,BE,S122,2019-05-31,EUR,M,Rest of the world,,-1.0,1 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,99.0,BE,S122,2019-05-31,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,P,F3,FA,S,406.0,BE,S123,2019-05-31,EUR,M,Rest of the world,,-1.0,1 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,71.0,BE,S12T,2019-05-31,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,1118.0,BE,S12T,2019-05-31,EUR,M,Rest of the world,,1.0,1 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,99.0,BE,S12T,2019-05-31,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,1.0,BE,S13,2019-05-31,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,-5.0,BE,S13,2019-05-31,EUR,M,Rest of the world,,1.0,1 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,-2.0,BE,S13,2019-05-31,EUR,_X,Rest of the world,,-1.0,1 +D,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,-5241.0,BE,S1P,2019-05-31,EUR,_X,Rest of the world,,-1.0,1 +L,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,-9863.0,BE,S1P,2019-05-31,EUR,_X,Rest of the world,,-1.0,1 +C,N,N,W1,S1,_T,T,M,P,F3,D41,T,574.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,1 +D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,223.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,1 +A,N,N,W1,S1,_T,T,M,O,F2,FA,T,-5133.0,BE,S1,2019-04-30,EUR,N,Rest of the world,,-1.0,1 +C,N,N,W1,S1,_T,T,M,O,FLA,D41,T,24.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,1 +L,N,N,W1,S1,_T,T,M,O,F2,FA,T,-1774.0,BE,S1,2019-04-30,EUR,N,Rest of the world,,1.0,1 +N,N,N,W1,S1,_T,T,M,F,F7,FA,T,814.0,BE,S1,2019-04-30,EUR,T,Rest of the world,,1.0,1 +A,N,N,W1,S122,_T,T,M,P,F3,FA,L,44.0,BE,S1,2019-04-30,EUR,M,Rest of the world,,-1.0,1 +A,N,N,W1,S122,_T,T,M,P,F5,FA,_Z,-70.0,BE,S1,2019-04-30,EUR,M,Rest of the world,,1.0,1 +A,N,N,W1,S12T,_T,T,M,P,F3,FA,L,44.0,BE,S1,2019-04-30,EUR,M,Rest of the world,,-1.0,1 +A,N,N,W1,S1,_T,T,M,O,F,FA,_Z,-281.0,BE,S121,2019-04-30,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,-124.0,BE,S121,2019-04-30,EUR,M,Rest of the 
world,,1.0,1 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,140.0,BE,S122,2019-04-30,EUR,_X,Rest of the world,,-1.0,1 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,-158.0,BE,S122,2019-04-30,EUR,M,Rest of the world,,-1.0,1 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,99.0,BE,S122,2019-04-30,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,140.0,BE,S12T,2019-04-30,EUR,_X,Rest of the world,,-1.0,1 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,-158.0,BE,S12T,2019-04-30,EUR,M,Rest of the world,,-1.0,1 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,99.0,BE,S12T,2019-04-30,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,D,F5,FA,_Z,1.0,BE,S13,2019-04-30,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,-2.0,BE,S13,2019-04-30,EUR,M,Rest of the world,,-1.0,1 +C,N,N,W1,S1,_T,T,M,D,F5,D43S,_Z,-2.0,BE,S13,2019-04-30,EUR,_X,Rest of the world,,-1.0,1 +A,N,N,W1,S1,_T,T,M,D,FL,FA,_Z,-3535.0,BE,S1P,2019-04-30,EUR,_X,Rest of the world,,1.0,1 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,-166.0,BE,S1P,2019-04-30,EUR,M,Rest of the world,,1.0,1 +L,N,N,W1,S1,_T,T,M,O,F,FA,_Z,-2058.0,BE,S1P,2019-04-30,EUR,_X,Rest of the world,,1.0,1 +C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,31946.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,23336.0,2 +D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,200.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,2 +D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,2956.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,2 +D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,31466.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,2 +C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,966.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,1.0,2 +C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,636.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,1.0,2 +C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,1022.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,1.0,2 +C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,207.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,1.0,2 +D,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,205.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,1.0,2 +D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,2956.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,-1.0,2 
+D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,51.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,1.0,2 +C,N,N,W1,S1,_T,T,M,P,F3,D41,T,574.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,2 +C,N,N,W1,S1,_T,T,M,_Z,_Z,IN2,_Z,930.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,2 +C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,8391.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,2 +D,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,2956.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,2 +C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,1557.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,3 +C,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,31946.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,1.0,3 +C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,84.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,1.0,3 +D,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,31466.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,3 +D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,60.0,BE,S1,2019-06-30,EUR,_X,Rest of the world,,-1.0,3 +C,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,966.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,1.0,3 +C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,636.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,1.0,3 +C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,1557.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,-1.0,3 +D,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,51.0,BE,S1,2019-05-31,EUR,_X,Rest of the world,,1.0,3 +C,N,N,W1,S1,X1,T,M,R,F,D4P,_Z,28.0,BE,S121,2019-05-31,EUR,_X,Rest of the world,,1.0,3 +C,N,N,W1,S1,_T,T,M,O,F,D4P,_Z,119.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,3 +C,N,N,W1,S1,_T,T,M,D,FL,D4Q,_Z,544.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,3 +C,N,N,W1,S1,_T,T,M,D,F5,D4S,_Z,1557.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,3 +C,N,N,W1,S1,_T,T,M,_Z,_Z,G,_Z,24161.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,1.0,3 +C,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,89.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,3 +C,N,N,W1,S1,_T,T,M,_Z,_Z,S,_Z,8391.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,3 +D,N,N,W1,S1,_T,T,M,_Z,_Z,D1,_Z,223.0,BE,S1,2019-04-30,EUR,_X,Rest of the world,,-1.0,3 
+A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,-539.0,BE,S1,2019-05-31,EUR,M,Monetary financial institutions other than central bank,4.0,-1.0,1 +A,N,N,W1,S12T,_T,T,M,P,F5,FA,_Z,-374.0,BE,S1,2019-04-30,EUR,M,Monetary financial institutions other than central bank,4.0,-1.0,1 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,2000.0,BE,S1,2019-06-30,EUR,M,total economy,4.0,-1.0,3 +A,N,N,W1,S1,_T,T,M,P,F3,FA,L,1720.0,BE,S1,2019-05-31,EUR,M,total economy,4.0,1.0,3 +A,N,N,W1,S1,_T,T,M,P,F5,FA,_Z,-4009.0,BE,S1,2019-05-31,EUR,M,total economy,4.0,2.0,3 +B,N,N,W1,S1,_T,T,M,_Z,_Z,CA,_Z,-501.0,BE,S1,2019-06-30,EUR,_X,Balance (credit-debet),4.0,1.0,1 +B,N,C,W1,S1,_T,T,M,_Z,_Z,G,_Z,479.0,BE,S1,2019-06-30,EUR,_X,Balance (credit-debet),4.0,-1.0,1 +B,N,N,W1,S1,_T,T,M,_Z,_Z,KA,_Z,23.0,BE,S1,2019-06-30,EUR,_X,Balance (credit-debet),4.0,-1.0,1 +N,N,N,W1,S1,_T,T,M,_T,F,FA,_Z,-1878.0,BE,S1,2019-06-30,EUR,_X,Net (assets-liabilities),4.0,-1.0,2 diff --git a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-3.csv b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-3.csv index 0e70bed97..0be073885 100644 --- a/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-3.csv +++ b/tests/BigProjects/NBB_bop/data/DataSet/output/VALIDATIONS-3.csv @@ -1,3 +1,3 @@ ACCOUNTING_ENTRY,ADJUSTMENT,COMP_METHOD,COUNTERPART_AREA,COUNTERPART_SECTOR,CURRENCY_DENOM,FLOW_STOCK_ENTRY,FREQ,MATURITY,REF_AREA,REF_SECTOR,TIME_PERIOD,UNIT_MEASURE,VALUATION,bool_var,imbalance,errorcode,errorlevel -A,N,N,W1,S1,_T,T,M,_Z,BE,S1,2019-06-30,EUR,_X,False,,, L,N,N,W1,S1,_T,T,M,_Z,BE,S1,2019-04-30,EUR,_X,False,,, +A,N,N,W1,S1,_T,T,M,_Z,BE,S1,2019-06-30,EUR,_X,False,,, diff --git a/tests/Hierarchical/data/DataSet/input/GH_567_1-1.csv b/tests/Hierarchical/data/DataSet/input/GH_567_1-1.csv new file mode 100644 index 000000000..fc00eabfe --- /dev/null +++ b/tests/Hierarchical/data/DataSet/input/GH_567_1-1.csv @@ -0,0 +1,6 @@ +Id_1,Id_2,Me_1 +1,A,10.0 +1,B,3.0 +1,C,7.0 +1,F,5.0 +1,X,99.0 diff --git 
a/tests/Hierarchical/data/DataSet/output/GH_567_1-1.csv b/tests/Hierarchical/data/DataSet/output/GH_567_1-1.csv new file mode 100644 index 000000000..3835b7ec0 --- /dev/null +++ b/tests/Hierarchical/data/DataSet/output/GH_567_1-1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +1,A,10.0 +1,E,15.0 +1,D,25.0 diff --git a/tests/Hierarchical/data/DataStructure/input/GH_567_1-1.json b/tests/Hierarchical/data/DataStructure/input/GH_567_1-1.json new file mode 100644 index 000000000..77c914e3a --- /dev/null +++ b/tests/Hierarchical/data/DataStructure/input/GH_567_1-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Hierarchical/data/DataStructure/input/GH_567_2-1.json b/tests/Hierarchical/data/DataStructure/input/GH_567_2-1.json new file mode 100644 index 000000000..77c914e3a --- /dev/null +++ b/tests/Hierarchical/data/DataStructure/input/GH_567_2-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Hierarchical/data/DataStructure/input/GH_567_3-1.json b/tests/Hierarchical/data/DataStructure/input/GH_567_3-1.json new file mode 100644 index 000000000..77c914e3a --- /dev/null +++ b/tests/Hierarchical/data/DataStructure/input/GH_567_3-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": 
"Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Hierarchical/data/DataStructure/input/GH_567_4-1.json b/tests/Hierarchical/data/DataStructure/input/GH_567_4-1.json new file mode 100644 index 000000000..77c914e3a --- /dev/null +++ b/tests/Hierarchical/data/DataStructure/input/GH_567_4-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Hierarchical/data/DataStructure/output/GH_567_1-1.json b/tests/Hierarchical/data/DataStructure/output/GH_567_1-1.json new file mode 100644 index 000000000..a6a1cce99 --- /dev/null +++ b/tests/Hierarchical/data/DataStructure/output/GH_567_1-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Hierarchical/data/vtl/GH_567_1.vtl b/tests/Hierarchical/data/vtl/GH_567_1.vtl new file mode 100644 index 000000000..ddae5c41d --- /dev/null +++ b/tests/Hierarchical/data/vtl/GH_567_1.vtl @@ -0,0 +1,8 @@ +define hierarchical ruleset hie1 (variable rule Id_2) is + E = A + F errorcode "error" errorlevel 5; + A = B + C errorcode "error2" errorlevel 5; + D = E + A errorcode "error3" errorlevel 5; + A >= B errorcode "error4" errorlevel 5 +end hierarchical ruleset; + +DS_r := hierarchy(DS_1, hie1 rule Id_2 computed); diff --git 
a/tests/Hierarchical/data/vtl/GH_567_2.vtl b/tests/Hierarchical/data/vtl/GH_567_2.vtl new file mode 100644 index 000000000..b77440cce --- /dev/null +++ b/tests/Hierarchical/data/vtl/GH_567_2.vtl @@ -0,0 +1,5 @@ +define hierarchical ruleset hie1 (variable rule Id_2) is + A >= B errorcode "error4" errorlevel 5 +end hierarchical ruleset; + +DS_r := hierarchy(DS_1, hie1 rule Id_2 all); diff --git a/tests/Hierarchical/data/vtl/GH_567_3.vtl b/tests/Hierarchical/data/vtl/GH_567_3.vtl new file mode 100644 index 000000000..29ea0e5fe --- /dev/null +++ b/tests/Hierarchical/data/vtl/GH_567_3.vtl @@ -0,0 +1,8 @@ +define hierarchical ruleset hie1 (variable rule Id_2) is + E = A + F errorcode "error" errorlevel 5; + A = B + C errorcode "error2" errorlevel 5; + D = E + A errorcode "error3" errorlevel 5; + A = B errorcode "error4" errorlevel 5 +end hierarchical ruleset; + +DS_r := hierarchy(DS_1, hie1 rule Id_2 computed); \ No newline at end of file diff --git a/tests/Hierarchical/data/vtl/GH_567_4.vtl b/tests/Hierarchical/data/vtl/GH_567_4.vtl new file mode 100644 index 000000000..06882fd93 --- /dev/null +++ b/tests/Hierarchical/data/vtl/GH_567_4.vtl @@ -0,0 +1,8 @@ +define hierarchical ruleset hie1 (variable rule Id_2) is + E = A + F errorcode "error" errorlevel 5; + A = B + C errorcode "error2" errorlevel 5; + D = E errorcode "error3" errorlevel 5; + A = D errorcode "error4" errorlevel 5 +end hierarchical ruleset; + +DS_r := hierarchy(DS_1, hie1 rule Id_2 computed); \ No newline at end of file diff --git a/tests/Hierarchical/test_hierarchical.py b/tests/Hierarchical/test_hierarchical.py index 00c54fbb5..727d4071a 100644 --- a/tests/Hierarchical/test_hierarchical.py +++ b/tests/Hierarchical/test_hierarchical.py @@ -2647,9 +2647,11 @@ def test_GL_397_31(self): code = "GL_397_31" number_inputs = 1 - references_names = ["1"] + exception_code = "1-1-10-10" - self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + self.NewSemanticExceptionTest( + 
code=code, number_inputs=number_inputs, exception_code=exception_code + ) def test_GL_397_33(self): """ @@ -2667,10 +2669,12 @@ def test_GL_397_33(self): code = "GL_397_33" number_inputs = 1 - references_names = ["1"] + exception_code = "1-1-10-10" # with pytest.raises(Exception, match="cast .+? without providing a mask"): - self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=exception_code + ) def test_GL_397_35(self): """ @@ -2754,3 +2758,105 @@ def test_GL_494_5(self): references_names = ["1"] self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_GH_567_1(self): + """ + HIERARCHICAL ROLL-UP: hierarchy + Dataset --> Dataset + Status: OK + Expression: define hierarchical ruleset hie1 (variable rule Id_2) is + E = A + F errorcode "error" errorlevel 5; + A = B + C errorcode "error2" errorlevel 5; + D = E + A errorcode "error3" errorlevel 5; + A >= B errorcode "error4" errorlevel 5 + end hierarchical ruleset; + + DS_r := hierarchy(DS_1, hie1 rule Id_2 all); + + Description: Hierarchy with mixed comparison operators (= and >=). + Non-EQ rules are filtered out by the DAG during rule analysis. + + Git Branch: #567. + Goal: Verify that hierarchy filters non-EQ rules and computes correctly. + """ + + code = "GH_567_1" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_GH_567_2(self): + """ + HIERARCHICAL ROLL-UP: hierarchy + Dataset --> Dataset + Status: OK + Expression: define hierarchical ruleset hie1 (variable rule Id_2) is + A >= B errorcode "error4" errorlevel 5 + end hierarchical ruleset; + + DS_r := hierarchy(DS_1, hie1 rule Id_2 all); + + Description: Hierarchy with no EQ rules should raise SemanticError. + + Git Branch: #567. 
+ Goal: Verify that hierarchy raises error 1-1-10-5 when no rules have = operator. + """ + + code = "GH_567_2" + number_inputs = 1 + error_code = "1-1-10-5" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=error_code + ) + + def test_GH_567_3(self): + """ + HIERARCHICAL ROLL-UP: hierarchy + Dataset --> Dataset + Status: OK + Expression: define hierarchical ruleset hie1 (variable rule Id_2) is + A >= B errorcode "error4" errorlevel 5 + end hierarchical ruleset; + + DS_r := hierarchy(DS_1, hie1 rule Id_2 all); + + Description: Hierarchy with no EQ rules should raise SemanticError. + + Git Branch: #567. + Goal: Verify that hierarchy raises error 1-1-10-5 when no rules have = operator. + """ + + code = "GH_567_3" + number_inputs = 1 + error_code = "1-1-10-10" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=error_code + ) + + def test_GH_567_4(self): + """ + HIERARCHICAL ROLL-UP: hierarchy + Dataset --> Dataset + Status: OK + Expression: define hierarchical ruleset hie1 (variable rule Id_2) is + A >= B errorcode "error4" errorlevel 5 + end hierarchical ruleset; + + DS_r := hierarchy(DS_1, hie1 rule Id_2 all); + + Description: Hierarchy with no EQ rules should raise SemanticError. + + Git Branch: #567. + Goal: Verify that hierarchy raises error 1-1-10-5 when no rules have = operator. 
+ """ + + code = "GH_567_4" + number_inputs = 1 + error_code = "1-3-2-3" + + self.NewSemanticExceptionTest( + code=code, number_inputs=number_inputs, exception_code=error_code + ) diff --git a/tests/ReferenceManual/test_reference_manual.py b/tests/ReferenceManual/test_reference_manual.py index f8541de18..01a2734fb 100644 --- a/tests/ReferenceManual/test_reference_manual.py +++ b/tests/ReferenceManual/test_reference_manual.py @@ -4,36 +4,6 @@ import warnings from pathlib import Path -# if os.environ.get("SPARK", False): -# import sys -# -# virtualenv_path = sys.prefix -# sys.path.append(virtualenv_path) -# # os.environ['PYTHONPATH'] = f'{virtualenv_path}' -# os.environ['PYSPARK_PYTHON'] = f'{virtualenv_path}/bin/python' -# # os.environ['PYSPARK_PYTHON'] = f'{virtualenv_path}\\Scripts\\python' -# # os.environ['VIRTUAL_ENV'] = os.environ.get('PYTHONPATH', f'{virtualenv_path}') -# -# from pyspark import SparkConf, SparkContext -# -# conf = SparkConf() -# conf.set('spark.driver.cores', '2') -# conf.set('spark.executor.cores', '2') -# conf.set('spark.driver.memory', '2g') -# conf.set('spark.executor.memory', '2g') -# # conf.set('spark.sql.execution.arrow.pyspark.enabled', 'true') -# conf.set('spark.pyspark.virtualenv.enabled', 'true') -# conf.set('spark.pyspark.virtualenv.type', 'native') -# conf.set('spark.pyspark.virtualenv.requirements', 'requirements.txt') -# # conf.set('spark.pyspark.virtualenv.bin.path', f'{virtualenv_path}/Scripts/python') -# # Pandas API on Spark automatically uses this Spark context with the configurations set. 
-# SparkContext(conf=conf) -# -# import pyspark.pandas as pd -# -# pd.set_option('compute.ops_on_diff_frames', True) -# os.environ["PYSPARK_SUBMIT_ARGS"] = "--conf spark.network.timeout=600s pyspark-shell" -# else: import pandas as pd import pytest @@ -92,6 +62,9 @@ # Remove tests due to wrong implicit cast (Duration to String) time_operators.remove(100) +# Remove HR Rules cyclic graph +validation_operators.remove(159) + # Multimeasures on specific operators that must raise errors exceptions_tests = [27, 31] From 12e72f048c804dd677f5bc36ff6eb97f1d29ab35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Tue, 10 Mar 2026 13:52:53 +0100 Subject: [PATCH 26/38] Fix #582: Fixed time_agg grammar with single string constant in group_all and windowing (#584) * Grammar aligned with the official VTL 2.1 * Regenerated Lexer, Parser and VTLVisitor * Fixed related tests * Fixed mypy errors --- src/vtlengine/AST/Grammar/Vtl.g4 | 4 +- src/vtlengine/AST/Grammar/lexer.py | 19 +- src/vtlengine/AST/Grammar/parser.py | 2935 ++++------------- src/vtlengine/AST/VtlVisitor.py | 112 +- tests/AST/data/prettier/complete_grammar.vtl | 4 +- .../AST/data/prettier/group_all_time_aggr.vtl | 2 +- .../prettier/reference_complete_grammar.vtl | 4 +- .../reference_group_all_time_aggr.vtl | 2 +- tests/AST/data/vtl/complete_grammar.vtl | 4 +- tests/AST/data/vtl/time.vtl | 6 +- tests/Additional/data/vtl/7-20.vtl | 12 +- tests/Additional/data/vtl/7-21.vtl | 12 +- tests/Additional/test_additional.py | 4 +- .../data/vtl/test_grammar.vtl | 4 +- tests/IfThenElse/data/vtl/GL_436_1.vtl | 4 +- tests/IfThenElse/data/vtl/GL_436_2.vtl | 4 +- tests/ReferenceManual/data/vtl/RM121.vtl | 2 +- .../data/vtl_defined_operators/RM121.vtl | 2 +- tests/TimePeriod/data/vtl/GL_417_1.vtl | 2 +- tests/TimePeriod/data/vtl/GL_417_2.vtl | 2 +- tests/TimePeriod/data/vtl/GL_417_3.vtl | 2 +- tests/TimePeriod/data/vtl/GL_417_4.vtl | 2 +- 
tests/TimePeriod/test_timeperiod.py | 8 +- tests/UDO/data/vtl/GL_473_1.vtl | 6 +- tests/UDO/data/vtl/GL_473_2.vtl | 4 +- tests/UDO/data/vtl/GL_474_1.vtl | 4 +- tests/UDO/data/vtl/GL_474_2.vtl | 4 +- tests/UDO/data/vtl/GL_474_3.vtl | 4 +- tests/UDO/data/vtl/GL_475_1.vtl | 6 +- tests/UDO/data/vtl/GL_476_1.vtl | 4 +- 30 files changed, 722 insertions(+), 2462 deletions(-) diff --git a/src/vtlengine/AST/Grammar/Vtl.g4 b/src/vtlengine/AST/Grammar/Vtl.g4 index 8873f6dca..2ddd93029 100644 --- a/src/vtlengine/AST/Grammar/Vtl.g4 +++ b/src/vtlengine/AST/Grammar/Vtl.g4 @@ -437,7 +437,7 @@ limitClauseItem: /* ------------------------------------------------------------ GROUPING CLAUSE ------------------------------------*/ groupingClause: GROUP op=(BY | EXCEPT) componentID (COMMA componentID)* ( TIME_AGG LPAREN STRING_CONSTANT (COMMA delim=(FIRST|LAST))? RPAREN )? # groupByOrExcept - | GROUP ALL ( TIME_AGG LPAREN STRING_CONSTANT (COMMA (STRING_CONSTANT|OPTIONAL))? (COMMA optionalExpr)? (COMMA delim=(FIRST|LAST))? RPAREN )? # groupAll + | GROUP ALL ( TIME_AGG LPAREN STRING_CONSTANT (COMMA delim=(FIRST|LAST))? RPAREN )? 
# groupAll ; havingClause: @@ -706,4 +706,4 @@ basicScalarType: retainType: BOOLEAN_CONSTANT | ALL -; +; \ No newline at end of file diff --git a/src/vtlengine/AST/Grammar/lexer.py b/src/vtlengine/AST/Grammar/lexer.py index bed9dd3ca..fa3d00b98 100644 --- a/src/vtlengine/AST/Grammar/lexer.py +++ b/src/vtlengine/AST/Grammar/lexer.py @@ -1,14 +1,17 @@ -# Generated from Vtl.g4 by ANTLR 4.9.2 +# Generated from Vtl.g4 by ANTLR 4.9.3 +import sys +from io import StringIO +from typing import TextIO + from antlr4 import ( - Lexer as ANTLRLexer, - ATNDeserializer, DFA, - PredictionContextCache, + ATNDeserializer, LexerATNSimulator, + PredictionContextCache, +) +from antlr4 import ( + Lexer as ANTLRLexer, ) -from io import StringIO -import sys -from typing import TextIO def serializedATN(): @@ -2129,7 +2132,7 @@ class Lexer(ANTLRLexer): def __init__(self, input=None, output: TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.9.2") + self.checkVersion("4.9.3") self._interp = LexerATNSimulator( self, self.atn, self.decisionsToDFA, PredictionContextCache() ) diff --git a/src/vtlengine/AST/Grammar/parser.py b/src/vtlengine/AST/Grammar/parser.py index ab5a61b9a..f73f1314b 100644 --- a/src/vtlengine/AST/Grammar/parser.py +++ b/src/vtlengine/AST/Grammar/parser.py @@ -1,19 +1,18 @@ -# Generated from Vtl.g4 by ANTLR 4.9.2 +# Generated from Vtl.g4 by ANTLR 4.9.3 # encoding: utf-8 -from antlr4 import * -from antlr4 import Parser as ANTLRParser -from io import StringIO import sys -from typing import TextIO, Any +from io import StringIO +from typing import Any, TextIO +from antlr4 import * +from antlr4 import Parser as ANTLRParser from antlr4.atn.ATN import ATN -from antlr4.tree.Tree import ParseTreeListener def serializedATN(): with StringIO() as buf: buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\u00fb") - buf.write("\u076b\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\u0763\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") 
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23") buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31") @@ -150,96 +149,95 @@ def serializedATN(): buf.write("\67\3\67\38\58\u05bb\n8\38\38\39\39\39\39\39\39\39\39") buf.write("\39\39\39\39\39\59\u05cc\n9\3:\3:\3:\3:\3:\7:\u05d3\n") buf.write(":\f:\16:\u05d6\13:\3:\3:\3:\3:\3:\5:\u05dd\n:\3:\5:\u05e0") - buf.write("\n:\3:\3:\3:\3:\3:\3:\3:\5:\u05e9\n:\3:\3:\5:\u05ed\n") - buf.write(":\3:\3:\5:\u05f1\n:\3:\5:\u05f4\n:\5:\u05f6\n:\3;\3;\3") - buf.write(";\3<\3<\3<\3<\5<\u05ff\n<\3=\3=\3=\5=\u0604\n=\3>\3>\5") - buf.write(">\u0608\n>\3?\3?\3?\3?\3?\5?\u060f\n?\3@\3@\3@\5@\u0614") - buf.write("\n@\3A\3A\5A\u0618\nA\3A\5A\u061b\nA\3A\5A\u061e\nA\3") - buf.write("A\5A\u0621\nA\3B\3B\3B\3B\3B\5B\u0628\nB\3C\3C\3C\3C\3") - buf.write("C\7C\u062f\nC\fC\16C\u0632\13C\3C\3C\5C\u0636\nC\3D\3") - buf.write("D\5D\u063a\nD\3E\3E\3E\3E\3E\5E\u0641\nE\3F\3F\3F\3F\3") - buf.write("F\3F\7F\u0649\nF\fF\16F\u064c\13F\3F\3F\5F\u0650\nF\3") - buf.write("F\3F\3F\3F\3F\7F\u0657\nF\fF\16F\u065a\13F\3F\3F\5F\u065e") - buf.write("\nF\5F\u0660\nF\3G\3G\3G\3G\3G\3G\3G\3G\7G\u066a\nG\f") - buf.write("G\16G\u066d\13G\3G\3G\5G\u0671\nG\3G\5G\u0674\nG\3G\3") - buf.write("G\3G\3G\3G\3G\3G\7G\u067d\nG\fG\16G\u0680\13G\3G\3G\5") - buf.write("G\u0684\nG\3G\3G\5G\u0688\nG\5G\u068a\nG\3H\3H\3I\3I\3") - buf.write("J\3J\3J\3J\7J\u0694\nJ\fJ\16J\u0697\13J\3K\3K\3K\5K\u069c") - buf.write("\nK\3L\3L\3L\7L\u06a1\nL\fL\16L\u06a4\13L\3M\3M\5M\u06a8") - buf.write("\nM\3M\3M\3M\3M\5M\u06ae\nM\3M\3M\5M\u06b2\nM\3M\5M\u06b5") - buf.write("\nM\3N\3N\3N\7N\u06ba\nN\fN\16N\u06bd\13N\3O\3O\5O\u06c1") - buf.write("\nO\3O\3O\5O\u06c5\nO\3O\5O\u06c8\nO\3P\3P\3P\5P\u06cd") - buf.write("\nP\3P\3P\3P\3Q\3Q\3Q\7Q\u06d5\nQ\fQ\16Q\u06d8\13Q\3R") - buf.write("\3R\3R\3R\5R\u06de\nR\3R\3R\5R\u06e2\nR\3R\3R\7R\u06e6") - buf.write("\nR\fR\16R\u06e9\13R\3S\5S\u06ec\nS\3S\3S\3S\3S\3S\5S") 
- buf.write("\u06f3\nS\3T\3T\3T\5T\u06f8\nT\3U\3U\3U\3U\3U\3U\3U\3") - buf.write("U\7U\u0702\nU\fU\16U\u0705\13U\3U\3U\5U\u0709\nU\3V\3") - buf.write("V\3V\5V\u070e\nV\3W\3W\5W\u0712\nW\3X\3X\3Y\3Y\3Z\3Z\3") - buf.write("Z\3Z\7Z\u071c\nZ\fZ\16Z\u071f\13Z\3[\3[\3\\\3\\\3\\\3") - buf.write("]\3]\3^\3^\3_\3_\3`\3`\3a\3a\3b\3b\3b\5b\u0733\nb\3c\3") - buf.write("c\3c\3c\7c\u0739\nc\fc\16c\u073c\13c\3c\3c\3d\3d\3d\3") - buf.write("e\3e\3e\3f\3f\3g\3g\5g\u074a\ng\3h\3h\5h\u074e\nh\3i\3") - buf.write("i\3i\3i\3i\5i\u0755\ni\3j\3j\3j\3k\3k\3l\3l\3m\3m\3n\3") - buf.write("n\3n\3n\3n\5n\u0765\nn\3o\3o\3p\3p\3p\2\4\6\bq\2\4\6\b") - buf.write('\n\f\16\20\22\24\26\30\32\34\36 "$&(*,.\60\62\64\668') - buf.write(":<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0084") - buf.write("\u0086\u0088\u008a\u008c\u008e\u0090\u0092\u0094\u0096") - buf.write("\u0098\u009a\u009c\u009e\u00a0\u00a2\u00a4\u00a6\u00a8") - buf.write("\u00aa\u00ac\u00ae\u00b0\u00b2\u00b4\u00b6\u00b8\u00ba") - buf.write("\u00bc\u00be\u00c0\u00c2\u00c4\u00c6\u00c8\u00ca\u00cc") - buf.write("\u00ce\u00d0\u00d2\u00d4\u00d6\u00d8\u00da\u00dc\u00de") - buf.write("\2%\4\2\17\20\64\64\3\2\21\22\4\2\17\20``\3\2\62\63\3") - buf.write("\2\66\67\3\2+,\4\2\u00cc\u00cc\u00ce\u00ce\3\2\u00c3\u00c4") - buf.write("\3\2\u00c5\u00c6\5\2__ac\u0085\u0086\6\2WWYYnn\u0089\u008b") - buf.write("\3\2[\\\5\2??ZZ]^\4\2JJ\u00d3\u00d3\3\2\u00a4\u00a5\4") - buf.write("\2uu\u00f6\u00f6\3\2TU\4\2==\u008d\u008d\4\2JJvv\5\2R") - buf.write("Seh\u008e\u0091\6\2RSeh\u008e\u0091\u0095\u0096\3\2\u0097") - buf.write("\u0098\3\2PQ\3\2\17\20\4\2NN\u0093\u0093\3\2wx\4\2\17") - buf.write("\17\21\21\5\2JJvv\u00ea\u00ea\3\2\u00e3\u00e8\4\2{{\u00de") - buf.write("\u00de\5\2{{\u0082\u0082\u00dd\u00dd\4\2JJ\u00e2\u00e2") - buf.write("\3\2\t\16\5\2\u00aa\u00b0\u00d4\u00d4\u00eb\u00eb\4\2") - buf.write("JJ\u00f5\u00f5\2\u0833\2\u00e5\3\2\2\2\4\u00f3\3\2\2\2") - buf.write("\6\u0113\3\2\2\2\b\u0156\3\2\2\2\n\u017b\3\2\2\2\f\u0189") - 
buf.write("\3\2\2\2\16\u0192\3\2\2\2\20\u0194\3\2\2\2\22\u019d\3") - buf.write("\2\2\2\24\u01a5\3\2\2\2\26\u01a8\3\2\2\2\30\u01b1\3\2") - buf.write("\2\2\32\u01ba\3\2\2\2\34\u01bf\3\2\2\2\36\u01cc\3\2\2") - buf.write('\2 \u01e1\3\2\2\2"\u0215\3\2\2\2$\u0250\3\2\2\2&\u028b') - buf.write("\3\2\2\2(\u028f\3\2\2\2*\u0293\3\2\2\2,\u02c5\3\2\2\2") - buf.write(".\u02f7\3\2\2\2\60\u030e\3\2\2\2\62\u0325\3\2\2\2\64\u0347") - buf.write("\3\2\2\2\66\u035e\3\2\2\28\u03c6\3\2\2\2:\u042e\3\2\2") - buf.write("\2<\u044d\3\2\2\2>\u044f\3\2\2\2@\u04a3\3\2\2\2B\u04a5") - buf.write("\3\2\2\2D\u04ac\3\2\2\2F\u04bb\3\2\2\2H\u04bd\3\2\2\2") - buf.write("J\u04f7\3\2\2\2L\u0533\3\2\2\2N\u0535\3\2\2\2P\u0539\3") - buf.write("\2\2\2R\u0542\3\2\2\2T\u0549\3\2\2\2V\u054f\3\2\2\2X\u055f") - buf.write("\3\2\2\2Z\u0561\3\2\2\2\\\u0569\3\2\2\2^\u057c\3\2\2\2") - buf.write("`\u0582\3\2\2\2b\u058f\3\2\2\2d\u0592\3\2\2\2f\u059c\3") - buf.write("\2\2\2h\u05a6\3\2\2\2j\u05ad\3\2\2\2l\u05b5\3\2\2\2n\u05ba") - buf.write("\3\2\2\2p\u05cb\3\2\2\2r\u05f5\3\2\2\2t\u05f7\3\2\2\2") - buf.write("v\u05fa\3\2\2\2x\u0603\3\2\2\2z\u0607\3\2\2\2|\u060e\3") - buf.write("\2\2\2~\u0613\3\2\2\2\u0080\u0617\3\2\2\2\u0082\u0622") - buf.write("\3\2\2\2\u0084\u0629\3\2\2\2\u0086\u0639\3\2\2\2\u0088") - buf.write("\u063b\3\2\2\2\u008a\u065f\3\2\2\2\u008c\u0689\3\2\2\2") - buf.write("\u008e\u068b\3\2\2\2\u0090\u068d\3\2\2\2\u0092\u068f\3") - buf.write("\2\2\2\u0094\u0698\3\2\2\2\u0096\u069d\3\2\2\2\u0098\u06a7") - buf.write("\3\2\2\2\u009a\u06b6\3\2\2\2\u009c\u06c0\3\2\2\2\u009e") - buf.write("\u06c9\3\2\2\2\u00a0\u06d1\3\2\2\2\u00a2\u06dd\3\2\2\2") - buf.write("\u00a4\u06eb\3\2\2\2\u00a6\u06f7\3\2\2\2\u00a8\u0708\3") - buf.write("\2\2\2\u00aa\u070a\3\2\2\2\u00ac\u070f\3\2\2\2\u00ae\u0713") - buf.write("\3\2\2\2\u00b0\u0715\3\2\2\2\u00b2\u0717\3\2\2\2\u00b4") - buf.write("\u0720\3\2\2\2\u00b6\u0722\3\2\2\2\u00b8\u0725\3\2\2\2") - buf.write("\u00ba\u0727\3\2\2\2\u00bc\u0729\3\2\2\2\u00be\u072b\3") - 
buf.write("\2\2\2\u00c0\u072d\3\2\2\2\u00c2\u072f\3\2\2\2\u00c4\u0734") - buf.write("\3\2\2\2\u00c6\u073f\3\2\2\2\u00c8\u0742\3\2\2\2\u00ca") - buf.write("\u0745\3\2\2\2\u00cc\u0749\3\2\2\2\u00ce\u074d\3\2\2\2") - buf.write("\u00d0\u0754\3\2\2\2\u00d2\u0756\3\2\2\2\u00d4\u0759\3") - buf.write("\2\2\2\u00d6\u075b\3\2\2\2\u00d8\u075d\3\2\2\2\u00da\u0764") - buf.write("\3\2\2\2\u00dc\u0766\3\2\2\2\u00de\u0768\3\2\2\2\u00e0") - buf.write("\u00e1\5\4\3\2\u00e1\u00e2\7\u00f9\2\2\u00e2\u00e4\3\2") - buf.write("\2\2\u00e3\u00e0\3\2\2\2\u00e4\u00e7\3\2\2\2\u00e5\u00e3") - buf.write("\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00e8\3\2\2\2\u00e7") - buf.write("\u00e5\3\2\2\2\u00e8\u00e9\7\2\2\3\u00e9\3\3\2\2\2\u00ea") - buf.write("\u00eb\5\u00be`\2\u00eb\u00ec\7\26\2\2\u00ec\u00ed\5\6") - buf.write("\4\2\u00ed\u00f4\3\2\2\2\u00ee\u00ef\5\u00be`\2\u00ef") + buf.write("\n:\3:\3:\3:\3:\3:\3:\3:\5:\u05e9\n:\3:\5:\u05ec\n:\5") + buf.write(":\u05ee\n:\3;\3;\3;\3<\3<\3<\3<\5<\u05f7\n<\3=\3=\3=\5") + buf.write("=\u05fc\n=\3>\3>\5>\u0600\n>\3?\3?\3?\3?\3?\5?\u0607\n") + buf.write("?\3@\3@\3@\5@\u060c\n@\3A\3A\5A\u0610\nA\3A\5A\u0613\n") + buf.write("A\3A\5A\u0616\nA\3A\5A\u0619\nA\3B\3B\3B\3B\3B\5B\u0620") + buf.write("\nB\3C\3C\3C\3C\3C\7C\u0627\nC\fC\16C\u062a\13C\3C\3C") + buf.write("\5C\u062e\nC\3D\3D\5D\u0632\nD\3E\3E\3E\3E\3E\5E\u0639") + buf.write("\nE\3F\3F\3F\3F\3F\3F\7F\u0641\nF\fF\16F\u0644\13F\3F") + buf.write("\3F\5F\u0648\nF\3F\3F\3F\3F\3F\7F\u064f\nF\fF\16F\u0652") + buf.write("\13F\3F\3F\5F\u0656\nF\5F\u0658\nF\3G\3G\3G\3G\3G\3G\3") + buf.write("G\3G\7G\u0662\nG\fG\16G\u0665\13G\3G\3G\5G\u0669\nG\3") + buf.write("G\5G\u066c\nG\3G\3G\3G\3G\3G\3G\3G\7G\u0675\nG\fG\16G") + buf.write("\u0678\13G\3G\3G\5G\u067c\nG\3G\3G\5G\u0680\nG\5G\u0682") + buf.write("\nG\3H\3H\3I\3I\3J\3J\3J\3J\7J\u068c\nJ\fJ\16J\u068f\13") + buf.write("J\3K\3K\3K\5K\u0694\nK\3L\3L\3L\7L\u0699\nL\fL\16L\u069c") + buf.write("\13L\3M\3M\5M\u06a0\nM\3M\3M\3M\3M\5M\u06a6\nM\3M\3M\5") + 
buf.write("M\u06aa\nM\3M\5M\u06ad\nM\3N\3N\3N\7N\u06b2\nN\fN\16N") + buf.write("\u06b5\13N\3O\3O\5O\u06b9\nO\3O\3O\5O\u06bd\nO\3O\5O\u06c0") + buf.write("\nO\3P\3P\3P\5P\u06c5\nP\3P\3P\3P\3Q\3Q\3Q\7Q\u06cd\n") + buf.write("Q\fQ\16Q\u06d0\13Q\3R\3R\3R\3R\5R\u06d6\nR\3R\3R\5R\u06da") + buf.write("\nR\3R\3R\7R\u06de\nR\fR\16R\u06e1\13R\3S\5S\u06e4\nS") + buf.write("\3S\3S\3S\3S\3S\5S\u06eb\nS\3T\3T\3T\5T\u06f0\nT\3U\3") + buf.write("U\3U\3U\3U\3U\3U\3U\7U\u06fa\nU\fU\16U\u06fd\13U\3U\3") + buf.write("U\5U\u0701\nU\3V\3V\3V\5V\u0706\nV\3W\3W\5W\u070a\nW\3") + buf.write("X\3X\3Y\3Y\3Z\3Z\3Z\3Z\7Z\u0714\nZ\fZ\16Z\u0717\13Z\3") + buf.write("[\3[\3\\\3\\\3\\\3]\3]\3^\3^\3_\3_\3`\3`\3a\3a\3b\3b\3") + buf.write("b\5b\u072b\nb\3c\3c\3c\3c\7c\u0731\nc\fc\16c\u0734\13") + buf.write("c\3c\3c\3d\3d\3d\3e\3e\3e\3f\3f\3g\3g\5g\u0742\ng\3h\3") + buf.write("h\5h\u0746\nh\3i\3i\3i\3i\3i\5i\u074d\ni\3j\3j\3j\3k\3") + buf.write("k\3l\3l\3m\3m\3n\3n\3n\3n\3n\5n\u075d\nn\3o\3o\3p\3p\3") + buf.write('p\2\4\6\bq\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 "$') + buf.write("&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|") + buf.write("~\u0080\u0082\u0084\u0086\u0088\u008a\u008c\u008e\u0090") + buf.write("\u0092\u0094\u0096\u0098\u009a\u009c\u009e\u00a0\u00a2") + buf.write("\u00a4\u00a6\u00a8\u00aa\u00ac\u00ae\u00b0\u00b2\u00b4") + buf.write("\u00b6\u00b8\u00ba\u00bc\u00be\u00c0\u00c2\u00c4\u00c6") + buf.write("\u00c8\u00ca\u00cc\u00ce\u00d0\u00d2\u00d4\u00d6\u00d8") + buf.write("\u00da\u00dc\u00de\2%\4\2\17\20\64\64\3\2\21\22\4\2\17") + buf.write("\20``\3\2\62\63\3\2\66\67\3\2+,\4\2\u00cc\u00cc\u00ce") + buf.write("\u00ce\3\2\u00c3\u00c4\3\2\u00c5\u00c6\5\2__ac\u0085\u0086") + buf.write("\6\2WWYYnn\u0089\u008b\3\2[\\\5\2??ZZ]^\4\2JJ\u00d3\u00d3") + buf.write("\3\2\u00a4\u00a5\4\2uu\u00f6\u00f6\3\2TU\4\2==\u008d\u008d") + buf.write("\4\2JJvv\5\2RSeh\u008e\u0091\6\2RSeh\u008e\u0091\u0095") + buf.write("\u0096\3\2\u0097\u0098\3\2PQ\3\2\17\20\4\2NN\u0093\u0093") + 
buf.write("\3\2wx\4\2\17\17\21\21\5\2JJvv\u00ea\u00ea\3\2\u00e3\u00e8") + buf.write("\4\2{{\u00de\u00de\5\2{{\u0082\u0082\u00dd\u00dd\4\2J") + buf.write("J\u00e2\u00e2\3\2\t\16\5\2\u00aa\u00b0\u00d4\u00d4\u00eb") + buf.write("\u00eb\4\2JJ\u00f5\u00f5\2\u0829\2\u00e5\3\2\2\2\4\u00f3") + buf.write("\3\2\2\2\6\u0113\3\2\2\2\b\u0156\3\2\2\2\n\u017b\3\2\2") + buf.write("\2\f\u0189\3\2\2\2\16\u0192\3\2\2\2\20\u0194\3\2\2\2\22") + buf.write("\u019d\3\2\2\2\24\u01a5\3\2\2\2\26\u01a8\3\2\2\2\30\u01b1") + buf.write("\3\2\2\2\32\u01ba\3\2\2\2\34\u01bf\3\2\2\2\36\u01cc\3") + buf.write('\2\2\2 \u01e1\3\2\2\2"\u0215\3\2\2\2$\u0250\3\2\2\2&') + buf.write("\u028b\3\2\2\2(\u028f\3\2\2\2*\u0293\3\2\2\2,\u02c5\3") + buf.write("\2\2\2.\u02f7\3\2\2\2\60\u030e\3\2\2\2\62\u0325\3\2\2") + buf.write("\2\64\u0347\3\2\2\2\66\u035e\3\2\2\28\u03c6\3\2\2\2:\u042e") + buf.write("\3\2\2\2<\u044d\3\2\2\2>\u044f\3\2\2\2@\u04a3\3\2\2\2") + buf.write("B\u04a5\3\2\2\2D\u04ac\3\2\2\2F\u04bb\3\2\2\2H\u04bd\3") + buf.write("\2\2\2J\u04f7\3\2\2\2L\u0533\3\2\2\2N\u0535\3\2\2\2P\u0539") + buf.write("\3\2\2\2R\u0542\3\2\2\2T\u0549\3\2\2\2V\u054f\3\2\2\2") + buf.write("X\u055f\3\2\2\2Z\u0561\3\2\2\2\\\u0569\3\2\2\2^\u057c") + buf.write("\3\2\2\2`\u0582\3\2\2\2b\u058f\3\2\2\2d\u0592\3\2\2\2") + buf.write("f\u059c\3\2\2\2h\u05a6\3\2\2\2j\u05ad\3\2\2\2l\u05b5\3") + buf.write("\2\2\2n\u05ba\3\2\2\2p\u05cb\3\2\2\2r\u05ed\3\2\2\2t\u05ef") + buf.write("\3\2\2\2v\u05f2\3\2\2\2x\u05fb\3\2\2\2z\u05ff\3\2\2\2") + buf.write("|\u0606\3\2\2\2~\u060b\3\2\2\2\u0080\u060f\3\2\2\2\u0082") + buf.write("\u061a\3\2\2\2\u0084\u0621\3\2\2\2\u0086\u0631\3\2\2\2") + buf.write("\u0088\u0633\3\2\2\2\u008a\u0657\3\2\2\2\u008c\u0681\3") + buf.write("\2\2\2\u008e\u0683\3\2\2\2\u0090\u0685\3\2\2\2\u0092\u0687") + buf.write("\3\2\2\2\u0094\u0690\3\2\2\2\u0096\u0695\3\2\2\2\u0098") + buf.write("\u069f\3\2\2\2\u009a\u06ae\3\2\2\2\u009c\u06b8\3\2\2\2") + buf.write("\u009e\u06c1\3\2\2\2\u00a0\u06c9\3\2\2\2\u00a2\u06d5\3") + 
buf.write("\2\2\2\u00a4\u06e3\3\2\2\2\u00a6\u06ef\3\2\2\2\u00a8\u0700") + buf.write("\3\2\2\2\u00aa\u0702\3\2\2\2\u00ac\u0707\3\2\2\2\u00ae") + buf.write("\u070b\3\2\2\2\u00b0\u070d\3\2\2\2\u00b2\u070f\3\2\2\2") + buf.write("\u00b4\u0718\3\2\2\2\u00b6\u071a\3\2\2\2\u00b8\u071d\3") + buf.write("\2\2\2\u00ba\u071f\3\2\2\2\u00bc\u0721\3\2\2\2\u00be\u0723") + buf.write("\3\2\2\2\u00c0\u0725\3\2\2\2\u00c2\u0727\3\2\2\2\u00c4") + buf.write("\u072c\3\2\2\2\u00c6\u0737\3\2\2\2\u00c8\u073a\3\2\2\2") + buf.write("\u00ca\u073d\3\2\2\2\u00cc\u0741\3\2\2\2\u00ce\u0745\3") + buf.write("\2\2\2\u00d0\u074c\3\2\2\2\u00d2\u074e\3\2\2\2\u00d4\u0751") + buf.write("\3\2\2\2\u00d6\u0753\3\2\2\2\u00d8\u0755\3\2\2\2\u00da") + buf.write("\u075c\3\2\2\2\u00dc\u075e\3\2\2\2\u00de\u0760\3\2\2\2") + buf.write("\u00e0\u00e1\5\4\3\2\u00e1\u00e2\7\u00f9\2\2\u00e2\u00e4") + buf.write("\3\2\2\2\u00e3\u00e0\3\2\2\2\u00e4\u00e7\3\2\2\2\u00e5") + buf.write("\u00e3\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00e8\3\2\2\2") + buf.write("\u00e7\u00e5\3\2\2\2\u00e8\u00e9\7\2\2\3\u00e9\3\3\2\2") + buf.write("\2\u00ea\u00eb\5\u00be`\2\u00eb\u00ec\7\26\2\2\u00ec\u00ed") + buf.write("\5\6\4\2\u00ed\u00f4\3\2\2\2\u00ee\u00ef\5\u00be`\2\u00ef") buf.write("\u00f0\7~\2\2\u00f0\u00f1\5\6\4\2\u00f1\u00f4\3\2\2\2") buf.write('\u00f2\u00f4\5"\22\2\u00f3\u00ea\3\2\2\2\u00f3\u00ee') buf.write("\3\2\2\2\u00f3\u00f2\3\2\2\2\u00f4\5\3\2\2\2\u00f5\u00f6") @@ -796,184 +794,180 @@ def serializedATN(): buf.write("\u05d9\7\3\2\2\u05d9\u05dc\7\u00f6\2\2\u05da\u05db\7\23") buf.write("\2\2\u05db\u05dd\t\22\2\2\u05dc\u05da\3\2\2\2\u05dc\u05dd") buf.write("\3\2\2\2\u05dd\u05de\3\2\2\2\u05de\u05e0\7\4\2\2\u05df") - buf.write("\u05d7\3\2\2\2\u05df\u05e0\3\2\2\2\u05e0\u05f6\3\2\2\2") - buf.write("\u05e1\u05e2\7\u0092\2\2\u05e2\u05f3\7J\2\2\u05e3\u05e4") + buf.write("\u05d7\3\2\2\2\u05df\u05e0\3\2\2\2\u05e0\u05ee\3\2\2\2") + buf.write("\u05e1\u05e2\7\u0092\2\2\u05e2\u05eb\7J\2\2\u05e3\u05e4") 
buf.write("\7\u00d5\2\2\u05e4\u05e5\7\3\2\2\u05e5\u05e8\7\u00f6\2") - buf.write("\2\u05e6\u05e7\7\23\2\2\u05e7\u05e9\t\21\2\2\u05e8\u05e6") - buf.write("\3\2\2\2\u05e8\u05e9\3\2\2\2\u05e9\u05ec\3\2\2\2\u05ea") - buf.write("\u05eb\7\23\2\2\u05eb\u05ed\5\u00ccg\2\u05ec\u05ea\3\2") - buf.write("\2\2\u05ec\u05ed\3\2\2\2\u05ed\u05f0\3\2\2\2\u05ee\u05ef") - buf.write("\7\23\2\2\u05ef\u05f1\t\22\2\2\u05f0\u05ee\3\2\2\2\u05f0") - buf.write("\u05f1\3\2\2\2\u05f1\u05f2\3\2\2\2\u05f2\u05f4\7\4\2\2") - buf.write("\u05f3\u05e3\3\2\2\2\u05f3\u05f4\3\2\2\2\u05f4\u05f6\3") - buf.write("\2\2\2\u05f5\u05cd\3\2\2\2\u05f5\u05e1\3\2\2\2\u05f6s") - buf.write("\3\2\2\2\u05f7\u05f8\7\u0094\2\2\u05f8\u05f9\5\b\5\2\u05f9") - buf.write("u\3\2\2\2\u05fa\u05fb\5\u00be`\2\u05fb\u05fe\5|?\2\u05fc") - buf.write("\u05fd\7\u00df\2\2\u05fd\u05ff\5X-\2\u05fe\u05fc\3\2\2") - buf.write("\2\u05fe\u05ff\3\2\2\2\u05ffw\3\2\2\2\u0600\u0604\5\u0080") - buf.write("A\2\u0601\u0604\5\u0084C\2\u0602\u0604\5\u0082B\2\u0603") - buf.write("\u0600\3\2\2\2\u0603\u0601\3\2\2\2\u0603\u0602\3\2\2\2") - buf.write("\u0604y\3\2\2\2\u0605\u0608\5\u0082B\2\u0606\u0608\5\u0080") - buf.write("A\2\u0607\u0605\3\2\2\2\u0607\u0606\3\2\2\2\u0608{\3\2") - buf.write("\2\2\u0609\u060f\5\u0080A\2\u060a\u060f\5\u0084C\2\u060b") - buf.write("\u060f\5\u0088E\2\u060c\u060f\5~@\2\u060d\u060f\5\u0082") - buf.write("B\2\u060e\u0609\3\2\2\2\u060e\u060a\3\2\2\2\u060e\u060b") - buf.write("\3\2\2\2\u060e\u060c\3\2\2\2\u060e\u060d\3\2\2\2\u060f") - buf.write("}\3\2\2\2\u0610\u0614\7\u0081\2\2\u0611\u0614\5\u008a") - buf.write("F\2\u0612\u0614\5\u008cG\2\u0613\u0610\3\2\2\2\u0613\u0611") - buf.write("\3\2\2\2\u0613\u0612\3\2\2\2\u0614\177\3\2\2\2\u0615\u0618") - buf.write("\5\u00dco\2\u0616\u0618\5\u008eH\2\u0617\u0615\3\2\2\2") - buf.write("\u0617\u0616\3\2\2\2\u0618\u061a\3\2\2\2\u0619\u061b\5") - buf.write("\u00a8U\2\u061a\u0619\3\2\2\2\u061a\u061b\3\2\2\2\u061b") - buf.write("\u0620\3\2\2\2\u061c\u061e\7\64\2\2\u061d\u061c\3\2\2") - 
buf.write("\2\u061d\u061e\3\2\2\2\u061e\u061f\3\2\2\2\u061f\u0621") - buf.write("\78\2\2\u0620\u061d\3\2\2\2\u0620\u0621\3\2\2\2\u0621") - buf.write("\u0081\3\2\2\2\u0622\u0627\5\u00d0i\2\u0623\u0624\7\n") - buf.write("\2\2\u0624\u0625\5\u0080A\2\u0625\u0626\7\13\2\2\u0626") - buf.write("\u0628\3\2\2\2\u0627\u0623\3\2\2\2\u0627\u0628\3\2\2\2") - buf.write("\u0628\u0083\3\2\2\2\u0629\u0635\7{\2\2\u062a\u062b\7") - buf.write("\7\2\2\u062b\u0630\5\u00aaV\2\u062c\u062d\7\23\2\2\u062d") - buf.write("\u062f\5\u00aaV\2\u062e\u062c\3\2\2\2\u062f\u0632\3\2") - buf.write("\2\2\u0630\u062e\3\2\2\2\u0630\u0631\3\2\2\2\u0631\u0633") - buf.write("\3\2\2\2\u0632\u0630\3\2\2\2\u0633\u0634\7\b\2\2\u0634") - buf.write("\u0636\3\2\2\2\u0635\u062a\3\2\2\2\u0635\u0636\3\2\2\2") - buf.write("\u0636\u0085\3\2\2\2\u0637\u063a\5\u0084C\2\u0638\u063a") - buf.write("\5\u0080A\2\u0639\u0637\3\2\2\2\u0639\u0638\3\2\2\2\u063a") - buf.write("\u0087\3\2\2\2\u063b\u0640\7\u00f1\2\2\u063c\u063d\7\n") - buf.write("\2\2\u063d\u063e\5\u0080A\2\u063e\u063f\7\13\2\2\u063f") - buf.write("\u0641\3\2\2\2\u0640\u063c\3\2\2\2\u0640\u0641\3\2\2\2") - buf.write("\u0641\u0089\3\2\2\2\u0642\u0660\7\177\2\2\u0643\u064f") - buf.write("\7\u00ed\2\2\u0644\u0645\7\7\2\2\u0645\u064a\5\u008eH") - buf.write("\2\u0646\u0647\7\21\2\2\u0647\u0649\5\u008eH\2\u0648\u0646") - buf.write("\3\2\2\2\u0649\u064c\3\2\2\2\u064a\u0648\3\2\2\2\u064a") - buf.write("\u064b\3\2\2\2\u064b\u064d\3\2\2\2\u064c\u064a\3\2\2\2") - buf.write("\u064d\u064e\7\b\2\2\u064e\u0650\3\2\2\2\u064f\u0644\3") - buf.write("\2\2\2\u064f\u0650\3\2\2\2\u0650\u0660\3\2\2\2\u0651\u065d") - buf.write("\7\u00ee\2\2\u0652\u0653\7\7\2\2\u0653\u0658\5\u00be`") - buf.write("\2\u0654\u0655\7\21\2\2\u0655\u0657\5\u00be`\2\u0656\u0654") - buf.write("\3\2\2\2\u0657\u065a\3\2\2\2\u0658\u0656\3\2\2\2\u0658") - buf.write("\u0659\3\2\2\2\u0659\u065b\3\2\2\2\u065a\u0658\3\2\2\2") - buf.write("\u065b\u065c\7\b\2\2\u065c\u065e\3\2\2\2\u065d\u0652\3") - 
buf.write("\2\2\2\u065d\u065e\3\2\2\2\u065e\u0660\3\2\2\2\u065f\u0642") - buf.write("\3\2\2\2\u065f\u0643\3\2\2\2\u065f\u0651\3\2\2\2\u0660") - buf.write("\u008b\3\2\2\2\u0661\u068a\7\u0080\2\2\u0662\u0673\7\u00ef") - buf.write("\2\2\u0663\u0664\7\7\2\2\u0664\u0670\7\u00f7\2\2\u0665") - buf.write("\u0666\7\3\2\2\u0666\u066b\5\u008eH\2\u0667\u0668\7\21") - buf.write("\2\2\u0668\u066a\5\u008eH\2\u0669\u0667\3\2\2\2\u066a") - buf.write("\u066d\3\2\2\2\u066b\u0669\3\2\2\2\u066b\u066c\3\2\2\2") - buf.write("\u066c\u066e\3\2\2\2\u066d\u066b\3\2\2\2\u066e\u066f\7") - buf.write("\4\2\2\u066f\u0671\3\2\2\2\u0670\u0665\3\2\2\2\u0670\u0671") - buf.write("\3\2\2\2\u0671\u0672\3\2\2\2\u0672\u0674\7\b\2\2\u0673") - buf.write("\u0663\3\2\2\2\u0673\u0674\3\2\2\2\u0674\u068a\3\2\2\2") - buf.write("\u0675\u0687\7\u00f0\2\2\u0676\u0677\7\7\2\2\u0677\u0683") - buf.write("\5\u00be`\2\u0678\u0679\7\3\2\2\u0679\u067e\5\u00be`\2") - buf.write("\u067a\u067b\7\21\2\2\u067b\u067d\5\u00be`\2\u067c\u067a") - buf.write("\3\2\2\2\u067d\u0680\3\2\2\2\u067e\u067c\3\2\2\2\u067e") - buf.write("\u067f\3\2\2\2\u067f\u0681\3\2\2\2\u0680\u067e\3\2\2\2") - buf.write("\u0681\u0682\7\4\2\2\u0682\u0684\3\2\2\2\u0683\u0678\3") - buf.write("\2\2\2\u0683\u0684\3\2\2\2\u0684\u0685\3\2\2\2\u0685\u0686") - buf.write("\7\b\2\2\u0686\u0688\3\2\2\2\u0687\u0676\3\2\2\2\u0687") - buf.write("\u0688\3\2\2\2\u0688\u068a\3\2\2\2\u0689\u0661\3\2\2\2") - buf.write("\u0689\u0662\3\2\2\2\u0689\u0675\3\2\2\2\u068a\u008d\3") - buf.write("\2\2\2\u068b\u068c\7\u00f7\2\2\u068c\u008f\3\2\2\2\u068d") - buf.write("\u068e\7\u00f7\2\2\u068e\u0091\3\2\2\2\u068f\u0690\t\33") - buf.write("\2\2\u0690\u0695\5\u0094K\2\u0691\u0692\7\23\2\2\u0692") - buf.write("\u0694\5\u0094K\2\u0693\u0691\3\2\2\2\u0694\u0697\3\2") - buf.write("\2\2\u0695\u0693\3\2\2\2\u0695\u0696\3\2\2\2\u0696\u0093") - buf.write("\3\2\2\2\u0697\u0695\3\2\2\2\u0698\u069b\5\u00be`\2\u0699") - buf.write("\u069a\7\60\2\2\u069a\u069c\5\u00bc_\2\u069b\u0699\3\2") - 
buf.write("\2\2\u069b\u069c\3\2\2\2\u069c\u0095\3\2\2\2\u069d\u06a2") - buf.write("\5\u0098M\2\u069e\u069f\7\u00f9\2\2\u069f\u06a1\5\u0098") - buf.write("M\2\u06a0\u069e\3\2\2\2\u06a1\u06a4\3\2\2\2\u06a2\u06a0") - buf.write("\3\2\2\2\u06a2\u06a3\3\2\2\2\u06a3\u0097\3\2\2\2\u06a4") - buf.write("\u06a2\3\2\2\2\u06a5\u06a6\7\u00f7\2\2\u06a6\u06a8\7\25") - buf.write("\2\2\u06a7\u06a5\3\2\2\2\u06a7\u06a8\3\2\2\2\u06a8\u06ad") - buf.write("\3\2\2\2\u06a9\u06aa\7\u00bb\2\2\u06aa\u06ab\5\b\5\2\u06ab") - buf.write("\u06ac\7\33\2\2\u06ac\u06ae\3\2\2\2\u06ad\u06a9\3\2\2") - buf.write("\2\u06ad\u06ae\3\2\2\2\u06ae\u06af\3\2\2\2\u06af\u06b1") - buf.write("\5\b\5\2\u06b0\u06b2\5\u00c6d\2\u06b1\u06b0\3\2\2\2\u06b1") - buf.write("\u06b2\3\2\2\2\u06b2\u06b4\3\2\2\2\u06b3\u06b5\5\u00c8") - buf.write("e\2\u06b4\u06b3\3\2\2\2\u06b4\u06b5\3\2\2\2\u06b5\u0099") - buf.write("\3\2\2\2\u06b6\u06bb\5\u009cO\2\u06b7\u06b8\7\u00f9\2") - buf.write("\2\u06b8\u06ba\5\u009cO\2\u06b9\u06b7\3\2\2\2\u06ba\u06bd") - buf.write("\3\2\2\2\u06bb\u06b9\3\2\2\2\u06bb\u06bc\3\2\2\2\u06bc") - buf.write("\u009b\3\2\2\2\u06bd\u06bb\3\2\2\2\u06be\u06bf\7\u00f7") - buf.write("\2\2\u06bf\u06c1\7\25\2\2\u06c0\u06be\3\2\2\2\u06c0\u06c1") - buf.write("\3\2\2\2\u06c1\u06c2\3\2\2\2\u06c2\u06c4\5\u00a2R\2\u06c3") - buf.write("\u06c5\5\u00c6d\2\u06c4\u06c3\3\2\2\2\u06c4\u06c5\3\2") - buf.write("\2\2\u06c5\u06c7\3\2\2\2\u06c6\u06c8\5\u00c8e\2\u06c7") - buf.write("\u06c6\3\2\2\2\u06c7\u06c8\3\2\2\2\u06c8\u009d\3\2\2\2") - buf.write("\u06c9\u06cc\t\33\2\2\u06ca\u06cb\7\u00a9\2\2\u06cb\u06cd") - buf.write("\5\u00a0Q\2\u06cc\u06ca\3\2\2\2\u06cc\u06cd\3\2\2\2\u06cd") - buf.write("\u06ce\3\2\2\2\u06ce\u06cf\7\u0082\2\2\u06cf\u06d0\7\u00f7") - buf.write("\2\2\u06d0\u009f\3\2\2\2\u06d1\u06d6\5\u0094K\2\u06d2") - buf.write("\u06d3\7\23\2\2\u06d3\u06d5\5\u0094K\2\u06d4\u06d2\3\2") - buf.write("\2\2\u06d5\u06d8\3\2\2\2\u06d6\u06d4\3\2\2\2\u06d6\u06d7") - buf.write("\3\2\2\2\u06d7\u00a1\3\2\2\2\u06d8\u06d6\3\2\2\2\u06d9") - 
buf.write("\u06da\7\u00bb\2\2\u06da\u06db\5\b\5\2\u06db\u06dc\7\33") - buf.write("\2\2\u06dc\u06de\3\2\2\2\u06dd\u06d9\3\2\2\2\u06dd\u06de") - buf.write("\3\2\2\2\u06de\u06df\3\2\2\2\u06df\u06e1\5\u00a6T\2\u06e0") - buf.write("\u06e2\5\u00caf\2\u06e1\u06e0\3\2\2\2\u06e1\u06e2\3\2") - buf.write("\2\2\u06e2\u06e3\3\2\2\2\u06e3\u06e7\5\u00a4S\2\u06e4") - buf.write("\u06e6\5\u00a4S\2\u06e5\u06e4\3\2\2\2\u06e6\u06e9\3\2") - buf.write("\2\2\u06e7\u06e5\3\2\2\2\u06e7\u06e8\3\2\2\2\u06e8\u00a3") - buf.write("\3\2\2\2\u06e9\u06e7\3\2\2\2\u06ea\u06ec\t\31\2\2\u06eb") - buf.write("\u06ea\3\2\2\2\u06eb\u06ec\3\2\2\2\u06ec\u06ed\3\2\2\2") - buf.write("\u06ed\u06f2\5\u00a6T\2\u06ee\u06ef\7\5\2\2\u06ef\u06f0") - buf.write("\5\b\5\2\u06f0\u06f1\7\6\2\2\u06f1\u06f3\3\2\2\2\u06f2") - buf.write("\u06ee\3\2\2\2\u06f2\u06f3\3\2\2\2\u06f3\u00a5\3\2\2\2") - buf.write("\u06f4\u06f8\7\u00f7\2\2\u06f5\u06f8\5l\67\2\u06f6\u06f8") - buf.write("\5n8\2\u06f7\u06f4\3\2\2\2\u06f7\u06f5\3\2\2\2\u06f7\u06f6") - buf.write("\3\2\2\2\u06f8\u00a7\3\2\2\2\u06f9\u06fa\7\5\2\2\u06fa") - buf.write("\u06fb\5\b\5\2\u06fb\u06fc\7\6\2\2\u06fc\u0709\3\2\2\2") - buf.write("\u06fd\u06fe\7\7\2\2\u06fe\u0703\5X-\2\u06ff\u0700\7\23") - buf.write("\2\2\u0700\u0702\5X-\2\u0701\u06ff\3\2\2\2\u0702\u0705") - buf.write("\3\2\2\2\u0703\u0701\3\2\2\2\u0703\u0704\3\2\2\2\u0704") - buf.write("\u0706\3\2\2\2\u0705\u0703\3\2\2\2\u0706\u0707\7\b\2\2") - buf.write("\u0707\u0709\3\2\2\2\u0708\u06f9\3\2\2\2\u0708\u06fd\3") - buf.write("\2\2\2\u0709\u00a9\3\2\2\2\u070a\u070d\5\u0082B\2\u070b") - buf.write("\u070e\5\u00c2b\2\u070c\u070e\5\u00acW\2\u070d\u070b\3") - buf.write("\2\2\2\u070d\u070c\3\2\2\2\u070e\u00ab\3\2\2\2\u070f\u0711") - buf.write("\7u\2\2\u0710\u0712\t\34\2\2\u0711\u0710\3\2\2\2\u0711") - buf.write("\u0712\3\2\2\2\u0712\u00ad\3\2\2\2\u0713\u0714\t\35\2") - buf.write("\2\u0714\u00af\3\2\2\2\u0715\u0716\t\36\2\2\u0716\u00b1") - buf.write("\3\2\2\2\u0717\u0718\7\u00a9\2\2\u0718\u071d\5\u00c2b") - 
buf.write("\2\u0719\u071a\7\23\2\2\u071a\u071c\5\u00c2b\2\u071b\u0719") - buf.write("\3\2\2\2\u071c\u071f\3\2\2\2\u071d\u071b\3\2\2\2\u071d") - buf.write("\u071e\3\2\2\2\u071e\u00b3\3\2\2\2\u071f\u071d\3\2\2\2") - buf.write("\u0720\u0721\t\37\2\2\u0721\u00b5\3\2\2\2\u0722\u0723") - buf.write("\7H\2\2\u0723\u0724\5\6\4\2\u0724\u00b7\3\2\2\2\u0725") - buf.write("\u0726\t \2\2\u0726\u00b9\3\2\2\2\u0727\u0728\t!\2\2\u0728") - buf.write("\u00bb\3\2\2\2\u0729\u072a\7\u00f7\2\2\u072a\u00bd\3\2") - buf.write("\2\2\u072b\u072c\7\u00f7\2\2\u072c\u00bf\3\2\2\2\u072d") - buf.write("\u072e\7\u00f7\2\2\u072e\u00c1\3\2\2\2\u072f\u0732\7\u00f7") - buf.write("\2\2\u0730\u0731\7\27\2\2\u0731\u0733\7\u00f7\2\2\u0732") - buf.write("\u0730\3\2\2\2\u0732\u0733\3\2\2\2\u0733\u00c3\3\2\2\2") - buf.write("\u0734\u0735\7\7\2\2\u0735\u073a\5X-\2\u0736\u0737\7\23") - buf.write("\2\2\u0737\u0739\5X-\2\u0738\u0736\3\2\2\2\u0739\u073c") - buf.write("\3\2\2\2\u073a\u0738\3\2\2\2\u073a\u073b\3\2\2\2\u073b") - buf.write("\u073d\3\2\2\2\u073c\u073a\3\2\2\2\u073d\u073e\7\b\2\2") - buf.write("\u073e\u00c5\3\2\2\2\u073f\u0740\7I\2\2\u0740\u0741\5") - buf.write("\u00dan\2\u0741\u00c7\3\2\2\2\u0742\u0743\7L\2\2\u0743") - buf.write('\u0744\5\u00dan\2\u0744\u00c9\3\2\2\2\u0745\u0746\t"') - buf.write("\2\2\u0746\u00cb\3\2\2\2\u0747\u074a\5\6\4\2\u0748\u074a") - buf.write("\7u\2\2\u0749\u0747\3\2\2\2\u0749\u0748\3\2\2\2\u074a") - buf.write("\u00cd\3\2\2\2\u074b\u074e\5\b\5\2\u074c\u074e\7u\2\2") - buf.write("\u074d\u074b\3\2\2\2\u074d\u074c\3\2\2\2\u074e\u00cf\3") - buf.write("\2\2\2\u074f\u0755\7j\2\2\u0750\u0755\7\u00ec\2\2\u0751") - buf.write("\u0755\7i\2\2\u0752\u0755\7k\2\2\u0753\u0755\5\u00d2j") - buf.write("\2\u0754\u074f\3\2\2\2\u0754\u0750\3\2\2\2\u0754\u0751") - buf.write("\3\2\2\2\u0754\u0752\3\2\2\2\u0754\u0753\3\2\2\2\u0755") - buf.write("\u00d1\3\2\2\2\u0756\u0757\7p\2\2\u0757\u0758\7k\2\2\u0758") - buf.write("\u00d3\3\2\2\2\u0759\u075a\7\u00f7\2\2\u075a\u00d5\3\2") - 
buf.write("\2\2\u075b\u075c\7\u00f7\2\2\u075c\u00d7\3\2\2\2\u075d") - buf.write("\u075e\7\u00f7\2\2\u075e\u00d9\3\2\2\2\u075f\u0765\5l") - buf.write("\67\2\u0760\u0765\5n8\2\u0761\u0765\7\u00f5\2\2\u0762") - buf.write("\u0765\7\u00f6\2\2\u0763\u0765\78\2\2\u0764\u075f\3\2") - buf.write("\2\2\u0764\u0760\3\2\2\2\u0764\u0761\3\2\2\2\u0764\u0762") - buf.write("\3\2\2\2\u0764\u0763\3\2\2\2\u0765\u00db\3\2\2\2\u0766") - buf.write("\u0767\t#\2\2\u0767\u00dd\3\2\2\2\u0768\u0769\t$\2\2\u0769") - buf.write("\u00df\3\2\2\2\u00ce\u00e5\u00f3\u010c\u0113\u0131\u0133") + buf.write("\2\u05e6\u05e7\7\23\2\2\u05e7\u05e9\t\22\2\2\u05e8\u05e6") + buf.write("\3\2\2\2\u05e8\u05e9\3\2\2\2\u05e9\u05ea\3\2\2\2\u05ea") + buf.write("\u05ec\7\4\2\2\u05eb\u05e3\3\2\2\2\u05eb\u05ec\3\2\2\2") + buf.write("\u05ec\u05ee\3\2\2\2\u05ed\u05cd\3\2\2\2\u05ed\u05e1\3") + buf.write("\2\2\2\u05ees\3\2\2\2\u05ef\u05f0\7\u0094\2\2\u05f0\u05f1") + buf.write("\5\b\5\2\u05f1u\3\2\2\2\u05f2\u05f3\5\u00be`\2\u05f3\u05f6") + buf.write("\5|?\2\u05f4\u05f5\7\u00df\2\2\u05f5\u05f7\5X-\2\u05f6") + buf.write("\u05f4\3\2\2\2\u05f6\u05f7\3\2\2\2\u05f7w\3\2\2\2\u05f8") + buf.write("\u05fc\5\u0080A\2\u05f9\u05fc\5\u0084C\2\u05fa\u05fc\5") + buf.write("\u0082B\2\u05fb\u05f8\3\2\2\2\u05fb\u05f9\3\2\2\2\u05fb") + buf.write("\u05fa\3\2\2\2\u05fcy\3\2\2\2\u05fd\u0600\5\u0082B\2\u05fe") + buf.write("\u0600\5\u0080A\2\u05ff\u05fd\3\2\2\2\u05ff\u05fe\3\2") + buf.write("\2\2\u0600{\3\2\2\2\u0601\u0607\5\u0080A\2\u0602\u0607") + buf.write("\5\u0084C\2\u0603\u0607\5\u0088E\2\u0604\u0607\5~@\2\u0605") + buf.write("\u0607\5\u0082B\2\u0606\u0601\3\2\2\2\u0606\u0602\3\2") + buf.write("\2\2\u0606\u0603\3\2\2\2\u0606\u0604\3\2\2\2\u0606\u0605") + buf.write("\3\2\2\2\u0607}\3\2\2\2\u0608\u060c\7\u0081\2\2\u0609") + buf.write("\u060c\5\u008aF\2\u060a\u060c\5\u008cG\2\u060b\u0608\3") + buf.write("\2\2\2\u060b\u0609\3\2\2\2\u060b\u060a\3\2\2\2\u060c\177") + buf.write("\3\2\2\2\u060d\u0610\5\u00dco\2\u060e\u0610\5\u008eH\2") + 
buf.write("\u060f\u060d\3\2\2\2\u060f\u060e\3\2\2\2\u0610\u0612\3") + buf.write("\2\2\2\u0611\u0613\5\u00a8U\2\u0612\u0611\3\2\2\2\u0612") + buf.write("\u0613\3\2\2\2\u0613\u0618\3\2\2\2\u0614\u0616\7\64\2") + buf.write("\2\u0615\u0614\3\2\2\2\u0615\u0616\3\2\2\2\u0616\u0617") + buf.write("\3\2\2\2\u0617\u0619\78\2\2\u0618\u0615\3\2\2\2\u0618") + buf.write("\u0619\3\2\2\2\u0619\u0081\3\2\2\2\u061a\u061f\5\u00d0") + buf.write("i\2\u061b\u061c\7\n\2\2\u061c\u061d\5\u0080A\2\u061d\u061e") + buf.write("\7\13\2\2\u061e\u0620\3\2\2\2\u061f\u061b\3\2\2\2\u061f") + buf.write("\u0620\3\2\2\2\u0620\u0083\3\2\2\2\u0621\u062d\7{\2\2") + buf.write("\u0622\u0623\7\7\2\2\u0623\u0628\5\u00aaV\2\u0624\u0625") + buf.write("\7\23\2\2\u0625\u0627\5\u00aaV\2\u0626\u0624\3\2\2\2\u0627") + buf.write("\u062a\3\2\2\2\u0628\u0626\3\2\2\2\u0628\u0629\3\2\2\2") + buf.write("\u0629\u062b\3\2\2\2\u062a\u0628\3\2\2\2\u062b\u062c\7") + buf.write("\b\2\2\u062c\u062e\3\2\2\2\u062d\u0622\3\2\2\2\u062d\u062e") + buf.write("\3\2\2\2\u062e\u0085\3\2\2\2\u062f\u0632\5\u0084C\2\u0630") + buf.write("\u0632\5\u0080A\2\u0631\u062f\3\2\2\2\u0631\u0630\3\2") + buf.write("\2\2\u0632\u0087\3\2\2\2\u0633\u0638\7\u00f1\2\2\u0634") + buf.write("\u0635\7\n\2\2\u0635\u0636\5\u0080A\2\u0636\u0637\7\13") + buf.write("\2\2\u0637\u0639\3\2\2\2\u0638\u0634\3\2\2\2\u0638\u0639") + buf.write("\3\2\2\2\u0639\u0089\3\2\2\2\u063a\u0658\7\177\2\2\u063b") + buf.write("\u0647\7\u00ed\2\2\u063c\u063d\7\7\2\2\u063d\u0642\5\u008e") + buf.write("H\2\u063e\u063f\7\21\2\2\u063f\u0641\5\u008eH\2\u0640") + buf.write("\u063e\3\2\2\2\u0641\u0644\3\2\2\2\u0642\u0640\3\2\2\2") + buf.write("\u0642\u0643\3\2\2\2\u0643\u0645\3\2\2\2\u0644\u0642\3") + buf.write("\2\2\2\u0645\u0646\7\b\2\2\u0646\u0648\3\2\2\2\u0647\u063c") + buf.write("\3\2\2\2\u0647\u0648\3\2\2\2\u0648\u0658\3\2\2\2\u0649") + buf.write("\u0655\7\u00ee\2\2\u064a\u064b\7\7\2\2\u064b\u0650\5\u00be") + buf.write("`\2\u064c\u064d\7\21\2\2\u064d\u064f\5\u00be`\2\u064e") + 
buf.write("\u064c\3\2\2\2\u064f\u0652\3\2\2\2\u0650\u064e\3\2\2\2") + buf.write("\u0650\u0651\3\2\2\2\u0651\u0653\3\2\2\2\u0652\u0650\3") + buf.write("\2\2\2\u0653\u0654\7\b\2\2\u0654\u0656\3\2\2\2\u0655\u064a") + buf.write("\3\2\2\2\u0655\u0656\3\2\2\2\u0656\u0658\3\2\2\2\u0657") + buf.write("\u063a\3\2\2\2\u0657\u063b\3\2\2\2\u0657\u0649\3\2\2\2") + buf.write("\u0658\u008b\3\2\2\2\u0659\u0682\7\u0080\2\2\u065a\u066b") + buf.write("\7\u00ef\2\2\u065b\u065c\7\7\2\2\u065c\u0668\7\u00f7\2") + buf.write("\2\u065d\u065e\7\3\2\2\u065e\u0663\5\u008eH\2\u065f\u0660") + buf.write("\7\21\2\2\u0660\u0662\5\u008eH\2\u0661\u065f\3\2\2\2\u0662") + buf.write("\u0665\3\2\2\2\u0663\u0661\3\2\2\2\u0663\u0664\3\2\2\2") + buf.write("\u0664\u0666\3\2\2\2\u0665\u0663\3\2\2\2\u0666\u0667\7") + buf.write("\4\2\2\u0667\u0669\3\2\2\2\u0668\u065d\3\2\2\2\u0668\u0669") + buf.write("\3\2\2\2\u0669\u066a\3\2\2\2\u066a\u066c\7\b\2\2\u066b") + buf.write("\u065b\3\2\2\2\u066b\u066c\3\2\2\2\u066c\u0682\3\2\2\2") + buf.write("\u066d\u067f\7\u00f0\2\2\u066e\u066f\7\7\2\2\u066f\u067b") + buf.write("\5\u00be`\2\u0670\u0671\7\3\2\2\u0671\u0676\5\u00be`\2") + buf.write("\u0672\u0673\7\21\2\2\u0673\u0675\5\u00be`\2\u0674\u0672") + buf.write("\3\2\2\2\u0675\u0678\3\2\2\2\u0676\u0674\3\2\2\2\u0676") + buf.write("\u0677\3\2\2\2\u0677\u0679\3\2\2\2\u0678\u0676\3\2\2\2") + buf.write("\u0679\u067a\7\4\2\2\u067a\u067c\3\2\2\2\u067b\u0670\3") + buf.write("\2\2\2\u067b\u067c\3\2\2\2\u067c\u067d\3\2\2\2\u067d\u067e") + buf.write("\7\b\2\2\u067e\u0680\3\2\2\2\u067f\u066e\3\2\2\2\u067f") + buf.write("\u0680\3\2\2\2\u0680\u0682\3\2\2\2\u0681\u0659\3\2\2\2") + buf.write("\u0681\u065a\3\2\2\2\u0681\u066d\3\2\2\2\u0682\u008d\3") + buf.write("\2\2\2\u0683\u0684\7\u00f7\2\2\u0684\u008f\3\2\2\2\u0685") + buf.write("\u0686\7\u00f7\2\2\u0686\u0091\3\2\2\2\u0687\u0688\t\33") + buf.write("\2\2\u0688\u068d\5\u0094K\2\u0689\u068a\7\23\2\2\u068a") + buf.write("\u068c\5\u0094K\2\u068b\u0689\3\2\2\2\u068c\u068f\3\2") + 
buf.write("\2\2\u068d\u068b\3\2\2\2\u068d\u068e\3\2\2\2\u068e\u0093") + buf.write("\3\2\2\2\u068f\u068d\3\2\2\2\u0690\u0693\5\u00be`\2\u0691") + buf.write("\u0692\7\60\2\2\u0692\u0694\5\u00bc_\2\u0693\u0691\3\2") + buf.write("\2\2\u0693\u0694\3\2\2\2\u0694\u0095\3\2\2\2\u0695\u069a") + buf.write("\5\u0098M\2\u0696\u0697\7\u00f9\2\2\u0697\u0699\5\u0098") + buf.write("M\2\u0698\u0696\3\2\2\2\u0699\u069c\3\2\2\2\u069a\u0698") + buf.write("\3\2\2\2\u069a\u069b\3\2\2\2\u069b\u0097\3\2\2\2\u069c") + buf.write("\u069a\3\2\2\2\u069d\u069e\7\u00f7\2\2\u069e\u06a0\7\25") + buf.write("\2\2\u069f\u069d\3\2\2\2\u069f\u06a0\3\2\2\2\u06a0\u06a5") + buf.write("\3\2\2\2\u06a1\u06a2\7\u00bb\2\2\u06a2\u06a3\5\b\5\2\u06a3") + buf.write("\u06a4\7\33\2\2\u06a4\u06a6\3\2\2\2\u06a5\u06a1\3\2\2") + buf.write("\2\u06a5\u06a6\3\2\2\2\u06a6\u06a7\3\2\2\2\u06a7\u06a9") + buf.write("\5\b\5\2\u06a8\u06aa\5\u00c6d\2\u06a9\u06a8\3\2\2\2\u06a9") + buf.write("\u06aa\3\2\2\2\u06aa\u06ac\3\2\2\2\u06ab\u06ad\5\u00c8") + buf.write("e\2\u06ac\u06ab\3\2\2\2\u06ac\u06ad\3\2\2\2\u06ad\u0099") + buf.write("\3\2\2\2\u06ae\u06b3\5\u009cO\2\u06af\u06b0\7\u00f9\2") + buf.write("\2\u06b0\u06b2\5\u009cO\2\u06b1\u06af\3\2\2\2\u06b2\u06b5") + buf.write("\3\2\2\2\u06b3\u06b1\3\2\2\2\u06b3\u06b4\3\2\2\2\u06b4") + buf.write("\u009b\3\2\2\2\u06b5\u06b3\3\2\2\2\u06b6\u06b7\7\u00f7") + buf.write("\2\2\u06b7\u06b9\7\25\2\2\u06b8\u06b6\3\2\2\2\u06b8\u06b9") + buf.write("\3\2\2\2\u06b9\u06ba\3\2\2\2\u06ba\u06bc\5\u00a2R\2\u06bb") + buf.write("\u06bd\5\u00c6d\2\u06bc\u06bb\3\2\2\2\u06bc\u06bd\3\2") + buf.write("\2\2\u06bd\u06bf\3\2\2\2\u06be\u06c0\5\u00c8e\2\u06bf") + buf.write("\u06be\3\2\2\2\u06bf\u06c0\3\2\2\2\u06c0\u009d\3\2\2\2") + buf.write("\u06c1\u06c4\t\33\2\2\u06c2\u06c3\7\u00a9\2\2\u06c3\u06c5") + buf.write("\5\u00a0Q\2\u06c4\u06c2\3\2\2\2\u06c4\u06c5\3\2\2\2\u06c5") + buf.write("\u06c6\3\2\2\2\u06c6\u06c7\7\u0082\2\2\u06c7\u06c8\7\u00f7") + buf.write("\2\2\u06c8\u009f\3\2\2\2\u06c9\u06ce\5\u0094K\2\u06ca") + 
buf.write("\u06cb\7\23\2\2\u06cb\u06cd\5\u0094K\2\u06cc\u06ca\3\2") + buf.write("\2\2\u06cd\u06d0\3\2\2\2\u06ce\u06cc\3\2\2\2\u06ce\u06cf") + buf.write("\3\2\2\2\u06cf\u00a1\3\2\2\2\u06d0\u06ce\3\2\2\2\u06d1") + buf.write("\u06d2\7\u00bb\2\2\u06d2\u06d3\5\b\5\2\u06d3\u06d4\7\33") + buf.write("\2\2\u06d4\u06d6\3\2\2\2\u06d5\u06d1\3\2\2\2\u06d5\u06d6") + buf.write("\3\2\2\2\u06d6\u06d7\3\2\2\2\u06d7\u06d9\5\u00a6T\2\u06d8") + buf.write("\u06da\5\u00caf\2\u06d9\u06d8\3\2\2\2\u06d9\u06da\3\2") + buf.write("\2\2\u06da\u06db\3\2\2\2\u06db\u06df\5\u00a4S\2\u06dc") + buf.write("\u06de\5\u00a4S\2\u06dd\u06dc\3\2\2\2\u06de\u06e1\3\2") + buf.write("\2\2\u06df\u06dd\3\2\2\2\u06df\u06e0\3\2\2\2\u06e0\u00a3") + buf.write("\3\2\2\2\u06e1\u06df\3\2\2\2\u06e2\u06e4\t\31\2\2\u06e3") + buf.write("\u06e2\3\2\2\2\u06e3\u06e4\3\2\2\2\u06e4\u06e5\3\2\2\2") + buf.write("\u06e5\u06ea\5\u00a6T\2\u06e6\u06e7\7\5\2\2\u06e7\u06e8") + buf.write("\5\b\5\2\u06e8\u06e9\7\6\2\2\u06e9\u06eb\3\2\2\2\u06ea") + buf.write("\u06e6\3\2\2\2\u06ea\u06eb\3\2\2\2\u06eb\u00a5\3\2\2\2") + buf.write("\u06ec\u06f0\7\u00f7\2\2\u06ed\u06f0\5l\67\2\u06ee\u06f0") + buf.write("\5n8\2\u06ef\u06ec\3\2\2\2\u06ef\u06ed\3\2\2\2\u06ef\u06ee") + buf.write("\3\2\2\2\u06f0\u00a7\3\2\2\2\u06f1\u06f2\7\5\2\2\u06f2") + buf.write("\u06f3\5\b\5\2\u06f3\u06f4\7\6\2\2\u06f4\u0701\3\2\2\2") + buf.write("\u06f5\u06f6\7\7\2\2\u06f6\u06fb\5X-\2\u06f7\u06f8\7\23") + buf.write("\2\2\u06f8\u06fa\5X-\2\u06f9\u06f7\3\2\2\2\u06fa\u06fd") + buf.write("\3\2\2\2\u06fb\u06f9\3\2\2\2\u06fb\u06fc\3\2\2\2\u06fc") + buf.write("\u06fe\3\2\2\2\u06fd\u06fb\3\2\2\2\u06fe\u06ff\7\b\2\2") + buf.write("\u06ff\u0701\3\2\2\2\u0700\u06f1\3\2\2\2\u0700\u06f5\3") + buf.write("\2\2\2\u0701\u00a9\3\2\2\2\u0702\u0705\5\u0082B\2\u0703") + buf.write("\u0706\5\u00c2b\2\u0704\u0706\5\u00acW\2\u0705\u0703\3") + buf.write("\2\2\2\u0705\u0704\3\2\2\2\u0706\u00ab\3\2\2\2\u0707\u0709") + buf.write("\7u\2\2\u0708\u070a\t\34\2\2\u0709\u0708\3\2\2\2\u0709") + 
buf.write("\u070a\3\2\2\2\u070a\u00ad\3\2\2\2\u070b\u070c\t\35\2") + buf.write("\2\u070c\u00af\3\2\2\2\u070d\u070e\t\36\2\2\u070e\u00b1") + buf.write("\3\2\2\2\u070f\u0710\7\u00a9\2\2\u0710\u0715\5\u00c2b") + buf.write("\2\u0711\u0712\7\23\2\2\u0712\u0714\5\u00c2b\2\u0713\u0711") + buf.write("\3\2\2\2\u0714\u0717\3\2\2\2\u0715\u0713\3\2\2\2\u0715") + buf.write("\u0716\3\2\2\2\u0716\u00b3\3\2\2\2\u0717\u0715\3\2\2\2") + buf.write("\u0718\u0719\t\37\2\2\u0719\u00b5\3\2\2\2\u071a\u071b") + buf.write("\7H\2\2\u071b\u071c\5\6\4\2\u071c\u00b7\3\2\2\2\u071d") + buf.write("\u071e\t \2\2\u071e\u00b9\3\2\2\2\u071f\u0720\t!\2\2\u0720") + buf.write("\u00bb\3\2\2\2\u0721\u0722\7\u00f7\2\2\u0722\u00bd\3\2") + buf.write("\2\2\u0723\u0724\7\u00f7\2\2\u0724\u00bf\3\2\2\2\u0725") + buf.write("\u0726\7\u00f7\2\2\u0726\u00c1\3\2\2\2\u0727\u072a\7\u00f7") + buf.write("\2\2\u0728\u0729\7\27\2\2\u0729\u072b\7\u00f7\2\2\u072a") + buf.write("\u0728\3\2\2\2\u072a\u072b\3\2\2\2\u072b\u00c3\3\2\2\2") + buf.write("\u072c\u072d\7\7\2\2\u072d\u0732\5X-\2\u072e\u072f\7\23") + buf.write("\2\2\u072f\u0731\5X-\2\u0730\u072e\3\2\2\2\u0731\u0734") + buf.write("\3\2\2\2\u0732\u0730\3\2\2\2\u0732\u0733\3\2\2\2\u0733") + buf.write("\u0735\3\2\2\2\u0734\u0732\3\2\2\2\u0735\u0736\7\b\2\2") + buf.write("\u0736\u00c5\3\2\2\2\u0737\u0738\7I\2\2\u0738\u0739\5") + buf.write("\u00dan\2\u0739\u00c7\3\2\2\2\u073a\u073b\7L\2\2\u073b") + buf.write('\u073c\5\u00dan\2\u073c\u00c9\3\2\2\2\u073d\u073e\t"') + buf.write("\2\2\u073e\u00cb\3\2\2\2\u073f\u0742\5\6\4\2\u0740\u0742") + buf.write("\7u\2\2\u0741\u073f\3\2\2\2\u0741\u0740\3\2\2\2\u0742") + buf.write("\u00cd\3\2\2\2\u0743\u0746\5\b\5\2\u0744\u0746\7u\2\2") + buf.write("\u0745\u0743\3\2\2\2\u0745\u0744\3\2\2\2\u0746\u00cf\3") + buf.write("\2\2\2\u0747\u074d\7j\2\2\u0748\u074d\7\u00ec\2\2\u0749") + buf.write("\u074d\7i\2\2\u074a\u074d\7k\2\2\u074b\u074d\5\u00d2j") + buf.write("\2\u074c\u0747\3\2\2\2\u074c\u0748\3\2\2\2\u074c\u0749") + 
buf.write("\3\2\2\2\u074c\u074a\3\2\2\2\u074c\u074b\3\2\2\2\u074d") + buf.write("\u00d1\3\2\2\2\u074e\u074f\7p\2\2\u074f\u0750\7k\2\2\u0750") + buf.write("\u00d3\3\2\2\2\u0751\u0752\7\u00f7\2\2\u0752\u00d5\3\2") + buf.write("\2\2\u0753\u0754\7\u00f7\2\2\u0754\u00d7\3\2\2\2\u0755") + buf.write("\u0756\7\u00f7\2\2\u0756\u00d9\3\2\2\2\u0757\u075d\5l") + buf.write("\67\2\u0758\u075d\5n8\2\u0759\u075d\7\u00f5\2\2\u075a") + buf.write("\u075d\7\u00f6\2\2\u075b\u075d\78\2\2\u075c\u0757\3\2") + buf.write("\2\2\u075c\u0758\3\2\2\2\u075c\u0759\3\2\2\2\u075c\u075a") + buf.write("\3\2\2\2\u075c\u075b\3\2\2\2\u075d\u00db\3\2\2\2\u075e") + buf.write("\u075f\t#\2\2\u075f\u00dd\3\2\2\2\u0760\u0761\t$\2\2\u0761") + buf.write("\u00df\3\2\2\2\u00cc\u00e5\u00f3\u010c\u0113\u0131\u0133") buf.write("\u0135\u014f\u0156\u016c\u016e\u0170\u017b\u0189\u0192") buf.write("\u019a\u01a1\u01a3\u01ae\u01b7\u01c9\u01d2\u01e1\u01ec") buf.write("\u01ef\u01f4\u0215\u021e\u0221\u022b\u0230\u0234\u023a") @@ -989,13 +983,13 @@ def serializedATN(): buf.write("\u0510\u0512\u0517\u0523\u0533\u053e\u0542\u0549\u055b") buf.write("\u055f\u0566\u056e\u0577\u057a\u057f\u0582\u0587\u058a") buf.write("\u058d\u0599\u05a3\u05a8\u05ad\u05b5\u05ba\u05cb\u05d4") - buf.write("\u05dc\u05df\u05e8\u05ec\u05f0\u05f3\u05f5\u05fe\u0603") - buf.write("\u0607\u060e\u0613\u0617\u061a\u061d\u0620\u0627\u0630") - buf.write("\u0635\u0639\u0640\u064a\u064f\u0658\u065d\u065f\u066b") - buf.write("\u0670\u0673\u067e\u0683\u0687\u0689\u0695\u069b\u06a2") - buf.write("\u06a7\u06ad\u06b1\u06b4\u06bb\u06c0\u06c4\u06c7\u06cc") - buf.write("\u06d6\u06dd\u06e1\u06e7\u06eb\u06f2\u06f7\u0703\u0708") - buf.write("\u070d\u0711\u071d\u0732\u073a\u0749\u074d\u0754\u0764") + buf.write("\u05dc\u05df\u05e8\u05eb\u05ed\u05f6\u05fb\u05ff\u0606") + buf.write("\u060b\u060f\u0612\u0615\u0618\u061f\u0628\u062d\u0631") + buf.write("\u0638\u0642\u0647\u0650\u0655\u0657\u0663\u0668\u066b") + buf.write("\u0676\u067b\u067f\u0681\u068d\u0693\u069a\u069f\u06a5") + 
buf.write("\u06a9\u06ac\u06b3\u06b8\u06bc\u06bf\u06c4\u06ce\u06d5") + buf.write("\u06d9\u06df\u06e3\u06ea\u06ef\u06fb\u0700\u0705\u0709") + buf.write("\u0715\u072a\u0732\u0741\u0745\u074c\u075c") return buf.getvalue() @@ -1991,7 +1985,7 @@ class Parser(ANTLRParser): def __init__(self, input: TokenStream, output: TextIO = sys.stdout): super().__init__(input, output) - self.checkVersion("4.9.2") + self.checkVersion("4.9.3") self._interp = ParserATNSimulator( self, self.atn, self.decisionsToDFA, self.sharedContextCache ) @@ -2022,14 +2016,6 @@ def EOL(self, i: int = None): def getRuleIndex(self): return Parser.RULE_start - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterStart"): - listener.enterStart(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitStart"): - listener.exitStart(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitStart"): return visitor.visitStart(self) @@ -2086,14 +2072,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Stateme def defOperators(self): return self.getTypedRuleContext(Parser.DefOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDefineExpression"): - listener.enterDefineExpression(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDefineExpression"): - listener.exitDefineExpression(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDefineExpression"): return visitor.visitDefineExpression(self) @@ -2114,14 +2092,6 @@ def ASSIGN(self): def expr(self): return self.getTypedRuleContext(Parser.ExprContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterTemporaryAssignment"): - listener.enterTemporaryAssignment(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitTemporaryAssignment"): - listener.exitTemporaryAssignment(self) - def 
accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitTemporaryAssignment"): return visitor.visitTemporaryAssignment(self) @@ -2142,14 +2112,6 @@ def PUT_SYMBOL(self): def expr(self): return self.getTypedRuleContext(Parser.ExprContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterPersistAssignment"): - listener.enterPersistAssignment(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitPersistAssignment"): - listener.exitPersistAssignment(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitPersistAssignment"): return visitor.visitPersistAssignment(self) @@ -2222,14 +2184,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprCon def varID(self): return self.getTypedRuleContext(Parser.VarIDContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterVarIdExpr"): - listener.enterVarIdExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitVarIdExpr"): - listener.exitVarIdExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitVarIdExpr"): return visitor.visitVarIdExpr(self) @@ -2250,14 +2204,6 @@ def MEMBERSHIP(self): def simpleComponentId(self): return self.getTypedRuleContext(Parser.SimpleComponentIdContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterMembershipExpr"): - listener.enterMembershipExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitMembershipExpr"): - listener.exitMembershipExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitMembershipExpr"): return visitor.visitMembershipExpr(self) @@ -2286,14 +2232,6 @@ def lists(self): def valueDomainID(self): return self.getTypedRuleContext(Parser.ValueDomainIDContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterInNotInExpr"): - 
listener.enterInNotInExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitInNotInExpr"): - listener.exitInNotInExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitInNotInExpr"): return visitor.visitInNotInExpr(self) @@ -2323,14 +2261,6 @@ def OR(self): def XOR(self): return self.getToken(Parser.XOR, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterBooleanExpr"): - listener.enterBooleanExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitBooleanExpr"): - listener.exitBooleanExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitBooleanExpr"): return visitor.visitBooleanExpr(self) @@ -2354,14 +2284,6 @@ def expr(self, i: int = None): def comparisonOperand(self): return self.getTypedRuleContext(Parser.ComparisonOperandContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterComparisonExpr"): - listener.enterComparisonExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitComparisonExpr"): - listener.exitComparisonExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitComparisonExpr"): return visitor.visitComparisonExpr(self) @@ -2387,14 +2309,6 @@ def MINUS(self): def NOT(self): return self.getToken(Parser.NOT, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterUnaryExpr"): - listener.enterUnaryExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitUnaryExpr"): - listener.exitUnaryExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitUnaryExpr"): return visitor.visitUnaryExpr(self) @@ -2409,14 +2323,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprCon def functions(self): return self.getTypedRuleContext(Parser.FunctionsContext, 0) - def enterRule(self, listener: 
ParseTreeListener): - if hasattr(listener, "enterFunctionsExpression"): - listener.enterFunctionsExpression(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitFunctionsExpression"): - listener.exitFunctionsExpression(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitFunctionsExpression"): return visitor.visitFunctionsExpression(self) @@ -2446,14 +2352,6 @@ def expr(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterIfExpr"): - listener.enterIfExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitIfExpr"): - listener.exitIfExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitIfExpr"): return visitor.visitIfExpr(self) @@ -2479,14 +2377,6 @@ def expr(self): def datasetClause(self): return self.getTypedRuleContext(Parser.DatasetClauseContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterClauseExpr"): - listener.enterClauseExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitClauseExpr"): - listener.exitClauseExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitClauseExpr"): return visitor.visitClauseExpr(self) @@ -2497,8 +2387,8 @@ class CaseExprContext(ExprContext): def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprContext super().__init__(parser) self._expr = None # ExprContext - self.condExpr = list() # of ExprContexts - self.thenExpr = list() # of ExprContexts + self.condExpr = [] # of ExprContexts + self.thenExpr = [] # of ExprContexts self.elseExpr = None # ExprContext self.copyFrom(ctx) @@ -2526,14 +2416,6 @@ def THEN(self, i: int = None): else: return self.getToken(Parser.THEN, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCaseExpr"): - 
listener.enterCaseExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCaseExpr"): - listener.exitCaseExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCaseExpr"): return visitor.visitCaseExpr(self) @@ -2560,14 +2442,6 @@ def MUL(self): def DIV(self): return self.getToken(Parser.DIV, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterArithmeticExpr"): - listener.enterArithmeticExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitArithmeticExpr"): - listener.exitArithmeticExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitArithmeticExpr"): return visitor.visitArithmeticExpr(self) @@ -2588,14 +2462,6 @@ def expr(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterParenthesisExpr"): - listener.enterParenthesisExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitParenthesisExpr"): - listener.exitParenthesisExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitParenthesisExpr"): return visitor.visitParenthesisExpr(self) @@ -2610,14 +2476,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ExprCon def constant(self): return self.getTypedRuleContext(Parser.ConstantContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterConstantExpr"): - listener.enterConstantExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitConstantExpr"): - listener.exitConstantExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitConstantExpr"): return visitor.visitConstantExpr(self) @@ -2647,14 +2505,6 @@ def MINUS(self): def CONCAT(self): return self.getToken(Parser.CONCAT, 0) - def enterRule(self, listener: ParseTreeListener): - if 
hasattr(listener, "enterArithmeticExprOrConcat"): - listener.enterArithmeticExprOrConcat(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitArithmeticExprOrConcat"): - listener.exitArithmeticExprOrConcat(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitArithmeticExprOrConcat"): return visitor.visitArithmeticExprOrConcat(self) @@ -2758,7 +2608,7 @@ def expr(self, _p: int = 0): self.state = 266 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la == Parser.WHEN): + if _la != Parser.WHEN: break self.state = 268 @@ -3015,14 +2865,6 @@ def MUL(self): def DIV(self): return self.getToken(Parser.DIV, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterArithmeticExprComp"): - listener.enterArithmeticExprComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitArithmeticExprComp"): - listener.exitArithmeticExprComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitArithmeticExprComp"): return visitor.visitArithmeticExprComp(self) @@ -3054,14 +2896,6 @@ def exprComponent(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprComponentContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterIfExprComp"): - listener.enterIfExprComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitIfExprComp"): - listener.exitIfExprComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitIfExprComp"): return visitor.visitIfExprComp(self) @@ -3086,14 +2920,6 @@ def exprComponent(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprComponentContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterComparisonExprComp"): - listener.enterComparisonExprComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, 
"exitComparisonExprComp"): - listener.exitComparisonExprComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitComparisonExprComp"): return visitor.visitComparisonExprComp(self) @@ -3110,14 +2936,6 @@ def __init__( def functionsComponents(self): return self.getTypedRuleContext(Parser.FunctionsComponentsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterFunctionsExpressionComp"): - listener.enterFunctionsExpressionComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitFunctionsExpressionComp"): - listener.exitFunctionsExpressionComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitFunctionsExpressionComp"): return visitor.visitFunctionsExpressionComp(self) @@ -3134,14 +2952,6 @@ def __init__( def componentID(self): return self.getTypedRuleContext(Parser.ComponentIDContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCompId"): - listener.enterCompId(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCompId"): - listener.exitCompId(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCompId"): return visitor.visitCompId(self) @@ -3158,14 +2968,6 @@ def __init__( def constant(self): return self.getTypedRuleContext(Parser.ConstantContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterConstantExprComp"): - listener.enterConstantExprComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitConstantExprComp"): - listener.exitConstantExprComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitConstantExprComp"): return visitor.visitConstantExprComp(self) @@ -3197,14 +2999,6 @@ def MINUS(self): def CONCAT(self): return self.getToken(Parser.CONCAT, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, 
"enterArithmeticExprOrConcatComp"): - listener.enterArithmeticExprOrConcatComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitArithmeticExprOrConcatComp"): - listener.exitArithmeticExprOrConcatComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitArithmeticExprOrConcatComp"): return visitor.visitArithmeticExprOrConcatComp(self) @@ -3227,14 +3021,6 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterParenthesisExprComp"): - listener.enterParenthesisExprComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitParenthesisExprComp"): - listener.exitParenthesisExprComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitParenthesisExprComp"): return visitor.visitParenthesisExprComp(self) @@ -3265,14 +3051,6 @@ def lists(self): def valueDomainID(self): return self.getTypedRuleContext(Parser.ValueDomainIDContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterInNotInExprComp"): - listener.enterInNotInExprComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitInNotInExprComp"): - listener.exitInNotInExprComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitInNotInExprComp"): return visitor.visitInNotInExprComp(self) @@ -3300,14 +3078,6 @@ def MINUS(self): def NOT(self): return self.getToken(Parser.NOT, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterUnaryExprComp"): - listener.enterUnaryExprComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitUnaryExprComp"): - listener.exitUnaryExprComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitUnaryExprComp"): return visitor.visitUnaryExprComp(self) @@ -3320,8 +3090,8 @@ def __init__( 
): # actually a Parser.ExprComponentContext super().__init__(parser) self._exprComponent = None # ExprComponentContext - self.condExpr = list() # of ExprComponentContexts - self.thenExpr = list() # of ExprComponentContexts + self.condExpr = [] # of ExprComponentContexts + self.thenExpr = [] # of ExprComponentContexts self.elseExpr = None # ExprComponentContext self.copyFrom(ctx) @@ -3349,14 +3119,6 @@ def THEN(self, i: int = None): else: return self.getToken(Parser.THEN, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCaseExprComp"): - listener.enterCaseExprComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCaseExprComp"): - listener.exitCaseExprComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCaseExprComp"): return visitor.visitCaseExprComp(self) @@ -3388,14 +3150,6 @@ def OR(self): def XOR(self): return self.getToken(Parser.XOR, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterBooleanExprComp"): - listener.enterBooleanExprComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitBooleanExprComp"): - listener.exitBooleanExprComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitBooleanExprComp"): return visitor.visitBooleanExprComp(self) @@ -3499,7 +3253,7 @@ def exprComponent(self, _p: int = 0): self.state = 333 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la == Parser.WHEN): + if _la != Parser.WHEN: break self.state = 335 @@ -3709,14 +3463,6 @@ def __init__( def numericOperatorsComponent(self): return self.getTypedRuleContext(Parser.NumericOperatorsComponentContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterNumericFunctionsComponents"): - listener.enterNumericFunctionsComponents(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitNumericFunctionsComponents"): - 
listener.exitNumericFunctionsComponents(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitNumericFunctionsComponents"): return visitor.visitNumericFunctionsComponents(self) @@ -3733,14 +3479,6 @@ def __init__( def stringOperatorsComponent(self): return self.getTypedRuleContext(Parser.StringOperatorsComponentContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterStringFunctionsComponents"): - listener.enterStringFunctionsComponents(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitStringFunctionsComponents"): - listener.exitStringFunctionsComponents(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitStringFunctionsComponents"): return visitor.visitStringFunctionsComponents(self) @@ -3757,14 +3495,6 @@ def __init__( def comparisonOperatorsComponent(self): return self.getTypedRuleContext(Parser.ComparisonOperatorsComponentContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterComparisonFunctionsComponents"): - listener.enterComparisonFunctionsComponents(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitComparisonFunctionsComponents"): - listener.exitComparisonFunctionsComponents(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitComparisonFunctionsComponents"): return visitor.visitComparisonFunctionsComponents(self) @@ -3781,14 +3511,6 @@ def __init__( def timeOperatorsComponent(self): return self.getTypedRuleContext(Parser.TimeOperatorsComponentContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterTimeFunctionsComponents"): - listener.enterTimeFunctionsComponents(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitTimeFunctionsComponents"): - listener.exitTimeFunctionsComponents(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, 
"visitTimeFunctionsComponents"): return visitor.visitTimeFunctionsComponents(self) @@ -3805,14 +3527,6 @@ def __init__( def genericOperatorsComponent(self): return self.getTypedRuleContext(Parser.GenericOperatorsComponentContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterGenericFunctionsComponents"): - listener.enterGenericFunctionsComponents(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitGenericFunctionsComponents"): - listener.exitGenericFunctionsComponents(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitGenericFunctionsComponents"): return visitor.visitGenericFunctionsComponents(self) @@ -3829,14 +3543,6 @@ def __init__( def anFunctionComponent(self): return self.getTypedRuleContext(Parser.AnFunctionComponentContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAnalyticFunctionsComponents"): - listener.enterAnalyticFunctionsComponents(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAnalyticFunctionsComponents"): - listener.exitAnalyticFunctionsComponents(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAnalyticFunctionsComponents"): return visitor.visitAnalyticFunctionsComponents(self) @@ -3853,14 +3559,6 @@ def __init__( def conditionalOperatorsComponent(self): return self.getTypedRuleContext(Parser.ConditionalOperatorsComponentContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterConditionalFunctionsComponents"): - listener.enterConditionalFunctionsComponents(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitConditionalFunctionsComponents"): - listener.exitConditionalFunctionsComponents(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitConditionalFunctionsComponents"): return visitor.visitConditionalFunctionsComponents(self) @@ -3877,14 
+3575,6 @@ def __init__( def aggrOperators(self): return self.getTypedRuleContext(Parser.AggrOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAggregateFunctionsComponents"): - listener.enterAggregateFunctionsComponents(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAggregateFunctionsComponents"): - listener.exitAggregateFunctionsComponents(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAggregateFunctionsComponents"): return visitor.visitAggregateFunctionsComponents(self) @@ -3984,14 +3674,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def hierarchyOperators(self): return self.getTypedRuleContext(Parser.HierarchyOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterHierarchyFunctions"): - listener.enterHierarchyFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitHierarchyFunctions"): - listener.exitHierarchyFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitHierarchyFunctions"): return visitor.visitHierarchyFunctions(self) @@ -4006,14 +3688,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def stringOperators(self): return self.getTypedRuleContext(Parser.StringOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterStringFunctions"): - listener.enterStringFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitStringFunctions"): - listener.exitStringFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitStringFunctions"): return visitor.visitStringFunctions(self) @@ -4028,14 +3702,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def validationOperators(self): return 
self.getTypedRuleContext(Parser.ValidationOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterValidationFunctions"): - listener.enterValidationFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitValidationFunctions"): - listener.exitValidationFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitValidationFunctions"): return visitor.visitValidationFunctions(self) @@ -4050,14 +3716,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def genericOperators(self): return self.getTypedRuleContext(Parser.GenericOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterGenericFunctions"): - listener.enterGenericFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitGenericFunctions"): - listener.exitGenericFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitGenericFunctions"): return visitor.visitGenericFunctions(self) @@ -4072,14 +3730,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def conditionalOperators(self): return self.getTypedRuleContext(Parser.ConditionalOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterConditionalFunctions"): - listener.enterConditionalFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitConditionalFunctions"): - listener.exitConditionalFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitConditionalFunctions"): return visitor.visitConditionalFunctions(self) @@ -4094,14 +3744,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def aggrOperatorsGrouping(self): return self.getTypedRuleContext(Parser.AggrOperatorsGroupingContext, 0) - def enterRule(self, listener: 
ParseTreeListener): - if hasattr(listener, "enterAggregateFunctions"): - listener.enterAggregateFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAggregateFunctions"): - listener.exitAggregateFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAggregateFunctions"): return visitor.visitAggregateFunctions(self) @@ -4116,14 +3758,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def joinOperators(self): return self.getTypedRuleContext(Parser.JoinOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterJoinFunctions"): - listener.enterJoinFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitJoinFunctions"): - listener.exitJoinFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitJoinFunctions"): return visitor.visitJoinFunctions(self) @@ -4138,14 +3772,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def comparisonOperators(self): return self.getTypedRuleContext(Parser.ComparisonOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterComparisonFunctions"): - listener.enterComparisonFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitComparisonFunctions"): - listener.exitComparisonFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitComparisonFunctions"): return visitor.visitComparisonFunctions(self) @@ -4160,14 +3786,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def numericOperators(self): return self.getTypedRuleContext(Parser.NumericOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterNumericFunctions"): - listener.enterNumericFunctions(self) - - def exitRule(self, listener: 
ParseTreeListener): - if hasattr(listener, "exitNumericFunctions"): - listener.exitNumericFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitNumericFunctions"): return visitor.visitNumericFunctions(self) @@ -4182,14 +3800,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def timeOperators(self): return self.getTypedRuleContext(Parser.TimeOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterTimeFunctions"): - listener.enterTimeFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitTimeFunctions"): - listener.exitTimeFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitTimeFunctions"): return visitor.visitTimeFunctions(self) @@ -4204,14 +3814,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def setOperators(self): return self.getTypedRuleContext(Parser.SetOperatorsContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterSetFunctions"): - listener.enterSetFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSetFunctions"): - listener.exitSetFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitSetFunctions"): return visitor.visitSetFunctions(self) @@ -4226,14 +3828,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.Functio def anFunction(self): return self.getTypedRuleContext(Parser.AnFunctionContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAnalyticFunctions"): - listener.enterAnalyticFunctions(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAnalyticFunctions"): - listener.exitAnalyticFunctions(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAnalyticFunctions"): return 
visitor.visitAnalyticFunctions(self) @@ -4371,14 +3965,6 @@ def subspaceClause(self): def getRuleIndex(self): return Parser.RULE_datasetClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDatasetClause"): - listener.enterDatasetClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDatasetClause"): - listener.exitDatasetClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDatasetClause"): return visitor.visitDatasetClause(self) @@ -4464,14 +4050,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_renameClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRenameClause"): - listener.enterRenameClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRenameClause"): - listener.exitRenameClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRenameClause"): return visitor.visitRenameClause(self) @@ -4531,14 +4109,6 @@ def havingClause(self): def getRuleIndex(self): return Parser.RULE_aggrClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAggrClause"): - listener.enterAggrClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAggrClause"): - listener.exitAggrClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAggrClause"): return visitor.visitAggrClause(self) @@ -4593,14 +4163,6 @@ def exprComponent(self): def getRuleIndex(self): return Parser.RULE_filterClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterFilterClause"): - listener.enterFilterClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitFilterClause"): - listener.exitFilterClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitFilterClause"): return 
visitor.visitFilterClause(self) @@ -4650,14 +4212,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_calcClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCalcClause"): - listener.enterCalcClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCalcClause"): - listener.exitCalcClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCalcClause"): return visitor.visitCalcClause(self) @@ -4724,14 +4278,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_keepOrDropClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterKeepOrDropClause"): - listener.enterKeepOrDropClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitKeepOrDropClause"): - listener.exitKeepOrDropClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitKeepOrDropClause"): return visitor.visitKeepOrDropClause(self) @@ -4803,14 +4349,6 @@ def UNPIVOT(self): def getRuleIndex(self): return Parser.RULE_pivotOrUnpivotClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterPivotOrUnpivotClause"): - listener.enterPivotOrUnpivotClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitPivotOrUnpivotClause"): - listener.exitPivotOrUnpivotClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitPivotOrUnpivotClause"): return visitor.visitPivotOrUnpivotClause(self) @@ -4882,14 +4420,6 @@ def componentID(self, i: int = None): def getRuleIndex(self): return Parser.RULE_customPivotClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCustomPivotClause"): - listener.enterCustomPivotClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCustomPivotClause"): - listener.exitCustomPivotClause(self) 
- def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCustomPivotClause"): return visitor.visitCustomPivotClause(self) @@ -4960,14 +4490,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_subspaceClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterSubspaceClause"): - listener.enterSubspaceClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSubspaceClause"): - listener.exitSubspaceClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitSubspaceClause"): return visitor.visitSubspaceClause(self) @@ -5053,14 +4575,6 @@ def FULL_JOIN(self): def CROSS_JOIN(self): return self.getToken(Parser.CROSS_JOIN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterJoinExpr"): - listener.enterJoinExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitJoinExpr"): - listener.exitJoinExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitJoinExpr"): return visitor.visitJoinExpr(self) @@ -5190,14 +4704,6 @@ def COMMA(self, i: int = None): else: return self.getToken(Parser.COMMA, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDefOperator"): - listener.enterDefOperator(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDefOperator"): - listener.exitDefOperator(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDefOperator"): return visitor.visitDefOperator(self) @@ -5245,14 +4751,6 @@ def ruleClauseHierarchical(self): def END(self): return self.getToken(Parser.END, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDefHierarchical"): - listener.enterDefHierarchical(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDefHierarchical"): - listener.exitDefHierarchical(self) - 
def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDefHierarchical"): return visitor.visitDefHierarchical(self) @@ -5300,14 +4798,6 @@ def ruleClauseDatapoint(self): def END(self): return self.getToken(Parser.END, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDefDatapointRuleset"): - listener.enterDefDatapointRuleset(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDefDatapointRuleset"): - listener.exitDefDatapointRuleset(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDefDatapointRuleset"): return visitor.visitDefDatapointRuleset(self) @@ -5508,14 +4998,6 @@ def RETURNS(self): def evalDatasetType(self): return self.getTypedRuleContext(Parser.EvalDatasetTypeContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterEvalAtom"): - listener.enterEvalAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitEvalAtom"): - listener.exitEvalAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitEvalAtom"): return visitor.visitEvalAtom(self) @@ -5556,14 +5038,6 @@ def valueDomainName(self): def STRING_CONSTANT(self): return self.getToken(Parser.STRING_CONSTANT, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCastExprDataset"): - listener.enterCastExprDataset(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCastExprDataset"): - listener.exitCastExprDataset(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCastExprDataset"): return visitor.visitCastExprDataset(self) @@ -5598,14 +5072,6 @@ def COMMA(self, i: int = None): else: return self.getToken(Parser.COMMA, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCallDataset"): - listener.enterCallDataset(self) - - def exitRule(self, listener: ParseTreeListener): - if 
hasattr(listener, "exitCallDataset"): - listener.exitCallDataset(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCallDataset"): return visitor.visitCallDataset(self) @@ -5988,14 +5454,6 @@ def RETURNS(self): def outputParameterTypeComponent(self): return self.getTypedRuleContext(Parser.OutputParameterTypeComponentContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterEvalAtomComponent"): - listener.enterEvalAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitEvalAtomComponent"): - listener.exitEvalAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitEvalAtomComponent"): return visitor.visitEvalAtomComponent(self) @@ -6036,14 +5494,6 @@ def valueDomainName(self): def STRING_CONSTANT(self): return self.getToken(Parser.STRING_CONSTANT, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCastExprComponent"): - listener.enterCastExprComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCastExprComponent"): - listener.exitCastExprComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCastExprComponent"): return visitor.visitCastExprComponent(self) @@ -6078,14 +5528,6 @@ def COMMA(self, i: int = None): else: return self.getToken(Parser.COMMA, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCallComponent"): - listener.enterCallComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCallComponent"): - listener.exitCallComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCallComponent"): return visitor.visitCallComponent(self) @@ -6404,14 +5846,6 @@ def OPTIONAL(self): def getRuleIndex(self): return Parser.RULE_parameterComponent - def enterRule(self, listener: ParseTreeListener): - if 
hasattr(listener, "enterParameterComponent"): - listener.enterParameterComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitParameterComponent"): - listener.exitParameterComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitParameterComponent"): return visitor.visitParameterComponent(self) @@ -6536,14 +5970,6 @@ def OPTIONAL(self): def getRuleIndex(self): return Parser.RULE_parameter - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterParameter"): - listener.enterParameter(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitParameter"): - listener.exitParameter(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitParameter"): return visitor.visitParameter(self) @@ -6714,14 +6140,6 @@ def optionalExpr(self, i: int = None): else: return self.getTypedRuleContext(Parser.OptionalExprContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterInstrAtom"): - listener.enterInstrAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitInstrAtom"): - listener.exitInstrAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitInstrAtom"): return visitor.visitInstrAtom(self) @@ -6763,14 +6181,6 @@ def LCASE(self): def LEN(self): return self.getToken(Parser.LEN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterUnaryStringFunction"): - listener.enterUnaryStringFunction(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitUnaryStringFunction"): - listener.exitUnaryStringFunction(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitUnaryStringFunction"): return visitor.visitUnaryStringFunction(self) @@ -6810,14 +6220,6 @@ def optionalExpr(self, i: int = None): else: return self.getTypedRuleContext(Parser.OptionalExprContext, i) - 
def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterSubstrAtom"): - listener.enterSubstrAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSubstrAtom"): - listener.exitSubstrAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitSubstrAtom"): return visitor.visitSubstrAtom(self) @@ -6856,14 +6258,6 @@ def RPAREN(self): def optionalExpr(self): return self.getTypedRuleContext(Parser.OptionalExprContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterReplaceAtom"): - listener.enterReplaceAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitReplaceAtom"): - listener.exitReplaceAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitReplaceAtom"): return visitor.visitReplaceAtom(self) @@ -7072,14 +6466,6 @@ def RPAREN(self): def optionalExprComponent(self): return self.getTypedRuleContext(Parser.OptionalExprComponentContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterReplaceAtomComponent"): - listener.enterReplaceAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitReplaceAtomComponent"): - listener.exitReplaceAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitReplaceAtomComponent"): return visitor.visitReplaceAtomComponent(self) @@ -7121,14 +6507,6 @@ def LCASE(self): def LEN(self): return self.getToken(Parser.LEN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterUnaryStringFunctionComponent"): - listener.enterUnaryStringFunctionComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitUnaryStringFunctionComponent"): - listener.exitUnaryStringFunctionComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitUnaryStringFunctionComponent"): 
return visitor.visitUnaryStringFunctionComponent(self) @@ -7168,14 +6546,6 @@ def optionalExprComponent(self, i: int = None): else: return self.getTypedRuleContext(Parser.OptionalExprComponentContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterSubstrAtomComponent"): - listener.enterSubstrAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSubstrAtomComponent"): - listener.exitSubstrAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitSubstrAtomComponent"): return visitor.visitSubstrAtomComponent(self) @@ -7219,14 +6589,6 @@ def optionalExprComponent(self, i: int = None): else: return self.getTypedRuleContext(Parser.OptionalExprComponentContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterInstrAtomComponent"): - listener.enterInstrAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitInstrAtomComponent"): - listener.exitInstrAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitInstrAtomComponent"): return visitor.visitInstrAtomComponent(self) @@ -7438,14 +6800,6 @@ def LN(self): def SQRT(self): return self.getToken(Parser.SQRT, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterUnaryNumeric"): - listener.enterUnaryNumeric(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitUnaryNumeric"): - listener.exitUnaryNumeric(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitUnaryNumeric"): return visitor.visitUnaryNumeric(self) @@ -7481,14 +6835,6 @@ def COMMA(self): def optionalExpr(self): return self.getTypedRuleContext(Parser.OptionalExprContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterUnaryWithOptionalNumeric"): - listener.enterUnaryWithOptionalNumeric(self) - - def exitRule(self, 
listener: ParseTreeListener): - if hasattr(listener, "exitUnaryWithOptionalNumeric"): - listener.exitUnaryWithOptionalNumeric(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitUnaryWithOptionalNumeric"): return visitor.visitUnaryWithOptionalNumeric(self) @@ -7532,14 +6878,6 @@ def LOG(self): def RANDOM(self): return self.getToken(Parser.RANDOM, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterBinaryNumeric"): - listener.enterBinaryNumeric(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitBinaryNumeric"): - listener.exitBinaryNumeric(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitBinaryNumeric"): return visitor.visitBinaryNumeric(self) @@ -7707,14 +7045,6 @@ def LN(self): def SQRT(self): return self.getToken(Parser.SQRT, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterUnaryNumericComponent"): - listener.enterUnaryNumericComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitUnaryNumericComponent"): - listener.exitUnaryNumericComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitUnaryNumericComponent"): return visitor.visitUnaryNumericComponent(self) @@ -7758,14 +7088,6 @@ def LOG(self): def RANDOM(self): return self.getToken(Parser.RANDOM, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterBinaryNumericComponent"): - listener.enterBinaryNumericComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitBinaryNumericComponent"): - listener.exitBinaryNumericComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitBinaryNumericComponent"): return visitor.visitBinaryNumericComponent(self) @@ -7801,14 +7123,6 @@ def COMMA(self): def optionalExprComponent(self): return self.getTypedRuleContext(Parser.OptionalExprComponentContext, 
0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterUnaryWithOptionalNumericComponent"): - listener.enterUnaryWithOptionalNumericComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitUnaryWithOptionalNumericComponent"): - listener.exitUnaryWithOptionalNumericComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitUnaryWithOptionalNumericComponent"): return visitor.visitUnaryWithOptionalNumericComponent(self) @@ -7972,14 +7286,6 @@ def expr(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterBetweenAtom"): - listener.enterBetweenAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitBetweenAtom"): - listener.exitBetweenAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitBetweenAtom"): return visitor.visitBetweenAtom(self) @@ -8013,14 +7319,6 @@ def expr(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCharsetMatchAtom"): - listener.enterCharsetMatchAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCharsetMatchAtom"): - listener.exitCharsetMatchAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCharsetMatchAtom"): return visitor.visitCharsetMatchAtom(self) @@ -8046,14 +7344,6 @@ def expr(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterIsNullAtom"): - listener.enterIsNullAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitIsNullAtom"): - listener.exitIsNullAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitIsNullAtom"): return 
visitor.visitIsNullAtom(self) @@ -8093,14 +7383,6 @@ def expr(self, i: int = None): def retainType(self): return self.getTypedRuleContext(Parser.RetainTypeContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterExistInAtom"): - listener.enterExistInAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitExistInAtom"): - listener.exitExistInAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitExistInAtom"): return visitor.visitExistInAtom(self) @@ -8232,14 +7514,6 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterIsNullAtomComponent"): - listener.enterIsNullAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitIsNullAtomComponent"): - listener.exitIsNullAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitIsNullAtomComponent"): return visitor.visitIsNullAtomComponent(self) @@ -8273,14 +7547,6 @@ def exprComponent(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprComponentContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCharsetMatchAtomComponent"): - listener.enterCharsetMatchAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCharsetMatchAtomComponent"): - listener.exitCharsetMatchAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCharsetMatchAtomComponent"): return visitor.visitCharsetMatchAtomComponent(self) @@ -8318,14 +7584,6 @@ def exprComponent(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprComponentContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterBetweenAtomComponent"): - listener.enterBetweenAtomComponent(self) - - def exitRule(self, 
listener: ParseTreeListener): - if hasattr(listener, "exitBetweenAtomComponent"): - listener.exitBetweenAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitBetweenAtomComponent"): return visitor.visitBetweenAtomComponent(self) @@ -8431,14 +7689,6 @@ def expr(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDayToYearAtom"): - listener.enterDayToYearAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDayToYearAtom"): - listener.exitDayToYearAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDayToYearAtom"): return visitor.visitDayToYearAtom(self) @@ -8464,14 +7714,6 @@ def expr(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterYearAtom"): - listener.enterYearAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitYearAtom"): - listener.exitYearAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitYearAtom"): return visitor.visitYearAtom(self) @@ -8497,14 +7739,6 @@ def expr(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterYearTodayAtom"): - listener.enterYearTodayAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitYearTodayAtom"): - listener.exitYearTodayAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitYearTodayAtom"): return visitor.visitYearTodayAtom(self) @@ -8530,14 +7764,6 @@ def expr(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDayToMonthAtom"): - listener.enterDayToMonthAtom(self) - - def exitRule(self, listener: 
ParseTreeListener): - if hasattr(listener, "exitDayToMonthAtom"): - listener.exitDayToMonthAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDayToMonthAtom"): return visitor.visitDayToMonthAtom(self) @@ -8563,14 +7789,6 @@ def RPAREN(self): def expr(self): return self.getTypedRuleContext(Parser.ExprContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterPeriodAtom"): - listener.enterPeriodAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitPeriodAtom"): - listener.exitPeriodAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitPeriodAtom"): return visitor.visitPeriodAtom(self) @@ -8596,14 +7814,6 @@ def expr(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterMonthTodayAtom"): - listener.enterMonthTodayAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitMonthTodayAtom"): - listener.exitMonthTodayAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitMonthTodayAtom"): return visitor.visitMonthTodayAtom(self) @@ -8639,14 +7849,6 @@ def SINGLE(self): def ALL(self): return self.getToken(Parser.ALL, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterFillTimeAtom"): - listener.enterFillTimeAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitFillTimeAtom"): - listener.exitFillTimeAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitFillTimeAtom"): return visitor.visitFillTimeAtom(self) @@ -8672,14 +7874,6 @@ def expr(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterMonthAtom"): - listener.enterMonthAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if 
hasattr(listener, "exitMonthAtom"): - listener.exitMonthAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitMonthAtom"): return visitor.visitMonthAtom(self) @@ -8705,14 +7899,6 @@ def expr(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDayOfYearAtom"): - listener.enterDayOfYearAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDayOfYearAtom"): - listener.exitDayOfYearAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDayOfYearAtom"): return visitor.visitDayOfYearAtom(self) @@ -8742,14 +7928,6 @@ def FLOW_TO_STOCK(self): def STOCK_TO_FLOW(self): return self.getToken(Parser.STOCK_TO_FLOW, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterFlowAtom"): - listener.enterFlowAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitFlowAtom"): - listener.exitFlowAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitFlowAtom"): return visitor.visitFlowAtom(self) @@ -8781,14 +7959,6 @@ def signedInteger(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterTimeShiftAtom"): - listener.enterTimeShiftAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitTimeShiftAtom"): - listener.exitTimeShiftAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitTimeShiftAtom"): return visitor.visitTimeShiftAtom(self) @@ -8839,14 +8009,6 @@ def FIRST(self): def LAST(self): return self.getToken(Parser.LAST, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterTimeAggAtom"): - listener.enterTimeAggAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitTimeAggAtom"): - 
listener.exitTimeAggAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitTimeAggAtom"): return visitor.visitTimeAggAtom(self) @@ -8880,14 +8042,6 @@ def expr(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDateDiffAtom"): - listener.enterDateDiffAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDateDiffAtom"): - listener.exitDateDiffAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDateDiffAtom"): return visitor.visitDateDiffAtom(self) @@ -8925,14 +8079,6 @@ def expr(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDateAddAtom"): - listener.enterDateAddAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDateAddAtom"): - listener.exitDateAddAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDateAddAtom"): return visitor.visitDateAddAtom(self) @@ -8958,14 +8104,6 @@ def expr(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDayOfMonthAtom"): - listener.enterDayOfMonthAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDayOfMonthAtom"): - listener.exitDayOfMonthAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDayOfMonthAtom"): return visitor.visitDayOfMonthAtom(self) @@ -8988,14 +8126,6 @@ def LPAREN(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCurrentDateAtom"): - listener.enterCurrentDateAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCurrentDateAtom"): - 
listener.exitCurrentDateAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCurrentDateAtom"): return visitor.visitCurrentDateAtom(self) @@ -9446,14 +8576,6 @@ def RPAREN(self): def exprComponent(self): return self.getTypedRuleContext(Parser.ExprComponentContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterPeriodAtomComponent"): - listener.enterPeriodAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitPeriodAtomComponent"): - listener.exitPeriodAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitPeriodAtomComponent"): return visitor.visitPeriodAtomComponent(self) @@ -9485,14 +8607,6 @@ def signedInteger(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterTimeShiftAtomComponent"): - listener.enterTimeShiftAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitTimeShiftAtomComponent"): - listener.exitTimeShiftAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitTimeShiftAtomComponent"): return visitor.visitTimeShiftAtomComponent(self) @@ -9518,14 +8632,6 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterMonthTodayAtomComponent"): - listener.enterMonthTodayAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitMonthTodayAtomComponent"): - listener.exitMonthTodayAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitMonthTodayAtomComponent"): return visitor.visitMonthTodayAtomComponent(self) @@ -9576,14 +8682,6 @@ def FIRST(self): def LAST(self): return self.getToken(Parser.LAST, 0) - def enterRule(self, listener: ParseTreeListener): - if 
hasattr(listener, "enterTimeAggAtomComponent"): - listener.enterTimeAggAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitTimeAggAtomComponent"): - listener.exitTimeAggAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitTimeAggAtomComponent"): return visitor.visitTimeAggAtomComponent(self) @@ -9609,14 +8707,6 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDayToMonthAtomComponent"): - listener.enterDayToMonthAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDayToMonthAtomComponent"): - listener.exitDayToMonthAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDayToMonthAtomComponent"): return visitor.visitDayToMonthAtomComponent(self) @@ -9654,14 +8744,6 @@ def exprComponent(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprComponentContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDateAddAtomComponent"): - listener.enterDateAddAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDateAddAtomComponent"): - listener.exitDateAddAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDateAddAtomComponent"): return visitor.visitDateAddAtomComponent(self) @@ -9687,14 +8769,6 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterYearTodayAtomComponent"): - listener.enterYearTodayAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitYearTodayAtomComponent"): - listener.exitYearTodayAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, 
"visitYearTodayAtomComponent"): return visitor.visitYearTodayAtomComponent(self) @@ -9720,14 +8794,6 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDayOfMonthAtomComponent"): - listener.enterDayOfMonthAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDayOfMonthAtomComponent"): - listener.exitDayOfMonthAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDayOfMonthAtomComponent"): return visitor.visitDayOfMonthAtomComponent(self) @@ -9753,14 +8819,6 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterMonthAtomComponent"): - listener.enterMonthAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitMonthAtomComponent"): - listener.exitMonthAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitMonthAtomComponent"): return visitor.visitMonthAtomComponent(self) @@ -9796,14 +8854,6 @@ def SINGLE(self): def ALL(self): return self.getToken(Parser.ALL, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterFillTimeAtomComponent"): - listener.enterFillTimeAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitFillTimeAtomComponent"): - listener.exitFillTimeAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitFillTimeAtomComponent"): return visitor.visitFillTimeAtomComponent(self) @@ -9829,14 +8879,6 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDatOfYearAtomComponent"): - listener.enterDatOfYearAtomComponent(self) - - def exitRule(self, 
listener: ParseTreeListener): - if hasattr(listener, "exitDatOfYearAtomComponent"): - listener.exitDatOfYearAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDatOfYearAtomComponent"): return visitor.visitDatOfYearAtomComponent(self) @@ -9862,14 +8904,6 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDayToYearAtomComponent"): - listener.enterDayToYearAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDayToYearAtomComponent"): - listener.exitDayToYearAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDayToYearAtomComponent"): return visitor.visitDayToYearAtomComponent(self) @@ -9892,14 +8926,6 @@ def LPAREN(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCurrentDateAtomComponent"): - listener.enterCurrentDateAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCurrentDateAtomComponent"): - listener.exitCurrentDateAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCurrentDateAtomComponent"): return visitor.visitCurrentDateAtomComponent(self) @@ -9929,14 +8955,6 @@ def FLOW_TO_STOCK(self): def STOCK_TO_FLOW(self): return self.getToken(Parser.STOCK_TO_FLOW, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterFlowAtomComponent"): - listener.enterFlowAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitFlowAtomComponent"): - listener.exitFlowAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitFlowAtomComponent"): return visitor.visitFlowAtomComponent(self) @@ -9970,14 +8988,6 @@ def exprComponent(self): def expr(self): 
return self.getTypedRuleContext(Parser.ExprContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDateDiffAtomComponent"): - listener.enterDateDiffAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDateDiffAtomComponent"): - listener.exitDateDiffAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDateDiffAtomComponent"): return visitor.visitDateDiffAtomComponent(self) @@ -10003,14 +9013,6 @@ def exprComponent(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterYearAtomComponent"): - listener.enterYearAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitYearAtomComponent"): - listener.exitYearAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitYearAtomComponent"): return visitor.visitYearAtomComponent(self) @@ -10459,14 +9461,6 @@ def SETDIFF(self): def SYMDIFF(self): return self.getToken(Parser.SYMDIFF, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterSetOrSYmDiffAtom"): - listener.enterSetOrSYmDiffAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSetOrSYmDiffAtom"): - listener.exitSetOrSYmDiffAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitSetOrSYmDiffAtom"): return visitor.visitSetOrSYmDiffAtom(self) @@ -10500,14 +9494,6 @@ def COMMA(self, i: int = None): else: return self.getToken(Parser.COMMA, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterIntersectAtom"): - listener.enterIntersectAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitIntersectAtom"): - listener.exitIntersectAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, 
"visitIntersectAtom"): return visitor.visitIntersectAtom(self) @@ -10541,14 +9527,6 @@ def COMMA(self, i: int = None): else: return self.getToken(Parser.COMMA, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterUnionAtom"): - listener.enterUnionAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitUnionAtom"): - listener.exitUnionAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitUnionAtom"): return visitor.visitUnionAtom(self) @@ -10584,7 +9562,7 @@ def setOperators(self): self.state = 1077 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la == Parser.COMMA): + if _la != Parser.COMMA: break self.state = 1079 @@ -10610,7 +9588,7 @@ def setOperators(self): self.state = 1088 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la == Parser.COMMA): + if _la != Parser.COMMA: break self.state = 1090 @@ -10698,14 +9676,6 @@ def componentID(self): def getRuleIndex(self): return Parser.RULE_hierarchyOperators - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterHierarchyOperators"): - listener.enterHierarchyOperators(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitHierarchyOperators"): - listener.exitHierarchyOperators(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitHierarchyOperators"): return visitor.visitHierarchyOperators(self) @@ -10844,14 +9814,6 @@ def inputMode(self): def validationOutput(self): return self.getTypedRuleContext(Parser.ValidationOutputContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterValidateHRruleset"): - listener.enterValidateHRruleset(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitValidateHRruleset"): - listener.exitValidateHRruleset(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitValidateHRruleset"): return 
visitor.visitValidateHRruleset(self) @@ -10900,14 +9862,6 @@ def componentID(self, i: int = None): def validationOutput(self): return self.getTypedRuleContext(Parser.ValidationOutputContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterValidateDPruleset"): - listener.enterValidateDPruleset(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitValidateDPruleset"): - listener.exitValidateDPruleset(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitValidateDPruleset"): return visitor.visitValidateDPruleset(self) @@ -10952,14 +9906,6 @@ def INVALID(self): def ALL(self): return self.getToken(Parser.ALL, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterValidationSimple"): - listener.enterValidationSimple(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitValidationSimple"): - listener.exitValidationSimple(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitValidationSimple"): return visitor.visitValidationSimple(self) @@ -11178,14 +10124,6 @@ def expr(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterNvlAtom"): - listener.enterNvlAtom(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitNvlAtom"): - listener.exitNvlAtom(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitNvlAtom"): return visitor.visitNvlAtom(self) @@ -11259,14 +10197,6 @@ def exprComponent(self, i: int = None): else: return self.getTypedRuleContext(Parser.ExprComponentContext, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterNvlAtomComponent"): - listener.enterNvlAtomComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitNvlAtomComponent"): - 
listener.exitNvlAtomComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitNvlAtomComponent"): return visitor.visitNvlAtomComponent(self) @@ -11360,14 +10290,6 @@ def VAR_POP(self): def VAR_SAMP(self): return self.getToken(Parser.VAR_SAMP, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAggrComp"): - listener.enterAggrComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAggrComp"): - listener.exitAggrComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAggrComp"): return visitor.visitAggrComp(self) @@ -11390,14 +10312,6 @@ def LPAREN(self): def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCountAggrComp"): - listener.enterCountAggrComp(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCountAggrComp"): - listener.exitCountAggrComp(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCountAggrComp"): return visitor.visitCountAggrComp(self) @@ -11535,14 +10449,6 @@ def groupingClause(self): def havingClause(self): return self.getTypedRuleContext(Parser.HavingClauseContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAggrDataset"): - listener.enterAggrDataset(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAggrDataset"): - listener.exitAggrDataset(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAggrDataset"): return visitor.visitAggrDataset(self) @@ -11675,14 +10581,6 @@ def partitionByClause(self): def scalarItem(self): return self.getTypedRuleContext(Parser.ScalarItemContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterLagOrLeadAn"): - listener.enterLagOrLeadAn(self) - - def exitRule(self, listener: ParseTreeListener): - if 
hasattr(listener, "exitLagOrLeadAn"): - listener.exitLagOrLeadAn(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitLagOrLeadAn"): return visitor.visitLagOrLeadAn(self) @@ -11720,14 +10618,6 @@ def RATIO_TO_REPORT(self): def partitionByClause(self): return self.getTypedRuleContext(Parser.PartitionByClauseContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRatioToReportAn"): - listener.enterRatioToReportAn(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRatioToReportAn"): - listener.exitRatioToReportAn(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRatioToReportAn"): return visitor.visitRatioToReportAn(self) @@ -11806,14 +10696,6 @@ def orderByClause(self): def windowingClause(self): return self.getTypedRuleContext(Parser.WindowingClauseContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAnSimpleFunction"): - listener.enterAnSimpleFunction(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAnSimpleFunction"): - listener.exitAnSimpleFunction(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAnSimpleFunction"): return visitor.visitAnSimpleFunction(self) @@ -12080,14 +10962,6 @@ def orderByClause(self): def windowingClause(self): return self.getTypedRuleContext(Parser.WindowingClauseContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAnSimpleFunctionComponent"): - listener.enterAnSimpleFunctionComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAnSimpleFunctionComponent"): - listener.exitAnSimpleFunctionComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAnSimpleFunctionComponent"): return visitor.visitAnSimpleFunctionComponent(self) @@ -12145,14 +11019,6 @@ def partitionByClause(self): def 
scalarItem(self): return self.getTypedRuleContext(Parser.ScalarItemContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterLagOrLeadAnComponent"): - listener.enterLagOrLeadAnComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitLagOrLeadAnComponent"): - listener.exitLagOrLeadAnComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitLagOrLeadAnComponent"): return visitor.visitLagOrLeadAnComponent(self) @@ -12193,14 +11059,6 @@ def orderByClause(self): def partitionByClause(self): return self.getTypedRuleContext(Parser.PartitionByClauseContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRankAnComponent"): - listener.enterRankAnComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRankAnComponent"): - listener.exitRankAnComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRankAnComponent"): return visitor.visitRankAnComponent(self) @@ -12240,14 +11098,6 @@ def RATIO_TO_REPORT(self): def partitionByClause(self): return self.getTypedRuleContext(Parser.PartitionByClauseContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRatioToReportAnComponent"): - listener.enterRatioToReportAnComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRatioToReportAnComponent"): - listener.exitRatioToReportAnComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRatioToReportAnComponent"): return visitor.visitRatioToReportAnComponent(self) @@ -12496,14 +11346,6 @@ def componentID(self, i: int = None): def getRuleIndex(self): return Parser.RULE_renameClauseItem - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRenameClauseItem"): - listener.enterRenameClauseItem(self) - - def exitRule(self, listener: 
ParseTreeListener): - if hasattr(listener, "exitRenameClauseItem"): - listener.exitRenameClauseItem(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRenameClauseItem"): return visitor.visitRenameClauseItem(self) @@ -12552,14 +11394,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_aggregateClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAggregateClause"): - listener.enterAggregateClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAggregateClause"): - listener.exitAggregateClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAggregateClause"): return visitor.visitAggregateClause(self) @@ -12617,14 +11451,6 @@ def componentRole(self): def getRuleIndex(self): return Parser.RULE_aggrFunctionClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAggrFunctionClause"): - listener.enterAggrFunctionClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAggrFunctionClause"): - listener.exitAggrFunctionClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAggrFunctionClause"): return visitor.visitAggrFunctionClause(self) @@ -12693,14 +11519,6 @@ def componentRole(self): def getRuleIndex(self): return Parser.RULE_calcClauseItem - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCalcClauseItem"): - listener.enterCalcClauseItem(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCalcClauseItem"): - listener.exitCalcClauseItem(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCalcClauseItem"): return visitor.visitCalcClauseItem(self) @@ -12766,14 +11584,6 @@ def scalarItem(self): def getRuleIndex(self): return Parser.RULE_subspaceClauseItem - def enterRule(self, listener: ParseTreeListener): - if 
hasattr(listener, "enterSubspaceClauseItem"): - listener.enterSubspaceClauseItem(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSubspaceClauseItem"): - listener.exitSubspaceClauseItem(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitSubspaceClauseItem"): return visitor.visitSubspaceClauseItem(self) @@ -12842,14 +11652,6 @@ def basicScalarType(self): def STRING_CONSTANT(self): return self.getToken(Parser.STRING_CONSTANT, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterScalarWithCast"): - listener.enterScalarWithCast(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitScalarWithCast"): - listener.exitScalarWithCast(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitScalarWithCast"): return visitor.visitScalarWithCast(self) @@ -12864,14 +11666,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.ScalarI def constant(self): return self.getTypedRuleContext(Parser.ConstantContext, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterSimpleScalar"): - listener.enterSimpleScalar(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSimpleScalar"): - listener.exitSimpleScalar(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitSimpleScalar"): return visitor.visitSimpleScalar(self) @@ -12960,14 +11754,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_joinClauseWithoutUsing - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterJoinClauseWithoutUsing"): - listener.enterJoinClauseWithoutUsing(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitJoinClauseWithoutUsing"): - listener.exitJoinClauseWithoutUsing(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitJoinClauseWithoutUsing"): 
return visitor.visitJoinClauseWithoutUsing(self) @@ -13034,14 +11820,6 @@ def componentID(self, i: int = None): def getRuleIndex(self): return Parser.RULE_joinClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterJoinClause"): - listener.enterJoinClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitJoinClause"): - listener.exitJoinClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitJoinClause"): return visitor.visitJoinClause(self) @@ -13116,14 +11894,6 @@ def alias(self): def getRuleIndex(self): return Parser.RULE_joinClauseItem - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterJoinClauseItem"): - listener.enterJoinClauseItem(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitJoinClauseItem"): - listener.exitJoinClauseItem(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitJoinClauseItem"): return visitor.visitJoinClauseItem(self) @@ -13184,14 +11954,6 @@ def renameClause(self): def getRuleIndex(self): return Parser.RULE_joinBody - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterJoinBody"): - listener.enterJoinBody(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitJoinBody"): - listener.exitJoinBody(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitJoinBody"): return visitor.visitJoinBody(self) @@ -13269,14 +12031,6 @@ def expr(self): def getRuleIndex(self): return Parser.RULE_joinApplyClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterJoinApplyClause"): - listener.enterJoinApplyClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitJoinApplyClause"): - listener.exitJoinApplyClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitJoinApplyClause"): return 
visitor.visitJoinApplyClause(self) @@ -13329,14 +12083,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_partitionByClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterPartitionByClause"): - listener.enterPartitionByClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitPartitionByClause"): - listener.exitPartitionByClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitPartitionByClause"): return visitor.visitPartitionByClause(self) @@ -13404,14 +12150,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_orderByClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterOrderByClause"): - listener.enterOrderByClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitOrderByClause"): - listener.exitOrderByClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitOrderByClause"): return visitor.visitOrderByClause(self) @@ -13470,14 +12208,6 @@ def DESC(self): def getRuleIndex(self): return Parser.RULE_orderByItem - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterOrderByItem"): - listener.enterOrderByItem(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitOrderByItem"): - listener.exitOrderByItem(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitOrderByItem"): return visitor.visitOrderByItem(self) @@ -13546,14 +12276,6 @@ def POINTS(self): def getRuleIndex(self): return Parser.RULE_windowingClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterWindowingClause"): - listener.enterWindowingClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitWindowingClause"): - listener.exitWindowingClause(self) - def accept(self, visitor: ParseTreeVisitor): if 
hasattr(visitor, "visitWindowingClause"): return visitor.visitWindowingClause(self) @@ -13617,14 +12339,6 @@ def PLUS(self): def getRuleIndex(self): return Parser.RULE_signedInteger - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterSignedInteger"): - listener.enterSignedInteger(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSignedInteger"): - listener.exitSignedInteger(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitSignedInteger"): return visitor.visitSignedInteger(self) @@ -13679,14 +12393,6 @@ def PLUS(self): def getRuleIndex(self): return Parser.RULE_signedNumber - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterSignedNumber"): - listener.enterSignedNumber(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSignedNumber"): - listener.exitSignedNumber(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitSignedNumber"): return visitor.visitSignedNumber(self) @@ -13754,14 +12460,6 @@ def UNBOUNDED(self): def getRuleIndex(self): return Parser.RULE_limitClauseItem - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterLimitClauseItem"): - listener.enterLimitClauseItem(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitLimitClauseItem"): - listener.exitLimitClauseItem(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitLimitClauseItem"): return visitor.visitLimitClauseItem(self) @@ -13859,26 +12557,14 @@ def TIME_AGG(self): def LPAREN(self): return self.getToken(Parser.LPAREN, 0) - def STRING_CONSTANT(self, i: int = None): - if i is None: - return self.getTokens(Parser.STRING_CONSTANT) - else: - return self.getToken(Parser.STRING_CONSTANT, i) + def STRING_CONSTANT(self): + return self.getToken(Parser.STRING_CONSTANT, 0) def RPAREN(self): return self.getToken(Parser.RPAREN, 0) - def 
COMMA(self, i: int = None): - if i is None: - return self.getTokens(Parser.COMMA) - else: - return self.getToken(Parser.COMMA, i) - - def optionalExpr(self): - return self.getTypedRuleContext(Parser.OptionalExprContext, 0) - - def OPTIONAL(self): - return self.getToken(Parser.OPTIONAL, 0) + def COMMA(self): + return self.getToken(Parser.COMMA, 0) def FIRST(self): return self.getToken(Parser.FIRST, 0) @@ -13886,14 +12572,6 @@ def FIRST(self): def LAST(self): return self.getToken(Parser.LAST, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterGroupAll"): - listener.enterGroupAll(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitGroupAll"): - listener.exitGroupAll(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitGroupAll"): return visitor.visitGroupAll(self) @@ -13948,14 +12626,6 @@ def FIRST(self): def LAST(self): return self.getToken(Parser.LAST, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterGroupByOrExcept"): - listener.enterGroupByOrExcept(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitGroupByOrExcept"): - listener.exitGroupByOrExcept(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitGroupByOrExcept"): return visitor.visitGroupByOrExcept(self) @@ -13968,9 +12638,9 @@ def groupingClause(self): self.enterRule(localctx, 112, self.RULE_groupingClause) self._la = 0 # Token type try: - self.state = 1523 + self.state = 1515 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 147, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 145, self._ctx) if la_ == 1: localctx = Parser.GroupByOrExceptContext(self, localctx) self.enterOuterAlt(localctx, 1) @@ -14035,7 +12705,7 @@ def groupingClause(self): self.match(Parser.GROUP) self.state = 1504 self.match(Parser.ALL) - self.state = 1521 + self.state = 1513 self._errHandler.sync(self) _la = 
self._input.LA(1) if _la == Parser.TIME_AGG: @@ -14047,34 +12717,11 @@ def groupingClause(self): self.match(Parser.STRING_CONSTANT) self.state = 1510 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 143, self._ctx) - if la_ == 1: - self.state = 1508 - self.match(Parser.COMMA) - self.state = 1509 - _la = self._input.LA(1) - if not (_la == Parser.OPTIONAL or _la == Parser.STRING_CONSTANT): - self._errHandler.recoverInline(self) - else: - self._errHandler.reportMatch(self) - self.consume() - - self.state = 1514 - self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 144, self._ctx) - if la_ == 1: - self.state = 1512 - self.match(Parser.COMMA) - self.state = 1513 - self.optionalExpr() - - self.state = 1518 - self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.COMMA: - self.state = 1516 + self.state = 1508 self.match(Parser.COMMA) - self.state = 1517 + self.state = 1509 localctx.delim = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.FIRST or _la == Parser.LAST): @@ -14083,7 +12730,7 @@ def groupingClause(self): self._errHandler.reportMatch(self) self.consume() - self.state = 1520 + self.state = 1512 self.match(Parser.RPAREN) pass @@ -14112,14 +12759,6 @@ def exprComponent(self): def getRuleIndex(self): return Parser.RULE_havingClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterHavingClause"): - listener.enterHavingClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitHavingClause"): - listener.exitHavingClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitHavingClause"): return visitor.visitHavingClause(self) @@ -14132,9 +12771,9 @@ def havingClause(self): self.enterRule(localctx, 114, self.RULE_havingClause) try: self.enterOuterAlt(localctx, 1) - self.state = 1525 + self.state = 1517 self.match(Parser.HAVING) - self.state = 1526 + self.state = 1518 self.exprComponent(0) 
except RecognitionException as re: localctx.exception = re @@ -14166,14 +12805,6 @@ def scalarItem(self): def getRuleIndex(self): return Parser.RULE_parameterItem - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterParameterItem"): - listener.enterParameterItem(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitParameterItem"): - listener.exitParameterItem(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitParameterItem"): return visitor.visitParameterItem(self) @@ -14187,17 +12818,17 @@ def parameterItem(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1528 + self.state = 1520 self.varID() - self.state = 1529 + self.state = 1521 self.inputParameterType() - self.state = 1532 + self.state = 1524 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.DEFAULT: - self.state = 1530 + self.state = 1522 self.match(Parser.DEFAULT) - self.state = 1531 + self.state = 1523 self.scalarItem() except RecognitionException as re: @@ -14227,14 +12858,6 @@ def componentType(self): def getRuleIndex(self): return Parser.RULE_outputParameterType - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterOutputParameterType"): - listener.enterOutputParameterType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitOutputParameterType"): - listener.exitOutputParameterType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitOutputParameterType"): return visitor.visitOutputParameterType(self) @@ -14246,7 +12869,7 @@ def outputParameterType(self): localctx = Parser.OutputParameterTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 118, self.RULE_outputParameterType) try: - self.state = 1537 + self.state = 1529 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -14262,12 +12885,12 @@ def outputParameterType(self): Parser.IDENTIFIER, ]: 
self.enterOuterAlt(localctx, 1) - self.state = 1534 + self.state = 1526 self.scalarType() pass elif token in [Parser.DATASET]: self.enterOuterAlt(localctx, 2) - self.state = 1535 + self.state = 1527 self.datasetType() pass elif token in [ @@ -14278,7 +12901,7 @@ def outputParameterType(self): Parser.COMPONENT, ]: self.enterOuterAlt(localctx, 3) - self.state = 1536 + self.state = 1528 self.componentType() pass else: @@ -14308,14 +12931,6 @@ def scalarType(self): def getRuleIndex(self): return Parser.RULE_outputParameterTypeComponent - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterOutputParameterTypeComponent"): - listener.enterOutputParameterTypeComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitOutputParameterTypeComponent"): - listener.exitOutputParameterTypeComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitOutputParameterTypeComponent"): return visitor.visitOutputParameterTypeComponent(self) @@ -14327,7 +12942,7 @@ def outputParameterTypeComponent(self): localctx = Parser.OutputParameterTypeComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 120, self.RULE_outputParameterTypeComponent) try: - self.state = 1541 + self.state = 1533 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -14338,7 +12953,7 @@ def outputParameterTypeComponent(self): Parser.COMPONENT, ]: self.enterOuterAlt(localctx, 1) - self.state = 1539 + self.state = 1531 self.componentType() pass elif token in [ @@ -14354,7 +12969,7 @@ def outputParameterTypeComponent(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 2) - self.state = 1540 + self.state = 1532 self.scalarType() pass else: @@ -14393,14 +13008,6 @@ def componentType(self): def getRuleIndex(self): return Parser.RULE_inputParameterType - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterInputParameterType"): - listener.enterInputParameterType(self) - 
- def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitInputParameterType"): - listener.exitInputParameterType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitInputParameterType"): return visitor.visitInputParameterType(self) @@ -14412,7 +13019,7 @@ def inputParameterType(self): localctx = Parser.InputParameterTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 122, self.RULE_inputParameterType) try: - self.state = 1548 + self.state = 1540 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -14428,17 +13035,17 @@ def inputParameterType(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 1) - self.state = 1543 + self.state = 1535 self.scalarType() pass elif token in [Parser.DATASET]: self.enterOuterAlt(localctx, 2) - self.state = 1544 + self.state = 1536 self.datasetType() pass elif token in [Parser.SET]: self.enterOuterAlt(localctx, 3) - self.state = 1545 + self.state = 1537 self.scalarSetType() pass elif token in [ @@ -14451,7 +13058,7 @@ def inputParameterType(self): Parser.HIERARCHICAL_ON_VAR, ]: self.enterOuterAlt(localctx, 4) - self.state = 1546 + self.state = 1538 self.rulesetType() pass elif token in [ @@ -14462,7 +13069,7 @@ def inputParameterType(self): Parser.COMPONENT, ]: self.enterOuterAlt(localctx, 5) - self.state = 1547 + self.state = 1539 self.componentType() pass else: @@ -14495,14 +13102,6 @@ def hrRuleset(self): def getRuleIndex(self): return Parser.RULE_rulesetType - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRulesetType"): - listener.enterRulesetType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRulesetType"): - listener.exitRulesetType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRulesetType"): return visitor.visitRulesetType(self) @@ -14514,17 +13113,17 @@ def rulesetType(self): localctx = Parser.RulesetTypeContext(self, self._ctx, 
self.state) self.enterRule(localctx, 124, self.RULE_rulesetType) try: - self.state = 1553 + self.state = 1545 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.RULESET]: self.enterOuterAlt(localctx, 1) - self.state = 1550 + self.state = 1542 self.match(Parser.RULESET) pass elif token in [Parser.DATAPOINT, Parser.DATAPOINT_ON_VD, Parser.DATAPOINT_ON_VAR]: self.enterOuterAlt(localctx, 2) - self.state = 1551 + self.state = 1543 self.dpRuleset() pass elif token in [ @@ -14533,7 +13132,7 @@ def rulesetType(self): Parser.HIERARCHICAL_ON_VAR, ]: self.enterOuterAlt(localctx, 3) - self.state = 1552 + self.state = 1544 self.hrRuleset() pass else: @@ -14572,14 +13171,6 @@ def NOT(self): def getRuleIndex(self): return Parser.RULE_scalarType - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterScalarType"): - listener.enterScalarType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitScalarType"): - listener.exitScalarType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitScalarType"): return visitor.visitScalarType(self) @@ -14593,7 +13184,7 @@ def scalarType(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1557 + self.state = 1549 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -14607,35 +13198,35 @@ def scalarType(self): Parser.DURATION, Parser.SCALAR, ]: - self.state = 1555 + self.state = 1547 self.basicScalarType() pass elif token in [Parser.IDENTIFIER]: - self.state = 1556 + self.state = 1548 self.valueDomainName() pass else: raise NoViableAltException(self) - self.state = 1560 + self.state = 1552 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.QLPAREN or _la == Parser.GLPAREN: - self.state = 1559 + self.state = 1551 self.scalarTypeConstraint() - self.state = 1566 + self.state = 1558 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.NOT or _la == 
Parser.NULL_CONSTANT: - self.state = 1563 + self.state = 1555 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.NOT: - self.state = 1562 + self.state = 1554 self.match(Parser.NOT) - self.state = 1565 + self.state = 1557 self.match(Parser.NULL_CONSTANT) except RecognitionException as re: @@ -14668,14 +13259,6 @@ def MT(self): def getRuleIndex(self): return Parser.RULE_componentType - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterComponentType"): - listener.enterComponentType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitComponentType"): - listener.exitComponentType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitComponentType"): return visitor.visitComponentType(self) @@ -14689,17 +13272,17 @@ def componentType(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1568 + self.state = 1560 self.componentRole() - self.state = 1573 + self.state = 1565 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.LT: - self.state = 1569 + self.state = 1561 self.match(Parser.LT) - self.state = 1570 + self.state = 1562 self.scalarType() - self.state = 1571 + self.state = 1563 self.match(Parser.MT) except RecognitionException as re: @@ -14741,14 +13324,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_datasetType - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDatasetType"): - listener.enterDatasetType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDatasetType"): - listener.exitDatasetType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDatasetType"): return visitor.visitDatasetType(self) @@ -14762,29 +13337,29 @@ def datasetType(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1575 + self.state = 1567 self.match(Parser.DATASET) - self.state = 
1587 + self.state = 1579 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GLPAREN: - self.state = 1576 + self.state = 1568 self.match(Parser.GLPAREN) - self.state = 1577 + self.state = 1569 self.compConstraint() - self.state = 1582 + self.state = 1574 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1578 + self.state = 1570 self.match(Parser.COMMA) - self.state = 1579 + self.state = 1571 self.compConstraint() - self.state = 1584 + self.state = 1576 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1585 + self.state = 1577 self.match(Parser.GRPAREN) except RecognitionException as re: @@ -14811,14 +13386,6 @@ def scalarType(self): def getRuleIndex(self): return Parser.RULE_evalDatasetType - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterEvalDatasetType"): - listener.enterEvalDatasetType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitEvalDatasetType"): - listener.exitEvalDatasetType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitEvalDatasetType"): return visitor.visitEvalDatasetType(self) @@ -14830,12 +13397,12 @@ def evalDatasetType(self): localctx = Parser.EvalDatasetTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 132, self.RULE_evalDatasetType) try: - self.state = 1591 + self.state = 1583 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.DATASET]: self.enterOuterAlt(localctx, 1) - self.state = 1589 + self.state = 1581 self.datasetType() pass elif token in [ @@ -14851,7 +13418,7 @@ def evalDatasetType(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 2) - self.state = 1590 + self.state = 1582 self.scalarType() pass else: @@ -14887,14 +13454,6 @@ def MT(self): def getRuleIndex(self): return Parser.RULE_scalarSetType - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterScalarSetType"): - 
listener.enterScalarSetType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitScalarSetType"): - listener.exitScalarSetType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitScalarSetType"): return visitor.visitScalarSetType(self) @@ -14908,17 +13467,17 @@ def scalarSetType(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1593 + self.state = 1585 self.match(Parser.SET) - self.state = 1598 + self.state = 1590 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.LT: - self.state = 1594 + self.state = 1586 self.match(Parser.LT) - self.state = 1595 + self.state = 1587 self.scalarType() - self.state = 1596 + self.state = 1588 self.match(Parser.MT) except RecognitionException as re: @@ -14968,14 +13527,6 @@ def MUL(self, i: int = None): else: return self.getToken(Parser.MUL, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDataPointVd"): - listener.enterDataPointVd(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDataPointVd"): - listener.exitDataPointVd(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDataPointVd"): return visitor.visitDataPointVd(self) @@ -15008,14 +13559,6 @@ def MUL(self, i: int = None): else: return self.getToken(Parser.MUL, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterDataPointVar"): - listener.enterDataPointVar(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDataPointVar"): - listener.exitDataPointVar(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDataPointVar"): return visitor.visitDataPointVar(self) @@ -15030,14 +13573,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.DpRules def DATAPOINT(self): return self.getToken(Parser.DATAPOINT, 0) - def enterRule(self, listener: ParseTreeListener): - if 
hasattr(listener, "enterDataPoint"): - listener.enterDataPoint(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitDataPoint"): - listener.exitDataPoint(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitDataPoint"): return visitor.visitDataPoint(self) @@ -15050,70 +13585,70 @@ def dpRuleset(self): self.enterRule(localctx, 136, self.RULE_dpRuleset) self._la = 0 # Token type try: - self.state = 1629 + self.state = 1621 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.DATAPOINT]: localctx = Parser.DataPointContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1600 + self.state = 1592 self.match(Parser.DATAPOINT) pass elif token in [Parser.DATAPOINT_ON_VD]: localctx = Parser.DataPointVdContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1601 + self.state = 1593 self.match(Parser.DATAPOINT_ON_VD) - self.state = 1613 + self.state = 1605 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GLPAREN: - self.state = 1602 + self.state = 1594 self.match(Parser.GLPAREN) - self.state = 1603 + self.state = 1595 self.valueDomainName() - self.state = 1608 + self.state = 1600 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.MUL: - self.state = 1604 + self.state = 1596 self.match(Parser.MUL) - self.state = 1605 + self.state = 1597 self.valueDomainName() - self.state = 1610 + self.state = 1602 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1611 + self.state = 1603 self.match(Parser.GRPAREN) pass elif token in [Parser.DATAPOINT_ON_VAR]: localctx = Parser.DataPointVarContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 1615 + self.state = 1607 self.match(Parser.DATAPOINT_ON_VAR) - self.state = 1627 + self.state = 1619 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GLPAREN: - self.state = 1616 + self.state = 1608 self.match(Parser.GLPAREN) - self.state = 1617 + 
self.state = 1609 self.varID() - self.state = 1622 + self.state = 1614 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.MUL: - self.state = 1618 + self.state = 1610 self.match(Parser.MUL) - self.state = 1619 + self.state = 1611 self.varID() - self.state = 1624 + self.state = 1616 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1625 + self.state = 1617 self.match(Parser.GRPAREN) pass @@ -15177,14 +13712,6 @@ def MUL(self, i: int = None): else: return self.getToken(Parser.MUL, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterHrRulesetVdType"): - listener.enterHrRulesetVdType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitHrRulesetVdType"): - listener.exitHrRulesetVdType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitHrRulesetVdType"): return visitor.visitHrRulesetVdType(self) @@ -15224,14 +13751,6 @@ def MUL(self, i: int = None): else: return self.getToken(Parser.MUL, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterHrRulesetVarType"): - listener.enterHrRulesetVarType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitHrRulesetVarType"): - listener.exitHrRulesetVarType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitHrRulesetVarType"): return visitor.visitHrRulesetVarType(self) @@ -15246,14 +13765,6 @@ def __init__(self, parser, ctx: ParserRuleContext): # actually a Parser.HrRules def HIERARCHICAL(self): return self.getToken(Parser.HIERARCHICAL, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterHrRulesetType"): - listener.enterHrRulesetType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitHrRulesetType"): - listener.exitHrRulesetType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitHrRulesetType"): return 
visitor.visitHrRulesetType(self) @@ -15266,92 +13777,92 @@ def hrRuleset(self): self.enterRule(localctx, 138, self.RULE_hrRuleset) self._la = 0 # Token type try: - self.state = 1671 + self.state = 1663 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.HIERARCHICAL]: localctx = Parser.HrRulesetTypeContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1631 + self.state = 1623 self.match(Parser.HIERARCHICAL) pass elif token in [Parser.HIERARCHICAL_ON_VD]: localctx = Parser.HrRulesetVdTypeContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1632 + self.state = 1624 self.match(Parser.HIERARCHICAL_ON_VD) - self.state = 1649 + self.state = 1641 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GLPAREN: - self.state = 1633 + self.state = 1625 self.match(Parser.GLPAREN) - self.state = 1634 + self.state = 1626 localctx.vdName = self.match(Parser.IDENTIFIER) - self.state = 1646 + self.state = 1638 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.LPAREN: - self.state = 1635 + self.state = 1627 self.match(Parser.LPAREN) - self.state = 1636 + self.state = 1628 self.valueDomainName() - self.state = 1641 + self.state = 1633 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.MUL: - self.state = 1637 + self.state = 1629 self.match(Parser.MUL) - self.state = 1638 + self.state = 1630 self.valueDomainName() - self.state = 1643 + self.state = 1635 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1644 + self.state = 1636 self.match(Parser.RPAREN) - self.state = 1648 + self.state = 1640 self.match(Parser.GRPAREN) pass elif token in [Parser.HIERARCHICAL_ON_VAR]: localctx = Parser.HrRulesetVarTypeContext(self, localctx) self.enterOuterAlt(localctx, 3) - self.state = 1651 + self.state = 1643 self.match(Parser.HIERARCHICAL_ON_VAR) - self.state = 1669 + self.state = 1661 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.GLPAREN: - 
self.state = 1652 + self.state = 1644 self.match(Parser.GLPAREN) - self.state = 1653 + self.state = 1645 localctx.varName = self.varID() - self.state = 1665 + self.state = 1657 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.LPAREN: - self.state = 1654 + self.state = 1646 self.match(Parser.LPAREN) - self.state = 1655 + self.state = 1647 self.varID() - self.state = 1660 + self.state = 1652 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.MUL: - self.state = 1656 + self.state = 1648 self.match(Parser.MUL) - self.state = 1657 + self.state = 1649 self.varID() - self.state = 1662 + self.state = 1654 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1663 + self.state = 1655 self.match(Parser.RPAREN) - self.state = 1667 + self.state = 1659 self.match(Parser.GRPAREN) pass @@ -15379,14 +13890,6 @@ def IDENTIFIER(self): def getRuleIndex(self): return Parser.RULE_valueDomainName - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterValueDomainName"): - listener.enterValueDomainName(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitValueDomainName"): - listener.exitValueDomainName(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitValueDomainName"): return visitor.visitValueDomainName(self) @@ -15399,7 +13902,7 @@ def valueDomainName(self): self.enterRule(localctx, 140, self.RULE_valueDomainName) try: self.enterOuterAlt(localctx, 1) - self.state = 1673 + self.state = 1665 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -15422,14 +13925,6 @@ def IDENTIFIER(self): def getRuleIndex(self): return Parser.RULE_rulesetID - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRulesetID"): - listener.enterRulesetID(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRulesetID"): - listener.exitRulesetID(self) - def accept(self, 
visitor: ParseTreeVisitor): if hasattr(visitor, "visitRulesetID"): return visitor.visitRulesetID(self) @@ -15442,7 +13937,7 @@ def rulesetID(self): self.enterRule(localctx, 142, self.RULE_rulesetID) try: self.enterOuterAlt(localctx, 1) - self.state = 1675 + self.state = 1667 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -15480,14 +13975,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_rulesetSignature - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRulesetSignature"): - listener.enterRulesetSignature(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRulesetSignature"): - listener.exitRulesetSignature(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRulesetSignature"): return visitor.visitRulesetSignature(self) @@ -15501,24 +13988,24 @@ def rulesetSignature(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1677 + self.state = 1669 _la = self._input.LA(1) if not (_la == Parser.VALUE_DOMAIN or _la == Parser.VARIABLE): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 1678 + self.state = 1670 self.signature() - self.state = 1683 + self.state = 1675 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1679 + self.state = 1671 self.match(Parser.COMMA) - self.state = 1680 + self.state = 1672 self.signature() - self.state = 1685 + self.state = 1677 self._errHandler.sync(self) _la = self._input.LA(1) @@ -15549,14 +14036,6 @@ def alias(self): def getRuleIndex(self): return Parser.RULE_signature - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterSignature"): - listener.enterSignature(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSignature"): - listener.exitSignature(self) - def accept(self, visitor: 
ParseTreeVisitor): if hasattr(visitor, "visitSignature"): return visitor.visitSignature(self) @@ -15570,15 +14049,15 @@ def signature(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1686 + self.state = 1678 self.varID() - self.state = 1689 + self.state = 1681 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.AS: - self.state = 1687 + self.state = 1679 self.match(Parser.AS) - self.state = 1688 + self.state = 1680 self.alias() except RecognitionException as re: @@ -15611,14 +14090,6 @@ def EOL(self, i: int = None): def getRuleIndex(self): return Parser.RULE_ruleClauseDatapoint - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRuleClauseDatapoint"): - listener.enterRuleClauseDatapoint(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRuleClauseDatapoint"): - listener.exitRuleClauseDatapoint(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRuleClauseDatapoint"): return visitor.visitRuleClauseDatapoint(self) @@ -15632,17 +14103,17 @@ def ruleClauseDatapoint(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1691 + self.state = 1683 self.ruleItemDatapoint() - self.state = 1696 + self.state = 1688 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.EOL: - self.state = 1692 + self.state = 1684 self.match(Parser.EOL) - self.state = 1693 + self.state = 1685 self.ruleItemDatapoint() - self.state = 1698 + self.state = 1690 self._errHandler.sync(self) _la = self._input.LA(1) @@ -15691,14 +14162,6 @@ def IDENTIFIER(self): def getRuleIndex(self): return Parser.RULE_ruleItemDatapoint - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRuleItemDatapoint"): - listener.enterRuleItemDatapoint(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRuleItemDatapoint"): - listener.exitRuleItemDatapoint(self) - def 
accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRuleItemDatapoint"): return visitor.visitRuleItemDatapoint(self) @@ -15712,40 +14175,40 @@ def ruleItemDatapoint(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1701 + self.state = 1693 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 177, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 175, self._ctx) if la_ == 1: - self.state = 1699 + self.state = 1691 localctx.ruleName = self.match(Parser.IDENTIFIER) - self.state = 1700 + self.state = 1692 self.match(Parser.COLON) - self.state = 1707 + self.state = 1699 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.WHEN: - self.state = 1703 + self.state = 1695 self.match(Parser.WHEN) - self.state = 1704 + self.state = 1696 localctx.antecedentContiditon = self.exprComponent(0) - self.state = 1705 + self.state = 1697 self.match(Parser.THEN) - self.state = 1709 + self.state = 1701 localctx.consequentCondition = self.exprComponent(0) - self.state = 1711 + self.state = 1703 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ERRORCODE: - self.state = 1710 + self.state = 1702 self.erCode() - self.state = 1714 + self.state = 1706 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ERRORLEVEL: - self.state = 1713 + self.state = 1705 self.erLevel() except RecognitionException as re: @@ -15778,14 +14241,6 @@ def EOL(self, i: int = None): def getRuleIndex(self): return Parser.RULE_ruleClauseHierarchical - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRuleClauseHierarchical"): - listener.enterRuleClauseHierarchical(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRuleClauseHierarchical"): - listener.exitRuleClauseHierarchical(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRuleClauseHierarchical"): return 
visitor.visitRuleClauseHierarchical(self) @@ -15799,17 +14254,17 @@ def ruleClauseHierarchical(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1716 + self.state = 1708 self.ruleItemHierarchical() - self.state = 1721 + self.state = 1713 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.EOL: - self.state = 1717 + self.state = 1709 self.match(Parser.EOL) - self.state = 1718 + self.state = 1710 self.ruleItemHierarchical() - self.state = 1723 + self.state = 1715 self._errHandler.sync(self) _la = self._input.LA(1) @@ -15847,14 +14302,6 @@ def IDENTIFIER(self): def getRuleIndex(self): return Parser.RULE_ruleItemHierarchical - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRuleItemHierarchical"): - listener.enterRuleItemHierarchical(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRuleItemHierarchical"): - listener.exitRuleItemHierarchical(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRuleItemHierarchical"): return visitor.visitRuleItemHierarchical(self) @@ -15868,29 +14315,29 @@ def ruleItemHierarchical(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1726 + self.state = 1718 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 182, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 180, self._ctx) if la_ == 1: - self.state = 1724 + self.state = 1716 localctx.ruleName = self.match(Parser.IDENTIFIER) - self.state = 1725 + self.state = 1717 self.match(Parser.COLON) - self.state = 1728 + self.state = 1720 self.codeItemRelation() - self.state = 1730 + self.state = 1722 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ERRORCODE: - self.state = 1729 + self.state = 1721 self.erCode() - self.state = 1733 + self.state = 1725 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.ERRORLEVEL: - self.state = 1732 + 
self.state = 1724 self.erLevel() except RecognitionException as re: @@ -15929,14 +14376,6 @@ def valueDomainSignature(self): def getRuleIndex(self): return Parser.RULE_hierRuleSignature - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterHierRuleSignature"): - listener.enterHierRuleSignature(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitHierRuleSignature"): - listener.exitHierRuleSignature(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitHierRuleSignature"): return visitor.visitHierRuleSignature(self) @@ -15950,25 +14389,25 @@ def hierRuleSignature(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1735 + self.state = 1727 _la = self._input.LA(1) if not (_la == Parser.VALUE_DOMAIN or _la == Parser.VARIABLE): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) self.consume() - self.state = 1738 + self.state = 1730 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.CONDITION: - self.state = 1736 + self.state = 1728 self.match(Parser.CONDITION) - self.state = 1737 + self.state = 1729 self.valueDomainSignature() - self.state = 1740 + self.state = 1732 self.match(Parser.RULE) - self.state = 1741 + self.state = 1733 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -16000,14 +14439,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_valueDomainSignature - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterValueDomainSignature"): - listener.enterValueDomainSignature(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitValueDomainSignature"): - listener.exitValueDomainSignature(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitValueDomainSignature"): return visitor.visitValueDomainSignature(self) @@ -16021,17 +14452,17 @@ def 
valueDomainSignature(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1743 + self.state = 1735 self.signature() - self.state = 1748 + self.state = 1740 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1744 + self.state = 1736 self.match(Parser.COMMA) - self.state = 1745 + self.state = 1737 self.signature() - self.state = 1750 + self.state = 1742 self._errHandler.sync(self) _la = self._input.LA(1) @@ -16075,14 +14506,6 @@ def comparisonOperand(self): def getRuleIndex(self): return Parser.RULE_codeItemRelation - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCodeItemRelation"): - listener.enterCodeItemRelation(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCodeItemRelation"): - listener.exitCodeItemRelation(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCodeItemRelation"): return visitor.visitCodeItemRelation(self) @@ -16096,20 +14519,20 @@ def codeItemRelation(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1755 + self.state = 1747 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.WHEN: - self.state = 1751 + self.state = 1743 self.match(Parser.WHEN) - self.state = 1752 + self.state = 1744 self.exprComponent(0) - self.state = 1753 + self.state = 1745 self.match(Parser.THEN) - self.state = 1757 + self.state = 1749 localctx.codetemRef = self.valueDomainValue() - self.state = 1759 + self.state = 1751 self._errHandler.sync(self) _la = self._input.LA(1) if ((_la) & ~0x3F) == 0 and ( @@ -16123,12 +14546,12 @@ def codeItemRelation(self): | (1 << Parser.LE) ) ) != 0: - self.state = 1758 + self.state = 1750 self.comparisonOperand() - self.state = 1761 + self.state = 1753 self.codeItemRelationClause() - self.state = 1765 + self.state = 1757 self._errHandler.sync(self) _la = self._input.LA(1) while ( @@ -16147,9 +14570,9 @@ def 
codeItemRelation(self): != 0 ) ): - self.state = 1762 + self.state = 1754 self.codeItemRelationClause() - self.state = 1767 + self.state = 1759 self._errHandler.sync(self) _la = self._input.LA(1) @@ -16192,14 +14615,6 @@ def MINUS(self): def getRuleIndex(self): return Parser.RULE_codeItemRelationClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCodeItemRelationClause"): - listener.enterCodeItemRelationClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCodeItemRelationClause"): - listener.exitCodeItemRelationClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCodeItemRelationClause"): return visitor.visitCodeItemRelationClause(self) @@ -16213,11 +14628,11 @@ def codeItemRelationClause(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1769 + self.state = 1761 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 190, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 188, self._ctx) if la_ == 1: - self.state = 1768 + self.state = 1760 localctx.opAdd = self._input.LT(1) _la = self._input.LA(1) if not (_la == Parser.PLUS or _la == Parser.MINUS): @@ -16226,17 +14641,17 @@ def codeItemRelationClause(self): self._errHandler.reportMatch(self) self.consume() - self.state = 1771 + self.state = 1763 localctx.rightCodeItem = self.valueDomainValue() - self.state = 1776 + self.state = 1768 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.QLPAREN: - self.state = 1772 + self.state = 1764 self.match(Parser.QLPAREN) - self.state = 1773 + self.state = 1765 localctx.rightCondition = self.exprComponent(0) - self.state = 1774 + self.state = 1766 self.match(Parser.QRPAREN) except RecognitionException as re: @@ -16266,14 +14681,6 @@ def signedNumber(self): def getRuleIndex(self): return Parser.RULE_valueDomainValue - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, 
"enterValueDomainValue"): - listener.enterValueDomainValue(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitValueDomainValue"): - listener.exitValueDomainValue(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitValueDomainValue"): return visitor.visitValueDomainValue(self) @@ -16285,24 +14692,24 @@ def valueDomainValue(self): localctx = Parser.ValueDomainValueContext(self, self._ctx, self.state) self.enterRule(localctx, 164, self.RULE_valueDomainValue) try: - self.state = 1781 + self.state = 1773 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 192, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 190, self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 1778 + self.state = 1770 self.match(Parser.IDENTIFIER) pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 1779 + self.state = 1771 self.signedInteger() pass elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 1780 + self.state = 1772 self.signedNumber() pass @@ -16352,14 +14759,6 @@ def COMMA(self, i: int = None): else: return self.getToken(Parser.COMMA, i) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRangeConstraint"): - listener.enterRangeConstraint(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRangeConstraint"): - listener.exitRangeConstraint(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRangeConstraint"): return visitor.visitRangeConstraint(self) @@ -16382,14 +14781,6 @@ def exprComponent(self): def QRPAREN(self): return self.getToken(Parser.QRPAREN, 0) - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterConditionConstraint"): - listener.enterConditionConstraint(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitConditionConstraint"): - listener.exitConditionConstraint(self) - def 
accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitConditionConstraint"): return visitor.visitConditionConstraint(self) @@ -16402,39 +14793,39 @@ def scalarTypeConstraint(self): self.enterRule(localctx, 166, self.RULE_scalarTypeConstraint) self._la = 0 # Token type try: - self.state = 1798 + self.state = 1790 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.QLPAREN]: localctx = Parser.ConditionConstraintContext(self, localctx) self.enterOuterAlt(localctx, 1) - self.state = 1783 + self.state = 1775 self.match(Parser.QLPAREN) - self.state = 1784 + self.state = 1776 self.exprComponent(0) - self.state = 1785 + self.state = 1777 self.match(Parser.QRPAREN) pass elif token in [Parser.GLPAREN]: localctx = Parser.RangeConstraintContext(self, localctx) self.enterOuterAlt(localctx, 2) - self.state = 1787 + self.state = 1779 self.match(Parser.GLPAREN) - self.state = 1788 + self.state = 1780 self.scalarItem() - self.state = 1793 + self.state = 1785 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1789 + self.state = 1781 self.match(Parser.COMMA) - self.state = 1790 + self.state = 1782 self.scalarItem() - self.state = 1795 + self.state = 1787 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1796 + self.state = 1788 self.match(Parser.GRPAREN) pass else: @@ -16467,14 +14858,6 @@ def multModifier(self): def getRuleIndex(self): return Parser.RULE_compConstraint - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterCompConstraint"): - listener.enterCompConstraint(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitCompConstraint"): - listener.exitCompConstraint(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitCompConstraint"): return visitor.visitCompConstraint(self) @@ -16487,17 +14870,17 @@ def compConstraint(self): self.enterRule(localctx, 168, self.RULE_compConstraint) try: 
self.enterOuterAlt(localctx, 1) - self.state = 1800 + self.state = 1792 self.componentType() - self.state = 1803 + self.state = 1795 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.IDENTIFIER]: - self.state = 1801 + self.state = 1793 self.componentID() pass elif token in [Parser.OPTIONAL]: - self.state = 1802 + self.state = 1794 self.multModifier() pass else: @@ -16530,14 +14913,6 @@ def MUL(self): def getRuleIndex(self): return Parser.RULE_multModifier - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterMultModifier"): - listener.enterMultModifier(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitMultModifier"): - listener.exitMultModifier(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitMultModifier"): return visitor.visitMultModifier(self) @@ -16551,13 +14926,13 @@ def multModifier(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1805 + self.state = 1797 self.match(Parser.OPTIONAL) - self.state = 1807 + self.state = 1799 self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.PLUS or _la == Parser.MUL: - self.state = 1806 + self.state = 1798 _la = self._input.LA(1) if not (_la == Parser.PLUS or _la == Parser.MUL): self._errHandler.recoverInline(self) @@ -16592,14 +14967,6 @@ def ALL(self): def getRuleIndex(self): return Parser.RULE_validationOutput - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterValidationOutput"): - listener.enterValidationOutput(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitValidationOutput"): - listener.exitValidationOutput(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitValidationOutput"): return visitor.visitValidationOutput(self) @@ -16613,7 +14980,7 @@ def validationOutput(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1809 + 
self.state = 1801 _la = self._input.LA(1) if not (_la == Parser.ALL or _la == Parser.INVALID or _la == Parser.ALL_MEASURES): self._errHandler.recoverInline(self) @@ -16656,14 +15023,6 @@ def ALWAYS_ZERO(self): def getRuleIndex(self): return Parser.RULE_validationMode - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterValidationMode"): - listener.enterValidationMode(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitValidationMode"): - listener.exitValidationMode(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitValidationMode"): return visitor.visitValidationMode(self) @@ -16677,7 +15036,7 @@ def validationMode(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1811 + self.state = 1803 _la = self._input.LA(1) if not ( ((_la - 225) & ~0x3F) == 0 @@ -16731,14 +15090,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_conditionClause - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterConditionClause"): - listener.enterConditionClause(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitConditionClause"): - listener.exitConditionClause(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitConditionClause"): return visitor.visitConditionClause(self) @@ -16752,19 +15103,19 @@ def conditionClause(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1813 + self.state = 1805 self.match(Parser.CONDITION) - self.state = 1814 + self.state = 1806 self.componentID() - self.state = 1819 + self.state = 1811 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1815 + self.state = 1807 self.match(Parser.COMMA) - self.state = 1816 + self.state = 1808 self.componentID() - self.state = 1821 + self.state = 1813 self._errHandler.sync(self) _la = self._input.LA(1) @@ 
-16792,14 +15143,6 @@ def DATASET_PRIORITY(self): def getRuleIndex(self): return Parser.RULE_inputMode - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterInputMode"): - listener.enterInputMode(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitInputMode"): - listener.exitInputMode(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitInputMode"): return visitor.visitInputMode(self) @@ -16813,7 +15156,7 @@ def inputMode(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1822 + self.state = 1814 _la = self._input.LA(1) if not (_la == Parser.DATASET or _la == Parser.DATASET_PRIORITY): self._errHandler.recoverInline(self) @@ -16844,14 +15187,6 @@ def expr(self): def getRuleIndex(self): return Parser.RULE_imbalanceExpr - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterImbalanceExpr"): - listener.enterImbalanceExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitImbalanceExpr"): - listener.exitImbalanceExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitImbalanceExpr"): return visitor.visitImbalanceExpr(self) @@ -16864,9 +15199,9 @@ def imbalanceExpr(self): self.enterRule(localctx, 180, self.RULE_imbalanceExpr) try: self.enterOuterAlt(localctx, 1) - self.state = 1824 + self.state = 1816 self.match(Parser.IMBALANCE) - self.state = 1825 + self.state = 1817 self.expr(0) except RecognitionException as re: localctx.exception = re @@ -16895,14 +15230,6 @@ def RULE_PRIORITY(self): def getRuleIndex(self): return Parser.RULE_inputModeHierarchy - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterInputModeHierarchy"): - listener.enterInputModeHierarchy(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitInputModeHierarchy"): - listener.exitInputModeHierarchy(self) - def accept(self, visitor: 
ParseTreeVisitor): if hasattr(visitor, "visitInputModeHierarchy"): return visitor.visitInputModeHierarchy(self) @@ -16916,7 +15243,7 @@ def inputModeHierarchy(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1827 + self.state = 1819 _la = self._input.LA(1) if not (_la == Parser.DATASET or _la == Parser.RULE or _la == Parser.RULE_PRIORITY): self._errHandler.recoverInline(self) @@ -16947,14 +15274,6 @@ def ALL(self): def getRuleIndex(self): return Parser.RULE_outputModeHierarchy - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterOutputModeHierarchy"): - listener.enterOutputModeHierarchy(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitOutputModeHierarchy"): - listener.exitOutputModeHierarchy(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitOutputModeHierarchy"): return visitor.visitOutputModeHierarchy(self) @@ -16968,7 +15287,7 @@ def outputModeHierarchy(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1829 + self.state = 1821 _la = self._input.LA(1) if not (_la == Parser.ALL or _la == Parser.COMPUTED): self._errHandler.recoverInline(self) @@ -16996,14 +15315,6 @@ def IDENTIFIER(self): def getRuleIndex(self): return Parser.RULE_alias - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterAlias"): - listener.enterAlias(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitAlias"): - listener.exitAlias(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitAlias"): return visitor.visitAlias(self) @@ -17016,7 +15327,7 @@ def alias(self): self.enterRule(localctx, 186, self.RULE_alias) try: self.enterOuterAlt(localctx, 1) - self.state = 1831 + self.state = 1823 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -17039,14 +15350,6 @@ def IDENTIFIER(self): def getRuleIndex(self): 
return Parser.RULE_varID - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterVarID"): - listener.enterVarID(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitVarID"): - listener.exitVarID(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitVarID"): return visitor.visitVarID(self) @@ -17059,7 +15362,7 @@ def varID(self): self.enterRule(localctx, 188, self.RULE_varID) try: self.enterOuterAlt(localctx, 1) - self.state = 1833 + self.state = 1825 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -17082,14 +15385,6 @@ def IDENTIFIER(self): def getRuleIndex(self): return Parser.RULE_simpleComponentId - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterSimpleComponentId"): - listener.enterSimpleComponentId(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitSimpleComponentId"): - listener.exitSimpleComponentId(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitSimpleComponentId"): return visitor.visitSimpleComponentId(self) @@ -17102,7 +15397,7 @@ def simpleComponentId(self): self.enterRule(localctx, 190, self.RULE_simpleComponentId) try: self.enterOuterAlt(localctx, 1) - self.state = 1835 + self.state = 1827 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -17131,14 +15426,6 @@ def MEMBERSHIP(self): def getRuleIndex(self): return Parser.RULE_componentID - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterComponentID"): - listener.enterComponentID(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitComponentID"): - listener.exitComponentID(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitComponentID"): return visitor.visitComponentID(self) @@ -17151,15 +15438,15 @@ def componentID(self): self.enterRule(localctx, 
192, self.RULE_componentID) try: self.enterOuterAlt(localctx, 1) - self.state = 1837 + self.state = 1829 self.match(Parser.IDENTIFIER) - self.state = 1840 + self.state = 1832 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 198, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 196, self._ctx) if la_ == 1: - self.state = 1838 + self.state = 1830 self.match(Parser.MEMBERSHIP) - self.state = 1839 + self.state = 1831 self.match(Parser.IDENTIFIER) except RecognitionException as re: @@ -17198,14 +15485,6 @@ def COMMA(self, i: int = None): def getRuleIndex(self): return Parser.RULE_lists - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterLists"): - listener.enterLists(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitLists"): - listener.exitLists(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitLists"): return visitor.visitLists(self) @@ -17219,23 +15498,23 @@ def lists(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1842 + self.state = 1834 self.match(Parser.GLPAREN) - self.state = 1843 + self.state = 1835 self.scalarItem() - self.state = 1848 + self.state = 1840 self._errHandler.sync(self) _la = self._input.LA(1) while _la == Parser.COMMA: - self.state = 1844 + self.state = 1836 self.match(Parser.COMMA) - self.state = 1845 + self.state = 1837 self.scalarItem() - self.state = 1850 + self.state = 1842 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 1851 + self.state = 1843 self.match(Parser.GRPAREN) except RecognitionException as re: localctx.exception = re @@ -17261,14 +15540,6 @@ def constant(self): def getRuleIndex(self): return Parser.RULE_erCode - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterErCode"): - listener.enterErCode(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitErCode"): - 
listener.exitErCode(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitErCode"): return visitor.visitErCode(self) @@ -17281,9 +15552,9 @@ def erCode(self): self.enterRule(localctx, 196, self.RULE_erCode) try: self.enterOuterAlt(localctx, 1) - self.state = 1853 + self.state = 1845 self.match(Parser.ERRORCODE) - self.state = 1854 + self.state = 1846 self.constant() except RecognitionException as re: localctx.exception = re @@ -17309,14 +15580,6 @@ def constant(self): def getRuleIndex(self): return Parser.RULE_erLevel - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterErLevel"): - listener.enterErLevel(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitErLevel"): - listener.exitErLevel(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitErLevel"): return visitor.visitErLevel(self) @@ -17329,9 +15592,9 @@ def erLevel(self): self.enterRule(localctx, 198, self.RULE_erLevel) try: self.enterOuterAlt(localctx, 1) - self.state = 1856 + self.state = 1848 self.match(Parser.ERRORLEVEL) - self.state = 1857 + self.state = 1849 self.constant() except RecognitionException as re: localctx.exception = re @@ -17369,14 +15632,6 @@ def NEQ(self): def getRuleIndex(self): return Parser.RULE_comparisonOperand - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterComparisonOperand"): - listener.enterComparisonOperand(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitComparisonOperand"): - listener.exitComparisonOperand(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitComparisonOperand"): return visitor.visitComparisonOperand(self) @@ -17390,7 +15645,7 @@ def comparisonOperand(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1859 + self.state = 1851 _la = self._input.LA(1) if not ( ((_la) & ~0x3F) == 0 @@ -17435,14 +15690,6 @@ def 
OPTIONAL(self): def getRuleIndex(self): return Parser.RULE_optionalExpr - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterOptionalExpr"): - listener.enterOptionalExpr(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitOptionalExpr"): - listener.exitOptionalExpr(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitOptionalExpr"): return visitor.visitOptionalExpr(self) @@ -17454,7 +15701,7 @@ def optionalExpr(self): localctx = Parser.OptionalExprContext(self, self._ctx, self.state) self.enterRule(localctx, 202, self.RULE_optionalExpr) try: - self.state = 1863 + self.state = 1855 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -17544,12 +15791,12 @@ def optionalExpr(self): Parser.IDENTIFIER, ]: self.enterOuterAlt(localctx, 1) - self.state = 1861 + self.state = 1853 self.expr(0) pass elif token in [Parser.OPTIONAL]: self.enterOuterAlt(localctx, 2) - self.state = 1862 + self.state = 1854 self.match(Parser.OPTIONAL) pass else: @@ -17579,14 +15826,6 @@ def OPTIONAL(self): def getRuleIndex(self): return Parser.RULE_optionalExprComponent - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterOptionalExprComponent"): - listener.enterOptionalExprComponent(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitOptionalExprComponent"): - listener.exitOptionalExprComponent(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitOptionalExprComponent"): return visitor.visitOptionalExprComponent(self) @@ -17598,7 +15837,7 @@ def optionalExprComponent(self): localctx = Parser.OptionalExprComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 204, self.RULE_optionalExprComponent) try: - self.state = 1867 + self.state = 1859 self._errHandler.sync(self) token = self._input.LA(1) if token in [ @@ -17676,12 +15915,12 @@ def optionalExprComponent(self): Parser.IDENTIFIER, ]: 
self.enterOuterAlt(localctx, 1) - self.state = 1865 + self.state = 1857 self.exprComponent(0) pass elif token in [Parser.OPTIONAL]: self.enterOuterAlt(localctx, 2) - self.state = 1866 + self.state = 1858 self.match(Parser.OPTIONAL) pass else: @@ -17720,14 +15959,6 @@ def viralAttribute(self): def getRuleIndex(self): return Parser.RULE_componentRole - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterComponentRole"): - listener.enterComponentRole(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitComponentRole"): - listener.exitComponentRole(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitComponentRole"): return visitor.visitComponentRole(self) @@ -17739,32 +15970,32 @@ def componentRole(self): localctx = Parser.ComponentRoleContext(self, self._ctx, self.state) self.enterRule(localctx, 206, self.RULE_componentRole) try: - self.state = 1874 + self.state = 1866 self._errHandler.sync(self) token = self._input.LA(1) if token in [Parser.MEASURE]: self.enterOuterAlt(localctx, 1) - self.state = 1869 + self.state = 1861 self.match(Parser.MEASURE) pass elif token in [Parser.COMPONENT]: self.enterOuterAlt(localctx, 2) - self.state = 1870 + self.state = 1862 self.match(Parser.COMPONENT) pass elif token in [Parser.DIMENSION]: self.enterOuterAlt(localctx, 3) - self.state = 1871 + self.state = 1863 self.match(Parser.DIMENSION) pass elif token in [Parser.ATTRIBUTE]: self.enterOuterAlt(localctx, 4) - self.state = 1872 + self.state = 1864 self.match(Parser.ATTRIBUTE) pass elif token in [Parser.VIRAL]: self.enterOuterAlt(localctx, 5) - self.state = 1873 + self.state = 1865 self.viralAttribute() pass else: @@ -17794,14 +16025,6 @@ def ATTRIBUTE(self): def getRuleIndex(self): return Parser.RULE_viralAttribute - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterViralAttribute"): - listener.enterViralAttribute(self) - - def exitRule(self, listener: 
ParseTreeListener): - if hasattr(listener, "exitViralAttribute"): - listener.exitViralAttribute(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitViralAttribute"): return visitor.visitViralAttribute(self) @@ -17814,9 +16037,9 @@ def viralAttribute(self): self.enterRule(localctx, 208, self.RULE_viralAttribute) try: self.enterOuterAlt(localctx, 1) - self.state = 1876 + self.state = 1868 self.match(Parser.VIRAL) - self.state = 1877 + self.state = 1869 self.match(Parser.ATTRIBUTE) except RecognitionException as re: localctx.exception = re @@ -17839,14 +16062,6 @@ def IDENTIFIER(self): def getRuleIndex(self): return Parser.RULE_valueDomainID - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterValueDomainID"): - listener.enterValueDomainID(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitValueDomainID"): - listener.exitValueDomainID(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitValueDomainID"): return visitor.visitValueDomainID(self) @@ -17859,7 +16074,7 @@ def valueDomainID(self): self.enterRule(localctx, 210, self.RULE_valueDomainID) try: self.enterOuterAlt(localctx, 1) - self.state = 1879 + self.state = 1871 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -17882,14 +16097,6 @@ def IDENTIFIER(self): def getRuleIndex(self): return Parser.RULE_operatorID - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterOperatorID"): - listener.enterOperatorID(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitOperatorID"): - listener.exitOperatorID(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitOperatorID"): return visitor.visitOperatorID(self) @@ -17902,7 +16109,7 @@ def operatorID(self): self.enterRule(localctx, 212, self.RULE_operatorID) try: self.enterOuterAlt(localctx, 1) - self.state = 1881 + self.state = 1873 
self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -17925,14 +16132,6 @@ def IDENTIFIER(self): def getRuleIndex(self): return Parser.RULE_routineName - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRoutineName"): - listener.enterRoutineName(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRoutineName"): - listener.exitRoutineName(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRoutineName"): return visitor.visitRoutineName(self) @@ -17945,7 +16144,7 @@ def routineName(self): self.enterRule(localctx, 214, self.RULE_routineName) try: self.enterOuterAlt(localctx, 1) - self.state = 1883 + self.state = 1875 self.match(Parser.IDENTIFIER) except RecognitionException as re: localctx.exception = re @@ -17980,14 +16179,6 @@ def NULL_CONSTANT(self): def getRuleIndex(self): return Parser.RULE_constant - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterConstant"): - listener.enterConstant(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitConstant"): - listener.exitConstant(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitConstant"): return visitor.visitConstant(self) @@ -17999,36 +16190,36 @@ def constant(self): localctx = Parser.ConstantContext(self, self._ctx, self.state) self.enterRule(localctx, 216, self.RULE_constant) try: - self.state = 1890 + self.state = 1882 self._errHandler.sync(self) - la_ = self._interp.adaptivePredict(self._input, 203, self._ctx) + la_ = self._interp.adaptivePredict(self._input, 201, self._ctx) if la_ == 1: self.enterOuterAlt(localctx, 1) - self.state = 1885 + self.state = 1877 self.signedInteger() pass elif la_ == 2: self.enterOuterAlt(localctx, 2) - self.state = 1886 + self.state = 1878 self.signedNumber() pass elif la_ == 3: self.enterOuterAlt(localctx, 3) - self.state = 1887 + self.state = 1879 
self.match(Parser.BOOLEAN_CONSTANT) pass elif la_ == 4: self.enterOuterAlt(localctx, 4) - self.state = 1888 + self.state = 1880 self.match(Parser.STRING_CONSTANT) pass elif la_ == 5: self.enterOuterAlt(localctx, 5) - self.state = 1889 + self.state = 1881 self.match(Parser.NULL_CONSTANT) pass @@ -18077,14 +16268,6 @@ def SCALAR(self): def getRuleIndex(self): return Parser.RULE_basicScalarType - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterBasicScalarType"): - listener.enterBasicScalarType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitBasicScalarType"): - listener.exitBasicScalarType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitBasicScalarType"): return visitor.visitBasicScalarType(self) @@ -18098,7 +16281,7 @@ def basicScalarType(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1892 + self.state = 1884 _la = self._input.LA(1) if not ( ( @@ -18148,14 +16331,6 @@ def ALL(self): def getRuleIndex(self): return Parser.RULE_retainType - def enterRule(self, listener: ParseTreeListener): - if hasattr(listener, "enterRetainType"): - listener.enterRetainType(self) - - def exitRule(self, listener: ParseTreeListener): - if hasattr(listener, "exitRetainType"): - listener.exitRetainType(self) - def accept(self, visitor: ParseTreeVisitor): if hasattr(visitor, "visitRetainType"): return visitor.visitRetainType(self) @@ -18169,7 +16344,7 @@ def retainType(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 1894 + self.state = 1886 _la = self._input.LA(1) if not (_la == Parser.ALL or _la == Parser.BOOLEAN_CONSTANT): self._errHandler.recoverInline(self) @@ -18185,8 +16360,8 @@ def retainType(self): return localctx def sempred(self, localctx: RuleContext, ruleIndex: int, predIndex: int): - if self._predicates == None: - self._predicates = dict() + if self._predicates is None: + self._predicates = {} 
self._predicates[2] = self.expr_sempred self._predicates[3] = self.exprComponent_sempred pred = self._predicates.get(ruleIndex, None) diff --git a/src/vtlengine/AST/VtlVisitor.py b/src/vtlengine/AST/VtlVisitor.py index b937e4bfc..f2d65f7d4 100644 --- a/src/vtlengine/AST/VtlVisitor.py +++ b/src/vtlengine/AST/VtlVisitor.py @@ -18,16 +18,10 @@ def visitTemporaryAssignment(self, ctx: Parser.TemporaryAssignmentContext): def visitPersistAssignment(self, ctx: Parser.PersistAssignmentContext): return self.visitChildren(ctx) - def visitStatement(self, ctx: Parser.StatementContext): - return self.visitChildren(ctx) - # Visit a parse tree produced by Parser#defineExpression. def visitDefineExpression(self, ctx: Parser.DefineExpressionContext): return self.visitChildren(ctx) - def visitExpr(self, ctx: Parser.ExprContext): - return self.visitChildren(ctx) - # Visit a parse tree produced by Parser#varIdExpr. def visitVarIdExpr(self, ctx: Parser.VarIdExprContext): return self.visitChildren(ctx) @@ -60,14 +54,14 @@ def visitFunctionsExpression(self, ctx: Parser.FunctionsExpressionContext): def visitIfExpr(self, ctx: Parser.IfExprContext): return self.visitChildren(ctx) - # Visit a parse tree produced by Parser#caseExpr. - def visitCaseExpr(self, ctx: Parser.CaseExprContext): - return self.visitChildren(ctx) - # Visit a parse tree produced by Parser#clauseExpr. def visitClauseExpr(self, ctx: Parser.ClauseExprContext): return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#caseExpr. + def visitCaseExpr(self, ctx: Parser.CaseExprContext): + return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#arithmeticExpr. def visitArithmeticExpr(self, ctx: Parser.ArithmeticExprContext): return self.visitChildren(ctx) @@ -92,10 +86,6 @@ def visitArithmeticExprComp(self, ctx: Parser.ArithmeticExprCompContext): def visitIfExprComp(self, ctx: Parser.IfExprCompContext): return self.visitChildren(ctx) - # Visit a parse tree produced by Parser#caseExprComp. 
- def visitCaseExprComp(self, ctx: Parser.CaseExprCompContext): - return self.visitChildren(ctx) - # Visit a parse tree produced by Parser#comparisonExprComp. def visitComparisonExprComp(self, ctx: Parser.ComparisonExprCompContext): return self.visitChildren(ctx) @@ -128,6 +118,10 @@ def visitInNotInExprComp(self, ctx: Parser.InNotInExprCompContext): def visitUnaryExprComp(self, ctx: Parser.UnaryExprCompContext): return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#caseExprComp. + def visitCaseExprComp(self, ctx: Parser.CaseExprCompContext): + return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#booleanExprComp. def visitBooleanExprComp(self, ctx: Parser.BooleanExprCompContext): return self.visitChildren(ctx) @@ -242,6 +236,10 @@ def visitKeepOrDropClause(self, ctx: Parser.KeepOrDropClauseContext): def visitPivotOrUnpivotClause(self, ctx: Parser.PivotOrUnpivotClauseContext): return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#customPivotClause. + def visitCustomPivotClause(self, ctx: Parser.CustomPivotClauseContext): + return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#subspaceClause. def visitSubspaceClause(self, ctx: Parser.SubspaceClauseContext): return self.visitChildren(ctx) @@ -404,8 +402,48 @@ def visitTimeAggAtom(self, ctx: Parser.TimeAggAtomContext): def visitCurrentDateAtom(self, ctx: Parser.CurrentDateAtomContext): return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#dateDiffAtom. + def visitDateDiffAtom(self, ctx: Parser.DateDiffAtomContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#dateAddAtom. + def visitDateAddAtom(self, ctx: Parser.DateAddAtomContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#yearAtom. + def visitYearAtom(self, ctx: Parser.YearAtomContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#monthAtom. 
+ def visitMonthAtom(self, ctx: Parser.MonthAtomContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#dayOfMonthAtom. + def visitDayOfMonthAtom(self, ctx: Parser.DayOfMonthAtomContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#dayOfYearAtom. + def visitDayOfYearAtom(self, ctx: Parser.DayOfYearAtomContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#dayToYearAtom. + def visitDayToYearAtom(self, ctx: Parser.DayToYearAtomContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#dayToMonthAtom. + def visitDayToMonthAtom(self, ctx: Parser.DayToMonthAtomContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#yearTodayAtom. + def visitYearTodayAtom(self, ctx: Parser.YearTodayAtomContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#monthTodayAtom. + def visitMonthTodayAtom(self, ctx: Parser.MonthTodayAtomContext): + return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#periodAtomComponent. - def visitTimeUnaryAtomComponent(self, ctx: Parser.PeriodAtomComponentContext): + def visitPeriodAtomComponent(self, ctx: Parser.PeriodAtomComponentContext): return self.visitChildren(ctx) # Visit a parse tree produced by Parser#fillTimeAtomComponent. @@ -428,6 +466,46 @@ def visitTimeAggAtomComponent(self, ctx: Parser.TimeAggAtomComponentContext): def visitCurrentDateAtomComponent(self, ctx: Parser.CurrentDateAtomComponentContext): return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#dateDiffAtomComponent. + def visitDateDiffAtomComponent(self, ctx: Parser.DateDiffAtomComponentContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#dateAddAtomComponent. 
+ def visitDateAddAtomComponent(self, ctx: Parser.DateAddAtomComponentContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#yearAtomComponent. + def visitYearAtomComponent(self, ctx: Parser.YearAtomComponentContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#monthAtomComponent. + def visitMonthAtomComponent(self, ctx: Parser.MonthAtomComponentContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#dayOfMonthAtomComponent. + def visitDayOfMonthAtomComponent(self, ctx: Parser.DayOfMonthAtomComponentContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#datOfYearAtomComponent. + def visitDatOfYearAtomComponent(self, ctx: Parser.DatOfYearAtomComponentContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#dayToYearAtomComponent. + def visitDayToYearAtomComponent(self, ctx: Parser.DayToYearAtomComponentContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#dayToMonthAtomComponent. + def visitDayToMonthAtomComponent(self, ctx: Parser.DayToMonthAtomComponentContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#yearTodayAtomComponent. + def visitYearTodayAtomComponent(self, ctx: Parser.YearTodayAtomComponentContext): + return self.visitChildren(ctx) + + # Visit a parse tree produced by Parser#monthTodayAtomComponent. + def visitMonthTodayAtomComponent(self, ctx: Parser.MonthTodayAtomComponentContext): + return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#unionAtom. def visitUnionAtom(self, ctx: Parser.UnionAtomContext): return self.visitChildren(ctx) @@ -572,6 +650,10 @@ def visitWindowingClause(self, ctx: Parser.WindowingClauseContext): def visitSignedInteger(self, ctx: Parser.SignedIntegerContext): return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#signedNumber. 
+ def visitSignedNumber(self, ctx: Parser.SignedNumberContext): + return self.visitChildren(ctx) + # Visit a parse tree produced by Parser#limitClauseItem. def visitLimitClauseItem(self, ctx: Parser.LimitClauseItemContext): return self.visitChildren(ctx) diff --git a/tests/AST/data/prettier/complete_grammar.vtl b/tests/AST/data/prettier/complete_grammar.vtl index c28cedcf4..2862fd2e4 100644 --- a/tests/AST/data/prettier/complete_grammar.vtl +++ b/tests/AST/data/prettier/complete_grammar.vtl @@ -294,8 +294,8 @@ timeshift_period_ds := timeshift(drop_identifier(DS_1, Id_date), -2); datediff_ds := DS_1[calc Me_diff := datediff(Me_interval, Id_period)]; date_add_ds := DS_1[calc Me_add := dateadd(Id_period, 2, "M")]; -time_agg_date_ds := sum(drop_identifier(DS_1, Id_period)#Me_int group all time_agg("A", _, Id_date)); -time_agg_period_ds := sum(drop_identifier(DS_1, Id_date)#Me_num group all time_agg("M", _, Id_period)); +time_agg_date_ds := sum(drop_identifier(DS_1, Id_period)#Me_int group all time_agg("A")); +time_agg_period_ds := sum(drop_identifier(DS_1, Id_date)#Me_num group all time_agg("M")); /************************************************* USER DEFINE OPERATOR diff --git a/tests/AST/data/prettier/group_all_time_aggr.vtl b/tests/AST/data/prettier/group_all_time_aggr.vtl index b6df21620..bd52cd6de 100644 --- a/tests/AST/data/prettier/group_all_time_aggr.vtl +++ b/tests/AST/data/prettier/group_all_time_aggr.vtl @@ -1 +1 @@ -DS_r := sum(DS_1 group all time_agg("D",_,Id_1)); +DS_r := sum(DS_1 group all time_agg("D")); diff --git a/tests/AST/data/prettier/reference_complete_grammar.vtl b/tests/AST/data/prettier/reference_complete_grammar.vtl index 3e368218e..dcf3cf846 100644 --- a/tests/AST/data/prettier/reference_complete_grammar.vtl +++ b/tests/AST/data/prettier/reference_complete_grammar.vtl @@ -634,11 +634,11 @@ date_add_ds := "M")]; time_agg_date_ds := sum( - drop_identifier(DS_1, Id_period)#Me_int group all time_agg("A", _, Id_date) + drop_identifier(DS_1, 
Id_period)#Me_int group all time_agg("A") ); time_agg_period_ds := sum( - drop_identifier(DS_1, Id_date)#Me_num group all time_agg("M", _, Id_period) + drop_identifier(DS_1, Id_date)#Me_num group all time_agg("M") ); /************************************************* USER DEFINE OPERATOR diff --git a/tests/AST/data/prettier/reference_group_all_time_aggr.vtl b/tests/AST/data/prettier/reference_group_all_time_aggr.vtl index 8aa2879e8..9f70a5bf6 100644 --- a/tests/AST/data/prettier/reference_group_all_time_aggr.vtl +++ b/tests/AST/data/prettier/reference_group_all_time_aggr.vtl @@ -1,4 +1,4 @@ DS_r := sum( - DS_1 group all time_agg("D", _, Id_1) + DS_1 group all time_agg("D") ); diff --git a/tests/AST/data/vtl/complete_grammar.vtl b/tests/AST/data/vtl/complete_grammar.vtl index c28cedcf4..2862fd2e4 100644 --- a/tests/AST/data/vtl/complete_grammar.vtl +++ b/tests/AST/data/vtl/complete_grammar.vtl @@ -294,8 +294,8 @@ timeshift_period_ds := timeshift(drop_identifier(DS_1, Id_date), -2); datediff_ds := DS_1[calc Me_diff := datediff(Me_interval, Id_period)]; date_add_ds := DS_1[calc Me_add := dateadd(Id_period, 2, "M")]; -time_agg_date_ds := sum(drop_identifier(DS_1, Id_period)#Me_int group all time_agg("A", _, Id_date)); -time_agg_period_ds := sum(drop_identifier(DS_1, Id_date)#Me_num group all time_agg("M", _, Id_period)); +time_agg_date_ds := sum(drop_identifier(DS_1, Id_period)#Me_int group all time_agg("A")); +time_agg_period_ds := sum(drop_identifier(DS_1, Id_date)#Me_num group all time_agg("M")); /************************************************* USER DEFINE OPERATOR diff --git a/tests/AST/data/vtl/time.vtl b/tests/AST/data/vtl/time.vtl index 92f033f4a..9345afe6c 100644 --- a/tests/AST/data/vtl/time.vtl +++ b/tests/AST/data/vtl/time.vtl @@ -16,9 +16,9 @@ fill_time_series_date_single_ds := fill_time_series(drop_identifier(DS_1, Id_per fill_time_series_period_all_ds := fill_time_series(drop_identifier(DS_1, Id_date), all); timeshift_date_ds := 
timeshift(drop_identifier(DS_1, Id_period), 1); timeshift_period_ds := timeshift(drop_identifier(DS_1, Id_date), -2); -datediff_ds := DS_1[calc Me_diff := datediff(Me_interval, Id_period)]; +datediff_ds := DS_1[calc Me_diff := datediff(Me_interva9l, Id_period)]; date_add_ds := DS_1[calc Me_add := dateadd(Id_period, 2, "M")]; -time_agg_date_ds := sum(drop_identifier(DS_1, Id_period)#Me_int group all time_agg("A", _, Id_date)); -time_agg_period_ds := sum(drop_identifier(DS_1, Id_date)#Me_num group all time_agg("M", _, Id_period)); +time_agg_date_ds := sum(drop_identifier(DS_1, Id_period)#Me_int group all time_agg("A")); +time_agg_period_ds := sum(drop_identifier(DS_1, Id_date)#Me_num group all time_agg("M")); DS_A := time_agg("A", DS_1, last); \ No newline at end of file diff --git a/tests/Additional/data/vtl/7-20.vtl b/tests/Additional/data/vtl/7-20.vtl index bebbd9706..12c8ee404 100644 --- a/tests/Additional/data/vtl/7-20.vtl +++ b/tests/Additional/data/vtl/7-20.vtl @@ -1,6 +1,6 @@ -DS_r := sum(DS_1 group all time_agg("D", _ ,Id_1, first)); -DS_r1 := sum(DS_1 group all time_agg("W", _ ,Id_1, first)); -DS_r2 := sum(DS_1 group all time_agg("M", _ ,Id_1, first)); -DS_r3 := sum(DS_1 group all time_agg("Q", _ ,Id_1, first)); -DS_r4 := sum(DS_1 group all time_agg("S", _ ,Id_1, first)); -DS_r5 := sum(DS_1 group all time_agg("A", _ ,Id_1, first)); +DS_r := sum(DS_1 group all time_agg("D", first)); +DS_r1 := sum(DS_1 group all time_agg("W", first)); +DS_r2 := sum(DS_1 group all time_agg("M", first)); +DS_r3 := sum(DS_1 group all time_agg("Q", first)); +DS_r4 := sum(DS_1 group all time_agg("S", first)); +DS_r5 := sum(DS_1 group all time_agg("A", first)); diff --git a/tests/Additional/data/vtl/7-21.vtl b/tests/Additional/data/vtl/7-21.vtl index b847fb7d8..3c4d814fc 100644 --- a/tests/Additional/data/vtl/7-21.vtl +++ b/tests/Additional/data/vtl/7-21.vtl @@ -1,6 +1,6 @@ -DS_r := sum(DS_1 group all time_agg("D", _ ,Id_1, last)); -DS_r1 := sum(DS_1 group all time_agg("W", _ 
,Id_1, last)); -DS_r2 := sum(DS_1 group all time_agg("M", _ ,Id_1, last)); -DS_r3 := sum(DS_1 group all time_agg("Q", _ ,Id_1, last)); -DS_r4 := sum(DS_1 group all time_agg("S", _ ,Id_1, last)); -DS_r5 := sum(DS_1 group all time_agg("A", _ ,Id_1, last)); +DS_r := sum(DS_1 group all time_agg("D", last)); +DS_r1 := sum(DS_1 group all time_agg("W", last)); +DS_r2 := sum(DS_1 group all time_agg("M", last)); +DS_r3 := sum(DS_1 group all time_agg("Q", last)); +DS_r4 := sum(DS_1 group all time_agg("S", last)); +DS_r5 := sum(DS_1 group all time_agg("A", last)); diff --git a/tests/Additional/test_additional.py b/tests/Additional/test_additional.py index 73d5c73a6..56c764125 100644 --- a/tests/Additional/test_additional.py +++ b/tests/Additional/test_additional.py @@ -3733,7 +3733,7 @@ def test_18(self): """ Basic behaviour for datasets with period type. """ - text = """DS_r := sum (DS_1 group all time_agg("A", Id_1));""" + text = """DS_r := sum (DS_1 group all time_agg("A"));""" code = "7-18" number_inputs = 1 references_names = ["DS_r"] @@ -3749,7 +3749,7 @@ def test_19(self): """ Basic behaviour for datasets with date type. 
""" - text = """DS_r := sum (DS_1 group all time_agg("A", Id_1, last));""" + text = """DS_r := sum (DS_1 group all time_agg("A", last));""" code = "7-19" number_inputs = 1 references_names = ["DS_r"] diff --git a/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl b/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl index ce1f9488d..c58375db9 100644 --- a/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl +++ b/tests/Complete_VTL_Grammar/data/vtl/test_grammar.vtl @@ -296,8 +296,8 @@ timeshift_period_ds := timeshift(drop_identifier(DS_1_without_time, Id_date), -2 datediff_ds := DS_1[calc Me_diff := datediff(Me_interval, Id_period)]; date_add_ds := DS_1[calc Me_add := dateadd(Id_period, 2, "M")]; -time_agg_date_ds := sum(drop_identifier(DS_1_without_time, Id_period)#Me_int group all time_agg("A", _, Id_date, first)); -time_agg_period_ds := sum(drop_identifier(DS_1_without_time, Id_date)#Me_num group all time_agg("M", _, Id_period)); +time_agg_date_ds := sum(drop_identifier(DS_1_without_time, Id_period)#Me_int group all time_agg("A", first)); +time_agg_period_ds := sum(drop_identifier(DS_1_without_time, Id_date)#Me_num group all time_agg("M")); /************************************************* USER DEFINE OPERATOR diff --git a/tests/IfThenElse/data/vtl/GL_436_1.vtl b/tests/IfThenElse/data/vtl/GL_436_1.vtl index 1d55a398e..81b8f0bdd 100644 --- a/tests/IfThenElse/data/vtl/GL_436_1.vtl +++ b/tests/IfThenElse/data/vtl/GL_436_1.vtl @@ -3,12 +3,12 @@ returns dataset is round( inner_join( count( - DS_CP group all time_agg("Q",_,TIME_PERIOD) + DS_CP group all time_agg("Q") ) [filter int_var = if freq = "Q" then 3 else if freq = "S" then 6 else 12 ] [drop int_var] as A, avg( - DS_CP group all time_agg("Q",_,TIME_PERIOD) + DS_CP group all time_agg("Q") ) as B ) [sub FREQ = "M"] diff --git a/tests/IfThenElse/data/vtl/GL_436_2.vtl b/tests/IfThenElse/data/vtl/GL_436_2.vtl index 69fffcaac..9c0460f51 100644 --- a/tests/IfThenElse/data/vtl/GL_436_2.vtl +++ 
b/tests/IfThenElse/data/vtl/GL_436_2.vtl @@ -3,11 +3,11 @@ returns dataset is round( inner_join( count( - DS_CP group all time_agg("Q",_,TIME_PERIOD) + DS_CP group all time_agg("Q") ) [calc int_var := if freq = "Q" then 3 else if freq = "S" then 6 else 12 ] as A, avg( - DS_CP group all time_agg("Q",_,TIME_PERIOD) + DS_CP group all time_agg("Q") ) as B ) [sub FREQ = "M"] diff --git a/tests/ReferenceManual/data/vtl/RM121.vtl b/tests/ReferenceManual/data/vtl/RM121.vtl index c87746972..ceb6e93a4 100644 --- a/tests/ReferenceManual/data/vtl/RM121.vtl +++ b/tests/ReferenceManual/data/vtl/RM121.vtl @@ -1 +1 @@ -DS_r := sum ( DS_1 group all time_agg ( "A" , _ , Me_1 )); \ No newline at end of file +DS_r := sum ( DS_1 group all time_agg ( "A" )); \ No newline at end of file diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM121.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM121.vtl index ab0aba6af..8903f5cab 100644 --- a/tests/ReferenceManual/data/vtl_defined_operators/RM121.vtl +++ b/tests/ReferenceManual/data/vtl_defined_operators/RM121.vtl @@ -1,5 +1,5 @@ define operator Test121 (x dataset, c component) returns dataset is - sum ( x group all time_agg ( "A" , _ , c )) + sum ( x group all time_agg ( "A" )) end operator; DS_r := Test121(DS_1, Me_1); diff --git a/tests/TimePeriod/data/vtl/GL_417_1.vtl b/tests/TimePeriod/data/vtl/GL_417_1.vtl index 51d7a9b51..f872d76d8 100644 --- a/tests/TimePeriod/data/vtl/GL_417_1.vtl +++ b/tests/TimePeriod/data/vtl/GL_417_1.vtl @@ -1 +1 @@ -test := avg (BE2_DF_NICP group all time_agg ("Q", "M", TIME_PERIOD)); \ No newline at end of file +test := avg (BE2_DF_NICP group all time_agg ("Q")); \ No newline at end of file diff --git a/tests/TimePeriod/data/vtl/GL_417_2.vtl b/tests/TimePeriod/data/vtl/GL_417_2.vtl index dc24db8d4..a0610e546 100644 --- a/tests/TimePeriod/data/vtl/GL_417_2.vtl +++ b/tests/TimePeriod/data/vtl/GL_417_2.vtl @@ -1 +1 @@ -test := avg (BE2_DF_NICP group all time_agg ("A", "M", TIME_PERIOD)); \ No 
newline at end of file +test := avg (BE2_DF_NICP group all time_agg ("A")); \ No newline at end of file diff --git a/tests/TimePeriod/data/vtl/GL_417_3.vtl b/tests/TimePeriod/data/vtl/GL_417_3.vtl index 7d768b401..f872d76d8 100644 --- a/tests/TimePeriod/data/vtl/GL_417_3.vtl +++ b/tests/TimePeriod/data/vtl/GL_417_3.vtl @@ -1 +1 @@ -test := avg (BE2_DF_NICP group all time_agg ("Q", "Q", TIME_PERIOD)); \ No newline at end of file +test := avg (BE2_DF_NICP group all time_agg ("Q")); \ No newline at end of file diff --git a/tests/TimePeriod/data/vtl/GL_417_4.vtl b/tests/TimePeriod/data/vtl/GL_417_4.vtl index d2bd57958..a0610e546 100644 --- a/tests/TimePeriod/data/vtl/GL_417_4.vtl +++ b/tests/TimePeriod/data/vtl/GL_417_4.vtl @@ -1 +1 @@ -test := avg (BE2_DF_NICP group all time_agg ("A", "Q", TIME_PERIOD)); \ No newline at end of file +test := avg (BE2_DF_NICP group all time_agg ("A")); \ No newline at end of file diff --git a/tests/TimePeriod/test_timeperiod.py b/tests/TimePeriod/test_timeperiod.py index f0d4af206..139512a34 100644 --- a/tests/TimePeriod/test_timeperiod.py +++ b/tests/TimePeriod/test_timeperiod.py @@ -24,9 +24,9 @@ "GL_416", 'test2_1 := BE2_DF_NICP[filter FREQ = "M" and TIME_PERIOD = cast("2020-01", time_period)];', ), - ("GL_417_1", 'test := avg (BE2_DF_NICP group all time_agg ("Q", "M", TIME_PERIOD));'), - ("GL_417_2", 'test := avg (BE2_DF_NICP group all time_agg ("A", "M", TIME_PERIOD));'), - ("GL_417_4", 'test := avg (BE2_DF_NICP group all time_agg ("A", "Q", TIME_PERIOD));'), + ("GL_417_1", 'test := avg (BE2_DF_NICP group all time_agg ("Q"));'), + ("GL_417_2", 'test := avg (BE2_DF_NICP group all time_agg ("A"));'), + ("GL_417_4", 'test := avg (BE2_DF_NICP group all time_agg ("A"));'), ( "GL_418", 'test2_1 := BE2_DF_NICP[sub DERIVATION = "INDICES"][filter FREQ = "M"][keep OBS_VALUE]; \ @@ -39,7 +39,7 @@ "GL_421_1", 'test2_1 := BE2_DF_NICP[calc FREQ_2 := TIME_PERIOD in {cast("2020-01", time_period), cast("2021-01", time_period)}];', ), - # 
("GL_421_2", 'test := avg (BE2_DF_NICP group all time_agg ("A", "M", TIME_PERIOD));'), + # ("GL_421_2", 'test := avg (BE2_DF_NICP group all time_agg ("A"));'), ("GL_440_1", "DS_r := DS_1;"), ("GL_462_1", "added := demo_data_structure;"), ("GL_462_2", "added := demo_data_structure; DS_r := added+ ds_2;"), diff --git a/tests/UDO/data/vtl/GL_473_1.vtl b/tests/UDO/data/vtl/GL_473_1.vtl index 781c71322..692c30aa8 100644 --- a/tests/UDO/data/vtl/GL_473_1.vtl +++ b/tests/UDO/data/vtl/GL_473_1.vtl @@ -18,12 +18,12 @@ define operator AVG_M_Q (ds dataset) round( inner_join( count( - ds group all time_agg("Q",_,TIME_PERIOD) + ds group all time_agg("Q") ) [filter int_var = 3] [drop int_var] as A, avg( - ds group all time_agg("Q",_,TIME_PERIOD) + ds group all time_agg("Q") ) as B ) [sub FREQ = "M"] @@ -40,7 +40,7 @@ conf_status := [calc CONF_STATUS_NUM := conf_status_num(CONF_STATUS)] [aggr CONF_STATUS_NUM := min(CONF_STATUS_NUM) - group all time_agg("Q",_,TIME_PERIOD)] + group all time_agg("Q")] [calc CONF_STATUS := conf_status_str(CONF_STATUS_NUM)] [drop CONF_STATUS_NUM]; diff --git a/tests/UDO/data/vtl/GL_473_2.vtl b/tests/UDO/data/vtl/GL_473_2.vtl index 6c4c31abc..8b3593f4a 100644 --- a/tests/UDO/data/vtl/GL_473_2.vtl +++ b/tests/UDO/data/vtl/GL_473_2.vtl @@ -18,7 +18,7 @@ filtered_ds := DS_PPI[filter FREQ = "M"]; with_udo := inner_join( count( - filtered_ds group all time_agg("Q",_,TIME_PERIOD) + filtered_ds group all time_agg("Q") ) [filter int_var = 3] [drop int_var] as A, @@ -27,7 +27,7 @@ with_udo := [aggr OBS_VALUE := avg(OBS_VALUE), CONF_STATUS_NUM := min(CONF_STATUS_NUM) - group all time_agg("Q",_,TIME_PERIOD)] + group all time_agg("Q")] [calc CONF_STATUS := conf_status_str(CONF_STATUS_NUM)] [drop CONF_STATUS_NUM] as B ) diff --git a/tests/UDO/data/vtl/GL_474_1.vtl b/tests/UDO/data/vtl/GL_474_1.vtl index 3b8ed6dd5..6fdf40af4 100644 --- a/tests/UDO/data/vtl/GL_474_1.vtl +++ b/tests/UDO/data/vtl/GL_474_1.vtl @@ -17,7 +17,7 @@ define operator AVG_M_Q (ds dataset) 
returns dataset is inner_join( count( - ds group all time_agg("Q",_,TIME_PERIOD) + ds group all time_agg("Q") ) [filter int_var = 3] [drop int_var] as A, @@ -26,7 +26,7 @@ define operator AVG_M_Q (ds dataset) [aggr OBS_VALUE := avg(OBS_VALUE), CONF_STATUS_NUM := min(CONF_STATUS_NUM) - group all time_agg("Q",_,TIME_PERIOD)] + group all time_agg("Q")] [calc CONF_STATUS := conf_status_str(CONF_STATUS_NUM)] [drop CONF_STATUS_NUM] as B ) diff --git a/tests/UDO/data/vtl/GL_474_2.vtl b/tests/UDO/data/vtl/GL_474_2.vtl index a65b00bf6..02226ecc3 100644 --- a/tests/UDO/data/vtl/GL_474_2.vtl +++ b/tests/UDO/data/vtl/GL_474_2.vtl @@ -18,7 +18,7 @@ define operator AVG_M_Q (ds dataset) round( inner_join( count( - ds group all time_agg("Q",_,TIME_PERIOD) + ds group all time_agg("Q") ) [filter int_var = 3] [drop int_var] as A, @@ -27,7 +27,7 @@ define operator AVG_M_Q (ds dataset) [aggr OBS_VALUE := avg(OBS_VALUE), CONF_STATUS_NUM := min(CONF_STATUS_NUM) - group all time_agg("Q",_,TIME_PERIOD)] + group all time_agg("Q")] [calc CONF_STATUS := conf_status_str(CONF_STATUS_NUM)] [drop CONF_STATUS_NUM] as B ) diff --git a/tests/UDO/data/vtl/GL_474_3.vtl b/tests/UDO/data/vtl/GL_474_3.vtl index 54ce13ede..bf9e1c5e4 100644 --- a/tests/UDO/data/vtl/GL_474_3.vtl +++ b/tests/UDO/data/vtl/GL_474_3.vtl @@ -10,7 +10,7 @@ define operator AVG_M_Q (ds dataset) returns dataset is inner_join( count( - ds group all time_agg("Q",_,TIME_PERIOD) + ds group all time_agg("Q") ) [filter int_var = 3] [drop int_var] as A, @@ -19,7 +19,7 @@ define operator AVG_M_Q (ds dataset) [aggr OBS_VALUE := avg(OBS_VALUE), CONF_STATUS_NUM := min(CONF_STATUS_NUM) - group all time_agg("Q",_,TIME_PERIOD)] + group all time_agg("Q")] [calc CONF_STATUS := conf_status_str(CONF_STATUS_NUM)] [drop CONF_STATUS_NUM] as B ) diff --git a/tests/UDO/data/vtl/GL_475_1.vtl b/tests/UDO/data/vtl/GL_475_1.vtl index 60f9de31f..d7d0dfd2f 100644 --- a/tests/UDO/data/vtl/GL_475_1.vtl +++ b/tests/UDO/data/vtl/GL_475_1.vtl @@ -17,7 +17,7 @@ 
define operator AVG_M_Q (ds dataset) returns dataset is inner_join( count( - ds group all time_agg("Q",_,TIME_PERIOD) + ds group all time_agg("Q") ) [filter int_var = 3] [drop int_var] as A, @@ -26,7 +26,7 @@ define operator AVG_M_Q (ds dataset) [aggr OBS_VALUE := avg(OBS_VALUE), CONF_STATUS_NUM := min(CONF_STATUS_NUM) - group all time_agg("Q",_,TIME_PERIOD)] + group all time_agg("Q")] [calc CONF_STATUS := conf_status_str(CONF_STATUS_NUM)] [drop CONF_STATUS_NUM] as B ) @@ -43,7 +43,7 @@ conf_status := [calc CONF_STATUS_NUM := conf_status_num(CONF_STATUS)] [aggr CONF_STATUS_NUM := min(CONF_STATUS_NUM) - group all time_agg("Q",_,TIME_PERIOD)] + group all time_agg("Q")] [calc CONF_STATUS_ST := conf_status_str(CONF_STATUS_NUM)] [drop CONF_STATUS_NUM]; diff --git a/tests/UDO/data/vtl/GL_476_1.vtl b/tests/UDO/data/vtl/GL_476_1.vtl index e48c08115..0b88a5b99 100644 --- a/tests/UDO/data/vtl/GL_476_1.vtl +++ b/tests/UDO/data/vtl/GL_476_1.vtl @@ -18,7 +18,7 @@ define operator AVG_M_Q (ds dataset) round( inner_join( count( - ds group all time_agg("Q",_,TIME_PERIOD) + ds group all time_agg("Q") ) [filter int_var = 3] [drop int_var] as A, @@ -27,7 +27,7 @@ define operator AVG_M_Q (ds dataset) [aggr OBS_VALUE := avg(OBS_VALUE), CONF_STATUS_NUM := min(CONF_STATUS_NUM) - group all time_agg("Q",_,TIME_PERIOD)] + group all time_agg("Q")] [calc CONF_STATUS := conf_status_str(CONF_STATUS_NUM)] [drop CONF_STATUS_NUM] as B ) From 0f5a737d60d29c83bbd85b2c5c80c6b75a34443e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Tue, 10 Mar 2026 14:11:08 +0100 Subject: [PATCH 27/38] Fix #585: Remove extra datasets validation (#586) --- src/vtlengine/API/__init__.py | 16 --------------- src/vtlengine/Exceptions/messages.py | 6 ------ tests/API/test_api.py | 30 ---------------------------- 3 files changed, 52 deletions(-) diff --git a/src/vtlengine/API/__init__.py b/src/vtlengine/API/__init__.py index fc83a0604..33dce44f4 100644 --- 
a/src/vtlengine/API/__init__.py +++ b/src/vtlengine/API/__init__.py @@ -25,7 +25,6 @@ from vtlengine.AST.ASTConstructor import ASTVisitor from vtlengine.AST.ASTString import ASTString from vtlengine.AST.DAG import DAGAnalyzer -from vtlengine.AST.DAG._models import DatasetSchedule from vtlengine.AST.Grammar.lexer import Lexer from vtlengine.AST.Grammar.parser import Parser from vtlengine.Exceptions import InputValidationException @@ -94,14 +93,6 @@ def _extract_input_datasets(script: Union[str, TransformationScheme, Path]) -> L return dag_inputs -def _validate_extra_datasets(datasets: Dict[str, Any], ds_analysis: DatasetSchedule) -> None: - """Raise if data_structures contains datasets not referenced by the script.""" - script_datasets = set(ds_analysis.global_inputs) | set(ds_analysis.all_outputs) - extra_datasets = set(datasets.keys()) - script_datasets - if extra_datasets: - raise InputValidationException(code="0-1-3-9", datasets=sorted(extra_datasets)) - - def prettify(script: Union[str, TransformationScheme, Path]) -> str: """ Function that prettifies the VTL script given. 
@@ -270,10 +261,6 @@ def semantic_analysis( # Loading datasets from file/dict/pysdmx objects/URLs datasets, scalars = load_datasets(data_structures, sdmx_mappings=mapping_dict) - # Validate that all provided datasets are required by the script - ds_analysis = DAGAnalyzer.ds_structure(ast) - _validate_extra_datasets(datasets, ds_analysis) - # Handling of library items vd = None if value_domains is not None: @@ -447,9 +434,6 @@ def run( # VTL Efficient analysis ds_analysis = DAGAnalyzer.ds_structure(ast) - # Validate that all provided datasets are required by the script - _validate_extra_datasets(datasets, ds_analysis) - # Checking the output path to be a Path object to a directory if output_folder is not None: _check_output_folder(output_folder) diff --git a/src/vtlengine/Exceptions/messages.py b/src/vtlengine/Exceptions/messages.py index b147096c4..7cd45abe5 100644 --- a/src/vtlengine/Exceptions/messages.py +++ b/src/vtlengine/Exceptions/messages.py @@ -224,12 +224,6 @@ "description": "Raised when URL datapoints are provided but data_structures is not a " "file path or URL for fetching the SDMX structure definition.", }, - "0-1-3-9": { - "message": "Dataset(s) {datasets} defined in data structures " - "but not required by the script.", - "description": "Raised when the provided data structures contain datasets " - "that are not used as inputs in the VTL script.", - }, # ------------Operators------------- # General Semantic errors "1-1-1-1": { diff --git a/tests/API/test_api.py b/tests/API/test_api.py index 1d462018a..d27632614 100644 --- a/tests/API/test_api.py +++ b/tests/API/test_api.py @@ -2084,33 +2084,3 @@ def test_validate_dataset(ds_input, dp_input, is_valid, message): else: with pytest.raises(Exception, match=message): validate_dataset(ds_data, dp_input) - - -def test_extra_dataset_in_data_structures(): - """run() and semantic_analysis() should fail when data_structures has unused datasets.""" - script = "DS_A <- DS_1 * 10;" - data_structures = { - 
"datasets": [ - { - "name": "DS_1", - "DataStructure": [ - {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, - {"name": "Me_1", "type": "Number", "role": "Measure", "nullable": True}, - ], - }, - { - "name": "DS_2", - "DataStructure": [ - {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, - {"name": "Me_1", "type": "Number", "role": "Measure", "nullable": True}, - ], - }, - ] - } - datapoints = {"DS_1": pd.DataFrame({"Id_1": [1], "Me_1": [10]})} - - with pytest.raises(InputValidationException, match="0-1-3-9"): - semantic_analysis(script=script, data_structures=data_structures) - - with pytest.raises(InputValidationException, match="0-1-3-9"): - run(script=script, data_structures=data_structures, datapoints=datapoints) From d13b89f01667339afe839e6d7602b2c686df484c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Tue, 10 Mar 2026 14:26:17 +0100 Subject: [PATCH 28/38] Bump version to 1.6.0rc6 (#587) --- pyproject.toml | 2 +- src/vtlengine/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8a04a6cc3..c4a911e56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "vtlengine" -version = "1.6.0rc5" +version = "1.6.0rc6" description = "Run and Validate VTL Scripts" license = "AGPL-3.0" readme = "README.md" diff --git a/src/vtlengine/__init__.py b/src/vtlengine/__init__.py index ff8f11656..0cace8e94 100644 --- a/src/vtlengine/__init__.py +++ b/src/vtlengine/__init__.py @@ -24,4 +24,4 @@ "validate_external_routine", ] -__version__ = "1.6.0rc5" +__version__ = "1.6.0rc6" From 90b727b22f395317fee3b976235d57c40f9b896d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Thu, 12 Mar 2026 13:22:19 +0100 Subject: [PATCH 29/38] Bump version to 1.6.0 (#592) --- pyproject.toml | 2 +- src/vtlengine/__init__.py | 2 +- 2 files changed, 
2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c4a911e56..0fac0fda3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "vtlengine" -version = "1.6.0rc6" +version = "1.6.0" description = "Run and Validate VTL Scripts" license = "AGPL-3.0" readme = "README.md" diff --git a/src/vtlengine/__init__.py b/src/vtlengine/__init__.py index 0cace8e94..a611948db 100644 --- a/src/vtlengine/__init__.py +++ b/src/vtlengine/__init__.py @@ -24,4 +24,4 @@ "validate_external_routine", ] -__version__ = "1.6.0rc6" +__version__ = "1.6.0" From 09329548c32d4b1d48e771041f9b9999203ebdde Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Thu, 12 Mar 2026 13:33:43 +0100 Subject: [PATCH 30/38] Exclude PRs with workflows label from release notes (#593) --- .github/workflows/create-release.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index 744ac755e..74d72a283 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -114,6 +114,7 @@ jobs: title url author { login } + labels(first: 10) { nodes { name } } closingIssuesReferences(first: 10, userLinkedOnly: false) { nodes { number @@ -225,6 +226,10 @@ jobs: if (!isPrerelease && /^\(QA\b/i.test(title)) { continue; } + const labels = pr.labels?.nodes?.map(l => l.name) || []; + if (labels.includes('workflows')) { + continue; + } const isDependabot = author === 'dependabot[bot]' || author === 'dependabot'; From 7c29eaaf35b658f92380e534e3922ff798cfaebe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Francisco=20Javier=20Hern=C3=A1ndez=20del=20Ca=C3=B1o?= Date: Thu, 12 Mar 2026 14:06:12 +0100 Subject: [PATCH 31/38] Update GitHub Actions to latest versions for Node.js 24 compatibility (#595) --- .github/workflows/create-release.yml | 4 ++-- .github/workflows/docs.yml | 10 +++++----- .github/workflows/release.yml | 4 
++-- .github/workflows/testing.yml | 4 ++-- .github/workflows/ubuntu_test_24_04.yml | 2 +- .github/workflows/version.yml | 4 ++-- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml index 74d72a283..1a81dff68 100644 --- a/.github/workflows/create-release.yml +++ b/.github/workflows/create-release.yml @@ -13,10 +13,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Create release with notes - uses: actions/github-script@v7 + uses: actions/github-script@v8 with: script: | const fs = require('fs'); diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index d1cc42fba..d89892cb6 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -38,7 +38,7 @@ jobs: steps: - name: Check for documentation label id: check - uses: actions/github-script@v7 + uses: actions/github-script@v8 with: script: | const merged = context.payload.pull_request.merged; @@ -97,16 +97,16 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 with: fetch-depth: 0 # Fetch all history for all tags and branches - name: Setup Pages id: pages - uses: actions/configure-pages@v4 + uses: actions/configure-pages@v5 - name: Install poetry run: pipx install poetry - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: '3.12' - name: Install dependencies @@ -144,7 +144,7 @@ jobs: exit 1 - name: Upload artifact # Automatically uploads an artifact from the './_site' directory by default - uses: actions/upload-pages-artifact@v3 + uses: actions/upload-pages-artifact@v4 # Deployment job deploy: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 10613383e..9570c6afb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -17,11 +17,11 @@ jobs: steps: - 
name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Install poetry run: pipx install poetry - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.x" - name: Build package diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index d27f87f52..516b4be18 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -22,11 +22,11 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Install poetry run: pipx install poetry - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/ubuntu_test_24_04.yml b/.github/workflows/ubuntu_test_24_04.yml index 296084a01..f3c1184a8 100644 --- a/.github/workflows/ubuntu_test_24_04.yml +++ b/.github/workflows/ubuntu_test_24_04.yml @@ -15,7 +15,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Install system dependencies run: | diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml index 748f7a57f..1cef57086 100644 --- a/.github/workflows/version.yml +++ b/.github/workflows/version.yml @@ -14,10 +14,10 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: '3.12' From a88823cda612114258fa1a634eedfebd799c565d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Mar 2026 10:01:49 +0100 Subject: [PATCH 32/38] Bump ruff from 0.15.5 to 0.15.6 (#602) Bumps [ruff](https://github.com/astral-sh/ruff) from 0.15.5 to 0.15.6. 
- [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.15.5...0.15.6) --- updated-dependencies: - dependency-name: ruff dependency-version: 0.15.6 dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index 229240c95..fc960a400 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2912,30 +2912,30 @@ files = [ [[package]] name = "ruff" -version = "0.15.5" +version = "0.15.6" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" groups = ["dev"] files = [ - {file = "ruff-0.15.5-py3-none-linux_armv6l.whl", hash = "sha256:4ae44c42281f42e3b06b988e442d344a5b9b72450ff3c892e30d11b29a96a57c"}, - {file = "ruff-0.15.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6edd3792d408ebcf61adabc01822da687579a1a023f297618ac27a5b51ef0080"}, - {file = "ruff-0.15.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:89f463f7c8205a9f8dea9d658d59eff49db05f88f89cc3047fb1a02d9f344010"}, - {file = "ruff-0.15.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba786a8295c6574c1116704cf0b9e6563de3432ac888d8f83685654fe528fd65"}, - {file = "ruff-0.15.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd4b801e57955fe9f02b31d20375ab3a5c4415f2e5105b79fb94cf2642c91440"}, - {file = "ruff-0.15.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391f7c73388f3d8c11b794dbbc2959a5b5afe66642c142a6effa90b45f6f5204"}, - {file = "ruff-0.15.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8dc18f30302e379fe1e998548b0f5e9f4dff907f52f73ad6da419ea9c19d66c8"}, - {file = "ruff-0.15.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc6e7f90087e2d27f98dc34ed1b3ab7c8f0d273cc5431415454e22c0bd2a681"}, - {file = "ruff-0.15.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cb7169f53c1ddb06e71a9aebd7e98fc0fea936b39afb36d8e86d36ecc2636a"}, - {file = "ruff-0.15.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9b037924500a31ee17389b5c8c4d88874cc6ea8e42f12e9c61a3d754ff72f1ca"}, - {file = "ruff-0.15.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:65bb414e5b4eadd95a8c1e4804f6772bbe8995889f203a01f77ddf2d790929dd"}, - {file = "ruff-0.15.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d20aa469ae3b57033519c559e9bc9cd9e782842e39be05b50e852c7c981fa01d"}, - {file = "ruff-0.15.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:15388dd28c9161cdb8eda68993533acc870aa4e646a0a277aa166de9ad5a8752"}, - {file = "ruff-0.15.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b30da330cbd03bed0c21420b6b953158f60c74c54c5f4c1dabbdf3a57bf355d2"}, - {file = "ruff-0.15.5-py3-none-win32.whl", hash = "sha256:732e5ee1f98ba5b3679029989a06ca39a950cced52143a0ea82a2102cb592b74"}, - {file = "ruff-0.15.5-py3-none-win_amd64.whl", hash = "sha256:821d41c5fa9e19117616c35eaa3f4b75046ec76c65e7ae20a333e9a8696bc7fe"}, - {file = "ruff-0.15.5-py3-none-win_arm64.whl", hash = "sha256:b498d1c60d2fe5c10c45ec3f698901065772730b411f164ae270bb6bfcc4740b"}, - {file = "ruff-0.15.5.tar.gz", hash = "sha256:7c3601d3b6d76dce18c5c824fc8d06f4eef33d6df0c21ec7799510cde0f159a2"}, + {file = "ruff-0.15.6-py3-none-linux_armv6l.whl", hash = "sha256:7c98c3b16407b2cf3d0f2b80c80187384bc92c6774d85fefa913ecd941256fff"}, + {file = "ruff-0.15.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ee7dcfaad8b282a284df4aa6ddc2741b3f4a18b0555d626805555a820ea181c3"}, + {file = "ruff-0.15.6-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:3bd9967851a25f038fc8b9ae88a7fbd1b609f30349231dffaa37b6804923c4bb"}, + {file = "ruff-0.15.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13f4594b04e42cd24a41da653886b04d2ff87adbf57497ed4f728b0e8a4866f8"}, + {file = "ruff-0.15.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2ed8aea2f3fe57886d3f00ea5b8aae5bf68d5e195f487f037a955ff9fbaac9e"}, + {file = "ruff-0.15.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70789d3e7830b848b548aae96766431c0dc01a6c78c13381f423bf7076c66d15"}, + {file = "ruff-0.15.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:542aaf1de3154cea088ced5a819ce872611256ffe2498e750bbae5247a8114e9"}, + {file = "ruff-0.15.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22e6f02c16cfac3888aa636e9eba857254d15bbacc9906c9689fdecb1953ab"}, + {file = "ruff-0.15.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e"}, + {file = "ruff-0.15.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:70d263770d234912374493e8cc1e7385c5d49376e41dfa51c5c3453169dc581c"}, + {file = "ruff-0.15.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:55a1ad63c5a6e54b1f21b7514dfadc0c7fb40093fa22e95143cf3f64ebdcd512"}, + {file = "ruff-0.15.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8dc473ba093c5ec238bb1e7429ee676dca24643c471e11fbaa8a857925b061c0"}, + {file = "ruff-0.15.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:85b042377c2a5561131767974617006f99f7e13c63c111b998f29fc1e58a4cfb"}, + {file = "ruff-0.15.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cef49e30bc5a86a6a92098a7fbf6e467a234d90b63305d6f3ec01225a9d092e0"}, + {file = "ruff-0.15.6-py3-none-win32.whl", hash = "sha256:bbf67d39832404812a2d23020dda68fee7f18ce15654e96fb1d3ad21a5fe436c"}, + {file = "ruff-0.15.6-py3-none-win_amd64.whl", hash = 
"sha256:aee25bc84c2f1007ecb5037dff75cef00414fdf17c23f07dc13e577883dca406"}, + {file = "ruff-0.15.6-py3-none-win_arm64.whl", hash = "sha256:c34de3dd0b0ba203be50ae70f5910b17188556630e2178fd7d79fc030eb0d837"}, + {file = "ruff-0.15.6.tar.gz", hash = "sha256:8394c7bb153a4e3811a4ecdacd4a8e6a4fa8097028119160dffecdcdf9b56ae4"}, ] [[package]] From c3968dbf4a045cdca7fcd440aae5942359c5c489 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Tue, 17 Mar 2026 10:13:17 +0100 Subject: [PATCH 33/38] Fix #596: Fix empty-only-comments AST generation (#597) * Fixed empty/only-comments AST generation * Added related tests --- src/vtlengine/AST/ASTConstructor.py | 5 ++++- tests/AST/test_AST.py | 23 +++++++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/src/vtlengine/AST/ASTConstructor.py b/src/vtlengine/AST/ASTConstructor.py index 4954cad1c..994661b16 100644 --- a/src/vtlengine/AST/ASTConstructor.py +++ b/src/vtlengine/AST/ASTConstructor.py @@ -65,7 +65,10 @@ def visitStart(self, ctx: Parser.StartContext): for statement in statements: statements_nodes.append(self.visitStatement(statement)) - token_info = extract_token_info(ctx) + if ctx.stop is None: + token_info = {"column_start": 0, "column_stop": 0, "line_start": 1, "line_stop": 1} + else: + token_info = extract_token_info(ctx) start_node = Start(children=statements_nodes, **token_info) diff --git a/tests/AST/test_AST.py b/tests/AST/test_AST.py index bf49fd12e..e5d83bd31 100644 --- a/tests/AST/test_AST.py +++ b/tests/AST/test_AST.py @@ -948,3 +948,26 @@ def test_rule_name_not_in_ruleset(): """ ast = create_ast(text=script) assert len(ast.children) == 1 + + +empty_script_params = [ + "", + "//Comment", + "/*Comment*/", +] + + +@pytest.mark.parametrize("script", empty_script_params) +def test_create_ast_empty_script(script): + ast = create_ast(text=script) + assert isinstance(ast, Start) + assert ast.children == [] + + 
+@pytest.mark.parametrize("script", empty_script_params) +def test_create_ast_with_comments_empty_script(script): + from vtlengine.AST import Comment + + ast = create_ast_with_comments(text=script) + assert isinstance(ast, Start) + assert all(isinstance(child, Comment) for child in ast.children) From 7c86e89383c8650a1cb57db8c97b1abaf3d55bbf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Tue, 17 Mar 2026 11:01:40 +0100 Subject: [PATCH 34/38] Fix #598: Allow boolean constants in errorlevel and errorcode (#599) * Fixed empty/only-comments AST generation * Added related tests * Fixed errorlevel as boolean handling on ASTString * Fixed linting errors * Added related tests * Fixed mypy errors * Minor fix --- src/vtlengine/AST/ASTString.py | 16 +++--- src/vtlengine/AST/__init__.py | 12 ++--- src/vtlengine/Operators/Validation.py | 36 +++++++------ .../data/DataSet/input/GH_598_2-1.csv | 5 ++ .../data/DataSet/output/GH_598_2-1.csv | 3 ++ .../data/DataStructure/input/GH_598_2-1.json | 27 ++++++++++ .../data/DataStructure/output/GH_598_2-1.json | 45 ++++++++++++++++ tests/DatapointRulesets/data/vtl/GH_598_2.vtl | 6 +++ .../test_datapoint_rulesets.py | 43 ++++++++++++++++ .../data/DataSet/input/GH_598_1-1.csv | 7 +++ .../data/DataSet/output/GH_598_1-1.csv | 3 ++ .../data/DataStructure/input/GH_598_1-1.json | 27 ++++++++++ .../data/DataStructure/output/GH_598_1-1.json | 51 +++++++++++++++++++ tests/Hierarchical/data/vtl/GH_598_1.vtl | 6 +++ tests/Hierarchical/test_hierarchical.py | 43 ++++++++++++++++ .../data/DataSet/input/GH_598_3-1.csv | 5 ++ .../data/DataSet/output/GH_598_3-1.csv | 2 + .../data/DataStructure/input/GH_598_3-1.json | 27 ++++++++++ .../data/DataStructure/output/GH_598_3-1.json | 45 ++++++++++++++++ tests/Validation/data/vtl/GH_598_3.vtl | 1 + tests/Validation/test_validation.py | 38 ++++++++++++++ 21 files changed, 419 insertions(+), 29 deletions(-) create mode 100644 
tests/DatapointRulesets/data/DataSet/input/GH_598_2-1.csv create mode 100644 tests/DatapointRulesets/data/DataSet/output/GH_598_2-1.csv create mode 100644 tests/DatapointRulesets/data/DataStructure/input/GH_598_2-1.json create mode 100644 tests/DatapointRulesets/data/DataStructure/output/GH_598_2-1.json create mode 100644 tests/DatapointRulesets/data/vtl/GH_598_2.vtl create mode 100644 tests/Hierarchical/data/DataSet/input/GH_598_1-1.csv create mode 100644 tests/Hierarchical/data/DataSet/output/GH_598_1-1.csv create mode 100644 tests/Hierarchical/data/DataStructure/input/GH_598_1-1.json create mode 100644 tests/Hierarchical/data/DataStructure/output/GH_598_1-1.json create mode 100644 tests/Hierarchical/data/vtl/GH_598_1.vtl create mode 100644 tests/Validation/data/DataSet/input/GH_598_3-1.csv create mode 100644 tests/Validation/data/DataSet/output/GH_598_3-1.csv create mode 100644 tests/Validation/data/DataStructure/input/GH_598_3-1.json create mode 100644 tests/Validation/data/DataStructure/output/GH_598_3-1.json create mode 100644 tests/Validation/data/vtl/GH_598_3.vtl diff --git a/src/vtlengine/AST/ASTString.py b/src/vtlengine/AST/ASTString.py index 71ad5ccd0..e70b68e96 100644 --- a/src/vtlengine/AST/ASTString.py +++ b/src/vtlengine/AST/ASTString.py @@ -134,10 +134,10 @@ def visit_HRuleset(self, node: AST.HRuleset) -> None: self.vtl_script += f"define hierarchical ruleset {node.name}({signature}) is{nl}" for _i, rule in enumerate(node.rules): rule_str = f"{tab}{self.visit(rule)}" - if rule.erCode: + if rule.erCode is not None: rule_str += f"{nl}{tab}errorcode {_handle_literal(rule.erCode)}" - if rule.erLevel: - rule_str += f"{nl}{tab}errorlevel {rule.erLevel}" + if rule.erLevel is not None: + rule_str += f"{nl}{tab}errorlevel {_handle_literal(rule.erLevel)}" rules_strs.append(rule_str) rules_sep = f";{nl * 2}" if len(rules_strs) > 1 else "" rules = rules_sep.join(rules_strs) @@ -146,10 +146,10 @@ def visit_HRuleset(self, node: AST.HRuleset) -> None: else: for 
rule in node.rules: rule_str = self.visit(rule) - if rule.erCode: + if rule.erCode is not None: rule_str += f" errorcode {_handle_literal(rule.erCode)}" - if rule.erLevel: - rule_str += f" errorlevel {rule.erLevel}" + if rule.erLevel is not None: + rule_str += f" errorlevel {_handle_literal(rule.erLevel)}" rules_strs.append(rule_str) rules_sep = "; " if len(rules_strs) > 1 else "" rules = rules_sep.join(rules_strs) @@ -195,7 +195,7 @@ def visit_DPRule(self, node: AST.DPRule) -> str: if node.erCode is not None: lines.append(f"{tab * 3}errorcode {_handle_literal(node.erCode)}") if node.erLevel is not None: - lines.append(f"{tab * 3}errorlevel {node.erLevel}") + lines.append(f"{tab * 3}errorlevel {_handle_literal(node.erLevel)}") return nl.join(lines) else: vtl_script = "" @@ -205,7 +205,7 @@ def visit_DPRule(self, node: AST.DPRule) -> str: if node.erCode is not None: vtl_script += f" errorcode {_handle_literal(node.erCode)}" if node.erLevel is not None: - vtl_script += f" errorlevel {node.erLevel}" + vtl_script += f" errorlevel {_handle_literal(node.erLevel)}" return vtl_script def visit_DPRIdentifier(self, node: AST.DPRIdentifier) -> str: diff --git a/src/vtlengine/AST/__init__.py b/src/vtlengine/AST/__init__.py index 990a54971..36600c2ac 100644 --- a/src/vtlengine/AST/__init__.py +++ b/src/vtlengine/AST/__init__.py @@ -484,8 +484,8 @@ class Validation(AST): op: str validation: AST - error_code: Optional[str] - error_level: Optional[Union[int, str]] + error_code: Optional[Union[str, int, float, bool]] + error_level: Optional[Union[str, int, float, bool]] imbalance: Optional[AST] invalid: bool @@ -631,8 +631,8 @@ class HRule(AST): name: Optional[str] rule: HRBinOp - erCode: Optional[str] - erLevel: Optional[Union[int, str]] + erCode: Optional[Union[str, int, float, bool]] + erLevel: Optional[Union[str, int, float, bool]] __eq__ = AST.ast_equality @@ -645,8 +645,8 @@ class DPRule(AST): name: Optional[str] rule: HRBinOp - erCode: Optional[str] - erLevel: 
Optional[Union[int, str]] + erCode: Optional[Union[str, int, float, bool]] + erLevel: Optional[Union[str, int, float, bool]] __eq__ = AST.ast_equality diff --git a/src/vtlengine/Operators/Validation.py b/src/vtlengine/Operators/Validation.py index bba9735d4..7c89d6273 100644 --- a/src/vtlengine/Operators/Validation.py +++ b/src/vtlengine/Operators/Validation.py @@ -1,5 +1,5 @@ from copy import copy -from typing import Any, Dict, Optional, Union +from typing import Any, Dict, Optional, Type, Union import pandas as pd @@ -8,6 +8,7 @@ Boolean, Integer, Number, + ScalarType, String, check_unary_implicit_promotion, ) @@ -26,8 +27,8 @@ def validate( cls, validation_element: Dataset, imbalance_element: Optional[Dataset], - error_code: Optional[str], - error_level: Optional[Union[int, str]], + error_code: Optional[Union[str, int, float, bool]], + error_level: Optional[Union[str, int, float, bool]], invalid: bool, ) -> Dataset: dataset_name = VirtualCounter._new_ds_name() @@ -36,11 +37,13 @@ def validate( measure = validation_element.get_measures()[0] if measure.data_type != Boolean: raise SemanticError("1-1-10-1", op=cls.op, op_type="validation", me_type="Boolean") - error_level_type = None - if error_level is None or isinstance(error_level, int): + error_level_type: Optional[Type[ScalarType]] = None + if isinstance(error_level, bool): + error_level_type = Boolean + elif error_level is None or isinstance(error_level, int): error_level_type = Integer elif isinstance(error_level, str): - error_level_type = String # type: ignore[assignment] + error_level_type = String else: error_level_type = String @@ -79,7 +82,7 @@ def validate( result_components["errorlevel"] = Component( name="errorlevel", - data_type=error_level_type, # type: ignore[arg-type] + data_type=error_level_type, role=Role.MEASURE, nullable=True, ) @@ -91,8 +94,8 @@ def evaluate( cls, validation_element: Dataset, imbalance_element: Optional[Dataset], - error_code: Optional[str], - error_level: 
Optional[Union[int, str]], + error_code: Optional[Union[str, int, float, bool]], + error_level: Optional[Union[str, int, float, bool]], invalid: bool, ) -> Dataset: result = cls.validate( @@ -116,7 +119,8 @@ def evaluate( bool_col = result.data[validation_measure_name] is_false = bool_col.fillna(True) == False # noqa: E712 result.data["errorcode"] = pd.Series(None, index=result.data.index, dtype="string[pyarrow]") - result.data.loc[is_false, "errorcode"] = error_code + ec_value = str(error_code) if error_code is not None else None + result.data.loc[is_false, "errorcode"] = ec_value errorlevel_dtype = result.components["errorlevel"].data_type.dtype() result.data["errorlevel"] = pd.Series(None, index=result.data.index, dtype=errorlevel_dtype) if error_level is not None: @@ -150,7 +154,7 @@ def _generate_result_data(cls, rule_info: Dict[str, Any]) -> pd.DataFrame: @classmethod def validate(cls, dataset_element: Dataset, rule_info: Dict[str, Any], output: str) -> Dataset: - error_level_type = None + error_level_type: Optional[Type[ScalarType]] = None error_levels = [ rule_data.get("errorlevel") for rule_data in rule_info.values() @@ -158,12 +162,14 @@ def validate(cls, dataset_element: Dataset, rule_info: Dict[str, Any], output: s ] non_null_levels = [el for el in error_levels if el is not None] - if len(non_null_levels) == 0 or all(isinstance(el, int) for el in non_null_levels): + if all(isinstance(el, bool) for el in non_null_levels) and len(non_null_levels) > 0: + error_level_type = Boolean + elif len(non_null_levels) == 0 or all(isinstance(el, int) for el in non_null_levels): error_level_type = Number elif all(isinstance(el, str) for el in non_null_levels): - error_level_type = String # type: ignore[assignment] + error_level_type = String else: - error_level_type = String # type: ignore[assignment] + error_level_type = String dataset_name = VirtualCounter._new_ds_name() result_components = {comp.name: comp for comp in dataset_element.get_identifiers()} 
result_components["ruleid"] = Component( @@ -191,7 +197,7 @@ def validate(cls, dataset_element: Dataset, rule_info: Dict[str, Any], output: s ) result_components["errorlevel"] = Component( name="errorlevel", - data_type=error_level_type, # type: ignore[arg-type] + data_type=error_level_type, role=Role.MEASURE, nullable=True, ) diff --git a/tests/DatapointRulesets/data/DataSet/input/GH_598_2-1.csv b/tests/DatapointRulesets/data/DataSet/input/GH_598_2-1.csv new file mode 100644 index 000000000..c93f7a146 --- /dev/null +++ b/tests/DatapointRulesets/data/DataSet/input/GH_598_2-1.csv @@ -0,0 +1,5 @@ +Id_1,Id_2,Me_1 +A,1,10 +A,2,-5 +B,1,100 +B,2,30 diff --git a/tests/DatapointRulesets/data/DataSet/output/GH_598_2-1.csv b/tests/DatapointRulesets/data/DataSet/output/GH_598_2-1.csv new file mode 100644 index 000000000..efa161630 --- /dev/null +++ b/tests/DatapointRulesets/data/DataSet/output/GH_598_2-1.csv @@ -0,0 +1,3 @@ +Id_1,Id_2,ruleid,Me_1,errorcode,errorlevel +A,2,rule_1,-5.0,True,True +B,2,rule_2,30.0,False,False diff --git a/tests/DatapointRulesets/data/DataStructure/input/GH_598_2-1.json b/tests/DatapointRulesets/data/DataStructure/input/GH_598_2-1.json new file mode 100644 index 000000000..a20b58429 --- /dev/null +++ b/tests/DatapointRulesets/data/DataStructure/input/GH_598_2-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/DatapointRulesets/data/DataStructure/output/GH_598_2-1.json b/tests/DatapointRulesets/data/DataStructure/output/GH_598_2-1.json new file mode 100644 index 000000000..ed586e7c3 --- /dev/null +++ b/tests/DatapointRulesets/data/DataStructure/output/GH_598_2-1.json @@ -0,0 +1,45 @@ +{ + "datasets": [ + { + "name": 
"DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "ruleid", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "errorcode", + "role": "Measure", + "type": "String", + "nullable": true + }, + { + "name": "errorlevel", + "role": "Measure", + "type": "Boolean", + "nullable": true + } + ] + } + ] +} diff --git a/tests/DatapointRulesets/data/vtl/GH_598_2.vtl b/tests/DatapointRulesets/data/vtl/GH_598_2.vtl new file mode 100644 index 000000000..171db56df --- /dev/null +++ b/tests/DatapointRulesets/data/vtl/GH_598_2.vtl @@ -0,0 +1,6 @@ +define datapoint ruleset dp_bool (variable Id_1 as X, Me_1 as M) is + rule_1: when X = "A" then M > 0 errorcode true errorlevel true; + rule_2: when X = "B" then M > 50 errorcode false errorlevel false +end datapoint ruleset; + +DS_r <- check_datapoint(DS_1, dp_bool); diff --git a/tests/DatapointRulesets/test_datapoint_rulesets.py b/tests/DatapointRulesets/test_datapoint_rulesets.py index 1d8434e0e..bcf51f3ba 100644 --- a/tests/DatapointRulesets/test_datapoint_rulesets.py +++ b/tests/DatapointRulesets/test_datapoint_rulesets.py @@ -1,6 +1,8 @@ from pathlib import Path from tests.Helper import TestHelper +from vtlengine.API import create_ast +from vtlengine.AST.ASTString import ASTString class TestDataPointRuleset(TestHelper): @@ -420,3 +422,44 @@ def test_14(self): self.NewSemanticExceptionTest( code=code, number_inputs=number_inputs, exception_code=message ) + + def test_GH_598_2(self): + """ + check_datapoint with boolean errorcode and errorlevel constants. 
+ Dataset --> Dataset + Status: OK + + define datapoint ruleset dp_bool (variable Id_1 as X, Me_1 as M) is + rule_1: when X = "A" then M > 0 errorcode true errorlevel true; + rule_2: when X = "B" then M > 50 errorcode false errorlevel false + end datapoint ruleset; + + DS_r <- check_datapoint(DS_1, dp_bool); + + Git Branch: cr-596. + Goal: Verify boolean constants work as errorcode/errorlevel in datapoint + rulesets, including AST round-trip. + """ + code = "GH_598_2" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_GH_598_2_roundtrip(self): + """ + AST round-trip for check_datapoint with boolean errorcode/errorlevel. + + Git Branch: cr-596. + Goal: Verify that rendering the AST back to VTL and re-parsing produces the + same result when boolean constants are used in errorcode/errorlevel. + """ + code = "GH_598_2" + number_inputs = 1 + references_names = ["1"] + + text = self.LoadVTL(code) + rendered = ASTString().render(create_ast(text)) + self.BaseTest( + code=code, number_inputs=number_inputs, references_names=references_names, text=rendered + ) diff --git a/tests/Hierarchical/data/DataSet/input/GH_598_1-1.csv b/tests/Hierarchical/data/DataSet/input/GH_598_1-1.csv new file mode 100644 index 000000000..2e7361820 --- /dev/null +++ b/tests/Hierarchical/data/DataSet/input/GH_598_1-1.csv @@ -0,0 +1,7 @@ +Id_1,Id_2,Me_1 +B,XX,100 +C,XX,80 +D,XX,30 +N,XX,200 +A,XX,190 +L,XX,10 diff --git a/tests/Hierarchical/data/DataSet/output/GH_598_1-1.csv b/tests/Hierarchical/data/DataSet/output/GH_598_1-1.csv new file mode 100644 index 000000000..5a9e87326 --- /dev/null +++ b/tests/Hierarchical/data/DataSet/output/GH_598_1-1.csv @@ -0,0 +1,3 @@ +Id_1,Id_2,ruleid,Me_1,errorcode,errorlevel,imbalance +B,XX,1,100.0,True,True,50.0 +N,XX,2,200.0,False,False,20.0 diff --git a/tests/Hierarchical/data/DataStructure/input/GH_598_1-1.json 
b/tests/Hierarchical/data/DataStructure/input/GH_598_1-1.json new file mode 100644 index 000000000..50a362378 --- /dev/null +++ b/tests/Hierarchical/data/DataStructure/input/GH_598_1-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Hierarchical/data/DataStructure/output/GH_598_1-1.json b/tests/Hierarchical/data/DataStructure/output/GH_598_1-1.json new file mode 100644 index 000000000..5d681c006 --- /dev/null +++ b/tests/Hierarchical/data/DataStructure/output/GH_598_1-1.json @@ -0,0 +1,51 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "ruleid", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "errorcode", + "role": "Measure", + "type": "String", + "nullable": true + }, + { + "name": "errorlevel", + "role": "Measure", + "type": "Boolean", + "nullable": true + }, + { + "name": "imbalance", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Hierarchical/data/vtl/GH_598_1.vtl b/tests/Hierarchical/data/vtl/GH_598_1.vtl new file mode 100644 index 000000000..2bb632d28 --- /dev/null +++ b/tests/Hierarchical/data/vtl/GH_598_1.vtl @@ -0,0 +1,6 @@ +define hierarchical ruleset hr_bool (variable rule Id_1) is + B = C - D errorcode true errorlevel true; + N = A - L errorcode false errorlevel false +end hierarchical ruleset; + +DS_r <- check_hierarchy(DS_1, hr_bool rule 
Id_1); diff --git a/tests/Hierarchical/test_hierarchical.py b/tests/Hierarchical/test_hierarchical.py index 727d4071a..37225fe76 100644 --- a/tests/Hierarchical/test_hierarchical.py +++ b/tests/Hierarchical/test_hierarchical.py @@ -3,6 +3,8 @@ import pytest from tests.Helper import TestHelper +from vtlengine.API import create_ast +from vtlengine.AST.ASTString import ASTString class HierarchicalHelper(TestHelper): @@ -1437,6 +1439,47 @@ def test_GL_566_1(self): self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + def test_GH_598_1(self): + """ + check_hierarchy with boolean errorcode and errorlevel constants. + Dataset --> Dataset + Status: OK + + define hierarchical ruleset hr_bool (variable rule Id_1) is + B = C - D errorcode true errorlevel true; + N = A - L errorcode false errorlevel false + end hierarchical ruleset; + + DS_r <- check_hierarchy(DS_1, hr_bool rule Id_1); + + Git Branch: cr-596. + Goal: Verify boolean constants work as errorcode/errorlevel in hierarchical + rulesets, including AST round-trip. + """ + code = "GH_598_1" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_GH_598_1_roundtrip(self): + """ + AST round-trip for check_hierarchy with boolean errorcode/errorlevel. + + Git Branch: cr-596. + Goal: Verify that rendering the AST back to VTL and re-parsing produces the + same result when boolean constants are used in errorcode/errorlevel. 
+ """ + code = "GH_598_1" + number_inputs = 1 + references_names = ["1"] + + text = self.LoadVTL(code) + rendered = ASTString().render(create_ast(text)) + self.BaseTest( + code=code, number_inputs=number_inputs, references_names=references_names, text=rendered + ) + class HierarchicalRollUpOperatorsTest(HierarchicalHelper): """ diff --git a/tests/Validation/data/DataSet/input/GH_598_3-1.csv b/tests/Validation/data/DataSet/input/GH_598_3-1.csv new file mode 100644 index 000000000..c93f7a146 --- /dev/null +++ b/tests/Validation/data/DataSet/input/GH_598_3-1.csv @@ -0,0 +1,5 @@ +Id_1,Id_2,Me_1 +A,1,10 +A,2,-5 +B,1,100 +B,2,30 diff --git a/tests/Validation/data/DataSet/output/GH_598_3-1.csv b/tests/Validation/data/DataSet/output/GH_598_3-1.csv new file mode 100644 index 000000000..006ac0b2b --- /dev/null +++ b/tests/Validation/data/DataSet/output/GH_598_3-1.csv @@ -0,0 +1,2 @@ +Id_1,Id_2,bool_var,imbalance,errorcode,errorlevel +A,2,False,,True,False diff --git a/tests/Validation/data/DataStructure/input/GH_598_3-1.json b/tests/Validation/data/DataStructure/input/GH_598_3-1.json new file mode 100644 index 000000000..a20b58429 --- /dev/null +++ b/tests/Validation/data/DataStructure/input/GH_598_3-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Validation/data/DataStructure/output/GH_598_3-1.json b/tests/Validation/data/DataStructure/output/GH_598_3-1.json new file mode 100644 index 000000000..911194762 --- /dev/null +++ b/tests/Validation/data/DataStructure/output/GH_598_3-1.json @@ -0,0 +1,45 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "String", + "nullable": 
false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "Integer", + "nullable": false + }, + { + "name": "bool_var", + "role": "Measure", + "type": "Boolean", + "nullable": true + }, + { + "name": "imbalance", + "role": "Measure", + "type": "Number", + "nullable": true + }, + { + "name": "errorcode", + "role": "Measure", + "type": "String", + "nullable": true + }, + { + "name": "errorlevel", + "role": "Measure", + "type": "Boolean", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Validation/data/vtl/GH_598_3.vtl b/tests/Validation/data/vtl/GH_598_3.vtl new file mode 100644 index 000000000..86b6d3c64 --- /dev/null +++ b/tests/Validation/data/vtl/GH_598_3.vtl @@ -0,0 +1 @@ +DS_r <- check(DS_1#Me_1 > 0 errorcode true errorlevel false invalid); diff --git a/tests/Validation/test_validation.py b/tests/Validation/test_validation.py index a69a4dae9..0a8e751b7 100644 --- a/tests/Validation/test_validation.py +++ b/tests/Validation/test_validation.py @@ -1,6 +1,8 @@ from pathlib import Path from tests.Helper import TestHelper +from vtlengine.API import create_ast +from vtlengine.AST.ASTString import ASTString class ValidationHelper(TestHelper): @@ -451,3 +453,39 @@ def test_GH_427_2(self): references_names = ["1"] self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_GH_598_3(self): + """ + Check with boolean errorcode and errorlevel constants. + Dataset --> Dataset + Status: OK + + DS_r <- check(DS_1#Me_1 > 0 errorcode true errorlevel false invalid); + + Git Branch: cr-596. + Goal: Verify boolean constants work as errorcode/errorlevel in the check + operator, including AST round-trip. + """ + code = "GH_598_3" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + + def test_GH_598_3_roundtrip(self): + """ + AST round-trip for check with boolean errorcode/errorlevel. + + Git Branch: cr-596. 
+ Goal: Verify that rendering the AST back to VTL and re-parsing produces the + same result when boolean constants are used in errorcode/errorlevel. + """ + code = "GH_598_3" + number_inputs = 1 + references_names = ["1"] + + text = self.LoadVTL(code) + rendered = ASTString().render(create_ast(text)) + self.BaseTest( + code=code, number_inputs=number_inputs, references_names=references_names, text=rendered + ) From d0373f0091b4ea37bf71fcfacd9003f01925369c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Tue, 17 Mar 2026 11:32:24 +0100 Subject: [PATCH 35/38] Fix #565: Review Time_Agg in group by / group except (#591) * Implemented new time_agg in group_by/except functionality * Added related tests * Added more tests --- .../AST/ASTConstructorModules/Expr.py | 79 +++++++++------ src/vtlengine/Interpreter/__init__.py | 55 +++++++---- .../data/DataSet/input/7-30-DS_1.csv | 8 ++ .../data/DataSet/input/7-31-DS_1.csv | 9 ++ .../data/DataSet/input/7-32-DS_1.csv | 8 ++ .../data/DataSet/input/7-33-DS_1.csv | 7 ++ .../data/DataSet/input/7-34-DS_1.csv | 6 ++ .../data/DataSet/input/7-35-DS_1.csv | 7 ++ .../data/DataSet/output/7-30-DS_r.csv | 4 + .../data/DataSet/output/7-31-DS_r.csv | 2 + .../data/DataSet/output/7-32-DS_r.csv | 4 + .../data/DataSet/output/7-33-DS_r.csv | 4 + .../data/DataSet/output/7-34-DS_r.csv | 3 + .../data/DataSet/output/7-35-DS_r.csv | 3 + .../data/DataStructure/input/7-30-DS_1.json | 27 ++++++ .../data/DataStructure/input/7-31-DS_1.json | 27 ++++++ .../data/DataStructure/input/7-32-DS_1.json | 27 ++++++ .../data/DataStructure/input/7-33-DS_1.json | 33 +++++++ .../data/DataStructure/input/7-34-DS_1.json | 33 +++++++ .../data/DataStructure/input/7-35-DS_1.json | 33 +++++++ .../data/DataStructure/output/7-30-DS_r.json | 27 ++++++ .../data/DataStructure/output/7-31-DS_r.json | 21 ++++ .../data/DataStructure/output/7-32-DS_r.json | 27 ++++++ 
.../data/DataStructure/output/7-33-DS_r.json | 33 +++++++ .../data/DataStructure/output/7-34-DS_r.json | 27 ++++++ .../data/DataStructure/output/7-35-DS_r.json | 21 ++++ tests/Additional/test_additional.py | 97 +++++++++++++++++++ 27 files changed, 582 insertions(+), 50 deletions(-) create mode 100644 tests/Additional/data/DataSet/input/7-30-DS_1.csv create mode 100644 tests/Additional/data/DataSet/input/7-31-DS_1.csv create mode 100644 tests/Additional/data/DataSet/input/7-32-DS_1.csv create mode 100644 tests/Additional/data/DataSet/input/7-33-DS_1.csv create mode 100644 tests/Additional/data/DataSet/input/7-34-DS_1.csv create mode 100644 tests/Additional/data/DataSet/input/7-35-DS_1.csv create mode 100644 tests/Additional/data/DataSet/output/7-30-DS_r.csv create mode 100644 tests/Additional/data/DataSet/output/7-31-DS_r.csv create mode 100644 tests/Additional/data/DataSet/output/7-32-DS_r.csv create mode 100644 tests/Additional/data/DataSet/output/7-33-DS_r.csv create mode 100644 tests/Additional/data/DataSet/output/7-34-DS_r.csv create mode 100644 tests/Additional/data/DataSet/output/7-35-DS_r.csv create mode 100644 tests/Additional/data/DataStructure/input/7-30-DS_1.json create mode 100644 tests/Additional/data/DataStructure/input/7-31-DS_1.json create mode 100644 tests/Additional/data/DataStructure/input/7-32-DS_1.json create mode 100644 tests/Additional/data/DataStructure/input/7-33-DS_1.json create mode 100644 tests/Additional/data/DataStructure/input/7-34-DS_1.json create mode 100644 tests/Additional/data/DataStructure/input/7-35-DS_1.json create mode 100644 tests/Additional/data/DataStructure/output/7-30-DS_r.json create mode 100644 tests/Additional/data/DataStructure/output/7-31-DS_r.json create mode 100644 tests/Additional/data/DataStructure/output/7-32-DS_r.json create mode 100644 tests/Additional/data/DataStructure/output/7-33-DS_r.json create mode 100644 tests/Additional/data/DataStructure/output/7-34-DS_r.json create mode 100644 
tests/Additional/data/DataStructure/output/7-35-DS_r.json diff --git a/src/vtlengine/AST/ASTConstructorModules/Expr.py b/src/vtlengine/AST/ASTConstructorModules/Expr.py index 1f93d6ac1..744bbd02e 100644 --- a/src/vtlengine/AST/ASTConstructorModules/Expr.py +++ b/src/vtlengine/AST/ASTConstructorModules/Expr.py @@ -1,6 +1,6 @@ import re from copy import copy -from typing import Any, Optional +from typing import Any, List, Optional, Tuple from antlr4.tree.Tree import TerminalNodeImpl @@ -1653,8 +1653,11 @@ def visitAggrClause(self, ctx: Parser.AggrClauseContext): def visitGroupingClause(self, ctx: Parser.GroupingClauseContext): """ groupingClause: - GROUP op=(BY | EXCEPT) componentID (COMMA componentID)* # groupByOrExcept - | GROUP ALL exprComponent # groupAll + GROUP op=(BY | EXCEPT) componentID (COMMA componentID)* + ( TIME_AGG LPAREN STRING_CONSTANT + (COMMA delim=(FIRST|LAST))? RPAREN )? + | GROUP ALL ( TIME_AGG LPAREN STRING_CONSTANT + (COMMA delim=(FIRST|LAST))? RPAREN )? ; """ if isinstance(ctx, Parser.GroupByOrExceptContext): @@ -1706,6 +1709,22 @@ def visitHavingClause(self, ctx: Parser.HavingClauseContext): op=op_node, children=None, params=param_nodes, **extract_token_info(ctx) ), expr + @staticmethod + def _extract_time_agg_tokens( + ctx_list: List[Any], + ) -> Tuple[Optional[str], Optional[str]]: + """Extract TIME_AGG parameters (period_to, conf) from parse tree children.""" + period_to: Optional[str] = None + conf: Optional[str] = None + for child in ctx_list: + if isinstance(child, TerminalNodeImpl): + token = child.getSymbol() + if token.type == Parser.STRING_CONSTANT: + period_to = token.text[1:-1] + elif token.type in (Parser.FIRST, Parser.LAST): + conf = token.text + return period_to, conf + def visitGroupByOrExcept(self, ctx: Parser.GroupByOrExceptContext): ctx_list = list(ctx.getChildren()) @@ -1714,11 +1733,29 @@ def visitGroupByOrExcept(self, ctx: Parser.GroupByOrExceptContext): op_node = token_left + " " + token_right - children_nodes = [ - 
Terminals().visitComponentID(identifier) - for identifier in ctx_list - if isinstance(identifier, Parser.ComponentIDContext) - ] + children_nodes: List[Any] = [] + has_time_agg = False + + for child in ctx_list: + if isinstance(child, Parser.ComponentIDContext): + children_nodes.append(Terminals().visitComponentID(child)) + elif isinstance(child, TerminalNodeImpl) and child.getSymbol().type == Parser.TIME_AGG: + has_time_agg = True + + if has_time_agg: + period_to, conf = self._extract_time_agg_tokens(ctx_list) + if period_to is None: + raise NotImplementedError + children_nodes.append( + TimeAggregation( + op="time_agg", + operand=None, + period_to=period_to, + period_from=None, + conf=conf, + **extract_token_info(ctx), + ) + ) return op_node, children_nodes @@ -1734,34 +1771,14 @@ def visitGroupAll(self, ctx: Parser.GroupAllContext): # Check if TIME_AGG is present (more than just GROUP ALL) if len(ctx_list) > 2: - period_to = None - period_from = None - operand_node = None - conf = None - - for child in ctx_list: - if isinstance(child, TerminalNodeImpl): - token = child.getSymbol() - if token.type == Parser.STRING_CONSTANT: - if period_to is None: - period_to = token.text[1:-1] - else: - period_from = token.text[1:-1] - elif token.type in [Parser.FIRST, Parser.LAST]: - conf = token.text - elif isinstance(child, Parser.OptionalExprContext): - operand_node = self.visitOptionalExpr(child) - if isinstance(operand_node, ID): - operand_node = None - elif isinstance(operand_node, Identifier): - operand_node = VarID(value=operand_node.value, **extract_token_info(child)) + period_to, conf = self._extract_time_agg_tokens(ctx_list) children_nodes = [ TimeAggregation( op="time_agg", - operand=operand_node, + operand=None, period_to=period_to, - period_from=period_from, + period_from=None, conf=conf, **extract_token_info(ctx), ) diff --git a/src/vtlengine/Interpreter/__init__.py b/src/vtlengine/Interpreter/__init__.py index 656d22c46..e7d00040a 100644 --- 
a/src/vtlengine/Interpreter/__init__.py +++ b/src/vtlengine/Interpreter/__init__.py @@ -492,13 +492,35 @@ def visit_UnaryOp(self, node: AST.UnaryOp) -> None: return ROLE_SETTER_MAPPING[node.op].analyze(operand, data_size) return UNARY_MAPPING[node.op].analyze(operand) - def visit_Aggregation(self, node: AST.Aggregation) -> None: - # Having takes precedence as it is lower in the AST + @staticmethod + def _apply_time_agg_grouping( + operand: Dataset, + groupings: List[Any], + grouping_op: Optional[str], + ) -> List[Any]: + """Extract TimeAggregation DataComponent from groupings and merge into operand.""" + time_comp = None + regular_groupings: List[Any] = [] + for g in groupings: + if isinstance(g, DataComponent): + time_comp = g + else: + regular_groupings.append(g) + if time_comp is not None: + if operand.data is not None and time_comp.data is not None and len(time_comp.data) > 0: + operand.data = operand.data.copy() + operand.data[time_comp.name] = time_comp.data + if grouping_op != "group except": + regular_groupings.append(time_comp.name) + return regular_groupings + + def _resolve_aggregation_operand(self, node: AST.Aggregation) -> Any: + """Resolve the operand for an aggregation node.""" if self.is_from_having: if node.operand is not None: self.visit(node.operand) - operand = self.aggregation_dataset - elif self.is_from_regular_aggregation and self.regular_aggregation_dataset is not None: + return self.aggregation_dataset + if self.is_from_regular_aggregation and self.regular_aggregation_dataset is not None: operand = self.regular_aggregation_dataset if node.operand is not None and operand is not None: op_comp: DataComponent = self.visit(node.operand) @@ -520,9 +542,12 @@ def visit_Aggregation(self, node: AST.Aggregation) -> None: data_to_keep[op_comp.name] = op_comp.data else: data_to_keep = None - operand = Dataset(name=operand.name, components=comps_to_keep, data=data_to_keep) - else: - operand = self.visit(node.operand) + return Dataset(name=operand.name, 
components=comps_to_keep, data=data_to_keep) + return operand + return self.visit(node.operand) + + def visit_Aggregation(self, node: AST.Aggregation) -> None: + operand = self._resolve_aggregation_operand(node) if not isinstance(operand, Dataset): raise SemanticError("2-3-4", op=node.op, comp="dataset") @@ -538,7 +563,8 @@ def visit_Aggregation(self, node: AST.Aggregation) -> None: having = None grouping_op = node.grouping_op if node.grouping is not None: - if grouping_op == "group all": + has_time_agg = any(isinstance(x, AST.TimeAggregation) for x in node.grouping) + if grouping_op == "group all" or has_time_agg: data = None if self.only_semantic else copy(operand.data) self.aggregation_dataset = Dataset( name=operand.name, components=operand.components, data=data @@ -548,17 +574,8 @@ def visit_Aggregation(self, node: AST.Aggregation) -> None: for x in node.grouping: groupings.append(self.visit(x)) self.is_from_grouping = False - if grouping_op == "group all": - comp_grouped = groupings[0] - if ( - operand.data is not None - and comp_grouped.data is not None - and len(comp_grouped.data) > 0 - ): - # Deep copy the data to avoid modifying the original dataset - operand.data = operand.data.copy() - operand.data[comp_grouped.name] = comp_grouped.data - groupings = [comp_grouped.name] + if grouping_op == "group all" or has_time_agg: + groupings = self._apply_time_agg_grouping(operand, groupings, grouping_op) self.aggregation_dataset = None if node.having_clause is not None: self.aggregation_dataset = Dataset( diff --git a/tests/Additional/data/DataSet/input/7-30-DS_1.csv b/tests/Additional/data/DataSet/input/7-30-DS_1.csv new file mode 100644 index 000000000..c71ab1f2e --- /dev/null +++ b/tests/Additional/data/DataSet/input/7-30-DS_1.csv @@ -0,0 +1,8 @@ +Id_1,Id_2,Me_1 +2010Q1,A,20 +2010Q2,A,20 +2010Q3,A,20 +2010Q1,B,50 +2010Q2,B,50 +2010Q1,C,10 +2010Q2,C,10 diff --git a/tests/Additional/data/DataSet/input/7-31-DS_1.csv 
b/tests/Additional/data/DataSet/input/7-31-DS_1.csv new file mode 100644 index 000000000..2f5dc78a7 --- /dev/null +++ b/tests/Additional/data/DataSet/input/7-31-DS_1.csv @@ -0,0 +1,9 @@ +Id_1,Id_2,Me_1 +2010-10-22,A,20 +2010-09-15,A,30 +2010-08-27,A,50 +2010-07-14,A,80 +2010-06-18,B,100 +2010-05-04,B,20 +2010-04-20,B,30 +2010-03-12,B,40 diff --git a/tests/Additional/data/DataSet/input/7-32-DS_1.csv b/tests/Additional/data/DataSet/input/7-32-DS_1.csv new file mode 100644 index 000000000..e9ee6469a --- /dev/null +++ b/tests/Additional/data/DataSet/input/7-32-DS_1.csv @@ -0,0 +1,8 @@ +Id_1,Id_2,Me_1 +2010-03-15,A,10 +2010-07-20,A,30 +2010-11-05,A,20 +2011-02-10,A,40 +2011-08-25,A,60 +2010-04-12,B,50 +2010-09-30,B,70 diff --git a/tests/Additional/data/DataSet/input/7-33-DS_1.csv b/tests/Additional/data/DataSet/input/7-33-DS_1.csv new file mode 100644 index 000000000..a49a7fc25 --- /dev/null +++ b/tests/Additional/data/DataSet/input/7-33-DS_1.csv @@ -0,0 +1,7 @@ +Id_1,Id_2,Me_1,Me_2 +2010Q1,A,10,100 +2010Q2,A,20,200 +2010Q3,A,30,300 +2011Q1,A,40,400 +2011Q2,B,50,500 +2011Q3,B,60,600 diff --git a/tests/Additional/data/DataSet/input/7-34-DS_1.csv b/tests/Additional/data/DataSet/input/7-34-DS_1.csv new file mode 100644 index 000000000..9ea5f98a8 --- /dev/null +++ b/tests/Additional/data/DataSet/input/7-34-DS_1.csv @@ -0,0 +1,6 @@ +Id_1,Id_2,Id_3,Me_1 +2010Q1,A,X,10 +2010Q2,A,X,20 +2010Q1,A,Y,30 +2010Q2,B,X,40 +2010Q3,B,Y,50 diff --git a/tests/Additional/data/DataSet/input/7-35-DS_1.csv b/tests/Additional/data/DataSet/input/7-35-DS_1.csv new file mode 100644 index 000000000..cf05876ac --- /dev/null +++ b/tests/Additional/data/DataSet/input/7-35-DS_1.csv @@ -0,0 +1,7 @@ +Id_1,Id_2,Id_3,Me_1 +2010-03-15,A,X,10 +2010-06-20,A,Y,20 +2010-09-10,B,X,30 +2011-01-05,A,X,40 +2011-04-18,B,Y,50 +2011-07-22,B,X,60 diff --git a/tests/Additional/data/DataSet/output/7-30-DS_r.csv b/tests/Additional/data/DataSet/output/7-30-DS_r.csv new file mode 100644 index 000000000..a6605a588 --- 
/dev/null +++ b/tests/Additional/data/DataSet/output/7-30-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +2010,A,60 +2010,B,100 +2010,C,20 diff --git a/tests/Additional/data/DataSet/output/7-31-DS_r.csv b/tests/Additional/data/DataSet/output/7-31-DS_r.csv new file mode 100644 index 000000000..ce065c993 --- /dev/null +++ b/tests/Additional/data/DataSet/output/7-31-DS_r.csv @@ -0,0 +1,2 @@ +Id_1,Me_1 +2010-12-31,370 diff --git a/tests/Additional/data/DataSet/output/7-32-DS_r.csv b/tests/Additional/data/DataSet/output/7-32-DS_r.csv new file mode 100644 index 000000000..1fc30eed4 --- /dev/null +++ b/tests/Additional/data/DataSet/output/7-32-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +2010-01-01,A,60 +2011-01-01,A,100 +2010-01-01,B,120 diff --git a/tests/Additional/data/DataSet/output/7-33-DS_r.csv b/tests/Additional/data/DataSet/output/7-33-DS_r.csv new file mode 100644 index 000000000..888b369ed --- /dev/null +++ b/tests/Additional/data/DataSet/output/7-33-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1,Me_2 +2010,A,60,600 +2011,A,40,400 +2011,B,110,1100 diff --git a/tests/Additional/data/DataSet/output/7-34-DS_r.csv b/tests/Additional/data/DataSet/output/7-34-DS_r.csv new file mode 100644 index 000000000..10d6eaa6f --- /dev/null +++ b/tests/Additional/data/DataSet/output/7-34-DS_r.csv @@ -0,0 +1,3 @@ +Id_1,Id_2,Me_1 +2010,A,60 +2010,B,90 diff --git a/tests/Additional/data/DataSet/output/7-35-DS_r.csv b/tests/Additional/data/DataSet/output/7-35-DS_r.csv new file mode 100644 index 000000000..b3b1c9a31 --- /dev/null +++ b/tests/Additional/data/DataSet/output/7-35-DS_r.csv @@ -0,0 +1,3 @@ +Id_1,Me_1 +2010-12-31,60 +2011-12-31,150 diff --git a/tests/Additional/data/DataStructure/input/7-30-DS_1.json b/tests/Additional/data/DataStructure/input/7-30-DS_1.json new file mode 100644 index 000000000..84b8fa916 --- /dev/null +++ b/tests/Additional/data/DataStructure/input/7-30-DS_1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + 
"role": "Identifier", + "type": "Time_Period", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/input/7-31-DS_1.json b/tests/Additional/data/DataStructure/input/7-31-DS_1.json new file mode 100644 index 000000000..ea7c9acf0 --- /dev/null +++ b/tests/Additional/data/DataStructure/input/7-31-DS_1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Date", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/input/7-32-DS_1.json b/tests/Additional/data/DataStructure/input/7-32-DS_1.json new file mode 100644 index 000000000..ea7c9acf0 --- /dev/null +++ b/tests/Additional/data/DataStructure/input/7-32-DS_1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Date", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/input/7-33-DS_1.json b/tests/Additional/data/DataStructure/input/7-33-DS_1.json new file mode 100644 index 000000000..95b61cea9 --- /dev/null +++ b/tests/Additional/data/DataStructure/input/7-33-DS_1.json @@ -0,0 +1,33 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Time_Period", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + 
"nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/input/7-34-DS_1.json b/tests/Additional/data/DataStructure/input/7-34-DS_1.json new file mode 100644 index 000000000..33f9fe0d0 --- /dev/null +++ b/tests/Additional/data/DataStructure/input/7-34-DS_1.json @@ -0,0 +1,33 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Time_Period", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_3", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/input/7-35-DS_1.json b/tests/Additional/data/DataStructure/input/7-35-DS_1.json new file mode 100644 index 000000000..319e7f05e --- /dev/null +++ b/tests/Additional/data/DataStructure/input/7-35-DS_1.json @@ -0,0 +1,33 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Date", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Id_3", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/output/7-30-DS_r.json b/tests/Additional/data/DataStructure/output/7-30-DS_r.json new file mode 100644 index 000000000..eac06f397 --- /dev/null +++ b/tests/Additional/data/DataStructure/output/7-30-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": 
"Identifier", + "type": "Time_Period", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/output/7-31-DS_r.json b/tests/Additional/data/DataStructure/output/7-31-DS_r.json new file mode 100644 index 000000000..63d0d08a8 --- /dev/null +++ b/tests/Additional/data/DataStructure/output/7-31-DS_r.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Date", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/output/7-32-DS_r.json b/tests/Additional/data/DataStructure/output/7-32-DS_r.json new file mode 100644 index 000000000..caf876c0d --- /dev/null +++ b/tests/Additional/data/DataStructure/output/7-32-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Date", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/output/7-33-DS_r.json b/tests/Additional/data/DataStructure/output/7-33-DS_r.json new file mode 100644 index 000000000..561de7780 --- /dev/null +++ b/tests/Additional/data/DataStructure/output/7-33-DS_r.json @@ -0,0 +1,33 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Time_Period", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + 
"nullable": true + }, + { + "name": "Me_2", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/output/7-34-DS_r.json b/tests/Additional/data/DataStructure/output/7-34-DS_r.json new file mode 100644 index 000000000..eac06f397 --- /dev/null +++ b/tests/Additional/data/DataStructure/output/7-34-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Time_Period", + "nullable": false + }, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/data/DataStructure/output/7-35-DS_r.json b/tests/Additional/data/DataStructure/output/7-35-DS_r.json new file mode 100644 index 000000000..63d0d08a8 --- /dev/null +++ b/tests/Additional/data/DataStructure/output/7-35-DS_r.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "role": "Identifier", + "type": "Date", + "nullable": false + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Integer", + "nullable": true + } + ] + } + ] +} diff --git a/tests/Additional/test_additional.py b/tests/Additional/test_additional.py index 56c764125..d4c7a921a 100644 --- a/tests/Additional/test_additional.py +++ b/tests/Additional/test_additional.py @@ -3886,6 +3886,103 @@ def test_29(self): text=text, code=code, number_inputs=number_inputs, exception_code=exception_code ) + def test_30(self): + """ + Group by with time_agg on Time_Period type. 
+ """ + text = """DS_r := sum(DS_1 group by Id_2 time_agg("A"));""" + code = "7-30" + number_inputs = 1 + references_names = ["DS_r"] + + self.BaseTest( + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, + ) + + def test_31(self): + """ + Group except with time_agg on Date type. + Excludes Id_2 from grouping, time_agg transforms Id_1 to annual. + """ + text = """DS_r := sum(DS_1 group except Id_2 time_agg("A", last));""" + code = "7-31" + number_inputs = 1 + references_names = ["DS_r"] + + self.BaseTest( + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, + ) + + def test_32(self): + """ + Group by with time_agg on Date type with first conf, spanning multiple years. + """ + text = """DS_r := sum(DS_1 group by Id_2 time_agg("A", first));""" + code = "7-32" + number_inputs = 1 + references_names = ["DS_r"] + + self.BaseTest( + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, + ) + + def test_33(self): + """ + Group by with time_agg on Time_Period, multiple measures, multiple years. + """ + text = """DS_r := sum(DS_1 group by Id_2 time_agg("A"));""" + code = "7-33" + number_inputs = 1 + references_names = ["DS_r"] + + self.BaseTest( + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, + ) + + def test_34(self): + """ + Group except with time_agg on Time_Period, 3 identifiers, exclude one. + """ + text = """DS_r := sum(DS_1 group except Id_3 time_agg("A"));""" + code = "7-34" + number_inputs = 1 + references_names = ["DS_r"] + + self.BaseTest( + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, + ) + + def test_35(self): + """ + Group except with time_agg on Date, exclude multiple identifiers, multiple years. 
+ """ + text = """DS_r := sum(DS_1 group except Id_2, Id_3 time_agg("A", last));""" + code = "7-35" + number_inputs = 1 + references_names = ["DS_r"] + + self.BaseTest( + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, + ) + def test_GH_261_1(self): text = "DS_r <- DS_1[calc Me_2 := Me_1 < Me_1];" code = "GH_261" From 70cc545a233819c3e5816d511fdb9da431bb191d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Tue, 17 Mar 2026 12:21:43 +0100 Subject: [PATCH 36/38] Bump version to 1.6.1rc1 (#600) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Francisco Javier Hernández del Caño --- pyproject.toml | 2 +- src/vtlengine/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0fac0fda3..e4f98d437 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "vtlengine" -version = "1.6.0" +version = "1.6.1rc1" description = "Run and Validate VTL Scripts" license = "AGPL-3.0" readme = "README.md" diff --git a/src/vtlengine/__init__.py b/src/vtlengine/__init__.py index a611948db..0c1b1bbcd 100644 --- a/src/vtlengine/__init__.py +++ b/src/vtlengine/__init__.py @@ -24,4 +24,4 @@ "validate_external_routine", ] -__version__ = "1.6.0" +__version__ = "1.6.1rc1" From 92bbf7e80a57caa83241e43e38fdece81fb080f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Thu, 19 Mar 2026 17:50:39 +0100 Subject: [PATCH 37/38] Fix #609: Apply operator fails on semantic execution (#610) * Fixed apply validation method fails on semantic execution * Added related test --- src/vtlengine/Operators/Join.py | 22 +++++++++++++++++-- .../data/DataStructure/input/GH_609-1.json | 21 ++++++++++++++++++ .../data/DataStructure/output/GH_609-1.json | 21 ++++++++++++++++++ 
tests/Semantic/data/vtl/GH_609.vtl | 1 + tests/Semantic/test_semantic.py | 13 +++++++++++ 5 files changed, 76 insertions(+), 2 deletions(-) create mode 100644 tests/Semantic/data/DataStructure/input/GH_609-1.json create mode 100644 tests/Semantic/data/DataStructure/output/GH_609-1.json create mode 100644 tests/Semantic/data/vtl/GH_609.vtl diff --git a/src/vtlengine/Operators/Join.py b/src/vtlengine/Operators/Join.py index 043121ac1..fc67df5fc 100644 --- a/src/vtlengine/Operators/Join.py +++ b/src/vtlengine/Operators/Join.py @@ -406,7 +406,20 @@ def execute(cls, dataset: Dataset, op: Any, left: str, right: str) -> Dataset: return op.evaluate(left_dataset, right_dataset) @classmethod - def validate(cls, dataset: Dataset, child: Any, op_map: Dict[str, Any]) -> None: + def validate(cls, dataset: Dataset, child: Any, op_map: Dict[str, Any]) -> Dataset: + if isinstance(child, list): + for c in child: + dataset = cls.validate(dataset, c, op_map) + else: + cls._check_bin_expr(dataset, child, op_map) + left_dataset = cls.create_dataset("left", child.left.value, dataset) + right_dataset = cls.create_dataset("right", child.right.value, dataset) + dataset, _ = cls.get_common_components(left_dataset, right_dataset) + + return dataset + + @classmethod + def _check_bin_expr(cls, dataset: Dataset, child: Any, op_map: Dict[str, Any]) -> None: if not isinstance(child, BinOp): raise Exception( f"Invalid expression {child} on apply operator. 
Only BinOp are accepted" @@ -438,7 +451,12 @@ def create_dataset(cls, name: str, prefix: str, dataset: Dataset) -> Dataset: for component in dataset.components.values() if component.name.startswith(prefix) or component.role is Role.IDENTIFIER } - data = dataset.data[list(components.keys())] if dataset.data is not None else pd.DataFrame() + comp_names = list(components.keys()) + data = ( + dataset.data[comp_names] + if dataset.data is not None + else pd.DataFrame(columns=comp_names) + ) for component in components.values(): component.name = ( diff --git a/tests/Semantic/data/DataStructure/input/GH_609-1.json b/tests/Semantic/data/DataStructure/input/GH_609-1.json new file mode 100644 index 000000000..55773c574 --- /dev/null +++ b/tests/Semantic/data/DataStructure/input/GH_609-1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/Semantic/data/DataStructure/output/GH_609-1.json b/tests/Semantic/data/DataStructure/output/GH_609-1.json new file mode 100644 index 000000000..9143772a7 --- /dev/null +++ b/tests/Semantic/data/DataStructure/output/GH_609-1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/Semantic/data/vtl/GH_609.vtl b/tests/Semantic/data/vtl/GH_609.vtl new file mode 100644 index 000000000..3ccc2352f --- /dev/null +++ b/tests/Semantic/data/vtl/GH_609.vtl @@ -0,0 +1 @@ +DS_r := inner_join (DS_1 as d1, DS_1 as d2 apply d1 + d2); \ No newline at end of file diff --git a/tests/Semantic/test_semantic.py 
b/tests/Semantic/test_semantic.py index dac46892f..95e038414 100644 --- a/tests/Semantic/test_semantic.py +++ b/tests/Semantic/test_semantic.py @@ -1854,6 +1854,19 @@ def test_20(self): self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + def test_GH_609(self): + """ + Dataset --> Dataset + Status: + Expression: DS_r := inner_join (DS_1 as d1, DS_1 as d2 apply d1 + d2); + Description: Apply operation fails on semantic run + """ + code = "GH_609" + number_inputs = 1 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + class AggregateTests(SemanticHelper): """ """ From df12453ed31383865545d2b71f7a260bf6ea494e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateo=20de=20Lorenzo=20Argel=C3=A9s?= <160473799+mla2001@users.noreply.github.com> Date: Thu, 19 Mar 2026 18:22:25 +0100 Subject: [PATCH 38/38] Fix #611: Setdiff operator return matching values whit nulls (#612) * Fixed SetDiff operator taking rows with pre-existing null values as results * Fixed related test references * Added related test --- src/vtlengine/Operators/Set.py | 21 +++++++-------- .../data/DataSet/input/GH_611-1.csv | 4 +++ .../data/DataSet/input/GH_611-2.csv | 4 +++ .../data/DataSet/output/8-4-1-8-1.csv | 3 --- .../data/DataSet/output/GH_611-1.csv | 3 +++ .../data/DataStructure/input/GH_611-1.json | 27 +++++++++++++++++++ .../data/DataStructure/input/GH_611-2.json | 27 +++++++++++++++++++ .../data/DataStructure/output/GH_611-1.json | 27 +++++++++++++++++++ tests/Attributes/data/vtl/GH_611.vtl | 1 + tests/Attributes/test_attributes.py | 15 +++++++++++ 10 files changed, 117 insertions(+), 15 deletions(-) create mode 100644 tests/Attributes/data/DataSet/input/GH_611-1.csv create mode 100644 tests/Attributes/data/DataSet/input/GH_611-2.csv create mode 100644 tests/Attributes/data/DataSet/output/GH_611-1.csv create mode 100644 tests/Attributes/data/DataStructure/input/GH_611-1.json create mode 100644 
tests/Attributes/data/DataStructure/input/GH_611-2.json create mode 100644 tests/Attributes/data/DataStructure/output/GH_611-1.json create mode 100644 tests/Attributes/data/vtl/GH_611.vtl diff --git a/src/vtlengine/Operators/Set.py b/src/vtlengine/Operators/Set.py index 96007ea54..d9826ad88 100644 --- a/src/vtlengine/Operators/Set.py +++ b/src/vtlengine/Operators/Set.py @@ -149,18 +149,15 @@ def evaluate(cls, operands: List[Dataset]) -> Dataset: else: if data is None: data = pd.DataFrame(columns=result.get_identifiers_names()) - result.data = result.data.merge(data, how="left", on=result.get_identifiers_names()) - if len(result.data) > 0: - result.data = result.data[result.data.isnull().any(axis=1)] - - not_identifiers = result.get_measures_names() + result.get_attributes_names() - for col in not_identifiers: - if col + "_x" in result.data: - result.data[col] = result.data[col + "_x"] - del result.data[col + "_x"] - if col + "_y" in result.data: - del result.data[col + "_y"] - result.data = result.data[result.get_identifiers_names() + not_identifiers] + id_names = result.get_identifiers_names() + result.data = result.data.merge( + data[id_names].drop_duplicates(), + how="left", + on=id_names, + indicator=True, + ) + result.data = result.data[result.data["_merge"] == "left_only"] + result.data = result.data.drop(columns=["_merge"]) if result.data is not None: result.data.reset_index(drop=True, inplace=True) return result diff --git a/tests/Attributes/data/DataSet/input/GH_611-1.csv b/tests/Attributes/data/DataSet/input/GH_611-1.csv new file mode 100644 index 000000000..8711f4ce6 --- /dev/null +++ b/tests/Attributes/data/DataSet/input/GH_611-1.csv @@ -0,0 +1,4 @@ +Id_1,Me_1,At_1 +1,1,1 +2,2,2 +3,3,3 \ No newline at end of file diff --git a/tests/Attributes/data/DataSet/input/GH_611-2.csv b/tests/Attributes/data/DataSet/input/GH_611-2.csv new file mode 100644 index 000000000..08cbc4a29 --- /dev/null +++ b/tests/Attributes/data/DataSet/input/GH_611-2.csv @@ -0,0 +1,4 
@@ +Id_1,Me_1 +3,3 +4,4 +5,5 \ No newline at end of file diff --git a/tests/Attributes/data/DataSet/output/8-4-1-8-1.csv b/tests/Attributes/data/DataSet/output/8-4-1-8-1.csv index f2023220c..2d651ba16 100644 --- a/tests/Attributes/data/DataSet/output/8-4-1-8-1.csv +++ b/tests/Attributes/data/DataSet/output/8-4-1-8-1.csv @@ -1,5 +1,2 @@ Id_1,Id_2,Id_3,Id_4,Me_1,Me_2 2021,Belgium,Total,Total,10.0,10.0 -2021,Denmark,Total,Total,4.0,20.0 -2021,France,Total,Total,6.0,24.0 -2021,Spain,Total,Total,8.0,40.0 \ No newline at end of file diff --git a/tests/Attributes/data/DataSet/output/GH_611-1.csv b/tests/Attributes/data/DataSet/output/GH_611-1.csv new file mode 100644 index 000000000..f83efbe8c --- /dev/null +++ b/tests/Attributes/data/DataSet/output/GH_611-1.csv @@ -0,0 +1,3 @@ +Id_1,Me_1,At_1 +1,1,1 +2,2,2 \ No newline at end of file diff --git a/tests/Attributes/data/DataStructure/input/GH_611-1.json b/tests/Attributes/data/DataStructure/input/GH_611-1.json new file mode 100644 index 000000000..b0658051a --- /dev/null +++ b/tests/Attributes/data/DataStructure/input/GH_611-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + }, + { + "name": "At_1", + "type": "Number", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/Attributes/data/DataStructure/input/GH_611-2.json b/tests/Attributes/data/DataStructure/input/GH_611-2.json new file mode 100644 index 000000000..0d729e96a --- /dev/null +++ b/tests/Attributes/data/DataStructure/input/GH_611-2.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_2", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + }, + { + 
"name": "At_1", + "type": "Number", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/Attributes/data/DataStructure/output/GH_611-1.json b/tests/Attributes/data/DataStructure/output/GH_611-1.json new file mode 100644 index 000000000..2a13788f5 --- /dev/null +++ b/tests/Attributes/data/DataStructure/output/GH_611-1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + }, + { + "name": "At_1", + "type": "Number", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/Attributes/data/vtl/GH_611.vtl b/tests/Attributes/data/vtl/GH_611.vtl new file mode 100644 index 000000000..78325106d --- /dev/null +++ b/tests/Attributes/data/vtl/GH_611.vtl @@ -0,0 +1 @@ +DS_r <- setdiff(DS_1, DS_2); \ No newline at end of file diff --git a/tests/Attributes/test_attributes.py b/tests/Attributes/test_attributes.py index 8946affc5..a4f947979 100644 --- a/tests/Attributes/test_attributes.py +++ b/tests/Attributes/test_attributes.py @@ -4205,6 +4205,21 @@ def test_10(self): self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + def test_GH_611(self): + """ + SET DIFFERENCE: setdiff + Dataset --> Dataset + Status: OK + Expression: DS_r := setdiff(DS_1,DS_2) + + Description: Check the operator its not returning rows with pre-existing null values as results + """ + code = "GH_611" + number_inputs = 2 + references_names = ["1"] + + self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + class ConditionalOperatorsTest(TestAttributesHelper): """