diff --git a/main.py b/main.py index 4b6ae47d..eb62c0bf 100644 --- a/main.py +++ b/main.py @@ -9,34 +9,25 @@ def main(): """ data_structures = { - 'datasets': [ - {'name': 'DS_1', - 'DataStructure': [ - {'name': 'Id_1', - 'type': - 'Integer', - 'role': 'Identifier', - 'nullable': False}, - {'name': 'Me_1', - 'type': 'Number', - 'role': 'Measure', - 'nullable': True} - ] - } + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, + {"name": "Me_1", "type": "Number", "role": "Measure", "nullable": True}, + ], + } ] } - data_df = pd.DataFrame( - {"Id_1": [1, 2, 3], - "Me_1": [10, 20, 30]}) + data_df = pd.DataFrame({"Id_1": [1, 2, 3], "Me_1": [10, 20, 30]}) datapoints = {"DS_1": data_df} - run_result = run(script=script, data_structures=data_structures, - datapoints=datapoints) + run_result = run(script=script, data_structures=data_structures, datapoints=datapoints) print(run_result) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/pyproject.toml b/pyproject.toml index d4aec712..84239496 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,7 +48,8 @@ mypy = "^1.11.2" pandas-stubs = "^2.2.3.241009" stubs = "^1.0.0" toml = "^0.10.2" -ruff = "^0.7.1" +ruff = "^0.8.3" + [tool.ruff] line-length = 100 diff --git a/src/vtlengine/API/_InternalApi.py b/src/vtlengine/API/_InternalApi.py index 0d26ad20..bcbba1fc 100644 --- a/src/vtlengine/API/_InternalApi.py +++ b/src/vtlengine/API/_InternalApi.py @@ -56,7 +56,9 @@ def _load_dataset_from_structure(structures: Dict[str, Any]) -> Dict[str, Any]: for scalar_json in structures["scalars"]: scalar_name = scalar_json["name"] scalar = Scalar( - name=scalar_name, data_type=SCALAR_TYPES[scalar_json["type"]], value=None + name=scalar_name, + data_type=SCALAR_TYPES[scalar_json["type"]], + value=None, ) datasets[scalar_name] = scalar # type: ignore[assignment] return datasets @@ -115,7 +117,7 @@ def _load_single_datapoint(datapoint: Union[str, Path]) -> Dict[str, Any]: def _load_datapoints_path( - datapoints: Union[Path, str, List[Union[str, Path]]] + datapoints: Union[Path, str, List[Union[str, Path]]], ) -> Dict[str, Dataset]: """ Returns a dict with the data given from a Path. @@ -156,7 +158,7 @@ def _load_datastructure_single(data_structure: Union[Dict[str, Any], Path]) -> D def load_datasets( - data_structure: Union[Dict[str, Any], Path, List[Union[Dict[str, Any], Path]]] + data_structure: Union[Dict[str, Any], Path, List[Union[Dict[str, Any], Path]]], ) -> Dict[str, Dataset]: """ Loads multiple datasets. 
diff --git a/src/vtlengine/API/__init__.py b/src/vtlengine/API/__init__.py index d965ea0d..7e82ab84 100644 --- a/src/vtlengine/API/__init__.py +++ b/src/vtlengine/API/__init__.py @@ -32,7 +32,13 @@ class __VTLSingleErrorListener(ErrorListener): # type: ignore[misc] """ """ def syntaxError( - self, recognizer: Any, offendingSymbol: str, line: str, column: str, msg: str, e: Any + self, + recognizer: Any, + offendingSymbol: str, + line: str, + column: str, + msg: str, + e: Any, ) -> None: raise Exception( f"Not valid VTL Syntax \n " @@ -150,7 +156,10 @@ class takes all of this information and checks it with the ast generated to # Running the interpreter interpreter = InterpreterAnalyzer( - datasets=structures, value_domains=vd, external_routines=ext_routines, only_semantic=True + datasets=structures, + value_domains=vd, + external_routines=ext_routines, + only_semantic=True, ) with pd.option_context("future.no_silent_downcasting", True): result = interpreter.visit(ast) diff --git a/src/vtlengine/AST/ASTConstructor.py b/src/vtlengine/AST/ASTConstructor.py index fcdaa433..c12ab176 100644 --- a/src/vtlengine/AST/ASTConstructor.py +++ b/src/vtlengine/AST/ASTConstructor.py @@ -382,7 +382,8 @@ def visitHierRuleSignature(self, ctx: Parser.HierRuleSignatureContext): if conditions: identifiers_list = [ DefIdentifier( - value=elto.alias if getattr(elto, "alias", None) else elto.value, kind=kind + value=elto.alias if getattr(elto, "alias", None) else elto.value, + kind=kind, ) for elto in conditions[0] ] @@ -395,7 +396,7 @@ def visitHierRuleSignature(self, ctx: Parser.HierRuleSignatureContext): def visitValueDomainSignature(self, ctx: Parser.ValueDomainSignatureContext): """ valueDomainSignature: CONDITION IDENTIFIER (AS IDENTIFIER)? (',' IDENTIFIER (AS IDENTIFIER)?)* ; - """ # noqa E501 + """ # noqa E501 # AST_ASTCONSTRUCTOR.7 ctx_list = list(ctx.getChildren()) component_nodes = [ @@ -459,7 +460,7 @@ def visitCodeItemRelation(self, ctx: Parser.CodeItemRelationContext): codeItemRelation: ( WHEN expr THEN )? codeItemRef codeItemRelationClause (codeItemRelationClause)* ; ( WHEN exprComponent THEN )? codetemRef=valueDomainValue comparisonOperand? codeItemRelationClause (codeItemRelationClause)* - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) @@ -512,7 +513,7 @@ def visitCodeItemRelation(self, ctx: Parser.CodeItemRelationContext): def visitCodeItemRelationClause(self, ctx: Parser.CodeItemRelationClauseContext): """ (opAdd=( PLUS | MINUS ))? rightCodeItem=valueDomainValue ( QLPAREN rightCondition=exprComponent QRPAREN )? - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) expr = [expr for expr in ctx_list if isinstance(expr, Parser.ExprContext)] diff --git a/src/vtlengine/AST/ASTConstructorModules/Expr.py b/src/vtlengine/AST/ASTConstructorModules/Expr.py index 7b5d2248..93bb1a64 100644 --- a/src/vtlengine/AST/ASTConstructorModules/Expr.py +++ b/src/vtlengine/AST/ASTConstructorModules/Expr.py @@ -45,7 +45,8 @@ class Expr(VtlVisitor): Expr Definition. 
-    _______________________________________________________________________________________"""
+    _______________________________________________________________________________________
+    """
 
     def visitExpr(self, ctx: Parser.ExprContext):
         """
@@ -66,7 +67,7 @@ def visitExpr(self, ctx: Parser.ExprContext):
                 | constant # constantExpr
                 | varID # varIdExpr
             ;
-        """ # noqa E501
+        """  # noqa E501
 
         ctx_list = list(ctx.getChildren())
         c = ctx_list[0]
@@ -121,7 +122,6 @@ def visitExpr(self, ctx: Parser.ExprContext):
 
         # CASE WHEN expr THEN expr ELSE expr END # caseExpr
         elif isinstance(c, TerminalNodeImpl) and (c.getSymbol().type == Parser.CASE):
-
             if len(ctx_list) % 4 != 3:
                 raise ValueError("Syntax error.")
 
@@ -221,7 +221,6 @@ def visitMembershipExpr(self, ctx: Parser.MembershipExprContext):
         return previous_node
 
     def visitClauseExpr(self, ctx: Parser.ClauseExprContext):
-
         ctx_list = list(ctx.getChildren())
 
         dataset = self.visitExpr(ctx_list[0])
@@ -347,7 +346,7 @@ def visitJoinClauseItem(self, ctx: Parser.JoinClauseItemContext):
 
     def visitJoinClause(self, ctx: Parser.JoinClauseContext):
         """
-        joinClauseItem (COMMA joinClauseItem)* (USING componentID (COMMA componentID)*)?
+        joinClauseItem (COMMA joinClauseItem)* (USING componentID (COMMA componentID)*)?
         """
 
         ctx_list = list(ctx.getChildren())
@@ -373,7 +372,7 @@ def visitJoinClause(self, ctx: Parser.JoinClauseContext):
     def visitJoinClauseWithoutUsing(self, ctx: Parser.JoinClauseWithoutUsingContext):
         """
         joinClause: joinClauseItem (COMMA joinClauseItem)* (USING componentID (COMMA componentID)*)? ;
-        """ # noqa E501
+        """  # noqa E501
         ctx_list = list(ctx.getChildren())
 
         clause_nodes = []
@@ -388,7 +387,7 @@ def visitJoinClauseWithoutUsing(self, ctx: Parser.JoinClauseWithoutUsingContext)
     def visitJoinBody(self, ctx: Parser.JoinBodyContext):
         """
         joinBody: filterClause? (calcClause|joinApplyClause|aggrClause)? (keepOrDropClause)? renameClause?
-        """ # noqa E501
+        """  # noqa E501
         ctx_list = list(ctx.getChildren())
 
         body_nodes = []
@@ -457,7 +456,7 @@ def visitCallDataset(self, ctx: Parser.CallDatasetContext):
     def visitEvalAtom(self, ctx: Parser.EvalAtomContext):
         """
         | EVAL LPAREN routineName LPAREN (varID|scalarItem)? (COMMA (varID|scalarItem))* RPAREN (LANGUAGE STRING_CONSTANT)? (RETURNS evalDatasetType)? RPAREN # evalAtom
-        """ # noqa E501
+        """  # noqa E501
         ctx_list = list(ctx.getChildren())
 
         routine_name = Terminals().visitRoutineName(ctx_list[2])
@@ -505,7 +504,7 @@ def visitEvalAtom(self, ctx: Parser.EvalAtomContext):
     def visitCastExprDataset(self, ctx: Parser.CastExprDatasetContext):
         """
         | CAST LPAREN expr COMMA (basicScalarType|valueDomainName) (COMMA STRING_CONSTANT)?
RPAREN # castExprDataset - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) c = ctx_list[0] @@ -795,15 +794,19 @@ def visitTimeFunctions(self, ctx: Parser.TimeFunctionsContext): return self.visitTimeDiffAtom(ctx) elif isinstance(ctx, Parser.DateAddAtomContext): return self.visitTimeAddAtom(ctx) - elif isinstance(ctx, (Parser.YearAtomContext, - Parser.MonthAtomContext, - Parser.DayOfMonthAtomContext, - Parser.DayOfYearAtomContext, - Parser.DayToYearAtomContext, - Parser.DayToMonthAtomContext, - Parser.YearTodayAtomContext, - Parser.MonthTodayAtomContext)): - + elif isinstance( + ctx, + ( + Parser.YearAtomContext, + Parser.MonthAtomContext, + Parser.DayOfMonthAtomContext, + Parser.DayOfYearAtomContext, + Parser.DayToYearAtomContext, + Parser.DayToMonthAtomContext, + Parser.YearTodayAtomContext, + Parser.MonthTodayAtomContext, + ), + ): return self.visitTimeUnaryAtom(ctx) else: raise NotImplementedError @@ -878,7 +881,7 @@ def visitFillTimeAtom(self, ctx: Parser.FillTimeAtomContext): def visitTimeAggAtom(self, ctx: Parser.TimeAggAtomContext): """ TIME_AGG LPAREN periodIndTo=STRING_CONSTANT (COMMA periodIndFrom=(STRING_CONSTANT| OPTIONAL ))? (COMMA op=optionalExpr)? (COMMA (FIRST|LAST))? RPAREN # timeAggAtom - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) c = ctx_list[0] @@ -911,7 +914,11 @@ def visitTimeAggAtom(self, ctx: Parser.TimeAggAtomContext): # AST_ASTCONSTRUCTOR.17 raise Exception("Optional as expression node is not allowed in Time Aggregation") return TimeAggregation( - op=op, operand=operand_node, period_to=period_to, period_from=period_from, conf=conf + op=op, + operand=operand_node, + period_to=period_to, + period_from=period_from, + conf=conf, ) def visitFlowAtom(self, ctx: Parser.FlowAtomContext): @@ -988,7 +995,7 @@ def visitSetFunctions(self, ctx: Parser.SetFunctionsContext): setExpr: UNION LPAREN left=expr (COMMA expr)+ RPAREN # unionAtom | INTERSECT LPAREN left=expr (COMMA expr)+ RPAREN # intersectAtom | op=(SETDIFF|SYMDIFF) LPAREN left=expr COMMA right=expr RPAREN # setOrSYmDiffAtom - """ # noqa E501 + """ # noqa E501 if isinstance(ctx, Parser.UnionAtomContext): return self.visitUnionAtom(ctx) elif isinstance(ctx, Parser.IntersectAtomContext): @@ -1031,7 +1038,7 @@ def visitSetOrSYmDiffAtom(self, ctx: Parser.SetOrSYmDiffAtomContext): def visitHierarchyFunctions(self, ctx: Parser.HierarchyFunctionsContext): """ HIERARCHY LPAREN op=expr COMMA hrName=IDENTIFIER (conditionClause)? (RULE ruleComponent=componentID)? (validationMode)? (inputModeHierarchy)? outputModeHierarchy? RPAREN - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) c = ctx_list[0] @@ -1102,7 +1109,7 @@ def visitValidationFunctions(self, ctx: Parser.ValidationFunctionsContext): def visitValidateDPruleset(self, ctx: Parser.ValidateDPrulesetContext): """ validationDatapoint: CHECK_DATAPOINT '(' expr ',' IDENTIFIER (COMPONENTS componentID (',' componentID)*)? (INVALID|ALL_MEASURES|ALL)? ')' ; - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) c = ctx_list[0] @@ -1137,7 +1144,7 @@ def visitValidateDPruleset(self, ctx: Parser.ValidateDPrulesetContext): def visitValidateHRruleset(self, ctx: Parser.ValidateHRrulesetContext): """ CHECK_HIERARCHY LPAREN op=expr COMMA hrName=IDENTIFIER conditionClause? (RULE componentID)? validationMode? inputMode? validationOutput? 
RPAREN # validateHRruleset - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) c = ctx_list[0] @@ -1199,7 +1206,7 @@ def visitValidateHRruleset(self, ctx: Parser.ValidateHRrulesetContext): def visitValidationSimple(self, ctx: Parser.ValidationSimpleContext): """ | CHECK LPAREN op=expr (codeErr=erCode)? (levelCode=erLevel)? imbalanceExpr? output=(INVALID|ALL)? RPAREN # validationSimple - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) c = ctx_list[0] token = c.getSymbol() @@ -1331,11 +1338,19 @@ def visitAnSimpleFunction(self, ctx: Parser.AnSimpleFunctionContext): if window is None: window = Windowing( - type_="data", start=-1, stop=0, start_mode="preceding", stop_mode="current" + type_="data", + start=-1, + stop=0, + start_mode="preceding", + stop_mode="current", ) return Analytic( - op=op_node, operand=operand, partition_by=partition_by, order_by=order_by, window=window + op=op_node, + operand=operand, + partition_by=partition_by, + order_by=order_by, + window=window, ) def visitLagOrLeadAn(self, ctx: Parser.LagOrLeadAnContext): @@ -1369,7 +1384,11 @@ def visitLagOrLeadAn(self, ctx: Parser.LagOrLeadAnContext): raise Exception(f"{op_node} requires an offset parameter.") return Analytic( - op=op_node, operand=operand, partition_by=partition_by, order_by=order_by, params=params + op=op_node, + operand=operand, + partition_by=partition_by, + order_by=order_by, + params=params, ) def visitRatioToReportAn(self, ctx: Parser.RatioToReportAnContext): diff --git a/src/vtlengine/AST/ASTConstructorModules/ExprComponents.py b/src/vtlengine/AST/ASTConstructorModules/ExprComponents.py index a68b8790..0ef582c0 100644 --- a/src/vtlengine/AST/ASTConstructorModules/ExprComponents.py +++ b/src/vtlengine/AST/ASTConstructorModules/ExprComponents.py @@ -31,7 +31,8 @@ class ExprComp(VtlVisitor): ExprComponent Definition. - _______________________________________________________________________________________""" + _______________________________________________________________________________________ + """ def visitExprComponent(self, ctx: Parser.ExprComponentContext): """ @@ -49,7 +50,7 @@ def visitExprComponent(self, ctx: Parser.ExprComponentContext): | constant # constantExprComp | componentID # compId ; - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) c = ctx_list[0] @@ -280,7 +281,7 @@ def visitGenericFunctionsComponents(self, ctx: Parser.GenericFunctionsComponents def visitCallComponent(self, ctx: Parser.CallComponentContext): """ callFunction: operatorID LPAREN (parameterComponent (COMMA parameterComponent)*)? RPAREN # callComponent - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) c = ctx_list[0] @@ -296,7 +297,7 @@ def visitCallComponent(self, ctx: Parser.CallComponentContext): def visitEvalAtomComponent(self, ctx: Parser.EvalAtomComponentContext): """ | EVAL LPAREN routineName LPAREN (componentID|scalarItem)? (COMMA (componentID|scalarItem))* RPAREN (LANGUAGE STRING_CONSTANT)? (RETURNS outputParameterTypeComponent)? RPAREN # evalAtomComponent - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) routine_name = Terminals().visitRoutineName(ctx_list[2]) @@ -347,7 +348,7 @@ def visitEvalAtomComponent(self, ctx: Parser.EvalAtomComponentContext): def visitCastExprComponent(self, ctx: Parser.CastExprComponentContext): """ | CAST LPAREN exprComponent COMMA (basicScalarType|valueDomainName) (COMMA STRING_CONSTANT)? 
RPAREN # castExprComponent - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) c = ctx_list[0] @@ -391,7 +392,6 @@ def visitCastExprComponent(self, ctx: Parser.CastExprComponentContext): raise NotImplementedError def visitParameterComponent(self, ctx: Parser.ParameterComponentContext): - ctx_list = list(ctx.getChildren()) c = ctx_list[0] @@ -582,16 +582,19 @@ def visitTimeFunctionsComponents(self, ctx: Parser.TimeFunctionsComponentsContex return self.visitDateDiffAtomComponent(ctx) elif isinstance(ctx, Parser.DateAddAtomComponentContext): return self.visitDateAddAtomComponentContext(ctx) - elif (isinstance(ctx, ( - Parser.YearAtomComponentContext, - Parser.MonthAtomComponentContext, - Parser.DayOfMonthAtomComponentContext, - Parser.DayOfYearAtomComponentContext, - Parser.DayToYearAtomComponentContext, - Parser.DayToMonthAtomComponentContext, - Parser.YearToDayAtomComponentContext, - Parser.MonthToDayAtomComponentContext - ))): + elif isinstance( + ctx, + ( + Parser.YearAtomComponentContext, + Parser.MonthAtomComponentContext, + Parser.DayOfMonthAtomComponentContext, + Parser.DayOfYearAtomComponentContext, + Parser.DayToYearAtomComponentContext, + Parser.DayToMonthAtomComponentContext, + Parser.YearToDayAtomComponentContext, + Parser.MonthToDayAtomComponentContext, + ), + ): return self.visitTimeUnaryAtomComponent(ctx) else: raise NotImplementedError @@ -650,7 +653,7 @@ def visitTimeAggAtomComponent(self, ctx: Parser.TimeAggAtomComponentContext): """ TIME_AGG LPAREN periodIndTo=STRING_CONSTANT (COMMA periodIndFrom=(STRING_CONSTANT| OPTIONAL ))? (COMMA op=optionalExprComponent)? (COMMA (FIRST|LAST))? RPAREN # timeAggAtomComponent; - """ # noqa E501 + """ # noqa E501 ctx_list = list(ctx.getChildren()) c = ctx_list[0] @@ -684,7 +687,11 @@ def visitTimeAggAtomComponent(self, ctx: Parser.TimeAggAtomComponentContext): # AST_ASTCONSTRUCTOR.17 raise SemanticError("1-4-2-2") return TimeAggregation( - op=op, operand=operand_node, period_to=period_to, period_from=period_from, conf=conf + op=op, + operand=operand_node, + period_to=period_to, + period_from=period_from, + conf=conf, ) def visitCurrentDateAtomComponent(self, ctx: Parser.CurrentDateAtomComponentContext): @@ -825,7 +832,6 @@ def visitCountAggrComp(self, ctx: Parser.CountAggrCompContext): """ def visitAnalyticFunctionsComponents(self, ctx: Parser.AnalyticFunctionsComponentsContext): - if isinstance(ctx, Parser.AnSimpleFunctionComponentContext): return self.visitAnSimpleFunctionComponent(ctx) elif isinstance(ctx, Parser.LagOrLeadAnComponentContext): @@ -861,7 +867,11 @@ def visitAnSimpleFunctionComponent(self, ctx: Parser.AnSimpleFunctionComponentCo raise NotImplementedError return Analytic( - op=op_node, operand=operand, partition_by=partition_by, order_by=order_by, window=params + op=op_node, + operand=operand, + partition_by=partition_by, + order_by=order_by, + window=params, ) def visitLagOrLeadAnComponent(self, ctx: Parser.LagOrLeadAnComponentContext): @@ -891,7 +901,11 @@ def visitLagOrLeadAnComponent(self, ctx: Parser.LagOrLeadAnComponentContext): continue return Analytic( - op=op_node, operand=operand, partition_by=partition_by, order_by=order_by, params=params + op=op_node, + operand=operand, + partition_by=partition_by, + order_by=order_by, + params=params, ) def visitRankAnComponent(self, ctx: Parser.RankAnComponentContext): @@ -911,7 +925,11 @@ def visitRankAnComponent(self, ctx: Parser.RankAnComponentContext): continue return Analytic( - op=op_node, operand=None, partition_by=partition_by, order_by=order_by, 
window=None
+            op=op_node,
+            operand=None,
+            partition_by=partition_by,
+            order_by=order_by,
+            window=None,
         )
 
     def visitRatioToReportAnComponent(self, ctx: Parser.RatioToReportAnComponentContext):
@@ -926,5 +944,9 @@ def visitRatioToReportAnComponent(self, ctx: Parser.RatioToReportAnComponentCont
         partition_by = Terminals().visitPartitionByClause(ctx_list[5])
 
         return Analytic(
-            op=op_node, operand=operand, partition_by=partition_by, order_by=order_by, window=params
+            op=op_node,
+            operand=operand,
+            partition_by=partition_by,
+            order_by=order_by,
+            window=params,
         )
diff --git a/src/vtlengine/AST/ASTConstructorModules/Terminals.py b/src/vtlengine/AST/ASTConstructorModules/Terminals.py
index aed6a289..6e53a8f2 100644
--- a/src/vtlengine/AST/ASTConstructorModules/Terminals.py
+++ b/src/vtlengine/AST/ASTConstructorModules/Terminals.py
@@ -70,7 +70,6 @@ def visitVarID(self, ctx: Parser.VarIDContext):
         return var_id_node
 
     def visitVarIdExpr(self, ctx: Parser.VarIdExprContext):
-
         if isinstance(ctx.children[0], Parser.VarIDContext):
             return self.visitVarID(ctx.children[0])
 
@@ -127,7 +126,10 @@ def visitValueDomainID(self, ctx: Parser.ValueDomainIDContext):
         valueDomainID: IDENTIFIER ;
         """
         return Collection(
-            name=ctx.children[0].getSymbol().text, children=[], kind="ValueDomain", type=""
+            name=ctx.children[0].getSymbol().text,
+            children=[],
+            kind="ValueDomain",
+            type="",
         )
 
     def visitRulesetID(self, ctx: Parser.RulesetIDContext):
@@ -381,7 +383,7 @@ def visitDpRuleset(self, ctx: Parser.DpRulesetContext):
         | DATAPOINT_ON_VD (GLPAREN valueDomainName (MUL valueDomainName)* GRPAREN )? # dataPointVd
         | DATAPOINT_ON_VAR (GLPAREN varID (MUL varID)* GRPAREN )? # dataPointVar
         ;
-        """ # noqa E501
+        """  # noqa E501
 
         # AST_ASTCONSTRUCTOR.54
         raise NotImplementedError
@@ -391,7 +393,7 @@ def visitHrRuleset(self, ctx: Parser.HrRulesetContext):
         | HIERARCHICAL_ON_VD ( GLPAREN vdName=IDENTIFIER (LPAREN valueDomainName (MUL valueDomainName)* RPAREN)? GRPAREN )? # hrRulesetVdType
         | HIERARCHICAL_ON_VAR ( GLPAREN varName=varID (LPAREN varID (MUL varID)* RPAREN)? GRPAREN )? # hrRulesetVarType
         ;
-        """ # noqa E501
+        """  # noqa E501
 
         # AST_ASTCONSTRUCTOR.55
         raise NotImplementedError
@@ -483,7 +485,6 @@ def visitOutputParameterTypeComponent(self, ctx: Parser.OutputParameterTypeCompo
         raise NotImplementedError
 
     def visitScalarItem(self, ctx: Parser.ScalarItemContext):
-
         ctx_list = list(ctx.getChildren())
         c = ctx_list[0]
 
@@ -497,7 +498,7 @@ def visitScalarItem(self, ctx: Parser.ScalarItemContext):
     def visitScalarWithCast(self, ctx: Parser.ScalarWithCastContext):
         """
         | CAST LPAREN constant COMMA (basicScalarType) (COMMA STRING_CONSTANT)? RPAREN #scalarWithCast # noqa E501
-        """ # noqa E501
+        """  # noqa E501
         ctx_list = list(ctx.getChildren())
         c = ctx_list[0]
 
@@ -593,7 +594,7 @@ def visitErLevel(self, ctx: Parser.ErLevelContext):
 
     def visitSignature(self, ctx: Parser.SignatureContext, kind="ComponentID"):
         """
-        varID (AS alias)?
+        varID (AS alias)?
""" ctx_list = list(ctx.getChildren()) @@ -672,8 +673,12 @@ def visitWindowingClause(self, ctx: Parser.WindowingClauseContext): first = num_rows_1 # unbounded (default value) second = num_rows_2 # current data point (default value) - if (mode_2 == "preceding" and mode_1 == "preceding" and num_rows_1 == -1 - and num_rows_2 == -1): # preceding and preceding (error) + if ( + mode_2 == "preceding" + and mode_1 == "preceding" + and num_rows_1 == -1 + and num_rows_2 == -1 + ): # preceding and preceding (error) raise Exception( f"Cannot have 2 preceding clauses with unbounded in analytic clause, " f"line {ctx_list[3].start.line}" @@ -706,7 +711,8 @@ def visitOrderByItem(self, ctx: Parser.OrderByItemContext): return OrderBy(component=self.visitComponentID(ctx_list[0]).value, order="asc") return OrderBy( - component=self.visitComponentID(ctx_list[0]).value, order=ctx_list[1].getSymbol().text + component=self.visitComponentID(ctx_list[0]).value, + order=ctx_list[1].getSymbol().text, ) def visitLimitClauseItem(self, ctx: Parser.LimitClauseItemContext): @@ -735,5 +741,9 @@ def create_windowing(win_mode, values, modes): values[e] = "CURRENT ROW" return Windowing( - type_=win_mode, start=values[0], stop=values[1], start_mode=modes[0], stop_mode=modes[1] + type_=win_mode, + start=values[0], + stop=values[1], + start_mode=modes[0], + stop_mode=modes[1], ) diff --git a/src/vtlengine/AST/ASTEncoders.py b/src/vtlengine/AST/ASTEncoders.py index e4f4e033..ed939065 100644 --- a/src/vtlengine/AST/ASTEncoders.py +++ b/src/vtlengine/AST/ASTEncoders.py @@ -1,6 +1,6 @@ import json -import AST +from vtlengine import AST class ComplexEncoder(json.JSONEncoder): diff --git a/src/vtlengine/AST/DAG/__init__.py b/src/vtlengine/AST/DAG/__init__.py index 2283e52b..61bb0a14 100644 --- a/src/vtlengine/AST/DAG/__init__.py +++ b/src/vtlengine/AST/DAG/__init__.py @@ -300,7 +300,6 @@ def visit_PersistentAssignment(self, node: PersistentAssignment) -> None: self.visit(node.right) def visit_RegularAggregation(self, node: RegularAggregation) -> None: - self.visit(node.dataset) for child in node.children: self.isFromRegularAggregation = True @@ -329,7 +328,6 @@ def visit_Identifier(self, node: Identifier) -> None: self.inputs.append(node.value) def visit_ParamOp(self, node: ParamOp) -> None: - if self.udos and node.op in self.udos: DO_AST: Operator = self.udos[node.op] @@ -426,7 +424,6 @@ def visit_DefIdentifier(self, node: DefIdentifier): # def visit_Identifier(self, node: Identifier) -> None: if node.kind == "CodeItemID": # and node.value not in self.alias: if self.isFirstAssignment: - self.isFirstAssignment = False self.outputs.append(node.value) else: diff --git a/src/vtlengine/AST/Grammar/lexer.py b/src/vtlengine/AST/Grammar/lexer.py index 64e7d93d..60961616 100644 --- a/src/vtlengine/AST/Grammar/lexer.py +++ b/src/vtlengine/AST/Grammar/lexer.py @@ -1116,7 +1116,6 @@ def serializedATN(): class Lexer(Lexer): - atn = ATNDeserializer().deserialize(serializedATN()) decisionsToDFA = [DFA(ds, i) for i, ds in enumerate(atn.decisionToState)] diff --git a/src/vtlengine/AST/Grammar/parser.py b/src/vtlengine/AST/Grammar/parser.py index 18cafefa..6254a19d 100644 --- a/src/vtlengine/AST/Grammar/parser.py +++ b/src/vtlengine/AST/Grammar/parser.py @@ -973,7 +973,6 @@ def serializedATN(): class Parser(Parser): - grammarFileName = "Vtl.g4" atn = ATNDeserializer().deserialize(serializedATN()) @@ -2003,7 +2002,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitStart(self) def start(self) -> StartContext: - localctx = 
Parser.StartContext(self, self._ctx, self.state) self.enterRule(localctx, 0, self.RULE_start) self._la = 0 # Token type @@ -2045,7 +2043,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class DefineExpressionContext(StatementContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.StatementContext super().__init__(parser) self.copyFrom(ctx) @@ -2062,7 +2059,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDefineExpression(self) class TemporaryAssignmentContext(StatementContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.StatementContext super().__init__(parser) self.copyFrom(ctx) @@ -2085,7 +2081,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitTemporaryAssignment(self) class PersistAssignmentContext(StatementContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.StatementContext super().__init__(parser) self.copyFrom(ctx) @@ -2108,7 +2103,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitPersistAssignment(self) def statement(self): - localctx = Parser.StatementContext(self, self._ctx, self.state) self.enterRule(localctx, 2, self.RULE_statement) try: @@ -2166,7 +2160,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class VarIdExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.copyFrom(ctx) @@ -2183,7 +2176,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitVarIdExpr(self) class MembershipExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.copyFrom(ctx) @@ -2206,7 +2198,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitMembershipExpr(self) class InNotInExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.left = None # ExprContext @@ -2237,7 +2228,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitInNotInExpr(self) class BooleanExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.left = None # ExprContext @@ -2269,7 +2259,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitBooleanExpr(self) class ComparisonExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.left = None # ExprContext @@ -2295,7 +2284,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitComparisonExpr(self) class UnaryExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.op = None # Token @@ -2323,7 +2311,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitUnaryExpr(self) class FunctionsExpressionContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.copyFrom(ctx) @@ -2340,7 +2327,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitFunctionsExpression(self) class IfExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.conditionalExpr = None # ExprContext @@ -2372,7 +2358,6 @@ def exitRule(self, listener: 
ParseTreeListener): listener.exitIfExpr(self) class ClauseExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.dataset = None # ExprContext @@ -2400,7 +2385,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitClauseExpr(self) class CaseExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.copyFrom(ctx) @@ -2438,7 +2422,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCaseExpr(self) class ArithmeticExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.left = None # ExprContext @@ -2467,7 +2450,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitArithmeticExpr(self) class ParenthesisExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.copyFrom(ctx) @@ -2490,7 +2472,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitParenthesisExpr(self) class ConstantExprContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.copyFrom(ctx) @@ -2507,7 +2488,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitConstantExpr(self) class ArithmeticExprOrConcatContext(ExprContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.ExprContext super().__init__(parser) self.left = None # ExprContext @@ -2580,14 +2560,11 @@ def expr(self, _p: int = 0): localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( - ( - ((_la) & ~0x3F) == 0 - and ( - (1 << _la) - & ((1 << Parser.PLUS) | (1 << Parser.MINUS) | (1 << Parser.NOT)) - ) - != 0 + ((_la) & ~0x3F) == 0 + and ( + (1 << _la) & ((1 << Parser.PLUS) | (1 << Parser.MINUS) | (1 << Parser.NOT)) ) + != 0 ): localctx.op = self._errHandler.recoverInline(self) else: @@ -2872,7 +2849,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class ArithmeticExprCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -2903,7 +2879,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitArithmeticExprComp(self) class IfExprCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -2937,7 +2912,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitIfExprComp(self) class ComparisonExprCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -2964,7 +2938,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitComparisonExprComp(self) class FunctionsExpressionCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -2983,7 +2956,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitFunctionsExpressionComp(self) class CompIdContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -3002,7 +2974,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCompId(self) class ConstantExprCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): 
# actually a VtlParser.ExprComponentContext @@ -3021,7 +2992,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitConstantExprComp(self) class ArithmeticExprOrConcatCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -3055,7 +3025,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitArithmeticExprOrConcatComp(self) class ParenthesisExprCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -3080,7 +3049,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitParenthesisExprComp(self) class InNotInExprCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -3113,7 +3081,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitInNotInExprComp(self) class UnaryExprCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -3143,7 +3110,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitUnaryExprComp(self) class CaseExprCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -3183,7 +3149,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCaseExprComp(self) class BooleanExprCompContext(ExprComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ExprComponentContext @@ -3258,14 +3223,11 @@ def exprComponent(self, _p: int = 0): localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( - ( - ((_la) & ~0x3F) == 0 - and ( - (1 << _la) - & ((1 << Parser.PLUS) | (1 << Parser.MINUS) | (1 << Parser.NOT)) - ) - != 0 + ((_la) & ~0x3F) == 0 + and ( + (1 << _la) & ((1 << Parser.PLUS) | (1 << Parser.MINUS) | (1 << Parser.NOT)) ) + != 0 ): localctx.op = self._errHandler.recoverInline(self) else: @@ -3353,7 +3315,8 @@ def exprComponent(self, _p: int = 0): la_ = self._interp.adaptivePredict(self._input, 10, self._ctx) if la_ == 1: localctx = Parser.ArithmeticExprCompContext( - self, Parser.ExprComponentContext(self, _parentctx, _parentState) + self, + Parser.ExprComponentContext(self, _parentctx, _parentState), ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) @@ -3376,7 +3339,8 @@ def exprComponent(self, _p: int = 0): elif la_ == 2: localctx = Parser.ArithmeticExprOrConcatCompContext( - self, Parser.ExprComponentContext(self, _parentctx, _parentState) + self, + Parser.ExprComponentContext(self, _parentctx, _parentState), ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) @@ -3399,7 +3363,8 @@ def exprComponent(self, _p: int = 0): elif la_ == 3: localctx = Parser.ComparisonExprCompContext( - self, Parser.ExprComponentContext(self, _parentctx, _parentState) + self, + Parser.ExprComponentContext(self, _parentctx, _parentState), ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) @@ -3416,7 +3381,8 @@ def exprComponent(self, _p: int = 0): elif la_ == 4: localctx = Parser.BooleanExprCompContext( - self, Parser.ExprComponentContext(self, _parentctx, _parentState) + self, + Parser.ExprComponentContext(self, _parentctx, _parentState), ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, 
self.RULE_exprComponent) @@ -3433,7 +3399,8 @@ def exprComponent(self, _p: int = 0): elif la_ == 5: localctx = Parser.BooleanExprCompContext( - self, Parser.ExprComponentContext(self, _parentctx, _parentState) + self, + Parser.ExprComponentContext(self, _parentctx, _parentState), ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) @@ -3456,7 +3423,8 @@ def exprComponent(self, _p: int = 0): elif la_ == 6: localctx = Parser.InNotInExprCompContext( - self, Parser.ExprComponentContext(self, _parentctx, _parentState) + self, + Parser.ExprComponentContext(self, _parentctx, _parentState), ) localctx.left = _prevctx self.pushNewRecursionContext(localctx, _startState, self.RULE_exprComponent) @@ -3515,7 +3483,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class NumericFunctionsComponentsContext(FunctionsComponentsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.FunctionsComponentsContext @@ -3534,7 +3501,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitNumericFunctionsComponents(self) class StringFunctionsComponentsContext(FunctionsComponentsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.FunctionsComponentsContext @@ -3553,7 +3519,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitStringFunctionsComponents(self) class ComparisonFunctionsComponentsContext(FunctionsComponentsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.FunctionsComponentsContext @@ -3572,7 +3537,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitComparisonFunctionsComponents(self) class TimeFunctionsComponentsContext(FunctionsComponentsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.FunctionsComponentsContext @@ -3591,7 +3555,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitTimeFunctionsComponents(self) class GenericFunctionsComponentsContext(FunctionsComponentsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.FunctionsComponentsContext @@ -3610,7 +3573,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitGenericFunctionsComponents(self) class AnalyticFunctionsComponentsContext(FunctionsComponentsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.FunctionsComponentsContext @@ -3629,7 +3591,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAnalyticFunctionsComponents(self) class ConditionalFunctionsComponentsContext(FunctionsComponentsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.FunctionsComponentsContext @@ -3648,7 +3609,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitConditionalFunctionsComponents(self) class AggregateFunctionsComponentsContext(FunctionsComponentsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.FunctionsComponentsContext @@ -3667,7 +3627,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAggregateFunctionsComponents(self) def functionsComponents(self): - localctx = Parser.FunctionsComponentsContext(self, self._ctx, self.state) self.enterRule(localctx, 8, self.RULE_functionsComponents) try: @@ -3752,7 +3711,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class HierarchyFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # 
actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3769,7 +3727,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitHierarchyFunctions(self) class StringFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3786,7 +3743,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitStringFunctions(self) class ValidationFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3803,7 +3759,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitValidationFunctions(self) class GenericFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3820,7 +3775,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitGenericFunctions(self) class ConditionalFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3837,7 +3791,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitConditionalFunctions(self) class AggregateFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3854,7 +3807,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAggregateFunctions(self) class JoinFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3871,7 +3823,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitJoinFunctions(self) class ComparisonFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3888,7 +3839,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitComparisonFunctions(self) class NumericFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3905,7 +3855,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitNumericFunctions(self) class TimeFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3922,7 +3871,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitTimeFunctions(self) class SetFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3939,7 +3887,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitSetFunctions(self) class AnalyticFunctionsContext(FunctionsContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.FunctionsContext super().__init__(parser) self.copyFrom(ctx) @@ -3956,7 +3903,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAnalyticFunctions(self) def functions(self): - localctx = Parser.FunctionsContext(self, self._ctx, self.state) self.enterRule(localctx, 10, 
self.RULE_functions) try: @@ -4095,7 +4041,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDatasetClause(self) def datasetClause(self): - localctx = Parser.DatasetClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 12, self.RULE_datasetClause) try: @@ -4182,7 +4127,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRenameClause(self) def renameClause(self): - localctx = Parser.RenameClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 14, self.RULE_renameClause) self._la = 0 # Token type @@ -4243,7 +4187,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAggrClause(self) def aggrClause(self): - localctx = Parser.AggrClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 16, self.RULE_aggrClause) self._la = 0 # Token type @@ -4299,7 +4242,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitFilterClause(self) def filterClause(self): - localctx = Parser.FilterClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 18, self.RULE_filterClause) try: @@ -4350,7 +4292,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCalcClause(self) def calcClause(self): - localctx = Parser.CalcClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 20, self.RULE_calcClause) self._la = 0 # Token type @@ -4418,7 +4359,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitKeepOrDropClause(self) def keepOrDropClause(self): - localctx = Parser.KeepOrDropClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 22, self.RULE_keepOrDropClause) self._la = 0 # Token type @@ -4491,7 +4431,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitPivotOrUnpivotClause(self) def pivotOrUnpivotClause(self): - localctx = Parser.PivotOrUnpivotClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 24, self.RULE_pivotOrUnpivotClause) self._la = 0 # Token type @@ -4564,7 +4503,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCustomPivotClause(self) def customPivotClause(self): - localctx = Parser.CustomPivotClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 26, self.RULE_customPivotClause) self._la = 0 # Token type @@ -4636,7 +4574,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitSubspaceClause(self) def subspaceClause(self): - localctx = Parser.SubspaceClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 28, self.RULE_subspaceClause) self._la = 0 # Token type @@ -4680,7 +4617,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class JoinExprContext(JoinOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.JoinOperatorsContext @@ -4724,7 +4660,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitJoinExpr(self) def joinOperators(self): - localctx = Parser.JoinOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 30, self.RULE_joinOperators) self._la = 0 # Token type @@ -4797,7 +4732,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class DefOperatorContext(DefOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.DefOperatorsContext @@ -4858,7 +4792,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDefOperator(self) class DefHierarchicalContext(DefOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.DefOperatorsContext @@ -4910,7 +4843,6 @@ def 
exitRule(self, listener: ParseTreeListener): listener.exitDefHierarchical(self) class DefDatapointRulesetContext(DefOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.DefOperatorsContext @@ -4962,7 +4894,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDefDatapointRuleset(self) def defOperators(self): - localctx = Parser.DefOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 32, self.RULE_defOperators) self._la = 0 # Token type @@ -5101,7 +5032,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class EvalAtomContext(GenericOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.GenericOperatorsContext @@ -5165,7 +5095,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitEvalAtom(self) class CastExprDatasetContext(GenericOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.GenericOperatorsContext @@ -5208,7 +5137,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCastExprDataset(self) class CallDatasetContext(GenericOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.GenericOperatorsContext @@ -5245,7 +5173,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCallDataset(self) def genericOperators(self): - localctx = Parser.GenericOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 34, self.RULE_genericOperators) self._la = 0 # Token type @@ -5299,7 +5226,7 @@ def genericOperators(self): != 0 ) or ( - (((_la - 66)) & ~0x3F) == 0 + ((_la - 66) & ~0x3F) == 0 and ( (1 << (_la - 66)) & ( @@ -5333,7 +5260,7 @@ def genericOperators(self): != 0 ) or ( - (((_la - 131)) & ~0x3F) == 0 + ((_la - 131) & ~0x3F) == 0 and ( (1 << (_la - 131)) & ( @@ -5365,7 +5292,7 @@ def genericOperators(self): != 0 ) or ( - (((_la - 195)) & ~0x3F) == 0 + ((_la - 195) & ~0x3F) == 0 and ( (1 << (_la - 195)) & ( @@ -5562,7 +5489,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class EvalAtomComponentContext(GenericOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.GenericOperatorsComponentContext @@ -5626,7 +5552,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitEvalAtomComponent(self) class CastExprComponentContext(GenericOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.GenericOperatorsComponentContext @@ -5669,7 +5594,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCastExprComponent(self) class CallComponentContext(GenericOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.GenericOperatorsComponentContext @@ -5706,7 +5630,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCallComponent(self) def genericOperatorsComponent(self): - localctx = Parser.GenericOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 36, self.RULE_genericOperatorsComponent) self._la = 0 # Token type @@ -5757,7 +5680,7 @@ def genericOperatorsComponent(self): != 0 ) or ( - (((_la - 77)) & ~0x3F) == 0 + ((_la - 77) & ~0x3F) == 0 and ( (1 << (_la - 77)) & ( @@ -5797,7 +5720,7 @@ def genericOperatorsComponent(self): != 0 ) or ( - (((_la - 141)) & ~0x3F) == 0 + ((_la - 141) & ~0x3F) == 0 and ( (1 << (_la - 141)) & ( @@ -5818,7 +5741,7 @@ def genericOperatorsComponent(self): != 0 ) or ( - (((_la 
- 208)) & ~0x3F) == 0 + ((_la - 208) & ~0x3F) == 0 and ( (1 << (_la - 208)) & ( @@ -6022,7 +5945,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitParameterComponent(self) def parameterComponent(self): - localctx = Parser.ParameterComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 38, self.RULE_parameterComponent) try: @@ -6148,7 +6070,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitParameter(self) def parameter(self): - localctx = Parser.ParameterContext(self, self._ctx, self.state) self.enterRule(localctx, 40, self.RULE_parameter) try: @@ -6275,7 +6196,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class InstrAtomContext(StringOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.StringOperatorsContext @@ -6321,7 +6241,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitInstrAtom(self) class UnaryStringFunctionContext(StringOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.StringOperatorsContext @@ -6365,7 +6284,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitUnaryStringFunction(self) class SubstrAtomContext(StringOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.StringOperatorsContext @@ -6407,7 +6325,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitSubstrAtom(self) class ReplaceAtomContext(StringOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.StringOperatorsContext @@ -6448,7 +6365,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitReplaceAtom(self) def stringOperators(self): - localctx = Parser.StringOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 42, self.RULE_stringOperators) self._la = 0 # Token type @@ -6470,21 +6386,19 @@ def stringOperators(self): localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( - ( - (((_la - 93)) & ~0x3F) == 0 - and ( - (1 << (_la - 93)) - & ( - (1 << (Parser.LEN - 93)) - | (1 << (Parser.TRIM - 93)) - | (1 << (Parser.UCASE - 93)) - | (1 << (Parser.LCASE - 93)) - | (1 << (Parser.LTRIM - 93)) - | (1 << (Parser.RTRIM - 93)) - ) + ((_la - 93) & ~0x3F) == 0 + and ( + (1 << (_la - 93)) + & ( + (1 << (Parser.LEN - 93)) + | (1 << (Parser.TRIM - 93)) + | (1 << (Parser.UCASE - 93)) + | (1 << (Parser.LCASE - 93)) + | (1 << (Parser.LTRIM - 93)) + | (1 << (Parser.RTRIM - 93)) ) - != 0 ) + != 0 ): localctx.op = self._errHandler.recoverInline(self) else: @@ -6620,7 +6534,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class ReplaceAtomComponentContext(StringOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.StringOperatorsComponentContext @@ -6661,7 +6574,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitReplaceAtomComponent(self) class UnaryStringFunctionComponentContext(StringOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.StringOperatorsComponentContext @@ -6705,7 +6617,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitUnaryStringFunctionComponent(self) class SubstrAtomComponentContext(StringOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.StringOperatorsComponentContext @@ -6747,7 +6658,6 @@ def exitRule(self, listener: ParseTreeListener): 
listener.exitSubstrAtomComponent(self) class InstrAtomComponentContext(StringOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.StringOperatorsComponentContext @@ -6793,7 +6703,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitInstrAtomComponent(self) def stringOperatorsComponent(self): - localctx = Parser.StringOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 44, self.RULE_stringOperatorsComponent) self._la = 0 # Token type @@ -6815,21 +6724,19 @@ def stringOperatorsComponent(self): localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( - ( - (((_la - 93)) & ~0x3F) == 0 - and ( - (1 << (_la - 93)) - & ( - (1 << (Parser.LEN - 93)) - | (1 << (Parser.TRIM - 93)) - | (1 << (Parser.UCASE - 93)) - | (1 << (Parser.LCASE - 93)) - | (1 << (Parser.LTRIM - 93)) - | (1 << (Parser.RTRIM - 93)) - ) + ((_la - 93) & ~0x3F) == 0 + and ( + (1 << (_la - 93)) + & ( + (1 << (Parser.LEN - 93)) + | (1 << (Parser.TRIM - 93)) + | (1 << (Parser.UCASE - 93)) + | (1 << (Parser.LCASE - 93)) + | (1 << (Parser.LTRIM - 93)) + | (1 << (Parser.RTRIM - 93)) ) - != 0 ) + != 0 ): localctx.op = self._errHandler.recoverInline(self) else: @@ -6965,7 +6872,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class UnaryNumericContext(NumericOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.NumericOperatorsContext @@ -7009,7 +6915,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitUnaryNumeric(self) class UnaryWithOptionalNumericContext(NumericOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.NumericOperatorsContext @@ -7047,7 +6952,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitUnaryWithOptionalNumeric(self) class BinaryNumericContext(NumericOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.NumericOperatorsContext @@ -7093,7 +6997,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitBinaryNumeric(self) def numericOperators(self): - localctx = Parser.NumericOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 46, self.RULE_numericOperators) self._la = 0 # Token type @@ -7101,28 +7004,33 @@ def numericOperators(self): self.state = 778 self._errHandler.sync(self) token = self._input.LA(1) - if token in [Parser.ABS, Parser.LN, Parser.EXP, Parser.CEIL, Parser.FLOOR, Parser.SQRT]: + if token in [ + Parser.ABS, + Parser.LN, + Parser.EXP, + Parser.CEIL, + Parser.FLOOR, + Parser.SQRT, + ]: localctx = Parser.UnaryNumericContext(self, localctx) self.enterOuterAlt(localctx, 1) self.state = 757 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( - ( - (((_la - 85)) & ~0x3F) == 0 - and ( - (1 << (_la - 85)) - & ( - (1 << (Parser.ABS - 85)) - | (1 << (Parser.LN - 85)) - | (1 << (Parser.EXP - 85)) - | (1 << (Parser.CEIL - 85)) - | (1 << (Parser.FLOOR - 85)) - | (1 << (Parser.SQRT - 85)) - ) + ((_la - 85) & ~0x3F) == 0 + and ( + (1 << (_la - 85)) + & ( + (1 << (Parser.ABS - 85)) + | (1 << (Parser.LN - 85)) + | (1 << (Parser.EXP - 85)) + | (1 << (Parser.CEIL - 85)) + | (1 << (Parser.FLOOR - 85)) + | (1 << (Parser.SQRT - 85)) ) - != 0 ) + != 0 ): localctx.op = self._errHandler.recoverInline(self) else: @@ -7169,19 +7077,17 @@ def numericOperators(self): localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( - ( - (((_la - 61)) & ~0x3F) == 0 - and ( - (1 << (_la - 61)) - & ( - (1 << 
(Parser.RANDOM - 61)) - | (1 << (Parser.LOG - 61)) - | (1 << (Parser.POWER - 61)) - | (1 << (Parser.MOD - 61)) - ) + ((_la - 61) & ~0x3F) == 0 + and ( + (1 << (_la - 61)) + & ( + (1 << (Parser.RANDOM - 61)) + | (1 << (Parser.LOG - 61)) + | (1 << (Parser.POWER - 61)) + | (1 << (Parser.MOD - 61)) ) - != 0 ) + != 0 ): localctx.op = self._errHandler.recoverInline(self) else: @@ -7223,7 +7129,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class UnaryNumericComponentContext(NumericOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.NumericOperatorsComponentContext @@ -7267,7 +7172,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitUnaryNumericComponent(self) class BinaryNumericComponentContext(NumericOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.NumericOperatorsComponentContext @@ -7313,7 +7217,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitBinaryNumericComponent(self) class UnaryWithOptionalNumericComponentContext(NumericOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.NumericOperatorsComponentContext @@ -7351,7 +7254,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitUnaryWithOptionalNumericComponent(self) def numericOperatorsComponent(self): - localctx = Parser.NumericOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 48, self.RULE_numericOperatorsComponent) self._la = 0 # Token type @@ -7359,28 +7261,33 @@ def numericOperatorsComponent(self): self.state = 801 self._errHandler.sync(self) token = self._input.LA(1) - if token in [Parser.ABS, Parser.LN, Parser.EXP, Parser.CEIL, Parser.FLOOR, Parser.SQRT]: + if token in [ + Parser.ABS, + Parser.LN, + Parser.EXP, + Parser.CEIL, + Parser.FLOOR, + Parser.SQRT, + ]: localctx = Parser.UnaryNumericComponentContext(self, localctx) self.enterOuterAlt(localctx, 1) self.state = 780 localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( - ( - (((_la - 85)) & ~0x3F) == 0 - and ( - (1 << (_la - 85)) - & ( - (1 << (Parser.ABS - 85)) - | (1 << (Parser.LN - 85)) - | (1 << (Parser.EXP - 85)) - | (1 << (Parser.CEIL - 85)) - | (1 << (Parser.FLOOR - 85)) - | (1 << (Parser.SQRT - 85)) - ) + ((_la - 85) & ~0x3F) == 0 + and ( + (1 << (_la - 85)) + & ( + (1 << (Parser.ABS - 85)) + | (1 << (Parser.LN - 85)) + | (1 << (Parser.EXP - 85)) + | (1 << (Parser.CEIL - 85)) + | (1 << (Parser.FLOOR - 85)) + | (1 << (Parser.SQRT - 85)) ) - != 0 ) + != 0 ): localctx.op = self._errHandler.recoverInline(self) else: @@ -7427,19 +7334,17 @@ def numericOperatorsComponent(self): localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( - ( - (((_la - 61)) & ~0x3F) == 0 - and ( - (1 << (_la - 61)) - & ( - (1 << (Parser.RANDOM - 61)) - | (1 << (Parser.LOG - 61)) - | (1 << (Parser.POWER - 61)) - | (1 << (Parser.MOD - 61)) - ) + ((_la - 61) & ~0x3F) == 0 + and ( + (1 << (_la - 61)) + & ( + (1 << (Parser.RANDOM - 61)) + | (1 << (Parser.LOG - 61)) + | (1 << (Parser.POWER - 61)) + | (1 << (Parser.MOD - 61)) ) - != 0 ) + != 0 ): localctx.op = self._errHandler.recoverInline(self) else: @@ -7481,7 +7386,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class BetweenAtomContext(ComparisonOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ComparisonOperatorsContext @@ -7521,7 +7425,6 @@ def exitRule(self, listener: ParseTreeListener): 
listener.exitBetweenAtom(self) class CharsetMatchAtomContext(ComparisonOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ComparisonOperatorsContext @@ -7557,7 +7460,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCharsetMatchAtom(self) class IsNullAtomContext(ComparisonOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ComparisonOperatorsContext @@ -7585,7 +7487,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitIsNullAtom(self) class ExistInAtomContext(ComparisonOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ComparisonOperatorsContext @@ -7627,7 +7528,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitExistInAtom(self) def comparisonOperators(self): - localctx = Parser.ComparisonOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 50, self.RULE_comparisonOperators) self._la = 0 # Token type @@ -7733,7 +7633,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class IsNullAtomComponentContext(ComparisonOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ComparisonOperatorsComponentContext @@ -7761,7 +7660,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitIsNullAtomComponent(self) class CharsetMatchAtomComponentContext(ComparisonOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ComparisonOperatorsComponentContext @@ -7797,7 +7695,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCharsetMatchAtomComponent(self) class BetweenAtomComponentContext(ComparisonOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ComparisonOperatorsComponentContext @@ -7837,7 +7734,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitBetweenAtomComponent(self) def comparisonOperatorsComponent(self): - localctx = Parser.ComparisonOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 52, self.RULE_comparisonOperatorsComponent) try: @@ -7917,7 +7813,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class DayToYearAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -7945,7 +7840,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDayToYearAtom(self) class YearAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -7973,7 +7867,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitYearAtom(self) class YearTodayAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8001,7 +7894,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitYearTodayAtom(self) class DayToMonthAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8029,7 +7921,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDayToMonthAtom(self) class DayOfYearAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8057,7 +7948,6 @@ def exitRule(self, listener: ParseTreeListener): 
listener.exitDatOfYearAtom(self) class PeriodAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8085,7 +7975,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitPeriodAtom(self) class MonthTodayAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8113,7 +8002,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitMonthTodayAtom(self) class FillTimeAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8150,7 +8038,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitFillTimeAtom(self) class MonthAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8178,7 +8065,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitMonthAtom(self) class FlowAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8210,7 +8096,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitFlowAtom(self) class TimeShiftAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8244,7 +8129,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitTimeShiftAtom(self) class TimeAggAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8296,7 +8180,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitTimeAggAtom(self) class DateDiffAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8332,7 +8215,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDateDiffAtom(self) class DateAddAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8372,7 +8254,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDateAddAtom(self) class DayOfMonthAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8400,7 +8281,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDayOfMonthAtom(self) class CurrentDateAtomContext(TimeOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsContext @@ -8425,7 +8305,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCurrentDateAtom(self) def timeOperators(self): - localctx = Parser.TimeOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 54, self.RULE_timeOperators) self._la = 0 # Token type @@ -8479,7 +8358,7 @@ def timeOperators(self): != 0 ) or ( - (((_la - 66)) & ~0x3F) == 0 + ((_la - 66) & ~0x3F) == 0 and ( (1 << (_la - 66)) & ( @@ -8512,7 +8391,7 @@ def timeOperators(self): != 0 ) or ( - (((_la - 131)) & ~0x3F) == 0 + ((_la - 131) & ~0x3F) == 0 and ( (1 << (_la - 131)) & ( @@ -8544,7 +8423,7 @@ def timeOperators(self): != 0 ) or ( - (((_la - 195)) & ~0x3F) == 0 + ((_la - 195) & ~0x3F) == 0 and ( (1 << (_la - 195)) & ( @@ -8848,7 +8727,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class 
PeriodAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -8876,7 +8754,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitPeriodAtomComponent(self) class TimeShiftAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -8910,7 +8787,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitTimeShiftAtomComponent(self) class MonthToDayAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -8938,7 +8814,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitMonthTodayAtomComponent(self) class TimeAggAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -8990,7 +8865,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitTimeAggAtomComponent(self) class DayToMonthAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9018,7 +8892,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDayToMonthAtomComponent(self) class DateAddAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9058,7 +8931,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDateAddAtomComponent(self) class YearToDayAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9086,7 +8958,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitYearTodayAtomComponent(self) class DayOfMonthAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9114,7 +8985,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDayOfMonthAtomComponent(self) class MonthAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9142,7 +9012,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitMonthAtomComponent(self) class FillTimeAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9179,7 +9048,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitFillTimeAtomComponent(self) class DayOfYearAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9207,7 +9075,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDatOfYearAtomComponent(self) class DayToYearAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9235,7 +9102,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDayToYearAtomComponent(self) class CurrentDateAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, 
parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9260,7 +9126,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCurrentDateAtomComponent(self) class FlowAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9292,7 +9157,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitFlowAtomComponent(self) class DateDiffAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9328,7 +9192,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDateDiffAtomComponent(self) class YearAtomComponentContext(TimeOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.TimeOperatorsComponentContext @@ -9356,7 +9219,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitYearAtomComponent(self) def timeOperatorsComponent(self): - localctx = Parser.TimeOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 56, self.RULE_timeOperatorsComponent) self._la = 0 # Token type @@ -9407,7 +9269,7 @@ def timeOperatorsComponent(self): != 0 ) or ( - (((_la - 77)) & ~0x3F) == 0 + ((_la - 77) & ~0x3F) == 0 and ( (1 << (_la - 77)) & ( @@ -9446,7 +9308,7 @@ def timeOperatorsComponent(self): != 0 ) or ( - (((_la - 141)) & ~0x3F) == 0 + ((_la - 141) & ~0x3F) == 0 and ( (1 << (_la - 141)) & ( @@ -9467,7 +9329,7 @@ def timeOperatorsComponent(self): != 0 ) or ( - (((_la - 208)) & ~0x3F) == 0 + ((_la - 208) & ~0x3F) == 0 and ( (1 << (_la - 208)) & ( @@ -9767,7 +9629,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class SetOrSYmDiffAtomContext(SetOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.SetOperatorsContext @@ -9807,7 +9668,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitSetOrSYmDiffAtom(self) class IntersectAtomContext(SetOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.SetOperatorsContext @@ -9845,7 +9705,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitIntersectAtom(self) class UnionAtomContext(SetOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.SetOperatorsContext @@ -9883,7 +9742,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitUnionAtom(self) def setOperators(self): - localctx = Parser.SetOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 58, self.RULE_setOperators) self._la = 0 # Token type @@ -10034,7 +9892,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitHierarchyOperators(self) def hierarchyOperators(self): - localctx = Parser.HierarchyOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 60, self.RULE_hierarchyOperators) self._la = 0 # Token type @@ -10069,7 +9926,7 @@ def hierarchyOperators(self): self.state = 1112 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la - 225)) & ~0x3F) == 0 and ( + if ((_la - 225) & ~0x3F) == 0 and ( (1 << (_la - 225)) & ( (1 << (Parser.NON_NULL - 225)) @@ -10121,7 +9978,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class ValidateHRrulesetContext(ValidationOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ValidationOperatorsContext @@ 
-10175,7 +10031,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitValidateHRruleset(self) class ValidateDPrulesetContext(ValidationOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ValidationOperatorsContext @@ -10226,7 +10081,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitValidateDPruleset(self) class ValidationSimpleContext(ValidationOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ValidationOperatorsContext @@ -10273,7 +10127,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitValidationSimple(self) def validationOperators(self): - localctx = Parser.ValidationOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 62, self.RULE_validationOperators) self._la = 0 # Token type @@ -10356,7 +10209,7 @@ def validationOperators(self): self.state = 1156 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la - 225)) & ~0x3F) == 0 and ( + if ((_la - 225) & ~0x3F) == 0 and ( (1 << (_la - 225)) & ( (1 << (Parser.NON_NULL - 225)) @@ -10458,7 +10311,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class NvlAtomContext(ConditionalOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ConditionalOperatorsContext @@ -10494,7 +10346,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitNvlAtom(self) def conditionalOperators(self): - localctx = Parser.ConditionalOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 64, self.RULE_conditionalOperators) try: @@ -10534,7 +10385,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class NvlAtomComponentContext(ConditionalOperatorsComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ConditionalOperatorsComponentContext @@ -10570,7 +10420,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitNvlAtomComponent(self) def conditionalOperatorsComponent(self): - localctx = Parser.ConditionalOperatorsComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 66, self.RULE_conditionalOperatorsComponent) try: @@ -10610,7 +10459,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class AggrCompContext(AggrOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.AggrOperatorsContext @@ -10666,7 +10514,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAggrComp(self) class CountAggrCompContext(AggrOperatorsContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.AggrOperatorsContext @@ -10691,7 +10538,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCountAggrComp(self) def aggrOperators(self): - localctx = Parser.AggrOperatorsContext(self, self._ctx, self.state) self.enterRule(localctx, 68, self.RULE_aggrOperators) self._la = 0 # Token type @@ -10706,25 +10552,23 @@ def aggrOperators(self): localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( - ( - (((_la - 80)) & ~0x3F) == 0 - and ( - (1 << (_la - 80)) - & ( - (1 << (Parser.MIN - 80)) - | (1 << (Parser.MAX - 80)) - | (1 << (Parser.SUM - 80)) - | (1 << (Parser.AVG - 80)) - | (1 << (Parser.MEDIAN - 80)) - | (1 << (Parser.COUNT - 80)) - | (1 << (Parser.STDDEV_POP - 80)) - | (1 << (Parser.STDDEV_SAMP - 80)) - | (1 << (Parser.VAR_POP - 80)) - | (1 << (Parser.VAR_SAMP - 80)) - ) + ((_la - 80) & ~0x3F) == 0 + and ( + (1 << (_la - 80)) + & ( + (1 
<< (Parser.MIN - 80)) + | (1 << (Parser.MAX - 80)) + | (1 << (Parser.SUM - 80)) + | (1 << (Parser.AVG - 80)) + | (1 << (Parser.MEDIAN - 80)) + | (1 << (Parser.COUNT - 80)) + | (1 << (Parser.STDDEV_POP - 80)) + | (1 << (Parser.STDDEV_SAMP - 80)) + | (1 << (Parser.VAR_POP - 80)) + | (1 << (Parser.VAR_SAMP - 80)) ) - != 0 ) + != 0 ): localctx.op = self._errHandler.recoverInline(self) else: @@ -10771,7 +10615,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class AggrDatasetContext(AggrOperatorsGroupingContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.AggrOperatorsGroupingContext @@ -10833,7 +10676,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAggrDataset(self) def aggrOperatorsGrouping(self): - localctx = Parser.AggrOperatorsGroupingContext(self, self._ctx, self.state) self.enterRule(localctx, 70, self.RULE_aggrOperatorsGrouping) self._la = 0 # Token type @@ -10844,25 +10686,23 @@ def aggrOperatorsGrouping(self): localctx.op = self._input.LT(1) _la = self._input.LA(1) if not ( - ( - (((_la - 80)) & ~0x3F) == 0 - and ( - (1 << (_la - 80)) - & ( - (1 << (Parser.MIN - 80)) - | (1 << (Parser.MAX - 80)) - | (1 << (Parser.SUM - 80)) - | (1 << (Parser.AVG - 80)) - | (1 << (Parser.MEDIAN - 80)) - | (1 << (Parser.COUNT - 80)) - | (1 << (Parser.STDDEV_POP - 80)) - | (1 << (Parser.STDDEV_SAMP - 80)) - | (1 << (Parser.VAR_POP - 80)) - | (1 << (Parser.VAR_SAMP - 80)) - ) + ((_la - 80) & ~0x3F) == 0 + and ( + (1 << (_la - 80)) + & ( + (1 << (Parser.MIN - 80)) + | (1 << (Parser.MAX - 80)) + | (1 << (Parser.SUM - 80)) + | (1 << (Parser.AVG - 80)) + | (1 << (Parser.MEDIAN - 80)) + | (1 << (Parser.COUNT - 80)) + | (1 << (Parser.STDDEV_POP - 80)) + | (1 << (Parser.STDDEV_SAMP - 80)) + | (1 << (Parser.VAR_POP - 80)) + | (1 << (Parser.VAR_SAMP - 80)) ) - != 0 ) + != 0 ): localctx.op = self._errHandler.recoverInline(self) else: @@ -10909,7 +10749,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class LagOrLeadAnContext(AnFunctionContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.AnFunctionContext @@ -10972,7 +10811,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitLagOrLeadAn(self) class RatioToReportAnContext(AnFunctionContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.AnFunctionContext @@ -11014,7 +10852,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRatioToReportAn(self) class AnSimpleFunctionContext(AnFunctionContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.AnFunctionContext @@ -11097,7 +10934,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAnSimpleFunction(self) def anFunction(self): - localctx = Parser.AnFunctionContext(self, self._ctx, self.state) self.enterRule(localctx, 72, self.RULE_anFunction) self._la = 0 # Token type @@ -11126,7 +10962,7 @@ def anFunction(self): _la = self._input.LA(1) if not ( ( - (((_la - 80)) & ~0x3F) == 0 + ((_la - 80) & ~0x3F) == 0 and ( (1 << (_la - 80)) & ( @@ -11283,7 +11119,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class AnSimpleFunctionComponentContext(AnFunctionComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.AnFunctionComponentContext @@ -11366,7 +11201,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAnSimpleFunctionComponent(self) class LagOrLeadAnComponentContext(AnFunctionComponentContext): - def 
__init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.AnFunctionComponentContext @@ -11426,7 +11260,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitLagOrLeadAnComponent(self) class RankAnComponentContext(AnFunctionComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.AnFunctionComponentContext @@ -11469,7 +11302,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRankAnComponent(self) class RatioToReportAnComponentContext(AnFunctionComponentContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.AnFunctionComponentContext @@ -11511,7 +11343,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRatioToReportAnComponent(self) def anFunctionComponent(self): - localctx = Parser.AnFunctionComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 74, self.RULE_anFunctionComponent) self._la = 0 # Token type @@ -11540,7 +11371,7 @@ def anFunctionComponent(self): _la = self._input.LA(1) if not ( ( - (((_la - 80)) & ~0x3F) == 0 + ((_la - 80) & ~0x3F) == 0 and ( (1 << (_la - 80)) & ( @@ -11627,7 +11458,7 @@ def anFunctionComponent(self): self._errHandler.sync(self) _la = self._input.LA(1) if _la == Parser.NULL_CONSTANT or ( - (((_la - 218)) & ~0x3F) == 0 + ((_la - 218) & ~0x3F) == 0 and ( (1 << (_la - 218)) & ( @@ -11750,7 +11581,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRenameClauseItem(self) def renameClauseItem(self): - localctx = Parser.RenameClauseItemContext(self, self._ctx, self.state) self.enterRule(localctx, 76, self.RULE_renameClauseItem) try: @@ -11800,7 +11630,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAggregateClause(self) def aggregateClause(self): - localctx = Parser.AggregateClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 78, self.RULE_aggregateClause) self._la = 0 # Token type @@ -11859,7 +11688,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAggrFunctionClause(self) def aggrFunctionClause(self): - localctx = Parser.AggrFunctionClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 80, self.RULE_aggrFunctionClause) self._la = 0 # Token type @@ -11869,7 +11697,7 @@ def aggrFunctionClause(self): self._errHandler.sync(self) _la = self._input.LA(1) if ( - (((_la - 103)) & ~0x3F) == 0 + ((_la - 103) & ~0x3F) == 0 and ( (1 << (_la - 103)) & ( @@ -11929,7 +11757,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCalcClauseItem(self) def calcClauseItem(self): - localctx = Parser.CalcClauseItemContext(self, self._ctx, self.state) self.enterRule(localctx, 82, self.RULE_calcClauseItem) self._la = 0 # Token type @@ -11939,7 +11766,7 @@ def calcClauseItem(self): self._errHandler.sync(self) _la = self._input.LA(1) if ( - (((_la - 103)) & ~0x3F) == 0 + ((_la - 103) & ~0x3F) == 0 and ( (1 << (_la - 103)) & ( @@ -11996,7 +11823,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitSubspaceClauseItem(self) def subspaceClauseItem(self): - localctx = Parser.SubspaceClauseItemContext(self, self._ctx, self.state) self.enterRule(localctx, 84, self.RULE_subspaceClauseItem) try: @@ -12029,7 +11855,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class ScalarWithCastContext(ScalarItemContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ScalarItemContext @@ -12069,7 +11894,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitScalarWithCast(self) class 
SimpleScalarContext(ScalarItemContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ScalarItemContext @@ -12088,7 +11912,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitSimpleScalar(self) def scalarItem(self): - localctx = Parser.ScalarItemContext(self, self._ctx, self.state) self.enterRule(localctx, 86, self.RULE_scalarItem) self._la = 0 # Token type @@ -12176,7 +11999,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitJoinClauseWithoutUsing(self) def joinClauseWithoutUsing(self): - localctx = Parser.JoinClauseWithoutUsingContext(self, self._ctx, self.state) self.enterRule(localctx, 88, self.RULE_joinClauseWithoutUsing) self._la = 0 # Token type @@ -12244,7 +12066,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitJoinClause(self) def joinClause(self): - localctx = Parser.JoinClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 90, self.RULE_joinClause) self._la = 0 # Token type @@ -12320,7 +12141,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitJoinClauseItem(self) def joinClauseItem(self): - localctx = Parser.JoinClauseItemContext(self, self._ctx, self.state) self.enterRule(localctx, 92, self.RULE_joinClauseItem) self._la = 0 # Token type @@ -12382,7 +12202,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitJoinBody(self) def joinBody(self): - localctx = Parser.JoinBodyContext(self, self._ctx, self.state) self.enterRule(localctx, 94, self.RULE_joinBody) self._la = 0 # Token type @@ -12461,7 +12280,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitJoinApplyClause(self) def joinApplyClause(self): - localctx = Parser.JoinApplyClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 96, self.RULE_joinApplyClause) try: @@ -12515,7 +12333,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitPartitionByClause(self) def partitionByClause(self): - localctx = Parser.PartitionByClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 98, self.RULE_partitionByClause) self._la = 0 # Token type @@ -12584,7 +12401,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitOrderByClause(self) def orderByClause(self): - localctx = Parser.OrderByClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 100, self.RULE_orderByClause) self._la = 0 # Token type @@ -12644,7 +12460,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitOrderByItem(self) def orderByItem(self): - localctx = Parser.OrderByItemContext(self, self._ctx, self.state) self.enterRule(localctx, 102, self.RULE_orderByItem) self._la = 0 # Token type @@ -12714,7 +12529,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitWindowingClause(self) def windowingClause(self): - localctx = Parser.WindowingClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 104, self.RULE_windowingClause) try: @@ -12773,7 +12587,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitSignedInteger(self) def signedInteger(self): - localctx = Parser.SignedIntegerContext(self, self._ctx, self.state) self.enterRule(localctx, 106, self.RULE_signedInteger) try: @@ -12828,7 +12641,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitLimitClauseItem(self) def limitClauseItem(self): - localctx = Parser.LimitClauseItemContext(self, self._ctx, self.state) self.enterRule(localctx, 108, self.RULE_limitClauseItem) try: @@ -12899,7 +12711,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) 
class GroupAllContext(GroupingClauseContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.GroupingClauseContext @@ -12924,7 +12735,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitGroupAll(self) class GroupByOrExceptContext(GroupingClauseContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.GroupingClauseContext @@ -12962,7 +12772,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitGroupByOrExcept(self) def groupingClause(self): - localctx = Parser.GroupingClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 110, self.RULE_groupingClause) self._la = 0 # Token type @@ -13043,7 +12852,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitHavingClause(self) def havingClause(self): - localctx = Parser.HavingClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 112, self.RULE_havingClause) try: @@ -13091,7 +12899,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitParameterItem(self) def parameterItem(self): - localctx = Parser.ParameterItemContext(self, self._ctx, self.state) self.enterRule(localctx, 114, self.RULE_parameterItem) self._la = 0 # Token type @@ -13146,7 +12953,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitOutputParameterType(self) def outputParameterType(self): - localctx = Parser.OutputParameterTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 116, self.RULE_outputParameterType) try: @@ -13221,7 +13027,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitOutputParameterTypeComponent(self) def outputParameterTypeComponent(self): - localctx = Parser.OutputParameterTypeComponentContext(self, self._ctx, self.state) self.enterRule(localctx, 118, self.RULE_outputParameterTypeComponent) try: @@ -13300,7 +13105,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitInputParameterType(self) def inputParameterType(self): - localctx = Parser.InputParameterTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 120, self.RULE_inputParameterType) try: @@ -13396,7 +13200,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRulesetType(self) def rulesetType(self): - localctx = Parser.RulesetTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 122, self.RULE_rulesetType) try: @@ -13408,7 +13211,11 @@ def rulesetType(self): self.state = 1511 self.match(Parser.RULESET) pass - elif token in [Parser.DATAPOINT, Parser.DATAPOINT_ON_VD, Parser.DATAPOINT_ON_VAR]: + elif token in [ + Parser.DATAPOINT, + Parser.DATAPOINT_ON_VD, + Parser.DATAPOINT_ON_VAR, + ]: self.enterOuterAlt(localctx, 2) self.state = 1512 self.dpRuleset() @@ -13467,7 +13274,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitScalarType(self) def scalarType(self): - localctx = Parser.ScalarTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 124, self.RULE_scalarType) self._la = 0 # Token type @@ -13557,7 +13363,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitComponentType(self) def componentType(self): - localctx = Parser.ComponentTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 126, self.RULE_componentType) self._la = 0 # Token type @@ -13624,7 +13429,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDatasetType(self) def datasetType(self): - localctx = Parser.DatasetTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 128, self.RULE_datasetType) self._la = 0 # Token type @@ -13688,7 
+13492,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitEvalDatasetType(self) def evalDatasetType(self): - localctx = Parser.EvalDatasetTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 130, self.RULE_evalDatasetType) try: @@ -13758,7 +13561,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitScalarSetType(self) def scalarSetType(self): - localctx = Parser.ScalarSetTypeContext(self, self._ctx, self.state) self.enterRule(localctx, 132, self.RULE_scalarSetType) self._la = 0 # Token type @@ -13799,7 +13601,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class DataPointVdContext(DpRulesetContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.DpRulesetContext super().__init__(parser) self.copyFrom(ctx) @@ -13834,7 +13635,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDataPointVd(self) class DataPointVarContext(DpRulesetContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.DpRulesetContext super().__init__(parser) self.copyFrom(ctx) @@ -13869,7 +13669,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDataPointVar(self) class DataPointContext(DpRulesetContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.DpRulesetContext super().__init__(parser) self.copyFrom(ctx) @@ -13886,7 +13685,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitDataPoint(self) def dpRuleset(self): - localctx = Parser.DpRulesetContext(self, self._ctx, self.state) self.enterRule(localctx, 134, self.RULE_dpRuleset) self._la = 0 # Token type @@ -13983,7 +13781,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class HrRulesetVdTypeContext(HrRulesetContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.HrRulesetContext super().__init__(parser) self.vdName = None # Token @@ -14028,7 +13825,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitHrRulesetVdType(self) class HrRulesetVarTypeContext(HrRulesetContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.HrRulesetContext super().__init__(parser) self.varName = None # VarIDContext @@ -14070,7 +13866,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitHrRulesetVarType(self) class HrRulesetTypeContext(HrRulesetContext): - def __init__(self, parser, ctx: ParserRuleContext): # actually a VtlParser.HrRulesetContext super().__init__(parser) self.copyFrom(ctx) @@ -14087,7 +13882,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitHrRulesetType(self) def hrRuleset(self): - localctx = Parser.HrRulesetContext(self, self._ctx, self.state) self.enterRule(localctx, 136, self.RULE_hrRuleset) self._la = 0 # Token type @@ -14214,7 +14008,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitValueDomainName(self) def valueDomainName(self): - localctx = Parser.ValueDomainNameContext(self, self._ctx, self.state) self.enterRule(localctx, 138, self.RULE_valueDomainName) try: @@ -14251,7 +14044,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRulesetID(self) def rulesetID(self): - localctx = Parser.RulesetIDContext(self, self._ctx, self.state) self.enterRule(localctx, 140, self.RULE_rulesetID) try: @@ -14303,7 +14095,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRulesetSignature(self) def rulesetSignature(self): - localctx = Parser.RulesetSignatureContext(self, self._ctx, self.state) 
self.enterRule(localctx, 142, self.RULE_rulesetSignature) self._la = 0 # Token type @@ -14366,7 +14157,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitSignature(self) def signature(self): - localctx = Parser.SignatureContext(self, self._ctx, self.state) self.enterRule(localctx, 144, self.RULE_signature) self._la = 0 # Token type @@ -14422,7 +14212,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRuleClauseDatapoint(self) def ruleClauseDatapoint(self): - localctx = Parser.RuleClauseDatapointContext(self, self._ctx, self.state) self.enterRule(localctx, 146, self.RULE_ruleClauseDatapoint) self._la = 0 # Token type @@ -14496,7 +14285,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRuleItemDatapoint(self) def ruleItemDatapoint(self): - localctx = Parser.RuleItemDatapointContext(self, self._ctx, self.state) self.enterRule(localctx, 148, self.RULE_ruleItemDatapoint) self._la = 0 # Token type @@ -14577,7 +14365,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRuleClauseHierarchical(self) def ruleClauseHierarchical(self): - localctx = Parser.RuleClauseHierarchicalContext(self, self._ctx, self.state) self.enterRule(localctx, 150, self.RULE_ruleClauseHierarchical) self._la = 0 # Token type @@ -14640,7 +14427,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRuleItemHierarchical(self) def ruleItemHierarchical(self): - localctx = Parser.RuleItemHierarchicalContext(self, self._ctx, self.state) self.enterRule(localctx, 152, self.RULE_ruleItemHierarchical) self._la = 0 # Token type @@ -14716,7 +14502,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitHierRuleSignature(self) def hierRuleSignature(self): - localctx = Parser.HierRuleSignatureContext(self, self._ctx, self.state) self.enterRule(localctx, 154, self.RULE_hierRuleSignature) self._la = 0 # Token type @@ -14781,7 +14566,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitValueDomainSignature(self) def valueDomainSignature(self): - localctx = Parser.ValueDomainSignatureContext(self, self._ctx, self.state) self.enterRule(localctx, 156, self.RULE_valueDomainSignature) self._la = 0 # Token type @@ -14850,7 +14634,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCodeItemRelation(self) def codeItemRelation(self): - localctx = Parser.CodeItemRelationContext(self, self._ctx, self.state) self.enterRule(localctx, 158, self.RULE_codeItemRelation) self._la = 0 # Token type @@ -14895,7 +14678,7 @@ def codeItemRelation(self): _la == Parser.PLUS or _la == Parser.MINUS or ( - (((_la - 241)) & ~0x3F) == 0 + ((_la - 241) & ~0x3F) == 0 and ( (1 << (_la - 241)) & ( @@ -14961,7 +14744,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCodeItemRelationClause(self) def codeItemRelationClause(self): - localctx = Parser.CodeItemRelationClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 160, self.RULE_codeItemRelationClause) self._la = 0 # Token type @@ -15029,7 +14811,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitValueDomainValue(self) def valueDomainValue(self): - localctx = Parser.ValueDomainValueContext(self, self._ctx, self.state) self.enterRule(localctx, 162, self.RULE_valueDomainValue) self._la = 0 # Token type @@ -15038,18 +14819,16 @@ def valueDomainValue(self): self.state = 1739 _la = self._input.LA(1) if not ( - ( - (((_la - 241)) & ~0x3F) == 0 - and ( - (1 << (_la - 241)) - & ( - (1 << (Parser.INTEGER_CONSTANT - 241)) - | (1 << (Parser.NUMBER_CONSTANT - 241)) - | (1 << 
(Parser.IDENTIFIER - 241)) - ) + ((_la - 241) & ~0x3F) == 0 + and ( + (1 << (_la - 241)) + & ( + (1 << (Parser.INTEGER_CONSTANT - 241)) + | (1 << (Parser.NUMBER_CONSTANT - 241)) + | (1 << (Parser.IDENTIFIER - 241)) ) - != 0 ) + != 0 ): self._errHandler.recoverInline(self) else: @@ -15077,7 +14856,6 @@ def copyFrom(self, ctx: ParserRuleContext): super().copyFrom(ctx) class RangeConstraintContext(ScalarTypeConstraintContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ScalarTypeConstraintContext @@ -15111,7 +14889,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitRangeConstraint(self) class ConditionConstraintContext(ScalarTypeConstraintContext): - def __init__( self, parser, ctx: ParserRuleContext ): # actually a VtlParser.ScalarTypeConstraintContext @@ -15136,7 +14913,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitConditionConstraint(self) def scalarTypeConstraint(self): - localctx = Parser.ScalarTypeConstraintContext(self, self._ctx, self.state) self.enterRule(localctx, 164, self.RULE_scalarTypeConstraint) self._la = 0 # Token type @@ -15215,7 +14991,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitCompConstraint(self) def compConstraint(self): - localctx = Parser.CompConstraintContext(self, self._ctx, self.state) self.enterRule(localctx, 166, self.RULE_compConstraint) try: @@ -15272,7 +15047,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitMultModifier(self) def multModifier(self): - localctx = Parser.MultModifierContext(self, self._ctx, self.state) self.enterRule(localctx, 168, self.RULE_multModifier) self._la = 0 # Token type @@ -15328,7 +15102,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitValidationOutput(self) def validationOutput(self): - localctx = Parser.ValidationOutputContext(self, self._ctx, self.state) self.enterRule(localctx, 170, self.RULE_validationOutput) self._la = 0 # Token type @@ -15386,7 +15159,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitValidationMode(self) def validationMode(self): - localctx = Parser.ValidationModeContext(self, self._ctx, self.state) self.enterRule(localctx, 172, self.RULE_validationMode) self._la = 0 # Token type @@ -15395,21 +15167,19 @@ def validationMode(self): self.state = 1769 _la = self._input.LA(1) if not ( - ( - (((_la - 225)) & ~0x3F) == 0 - and ( - (1 << (_la - 225)) - & ( - (1 << (Parser.NON_NULL - 225)) - | (1 << (Parser.NON_ZERO - 225)) - | (1 << (Parser.PARTIAL_NULL - 225)) - | (1 << (Parser.PARTIAL_ZERO - 225)) - | (1 << (Parser.ALWAYS_NULL - 225)) - | (1 << (Parser.ALWAYS_ZERO - 225)) - ) + ((_la - 225) & ~0x3F) == 0 + and ( + (1 << (_la - 225)) + & ( + (1 << (Parser.NON_NULL - 225)) + | (1 << (Parser.NON_ZERO - 225)) + | (1 << (Parser.PARTIAL_NULL - 225)) + | (1 << (Parser.PARTIAL_ZERO - 225)) + | (1 << (Parser.ALWAYS_NULL - 225)) + | (1 << (Parser.ALWAYS_ZERO - 225)) ) - != 0 ) + != 0 ): self._errHandler.recoverInline(self) else: @@ -15457,7 +15227,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitConditionClause(self) def conditionClause(self): - localctx = Parser.ConditionClauseContext(self, self._ctx, self.state) self.enterRule(localctx, 174, self.RULE_conditionClause) self._la = 0 # Token type @@ -15512,7 +15281,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitInputMode(self) def inputMode(self): - localctx = Parser.InputModeContext(self, self._ctx, self.state) self.enterRule(localctx, 176, self.RULE_inputMode) self._la = 0 # Token type @@ 
-15558,7 +15326,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitImbalanceExpr(self) def imbalanceExpr(self): - localctx = Parser.ImbalanceExprContext(self, self._ctx, self.state) self.enterRule(localctx, 178, self.RULE_imbalanceExpr) try: @@ -15603,7 +15370,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitInputModeHierarchy(self) def inputModeHierarchy(self): - localctx = Parser.InputModeHierarchyContext(self, self._ctx, self.state) self.enterRule(localctx, 180, self.RULE_inputModeHierarchy) self._la = 0 # Token type @@ -15649,7 +15415,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitOutputModeHierarchy(self) def outputModeHierarchy(self): - localctx = Parser.OutputModeHierarchyContext(self, self._ctx, self.state) self.enterRule(localctx, 182, self.RULE_outputModeHierarchy) self._la = 0 # Token type @@ -15692,7 +15457,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitAlias(self) def alias(self): - localctx = Parser.AliasContext(self, self._ctx, self.state) self.enterRule(localctx, 184, self.RULE_alias) try: @@ -15729,7 +15493,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitVarID(self) def varID(self): - localctx = Parser.VarIDContext(self, self._ctx, self.state) self.enterRule(localctx, 186, self.RULE_varID) try: @@ -15766,7 +15529,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitSimpleComponentId(self) def simpleComponentId(self): - localctx = Parser.SimpleComponentIdContext(self, self._ctx, self.state) self.enterRule(localctx, 188, self.RULE_simpleComponentId) try: @@ -15809,7 +15571,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitComponentID(self) def componentID(self): - localctx = Parser.ComponentIDContext(self, self._ctx, self.state) self.enterRule(localctx, 190, self.RULE_componentID) try: @@ -15870,7 +15631,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitLists(self) def lists(self): - localctx = Parser.ListsContext(self, self._ctx, self.state) self.enterRule(localctx, 192, self.RULE_lists) self._la = 0 # Token type @@ -15927,7 +15687,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitErCode(self) def erCode(self): - localctx = Parser.ErCodeContext(self, self._ctx, self.state) self.enterRule(localctx, 194, self.RULE_erCode) try: @@ -15969,7 +15728,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitErLevel(self) def erLevel(self): - localctx = Parser.ErLevelContext(self, self._ctx, self.state) self.enterRule(localctx, 196, self.RULE_erLevel) try: @@ -16023,7 +15781,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitComparisonOperand(self) def comparisonOperand(self): - localctx = Parser.ComparisonOperandContext(self, self._ctx, self.state) self.enterRule(localctx, 198, self.RULE_comparisonOperand) self._la = 0 # Token type @@ -16032,21 +15789,19 @@ def comparisonOperand(self): self.state = 1817 _la = self._input.LA(1) if not ( - ( - ((_la) & ~0x3F) == 0 - and ( - (1 << _la) - & ( - (1 << Parser.EQ) - | (1 << Parser.LT) - | (1 << Parser.MT) - | (1 << Parser.ME) - | (1 << Parser.NEQ) - | (1 << Parser.LE) - ) + ((_la) & ~0x3F) == 0 + and ( + (1 << _la) + & ( + (1 << Parser.EQ) + | (1 << Parser.LT) + | (1 << Parser.MT) + | (1 << Parser.ME) + | (1 << Parser.NEQ) + | (1 << Parser.LE) ) - != 0 ) + != 0 ): self._errHandler.recoverInline(self) else: @@ -16085,7 +15840,6 @@ def exitRule(self, listener: ParseTreeListener): listener.exitOptionalExpr(self) def optionalExpr(self): - localctx = 
Parser.OptionalExprContext(self, self._ctx, self.state)
         self.enterRule(localctx, 200, self.RULE_optionalExpr)
         try:
@@ -16223,7 +15977,6 @@ def exitRule(self, listener: ParseTreeListener):
             listener.exitOptionalExprComponent(self)

     def optionalExprComponent(self):
-
         localctx = Parser.OptionalExprComponentContext(self, self._ctx, self.state)
         self.enterRule(localctx, 202, self.RULE_optionalExprComponent)
         try:
@@ -16358,7 +16111,6 @@ def exitRule(self, listener: ParseTreeListener):
             listener.exitComponentRole(self)

     def componentRole(self):
-
         localctx = Parser.ComponentRoleContext(self, self._ctx, self.state)
         self.enterRule(localctx, 204, self.RULE_componentRole)
         try:
@@ -16426,7 +16178,6 @@ def exitRule(self, listener: ParseTreeListener):
             listener.exitViralAttribute(self)

     def viralAttribute(self):
-
         localctx = Parser.ViralAttributeContext(self, self._ctx, self.state)
         self.enterRule(localctx, 206, self.RULE_viralAttribute)
         try:
@@ -16465,7 +16216,6 @@ def exitRule(self, listener: ParseTreeListener):
             listener.exitValueDomainID(self)

     def valueDomainID(self):
-
         localctx = Parser.ValueDomainIDContext(self, self._ctx, self.state)
         self.enterRule(localctx, 208, self.RULE_valueDomainID)
         try:
@@ -16502,7 +16252,6 @@ def exitRule(self, listener: ParseTreeListener):
             listener.exitOperatorID(self)

     def operatorID(self):
-
         localctx = Parser.OperatorIDContext(self, self._ctx, self.state)
         self.enterRule(localctx, 210, self.RULE_operatorID)
         try:
@@ -16539,7 +16288,6 @@ def exitRule(self, listener: ParseTreeListener):
             listener.exitRoutineName(self)

     def routineName(self):
-
         localctx = Parser.RoutineNameContext(self, self._ctx, self.state)
         self.enterRule(localctx, 212, self.RULE_routineName)
         try:
@@ -16588,7 +16336,6 @@ def exitRule(self, listener: ParseTreeListener):
             listener.exitConstant(self)

     def constant(self):
-
         localctx = Parser.ConstantContext(self, self._ctx, self.state)
         self.enterRule(localctx, 214, self.RULE_constant)
         self._la = 0  # Token type
@@ -16599,7 +16346,7 @@ def constant(self):
             _la = self._input.LA(1)
             if not (
                 _la == Parser.NULL_CONSTANT
                 or (
-                    (((_la - 241)) & ~0x3F) == 0
+                    ((_la - 241) & ~0x3F) == 0
                     and (
                         (1 << (_la - 241))
                         & (
@@ -16670,7 +16417,6 @@ def exitRule(self, listener: ParseTreeListener):
             listener.exitBasicScalarType(self)

     def basicScalarType(self):
-
         localctx = Parser.BasicScalarTypeContext(self, self._ctx, self.state)
         self.enterRule(localctx, 216, self.RULE_basicScalarType)
         self._la = 0  # Token type
@@ -16680,7 +16426,7 @@ def basicScalarType(self):
         _la = self._input.LA(1)
         if not (
             (
-                (((_la - 168)) & ~0x3F) == 0
+                ((_la - 168) & ~0x3F) == 0
                 and (
                     (1 << (_la - 168))
                     & (
@@ -16735,7 +16481,6 @@ def exitRule(self, listener: ParseTreeListener):
             listener.exitRetainType(self)

     def retainType(self):
-
         localctx = Parser.RetainTypeContext(self, self._ctx, self.state)
         self.enterRule(localctx, 218, self.RULE_retainType)
         self._la = 0  # Token type
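A note on the change that dominates the VtlParser hunks above: the recurring guard

    ((_la - K) & ~0x3F) == 0 and ((1 << (_la - K)) & mask) != 0

is ANTLR's 64-bit token-set membership test, and these hunks only strip its redundant parentheses. A minimal sketch of how the test works, with illustrative token numbers (loosely modeled on the LEN/TRIM/UCASE set above, not the real VTL token values):

    BASE = 93
    # Hypothetical three-token set: bits 0, 2 and 5 relative to BASE.
    MASK = (1 << (93 - BASE)) | (1 << (95 - BASE)) | (1 << (98 - BASE))

    def in_token_set(la: int) -> bool:
        offset = la - BASE
        # The first clause confines offset to 0..63, i.e. one 64-bit word,
        # so the shift in the second clause is well defined.
        return (offset & ~0x3F) == 0 and ((1 << offset) & MASK) != 0

    assert in_token_set(95)
    assert not in_token_set(200)  # outside the 64-token window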
diff --git a/src/vtlengine/AST/VtlVisitor.py b/src/vtlengine/AST/VtlVisitor.py
index 8871b49c..b937e4bf 100644
--- a/src/vtlengine/AST/VtlVisitor.py
+++ b/src/vtlengine/AST/VtlVisitor.py
@@ -6,7 +6,6 @@


 class VtlVisitor(ParseTreeVisitor):
-
     # Visit a parse tree produced by Parser#start.
     def visitStart(self, ctx: Parser.StartContext):
         return self.visitChildren(ctx)

diff --git a/src/vtlengine/DataTypes/NumericTypesHandling.py b/src/vtlengine/DataTypes/NumericTypesHandling.py
deleted file mode 100644
index 8b820982..00000000
--- a/src/vtlengine/DataTypes/NumericTypesHandling.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import operator
-from decimal import Decimal
-from typing import Union
-
-
-def decimal_add(a: Union[float, int], b: Union[float, int]) -> float:
-    """
-    Adds two numbers, if they are floats, converts them to Decimal and then to float
-    :param a: first number
-    :param b: second number
-    :return: the sum of the two numbers
-    """
-    if isinstance(a, float) and isinstance(b, float):
-        decimal_value = Decimal(a) + Decimal(b)
-        return float(decimal_value)
-
-    return operator.add(a, b)
-
-
-def decimal_sub(a: Union[float, int], b: Union[float, int]) -> float:
-    if isinstance(a, float) and isinstance(b, float):
-        decimal_value = Decimal(a) - Decimal(b)
-        return float(decimal_value)
-    return operator.sub(a, b)
-
-
-def decimal_mul(a: Union[float, int], b: Union[float, int]) -> float:
-    if isinstance(a, float) and isinstance(b, float):
-        decimal_value = Decimal(a) * Decimal(b)
-        return float(decimal_value)
-    return operator.mul(a, b)
-
-
-def decimal_div(a: Union[float, int], b: Union[float, int]) -> float:
-    if isinstance(a, float) and isinstance(b, float):
-        decimal_value = Decimal(a) / Decimal(b)
-        return float(decimal_value)
-    return operator.truediv(a, b)
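One detail worth keeping in mind when reading the helpers deleted above: Decimal(float) converts the binary double exactly, which is not the same thing as Decimal(str(x)). A small standard-library illustration:

    from decimal import Decimal

    assert Decimal(0.25) == Decimal("0.25")  # 0.25 is exactly representable in binary
    assert Decimal(0.1) != Decimal("0.1")    # 0.1 is not: Decimal(0.1) keeps the binary value

So routing float arithmetic through Decimal, as decimal_add and friends did, changes how intermediate results are rounded, but it does not by itself produce exact decimal results.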
diff --git a/src/vtlengine/DataTypes/TimeHandling.py b/src/vtlengine/DataTypes/TimeHandling.py
index 39230f24..f6e1a531 100644
--- a/src/vtlengine/DataTypes/TimeHandling.py
+++ b/src/vtlengine/DataTypes/TimeHandling.py
@@ -32,15 +32,16 @@ def date_to_period(date_value: date, period_indicator: str) -> Any:
     return TimePeriodHandler(f"{date_value.year}D{date_value.timetuple().tm_yday}")


-def period_to_date(year: int,
-                   period_indicator: str,
-                   period_number: int,
-                   start: bool = False
-                   ) -> date:
+def period_to_date(
+    year: int, period_indicator: str, period_number: int, start: bool = False
+) -> date:
     if period_indicator == "A":
         return date(year, 1, 1) if start else date(year, 12, 31)
     periods = {
-        "S": [(date(year, 1, 1), date(year, 6, 30)), (date(year, 7, 1), date(year, 12, 31))],
+        "S": [
+            (date(year, 1, 1), date(year, 6, 30)),
+            (date(year, 7, 1), date(year, 12, 31)),
+        ],
         "Q": [
             (date(year, 1, 1), date(year, 3, 31)),
             (date(year, 4, 1), date(year, 6, 30)),
@@ -237,7 +238,8 @@ def _meta_comparison(self, other: Any, py_op: Any) -> Optional[bool]:
             return False
         other = TimePeriodHandler(other)
         return py_op(
-            DURATION_MAPPING[self.period_indicator], DURATION_MAPPING[other.period_indicator]
+            DURATION_MAPPING[self.period_indicator],
+            DURATION_MAPPING[other.period_indicator],
         )

     def start_date(self, as_date: bool = False) -> Union[date, str]:
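The reflowed period_to_date above maps a year, a period indicator ("A", "S", "Q", ...) and a period number to the start or end date of that period via the lookup table shown in the hunk. As a hedged sketch of the underlying quarter arithmetic only (the lookup past the visible context is not shown, so this is an illustration rather than the engine's code):

    from datetime import date, timedelta
    from typing import Tuple

    def quarter_bounds(year: int, quarter: int) -> Tuple[date, date]:
        # First day of the quarter, and the day before the next quarter starts.
        start = date(year, 3 * (quarter - 1) + 1, 1)
        end = date(year + (quarter == 4), (3 * quarter) % 12 + 1, 1) - timedelta(days=1)
        return start, end

    assert quarter_bounds(2020, 2) == (date(2020, 4, 1), date(2020, 6, 30))
    assert quarter_bounds(2020, 4) == (date(2020, 10, 1), date(2020, 12, 31))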
{null_identifier}.", + "values on {null_identifier}.", "0-1-1-5": "On Dataset {name} loading: Datasets without identifiers must have 0 or " - "1 datapoints.", + "1 datapoints.", "0-1-1-6": "Duplicated records. Combination of identifiers are repeated.", "0-1-1-7": "G1 - The provided CSV file is empty.", "0-1-1-8": "The following identifiers {ids} were not found , review file {file}.", "0-1-1-9": "You have a problem related with commas, review rfc4180 standard, review file " - "{file}.", + "{file}.", "0-1-1-10": "On Dataset {name} loading: Component {comp_name} is missing in Datapoints.", "0-1-1-11": "Wrong data in the file for this scalardataset {name}.", "0-1-1-12": "On Dataset {name} loading: not possible to cast column {column} to {type}.", @@ -59,7 +59,7 @@ "1-1-1-13": "At op {op}: Component {comp_name} role must be '{role_1}', found '{role_2}'.", # "1-1-1-14": "At op {op}: Dataset {name} type must be '{type_1}'.", "1-1-1-15": "At op {op}: Datasets {name_1} and {name_2} does not contain the same number of " - "{type}.", + "{type}.", "1-1-1-16": "Found structure not nullable and null values.", # "1-1-1-17": "At op {op}: Problem with nullability for this components {name_1} and {name_2}.", # "1-1-1-18": "No {type} {value} found.", @@ -76,36 +76,36 @@ # TODO: Use error message 1-1-1-8 # "1-1-2-1": "At op {op}: No measures found to aggregate.", "1-1-2-2": "At op {op}: Only Identifiers are allowed for grouping, " - "found {id_name} - {id_type}.", + "found {id_name} - {id_type}.", "1-1-2-3": "Having component output type must be boolean, found {type}.", # "1-1-2-4": "At op {op}: Component {id_name} not found in dataset", # Analytic errors # TODO: Use error message 1-1-1-8 # "1-1-3-1": "At op {op}: No measures found to analyse.", "1-1-3-2": "At op {op}: Only Identifiers are allowed for partitioning, " - "found {id_name} - {id_type}.", + "found {id_name} - {id_type}.", # Cast errors "1-1-5-1": "Type {type_1}, cannot be cast to {type_2}.", "1-1-5-3": "Impossible to cast from type {type_1} to {type_2}, without providing a mask.", "1-1-5-4": "Invalid mask to cast from type {type_1} to {type_2}.", "1-1-5-5": "A mask can't be provided to cast from type {type_1} to {type_2}. 
Mask provided: " - "{mask_value}.", + "{mask_value}.", "2-1-5-1": "Impossible to cast {value} from type {type_1} to {type_2}.", # Clause errors # "1-1-6-1": "At op {op}: Component {comp_name} not found in dataset {dataset_name}.", "1-1-6-2": "At op {op}: The identifier {name} in dataset {dataset} could not be included " - "in the {op} op.", + "in the {op} op.", # TODO: This is not possible at all, as calc clause adds a new column and # identifiers are still unique # "1-1-6-3": "Found duplicated values on identifiers after Calc clause.", "1-1-6-4": "At op {op}: Alias symbol cannot have the name of a component symbol: " - "{symbol_name} - {comp_name}.", + "{symbol_name} - {comp_name}.", "1-1-6-5": "At op {op}: Scalar values are not allowed at sub operator, found {name}.", "1-1-6-6": "Membership is not allowed inside a clause, found {dataset_name}#{comp_name}.", "1-1-6-7": "Cannot use component {comp_name} as it was generated in another calc expression.", # all the components used in calccomp must belong to the operand dataset "1-1-6-8": "Cannot use component {comp_name} for rename, it is already in the dataset " - "{dataset_name}.", + "{dataset_name}.", # it is the same error that 1-1-8-1 AND similar but not the same 1-3-1 "1-1-6-9": "At op {op}: The following components are repeated: {from_components}.", "1-1-6-10": "At op {op}: Component {operand} in dataset {dataset_name} is not an identifier", @@ -116,25 +116,25 @@ # "1-1-6-15": "At op {op}: Component {comp_name} already exists in dataset {dataset_name}", # Comparison errors "1-1-7-1": "At op {op}: Value in {left_name} of type {left_type} is not comparable to value " - "{right_name} of type {right_type}.", + "{right_name} of type {right_type}.", # Conditional errors "1-1-9-1": "At op {op}: The evaluation condition must result in a Boolean " - "expression, found '{type}'.", + "expression, found '{type}'.", "1-1-9-3": "At op {op}: Then clause {then_name} and else clause {else_name}, both must be " - "Scalars.", + "Scalars.", "1-1-9-4": "At op {op}: The condition dataset {name} must contain an unique measure.", "1-1-9-5": "At op {op}: The condition dataset Measure must be a Boolean, found '{type}'.", "1-1-9-6": "At op {op}: Then-else datasets have different number of identifiers compared " - "with condition dataset.", + "with condition dataset.", "1-1-9-9": "At op {op}: {clause} component {clause_name} role must be {role_1}, found " - "{role_2}.", + "{role_2}.", "1-1-9-10": "At op {op}: {clause} dataset have different number of identifiers compared with " - "condition dataset.", + "condition dataset.", "1-1-9-11": "At op {op}: Condition component {name} must be Boolean, found {type}.", "1-1-9-12": "At op {op}: then clause {then_symbol} and else clause {else_symbol}, both must " - "be Datasets or at least one of them a Scalar.", + "be Datasets or at least one of them a Scalar.", "1-1-9-13": "At op {op}: then {then} and else {else_clause} datasets must contain the same " - "number of components.", + "number of components.", "2-1-9-1": "At op {op}: Condition operators must have the same operator type.", "2-1-9-2": "At op {op}: Condition {name} it's not a boolean.", "2-1-9-3": "At op {op}: All then and else operands must be scalars.", @@ -146,16 +146,16 @@ "1-1-10-1": "At op {op}: The {op_type} operand must have exactly one measure of type {me_type}", "1-1-10-2": "At op {op}: Number of variable has to be equal between the call and signature.", "1-1-10-3": "At op {op}: Name in the call {found} has to be equal to variable rule in " - "signature 
{expected}.", + "signature {expected}.", "1-1-10-4": "At op {op}: When a hierarchical ruleset is defined for value domain, it is " - "necessary to specify the component with the rule clause on call.", + "necessary to specify the component with the rule clause on call.", "1-1-10-5": "No rules to analyze on Hierarchy Roll-up as rules have no = operator.", "1-1-10-6": "At op {op}: Name in the call {found} has to be equal to variable condition in " - "signature {expected} .", + "signature {expected} .", "1-1-10-7": "Not found component {comp_name} on signature.", "1-1-10-8": "At op {op}: Measures involved have to be numerical, other types found {found}.", "1-1-10-9": "Invalid signature for the ruleset {ruleset}. On variables, condComp and " - "ruleComp must be the same", + "ruleComp must be the same", # General Operators # "1-1-12-1": "At op {op}: You could not recalculate the identifier {name} on dataset " # "{dataset}.", @@ -165,49 +165,49 @@ "1-1-13-1": "At op {op}: Duplicated alias {duplicates}.", "1-1-13-2": "At op {op}: Missing mandatory aliasing.", "1-1-13-3": "At op {op}: Join conflict with duplicated names for column {name} from original " - "datasets.", + "datasets.", "1-1-13-4": "At op {op}: Using clause, using={using_names}, does not define all the " - "identifiers, of non reference dataset {dataset}.", + "identifiers, of non reference dataset {dataset}.", "1-1-13-5": "At op {op}: Invalid subcase B1, All the datasets must share as identifiers the " - "using ones.", + "using ones.", # not in use but we keep for later, in use 1-1-13-4 "1-1-13-6": "At op {op}: Invalid subcase B2, All the declared using components " - "'{using_components}' must be present as components in the reference dataset " - "'{reference}'.", + "'{using_components}' must be present as components in the reference dataset " + "'{reference}'.", "1-1-13-7": "At op {op}: Invalid subcase B2, All the non reference datasets must share as " - "identifiers the using ones.", + "identifiers the using ones.", "1-1-13-8": "At op {op}: No available using clause.", "1-1-13-9": "Ambiguity for this variable {comp_name} inside a join clause.", "1-1-13-10": "The join operator does not perform scalar/component operations.", "1-1-13-11": "At op {op}: Invalid subcase A, {dataset_reference} should be a superset but " - "{component} not found.", + "{component} not found.", # inner_join and left join "1-1-13-12": "At op {op}: Invalid subcase A. There are different identifiers for the provided " - "datasets", + "datasets", # full_join "1-1-13-13": "At op {op}: Invalid subcase A. There are not same number of identifiers for the " - "provided datasets", + "provided datasets", # full_join "1-1-13-14": "Cannot perform a join over a Dataset Without Identifiers: {name}.", "1-1-13-15": "At op {op}: {comp_name} has to be a Measure for all the provided datasets inside " - "the join", + "the join", "1-1-13-16": "At op {op}: Invalid use, please review : {msg}.", "1-1-13-17": "At op {op}: {comp_name} not present in the dataset(result from join VDS) at the " - "time it is called", + "time it is called", # Operators general errors "1-1-14-1": "At op {op}: Measure names don't match: {left} - {right}.", "1-1-14-3": "At op {op}: Invalid scalar types for identifiers at DataSet {dataset}. 
One {type} " - "identifier expected, {count} found.", + "identifier expected, {count} found.", "1-1-14-5": "At op {op}: {names} with type/s {types} is not compatible with {op}", "1-1-14-6": "At op {op}: {comp_name} with type {comp_type} and scalar_set with type " - "{scalar_type} is not compatible with {op}", + "{scalar_type} is not compatible with {op}", # "1-1-14-8": "At op {op}: Operation not allowed for multimeasure datasets.", "1-1-14-9": "At op {op}: {names} with type/s {types} is not compatible with {op} on datasets " - "{datasets}.", + "{datasets}.", # Numeric Operators "1-1-15-8": "At op {op}: {op} operator cannot have a {comp_type} as parameter.", "2-1-15-1": "At op {op}: Component {comp_name} from dataset {dataset_name} contains negative " - "values.", + "values.", "2-1-15-2": "At op {op}: Value {value} could not be negative.", "2-1-15-3": "At op {op}: Base value {value} could not be less or equal 0.", "2-1-15-4": "At op {op}: Invalid values in Component {name}.", @@ -216,7 +216,7 @@ "2-1-15-7": "At op {op}: {op} operator cannot be a dataset.", # Set Operators "1-1-17-1": "At op {op}: Datasets {dataset_1} and {dataset_2} have different number of " - "components", + "components", # String Operators # "1-1-18-1": "At op {op}: Invalid Dataset {name}. Dataset with one measure expected.", "1-1-18-2": "At op {op}: Composition of DataSet and Component is not allowed.", @@ -230,12 +230,12 @@ "1-1-19-2": "At op {op}: Unknown date type for {op}.", "1-1-19-3": "At op {op}: Invalid {param} for {op}.", "1-1-19-4": "At op {op}: Invalid values {value_1} and {value_2}, periodIndTo parameter must be " - "a larger duration value than periodIndFrom parameter.", + "a larger duration value than periodIndFrom parameter.", "1-1-19-5": "At op {op}: periodIndTo parameter must be a larger duration value than the values " - "to aggregate.", + "to aggregate.", "1-1-19-6": "At op {op}: Time type used in the component {comp} is not supported.", "1-1-19-7": "At op {op}: can be applied only on Data Sets (of time series) and returns a Data " - "Set (of time series).", + "Set (of time series).", # flow_to_stock, stock_to_flow "1-1-19-8": "At op {op}: {op} can only be applied to a {comp_type}", "1-1-19-9": "At op {op}: {op} can only be applied to a {comp_type} with a {param}", @@ -243,25 +243,24 @@ "1-1-19-10": "{op} can only be applied to operands with data type as Date or Time Period", # Other time operators "2-1-19-1": "At op {op}: Invalid values {value_1} and {value_2} for duration, " - "periodIndTo parameter must be a larger duration value than the " - "values to aggregate.", + "periodIndTo parameter must be a larger duration value than the " + "values to aggregate.", "2-1-19-2": "Invalid period indicator {period}.", "2-1-19-3": "Only same period indicator allowed for both parameters ({period1} != {period2}).", "2-1-19-4": "Date setter, ({value} > {date}). Cannot set date1 with a value higher than date2.", "2-1-19-5": "Date setter, ({value} < {date}). 
Cannot set date2 with a value lower than date1.", "2-1-19-6": "Invalid period format, must be YYYY-(L)NNN: {period_format}", "2-1-19-7": "Period Number must be between 1 and {periods} for period indicator " - "{period_indicator}.", + "{period_indicator}.", "2-1-19-8": "Invalid date format, must be YYYY-MM-DD: {date}", "2-1-19-9": "Invalid day {day} for year {year}.", "2-1-19-10": "Invalid year {year}, must be between 1900 and 9999.", "2-1-19-11": "{op} operator is not compatible with time values", - "2-1-19-12": "At op {op}: Invalid param type {type} for param {name}, " - "expected {expected}.", + "2-1-19-12": "At op {op}: Invalid param type {type} for param {name}, " "expected {expected}.", "2-1-19-13": "At op {op}: Invalid param data_type {type} for param {name}, " - "expected {expected}.", + "expected {expected}.", "2-1-19-14": "At op {op}: Invalid dataset {name}, requires at least one Date/Time_Period " - "measure.", + "measure.", "2-1-19-15": "{op} can only be applied according to the following mask: PY/YDDD/D", "2-1-19-16": "{op} can only be applied according to the following mask: PM/MDD/D", "2-1-19-17": "{op} can only be positive numbers", @@ -293,13 +292,13 @@ "1-3-22": "Unable to categorize {node_value}.", "1-3-23": "Missing value domain '{name}' definition, please provide an structure.", "1-3-24": "Internal error on Analytic operators inside a calc, No partition or " - "order symbol found.", + "order symbol found.", "1-3-26": "Value domain {name} not found.", "1-3-27": "Dataset without identifiers are not allowed in {op} operator.", "1-3-28": "At op {op}: invalid number of parameters: received {received}, expected at " - "least: {expected}", + "least: {expected}", "1-3-29": "At op {op}: can not use user defined operator that returns a component outside " - "clause operator or rule", + "clause operator or rule", "1-3-30": "At op {op}: too many parameters: received {received}, expected: {expected}", "1-3-31": "Cannot use component {name} outside an aggregate function in a having clause.", "1-3-32": "Cannot perform operation {op} inside having clause.", @@ -313,23 +312,23 @@ "1-4-1-3": "At op {op}: using variable {value}, not defined as an argument.", "1-4-1-4": "Found duplicates at arguments naming, please review {type} " "definition {op}.", "1-4-1-5": "Found duplicates at rule naming: {names}. 
Please review {type} " - "{ruleset_name} definition.", + "{ruleset_name} definition.", "1-4-1-6": "At op {op}: Arguments incoherence, {defined} defined {passed} passed.", "1-4-1-7": "All rules must be named or not named, but found mixed criteria at {type} " - "definition {name}.", + "definition {name}.", "1-4-1-8": "All rules must have different code items in the left side of '=' in hierarchy " - "operator at hierachical ruleset definition {name}.", + "operator at hierachical ruleset definition {name}.", "1-4-1-9": "At op check_datapoint: {name} has an invalid datatype expected DataSet, found " - "Scalar.", + "Scalar.", # AST Creation "1-4-2-1": "Eval could not be called without a {option} type definition.", "1-4-2-2": "Optional or empty expression node is not allowed in time_agg.", "1-4-2-3": "{value} could not be called in the count.", "1-4-2-4": "At op {op}: Only one order_by element must be used in Analytic with range " - "windowing.", + "windowing.", "1-4-2-5": "At op {op}: User defined operator without returns is not implemented.", "1-4-2-6": "At op {op}: Window must be provided.", "1-4-2-7": "At op {op}: Partition by or order by clause must be provided for Analytic " - "operators.", + "operators.", # Not Implemented Error } diff --git a/src/vtlengine/Interpreter/__init__.py b/src/vtlengine/Interpreter/__init__.py index 2aa30f19..fac4ca6f 100644 --- a/src/vtlengine/Interpreter/__init__.py +++ b/src/vtlengine/Interpreter/__init__.py @@ -68,10 +68,19 @@ from vtlengine.Operators.Comparison import Between, ExistIn from vtlengine.Operators.Conditional import Case, If from vtlengine.Operators.General import Eval -from vtlengine.Operators.HROperators import HAAssignment, Hierarchy, get_measure_from_dataset +from vtlengine.Operators.HROperators import ( + HAAssignment, + Hierarchy, + get_measure_from_dataset, +) from vtlengine.Operators.Numeric import Round, Trunc from vtlengine.Operators.String import Instr, Replace, Substr -from vtlengine.Operators.Time import Current_Date, Date_Add, Fill_time_series, Time_Aggregation +from vtlengine.Operators.Time import ( + Current_Date, + Date_Add, + Fill_time_series, + Time_Aggregation, +) from vtlengine.Operators.Validation import Check, Check_Datapoint, Check_Hierarchy from vtlengine.Utils import ( AGGREGATION_MAPPING, @@ -153,7 +162,9 @@ def _load_datapoints_efficient(self, statement_num: int) -> None: for ds_name in self.ds_analysis[INSERT][statement_num]: if ds_name in self.datapoints_paths: self.datasets[ds_name].data = load_datapoints( - self.datasets[ds_name].components, ds_name, self.datapoints_paths[ds_name] + self.datasets[ds_name].components, + ds_name, + self.datapoints_paths[ds_name], ) elif ds_name in self.datasets and self.datasets[ds_name].data is None: _fill_dataset_empty_data(self.datasets[ds_name]) @@ -180,7 +191,9 @@ def _save_datapoints_efficient(self, statement_num: int) -> None: # Saving only datasets, no scalars save_datapoints( - self.time_period_representation, self.datasets[ds_name], self.output_path + self.time_period_representation, + self.datasets[ds_name], + self.output_path, ) self.datasets[ds_name].data = None @@ -201,8 +214,9 @@ def visit_Start(self, node: AST.Start) -> Any: if isinstance(child, (AST.Assignment, AST.PersistentAssignment)): vtlengine.Exceptions.dataset_output = child.left.value # type: ignore[attr-defined] self._load_datapoints_efficient(statement_num) - if (not isinstance(child, (AST.HRuleset, AST.DPRuleset, AST.Operator)) and - not isinstance(child, (AST.Assignment, AST.PersistentAssignment))): + if 
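[editor note] The centralised_messages catalog above is consumed by the SemanticError constructor shown earlier in the Exceptions diff (`message = centralised_messages[code].format(**kwargs)`), so each raise site only supplies a code plus keyword arguments. A reduced illustration with two entries copied from the catalog; the stripped-down error class is a stand-in for the real one, which also tracks line and column:

```python
from typing import Any

centralised_messages = {
    "0-1-2-3": "Component {component} is duplicated.",
    "1-1-17-1": "At op {op}: Datasets {dataset_1} and {dataset_2} have "
    "different number of components",
}


class SemanticError(Exception):
    def __init__(self, code: str, **kwargs: Any) -> None:
        super().__init__(f"{code}: {centralised_messages[code].format(**kwargs)}")


try:
    raise SemanticError("1-1-17-1", op="union", dataset_1="DS_1", dataset_2="DS_2")
except SemanticError as err:
    print(err)  # 1-1-17-1: At op union: Datasets DS_1 and DS_2 have ...
```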
not isinstance( + child, (AST.HRuleset, AST.DPRuleset, AST.Operator) + ) and not isinstance(child, (AST.Assignment, AST.PersistentAssignment)): raise SemanticError("1-3-17") result = self.visit(child) @@ -229,7 +243,6 @@ def visit_Start(self, node: AST.Start) -> Any: # Definition Language def visit_Operator(self, node: AST.Operator) -> None: - if self.udos is None: self.udos = {} elif node.op in self.udos: @@ -262,7 +275,6 @@ def visit_Operator(self, node: AST.Operator) -> None: } def visit_DPRuleset(self, node: AST.DPRuleset) -> None: - # Rule names are optional, if not provided, they are generated. # If provided, all must be provided rule_names = [rule.name for rule in node.rules if rule.name is not None] @@ -358,7 +370,6 @@ def visit_PersistentAssignment(self, node: AST.PersistentAssignment) -> Any: return self.visit_Assignment(node) def visit_BinOp(self, node: AST.BinOp) -> Any: - is_from_if = False if ( not self.is_from_condition @@ -369,8 +380,12 @@ def visit_BinOp(self, node: AST.BinOp) -> Any: is_from_if = self.is_from_if self.is_from_if = False - if (self.is_from_join and node.op in [MEMBERSHIP, AGGREGATE] and - hasattr(node.left, "value") and hasattr(node.right, "value")): + if ( + self.is_from_join + and node.op in [MEMBERSHIP, AGGREGATE] + and hasattr(node.left, "value") + and hasattr(node.right, "value") + ): if self.udo_params is not None and node.right.value in self.udo_params[-1]: comp_name = f"{node.left.value}#{self.udo_params[-1][node.right.value]}" else: @@ -421,7 +436,10 @@ def visit_Aggregation(self, node: AST.Aggregation) -> None: if node.operand is not None and operand is not None: op_comp: DataComponent = self.visit(node.operand) comps_to_keep = {} - for comp_name, comp in self.regular_aggregation_dataset.components.items(): + for ( + comp_name, + comp, + ) in self.regular_aggregation_dataset.components.items(): if comp.role == Role.IDENTIFIER: comps_to_keep[comp_name] = copy(comp) comps_to_keep[op_comp.name] = Component( @@ -744,8 +762,12 @@ def visit_VarID(self, node: AST.VarID) -> Any: # noqa: C901 is_partial_present = 0 found_comp = None for comp_name in self.regular_aggregation_dataset.get_components_names(): - if ("#" in comp_name and comp_name.split("#")[1] == node.value or "#" - in node.value and node.value.split("#")[1] == comp_name): + if ( + "#" in comp_name + and comp_name.split("#")[1] == node.value + or "#" in node.value + and node.value.split("#")[1] == comp_name + ): is_partial_present += 1 found_comp = comp_name if is_partial_present == 0: @@ -783,7 +805,9 @@ def visit_VarID(self, node: AST.VarID) -> Any: # noqa: C901 comp_name = self.ruleset_signature[node.value] if comp_name not in self.ruleset_dataset.components: raise SemanticError( - "1-1-1-10", comp_name=node.value, dataset_name=self.ruleset_dataset.name + "1-1-1-10", + comp_name=node.value, + dataset_name=self.ruleset_dataset.name, ) data = None if self.rule_data is None else self.rule_data[comp_name] return DataComponent( @@ -938,7 +962,6 @@ def visit_RegularAggregation(self, node: AST.RegularAggregation) -> None: # noq return REGULAR_AGGREGATION_MAPPING[node.op].analyze(operands, dataset) def visit_If(self, node: AST.If) -> Dataset: - self.is_from_condition = True condition = self.visit(node.condition) self.is_from_condition = False @@ -948,7 +971,10 @@ def visit_If(self, node: AST.If) -> Dataset: elseValue = self.visit(node.elseOp) if not isinstance(thenValue, Scalar) or not isinstance(elseValue, Scalar): raise SemanticError( - "1-1-9-3", op="If_op", then_name=thenValue.name, 
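[editor note] The visit_VarID refactor above only re-wraps a compound predicate, but it is easy to misread: inside a join, components can be stored under qualified names like "DS_1#Me_1", so a bare name has to match either side of the "#", and the surrounding code counts matches (is_partial_present), presumably to detect ambiguous references. An isolated sketch with invented component names:

```python
def partial_match(comp_name: str, value: str) -> bool:
    # `and` binds tighter than `or`, exactly as in the reformatted condition
    return (
        "#" in comp_name and comp_name.split("#")[1] == value
        or "#" in value and value.split("#")[1] == comp_name
    )


names = ["Id_1", "DS_1#Me_1", "DS_2#Me_1"]
matches = [n for n in names if partial_match(n, "Me_1")]
print(matches)  # ['DS_1#Me_1', 'DS_2#Me_1'] -> more than one hit: ambiguous
```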
else_name=elseValue.name + "1-1-9-3", + op="If_op", + then_name=thenValue.name, + else_name=elseValue.name, ) if condition.value: return self.visit(node.thenOp) @@ -1024,7 +1050,9 @@ def visit_RenameNode(self, node: AST.RenameNode) -> Any: def visit_Constant(self, node: AST.Constant) -> Any: return Scalar( - name=str(node.value), value=node.value, data_type=BASIC_TYPES[type(node.value)] + name=str(node.value), + value=node.value, + data_type=BASIC_TYPES[type(node.value)], ) def visit_JoinOp(self, node: AST.JoinOp) -> None: @@ -1130,7 +1158,9 @@ def visit_ParamOp(self, node: AST.ParamOp) -> None: # noqa: C901 for comp_name in node.children[2:]: if comp_name.__str__() not in dataset_element.components: raise SemanticError( - "1-1-1-10", comp_name=comp_name, dataset_name=dataset_element.name + "1-1-1-10", + comp_name=comp_name, + dataset_name=dataset_element.name, ) if dpr_info is not None and dpr_info["signature_type"] == "variable": for i, comp_name in enumerate(node.children[2:]): @@ -1164,7 +1194,9 @@ def visit_ParamOp(self, node: AST.ParamOp) -> None: # noqa: C901 # Datapoint Ruleset final evaluation return Check_Datapoint.analyze( - dataset_element=dataset_element, rule_info=rule_output_values, output=output + dataset_element=dataset_element, + rule_info=rule_output_values, + output=output, ) elif node.op in (CHECK_HIERARCHY, HIERARCHY): if len(node.children) == 3: @@ -1203,7 +1235,10 @@ def visit_ParamOp(self, node: AST.ParamOp) -> None: # noqa: C901 and hr_info["signature"] != component ): raise SemanticError( - "1-1-10-3", op=node.op, found=component, expected=hr_info["signature"] + "1-1-10-3", + op=node.op, + found=component, + expected=hr_info["signature"], ) elif hr_info["node"].signature_type == "valuedomain" and component is None: raise SemanticError("1-1-10-4", op=node.op) @@ -1215,7 +1250,10 @@ def visit_ParamOp(self, node: AST.ParamOp) -> None: # noqa: C901 and cond_components[i] != cond_comp ): raise SemanticError( - "1-1-10-6", op=node.op, expected=cond_comp, found=cond_components[i] + "1-1-10-6", + op=node.op, + expected=cond_comp, + found=cond_components[i], ) cond_info[cond_comp] = cond_components[i] @@ -1270,7 +1308,9 @@ def visit_ParamOp(self, node: AST.ParamOp) -> None: # noqa: C901 # Final evaluation if node.op == CHECK_HIERARCHY: result = Check_Hierarchy.analyze( - dataset_element=dataset, rule_info=rule_output_values, output=output + dataset_element=dataset, + rule_info=rule_output_values, + output=output, ) del rule_output_values else: @@ -1396,10 +1436,12 @@ def visit_HRBinOp(self, node: AST.HRBinOp) -> Any: left_operand.data = pd.DataFrame({measure_name: []}) if right_operand.data is None: right_operand.data = pd.DataFrame({measure_name: []}) - left_null_indexes = set(left_operand.data[left_operand.data[ - measure_name].isnull()].index) - right_null_indexes = set(right_operand.data[right_operand.data[ - measure_name].isnull()].index) + left_null_indexes = set( + left_operand.data[left_operand.data[measure_name].isnull()].index + ) + right_null_indexes = set( + right_operand.data[right_operand.data[measure_name].isnull()].index + ) # If no indexes are in common, then one datapoint is not null invalid_indexes = list(left_null_indexes.intersection(right_null_indexes)) if len(invalid_indexes) > 0: @@ -1415,7 +1457,6 @@ def visit_HRUnOp(self, node: AST.HRUnOp) -> None: return HR_UNARY_MAPPING[node.op].analyze(operand) def visit_Validation(self, node: AST.Validation) -> Dataset: - validation_element = self.visit(node.validation) if not isinstance(validation_element, 
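[editor note] The visit_HRBinOp rewrite above makes a subtle rule easier to see: in a hierarchy comparison a row is only invalid when the measure is null on both sides, which is why the code intersects the two sets of null indexes. A minimal pandas reproduction:

```python
import pandas as pd

left = pd.DataFrame({"Me_1": [1.0, None, None]})
right = pd.DataFrame({"Me_1": [None, 2.0, None]})

left_null = set(left[left["Me_1"].isnull()].index)
right_null = set(right[right["Me_1"].isnull()].index)

invalid = sorted(left_null.intersection(right_null))
print(invalid)  # [2] -- only row 2 is null on both sides
```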
Dataset): raise ValueError(f"Expected dataset, got {type(validation_element).__name__}") @@ -1532,7 +1573,10 @@ def generate_then_else_datasets(self, condition: Union[Dataset, DataComponent]) components.update( { name: Component( - name=name, data_type=BASIC_TYPES[int], role=Role.MEASURE, nullable=True + name=name, + data_type=BASIC_TYPES[int], + role=Role.MEASURE, + nullable=True, ) } ) @@ -1737,9 +1781,7 @@ def visit_UDOCall(self, node: AST.UDOCall) -> None: # noqa: C901 signature_values[param["name"]] = self.visit(node.params[i]) elif param["type"] in ["Dataset", "Component"]: if isinstance(node.params[i], AST.VarID): - signature_values[param["name"]] = node.params[ - i - ].value # type: ignore[attr-defined] + signature_values[param["name"]] = node.params[i].value # type: ignore[attr-defined] else: param_element = self.visit(node.params[i]) if isinstance(param_element, Dataset): @@ -1834,5 +1876,8 @@ def visit_TimeAggregation(self, node: AST.TimeAggregation) -> None: operand = self.visit(node.operand) return Time_Aggregation.analyze( - operand=operand, period_from=node.period_from, period_to=node.period_to, conf=node.conf + operand=operand, + period_from=node.period_from, + period_to=node.period_to, + conf=node.conf, ) diff --git a/src/vtlengine/Model/__init__.py b/src/vtlengine/Model/__init__.py index 2b070398..11a2b315 100644 --- a/src/vtlengine/Model/__init__.py +++ b/src/vtlengine/Model/__init__.py @@ -229,23 +229,21 @@ def __eq__(self, other: Any) -> bool: self.data[comp.name] = self.data[comp.name].astype(str) other.data[comp.name] = other.data[comp.name].astype(str) self.data[comp.name] = self.data[comp.name].map( - lambda x: str(TimePeriodHandler(x)) if x != "" else "", na_action="ignore" + lambda x: str(TimePeriodHandler(x)) if x != "" else "", + na_action="ignore", ) other.data[comp.name] = other.data[comp.name].map( - lambda x: str(TimePeriodHandler(x)) if x != "" else "", na_action="ignore" + lambda x: str(TimePeriodHandler(x)) if x != "" else "", + na_action="ignore", ) elif type_name in ["Integer", "Number"]: type_ = "int64" if type_name == "Integer" else "float32" # We use here a number to avoid errors on equality on empty strings self.data[comp.name] = ( - self.data[comp.name] - .replace("", -1234997) - .astype(type_) # type: ignore[call-overload] + self.data[comp.name].replace("", -1234997).astype(type_) # type: ignore[call-overload] ) other.data[comp.name] = ( - other.data[comp.name] - .replace("", -1234997) - .astype(type_) # type: ignore[call-overload] + other.data[comp.name].replace("", -1234997).astype(type_) # type: ignore[call-overload] ) try: assert_frame_equal( @@ -334,7 +332,7 @@ def to_dict(self) -> Dict[str, Any]: return { "name": self.name, "components": {k: v.to_dict() for k, v in self.components.items()}, - "data": self.data.to_dict(orient="records") if self.data is not None else None, + "data": (self.data.to_dict(orient="records") if self.data is not None else None), } def to_json(self) -> str: diff --git a/src/vtlengine/Operators/Aggregation.py b/src/vtlengine/Operators/Aggregation.py index 69acaa8a..0a2e7645 100644 --- a/src/vtlengine/Operators/Aggregation.py +++ b/src/vtlengine/Operators/Aggregation.py @@ -89,7 +89,10 @@ def _handle_data_types(cls, data: pd.DataFrame, measures: List[Component], mode: data[measure.name] = ( data[measure.name] .astype(object) - .map(lambda x: TimeIntervalHandler.from_iso_format(x), na_action="ignore") + .map( + lambda x: TimeIntervalHandler.from_iso_format(x), + na_action="ignore", + ) ) else: data[measure.name] = 
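[editor note] The Dataset.__eq__ reformat above preserves a deliberate trick flagged by the inline comment: before casting a column to a numeric dtype for comparison, empty strings are swapped for the sentinel -1234997 so astype() cannot fail and two empty cells still compare equal. A small reproduction of just that step:

```python
import pandas as pd

col = pd.Series(["1", "", "3"])
# col.astype("int64") would raise ValueError because of the empty string
numeric = col.replace("", -1234997).astype("int64")
print(numeric.tolist())  # [1, -1234997, 3]
```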
data[measure.name].map( @@ -128,7 +131,10 @@ def validate( # type: ignore[override] for comp_name in grouping_columns: if comp_name not in operand.components: raise SemanticError( - "1-1-1-10", op=cls.op, comp_name=comp_name, dataset_name=operand.name + "1-1-1-10", + op=cls.op, + comp_name=comp_name, + dataset_name=operand.name, ) if operand.components[comp_name].role != Role.IDENTIFIER: raise SemanticError( diff --git a/src/vtlengine/Operators/Analytic.py b/src/vtlengine/Operators/Analytic.py index d3d993b8..8f48a6e2 100644 --- a/src/vtlengine/Operators/Analytic.py +++ b/src/vtlengine/Operators/Analytic.py @@ -29,7 +29,12 @@ VAR_POP, VAR_SAMP, ) -from vtlengine.DataTypes import COMP_NAME_MAPPING, Integer, Number, unary_implicit_promotion +from vtlengine.DataTypes import ( + COMP_NAME_MAPPING, + Integer, + Number, + unary_implicit_promotion, +) from vtlengine.Exceptions import SemanticError from vtlengine.Model import Component, Dataset, Role @@ -66,7 +71,10 @@ def validate( # type: ignore[override] for comp_name in partitioning: if comp_name not in operand.components: raise SemanticError( - "1-1-1-10", op=cls.op, comp_name=comp_name, dataset_name=operand.name + "1-1-1-10", + op=cls.op, + comp_name=comp_name, + dataset_name=operand.name, ) if comp_name not in identifier_names: raise SemanticError( @@ -78,7 +86,10 @@ def validate( # type: ignore[override] for comp_name in order_components: if comp_name not in operand.components: raise SemanticError( - "1-1-1-10", op=cls.op, comp_name=comp_name, dataset_name=operand.name + "1-1-1-10", + op=cls.op, + comp_name=comp_name, + dataset_name=operand.name, ) if component_name is not None: if cls.type_to_check is not None: diff --git a/src/vtlengine/Operators/Assignment.py b/src/vtlengine/Operators/Assignment.py index fe0b29f2..43244252 100644 --- a/src/vtlengine/Operators/Assignment.py +++ b/src/vtlengine/Operators/Assignment.py @@ -8,7 +8,6 @@ class Assignment(Binary): - @classmethod def validate(cls, left_operand: Any, right_operand: Any) -> ALL_MODEL_TYPES: if ( diff --git a/src/vtlengine/Operators/CastOperator.py b/src/vtlengine/Operators/CastOperator.py index 889b42e5..e69e9426 100644 --- a/src/vtlengine/Operators/CastOperator.py +++ b/src/vtlengine/Operators/CastOperator.py @@ -34,43 +34,43 @@ class Cast(Operator.Unary): # CASTS VALUES # Converts the value from one type to another in a way that is according to the mask @classmethod - def cast_string_to_number(cls, *args: Any) -> Any: + def cast_string_to_number(cls, value: Any, mask: str) -> Any: """ This method casts a string to a number, according to the mask. """ - raise NotImplementedError("How this cast should be implemented is not yet defined.") + raise NotImplementedError("How this mask should be implemented is not yet defined.") @classmethod - def cast_string_to_date(cls, *args: Any) -> Any: + def cast_string_to_date(cls, value: Any, mask: str) -> Any: """ This method casts a string to a number, according to the mask. """ - raise NotImplementedError("How this cast should be implemented is not yet defined.") + raise NotImplementedError("How this mask should be implemented is not yet defined.") @classmethod - def cast_string_to_duration(cls, *args: Any) -> Any: + def cast_string_to_duration(cls, value: Any, mask: str) -> Any: """ This method casts a string to a duration, according to the mask. 
""" - raise NotImplementedError("How this cast should be implemented is not yet defined.") + raise NotImplementedError("How this mask should be implemented is not yet defined.") @classmethod - def cast_string_to_time_period(cls, *args: Any) -> Any: + def cast_string_to_time_period(cls, value: Any, mask: str) -> Any: """ This method casts a string to a time period, according to the mask. """ - raise NotImplementedError("How this cast should be implemented is not yet defined.") + raise NotImplementedError("How this mask should be implemented is not yet defined.") @classmethod - def cast_string_to_time(cls, *args: Any) -> Any: + def cast_string_to_time(cls, value: Any, mask: str) -> Any: """ This method casts a string to a time, according to the mask. @@ -78,20 +78,21 @@ def cast_string_to_time(cls, *args: Any) -> Any: raise NotImplementedError("How this cast should be implemented is not yet defined.") - @classmethod - def cast_date_to_string(cls, *args: Any) -> Any: - """ """ - return NotImplementedError("How this cast should be implemented is not yet defined.") - - @classmethod - def cast_duration_to_string(cls, *args: Any) -> Any: - """ """ - return NotImplementedError("How this cast should be implemented is not yet defined.") - - @classmethod - def cast_time_to_string(cls, *args: Any) -> Any: - """ """ - return NotImplementedError("How this cast should be implemented is not yet defined.") + # + # @classmethod + # def cast_date_to_string(cls, value: Any, mask: str) -> Any: + # """ """ + # return NotImplementedError("How this cast should be implemented is not yet defined.") + # + # @classmethod + # def cast_duration_to_string(cls, value: Any, mask: str) -> Any: + # """ """ + # return NotImplementedError("How this cast should be implemented is not yet defined.") + # + # @classmethod + # def cast_time_to_string(cls, value: Any, mask: str) -> Any: + # """ """ + # return NotImplementedError("How this cast should be implemented is not yet defined.") @classmethod def cast_time_period_to_date(cls, value: Any, mask_value: str) -> Any: @@ -142,7 +143,6 @@ def check_mask_value( @classmethod def check_mask_value_from_time_period_to_date(cls, mask_value: str) -> None: - if mask_value not in ["START", "END"]: raise SemanticError("1-1-5-4", op=cls.op, type_1="Time_Period", type_2="Date") @@ -180,9 +180,11 @@ def check_mask_value_from_duration_to_string(cls, *args: Any) -> None: @classmethod def check_cast( - cls, from_type: Type[ScalarType], to_type: Type[ScalarType], mask_value: Optional[str] + cls, + from_type: Type[ScalarType], + to_type: Type[ScalarType], + mask_value: Optional[str], ) -> None: - if mask_value is not None: cls.check_with_mask(from_type, to_type, mask_value) else: @@ -192,7 +194,6 @@ def check_cast( def check_with_mask( cls, from_type: Type[ScalarType], to_type: Type[ScalarType], mask_value: str ) -> None: - explicit_promotion = EXPLICIT_WITH_MASK_TYPE_PROMOTION_MAPPING[from_type] if to_type.is_included(explicit_promotion): return cls.check_mask_value(from_type, to_type, mask_value) @@ -207,7 +208,6 @@ def check_with_mask( @classmethod def check_without_mask(cls, from_type: Type[ScalarType], to_type: Type[ScalarType]) -> None: - explicit_promotion = EXPLICIT_WITHOUT_MASK_TYPE_PROMOTION_MAPPING[from_type] implicit_promotion = IMPLICIT_TYPE_PROMOTION_MAPPING[from_type] if not (to_type.is_included(explicit_promotion) or to_type.is_included(implicit_promotion)): @@ -231,7 +231,7 @@ def cast_component( cls, data: Any, from_type: Type[ScalarType], to_type: Type[ScalarType] ) -> Any: """ - 
cast the component to the type to_type without mask + Cast the component to the type to_type without mask """ if to_type.is_included(IMPLICIT_TYPE_PROMOTION_MAPPING[from_type]): @@ -242,15 +242,17 @@ def cast_component( @classmethod def cast_mask_component(cls, data: Any, from_type: Any, to_type: Any, mask: str) -> Any: - result = data.map(lambda x: cls.cast_value(x, from_type, to_type, mask), na_action="ignore") return result @classmethod def cast_value( - cls, value: Any, provided_type: Type[ScalarType], to_type: Type[ScalarType], mask_value: str + cls, + value: Any, + provided_type: Type[ScalarType], + to_type: Type[ScalarType], + mask_value: str, ) -> Any: - if provided_type == String and to_type == Number: return cls.cast_string_to_number(value, mask_value) if provided_type == String and to_type == Date: @@ -261,12 +263,12 @@ def cast_value( return cls.cast_string_to_time_period(value, mask_value) if provided_type == String and to_type == TimeInterval: return cls.cast_string_to_time(value, mask_value) - if provided_type == Date and to_type == String: - return cls.cast_date_to_string(value, mask_value) - if provided_type == Duration and to_type == String: - return cls.cast_duration_to_string(value, mask_value) - if provided_type == TimeInterval and to_type == String: - return cls.cast_time_to_string(value, mask_value) + # if provided_type == Date and to_type == String: + # return cls.cast_date_to_string(value, mask_value) + # if provided_type == Duration and to_type == String: + # return cls.cast_duration_to_string(value, mask_value) + # if provided_type == TimeInterval and to_type == String: + # return cls.cast_time_to_string(value, mask_value) if provided_type == TimePeriod and to_type == Date: return cls.cast_time_period_to_date(value, mask_value) @@ -285,7 +287,6 @@ def validate( # type: ignore[override] scalarType: Type[ScalarType], mask: Optional[str] = None, ) -> Any: - if mask is not None and not isinstance(mask, str): raise Exception(f"{cls.op} mask must be a string") @@ -325,7 +326,10 @@ def dataset_validation( # type: ignore[override] else: measure_name = measure.name result_components[measure_name] = Component( - name=measure_name, data_type=to_type, role=Role.MEASURE, nullable=measure.nullable + name=measure_name, + data_type=to_type, + role=Role.MEASURE, + nullable=measure.nullable, ) return Dataset(name="result", components=result_components, data=None) @@ -366,7 +370,6 @@ def evaluate( # type: ignore[override] scalarType: Type[ScalarType], mask: Optional[str] = None, ) -> Any: - if isinstance(operand, Dataset): return cls.dataset_evaluation(operand, scalarType, mask) if isinstance(operand, Scalar): @@ -381,7 +384,6 @@ def dataset_evaluation( # type: ignore[override] to_type: Type[ScalarType], mask: Optional[str] = None, ) -> Dataset: - from_type = operand.get_measures()[0].data_type original_measure = operand.get_measures()[0] result_dataset = cls.dataset_validation(operand, to_type, mask) @@ -410,7 +412,6 @@ def scalar_evaluation( # type: ignore[override] to_type: Type[ScalarType], mask: Optional[str] = None, ) -> Scalar: - from_type = operand.data_type result_scalar = cls.scalar_validation(operand, to_type, mask) if pd.isna(operand.value): @@ -431,7 +432,6 @@ def component_evaluation( # type: ignore[override] to_type: Type[ScalarType], mask: Optional[str] = None, ) -> DataComponent: - from_type = operand.data_type result_component = cls.component_validation(operand, to_type, mask) if mask: diff --git a/src/vtlengine/Operators/Clause.py 
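[editor note] The cast_value chain above dispatches on (provided_type, to_type) pairs, with the unimplemented *-to-string branches now commented out. The same dispatch can be read as a lookup table; a sketch of that reading, where the string type keys, the ISO mask, and the single handler are all illustrative rather than the engine's real API:

```python
from datetime import date
from typing import Any, Callable, Dict, Tuple


def cast_string_to_date(value: str, mask: str) -> date:
    # assume an ISO-style "YYYY-MM-DD" mask for this sketch
    return date.fromisoformat(value)


CASTS: Dict[Tuple[str, str], Callable[[Any, str], Any]] = {
    ("String", "Date"): cast_string_to_date,
}


def cast_value(value: Any, from_type: str, to_type: str, mask: str) -> Any:
    try:
        return CASTS[(from_type, to_type)](value, mask)
    except KeyError:
        raise NotImplementedError(f"cast {from_type} -> {to_type} with a mask")


print(cast_value("2024-01-31", "String", "Date", "YYYY-MM-DD"))  # 2024-01-31
```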
b/src/vtlengine/Operators/Clause.py index 10f43da4..926a1e9f 100644 --- a/src/vtlengine/Operators/Clause.py +++ b/src/vtlengine/Operators/Clause.py @@ -22,12 +22,10 @@ class Calc(Operator): @classmethod def validate(cls, operands: List[Union[DataComponent, Scalar]], dataset: Dataset) -> Dataset: - result_components = {name: copy(comp) for name, comp in dataset.components.items()} result_dataset = Dataset(name=dataset.name, components=result_components, data=None) for operand in operands: - if operand.name in result_dataset.components: if result_dataset.components[operand.name].role == Role.IDENTIFIER: raise SemanticError("1-1-6-13", op=cls.op, comp_name=operand.name) @@ -72,7 +70,6 @@ class Aggregate(Operator): @classmethod def validate(cls, operands: List[Union[DataComponent, Scalar]], dataset: Dataset) -> Dataset: - result_dataset = Dataset(name=dataset.name, components=dataset.components, data=None) for operand in operands: @@ -121,7 +118,6 @@ def evaluate(cls, operands: List[Union[DataComponent, Scalar]], dataset: Dataset class Filter(Operator): - @classmethod def validate(cls, condition: DataComponent, dataset: Dataset) -> Dataset: if condition.data_type != Boolean: @@ -212,11 +208,17 @@ def validate(cls, operands: List[RenameNode], dataset: Dataset) -> Dataset: for operand in operands: if operand.old_name not in dataset.components: raise SemanticError( - "1-1-1-10", op=cls.op, comp_name=operand.old_name, dataset_name=dataset.name + "1-1-1-10", + op=cls.op, + comp_name=operand.old_name, + dataset_name=dataset.name, ) if operand.new_name in dataset.components: raise SemanticError( - "1-1-6-8", op=cls.op, comp_name=operand.new_name, dataset_name=dataset.name + "1-1-6-8", + op=cls.op, + comp_name=operand.new_name, + dataset_name=dataset.name, ) result_components = {comp.name: comp for comp in dataset.components.values()} @@ -242,7 +244,6 @@ def evaluate(cls, operands: List[RenameNode], dataset: Dataset) -> Dataset: class Pivot(Operator): - @classmethod def validate(cls, operands: List[str], dataset: Dataset) -> Dataset: raise NotImplementedError @@ -253,7 +254,6 @@ def evaluate(cls, operands: List[str], dataset: Dataset) -> Dataset: class Unpivot(Operator): - @classmethod def validate(cls, operands: List[str], dataset: Dataset) -> Dataset: if len(operands) != 2: @@ -311,11 +311,17 @@ def validate(cls, operands: List[DataComponent], dataset: Dataset) -> Dataset: for operand in operands: if operand.name not in dataset.components: raise SemanticError( - "1-1-1-10", op=cls.op, comp_name=operand.name, dataset_name=dataset.name + "1-1-1-10", + op=cls.op, + comp_name=operand.name, + dataset_name=dataset.name, ) if operand.role != Role.IDENTIFIER: raise SemanticError( - "1-1-6-10", op=cls.op, operand=operand.name, dataset_name=dataset.name + "1-1-6-10", + op=cls.op, + operand=operand.name, + dataset_name=dataset.name, ) if isinstance(operand, Scalar): raise SemanticError("1-1-6-5", op=cls.op, name=operand.name) diff --git a/src/vtlengine/Operators/Comparison.py b/src/vtlengine/Operators/Comparison.py index e47cffa0..5274b3eb 100644 --- a/src/vtlengine/Operators/Comparison.py +++ b/src/vtlengine/Operators/Comparison.py @@ -74,10 +74,11 @@ class Binary(Operator.Binary): return_type = Boolean @classmethod - def _cast_values(cls, - x: Optional[Union[int, float, str, bool]], - y: Optional[Union[int, float, str, bool]] - ) -> Any: + def _cast_values( + cls, + x: Optional[Union[int, float, str, bool]], + y: Optional[Union[int, float, str, bool]], + ) -> Any: # Cast values to compatible types for 
comparison try: if isinstance(x, str) and isinstance(y, bool): @@ -247,9 +248,7 @@ def op_func( z: Optional[Union[int, float, bool, str]], ) -> Optional[bool]: return ( - None - if (pd.isnull(x) or pd.isnull(y) or pd.isnull(z)) - else y <= x <= z # type: ignore[operator] + None if (pd.isnull(x) or pd.isnull(y) or pd.isnull(z)) else y <= x <= z # type: ignore[operator] ) @classmethod @@ -264,7 +263,8 @@ def apply_operation_component(cls, series: Any, from_data: Any, to_data: Any) -> to_data = pd.Series(to_data, index=series.index) df = pd.DataFrame({"operand": series, "from_data": from_data, "to_data": to_data}) return df.apply( - lambda x: cls.op_func(x["operand"], x["from_data"], x["to_data"]), axis=1 + lambda x: cls.op_func(x["operand"], x["from_data"], x["to_data"]), + axis=1, ) return series.map(lambda x: cls.op_func(x, from_data, to_data)) @@ -310,13 +310,19 @@ def validate( result = Dataset(name=operand.name, components=result_components, data=None) elif isinstance(operand, DataComponent): result = DataComponent( - name=operand.name, data=None, data_type=cls.return_type, role=operand.role + name=operand.name, + data=None, + data_type=cls.return_type, + role=operand.role, ) elif isinstance(from_, Scalar) and isinstance(to, Scalar): result = Scalar(name=operand.name, value=None, data_type=cls.return_type) else: # From or To is a DataComponent, or both result = DataComponent( - name=operand.name, data=None, data_type=cls.return_type, role=Role.MEASURE + name=operand.name, + data=None, + data_type=cls.return_type, + role=Role.MEASURE, ) if isinstance(operand, Dataset): @@ -369,14 +375,16 @@ def evaluate( elif isinstance(operand, Scalar) and ( isinstance(from_data, pd.Series) or isinstance(to_data, pd.Series) ): # From or To is a DataComponent, or both - if isinstance(from_data, pd.Series): series = pd.Series(operand.value, index=from_data.index, dtype=object) elif isinstance(to_data, pd.Series): series = pd.Series(operand.value, index=to_data.index, dtype=object) result_series = cls.apply_operation_component(series, from_data, to_data) result = DataComponent( - name=operand.name, data=result_series, data_type=cls.return_type, role=Role.MEASURE + name=operand.name, + data=result_series, + data_type=cls.return_type, + role=Role.MEASURE, ) return result diff --git a/src/vtlengine/Operators/Conditional.py b/src/vtlengine/Operators/Conditional.py index 260019c7..c0367f1e 100644 --- a/src/vtlengine/Operators/Conditional.py +++ b/src/vtlengine/Operators/Conditional.py @@ -39,7 +39,7 @@ class If(Operator): validate: Class method that has two branches so datacomponent and datasets can be validated. With datacomponent, the code reviews if it is actually a Measure and if it is a binary operation. Dataset branch reviews if the identifiers are the same in 'if', 'then' and 'else'. 
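[editor note] The Between op_func above keeps VTL's three-valued logic: if any of the operand, lower bound, or upper bound is null the result is null, otherwise it is the chained comparison. A stand-alone check of that behavior:

```python
import pandas as pd


def between(x, lo, hi):
    return None if (pd.isnull(x) or pd.isnull(lo) or pd.isnull(hi)) else lo <= x <= hi


values = pd.Series([1, 5, None], dtype=object)
print(values.map(lambda v: between(v, 2, 6)).tolist())  # [False, True, None]
```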
- """ # noqa E501 + """ # noqa E501 @classmethod def evaluate(cls, condition: Any, true_branch: Any, false_branch: Any) -> Any: @@ -66,7 +66,7 @@ def component_level_evaluation( else: false_data = false_branch.data.reindex(condition.data.index) result = np.where(condition.data, true_data, false_data) - return pd.Series(result, index=condition.data.index) + return pd.Series(result, index=condition.data.index) # type: ignore[union-attr] @classmethod def dataset_level_evaluation( @@ -80,7 +80,11 @@ def dataset_level_evaluation( if isinstance(true_branch, Dataset): if len(true_data) > 0 and true_branch.data is not None: true_data = pd.merge( - true_data, true_branch.data, on=ids, how="right", suffixes=("_condition", "") + true_data, + true_branch.data, + on=ids, + how="right", + suffixes=("_condition", ""), ) else: true_data = pd.DataFrame(columns=true_branch.get_components_names()) @@ -91,7 +95,11 @@ def dataset_level_evaluation( if isinstance(false_branch, Dataset): if len(false_data) > 0 and false_branch.data is not None: false_data = pd.merge( - false_data, false_branch.data, on=ids, how="right", suffixes=("_condition", "") + false_data, + false_branch.data, + on=ids, + how="right", + suffixes=("_condition", ""), ) else: false_data = pd.DataFrame(columns=false_branch.get_components_names()) @@ -141,7 +149,9 @@ def validate( # noqa: C901 if isinstance(condition, DataComponent): if not condition.data_type == Boolean: raise SemanticError( - "1-1-9-11", op=cls.op, type=SCALAR_TYPES_CLASS_REVERSE[condition.data_type] + "1-1-9-11", + op=cls.op, + type=SCALAR_TYPES_CLASS_REVERSE[condition.data_type], ) if not isinstance(left, Scalar) or not isinstance(right, Scalar): nullable = condition.nullable @@ -191,7 +201,8 @@ def validate( # noqa: C901 if component.data_type != right.components[component.name].data_type: component.data_type = right.components[component.name].data_type = ( binary_implicit_promotion( - component.data_type, right.components[component.name].data_type + component.data_type, + right.components[component.name].data_type, ) ) if isinstance(condition, Dataset): @@ -219,7 +230,7 @@ class Nvl(Binary): Validate: Class method that validates if the operation at scalar, datacomponent or dataset level can be performed. Evaluate: Evaluates the actual operation, returning the result. 
- """ # noqa E501 + """ # noqa E501 @classmethod def evaluate(cls, left: Any, right: Any) -> Union[Scalar, DataComponent, Dataset]: @@ -290,13 +301,16 @@ def validate(cls, left: Any, right: Any) -> Union[Scalar, DataComponent, Dataset class Case(Operator): - @classmethod def evaluate( cls, conditions: List[Any], thenOps: List[Any], elseOp: Any ) -> Union[Scalar, DataComponent, Dataset]: - result = cls.validate(conditions, thenOps, elseOp) + for condition in conditions: + if isinstance(condition, (DataComponent, Dataset)): + condition.data.fillna(False, inplace=True) # type: ignore[union-attr] + elif isinstance(condition, Scalar) and condition.value is None: + condition.value = False if isinstance(result, Scalar): result.value = elseOp.value @@ -309,8 +323,10 @@ def evaluate( for i, condition in enumerate(conditions): value = thenOps[i].value if isinstance(thenOps[i], Scalar) else thenOps[i].data - result.data = np.where( - condition.data, value, result.data # type: ignore[call-overload] + result.data = np.where( # type: ignore[call-overload] + condition.data.notna(), + np.where(condition.data, value, result.data), # type: ignore[call-overload] + result.data, ) condition_mask_else = ~np.any([condition.data for condition in conditions], axis=0) @@ -349,7 +365,7 @@ def evaluate( ] ) - result.data.loc[condition_mask_else, columns] = ( + result.data.loc[condition_mask_else, columns] = ( # type: ignore[index] elseOp.value if isinstance(elseOp, Scalar) else elseOp.data.loc[condition_mask_else, columns] @@ -361,7 +377,6 @@ def evaluate( def validate( cls, conditions: List[Any], thenOps: List[Any], elseOp: Any ) -> Union[Scalar, DataComponent, Dataset]: - if len(set(map(type, conditions))) > 1: raise SemanticError("2-1-9-1", op=cls.op) @@ -395,9 +410,10 @@ def validate( raise SemanticError("2-1-9-4", op=cls.op, name=condition.name) nullable = any( - thenOp.nullable if isinstance(thenOp, DataComponent) else thenOp.data_type == Null + (thenOp.nullable if isinstance(thenOp, DataComponent) else thenOp.data_type == Null) for thenOp in ops ) + nullable |= any(condition.nullable for condition in conditions) data_type = ops[0].data_type for op in ops[1:]: diff --git a/src/vtlengine/Operators/General.py b/src/vtlengine/Operators/General.py index 9cdb1893..0356bdcd 100644 --- a/src/vtlengine/Operators/General.py +++ b/src/vtlengine/Operators/General.py @@ -23,7 +23,10 @@ class Membership(Binary): def validate(cls, left_operand: Any, right_operand: Any) -> Dataset: if right_operand not in left_operand.components: raise SemanticError( - "1-1-1-10", op=cls.op, comp_name=right_operand, dataset_name=left_operand.name + "1-1-1-10", + op=cls.op, + comp_name=right_operand, + dataset_name=left_operand.name, ) component = left_operand.components[right_operand] @@ -48,7 +51,10 @@ def validate(cls, left_operand: Any, right_operand: Any) -> Dataset: @classmethod def evaluate( - cls, left_operand: Dataset, right_operand: str, is_from_component_assignment: bool = False + cls, + left_operand: Dataset, + right_operand: str, + is_from_component_assignment: bool = False, ) -> Union[DataComponent, Dataset]: result_dataset = cls.validate(left_operand, right_operand) if left_operand.data is not None: @@ -128,7 +134,6 @@ def validate( # type: ignore[override] external_routine: ExternalRoutine, output: Dataset, ) -> Dataset: - empty_data_dict = {} for ds_name in external_routine.dataset_names: if ds_name not in operands: diff --git a/src/vtlengine/Operators/HROperators.py b/src/vtlengine/Operators/HROperators.py index 
9415ec73..556dad36 100644 --- a/src/vtlengine/Operators/HROperators.py +++ b/src/vtlengine/Operators/HROperators.py @@ -24,7 +24,6 @@ def get_measure_from_dataset(dataset: Dataset, code_item: str) -> DataComponent: class HRComparison(Operators.Binary): - @classmethod def imbalance_func(cls, x: Any, y: Any) -> Any: if pd.isnull(x) or pd.isnull(y): @@ -44,8 +43,8 @@ def hr_func(left_series: Any, right_series: Any, hr_mode: str) -> Any: result[mask_remove] = "REMOVE_VALUE" result[mask_null] = None elif hr_mode == "non_null": - mask_remove = left_series.isnull() | right_series.isnull() - result[mask_remove] = "REMOVE_VALUE" + mask_remove = left_series.isnull() | right_series.isnull() + result[mask_remove] = "REMOVE_VALUE" elif hr_mode == "non_zero": mask_remove = (left_series == 0) & (right_series == 0) result[mask_remove] = "REMOVE_VALUE" @@ -66,11 +65,7 @@ def apply_hr_func(cls, left_series: Any, right_series: Any, hr_mode: str, func: return result @classmethod - def validate(cls, - left_operand: Dataset, - right_operand: DataComponent, - hr_mode: str - ) -> Dataset: + def validate(cls, left_operand: Dataset, right_operand: DataComponent, hr_mode: str) -> Dataset: result_components = { comp_name: copy(comp) for comp_name, comp in left_operand.components.items() @@ -136,7 +131,6 @@ class HRLessEqual(HRComparison): class HRBinNumeric(Operators.Binary): - @classmethod def op_func(cls, x: Any, y: Any) -> Any: if not pd.isnull(x) and x == "REMOVE_VALUE": @@ -166,7 +160,6 @@ class HRBinMinus(HRBinNumeric): class HRUnNumeric(Operators.Unary): - @classmethod def evaluate(cls, operand: DataComponent) -> DataComponent: # type: ignore[override] result_data = cls.apply_operation_component(operand.data) @@ -190,7 +183,6 @@ class HRUnMinus(HRUnNumeric): class HAAssignment(Operators.Binary): - @classmethod def validate(cls, left: Dataset, right: DataComponent, hr_mode: str) -> Dataset: result_components = {comp_name: copy(comp) for comp_name, comp in left.components.items()} diff --git a/src/vtlengine/Operators/Join.py b/src/vtlengine/Operators/Join.py index 7aadc0cb..cac502b2 100644 --- a/src/vtlengine/Operators/Join.py +++ b/src/vtlengine/Operators/Join.py @@ -70,7 +70,9 @@ def merge_components( comp.role = ( Role.IDENTIFIER if is_identifier - else Role.MEASURE if comp.role == Role.IDENTIFIER else comp.role + else Role.MEASURE + if comp.role == Role.IDENTIFIER + else comp.role ) if comp.name not in nullability: nullability[comp.name] = copy(comp.nullable) @@ -107,7 +109,8 @@ def merge_components( else: if component_name in using and component_name in merged_components: data_type = binary_implicit_promotion( - merged_components[component_name].data_type, component.data_type + merged_components[component_name].data_type, + component.data_type, ) component.data_type = data_type merged_components[component_name] = component @@ -216,7 +219,6 @@ def validate(cls, operands: List[Dataset], using: Optional[List[str]]) -> Datase @classmethod def identifiers_validation(cls, operands: List[Dataset], using: Optional[List[str]]) -> None: - # (Case A) info = {op.name: op.get_identifiers_names() for op in operands} for op_name, identifiers in info.items(): @@ -224,11 +226,12 @@ def identifiers_validation(cls, operands: List[Dataset], using: Optional[List[st raise SemanticError("1-1-13-14", op=cls.op, name=op_name) for op_name, identifiers in info.items(): - if (using is None and op_name != cls.reference_dataset.name and not - set(identifiers).issubset(set(info[cls.reference_dataset.name]))): - missing_components = list( 
- set(identifiers) - set(info[cls.reference_dataset.name]) - ) + if ( + using is None + and op_name != cls.reference_dataset.name + and not set(identifiers).issubset(set(info[cls.reference_dataset.name])) + ): + missing_components = list(set(identifiers) - set(info[cls.reference_dataset.name])) raise SemanticError( "1-1-13-11", op=cls.op, @@ -277,7 +280,6 @@ class InnerJoin(Join): def generate_result_components( cls, operands: List[Dataset], using: Optional[List[str]] = None ) -> Dict[str, Component]: - if using is None: return super().generate_result_components(operands, using) @@ -334,7 +336,9 @@ def execute(cls, operands: List[Dataset], using: Optional[List[str]] = None) -> else: if result.data is not None: result.data = pd.merge( - result.data, op.data, how=cls.how # type: ignore[arg-type] + result.data, + op.data, + how=cls.how, # type: ignore[arg-type] ) if result.data is not None: result.data = result.data.rename( @@ -357,7 +361,6 @@ def identifiers_validation( class Apply(Operator): - @classmethod def evaluate(cls, dataset: Dataset, expression: Any, op_map: Dict[str, Any]) -> Dataset: for child in expression: @@ -424,9 +427,7 @@ def create_dataset(cls, name: str, prefix: str, dataset: Dataset) -> Dataset: return Dataset(name=name, components=components, data=data) @classmethod - def get_common_components( - cls, left: Dataset, right: Dataset - ) -> (Dataset, Dataset): # type: ignore[syntax] + def get_common_components(cls, left: Dataset, right: Dataset) -> (Dataset, Dataset): # type: ignore[syntax] common = set(left.get_components_names()) & set(right.get_components_names()) left.components = { comp.name: comp for comp in left.components.values() if comp.name in common diff --git a/src/vtlengine/Operators/Numeric.py b/src/vtlengine/Operators/Numeric.py index a197fa02..6f5ed08b 100644 --- a/src/vtlengine/Operators/Numeric.py +++ b/src/vtlengine/Operators/Numeric.py @@ -73,7 +73,7 @@ def op_func(cls, x: Any, y: Any) -> Any: class UnPlus(Unary): """ `Plus `_ unary operator - """ # noqa E501 + """ # noqa E501 op = PLUS py_op = operator.pos @@ -86,7 +86,7 @@ def apply_operation_component(cls, series: Any) -> Any: class UnMinus(Unary): """ `Minus `_unary operator - """ # noqa E501 + """ # noqa E501 op = MINUS py_op = operator.neg @@ -95,7 +95,7 @@ class UnMinus(Unary): class AbsoluteValue(Unary): """ `Absolute `_ unary operator - """ # noqa E501 + """ # noqa E501 op = ABS py_op = operator.abs @@ -104,7 +104,7 @@ class AbsoluteValue(Unary): class Exponential(Unary): """ `Exponential `_ unary operator - """ # noqa E501 + """ # noqa E501 op = EXP py_op = math.exp @@ -115,7 +115,7 @@ class NaturalLogarithm(Unary): """ `Natural logarithm `_ unary operator - """ # noqa E501 + """ # noqa E501 op = LN py_op = math.log @@ -126,7 +126,7 @@ class SquareRoot(Unary): """ `Square Root '_ unary operator - """ # noqa E501 + """ # noqa E501 op = SQRT py_op = math.sqrt @@ -136,7 +136,7 @@ class SquareRoot(Unary): class Ceil(Unary): """ `Ceilling `_ unary operator - """ # noqa E501 + """ # noqa E501 op = CEIL py_op = math.ceil @@ -146,7 +146,7 @@ class Ceil(Unary): class Floor(Unary): """ `Floor `_ unary operator - """ # noqa E501 + """ # noqa E501 op = FLOOR py_op = math.floor @@ -156,7 +156,7 @@ class Floor(Unary): class BinPlus(Binary): """ `Addition `_ binary operator - """ # noqa E501 + """ # noqa E501 op = PLUS py_op = operator.add @@ -166,7 +166,7 @@ class BinPlus(Binary): class BinMinus(Binary): """ `Subtraction `_ binary operator - """ # noqa E501 + """ # noqa E501 op = MINUS py_op = operator.sub 
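[editor note] The subcase A validation reformatted above (completed just before the Numeric hunks) enforces that, without a using clause, every non-reference dataset's identifiers are a subset of the reference dataset's; the missing components are what error 1-1-13-11 reports. A plain-Python sketch with invented dataset names:

```python
info = {
    "DS_ref": ["Id_1", "Id_2"],
    "DS_2": ["Id_1"],          # fine: subset of the reference identifiers
    "DS_3": ["Id_1", "Id_3"],  # not fine: Id_3 is absent from the reference
}
reference = "DS_ref"

for op_name, identifiers in info.items():
    if op_name != reference and not set(identifiers).issubset(info[reference]):
        missing_components = sorted(set(identifiers) - set(info[reference]))
        print(f"{op_name}: {missing_components} not found in {reference}")
        # -> DS_3: ['Id_3'] not found in DS_ref
```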
@@ -177,7 +177,7 @@ class Mult(Binary): """ `Multiplication `_ binary operator - """ # noqa E501 + """ # noqa E501 op = MULT py_op = operator.mul @@ -187,7 +187,7 @@ class Div(Binary): """ `Division `_ binary operator - """ # noqa E501 + """ # noqa E501 op = DIV py_op = operator.truediv @@ -197,7 +197,7 @@ class Div(Binary): class Logarithm(Binary): """ `Logarithm `_ operator - """ # noqa E501 + """ # noqa E501 op = LOG return_type = Number @@ -215,7 +215,7 @@ def py_op(cls, x: Any, param: Any) -> Any: class Modulo(Binary): """ `Modulo `_ operator - """ # noqa E501 + """ # noqa E501 op = MOD py_op = operator.mod @@ -224,7 +224,7 @@ class Modulo(Binary): class Power(Binary): """ `Power `_ operator - """ # noqa E501 + """ # noqa E501 op = POWER return_type = Number @@ -248,14 +248,15 @@ def validate( operand: Operator.ALL_MODEL_DATA_TYPES, param: Optional[Union[DataComponent, Scalar]] = None, ) -> Any: - if param is not None: if isinstance(param, Dataset): raise SemanticError("1-1-15-8", op=cls.op, comp_type="Dataset") if isinstance(param, DataComponent): if isinstance(operand, Scalar): raise SemanticError( - "1-1-15-8", op=cls.op, comp_type="DataComponent and a Scalar operand" + "1-1-15-8", + op=cls.op, + comp_type="DataComponent and a Scalar operand", ) cls.validate_type_compatibility(param.data_type) else: @@ -298,14 +299,19 @@ def dataset_evaluation( ) except ValueError: raise SemanticError( - "2-1-15-1", op=cls.op, comp_name=measure_name, dataset_name=operand.name + "2-1-15-1", + op=cls.op, + comp_name=measure_name, + dataset_name=operand.name, ) from None result.data = result.data[result.get_components_names()] return result @classmethod def component_evaluation( - cls, operand: DataComponent, param: Optional[Union[DataComponent, Scalar]] = None + cls, + operand: DataComponent, + param: Optional[Union[DataComponent, Scalar]] = None, ) -> DataComponent: result = cls.validate(operand, param) if operand.data is None: @@ -327,7 +333,9 @@ def scalar_evaluation(cls, operand: Scalar, param: Optional[Any] = None) -> Scal @classmethod def evaluate( - cls, operand: ALL_MODEL_DATA_TYPES, param: Optional[Union[DataComponent, Scalar]] = None + cls, + operand: ALL_MODEL_DATA_TYPES, + param: Optional[Union[DataComponent, Scalar]] = None, ) -> Union[DataComponent, Dataset, Scalar]: if isinstance(operand, Dataset): return cls.dataset_evaluation(operand, param) @@ -340,7 +348,7 @@ def evaluate( class Round(Parameterized): """ `Round `_ operator - """ # noqa E501 + """ # noqa E501 op = ROUND return_type = Integer @@ -365,7 +373,7 @@ def py_op(cls, x: Any, param: Any) -> Any: class Trunc(Parameterized): """ `Trunc `_ operator.
- """ # noqa E501 + """ # noqa E501 op = TRUNC @@ -384,14 +392,12 @@ def py_op(cls, x: float, param: Optional[float]) -> Any: class PseudoRandom(_random.Random): - def __init__(self, seed: Union[int, float]) -> None: super().__init__() self.seed(seed) class Random(Parameterized): - op = RANDOM return_type = Number diff --git a/src/vtlengine/Operators/RoleSetter.py b/src/vtlengine/Operators/RoleSetter.py index 3f73af55..c8938540 100644 --- a/src/vtlengine/Operators/RoleSetter.py +++ b/src/vtlengine/Operators/RoleSetter.py @@ -35,8 +35,12 @@ def validate(cls, operand: ALLOWED_MODEL_TYPES, data_size: int = 0) -> DataCompo @classmethod def evaluate(cls, operand: Any, data_size: int = 0) -> DataComponent: - if (isinstance(operand, DataComponent) and operand.data is not None and - not operand.nullable and any(operand.data.isnull())): + if ( + isinstance(operand, DataComponent) + and operand.data is not None + and not operand.nullable + and any(operand.data.isnull()) + ): raise SemanticError("1-1-1-16") result = cls.validate(operand, data_size) if isinstance(operand, Scalar): diff --git a/src/vtlengine/Operators/Set.py b/src/vtlengine/Operators/Set.py index 83be0c16..d83b9bfa 100644 --- a/src/vtlengine/Operators/Set.py +++ b/src/vtlengine/Operators/Set.py @@ -13,12 +13,14 @@ class Set(Operator): - @classmethod def check_same_structure(cls, dataset_1: Dataset, dataset_2: Dataset) -> None: if len(dataset_1.components) != len(dataset_2.components): raise SemanticError( - "1-1-17-1", op=cls.op, dataset_1=dataset_1.name, dataset_2=dataset_2.name + "1-1-17-1", + op=cls.op, + dataset_1=dataset_1.name, + dataset_2=dataset_2.name, ) for comp in dataset_1.components.values(): @@ -26,7 +28,10 @@ def check_same_structure(cls, dataset_1: Dataset, dataset_2: Dataset) -> None: raise Exception(f"Component {comp.name} not found in dataset {dataset_2.name}") second_comp = dataset_2.components[comp.name] binary_implicit_promotion( - comp.data_type, second_comp.data_type, cls.type_to_check, cls.return_type + comp.data_type, + second_comp.data_type, + cls.type_to_check, + cls.return_type, ) if comp.role != second_comp.role: raise Exception( @@ -36,7 +41,6 @@ def check_same_structure(cls, dataset_1: Dataset, dataset_2: Dataset) -> None: @classmethod def validate(cls, operands: List[Dataset]) -> Dataset: - base_operand = operands[0] for operand in operands[1:]: cls.check_same_structure(base_operand, operand) @@ -70,7 +74,6 @@ def evaluate(cls, operands: List[Dataset]) -> Dataset: class Intersection(Set): - @classmethod def evaluate(cls, operands: List[Dataset]) -> Dataset: result = cls.validate(operands) @@ -97,7 +100,6 @@ def evaluate(cls, operands: List[Dataset]) -> Dataset: class Symdiff(Set): - @classmethod def evaluate(cls, operands: List[Dataset]) -> Dataset: result = cls.validate(operands) @@ -110,7 +112,10 @@ def evaluate(cls, operands: List[Dataset]) -> Dataset: else: # Realiza la operación equivalente en pyspark.pandas result.data = result.data.merge( - data, how="outer", on=result.get_identifiers_names(), suffixes=("_x", "_y") + data, + how="outer", + on=result.get_identifiers_names(), + suffixes=("_x", "_y"), ) for measure in result.get_measures_names(): @@ -140,7 +145,6 @@ def evaluate(cls, operands: List[Dataset]) -> Dataset: class Setdiff(Set): - @staticmethod def has_null(row: Any) -> bool: return row.isnull().any() diff --git a/src/vtlengine/Operators/String.py b/src/vtlengine/Operators/String.py index 87ab9095..b830166b 100644 --- a/src/vtlengine/Operators/String.py +++ 
b/src/vtlengine/Operators/String.py @@ -31,7 +31,6 @@ class Unary(Operator.Unary): @classmethod def op_func(cls, x: Any) -> Any: - x = "" if pd.isnull(x) else str(x) return cls.py_op(x) @@ -116,7 +115,6 @@ class Concatenate(Binary): class Parameterized(Unary): - @classmethod def validate(cls, *args: Any) -> Any: operand: Operator.ALL_MODEL_DATA_TYPES @@ -353,7 +351,6 @@ def validate( param2: Optional[Operator.ALL_MODEL_DATA_TYPES] = None, param3: Optional[Operator.ALL_MODEL_DATA_TYPES] = None, ) -> Any: - if ( isinstance(param1, Dataset) or isinstance(param2, Dataset) @@ -395,7 +392,10 @@ def check_param(cls, param: Optional[Union[DataComponent, Scalar]], position: in else: if not check_unary_implicit_promotion(data_type, Integer): raise SemanticError( - "1-1-18-4", op=cls.op, param_type="Occurrence", correct_type="Integer" + "1-1-18-4", + op=cls.op, + param_type="Occurrence", + correct_type="Integer", ) if isinstance(param, DataComponent): if param.data is not None: @@ -408,9 +408,7 @@ def check_param_value(cls, param: Any, position: int) -> None: if position == 2 and not pd.isnull(param) and param < 1: raise SemanticError("1-1-18-4", op=cls.op, param_type="Start", correct_type=">= 1") elif position == 3 and not pd.isnull(param) and param < 1: - raise SemanticError( - "1-1-18-4", op=cls.op, param_type="Occurrence", correct_type=">= 1" - ) + raise SemanticError("1-1-18-4", op=cls.op, param_type="Occurrence", correct_type=">= 1") @classmethod def apply_operation_series_scalar( @@ -529,7 +527,6 @@ def op_func( # type: ignore[override] param2: Optional[Any], param3: Optional[Any], ) -> Any: - if pd.isnull(x): return None return cls.py_op(x, param1, param2, param3) @@ -560,7 +557,10 @@ def py_op( else: # OPERATORS_STRINGOPERATORS.93 raise SemanticError( - "1-1-18-4", op=cls.op, param_type="Occurrence", correct_type="Integer" + "1-1-18-4", + op=cls.op, + param_type="Occurrence", + correct_type="Integer", ) else: occurrence = 0 diff --git a/src/vtlengine/Operators/Time.py b/src/vtlengine/Operators/Time.py index c137a515..871c8b9a 100644 --- a/src/vtlengine/Operators/Time.py +++ b/src/vtlengine/Operators/Time.py @@ -98,7 +98,8 @@ def find_min_frequency(cls, differences: Any) -> str: months_deltas = differences.apply(lambda x: x.days // 30) days_deltas = differences.apply(lambda x: x.days) min_months = min( - (diff for diff in months_deltas if diff > 0 and diff % 12 != 0), default=None + (diff for diff in months_deltas if diff > 0 and diff % 12 != 0), + default=None, ) min_days = min( (diff for diff in days_deltas if diff > 0 and diff % 365 != 0 and diff % 366 != 0), @@ -118,7 +119,6 @@ def get_date_format(cls, date_str: Union[str, date]) -> str: class Unary(Time): - @classmethod def validate(cls, operand: Any) -> Any: if not isinstance(operand, Dataset): @@ -190,7 +190,10 @@ def validate(cls, operand: Any) -> Any: if comp.role == Role.IDENTIFIER } result_components["duration_var"] = Component( - name="duration_var", data_type=Duration, role=Role.MEASURE, nullable=True + name="duration_var", + data_type=Duration, + role=Role.MEASURE, + nullable=True, ) return Dataset(name="result", components=result_components, data=None) # DataComponent and Scalar validation @@ -202,7 +205,7 @@ def validate(cls, operand: Any) -> Any: @classmethod def evaluate( - cls, operand: Union[Dataset, DataComponent, Scalar, str] + cls, operand: Union[Dataset, DataComponent, Scalar, str] ) -> Union[Dataset, DataComponent, Scalar, str]: result = cls.validate(operand) if isinstance(operand, str): @@ -226,7 +229,6 @@ def 
evaluate( class Parametrized(Time): - @classmethod def validate(cls, operand: Any, param: Any) -> Any: pass @@ -237,14 +239,12 @@ def evaluate(cls, operand: Any, param: Any) -> Any: class Flow_to_stock(Unary): - @classmethod def py_op(cls, x: Any) -> Any: return x.cumsum().fillna(x) class Stock_to_flow(Unary): - @classmethod def py_op(cls, x: Any) -> Any: return x.diff().fillna(x) @@ -299,7 +299,6 @@ def validate(cls, operand: Dataset, fill_type: str) -> Dataset: @classmethod def max_min_from_period(cls, data: pd.DataFrame, mode: str = "all") -> Dict[str, Any]: - result_dict: Dict[Any, Any] = {} data = data.assign( Periods_col=data[cls.time_id].apply(cls._get_period), @@ -369,7 +368,10 @@ def period_filler(cls, data: pd.DataFrame, single: bool = False) -> pd.DataFrame else: if period in period_limits["min"] and period in period_limits["max"]: vals = list( - range(period_limits["min"][period], period_limits["max"][period] + 1) + range( + period_limits["min"][period], + period_limits["max"][period] + 1, + ) ) filled_data.extend( cls.fill_periods_rows(group_df, period, years, vals=vals) @@ -385,7 +387,11 @@ def period_filler(cls, data: pd.DataFrame, single: bool = False) -> pd.DataFrame @classmethod def fill_periods_rows( - cls, group_df: Any, period: str, years: List[int], vals: Optional[List[int]] = None + cls, + group_df: Any, + period: str, + years: List[int], + vals: Optional[List[int]] = None, ) -> List[Any]: rows = [] for year in years: @@ -398,7 +404,7 @@ def fill_periods_rows( @classmethod def create_period_row( - cls, group_df: Any, period: str, year: int, val: Optional[int] = None + cls, group_df: Any, period: str, year: int, val: Optional[int] = None ) -> Any: row = group_df.iloc[0].copy() row[cls.time_id] = f"{year}" if period == "A" else f"{year}-{period}{val:d}" @@ -436,9 +442,7 @@ def date_filler(cls, data: pd.DataFrame, fill_type: str, min_frequency: str) -> date_format = None filled_data = [] - def create_filled_dates( - group: Any, min_max: Dict[str, Any] - ) -> (pd.DataFrame, str): # type: ignore[syntax] + def create_filled_dates(group: Any, min_max: Dict[str, Any]) -> (pd.DataFrame, str): # type: ignore[syntax] date_range = pd.date_range(start=min_max["min"], end=min_max["max"], freq=min_frequency) date_df = pd.DataFrame(date_range, columns=[cls.time_id]) date_df[cls.other_ids] = group.iloc[0][cls.other_ids] @@ -480,7 +484,7 @@ def extract_max_min(group: Any) -> Dict[str, Any]: @classmethod def fill_time_intervals( - cls, data: pd.DataFrame, fill_type: str, frequency: str + cls, data: pd.DataFrame, fill_type: str, frequency: str ) -> pd.DataFrame: result_data = cls.time_filler(data, fill_type, frequency) not_na = result_data[cls.measures].notna().any(axis=1) @@ -588,7 +592,7 @@ def shift_dates(cls, dates: Any, shift_value: int, frequency: str) -> Any: @classmethod def shift_period( - cls, period_str: str, shift_value: int, frequency: Optional[int] = None + cls, period_str: str, shift_value: int, frequency: Optional[int] = None ) -> str: period_type = cls._get_period(period_str) @@ -642,7 +646,7 @@ def _check_params(cls, period_from: Optional[str], period_to: str) -> None: @classmethod def dataset_validation( - cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str + cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str ) -> Dataset: # TODO: Review with VTL TF as this makes no sense @@ -661,7 +665,10 @@ def dataset_validation( count_time_types += 1 if count_time_types != 1: raise SemanticError( - "1-1-19-9", op=cls.op, 
comp_type="dataset", param="single time identifier" + "1-1-19-9", + op=cls.op, + comp_type="dataset", + param="single time identifier", ) if count_time_types != 1: @@ -679,7 +686,11 @@ def dataset_validation( @classmethod def component_validation( - cls, operand: DataComponent, period_from: Optional[str], period_to: str, conf: str + cls, + operand: DataComponent, + period_from: Optional[str], + period_to: str, + conf: str, ) -> DataComponent: if operand.data_type not in cls.TIME_DATA_TYPES: raise SemanticError("1-1-19-8", op=cls.op, comp_type="time component") @@ -692,7 +703,7 @@ def component_validation( @classmethod def scalar_validation( - cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str + cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str ) -> Scalar: if operand.data_type not in cls.TIME_DATA_TYPES: raise SemanticError("1-1-19-8", op=cls.op, comp_type="time scalar") @@ -701,12 +712,12 @@ def scalar_validation( @classmethod def _execute_time_aggregation( - cls, - value: str, - data_type: Type[ScalarType], - period_from: Optional[str], - period_to: str, - conf: str, + cls, + value: str, + data_type: Type[ScalarType], + period_from: Optional[str], + period_to: str, + conf: str, ) -> str: if data_type == TimePeriod: # Time period return _time_period_access(value, period_to) @@ -722,7 +733,7 @@ def _execute_time_aggregation( @classmethod def dataset_evaluation( - cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str + cls, operand: Dataset, period_from: Optional[str], period_to: str, conf: str ) -> Dataset: result = cls.dataset_validation(operand, period_from, period_to, conf) result.data = operand.data.copy() if operand.data is not None else pd.DataFrame() @@ -738,7 +749,11 @@ def dataset_evaluation( @classmethod def component_evaluation( - cls, operand: DataComponent, period_from: Optional[str], period_to: str, conf: str + cls, + operand: DataComponent, + period_from: Optional[str], + period_to: str, + conf: str, ) -> DataComponent: result = cls.component_validation(operand, period_from, period_to, conf) if operand.data is not None: @@ -752,7 +767,7 @@ def component_evaluation( @classmethod def scalar_evaluation( - cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str + cls, operand: Scalar, period_from: Optional[str], period_to: str, conf: str ) -> Scalar: result = cls.scalar_validation(operand, period_from, period_to, conf) result.value = cls._execute_time_aggregation( @@ -762,11 +777,11 @@ def scalar_evaluation( @classmethod def validate( - cls, - operand: Union[Dataset, DataComponent, Scalar], - period_from: Optional[str], - period_to: str, - conf: str, + cls, + operand: Union[Dataset, DataComponent, Scalar], + period_from: Optional[str], + period_to: str, + conf: str, ) -> Union[Dataset, DataComponent, Scalar]: cls._check_params(period_from, period_to) if isinstance(operand, Dataset): @@ -778,11 +793,11 @@ def validate( @classmethod def evaluate( - cls, - operand: Union[Dataset, DataComponent, Scalar], - period_from: Optional[str], - period_to: str, - conf: str, + cls, + operand: Union[Dataset, DataComponent, Scalar], + period_from: Optional[str], + period_to: str, + conf: str, ) -> Union[Dataset, DataComponent, Scalar]: cls._check_params(period_from, period_to) if isinstance(operand, Dataset): @@ -809,7 +824,6 @@ def _date_access(v: str, to_param: str, start: bool) -> Any: class Current_Date(Time): - @classmethod def validate(cls) -> Scalar: return Scalar(name="current_date", 
data_type=Date, value=None) @@ -830,26 +844,30 @@ def validate_type_compatibility(cls, left: Any, right: Any) -> bool: if left == TimePeriod and right == Date: return False - return not (left == TimePeriod and right == Date) + return not (left == TimePeriod and right == Date) @classmethod def validate( - cls, left_operand: Union[Dataset, DataComponent, Scalar], - right_operand: Union[Dataset, DataComponent, Scalar] + cls, + left_operand: Union[Dataset, DataComponent, Scalar], + right_operand: Union[Dataset, DataComponent, Scalar], ) -> Union[Dataset, DataComponent, Scalar]: if isinstance(left_operand, Dataset) or isinstance(right_operand, Dataset): raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset") if not cls.validate_type_compatibility(left_operand.data_type, right_operand.data_type): raise SemanticError( - "1-1-1-2", type_1=left_operand.data_type, type_2=right_operand.data_type, - type_check=cls.type_to_check + "1-1-1-2", + type_1=left_operand.data_type, + type_2=right_operand.data_type, + type_check=cls.type_to_check, ) return super().validate(left_operand, right_operand) @classmethod def evaluate( - cls, left_operand: Union[Dataset, DataComponent, Scalar], - right_operand: Union[Dataset, DataComponent, Scalar] + cls, + left_operand: Union[Dataset, DataComponent, Scalar], + right_operand: Union[Dataset, DataComponent, Scalar], ) -> Union[Dataset, DataComponent, Scalar]: if isinstance(left_operand, Dataset) or isinstance(right_operand, Dataset): raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset") @@ -869,12 +887,12 @@ def py_op(cls, x: Any, y: Any) -> int: raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset") if x.count("-") == 2: - fecha1 = datetime.strptime(x, '%Y-%m-%d').date() + fecha1 = datetime.strptime(x, "%Y-%m-%d").date() else: fecha1 = TimePeriodHandler(x).end_date(as_date=True) # type: ignore[assignment] if y.count("-") == 2: - fecha2 = datetime.strptime(y, '%Y-%m-%d').date() + fecha2 = datetime.strptime(y, "%Y-%m-%d").date() else: fecha2 = TimePeriodHandler(y).end_date(as_date=True) # type: ignore[assignment] @@ -885,26 +903,31 @@ class Date_Add(Parametrized): op = DATE_ADD @classmethod - def validate(cls, - operand: Union[Scalar, DataComponent, Dataset], - param_list: List[Scalar] - ) -> Union[Scalar, DataComponent, Dataset]: - + def validate( + cls, operand: Union[Scalar, DataComponent, Dataset], param_list: List[Scalar] + ) -> Union[Scalar, DataComponent, Dataset]: expected_types = [Integer, String] for i, param in enumerate(param_list): - error = 12 if not isinstance(param, Scalar) else 13 if ( # type: ignore[redundant-expr] - param.data_type != expected_types[i]) else None + error = ( + 12 + if not isinstance(param, Scalar) # type: ignore[redundant-expr] + else 13 + if (param.data_type != expected_types[i]) + else None + ) if error is not None: - raise SemanticError(f"2-1-19-{error}", - op=cls.op, - type=param.__class__.__name__ if error == 12 else - param.data_type.__name__, - name="shiftNumber" if error == 12 else "periodInd", - expected="Scalar" if error == 12 else expected_types[i].__name__ - ) - - if (isinstance(operand, (Scalar, DataComponent)) and - operand.data_type not in [Date, TimePeriod]): + raise SemanticError( + f"2-1-19-{error}", + op=cls.op, + type=(param.__class__.__name__ if error == 12 else param.data_type.__name__), + name="shiftNumber" if error == 12 else "periodInd", + expected="Scalar" if error == 12 else expected_types[i].__name__, + ) + + if isinstance(operand, (Scalar, DataComponent)) and 
operand.data_type not in [ + Date, + TimePeriod, + ]: unary_implicit_promotion(operand.data_type, Date) if isinstance(operand, Scalar): @@ -914,31 +937,38 @@ def validate(cls, if all(comp.data_type not in [Date, TimePeriod] for comp in operand.components.values()): raise SemanticError("2-1-19-14", op=cls.op, name=operand.name) - return Dataset(name='result', components=operand.components.copy(), data=None) + return Dataset(name="result", components=operand.components.copy(), data=None) @classmethod - def evaluate(cls, - operand: Union[Scalar, DataComponent, Dataset], - param_list: List[Scalar] - ) -> Union[Scalar, DataComponent, Dataset]: + def evaluate( + cls, operand: Union[Scalar, DataComponent, Dataset], param_list: List[Scalar] + ) -> Union[Scalar, DataComponent, Dataset]: result = cls.validate(operand, param_list) shift, period = param_list[0].value, param_list[1].value is_tp = isinstance(operand, (Scalar, DataComponent)) and operand.data_type == TimePeriod if isinstance(result, Scalar) and isinstance(operand, Scalar) and operand.value is not None: result.value = cls.py_op(operand.value, shift, period, is_tp) - elif (isinstance(result, DataComponent) and isinstance(operand, DataComponent) and - operand.data is not None): - result.data = operand.data.map(lambda x: cls.py_op(x, shift, period, is_tp), - na_action="ignore") - elif (isinstance(result, Dataset) and isinstance(operand, Dataset) and - operand.data is not None): + elif ( + isinstance(result, DataComponent) + and isinstance(operand, DataComponent) + and operand.data is not None + ): + result.data = operand.data.map( + lambda x: cls.py_op(x, shift, period, is_tp), na_action="ignore" + ) + elif ( + isinstance(result, Dataset) + and isinstance(operand, Dataset) + and operand.data is not None + ): result.data = operand.data.copy() for measure in operand.get_measures(): if measure.data_type in [Date, TimePeriod]: result.data[measure.name] = result.data[measure.name].map( lambda x: cls.py_op(x, shift, period, measure.data_type == TimePeriod), - na_action="ignore") + na_action="ignore", + ) measure.data_type = Date if isinstance(result, (Scalar, DataComponent)): @@ -946,47 +976,46 @@ def evaluate(cls, return result @classmethod - def py_op(cls, - date_str: str, - shift: int, period: str, - is_tp: bool = False - ) -> str: + def py_op(cls, date_str: str, shift: int, period: str, is_tp: bool = False) -> str: if is_tp: tp_value = TimePeriodHandler(date_str) date = period_to_date(tp_value.year, tp_value.period_indicator, tp_value.period_number) else: date = datetime.strptime(date_str, "%Y-%m-%d") - if period in ['D', 'W']: - days_shift = shift * (7 if period == 'W' else 1) + if period in ["D", "W"]: + days_shift = shift * (7 if period == "W" else 1) return (date + timedelta(days=days_shift)).strftime("%Y-%m-%d") - month_shift = {'M': 1, 'Q': 3, 'S': 6, 'A': 12}[period] * shift + month_shift = {"M": 1, "Q": 3, "S": 6, "A": 12}[period] * shift new_year = date.year + (date.month - 1 + month_shift) // 12 new_month = (date.month - 1 + month_shift) % 12 + 1 last_day = (datetime(new_year, new_month % 12 + 1, 1) - timedelta(days=1)).day - return date.replace(year=new_year, month=new_month, - day=min(date.day, last_day)).strftime("%Y-%m-%d") + return date.replace(year=new_year, month=new_month, day=min(date.day, last_day)).strftime( + "%Y-%m-%d" + ) class SimpleUnaryTime(Operators.Unary): - @classmethod def validate( - cls, operand: Union[Dataset, DataComponent, Scalar] + cls, operand: Union[Dataset, DataComponent, Scalar] ) -> Union[Dataset, 
DataComponent, Scalar]: if isinstance(operand, Dataset): raise SemanticError("1-1-19-8", op=cls.op, comp_type="time dataset") # Limit the operand to Date and TimePeriod (cannot be implemented with type_to_check) - if operand.data_type == TimeInterval or operand.data_type not in (Date, TimePeriod): + if operand.data_type == TimeInterval or operand.data_type not in ( + Date, + TimePeriod, + ): raise SemanticError("1-1-19-10", op=cls.op) return super().validate(operand) @classmethod def evaluate( - cls, operand: Union[Dataset, DataComponent, Scalar] + cls, operand: Union[Dataset, DataComponent, Scalar] ) -> Union[Dataset, DataComponent, Scalar]: cls.validate(operand) return super().evaluate(operand) @@ -1040,7 +1069,9 @@ def py_op(cls, value: str) -> int: result = TimePeriodHandler(value).end_date(as_date=True) datetime_value = datetime( - year=result.year, month=result.month, day=result.day # type: ignore[union-attr] + year=result.year, # type: ignore[union-attr] + month=result.month, # type: ignore[union-attr] + day=result.day, # type: ignore[union-attr] ) return datetime_value.timetuple().tm_yday @@ -1089,7 +1120,7 @@ def py_op(cls, value: str) -> int: raise SemanticError("2-1-19-15", op=cls.op) index_y = value.index("Y") years = int(value[1:index_y]) - days = int(value[(index_y + 1): -1]) + days = int(value[(index_y + 1) : -1]) return years * 365 + days @@ -1105,5 +1136,5 @@ def py_op(cls, value: str) -> int: raise SemanticError("2-1-19-16", op=cls.op) index_m = value.index("M") months = int(value[1:index_m]) - days = int(value[(index_m + 1): -1]) + days = int(value[(index_m + 1) : -1]) return months * 30 + days diff --git a/src/vtlengine/Operators/Validation.py b/src/vtlengine/Operators/Validation.py index 57fe5439..175f0aa6 100644 --- a/src/vtlengine/Operators/Validation.py +++ b/src/vtlengine/Operators/Validation.py @@ -4,7 +4,13 @@ import pandas as pd from vtlengine.AST.Grammar.tokens import CHECK, CHECK_HIERARCHY -from vtlengine.DataTypes import Boolean, Integer, Number, String, check_unary_implicit_promotion +from vtlengine.DataTypes import ( + Boolean, + Integer, + Number, + String, + check_unary_implicit_promotion, +) from vtlengine.Exceptions import SemanticError from vtlengine.Model import Component, Dataset, Role from vtlengine.Operators import Operator @@ -103,7 +109,6 @@ def evaluate( # noinspection PyTypeChecker class Validation(Operator): - @classmethod def _generate_result_data(cls, rule_info: Dict[str, Any]) -> pd.DataFrame: rule_list_df = [] @@ -171,7 +176,6 @@ def evaluate(cls, dataset_element: Dataset, rule_info: Dict[str, Any], output: s elif output == "all": result.data = result.data[result.get_identifiers_names() + validation_measures] else: # output == 'all_measures' - result.data = result.data[ result.get_identifiers_names() + dataset_element.get_measures_names() @@ -230,7 +234,9 @@ def validate_hr_dataset(dataset: Dataset, component_name: str) -> None: ) if dataset.components[component_name].role != Role.IDENTIFIER: raise SemanticError( - "1-3-20", name=component_name, role=dataset.components[component_name].role.value + "1-3-20", + name=component_name, + role=dataset.components[component_name].role.value, ) # Remove attributes from dataset if len(dataset.get_attributes()) > 0: diff --git a/src/vtlengine/Operators/__init__.py b/src/vtlengine/Operators/__init__.py index 5c023667..37303635 100644 --- a/src/vtlengine/Operators/__init__.py +++ b/src/vtlengine/Operators/__init__.py @@ -8,7 +8,20 @@ # import pandas as pd import pandas as pd -from 
vtlengine.AST.Grammar.tokens import AND, CEIL, EQ, FLOOR, GT, GTE, LT, LTE, NEQ, OR, ROUND, XOR +from vtlengine.AST.Grammar.tokens import ( + AND, + CEIL, + EQ, + FLOOR, + GT, + GTE, + LT, + LTE, + NEQ, + OR, + ROUND, + XOR, +) from vtlengine.DataTypes import ( COMP_NAME_MAPPING, SCALAR_TYPES_CLASS_REVERSE, @@ -54,7 +67,6 @@ def analyze(cls, *args: Any, **kwargs: Any) -> Any: @classmethod def cast_time_types(cls, data_type: Any, series: Any) -> Any: - if cls.op not in BINARY_COMPARISON_OPERATORS: return series if data_type.__name__ == "TimeInterval": @@ -165,11 +177,11 @@ def apply_return_type(cls, *args: Any) -> None: def _id_type_promotion_join_keys( - c_left: Component, - c_right: Component, - join_key: str, - left_data: Optional[pd.DataFrame] = None, - right_data: Optional[pd.DataFrame] = None, + c_left: Component, + c_right: Component, + join_key: str, + left_data: Optional[pd.DataFrame] = None, + right_data: Optional[pd.DataFrame] = None, ) -> None: if left_data is None: left_data = pd.DataFrame() @@ -184,7 +196,7 @@ def _id_type_promotion_join_keys( right_data[join_key] = right_data[join_key].astype(object) return if (left_type_name == "Integer" and right_type_name == "Number") or ( - left_type_name == "Number" and right_type_name == "Integer" + left_type_name == "Number" and right_type_name == "Integer" ): left_data[join_key] = left_data[join_key].map(lambda x: int(float(x))) right_data[join_key] = right_data[join_key].map(lambda x: int(float(x))) @@ -209,7 +221,6 @@ def _handle_str_number(x: Union[str, int, float]) -> Union[str, int, float]: class Binary(Operator): - @classmethod def op_func(cls, *args: Any) -> Any: x, y = args @@ -220,7 +231,6 @@ def op_func(cls, *args: Any) -> Any: @classmethod def apply_operation_two_series(cls, left_series: Any, right_series: Any) -> Any: - if os.getenv("SPARK", False): if cls.spark_op is None: cls.spark_op = cls.py_op @@ -234,12 +244,11 @@ def apply_operation_two_series(cls, left_series: Any, right_series: Any) -> Any: @classmethod def apply_operation_series_scalar( - cls, - series: Any, - scalar: Scalar, - series_left: bool, + cls, + series: Any, + scalar: Scalar, + series_left: bool, ) -> Any: - if scalar is None: return pd.Series(None, index=series.index) if series_left: @@ -280,7 +289,6 @@ def validate(cls, *args: Any) -> Any: @classmethod def dataset_validation(cls, left_operand: Dataset, right_operand: Dataset) -> Dataset: - left_identifiers = left_operand.get_identifiers_names() right_identifiers = right_operand.get_identifiers_names() @@ -293,7 +301,10 @@ def dataset_validation(cls, left_operand: Dataset, right_operand: Dataset) -> Da if left_measures_names != right_measures_names: raise SemanticError( - "1-1-14-1", op=cls.op, left=left_measures_names, right=right_measures_names + "1-1-14-1", + op=cls.op, + left=left_measures_names, + right=right_measures_names, ) elif len(left_measures) == 0: raise SemanticError("1-1-1-8", op=cls.op, name=left_operand.name) @@ -331,7 +342,6 @@ def dataset_validation(cls, left_operand: Dataset, right_operand: Dataset) -> Da @classmethod def dataset_scalar_validation(cls, dataset: Dataset, scalar: Scalar) -> Dataset: - if len(dataset.get_measures()) == 0: raise SemanticError("1-1-1-8", op=cls.op, name=dataset.name) @@ -346,11 +356,12 @@ def dataset_scalar_validation(cls, dataset: Dataset, scalar: Scalar) -> Dataset: @classmethod def scalar_validation(cls, left_operand: Scalar, right_operand: Scalar) -> Scalar: - if not cls.validate_type_compatibility(left_operand.data_type, right_operand.data_type): 
raise SemanticError( - "1-1-1-2", type_1=left_operand.data_type, type_2=right_operand.data_type, - type_check=cls.type_to_check + "1-1-1-2", + type_1=left_operand.data_type, + type_2=right_operand.data_type, + type_check=cls.type_to_check, ) return Scalar( name="result", @@ -360,7 +371,7 @@ def scalar_validation(cls, left_operand: Scalar, right_operand: Scalar) -> Scala @classmethod def component_validation( - cls, left_operand: DataComponent, right_operand: DataComponent + cls, left_operand: DataComponent, right_operand: DataComponent ) -> DataComponent: """ Validates the compatibility between the types of the components and the operator @@ -382,7 +393,6 @@ def component_validation( @classmethod def component_scalar_validation(cls, component: DataComponent, scalar: Scalar) -> DataComponent: - cls.type_validation(component.data_type, scalar.data_type) result = DataComponent( name=component.name, @@ -395,7 +405,6 @@ def component_scalar_validation(cls, component: DataComponent, scalar: Scalar) - @classmethod def dataset_set_validation(cls, dataset: Dataset, scalar_set: ScalarSet) -> Dataset: - if len(dataset.get_measures()) == 0: raise SemanticError("1-1-1-8", op=cls.op, name=dataset.name) for measure in dataset.get_measures(): @@ -412,9 +421,8 @@ def dataset_set_validation(cls, dataset: Dataset, scalar_set: ScalarSet) -> Data @classmethod def component_set_validation( - cls, component: DataComponent, scalar_set: ScalarSet + cls, component: DataComponent, scalar_set: ScalarSet ) -> DataComponent: - cls.type_validation(component.data_type, scalar_set.data_type) result = DataComponent( name="result", @@ -427,7 +435,6 @@ def component_set_validation( @classmethod def scalar_set_validation(cls, scalar: Scalar, scalar_set: ScalarSet) -> Scalar: - cls.type_validation(scalar.data_type, scalar_set.data_type) return Scalar( name="result", @@ -468,7 +475,7 @@ def validate_type_compatibility(cls, left: Any, right: Any) -> bool: @classmethod def apply_return_type_dataset( - cls, result_dataset: Dataset, left_operand: Any, right_operand: Any + cls, result_dataset: Dataset, left_operand: Any, right_operand: Any ) -> None: """ Used in dataset's validation. 
@@ -498,9 +505,9 @@ def apply_return_type_dataset( if result_dataset.data is not None: result_dataset.data.rename(columns={measure.name: component.name}, inplace=True) elif ( - changed_allowed is False - and is_mono_measure is False - and left_type.promotion_changed_type(result_data_type) + changed_allowed is False + and is_mono_measure is False + and left_type.promotion_changed_type(result_data_type) ): raise SemanticError("1-1-1-4", op=cls.op) else: @@ -508,7 +515,6 @@ def apply_return_type_dataset( @classmethod def dataset_evaluation(cls, left_operand: Dataset, right_operand: Dataset) -> Dataset: - result_dataset = cls.dataset_validation(left_operand, right_operand) use_right_as_base = False @@ -587,16 +593,14 @@ def dataset_evaluation(cls, left_operand: Dataset, right_operand: Dataset) -> Da @classmethod def scalar_evaluation(cls, left_operand: Scalar, right_operand: Scalar) -> Scalar: - result_scalar = cls.scalar_validation(left_operand, right_operand) result_scalar.value = cls.op_func(left_operand.value, right_operand.value) return result_scalar @classmethod def dataset_scalar_evaluation( - cls, dataset: Dataset, scalar: Scalar, dataset_left: bool = True + cls, dataset: Dataset, scalar: Scalar, dataset_left: bool = True ) -> Dataset: - result_dataset = cls.dataset_scalar_validation(dataset, scalar) result_data = dataset.data.copy() if dataset.data is not None else pd.DataFrame() result_dataset.data = result_data @@ -606,7 +610,7 @@ def dataset_scalar_evaluation( for measure in dataset.get_measures(): measure_data = cls.cast_time_types(measure.data_type, result_data[measure.name].copy()) if measure.data_type.__name__.__str__() == "Duration" and not isinstance( - scalar_value, int + scalar_value, int ): scalar_value = DURATION_MAPPING[scalar_value] result_dataset.data[measure.name] = cls.apply_operation_series_scalar( @@ -621,9 +625,8 @@ def dataset_scalar_evaluation( @classmethod def component_evaluation( - cls, left_operand: DataComponent, right_operand: DataComponent + cls, left_operand: DataComponent, right_operand: DataComponent ) -> DataComponent: - result_component = cls.component_validation(left_operand, right_operand) left_data = cls.cast_time_types( left_operand.data_type, @@ -631,16 +634,15 @@ def component_evaluation( ) right_data = cls.cast_time_types( right_operand.data_type, - right_operand.data.copy() if right_operand.data is not None else pd.Series(), + (right_operand.data.copy() if right_operand.data is not None else pd.Series()), ) result_component.data = cls.apply_operation_two_series(left_data, right_data) return result_component @classmethod def component_scalar_evaluation( - cls, component: DataComponent, scalar: Scalar, component_left: bool = True + cls, component: DataComponent, scalar: Scalar, component_left: bool = True ) -> DataComponent: - result_component = cls.component_scalar_validation(component, scalar) comp_data = cls.cast_time_types( component.data_type, @@ -648,7 +650,7 @@ def component_scalar_evaluation( ) scalar_value = cls.cast_time_types_scalar(scalar.data_type, scalar.value) if component.data_type.__name__.__str__() == "Duration" and not isinstance( - scalar_value, int + scalar_value, int ): scalar_value = DURATION_MAPPING[scalar_value] result_component.data = cls.apply_operation_series_scalar( @@ -658,7 +660,6 @@ def component_scalar_evaluation( @classmethod def dataset_set_evaluation(cls, dataset: Dataset, scalar_set: ScalarSet) -> Dataset: - result_dataset = cls.dataset_set_validation(dataset, scalar_set) result_data = dataset.data.copy() if 
dataset.data is not None else pd.DataFrame() @@ -676,18 +677,17 @@ def dataset_set_evaluation(cls, dataset: Dataset, scalar_set: ScalarSet) -> Data @classmethod def component_set_evaluation( - cls, component: DataComponent, scalar_set: ScalarSet + cls, component: DataComponent, scalar_set: ScalarSet ) -> DataComponent: - result_component = cls.component_set_validation(component, scalar_set) result_component.data = cls.apply_operation_two_series( - component.data.copy() if component.data is not None else pd.Series(), scalar_set + component.data.copy() if component.data is not None else pd.Series(), + scalar_set, ) return result_component @classmethod def scalar_set_evaluation(cls, scalar: Scalar, scalar_set: ScalarSet) -> Scalar: - result_scalar = cls.scalar_set_validation(scalar, scalar_set) result_scalar.value = cls.op_func(scalar.value, scalar_set) return result_scalar @@ -726,7 +726,6 @@ def evaluate(cls, left_operand: Any, right_operand: Any) -> Any: class Unary(Operator): - @classmethod def op_func(cls, *args: Any) -> Any: x = args[0] @@ -758,7 +757,6 @@ def validate(cls, operand: Any) -> Any: @classmethod def dataset_validation(cls, operand: Dataset) -> Dataset: - cls.validate_dataset_type(operand) if len(operand.get_measures()) == 0: raise SemanticError("1-1-1-8", op=cls.op, name=operand.name) @@ -774,14 +772,12 @@ def dataset_validation(cls, operand: Dataset) -> Dataset: @classmethod def scalar_validation(cls, operand: Scalar) -> Scalar: - result_type = cls.type_validation(operand.data_type) result = Scalar(name="result", data_type=result_type, value=None) return result @classmethod def component_validation(cls, operand: DataComponent) -> DataComponent: - result_type = cls.type_validation(operand.data_type) result = DataComponent( name="result", @@ -795,18 +791,15 @@ def component_validation(cls, operand: DataComponent) -> DataComponent: # The following class method implements the type promotion @classmethod def type_validation(cls, operand: Any) -> Any: - return unary_implicit_promotion(operand, cls.type_to_check, cls.return_type) # The following class method checks the type promotion @classmethod def validate_type_compatibility(cls, operand: Any) -> bool: - return check_unary_implicit_promotion(operand, cls.type_to_check, cls.return_type) @classmethod def validate_dataset_type(cls, dataset: Dataset) -> None: - if cls.type_to_check is not None: for measure in dataset.get_measures(): if not cls.validate_type_compatibility(measure.data_type): @@ -820,7 +813,6 @@ def validate_dataset_type(cls, dataset: Dataset) -> None: @classmethod def validate_scalar_type(cls, scalar: Scalar) -> None: - if cls.type_to_check is not None and not cls.validate_type_compatibility(scalar.data_type): raise SemanticError( "1-1-1-5", @@ -831,7 +823,6 @@ def validate_scalar_type(cls, scalar: Scalar) -> None: @classmethod def apply_return_type_dataset(cls, result_dataset: Dataset, operand: Dataset) -> None: - changed_allowed = cls.op in MONOMEASURE_CHANGED_ALLOWED is_mono_measure = len(operand.get_measures()) == 1 for measure in result_dataset.get_measures(): @@ -850,9 +841,9 @@ def apply_return_type_dataset(cls, result_dataset: Dataset, operand: Dataset) -> if result_dataset.data is not None: result_dataset.data.rename(columns={measure.name: component.name}, inplace=True) elif ( - changed_allowed is False - and is_mono_measure is False - and operand_type.promotion_changed_type(result_data_type) + changed_allowed is False + and is_mono_measure is False + and 
operand_type.promotion_changed_type(result_data_type) ): raise SemanticError("1-1-1-4", op=cls.op) else: @@ -860,7 +851,6 @@ def apply_return_type_dataset(cls, result_dataset: Dataset, operand: Dataset) -> @classmethod def evaluate(cls, operand: ALL_MODEL_DATA_TYPES) -> Any: - if isinstance(operand, Dataset): return cls.dataset_evaluation(operand) if isinstance(operand, Scalar): @@ -870,7 +860,6 @@ def evaluate(cls, operand: ALL_MODEL_DATA_TYPES) -> Any: @classmethod def dataset_evaluation(cls, operand: Dataset) -> Dataset: - result_dataset = cls.dataset_validation(operand) result_data = operand.data.copy() if operand.data is not None else pd.DataFrame() for measure_name in operand.get_measures_names(): @@ -885,14 +874,12 @@ def dataset_evaluation(cls, operand: Dataset) -> Dataset: @classmethod def scalar_evaluation(cls, operand: Scalar) -> Scalar: - result_scalar = cls.scalar_validation(operand) result_scalar.value = cls.op_func(operand.value) return result_scalar @classmethod def component_evaluation(cls, operand: DataComponent) -> DataComponent: - result_component = cls.component_validation(operand) result_component.data = cls.apply_operation_component( operand.data.copy() if operand.data is not None else pd.Series() diff --git a/src/vtlengine/Utils/__init__.py b/src/vtlengine/Utils/__init__.py index 5ec70e1c..aef59ed2 100644 --- a/src/vtlengine/Utils/__init__.py +++ b/src/vtlengine/Utils/__init__.py @@ -339,7 +339,12 @@ APPLY: Apply, } -SET_MAPPING = {UNION: Union, INTERSECT: Intersection, SYMDIFF: Symdiff, SETDIFF: Setdiff} +SET_MAPPING = { + UNION: Union, + INTERSECT: Intersection, + SYMDIFF: Symdiff, + SETDIFF: Setdiff, +} AGGREGATION_MAPPING = { MAX: Max, diff --git a/src/vtlengine/files/output/__init__.py b/src/vtlengine/files/output/__init__.py index af8159e8..1d759a84 100644 --- a/src/vtlengine/files/output/__init__.py +++ b/src/vtlengine/files/output/__init__.py @@ -15,7 +15,6 @@ def save_datapoints( dataset: Dataset, output_path: Union[str, Path], ) -> None: - if dataset.data is None: dataset.data = pd.DataFrame() if time_period_representation is not None: diff --git a/src/vtlengine/files/parser/__init__.py b/src/vtlengine/files/parser/__init__.py index 982bb4d6..0b9d28b8 100644 --- a/src/vtlengine/files/parser/__init__.py +++ b/src/vtlengine/files/parser/__init__.py @@ -20,7 +20,11 @@ from vtlengine.DataTypes.TimeHandling import DURATION_MAPPING from vtlengine.Exceptions import InputValidationException, SemanticError from vtlengine.files.parser._rfc_dialect import register_rfc -from vtlengine.files.parser._time_checking import check_date, check_time, check_time_period +from vtlengine.files.parser._time_checking import ( + check_date, + check_time, + check_time_period, +) from vtlengine.Model import Component, Dataset, Role TIME_CHECKS_MAPPING: Dict[Type[ScalarType], Any] = { @@ -73,8 +77,11 @@ def _sanitize_pandas_columns( components: Dict[str, Component], csv_path: Union[str, Path], data: pd.DataFrame ) -> pd.DataFrame: # Fast loading from SDMX-CSV - if ("DATAFLOW" in data.columns and data.columns[0] == "DATAFLOW" and - "DATAFLOW" not in components): + if ( + "DATAFLOW" in data.columns + and data.columns[0] == "DATAFLOW" + and "DATAFLOW" not in components + ): data.drop(columns=["DATAFLOW"], inplace=True) if "STRUCTURE" in data.columns and data.columns[0] == "STRUCTURE": if "STRUCTURE" not in components: @@ -107,7 +114,11 @@ def _pandas_load_csv(components: Dict[str, Component], csv_path: Path) -> pd.Dat try: data = pd.read_csv( - csv_path, dtype=obj_dtypes, engine="c", 
keep_default_na=False, na_values=[""] + csv_path, + dtype=obj_dtypes, + engine="c", + keep_default_na=False, + na_values=[""], ) except UnicodeDecodeError: raise InputValidationException(code="0-1-2-5", file=csv_path.name) @@ -121,7 +132,11 @@ def _pandas_load_s3_csv(components: Dict[str, Component], csv_path: str) -> pd.D # start = time() try: data = pd.read_csv( - csv_path, dtype=obj_dtypes, engine="c", keep_default_na=False, na_values=[""] + csv_path, + dtype=obj_dtypes, + engine="c", + keep_default_na=False, + na_values=[""], ) except UnicodeDecodeError: @@ -165,7 +180,6 @@ def _validate_pandas( comp_name = "" comp = None try: - for comp_name, comp in components.items(): if comp.data_type in (Date, TimePeriod, TimeInterval): data[comp_name] = data[comp_name].map( @@ -184,7 +198,10 @@ def _validate_pandas( elif comp.data_type == Duration: values_correct = ( data[comp_name] - .map(lambda x: x.replace(" ", "") in DURATION_MAPPING, na_action="ignore") + .map( + lambda x: x.replace(" ", "") in DURATION_MAPPING, + na_action="ignore", + ) .all() ) if not values_correct: @@ -202,7 +219,9 @@ def _validate_pandas( def load_datapoints( - components: Dict[str, Component], dataset_name: str, csv_path: Optional[Union[Path, str]] = None + components: Dict[str, Component], + dataset_name: str, + csv_path: Optional[Union[Path, str]] = None, ) -> pd.DataFrame: if csv_path is None or (isinstance(csv_path, Path) and not csv_path.exists()): return pd.DataFrame(columns=list(components.keys())) diff --git a/src/vtlengine/files/parser/_rfc_dialect.py b/src/vtlengine/files/parser/_rfc_dialect.py index 4f2fa64f..6af73127 100644 --- a/src/vtlengine/files/parser/_rfc_dialect.py +++ b/src/vtlengine/files/parser/_rfc_dialect.py @@ -19,4 +19,4 @@ class RFCDialect(csv.Dialect): def register_rfc() -> None: """Register the RFC dialect.""" - csv.register_dialect("rfc", RFCDialect) + csv.register_dialect("rfc", RFCDialect) # type: ignore[arg-type] diff --git a/tests/API/test_S3.py b/tests/API/test_S3.py new file mode 100644 index 00000000..fa072ab0 --- /dev/null +++ b/tests/API/test_S3.py @@ -0,0 +1,130 @@ +from pathlib import Path +from unittest.mock import patch + +import pandas as pd +import pytest + +from vtlengine import DataTypes +from vtlengine.files.output import TimePeriodRepresentation, save_datapoints +from vtlengine.Model import Component, Dataset, Role + +base_path = Path(__file__).parent +filepath_output = base_path / "data" / "DataSet" / "output" + +params = [ + ( + Dataset( + name="test_dataset", + components={ + "Id_1": Component( + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, + ), + "Id_2": Component( + name="Id_2", + data_type=DataTypes.String, + role=Role.IDENTIFIER, + nullable=False, + ), + }, + data=pd.DataFrame(columns=["Id_1", "Id_2"]), + ), + filepath_output / "test_dataset.csv", + ), +] + + +@patch("pandas.DataFrame.to_csv") +def test_save_datapoints_without_data_mock(mock_csv): + dataset = Dataset( + name="test_dataset", + components={ + "Id_1": Component( + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, + ), + "Id_2": Component( + name="Id_2", + data_type=DataTypes.String, + role=Role.IDENTIFIER, + nullable=False, + ), + }, + data=None, + ) + output_path = "path/to/output" + + save_datapoints(None, dataset, output_path) + + expected_path = "path/to/output/test_dataset.csv" + mock_csv.assert_called_once_with(expected_path, index=False) + + +@patch("pandas.DataFrame.to_csv") +def 
test_save_datapoints_with_data_mock(mock_csv): + mock_data = pd.DataFrame(columns=["Id_1", "Id_2"]) + dataset = Dataset( + name="test_dataset", + components={ + "Id_1": Component( + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, + ), + "Id_2": Component( + name="Id_2", + data_type=DataTypes.String, + role=Role.IDENTIFIER, + nullable=False, + ), + }, + data=mock_data, + ) + output_path = "path/to/output/" + + save_datapoints(None, dataset, output_path) + + expected_path = "path/to/output/test_dataset.csv" + mock_csv.assert_called_once_with(expected_path, index=False) + + +@patch("pandas.DataFrame.to_csv") +def test_save_datapoints_with_data_and_time_period_representation_mock(mock_csv): + mock_data = pd.DataFrame(columns=["Id_1", "Id_2"]) + dataset = Dataset( + name="test_dataset", + components={ + "Id_1": Component( + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, + ), + "Id_2": Component( + name="Id_2", + data_type=DataTypes.TimePeriod, + role=Role.IDENTIFIER, + nullable=False, + ), + }, + data=mock_data, + ) + output_path = "path/to/output/" + + save_datapoints(TimePeriodRepresentation.VTL, dataset, output_path) + + expected_path = "path/to/output/test_dataset.csv" + mock_csv.assert_called_once_with(expected_path, index=False) + + +@pytest.mark.parametrize("dataset, reference", params) +def test_save_datapoints(dataset, reference, tmp_path_factory): + output_path = tmp_path_factory.mktemp("test") + save_datapoints(None, dataset, output_path=output_path) + result = pd.read_csv(output_path / f"{dataset.name}.csv") + pd.testing.assert_frame_equal(result, dataset.data) diff --git a/tests/API/test_api.py b/tests/API/test_api.py index 10178362..5056a226 100644 --- a/tests/API/test_api.py +++ b/tests/API/test_api.py @@ -28,7 +28,10 @@ input_vtl_params_OK = [ (filepath_VTL / "2.vtl", "DS_r := DS_1 + DS_2; DS_r2 <- DS_1 + DS_r;"), - ("DS_r := DS_1 + DS_2; DS_r2 <- DS_1 + DS_r;", "DS_r := DS_1 + DS_2; DS_r2 <- DS_1 + DS_r;"), + ( + "DS_r := DS_1 + DS_2; DS_r2 <- DS_1 + DS_r;", + "DS_r := DS_1 + DS_2; DS_r2 <- DS_1 + DS_r;", + ), ] input_vtl_error_params = [ @@ -72,8 +75,18 @@ "type": "Integer", "nullable": False, }, - {"name": "Id_2", "role": "Identifier", "type": "String", "nullable": False}, - {"name": "Me_1", "role": "Measure", "type": "Number", "nullable": True}, + { + "name": "Id_2", + "role": "Identifier", + "type": "String", + "nullable": False, + }, + { + "name": "Me_1", + "role": "Measure", + "type": "Number", + "nullable": True, + }, ], } ] @@ -221,7 +234,11 @@ filepath_csv / "DS_1.csv", "Invalid datastructure. Must have .json extension", ), - (filepath_json / "DS_1.json", filepath_json / "DS_2.json", "Not found dataset DS_2.json"), + ( + filepath_json / "DS_1.json", + filepath_json / "DS_2.json", + "Not found dataset DS_2.json", + ), (2, 2, "Invalid datastructure. 
Input must be a dict or Path object"), ] @@ -307,13 +324,22 @@ def test_load_datastructures(datastructure): name="DS_1", components={ "Id_1": Component( - name="Id_1", data_type=DataTypes.Integer, role=Role.IDENTIFIER, nullable=False + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, ), "Id_2": Component( - name="Id_2", data_type=DataTypes.String, role=Role.IDENTIFIER, nullable=False + name="Id_2", + data_type=DataTypes.String, + role=Role.IDENTIFIER, + nullable=False, ), "Me_1": Component( - name="Me_1", data_type=DataTypes.Number, role=Role.MEASURE, nullable=True + name="Me_1", + data_type=DataTypes.Number, + role=Role.MEASURE, + nullable=True, ), }, data=None, @@ -356,13 +382,22 @@ def test_semantic(script, data_structures, value_domains, external_routines): name="DS_r", components={ "Id_1": Component( - name="Id_1", data_type=DataTypes.Integer, role=Role.IDENTIFIER, nullable=False + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, ), "Id_2": Component( - name="Id_2", data_type=DataTypes.String, role=Role.IDENTIFIER, nullable=False + name="Id_2", + data_type=DataTypes.String, + role=Role.IDENTIFIER, + nullable=False, ), "Me_1": Component( - name="Me_1", data_type=DataTypes.Number, role=Role.MEASURE, nullable=True + name="Me_1", + data_type=DataTypes.Number, + role=Role.MEASURE, + nullable=True, ), }, data=None, @@ -382,34 +417,56 @@ def test_run(script, data_structures, datapoints, value_domains, external_routin name="DS_r", components={ "Id_1": Component( - name="Id_1", data_type=DataTypes.Integer, role=Role.IDENTIFIER, nullable=False + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, ), "Id_2": Component( - name="Id_2", data_type=DataTypes.String, role=Role.IDENTIFIER, nullable=False + name="Id_2", + data_type=DataTypes.String, + role=Role.IDENTIFIER, + nullable=False, ), "Me_1": Component( - name="Me_1", data_type=DataTypes.Number, role=Role.MEASURE, nullable=True + name="Me_1", + data_type=DataTypes.Number, + role=Role.MEASURE, + nullable=True, ), }, data=pd.DataFrame( - columns=["Id_1", "Id_2", "Me_1"], index=[0, 1], data=[(1, "A", 2), (1, "B", 4)] + columns=["Id_1", "Id_2", "Me_1"], + index=[0, 1], + data=[(1, "A", 2), (1, "B", 4)], ), ), "DS_r2": Dataset( name="DS_r2", components={ "Id_1": Component( - name="Id_1", data_type=DataTypes.Integer, role=Role.IDENTIFIER, nullable=False + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, ), "Id_2": Component( - name="Id_2", data_type=DataTypes.String, role=Role.IDENTIFIER, nullable=False + name="Id_2", + data_type=DataTypes.String, + role=Role.IDENTIFIER, + nullable=False, ), "Me_1": Component( - name="Me_1", data_type=DataTypes.Number, role=Role.MEASURE, nullable=True + name="Me_1", + data_type=DataTypes.Number, + role=Role.MEASURE, + nullable=True, ), }, data=pd.DataFrame( - columns=["Id_1", "Id_2", "Me_1"], index=[0, 1], data=[(1, "A", 3), (1, "B", 6)] + columns=["Id_1", "Id_2", "Me_1"], + index=[0, 1], + data=[(1, "A", 3), (1, "B", 6)], ), ), } @@ -434,17 +491,28 @@ def test_run_only_persistent(script, data_structures, datapoints, value_domains, name="DS_r2", components={ "Id_1": Component( - name="Id_1", data_type=DataTypes.Integer, role=Role.IDENTIFIER, nullable=False + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, ), "Id_2": Component( - name="Id_2", data_type=DataTypes.String, role=Role.IDENTIFIER, nullable=False + name="Id_2", + 
data_type=DataTypes.String, + role=Role.IDENTIFIER, + nullable=False, ), "Me_1": Component( - name="Me_1", data_type=DataTypes.Number, role=Role.MEASURE, nullable=True + name="Me_1", + data_type=DataTypes.Number, + role=Role.MEASURE, + nullable=True, ), }, data=pd.DataFrame( - columns=["Id_1", "Id_2", "Me_1"], index=[0, 1], data=[(1, "A", 3), (1, "B", 6)] + columns=["Id_1", "Id_2", "Me_1"], + index=[0, 1], + data=[(1, "A", 3), (1, "B", 6)], ), ) } @@ -462,8 +530,18 @@ def test_readme_example(): { "name": "DS_1", "DataStructure": [ - {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, - {"name": "Me_1", "type": "Number", "role": "Measure", "nullable": True}, + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": True, + }, ], } ] @@ -480,14 +558,22 @@ def test_readme_example(): name="DS_A", components={ "Id_1": Component( - name="Id_1", data_type=DataTypes.Integer, role=Role.IDENTIFIER, nullable=False + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, ), "Me_1": Component( - name="Me_1", data_type=DataTypes.Number, role=Role.MEASURE, nullable=True + name="Me_1", + data_type=DataTypes.Number, + role=Role.MEASURE, + nullable=True, ), }, data=pd.DataFrame( - columns=["Id_1", "Me_1"], index=[0, 1, 2], data=[(1, 100), (2, 200), (3, 300)] + columns=["Id_1", "Me_1"], + index=[0, 1, 2], + data=[(1, 100), (2, 200), (3, 300)], ), ) } @@ -503,8 +589,18 @@ def test_readme_run(): { "name": "DS_1", "DataStructure": [ - {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, - {"name": "Me_1", "type": "Number", "role": "Measure", "nullable": True}, + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": True, + }, ], } ] @@ -521,14 +617,22 @@ def test_readme_run(): name="DS_A", components={ "Id_1": Component( - name="Id_1", data_type=DataTypes.Integer, role=Role.IDENTIFIER, nullable=False + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, ), "Me_1": Component( - name="Me_1", data_type=DataTypes.Number, role=Role.MEASURE, nullable=True + name="Me_1", + data_type=DataTypes.Number, + role=Role.MEASURE, + nullable=True, ), }, data=pd.DataFrame( - columns=["Id_1", "Me_1"], index=[0, 1, 2], data=[(1, 100), (2, 200), (3, 300)] + columns=["Id_1", "Me_1"], + index=[0, 1, 2], + data=[(1, 100), (2, 200), (3, 300)], ), ) } @@ -546,8 +650,18 @@ def test_readme_semantic_error(): { "name": "DS_1", "DataStructure": [ - {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, - {"name": "Me_1", "type": "String", "role": "Measure", "nullable": True}, + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "String", + "role": "Measure", + "nullable": True, + }, ], } ] @@ -572,10 +686,30 @@ def test_non_mandatory_fill_at(): { "name": "DS_1", "DataStructure": [ - {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, - {"name": "Id_2", "type": "String", "role": "Identifier", "nullable": False}, - {"name": "Me_1", "type": "String", "role": "Measure", "nullable": True}, - {"name": "At_1", "type": "String", "role": "Attribute", "nullable": True}, + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Id_2", + "type": 
"String", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "String", + "role": "Measure", + "nullable": True, + }, + { + "name": "At_1", + "type": "String", + "role": "Attribute", + "nullable": True, + }, ], } ] @@ -592,16 +726,28 @@ def test_non_mandatory_fill_at(): name="DS_r", components={ "Id_1": Component( - name="Id_1", data_type=DataTypes.Integer, role=Role.IDENTIFIER, nullable=False + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, ), "Id_2": Component( - name="Id_2", data_type=DataTypes.String, role=Role.IDENTIFIER, nullable=False + name="Id_2", + data_type=DataTypes.String, + role=Role.IDENTIFIER, + nullable=False, ), "Me_1": Component( - name="Me_1", data_type=DataTypes.String, role=Role.MEASURE, nullable=True + name="Me_1", + data_type=DataTypes.String, + role=Role.MEASURE, + nullable=True, ), "At_1": Component( - name="At_1", data_type=DataTypes.String, role=Role.ATTRIBUTE, nullable=True + name="At_1", + data_type=DataTypes.String, + role=Role.ATTRIBUTE, + nullable=True, ), }, data=pd.DataFrame( @@ -630,10 +776,30 @@ def test_non_mandatory_fill_me(): { "name": "DS_1", "DataStructure": [ - {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, - {"name": "Id_2", "type": "String", "role": "Identifier", "nullable": False}, - {"name": "Me_1", "type": "String", "role": "Measure", "nullable": True}, - {"name": "At_1", "type": "String", "role": "Attribute", "nullable": True}, + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Id_2", + "type": "String", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "String", + "role": "Measure", + "nullable": True, + }, + { + "name": "At_1", + "type": "String", + "role": "Attribute", + "nullable": True, + }, ], } ] @@ -650,16 +816,28 @@ def test_non_mandatory_fill_me(): name="DS_r", components={ "Id_1": Component( - name="Id_1", data_type=DataTypes.Integer, role=Role.IDENTIFIER, nullable=False + name="Id_1", + data_type=DataTypes.Integer, + role=Role.IDENTIFIER, + nullable=False, ), "Id_2": Component( - name="Id_2", data_type=DataTypes.String, role=Role.IDENTIFIER, nullable=False + name="Id_2", + data_type=DataTypes.String, + role=Role.IDENTIFIER, + nullable=False, ), "Me_1": Component( - name="Me_1", data_type=DataTypes.String, role=Role.MEASURE, nullable=True + name="Me_1", + data_type=DataTypes.String, + role=Role.MEASURE, + nullable=True, ), "At_1": Component( - name="At_1", data_type=DataTypes.String, role=Role.ATTRIBUTE, nullable=True + name="At_1", + data_type=DataTypes.String, + role=Role.ATTRIBUTE, + nullable=True, ), }, data=pd.DataFrame( @@ -690,10 +868,30 @@ def test_mandatory_at_error(): { "name": "DS_1", "DataStructure": [ - {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, - {"name": "Id_2", "type": "String", "role": "Identifier", "nullable": False}, - {"name": "Me_1", "type": "String", "role": "Measure", "nullable": True}, - {"name": "At_1", "type": "String", "role": "Attribute", "nullable": False}, + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Id_2", + "type": "String", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "String", + "role": "Measure", + "nullable": True, + }, + { + "name": "At_1", + "type": "String", + "role": "Attribute", + "nullable": False, + }, ], } ] @@ -723,10 +921,30 @@ def test_mandatory_me_error(): { "name": 
"DS_1", "DataStructure": [ - {"name": "Id_1", "type": "Integer", "role": "Identifier", "nullable": False}, - {"name": "Id_2", "type": "String", "role": "Identifier", "nullable": False}, - {"name": "Me_1", "type": "String", "role": "Measure", "nullable": False}, - {"name": "At_1", "type": "String", "role": "Attribute", "nullable": True}, + { + "name": "Id_1", + "type": "Integer", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Id_2", + "type": "String", + "role": "Identifier", + "nullable": False, + }, + { + "name": "Me_1", + "type": "String", + "role": "Measure", + "nullable": False, + }, + { + "name": "At_1", + "type": "String", + "role": "Attribute", + "nullable": True, + }, ], } ] diff --git a/tests/AST/__init__.py b/tests/AST/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/AST/data/encode/reference_encode.json b/tests/AST/data/encode/reference_encode.json new file mode 100644 index 00000000..a17cc9a9 --- /dev/null +++ b/tests/AST/data/encode/reference_encode.json @@ -0,0 +1,25 @@ +{ + "class_name": "Start", + "children": [ + { + "class_name": "Assignment", + "right": { + "class_name": "BinOp", + "right": { + "class_name": "VarID", + "value": "DS_2" + }, + "op": "+", + "left": { + "class_name": "VarID", + "value": "DS_1" + } + }, + "op": ":=", + "left": { + "class_name": "VarID", + "value": "DS_r" + } + } + ] +} \ No newline at end of file diff --git a/tests/AST/test_AST.py b/tests/AST/test_AST.py new file mode 100644 index 00000000..e2c4d43b --- /dev/null +++ b/tests/AST/test_AST.py @@ -0,0 +1,31 @@ +import json +from pathlib import Path + +import pytest + +from vtlengine.API import create_ast, load_vtl +from vtlengine.AST.ASTEncoders import ComplexDecoder, ComplexEncoder + +base_path = Path(__file__).parent +filepath = base_path / "data" / "encode" + +param = ["DS_r := DS_1 + DS_2;"] + + +@pytest.mark.parametrize("script", param) +def test_encode_ast(script): + vtl = load_vtl(script) + ast = create_ast(vtl) + result = json.dumps(ast, indent=4, cls=ComplexEncoder) + with open(filepath / "reference_encode.json", "r") as file_reference: + reference = file_reference.read() + assert result == reference + + +@pytest.mark.parametrize("script", param) +def test_decode_ast(script): + vtl = load_vtl(script) + ast = create_ast(vtl) + with open(filepath / "reference_encode.json") as file: + ast_decode = json.load(file, object_hook=ComplexDecoder.object_hook) + assert ast_decode == ast diff --git a/tests/Additional/test_additional.py b/tests/Additional/test_additional.py index c26c41ce..41ee9b04 100644 --- a/tests/Additional/test_additional.py +++ b/tests/Additional/test_additional.py @@ -56,7 +56,10 @@ def test_1(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_2(self): @@ -70,7 +73,10 @@ def test_2(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_3(self): @@ -84,7 +90,10 @@ def test_3(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_4(self): @@ -98,7 +107,10 @@ def test_4(self): 
references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_5(self): @@ -112,7 +124,10 @@ def test_5(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_6(self): @@ -126,7 +141,10 @@ def test_6(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_7(self): @@ -153,7 +171,10 @@ def test_11(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_12(self): @@ -167,7 +188,10 @@ def test_12(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_13(self): @@ -181,7 +205,10 @@ def test_13(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_14(self): @@ -195,7 +222,10 @@ def test_14(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_15(self): @@ -209,7 +239,10 @@ def test_15(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_16(self): @@ -223,7 +256,10 @@ def test_16(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_17(self): @@ -237,7 +273,10 @@ def test_17(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_18(self): @@ -251,7 +290,10 @@ def test_18(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_19(self): @@ -265,7 +307,10 @@ def test_19(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_20(self): @@ -279,7 +324,10 @@ def test_20(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + 
text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_21(self): @@ -292,7 +340,10 @@ def test_21(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_22(self): @@ -306,7 +357,10 @@ def test_22(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_23(self): @@ -320,7 +374,10 @@ def test_23(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_26(self): @@ -334,7 +391,10 @@ def test_26(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_27(self): @@ -348,7 +408,10 @@ def test_27(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_28(self): @@ -362,7 +425,10 @@ def test_28(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_29(self): @@ -375,7 +441,10 @@ def test_29(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_30(self): @@ -388,7 +457,10 @@ def test_30(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_31(self): @@ -401,7 +473,10 @@ def test_31(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_32(self): @@ -414,7 +489,10 @@ def test_32(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_33(self): @@ -427,7 +505,10 @@ def test_33(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_34(self): @@ -440,7 +521,10 @@ def test_34(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_35(self): @@ -453,7 +537,10 @@ 
def test_35(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_36(self): @@ -466,7 +553,10 @@ def test_36(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_37(self): @@ -479,7 +569,10 @@ def test_37(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_41(self): @@ -492,7 +585,10 @@ def test_41(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_42(self): @@ -505,7 +601,10 @@ def test_42(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_43(self): @@ -518,7 +617,10 @@ def test_43(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_44(self): @@ -531,7 +633,10 @@ def test_44(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_45(self): @@ -544,7 +649,10 @@ def test_45(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_46(self): @@ -557,7 +665,10 @@ def test_46(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_47(self): @@ -570,7 +681,10 @@ def test_47(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_48(self): @@ -583,7 +697,10 @@ def test_48(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_49(self): @@ -596,7 +713,10 @@ def test_49(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_50(self): @@ -609,7 +729,10 @@ def test_50(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, 
references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_51(self): @@ -623,7 +746,10 @@ def test_51(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_52(self): @@ -636,7 +762,10 @@ def test_52(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_53(self): @@ -649,7 +778,10 @@ def test_53(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_54(self): @@ -662,7 +794,10 @@ def test_54(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_55(self): @@ -675,7 +810,10 @@ def test_55(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_56(self): @@ -688,7 +826,10 @@ def test_56(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) @@ -712,7 +853,10 @@ def test_4(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_9(self): @@ -726,7 +870,10 @@ def test_9(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_10(self): @@ -740,7 +887,10 @@ def test_10(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_11(self): @@ -754,7 +904,10 @@ def test_11(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_12(self): @@ -768,7 +921,10 @@ def test_12(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_15(self): @@ -782,7 +938,10 @@ def test_15(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_16(self): @@ 
-796,7 +955,10 @@ def test_16(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_17(self): @@ -810,7 +972,10 @@ def test_17(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_20(self): @@ -824,7 +989,10 @@ def test_20(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_21(self): @@ -838,7 +1006,10 @@ def test_21(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_22(self): @@ -852,7 +1023,10 @@ def test_22(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_23(self): @@ -866,7 +1040,10 @@ def test_23(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_24(self): @@ -880,7 +1057,10 @@ def test_24(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_27(self): @@ -894,7 +1074,10 @@ def test_27(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_28(self): @@ -908,7 +1091,10 @@ def test_28(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_29(self): @@ -922,7 +1108,10 @@ def test_29(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_30(self): @@ -936,7 +1125,10 @@ def test_30(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_31(self): @@ -950,7 +1142,10 @@ def test_31(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) @@ -971,12 +1166,15 @@ def test_2(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, 
references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_3(self): """ - equal to reference manual test but this at DS_1 contains nulls. + Equal to the reference manual test, but here DS_1 contains nulls. """ text = """DS_r := exists_in (DS_1, DS_2, all);""" code = "5-3" @@ -984,12 +1182,15 @@ def test_3(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_4(self): """ - equal to reference manual test but this at DS_1 contains nulls. + Equal to the reference manual test, but here DS_1 contains nulls. """ text = """DS_r := exists_in (DS_1, DS_2, true);""" code = "5-4" @@ -997,12 +1198,15 @@ def test_4(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_5(self): """ - equal to reference manual test but this at DS_1 contains nulls. + Equal to the reference manual test, but here DS_1 contains nulls. """ text = """DS_r := exists_in (DS_1, DS_2, false);""" code = "5-5" @@ -1010,12 +1214,15 @@ def test_5(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_6(self): """ - equal to test 2 but this at DS_1 contains nulls. + Equal to test 2, but here DS_1 contains nulls. """ text = """DS_r := exists_in (DS_1, DS_2);""" code = "5-6" @@ -1023,12 +1230,15 @@ def test_6(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_7(self): """ - equal to reference manual test but this with reverse order. + Equal to the reference manual test, but with the datasets in reverse order. """ text = """DS_r := exists_in (DS_2, DS_1, all);""" code = "5-7" @@ -1036,12 +1246,15 @@ def test_7(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_9(self): """ - equal to reference manual test but at this one DS_2 have no Id_4 (different number of Ids). + Equal to the reference manual test, but here DS_2 has no Id_4 (different number of Ids).
""" text = """DS_r := exists_in (DS_2, DS_1, all);""" code = "5-9" @@ -1049,7 +1262,10 @@ def test_9(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_10(self): @@ -1062,7 +1278,10 @@ def test_10(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_11(self): @@ -1075,7 +1294,10 @@ def test_11(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_12(self): @@ -1088,7 +1310,10 @@ def test_12(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_13(self): @@ -1101,7 +1326,10 @@ def test_13(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_16(self): @@ -1114,7 +1342,10 @@ def test_16(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_17(self): @@ -1127,7 +1358,10 @@ def test_17(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_18(self): @@ -1140,7 +1374,10 @@ def test_18(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_19(self): @@ -1153,7 +1390,10 @@ def test_19(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_20(self): @@ -1166,7 +1406,10 @@ def test_20(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_21(self): @@ -1179,7 +1422,10 @@ def test_21(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_22(self): @@ -1192,7 +1438,10 @@ def test_22(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_23(self): @@ -1205,7 +1454,10 @@ def test_23(self): 
references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_24(self): @@ -1218,7 +1470,10 @@ def test_24(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_25(self): @@ -1231,7 +1486,10 @@ def test_25(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_26(self): @@ -1244,7 +1502,10 @@ def test_26(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_27(self): @@ -1257,7 +1518,10 @@ def test_27(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_28(self): @@ -1270,7 +1534,10 @@ def test_28(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_29(self): @@ -1283,7 +1550,10 @@ def test_29(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_30(self): @@ -1296,7 +1566,10 @@ def test_30(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_31(self): @@ -1309,7 +1582,10 @@ def test_31(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_32(self): @@ -1322,7 +1598,10 @@ def test_32(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_33(self): @@ -1335,7 +1614,10 @@ def test_33(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_34(self): @@ -1348,7 +1630,10 @@ def test_34(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) @@ -1382,7 +1667,10 @@ def test_1(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names 
+ text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_2(self): @@ -1396,7 +1684,10 @@ def test_2(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_3(self): @@ -1410,7 +1701,10 @@ def test_3(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_4(self): @@ -1425,7 +1719,10 @@ def test_4(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_6(self): @@ -1437,7 +1734,10 @@ def test_6(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) # unpivot @@ -1457,7 +1757,10 @@ def test_GL_49_1(self): text = "DS_r := DS_1 [unpivot Id_2, Me_3];" self.BaseTest( - code=code, number_inputs=number_inputs, references_names=references_names, text=text + code=code, + number_inputs=number_inputs, + references_names=references_names, + text=text, ) # OK @@ -1477,7 +1780,10 @@ def test_GL_49_2(self): text = "DS_r := DS_1 [unpivot Id_2, Me_3];" self.BaseTest( - code=code, number_inputs=number_inputs, references_names=references_names, text=text + code=code, + number_inputs=number_inputs, + references_names=references_names, + text=text, ) # OK @@ -1497,7 +1803,10 @@ def test_GL_49_3(self): text = "DS_r := DS_1 [unpivot Id_2, Me_3];" self.BaseTest( - code=code, number_inputs=number_inputs, references_names=references_names, text=text + code=code, + number_inputs=number_inputs, + references_names=references_names, + text=text, ) def test_GL_49_4(self): @@ -1516,7 +1825,10 @@ def test_GL_49_4(self): text = "DS_r := DS_1 [unpivot Id_3, Me_3];" self.BaseTest( - code=code, number_inputs=number_inputs, references_names=references_names, text=text + code=code, + number_inputs=number_inputs, + references_names=references_names, + text=text, ) def test_GL_49_6(self): @@ -1535,7 +1847,10 @@ def test_GL_49_6(self): text = "DS_r := DS_1 [unpivot Id_3, Me_2];" self.BaseTest( - code=code, number_inputs=number_inputs, references_names=references_names, text=text + code=code, + number_inputs=number_inputs, + references_names=references_names, + text=text, ) @@ -1559,7 +1874,10 @@ def test_1(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_2(self): @@ -1573,7 +1891,10 @@ def test_2(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_3(self): @@ -1587,7 +1908,10 @@ def test_3(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + 
references_names=references_names, ) def test_4(self): @@ -1601,7 +1925,10 @@ def test_4(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_5(self): @@ -1615,7 +1942,10 @@ def test_5(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_6(self): @@ -1642,7 +1972,10 @@ def test_7(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_8(self): @@ -1656,7 +1989,10 @@ def test_8(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_9(self): @@ -1671,7 +2007,10 @@ def test_9(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) @@ -1694,7 +2033,10 @@ def test_1(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_2(self): @@ -1707,7 +2049,10 @@ def test_2(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_3(self): @@ -1721,7 +2066,10 @@ def test_3(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_4(self): @@ -1735,7 +2083,10 @@ def test_4(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_5(self): @@ -1749,7 +2100,10 @@ def test_5(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_6(self): @@ -1792,7 +2146,10 @@ def test_7(self): references_names = ["DS_r1", "DS_r2"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_GL_222_1(self): @@ -1803,7 +2160,10 @@ def test_GL_222_1(self): references_names = ["1"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) @@ -1822,7 +2182,10 @@ def test_1(self): references_names = ["DS_r"] self.BaseTest( - 
text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_2(self): @@ -1833,7 +2196,10 @@ def test_2(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_3(self): @@ -1844,7 +2210,10 @@ def test_3(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_4(self): @@ -1857,7 +2226,10 @@ def test_4(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_5(self): @@ -1868,7 +2240,10 @@ def test_5(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_6(self): @@ -1879,7 +2254,10 @@ def test_6(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_7(self): @@ -1890,7 +2268,10 @@ def test_7(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_8(self): @@ -1901,7 +2282,10 @@ def test_8(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_9(self): @@ -1912,7 +2296,10 @@ def test_9(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_10(self): @@ -1923,7 +2310,10 @@ def test_10(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_11(self): @@ -1934,7 +2324,10 @@ def test_11(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_12(self): @@ -1945,12 +2338,15 @@ def test_12(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_13(self): """ - reverse order of test 10. + Reverse order of test 10. 
""" # text = """DS_r := inner_join ( DS_2, DS_1 using Id_1, Me_2);""" # code = "2-10" @@ -1971,7 +2367,10 @@ def test_15(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_16(self): @@ -2006,7 +2405,10 @@ def test_19(self): # self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_20(self): @@ -2023,7 +2425,10 @@ def test_20(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_22(self): @@ -2040,7 +2445,10 @@ def test_22(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_23(self): @@ -2109,7 +2517,10 @@ def test_27(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_28(self): @@ -2177,7 +2588,10 @@ def test_31(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_32(self): @@ -2212,7 +2626,10 @@ def test_33(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_34(self): @@ -2267,7 +2684,10 @@ def test_36(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_37(self): @@ -2285,7 +2705,10 @@ def test_37(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_38(self): @@ -2303,7 +2726,10 @@ def test_38(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) # BUG @@ -2322,7 +2748,10 @@ def test_39(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_40(self): @@ -2340,7 +2769,10 @@ def test_40(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + 
references_names=references_names, ) @@ -2362,7 +2794,10 @@ def test_1(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_2(self): @@ -2375,7 +2810,10 @@ def test_2(self): with pytest.raises(Exception, match="Error level must be an integer, line 1"): self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_3(self): @@ -2387,7 +2825,10 @@ def test_3(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_4(self): @@ -2414,7 +2855,10 @@ def test_4(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_5(self): @@ -2432,7 +2876,10 @@ def test_5(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_6(self): @@ -2450,7 +2897,10 @@ def test_6(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_7(self): @@ -2468,7 +2918,10 @@ def test_7(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_8(self): @@ -2486,7 +2939,10 @@ def test_8(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_9(self): @@ -2504,7 +2960,10 @@ def test_9(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_10(self): @@ -2522,7 +2981,10 @@ def test_10(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_11(self): @@ -2540,7 +3002,10 @@ def test_11(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_12(self): @@ -2558,7 +3023,10 @@ def test_12(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_13(self): @@ -2576,7 +3044,10 @@ def 
test_13(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_14(self): @@ -2594,7 +3065,10 @@ def test_14(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_15(self): @@ -2612,7 +3086,10 @@ def test_15(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_16(self): @@ -2630,7 +3107,10 @@ def test_16(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_17(self): @@ -2648,7 +3128,10 @@ def test_17(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_18(self): @@ -2666,7 +3149,10 @@ def test_18(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_19(self): @@ -2684,7 +3170,10 @@ def test_19(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_20(self): @@ -2702,7 +3191,10 @@ def test_20(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_21(self): @@ -2720,7 +3212,10 @@ def test_21(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_22(self): @@ -2738,7 +3233,10 @@ def test_22(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_23(self): @@ -2756,7 +3254,10 @@ def test_23(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_24(self): @@ -2774,7 +3275,10 @@ def test_24(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_25(self): @@ -2795,7 +3299,10 @@ def test_25(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, 
references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_26(self): @@ -2816,7 +3323,10 @@ def test_26(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_27(self): @@ -2837,7 +3347,10 @@ def test_27(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_28(self): @@ -2858,7 +3371,10 @@ def test_28(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_29(self): @@ -2879,7 +3395,10 @@ def test_29(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_30(self): @@ -2900,7 +3419,10 @@ def test_30(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) @@ -2923,7 +3445,10 @@ def test_1(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_2(self): @@ -2936,7 +3461,10 @@ def test_2(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_3(self): @@ -2949,7 +3477,10 @@ def test_3(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_4(self): @@ -2962,7 +3493,10 @@ def test_4(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_5(self): @@ -2975,7 +3509,10 @@ def test_5(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_6(self): @@ -2988,7 +3525,10 @@ def test_6(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_7(self): @@ -3001,7 +3541,10 @@ def test_7(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def 
test_8(self): @@ -3014,7 +3557,10 @@ def test_8(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_9(self): @@ -3027,7 +3573,10 @@ def test_9(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_10(self): @@ -3040,7 +3589,10 @@ def test_10(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_11(self): @@ -3053,7 +3605,10 @@ def test_11(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_12(self): @@ -3066,7 +3621,10 @@ def test_12(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_13(self): @@ -3079,7 +3637,10 @@ def test_13(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_14(self): @@ -3092,7 +3653,10 @@ def test_14(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_15(self): @@ -3105,7 +3669,10 @@ def test_15(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_16(self): @@ -3118,7 +3685,10 @@ def test_16(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_17(self): @@ -3131,7 +3701,10 @@ def test_17(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_18(self): @@ -3144,7 +3717,10 @@ def test_18(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_19(self): @@ -3157,7 +3733,10 @@ def test_19(self): references_names = ["DS_r"] self.BaseTest( - text=text, code=code, number_inputs=number_inputs, references_names=references_names + text=text, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_20(self): @@ -3169,7 +3748,10 @@ def test_20(self): references_names = ["1", "2", "3", "4", "5", "6"] 
self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_21(self): @@ -3181,7 +3763,10 @@ def test_21(self): references_names = ["1", "2", "3", "4", "5", "6"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_22(self): @@ -3193,7 +3778,10 @@ def test_22(self): references_names = ["1", "2", "3", "4", "5", "6"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_23(self): @@ -3205,7 +3793,10 @@ def test_23(self): references_names = ["1"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_24(self): @@ -3217,7 +3808,10 @@ def test_24(self): references_names = ["1"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_25(self): @@ -3260,7 +3854,10 @@ def test_1(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_2(self): @@ -3270,7 +3867,10 @@ def test_2(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_3(self): @@ -3280,7 +3880,10 @@ def test_3(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_4(self): @@ -3290,7 +3893,10 @@ def test_4(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_5(self): @@ -3300,7 +3906,10 @@ def test_5(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_6(self): @@ -3310,7 +3919,10 @@ def test_6(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_7(self): @@ -3320,7 +3932,10 @@ def test_7(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_8(self): @@ -3330,7 +3945,10 @@ def test_8(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, 
references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_9(self): @@ -3340,7 +3958,10 @@ def test_9(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_10(self): @@ -3350,7 +3971,10 @@ def test_10(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_11(self): @@ -3360,7 +3984,10 @@ def test_11(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_12(self): @@ -3370,7 +3997,10 @@ def test_12(self): references_names = ["DS_r"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) @@ -3400,7 +4030,10 @@ def test_3(self): references_names = ["DS_r1", "DS_r2"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) @@ -3421,5 +4054,8 @@ def test_1(self): # with pytest.raises(Exception, match="cast .+? without providing a mask"): self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) diff --git a/tests/BigProjects/Anamart/test_anamart.py b/tests/BigProjects/Anamart/test_anamart.py index ebc8dc45..9a3b457d 100644 --- a/tests/BigProjects/Anamart/test_anamart.py +++ b/tests/BigProjects/Anamart/test_anamart.py @@ -160,7 +160,11 @@ def test_2(self): """ """ code = "C02" number_inputs = 2 - references_names = ["INSTRMNT_INFO", "INSTRMNT_INFO_K", "ANAMART_PRTCTN_INSTRMNT"] + references_names = [ + "INSTRMNT_INFO", + "INSTRMNT_INFO_K", + "ANAMART_PRTCTN_INSTRMNT", + ] self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) diff --git a/tests/BigProjects/ExternalProjects/test_ext_projects.py b/tests/BigProjects/ExternalProjects/test_ext_projects.py index 48404658..11474d8e 100644 --- a/tests/BigProjects/ExternalProjects/test_ext_projects.py +++ b/tests/BigProjects/ExternalProjects/test_ext_projects.py @@ -83,7 +83,13 @@ def test_AnaMart_AnaMart_1(self): rn = [str(i) for i in range(1, 30)] rn += [str(i) for i in range(72, 303)] # references_names = rn - sql_names = ["instDates", "instrFctJn", "instrFctJn2", "prtctnDts", "prtctnFctJn"] + sql_names = [ + "instDates", + "instrFctJn", + "instrFctJn2", + "prtctnDts", + "prtctnFctJn", + ] # self.BaseTest( # code=code, diff --git a/tests/Bugs/test_bugs.py b/tests/Bugs/test_bugs.py index a16fd629..43ef5421 100644 --- a/tests/Bugs/test_bugs.py +++ b/tests/Bugs/test_bugs.py @@ -140,7 +140,10 @@ def test_GL_133_1(self): # HUH!!!!!!!!! 
self.NewSemanticExceptionTest( - code=code, number_inputs=number_inputs, exception_code=message, vd_names=vd_names + code=code, + number_inputs=number_inputs, + exception_code=message, + vd_names=vd_names, ) def test_GL_133_2(self): @@ -196,7 +199,10 @@ def test_GL_161_2(self): references_names = ["1"] self.BaseTest( - text=None, code=code, number_inputs=number_inputs, references_names=references_names + text=None, + code=code, + number_inputs=number_inputs, + references_names=references_names, ) def test_GL_47_4(self): diff --git a/tests/Calc/test_calc.py b/tests/Calc/test_calc.py index 5cdb186c..2957af52 100644 --- a/tests/Calc/test_calc.py +++ b/tests/Calc/test_calc.py @@ -221,7 +221,7 @@ def test_GL_287_1(self): def test_GL_300_1(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression:DS_r := inner_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2); @@ -237,7 +237,7 @@ def test_GL_300_1(self): def test_GL_300_2(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2)[calc me_5:= DS_1#Me_2]; @@ -255,7 +255,7 @@ def test_GL_300_2(self): def test_GL_300_3(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2)[calc me_5:= Me_2]; @@ -271,7 +271,7 @@ def test_GL_300_3(self): def test_GL_300_4(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join ( DS_2, DS_1 filter Id_2 ="B" calc Me_4 := Me_2 keep Me_4, DS_1#Me_2); @@ -289,7 +289,7 @@ def test_GL_300_4(self): def test_GL_300_5(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2)[calc me_5:= Me_2]; @@ -305,7 +305,7 @@ def test_GL_300_5(self): def test_GL_300_6(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2)[calc me_5:= Me_2]; @@ -321,7 +321,7 @@ def test_GL_300_6(self): def test_GL_300_7(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2)[calc me_5:= Me_2]; @@ -337,7 +337,7 @@ def test_GL_300_7(self): def test_GL_300_8(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2)[calc me_5:= Me_2]; @@ -353,7 +353,7 @@ def test_GL_300_8(self): def test_GL_300_9(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d1#Me_1 + d2#Me_2 drop d2#Me_2); @@ -369,7 +369,7 @@ def test_GL_300_9(self): def test_GL_300_10(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + d2#Me_2 drop d2#Me_2); @@ -385,7 +385,7 @@ def test_GL_300_10(self): def test_GL_300_11(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_2 drop d2#Me_2); @@ -403,7 +403,7 @@ def test_GL_300_11(self): def test_GL_300_12(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join (DS_1 as 
d1, DS_2 as d2 calc Me_4 := Me_2 drop d2#Me_2); @@ -420,7 +420,7 @@ def test_GL_300_12(self): def test_GL_300_13(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_1 + Me_2 + d2#Me_1A drop d2#Me_2); @@ -438,7 +438,7 @@ def test_GL_300_13(self): def test_GL_300_14(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + Me_3 drop d2#Me_2); @@ -456,7 +456,7 @@ def test_GL_300_14(self): def test_GL_300_15(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_1 + Me_2 + d2#Me_1A drop d2#Me_2); @@ -474,7 +474,7 @@ def test_GL_300_15(self): def test_GL_300_16(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: DS_r := inner_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + d1#Me_1+ d2#Me_2 + d1#Me_2 drop d2#Me_2); @@ -490,7 +490,7 @@ def test_GL_300_16(self): def test_GL_310_1(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: @@ -506,7 +506,7 @@ def test_GL_310_1(self): def test_GL_310_2(self): """ - inner join + Inner join Dataset --> Dataset Status: OK Expression: diff --git a/tests/Cast/test_cast.py b/tests/Cast/test_cast.py index 94e768a4..8b8a27c4 100644 --- a/tests/Cast/test_cast.py +++ b/tests/Cast/test_cast.py @@ -1,6 +1,15 @@ +import warnings from pathlib import Path +import pytest + from tests.Helper import TestHelper +from vtlengine.API import create_ast +from vtlengine.DataTypes import Date, Duration, Number, String, TimeInterval, TimePeriod +from vtlengine.Exceptions import SemanticError +from vtlengine.Interpreter import InterpreterAnalyzer +from vtlengine.Model import Scalar +from vtlengine.Operators.CastOperator import Cast class CastHelper(TestHelper): @@ -14,6 +23,174 @@ class CastHelper(TestHelper): filepath_sql = base_path / "data" / "sql" +evaluate_params = [ + ( + Scalar("40.000", String, "40.000"), + Number, + "DD.DDD", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + Scalar("2022-01-01", String, "2022-01-01"), + Date, + "YYYY-MM-DD", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + Scalar("2023-01-12", String, "2023-01-12"), + Date, + r"\PY\YDDD\D", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + Scalar("2000Q1", String, "2000Q1"), + TimePeriod, + r"YYYY\QQ", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + Scalar("2022-05-21/2023-05-21", String, "2022-05-21/2023-05-21"), + TimeInterval, + "YYYY-MM-DD/YYYY-MM-DD", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + Scalar("2023-02-05", String, "2023-02-05"), + Duration, + "P0Y240D", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + Scalar("2021-12-21", Date, "2021-12-21"), + String, + "YYYY-MM-DD hh:mm:ss", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + Scalar("P0Y240D", Duration, "P0Y240D"), + String, + "YYYY-MM-DD hh:mm:ss", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + Scalar("2022-05-21/2023-05-21", TimeInterval, "2022-05-21/2023-05-21"), + String, + "YYYY-MM-DD/YYYY-MM-DD", + NotImplementedError, + "How this mask should be implemented is
not yet defined.", + ), +] + +cast_error_params = [ + ( + "40.000", + String, + Number, + "DD.DDD", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + "2022-01-01", + String, + Date, + "YYYY-MM-DD", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + "2023-01-12", + String, + Date, + "\PY\YDDD\D", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + "2000Q1", + String, + TimePeriod, + "YYYY\QQ", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + "2022-05-21/2023-05-21", + String, + TimeInterval, + "YYYY-MM-DD/YYYY-MM-DD", + NotImplementedError, + "How this cast should be implemented is not yet defined.", + ), + ( + "2023-02-05", + String, + Duration, + "P0Y240D", + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + "40.000", + Number, + String, + "DD.DDD", + SemanticError, + "('Impossible to cast 40.000 from type Number to String. Please check transformation with output dataset DS_r', '2-1-5-1')", + ), +] +test_params = [ + ( + 'cast("40.000", number, "DD.DDD")', + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + 'cast("2022-01-01", date, "YYYY-MM-DD")', + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + r'cast("2023-01-12", date, "\PY\YDDD\D")', + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + r'cast ("2000Q1", time_period, "YYYY\QQ")', + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + 'cast ("2022-05-21/2023-05-21", time, "YYYY-MM-DD/YYYY-MM-DD")', + NotImplementedError, + "How this mask should be implemented is not yet defined.", + ), + ( + 'cast("2021-12-21", string, "YYYY-MM-DD hh:mm:ss")', + SemanticError, + "(\"A mask can't be provided to cast from type String to String. Mask provided: YYYY-MM-DD hh:mm:ss. Please check transformation with output dataset DS_r\", '1-1-5-5')", + ), + ( + 'cast("P0Y240D", string, "YYYY-MM-DD hh:mm:ss")', + SemanticError, + "(\"A mask can't be provided to cast from type String to String. Mask provided: YYYY-MM-DD hh:mm:ss. Please check transformation with output dataset DS_r\", '1-1-5-5')", + ), + ( + 'cast ("2022-05-21/2023-05-21", string, "YYYY-MM-DD/YYYY-MM-DD")', + SemanticError, + "(\"A mask can't be provided to cast from type String to String. Mask provided: YYYY-MM-DD/YYYY-MM-DD. 
Please check transformation with output dataset DS_r\", '1-1-5-5')", + ), +] + + + class CastExplicitWithoutMask(CastHelper): """ """ @@ -27,3 +204,32 @@ def test_GL_461_1(self): self.NewSemanticExceptionTest( code=code, number_inputs=number_inputs, exception_code=error_code ) + + +@pytest.mark.parametrize("text, type_of_error, exception_message", test_params) +def test_errors_validate_cast_scalar(text, type_of_error, exception_message): + warnings.filterwarnings("ignore", category=FutureWarning) + expression = f"DS_r := {text};" + ast = create_ast(expression) + interpreter = InterpreterAnalyzer({}) + with pytest.raises(type_of_error, match=f"{exception_message}"): + interpreter.visit(ast) + + +@pytest.mark.parametrize( + "value, provided_type, to_type, mask, type_of_error, exception_message", + cast_error_params, +) +def test_errors_cast_scalar(value, provided_type, to_type, mask, type_of_error, exception_message): + warnings.filterwarnings("ignore", category=FutureWarning) + with pytest.raises(type_of_error, match=f"{exception_message}"): + Cast.cast_value(value, provided_type=provided_type, to_type=to_type, mask_value=mask) + + +@pytest.mark.parametrize( + "operand, scalar_type, mask, type_of_error, exception_message", evaluate_params +) +def test_errors_cast_scalar_evaluate(operand, scalar_type, mask, type_of_error, exception_message): + warnings.filterwarnings("ignore", category=FutureWarning) + with pytest.raises(type_of_error, match=f"{exception_message}"): + Cast.evaluate(operand=operand, scalarType=scalar_type, mask=mask)
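[Reviewer note] The three parametrized tests added above share one pattern: build the smallest possible input (a VTL expression string, a bare value, or a Scalar), push it through the layer under test, and assert on the raised exception with pytest.raises. A minimal, self-contained sketch of that pattern, assuming only the API already imported in this file; the test name and the single case shown are illustrative, lifted from test_params:

import warnings

import pytest

from vtlengine.API import create_ast
from vtlengine.Interpreter import InterpreterAnalyzer


@pytest.mark.parametrize(
    "text, exception_message",
    [
        (
            'cast("40.000", number, "DD.DDD")',
            "How this mask should be implemented is not yet defined.",
        )
    ],
)
def test_masked_cast_not_implemented(text, exception_message):
    warnings.filterwarnings("ignore", category=FutureWarning)
    ast = create_ast(f"DS_r := {text};")  # parse the VTL assignment into an AST
    interpreter = InterpreterAnalyzer({})  # empty dict: no input datasets needed
    with pytest.raises(NotImplementedError, match=exception_message):
        interpreter.visit(ast)  # evaluation raises before any data is touched

The Cast.cast_value and Cast.evaluate variants above exercise the same failure modes one layer lower, calling the operator directly without going through the parser.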
diff --git a/tests/DatapointRulesets/test_datapoint_rulesets.py b/tests/DatapointRulesets/test_datapoint_rulesets.py index 4ba0de87..1d8434e0 100644 --- a/tests/DatapointRulesets/test_datapoint_rulesets.py +++ b/tests/DatapointRulesets/test_datapoint_rulesets.py @@ -23,7 +23,7 @@ class DatapointRulesetTests(TestDataPointRuleset): def test_1(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -54,7 +54,7 @@ def test_1(self): def test_2(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -85,7 +85,7 @@ def test_2(self): def test_3(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -116,7 +116,7 @@ def test_3(self): def test_4(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -147,7 +147,7 @@ def test_4(self): def test_5(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -178,7 +178,7 @@ def test_5(self): def test_6(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -209,7 +209,7 @@ def test_6(self): def test_7(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -240,7 +240,7 @@ def test_7(self): def test_8(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -272,7 +272,7 @@ def test_8(self): def test_9(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -303,7 +303,7 @@ def test_9(self): def test_10(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -335,7 +335,7 @@ def test_10(self): # with value domains def test_11(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -362,7 +362,7 @@ def test_11(self): # If a rule is given, its name has to be equal to the variable in the rule signature def test_12(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -383,7 +383,7 @@ def test_12(self): # Value domain def test_13(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK @@ -403,7 +403,7 @@ def test_13(self): def test_14(self): """ - define datapoint ruleset + Define datapoint ruleset Dataset --> Dataset Status: OK diff --git a/tests/Eval/test_eval.py b/tests/Eval/test_eval.py index c7370ea6..bddb8343 100644 --- a/tests/Eval/test_eval.py +++ b/tests/Eval/test_eval.py @@ -70,7 +70,8 @@ def test_4(self): references_names = ["DS_r"] sql_names = ["SQL_DS_NOT_FOUND"] with pytest.raises( - ValueError, match="External Routine dataset DS_X is not present in Eval operands" + ValueError, + match="External Routine dataset DS_X is not present in Eval operands", ): self.BaseTest( code=code, diff --git a/tests/Helper.py b/tests/Helper.py index 33c8eb6b..29b5c71a 100644 --- a/tests/Helper.py +++ b/tests/Helper.py @@ -82,7 +82,9 @@ def LoadDataset( for scalar_json in structures["scalars"]: scalar_name = scalar_json["name"] scalar = Scalar( - name=scalar_name, data_type=SCALAR_TYPES[scalar_json["type"]], value=None + name=scalar_name, + data_type=SCALAR_TYPES[scalar_json["type"]], + value=None, ) datasets[scalar_name] = scalar return datasets @@ -237,7 +239,9 @@ def NewSemanticExceptionTest( input_datasets[scalar_name].value = scalar_value interpreter = InterpreterAnalyzer( - input_datasets, value_domains=value_domains, external_routines=external_routines + input_datasets, + value_domains=value_domains, + external_routines=external_routines, ) with pytest.raises(SemanticError) as context: ast = create_ast(text) @@ -250,14 +254,14 @@ def NewSemanticExceptionTest( @classmethod def SemanticExceptionTest( - cls, - code: str, - number_inputs: int, - exception_code: str, - vd_names: List[str] = None, - sql_names: List[str] = None, - text: Optional[str] = None, - scalars: Dict[str, Any] = None, + cls, + code: str, + number_inputs: int, + exception_code: str, + vd_names: List[str] = None, + sql_names: List[str] = None, + text: Optional[str] = None, + scalars: Dict[str, Any] = None, ): # Data Loading.-------------------------------------------------------- warnings.filterwarnings("ignore", category=FutureWarning) @@ -282,7 +286,9 @@ def SemanticExceptionTest( input_datasets[scalar_name].value = scalar_value interpreter = InterpreterAnalyzer( - input_datasets, value_domains=value_domains, external_routines=external_routines + input_datasets, + value_domains=value_domains, + external_routines=external_routines, ) with pytest.raises(SemanticError) as context: ast = create_ast(text) diff --git a/tests/Hierarchical/test_hierarchical.py b/tests/Hierarchical/test_hierarchical.py index 89585f02..c69bb9bd 100644 --- a/tests/Hierarchical/test_hierarchical.py +++ b/tests/Hierarchical/test_hierarchical.py @@ -1082,7 +1082,11 @@ def test_GL_397_5(self): references_names = ["1"] with pytest.raises(Exception, match="condComp and ruleComp must be the same"): - self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names)
+ self.BaseTest( + code=code, + number_inputs=number_inputs, + references_names=references_names, + ) def test_GL_397_8(self): """ @@ -2407,7 +2415,11 @@ def test_GL_397_10(self): references_names = ["1"] with pytest.raises(Exception, match="condComp and ruleComp must be the same"): - self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + self.BaseTest( + code=code, + number_inputs=number_inputs, + references_names=references_names, + ) def test_GL_397_12(self): """ @@ -2428,7 +2440,11 @@ def test_GL_397_12(self): references_names = ["1"] with pytest.raises(Exception, match="condComp and ruleComp must be the same"): - self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + self.BaseTest( + code=code, + number_inputs=number_inputs, + references_names=references_names, + ) def test_GL_397_14(self): """ @@ -2449,7 +2465,11 @@ def test_GL_397_14(self): references_names = ["1"] with pytest.raises(Exception, match="condComp and ruleComp must be the same"): - self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names) + self.BaseTest( + code=code, + number_inputs=number_inputs, + references_names=references_names, + ) def test_GL_397_16(self): """ diff --git a/tests/Joins/test_joins.py b/tests/Joins/test_joins.py index 6e037a43..5a4a9154 100644 --- a/tests/Joins/test_joins.py +++ b/tests/Joins/test_joins.py @@ -243,7 +243,7 @@ class CalcInsideJoinTests(JoinHelper): def test_GL_300_1(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2); @@ -259,7 +259,7 @@ def test_GL_300_1(self): def test_GL_300_2(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2)[calc me_5:= DS_1#Me_2]; @@ -277,7 +277,7 @@ def test_GL_300_2(self): def test_GL_300_3(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2)[calc me_5:= Me_2]; @@ -293,7 +293,7 @@ def test_GL_300_3(self): def test_GL_300_4(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join ( DS_2, DS_1 filter Id_2 ="B" calc Me_4 := Me_2 keep Me_4, DS_1#Me_2); @@ -311,7 +311,7 @@ def test_GL_300_4(self): def test_GL_300_5(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_2 drop d2#Me_2); @@ -327,7 +327,7 @@ def test_GL_300_5(self): def test_GL_300_6(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d1#Me_1 drop d2#Me_2); @@ -343,7 +343,7 @@ def test_GL_300_6(self): def test_GL_300_7(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_1 drop d2#Me_2); @@ -359,7 +359,7 @@ def test_GL_300_7(self): def test_GL_300_8(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_1 + d2#Me_2 drop d2#Me_2); @@ -375,7 +375,7 @@ def test_GL_300_8(self): def test_GL_300_9(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d1#Me_1 + d2#Me_2 drop d2#Me_2); @@ -391,7 +391,7 @@ def 
test_GL_300_9(self): def test_GL_300_10(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + d2#Me_2 drop d2#Me_2); @@ -407,7 +407,7 @@ def test_GL_300_10(self): def test_GL_300_11(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_2 drop d2#Me_2); @@ -425,7 +425,7 @@ def test_GL_300_11(self): def test_GL_300_12(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d1#Me_1A drop d2#Me_2); @@ -442,7 +442,7 @@ def test_GL_300_12(self): def test_GL_300_13(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_1 + Me_2 + d2#Me_1A drop d2#Me_2); @@ -460,7 +460,7 @@ def test_GL_300_13(self): def test_GL_300_14(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + Me_3 drop d2#Me_2); @@ -478,7 +478,7 @@ def test_GL_300_14(self): def test_GL_300_15(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + d1#Me_1+ d2#Me_2 + Me_2 drop d2#Me_2); @@ -496,7 +496,7 @@ def test_GL_300_15(self): def test_GL_300_16(self): """ - left join + Left join Dataset --> Dataset Status: OK Expression: DS_r := left_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + d1#Me_1+ d2#Me_2 + d1#Me_2 drop d2#Me_2); @@ -512,7 +512,7 @@ def test_GL_300_16(self): def test_GL_300_17(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2); @@ -528,7 +528,7 @@ def test_GL_300_17(self): def test_GL_300_18(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2)[calc me_5:= DS_1#Me_2]; @@ -546,7 +546,7 @@ def test_GL_300_18(self): def test_GL_300_19(self): """ - full join + Full join Dataset --> Dataset Status: BUG Expression: DS_r := full_join ( DS_1, DS_2 filter Id_2 ="B" calc Me_4 := DS_2#Me_2 keep Me_4, DS_1#Me_2)[calc me_5:= Me_2]; @@ -563,7 +563,7 @@ def test_GL_300_19(self): def test_GL_300_20(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join ( DS_2, DS_1 filter Id_2 ="B" calc Me_4 := Me_2 keep Me_4, DS_1#Me_2); @@ -581,7 +581,7 @@ def test_GL_300_20(self): def test_GL_300_21(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_2 drop d2#Me_2); @@ -597,7 +597,7 @@ def test_GL_300_21(self): def test_GL_300_22(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d1#Me_1 drop d2#Me_2); @@ -613,7 +613,7 @@ def test_GL_300_22(self): def test_GL_300_23(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_1 drop d2#Me_2); @@ -629,7 +629,7 @@ def test_GL_300_23(self): def test_GL_300_24(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_1 + d2#Me_2 drop d2#Me_2); @@ -645,7 +645,7 @@ def test_GL_300_24(self): def 
test_GL_300_25(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d1#Me_1 + d2#Me_2 drop d2#Me_2); @@ -661,7 +661,7 @@ def test_GL_300_25(self): def test_GL_300_26(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + d2#Me_2 drop d2#Me_2); @@ -677,7 +677,7 @@ def test_GL_300_26(self): def test_GL_300_27(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_2 drop d2#Me_2); @@ -695,7 +695,7 @@ def test_GL_300_27(self): def test_GL_300_28(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d1#Me_1A drop d2#Me_2); @@ -712,7 +712,7 @@ def test_GL_300_28(self): def test_GL_300_29(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := Me_1 + Me_2 + d2#Me_1A drop d2#Me_2); @@ -730,7 +730,7 @@ def test_GL_300_29(self): def test_GL_300_30(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + Me_3 drop d2#Me_2); @@ -748,7 +748,7 @@ def test_GL_300_30(self): def test_GL_300_31(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + d1#Me_1+ d2#Me_2 + Me_2 drop d2#Me_2); @@ -766,7 +766,7 @@ def test_GL_300_31(self): def test_GL_300_32(self): """ - full join + Full join Dataset --> Dataset Status: OK Expression: DS_r := full_join (DS_1 as d1, DS_2 as d2 calc Me_4 := d2#Me_1A + d1#Me_1+ d2#Me_2 + d1#Me_2 drop d2#Me_2); diff --git a/tests/NewOperators/Case/test_case.py b/tests/NewOperators/Case/test_case.py index d31c36f3..db088139 100644 --- a/tests/NewOperators/Case/test_case.py +++ b/tests/NewOperators/Case/test_case.py @@ -39,8 +39,16 @@ ] error_param = [ - ("10", "x := 1; DS_r := case when DS_cond then 1 when x = 2 then 2 else 0;", "2-1-9-1"), - ("11", "x := 1; DS_r := case when x = 1 then 1 when x = 2 then DS_1 else 0;", "2-1-9-3"), + ( + "10", + "x := 1; DS_r := case when DS_cond then 1 when x = 2 then 2 else 0;", + "2-1-9-1", + ), + ( + "11", + "x := 1; DS_r := case when x = 1 then 1 when x = 2 then DS_1 else 0;", + "2-1-9-3", + ), ("12", "DS_r := DS_1 [calc Me_3 := case when Me_1 then 1 else 0];", "2-1-9-4"), ("13", "DS_r := case when DS_1 then DS_1 else null;", "2-1-9-5"), ("14", "DS_r := case when DS_cond1 then 1 else null;", "1-1-1-4"), diff --git a/tests/NewOperators/Time/test_datediff.py b/tests/NewOperators/Time/test_datediff.py index a01695a4..471e7ba8 100644 --- a/tests/NewOperators/Time/test_datediff.py +++ b/tests/NewOperators/Time/test_datediff.py @@ -12,15 +12,15 @@ pytestmark = mark.input_path(Path(__file__).parent / "data") ds_param = [ - ("21", 'DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];'), - ("22", 'DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];'), - ("23", 'DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];'), - ("24", 'DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];'), + ("21", "DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];"), + ("22", "DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];"), + ("23", "DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];"), + ("24", "DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];"), ] error_param = [ - ("25", 'DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];', 
"1-1-1-2"), - ("26", 'DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];', "1-1-1-2"), + ("25", "DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];", "1-1-1-2"), + ("26", "DS_r := DS_1[calc Me_3 := datediff(Me_1, Me_2)];", "1-1-1-2"), ] scalar_time_params = [ @@ -37,6 +37,7 @@ ('datediff(cast("2022Q2",time_period),cast("2023-06-30",date))', "1-1-1-2"), ] + @pytest.mark.parametrize("code, expression", ds_param) def test_case_ds(load_input, load_reference, code, expression): warnings.filterwarnings("ignore", category=FutureWarning) @@ -45,6 +46,7 @@ def test_case_ds(load_input, load_reference, code, expression): result = interpreter.visit(ast) assert result == load_reference + @pytest.mark.parametrize("text, reference", scalar_time_params) def test_unary_time_scalar(text, reference): warnings.filterwarnings("ignore", category=FutureWarning) @@ -69,6 +71,7 @@ def test_errors(load_input, code, expression, error_code): print(f"\n{error_code} != {context.value.args[1]}") assert result + @pytest.mark.parametrize("text, exception_message", scalar_time_error_params) def test_errors_time_scalar(text, exception_message): warnings.filterwarnings("ignore", category=FutureWarning) diff --git a/tests/NewOperators/Time/test_new_time.py b/tests/NewOperators/Time/test_new_time.py index e27108fe..9a7af5a6 100644 --- a/tests/NewOperators/Time/test_new_time.py +++ b/tests/NewOperators/Time/test_new_time.py @@ -31,8 +31,8 @@ error_param = [ ("15", 'DS_r := dateadd(DS_1, DS_1, "D");', "2-1-19-12"), ("16", 'DS_r := dateadd(DS_1, "D", "D");', "2-1-19-13"), - ("17", 'DS_r := dateadd(DS_1, 1, DS_1);', "2-1-19-12"), - ("18", 'DS_r := dateadd(DS_1, 1, 1);', "2-1-19-13"), + ("17", "DS_r := dateadd(DS_1, 1, DS_1);", "2-1-19-12"), + ("18", "DS_r := dateadd(DS_1, 1, 1);", "2-1-19-13"), ("19", 'DS_r := dateadd(DS_1, 1, "D");', "2-1-19-14"), ("20", 'DS_r := DS_1[calc Me_2 := dateadd(Me_1, 1, "D")];', "1-1-1-1"), ] diff --git a/tests/ReferenceManual/data/DataSet/input/177-DS_1.csv b/tests/ReferenceManual/data/DataSet/input/177-DS_1.csv new file mode 100644 index 00000000..fc1fb6bf --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/input/177-DS_1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +G,2019-01-01,2020Q2 +G,2020-07-01,2021Q1 +T,2020-12-31,2021Q1 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/input/178-DS_1.csv b/tests/ReferenceManual/data/DataSet/input/178-DS_1.csv new file mode 100644 index 00000000..aee19430 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/input/178-DS_1.csv @@ -0,0 +1,4 @@ +Id_1,Me_1 +G,2019-01-01 +H,2020-07-01 +T,2020-12-31 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/input/179-DS_1.csv b/tests/ReferenceManual/data/DataSet/input/179-DS_1.csv new file mode 100644 index 00000000..aee19430 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/input/179-DS_1.csv @@ -0,0 +1,4 @@ +Id_1,Me_1 +G,2019-01-01 +H,2020-07-01 +T,2020-12-31 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/input/180-DS_1.csv b/tests/ReferenceManual/data/DataSet/input/180-DS_1.csv new file mode 100644 index 00000000..a8115e6e --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/input/180-DS_1.csv @@ -0,0 +1,4 @@ +Id_1,Me_1 +G,240 +H,724 +T,1056 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/input/181-DS_1.csv b/tests/ReferenceManual/data/DataSet/input/181-DS_1.csv new file mode 100644 index 00000000..a8115e6e --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/input/181-DS_1.csv @@ -0,0 +1,4 @@ +Id_1,Me_1 +G,240 
+H,724 +T,1056 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/input/182-DS_1.csv b/tests/ReferenceManual/data/DataSet/input/182-DS_1.csv new file mode 100644 index 00000000..3dbb8043 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/input/182-DS_1.csv @@ -0,0 +1,4 @@ +Id_1,Me_1 +G,P2Y230D +H,P1Y23D +T,P3Y152D \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/input/183-DS_1.csv b/tests/ReferenceManual/data/DataSet/input/183-DS_1.csv new file mode 100644 index 00000000..ffb2e1e5 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/input/183-DS_1.csv @@ -0,0 +1,5 @@ +Id_1,Me_1 +1,0.12 +2,3.5 +3,10.7 +4, \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/input/184-DS_1.csv b/tests/ReferenceManual/data/DataSet/input/184-DS_1.csv new file mode 100644 index 00000000..893421ba --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/input/184-DS_1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +10,A,16.0 +10,B,4.0 +11,A,7.2 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/input/185-DS_1.csv b/tests/ReferenceManual/data/DataSet/input/185-DS_1.csv new file mode 100644 index 00000000..893421ba --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/input/185-DS_1.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +10,A,16.0 +10,B,4.0 +11,A,7.2 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/output/177-DS_r.csv b/tests/ReferenceManual/data/DataSet/output/177-DS_r.csv new file mode 100644 index 00000000..3de89c8b --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/output/177-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1,Me_2 +G,2019-01-01,2020Q2,546 +G,2020-07-01,2021Q1,273 +T,2020-12-31,2021Q1,90 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/output/178-DS_r.csv b/tests/ReferenceManual/data/DataSet/output/178-DS_r.csv new file mode 100644 index 00000000..6664a3a5 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/output/178-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Me_1,Me_2 +G,2019-01-01,2019-03-01 +H,2020-07-01,2020-09-01 +T,2020-12-31,2021-02-28 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/output/179-DS_r.csv b/tests/ReferenceManual/data/DataSet/output/179-DS_r.csv new file mode 100644 index 00000000..6340a8c9 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/output/179-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Me_1,Me_2 +G,2019-01-01,1 +H,2020-07-01,7 +T,2020-12-31,12 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/output/180-DS_r.csv b/tests/ReferenceManual/data/DataSet/output/180-DS_r.csv new file mode 100644 index 00000000..d4593572 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/output/180-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Me_1,Me_2 +G,240,P0Y240D +H,724,P1Y359D +T,1056,P2Y326D \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/output/181-DS_r.csv b/tests/ReferenceManual/data/DataSet/output/181-DS_r.csv new file mode 100644 index 00000000..05e211e6 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/output/181-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Me_1,Me_2 +G,240,P8M0D +H,724,P24M4D +T,1056,P35M6D \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/output/182-DS_r.csv b/tests/ReferenceManual/data/DataSet/output/182-DS_r.csv new file mode 100644 index 00000000..f8082253 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/output/182-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Me_1,Me_2 +G,P2Y230D,960 +H,P1Y23D,388 +T,P3Y152D,1247 \ No newline at end of file diff --git 
a/tests/ReferenceManual/data/DataSet/output/183-DS_r.csv b/tests/ReferenceManual/data/DataSet/output/183-DS_r.csv new file mode 100644 index 00000000..55f64f52 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/output/183-DS_r.csv @@ -0,0 +1,5 @@ +Id_1,Me_1,Me_2 +1,0.12,0 +2,3.5,1 +3,10.7,10 +4,,100 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/output/184-DS_r.csv b/tests/ReferenceManual/data/DataSet/output/184-DS_r.csv new file mode 100644 index 00000000..dfab2bef --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/output/184-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1 +10,A,0.657706 +10,B,0.401591 +11,A,0.017412 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataSet/output/185-DS_r.csv b/tests/ReferenceManual/data/DataSet/output/185-DS_r.csv new file mode 100644 index 00000000..25ca8542 --- /dev/null +++ b/tests/ReferenceManual/data/DataSet/output/185-DS_r.csv @@ -0,0 +1,4 @@ +Id_1,Id_2,Me_1,Me_2 +10,A,16.0,0.010128 +10,B,4.0,0.765163 +11,A,7.2,0.933756 \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/input/177-DS_1.json b/tests/ReferenceManual/data/DataStructure/input/177-DS_1.json new file mode 100644 index 00000000..6504b775 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/input/177-DS_1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Id_2", + "type":"Time_Period", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Time_Period", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/input/178-DS_1.json b/tests/ReferenceManual/data/DataStructure/input/178-DS_1.json new file mode 100644 index 00000000..bdfbe7f1 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/input/178-DS_1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Date", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/input/179-DS_1.json b/tests/ReferenceManual/data/DataStructure/input/179-DS_1.json new file mode 100644 index 00000000..bdfbe7f1 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/input/179-DS_1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Date", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/input/180-DS_1.json b/tests/ReferenceManual/data/DataStructure/input/180-DS_1.json new file mode 100644 index 00000000..18a98eeb --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/input/180-DS_1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Integer", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/input/181-DS_1.json b/tests/ReferenceManual/data/DataStructure/input/181-DS_1.json new file mode 
100644 index 00000000..18a98eeb --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/input/181-DS_1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Integer", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/input/182-DS_1.json b/tests/ReferenceManual/data/DataStructure/input/182-DS_1.json new file mode 100644 index 00000000..b75537e5 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/input/182-DS_1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "String", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/input/183-DS_1.json b/tests/ReferenceManual/data/DataStructure/input/183-DS_1.json new file mode 100644 index 00000000..a7dd0c8d --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/input/183-DS_1.json @@ -0,0 +1,21 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type":"Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/input/184-DS_1.json b/tests/ReferenceManual/data/DataStructure/input/184-DS_1.json new file mode 100644 index 00000000..7c6e4b22 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/input/184-DS_1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type":"Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Id_2", + "type": "String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/input/185-DS_1.json b/tests/ReferenceManual/data/DataStructure/input/185-DS_1.json new file mode 100644 index 00000000..7c6e4b22 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/input/185-DS_1.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_1", + "DataStructure": [ + { + "name": "Id_1", + "type":"Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Id_2", + "type": "String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/output/177-DS_r.json b/tests/ReferenceManual/data/DataStructure/output/177-DS_r.json new file mode 100644 index 00000000..4407374f --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/output/177-DS_r.json @@ -0,0 +1,33 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Id_2", + "type":"Time_Period", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Time_Period", + "role": "Measure", + "nullable": true + }, + { + "name": "Me_2", + "type": "Integer", + "role": "Measure", + "nullable": true + } 
+ ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/output/178-DS_r.json b/tests/ReferenceManual/data/DataStructure/output/178-DS_r.json new file mode 100644 index 00000000..ac7c4321 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/output/178-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Date", + "role": "Measure", + "nullable": true + }, + { + "name": "Me_2", + "type": "Date", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/output/179-DS_r.json b/tests/ReferenceManual/data/DataStructure/output/179-DS_r.json new file mode 100644 index 00000000..24e44ac8 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/output/179-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Date", + "role": "Measure", + "nullable": true + }, + { + "name": "Me_2", + "type": "Integer", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/output/180-DS_r.json b/tests/ReferenceManual/data/DataStructure/output/180-DS_r.json new file mode 100644 index 00000000..1c907544 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/output/180-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Integer", + "role": "Measure", + "nullable": true + }, + { + "name": "Me_2", + "type": "String", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/output/181-DS_r.json b/tests/ReferenceManual/data/DataStructure/output/181-DS_r.json new file mode 100644 index 00000000..1c907544 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/output/181-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Integer", + "role": "Measure", + "nullable": true + }, + { + "name": "Me_2", + "type": "String", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/output/182-DS_r.json b/tests/ReferenceManual/data/DataStructure/output/182-DS_r.json new file mode 100644 index 00000000..54524aa8 --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/output/182-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type":"String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "String", + "role": "Measure", + "nullable": true + }, + { + "name": "Me_2", + "type": "Integer", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/output/183-DS_r.json b/tests/ReferenceManual/data/DataStructure/output/183-DS_r.json new file mode 100644 index 00000000..edceef5f --- /dev/null +++ 
b/tests/ReferenceManual/data/DataStructure/output/183-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type":"Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + }, + { + "name": "Me_2", + "type": "Integer", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/output/184-DS_r.json b/tests/ReferenceManual/data/DataStructure/output/184-DS_r.json new file mode 100644 index 00000000..e1b9c07c --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/output/184-DS_r.json @@ -0,0 +1,27 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type":"Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Id_2", + "type": "String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/DataStructure/output/185-DS_r.json b/tests/ReferenceManual/data/DataStructure/output/185-DS_r.json new file mode 100644 index 00000000..cc564f7b --- /dev/null +++ b/tests/ReferenceManual/data/DataStructure/output/185-DS_r.json @@ -0,0 +1,33 @@ +{ + "datasets": [ + { + "name": "DS_r", + "DataStructure": [ + { + "name": "Id_1", + "type":"Integer", + "role": "Identifier", + "nullable": false + }, + { + "name": "Id_2", + "type": "String", + "role": "Identifier", + "nullable": false + }, + { + "name": "Me_1", + "type": "Number", + "role": "Measure", + "nullable": true + }, + { + "name": "Me_2", + "type": "Number", + "role": "Measure", + "nullable": true + } + ] + } + ] +} \ No newline at end of file diff --git a/tests/ReferenceManual/data/vtl/RM177.vtl b/tests/ReferenceManual/data/vtl/RM177.vtl new file mode 100644 index 00000000..e63fc007 --- /dev/null +++ b/tests/ReferenceManual/data/vtl/RM177.vtl @@ -0,0 +1 @@ +DS_r := DS_1[ calc Me_2 := datediff( Id_2, Me_1 ) ]; \ No newline at end of file diff --git a/tests/ReferenceManual/data/vtl/RM178.vtl b/tests/ReferenceManual/data/vtl/RM178.vtl new file mode 100644 index 00000000..8c9d1265 --- /dev/null +++ b/tests/ReferenceManual/data/vtl/RM178.vtl @@ -0,0 +1 @@ +DS_r := DS_1[ calc Me_2 := dateadd( Me_1, 2, "M" ) ]; \ No newline at end of file diff --git a/tests/ReferenceManual/data/vtl/RM179.vtl b/tests/ReferenceManual/data/vtl/RM179.vtl new file mode 100644 index 00000000..709a03f7 --- /dev/null +++ b/tests/ReferenceManual/data/vtl/RM179.vtl @@ -0,0 +1 @@ +DS_r := DS_1[ calc Me_2 := month (Me_1) ]; \ No newline at end of file diff --git a/tests/ReferenceManual/data/vtl/RM180.vtl b/tests/ReferenceManual/data/vtl/RM180.vtl new file mode 100644 index 00000000..34968752 --- /dev/null +++ b/tests/ReferenceManual/data/vtl/RM180.vtl @@ -0,0 +1 @@ +DS_r := DS_1[ calc Me_2 := daytoyear (Me_1) ]; \ No newline at end of file diff --git a/tests/ReferenceManual/data/vtl/RM181.vtl b/tests/ReferenceManual/data/vtl/RM181.vtl new file mode 100644 index 00000000..61d99055 --- /dev/null +++ b/tests/ReferenceManual/data/vtl/RM181.vtl @@ -0,0 +1 @@ +DS_r := DS_1[ calc Me_2 := daytomonth (Me_1) ]; \ No newline at end of file diff --git a/tests/ReferenceManual/data/vtl/RM182.vtl b/tests/ReferenceManual/data/vtl/RM182.vtl new file mode 100644 index 00000000..0bfa578f --- /dev/null +++ b/tests/ReferenceManual/data/vtl/RM182.vtl @@ -0,0 +1 @@ 
+DS_r := DS_1[ calc Me_2 := yeartoday (Me_1) ];
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl/RM183.vtl b/tests/ReferenceManual/data/vtl/RM183.vtl
new file mode 100644
index 00000000..a11d4c86
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl/RM183.vtl
@@ -0,0 +1,4 @@
+DS_r := DS_1 [calc Me_2 := case when Me_1 <= 1 then 0
+                                when Me_1 > 1 and Me_1 <= 10 then 1
+                                when Me_1 > 10 then 10
+                                else 100];
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl/RM184.vtl b/tests/ReferenceManual/data/vtl/RM184.vtl
new file mode 100644
index 00000000..84afd807
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl/RM184.vtl
@@ -0,0 +1 @@
+DS_r := random(DS_1, 5);
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl/RM185.vtl b/tests/ReferenceManual/data/vtl/RM185.vtl
new file mode 100644
index 00000000..5e479e45
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl/RM185.vtl
@@ -0,0 +1 @@
+DS_r := DS_1 [ calc Me_2 := random( Me_1, 8 ) ];
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM177.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM177.vtl
new file mode 100644
index 00000000..4e4ecc84
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl_defined_operators/RM177.vtl
@@ -0,0 +1,5 @@
+define operator Test177 (y component, z component)
+    returns component is
+    datediff(y, z)
+end operator;
+DS_r := DS_1[calc Me_2 := Test177( Id_2, Me_1 )];
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM178.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM178.vtl
new file mode 100644
index 00000000..ccd7f005
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl_defined_operators/RM178.vtl
@@ -0,0 +1,5 @@
+define operator Test178 (y component, s scalar)
+    returns component is
+    dateadd(y, s, "M")
+end operator;
+DS_r := DS_1[calc Me_2 := Test178(Me_1, 2)];
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM179.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM179.vtl
new file mode 100644
index 00000000..7b2e4218
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl_defined_operators/RM179.vtl
@@ -0,0 +1,5 @@
+define operator Test179 (y component)
+    returns component is
+    month(y)
+end operator;
+DS_r := DS_1[calc Me_2 := Test179(Me_1)];
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM180.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM180.vtl
new file mode 100644
index 00000000..cddb1599
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl_defined_operators/RM180.vtl
@@ -0,0 +1,5 @@
+define operator Test180 (y component)
+    returns component is
+    daytoyear(y)
+end operator;
+DS_r := DS_1[calc Me_2 := Test180(Me_1)];
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM181.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM181.vtl
new file mode 100644
index 00000000..69573786
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl_defined_operators/RM181.vtl
@@ -0,0 +1,5 @@
+define operator Test181 (y component)
+    returns component is
+    daytomonth(y)
+end operator;
+DS_r := DS_1[calc Me_2 := Test181(Me_1)];
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM182.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM182.vtl
new file mode 100644
index 00000000..7da68993
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl_defined_operators/RM182.vtl
@@ -0,0 +1,5 @@
+define operator Test182 (y component)
+    returns component is
+    yeartoday(y)
+end operator;
+DS_r := DS_1[calc Me_2 := Test182(Me_1)];
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM183.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM183.vtl
new file mode 100644
index 00000000..b50fc533
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl_defined_operators/RM183.vtl
@@ -0,0 +1,8 @@
+define operator Test183 (y component)
+    returns component is
+    case when y <= 1 then 0
+         when y > 1 and y <= 10 then 1
+         when y > 10 then 10
+         else 100
+end operator;
+DS_r := DS_1 [calc Me_2 := Test183(Me_1)];
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM184.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM184.vtl
new file mode 100644
index 00000000..23821acd
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl_defined_operators/RM184.vtl
@@ -0,0 +1,5 @@
+define operator Test184 (x dataset)
+    returns dataset is
+    random(x, 5)
+end operator;
+DS_r := Test184(DS_1);
\ No newline at end of file
diff --git a/tests/ReferenceManual/data/vtl_defined_operators/RM185.vtl b/tests/ReferenceManual/data/vtl_defined_operators/RM185.vtl
new file mode 100644
index 00000000..4b08c1ea
--- /dev/null
+++ b/tests/ReferenceManual/data/vtl_defined_operators/RM185.vtl
@@ -0,0 +1,5 @@
+define operator Test185 (c component)
+    returns component is
+    random(c, 8)
+end operator;
+DS_r := DS_1 [calc Me_2 := Test185(Me_1)];
\ No newline at end of file
diff --git a/tests/ReferenceManual/referencemanualtests.md b/tests/ReferenceManual/referencemanualtests.md
index 6ea1e02d..0ee7886a 100644
--- a/tests/ReferenceManual/referencemanualtests.md
+++ b/tests/ReferenceManual/referencemanualtests.md
@@ -6,49 +6,49 @@ The official site [VTL-sdmx](https://sdmx.org/?page_id=5096)
 >>>
 Test number | VTL expresion | Test result
 :------------: | :------------- |:-------------:
-1 | DS_r <- DS_1; | WIP
-2 | DS_r := DS_1; | WIP
-3 | DS_r := DS_1#Me_1; | WIP
-4 | DS_r := DS_1#Id_1; | WIP
-5 | DS_r := DS_1#At_1; | WIP
+1 | DS_r <- DS_1; | OK
+2 | DS_r := DS_1; | OK
+3 | DS_r := DS_1#Me_1; | OK
+4 | DS_r := DS_1#Id_1; | OK
+5 | DS_r := DS_1#At_1; | OK
 >>>

 ## 2. Join Operators.
 >>>
 Test number | VTL expresion | Test result
 :------------: | :------------- |:-------------:
-6 | DS_r := inner_join ( DS_1 as d1, DS_2 as d2 keep Me_1, d2#Me_2, Me_1A); | WIP
-7 | DS_r := left_join DS_1 as d1, DS_2 as d2 keep Me_1, d2#Me_2, Me_1A ); | WIP
-8 | DS_r := full_join ( DS_1 as d1, DS_2 as d2 keep Me_1, d2#Me_2, Me_1A ); | WIP
-9 | DS_r := cross_join (DS_1 as d1, DS_2 as d2 rename d1#Id_1 to Id11, d1#Id_2 to Id12, d2#Id_1 to Id21, d2#Id_2 to Id22, d1#Me_2 to Me12 );| WIP
-10 | DS_r := inner_join (DS_1 as d1, DS_2 as d2 filter Me_1 = "A" calc Me_4 := Me_1 \|\| Me_1A drop d1#Me_2); | WIP
-11 | DS_r := inner_join ( DS_1 calc Me_2 := Me_2 \|\| "_NEW" filter Id_2 ="B" keep Me_1, Me_2); | WIP
-12 | DS_r := inner_join ( DS_1 as d1, DS_2 as d2 apply d1 \|\| d2); |
+6 | DS_r := inner_join ( DS_1 as d1, DS_2 as d2 keep Me_1, d2#Me_2, Me_1A); | OK
+7 | DS_r := left_join ( DS_1 as d1, DS_2 as d2 keep Me_1, d2#Me_2, Me_1A ); | OK
+8 | DS_r := full_join ( DS_1 as d1, DS_2 as d2 keep Me_1, d2#Me_2, Me_1A ); | OK
+9 | DS_r := cross_join (DS_1 as d1, DS_2 as d2 rename d1#Id_1 to Id11, d1#Id_2 to Id12, d2#Id_1 to Id21, d2#Id_2 to Id22, d1#Me_2 to Me12 );| OK
+10 | DS_r := inner_join (DS_1 as d1, DS_2 as d2 filter Me_1 = "A" calc Me_4 := Me_1 \|\| Me_1A drop d1#Me_2); | OK
+11 | DS_r := inner_join ( DS_1 calc Me_2 := Me_2 \|\| "_NEW" filter Id_2 ="B" keep Me_1, Me_2); | OK
+12 | DS_r := inner_join ( DS_1 as d1, DS_2 as d2 apply d1 \|\| d2); | OK
 >>>

 ## 3. String Operators.

 >>>
 Test number | VTL expresion | Test result
 :------------: |:-----------------------------------------------------------------------|:-------------:
-13 | DS_r := DS_1 \|\| DS_2; | WIP
-14 | DS_r := DS_1[calc Me_2:= Me_1 \|\| " world"]; | WIP
-15 | DS_r := rtrim(DS_1); | WIP
-16 | DS_r := DS_1[ calc Me_2:= rtrim(Me_1)]; | WIP
-17 | DS_r := upper(DS_1); | WIP
-18 | DS_r := DS_1[calc Me_2:= upper(Me_1)]; | WIP
-19 | DS_r:= substr ( DS_1 , 7 ); | WIP
-20 | DS_r:= substr ( DS_1 , 1 , 5 ); | WIP
-21 | DS_r:= DS_1 [ calc Me_2:= substr ( Me_2 , 1 , 5 ) ]; | WIP
-22 | DS_r := replace (ds_1,"ello","i"); | WIP
-23 | DS_r := DS_1[ calc Me_2:= replace (Me_1,"ello","i")]; | WIP
-24 | DS_r:= instr(ds_1,"hello"); | WIP
-25 | DS_r := DS_1[calc Me_2:=instr(Me_1,"hello")]; | WIP
-26 | DS_r := DS_1 [calc Me_10:= instr(Me_1, "o"), Me_20:=instr(Me_2, "o")]; | WIP
-27 | DS_r := instr(DS_1, "o"); | WIP
-28 | DS_r := length(DS_1); | WIP
-29 | DS_r:= DS_1[calc Me_2:=length(Me_1)]; | WIP
-30 | DS_r := DS_2 [calc Me_10:= length(Me_1), Me_20:=length(Me_2)]; | WIP
-31 | DS_r := length(DS_2); | WIP
+13 | DS_r := DS_1 \|\| DS_2; | OK
+14 | DS_r := DS_1[calc Me_2:= Me_1 \|\| " world"]; | OK
+15 | DS_r := rtrim(DS_1); | OK
+16 | DS_r := DS_1[ calc Me_2:= rtrim(Me_1)]; | OK
+17 | DS_r := upper(DS_1); | OK
+18 | DS_r := DS_1[calc Me_2:= upper(Me_1)]; | OK
+19 | DS_r:= substr ( DS_1 , 7 ); | OK
+20 | DS_r:= substr ( DS_1 , 1 , 5 ); | OK
+21 | DS_r:= DS_1 [ calc Me_2:= substr ( Me_2 , 1 , 5 ) ]; | OK
+22 | DS_r := replace (ds_1,"ello","i"); | OK
+23 | DS_r := DS_1[ calc Me_2:= replace (Me_1,"ello","i")]; | OK
+24 | DS_r:= instr(ds_1,"hello"); | OK
+25 | DS_r := DS_1[calc Me_2:=instr(Me_1,"hello")]; | OK
+26 | DS_r := DS_1 [calc Me_10:= instr(Me_1, "o"), Me_20:=instr(Me_2, "o")]; | OK
+27 | DS_r := instr(DS_1, "o"); | OK
+28 | DS_r := length(DS_1); | OK
+29 | DS_r:= DS_1[calc Me_2:=length(Me_1)]; | OK
+30 | DS_r := DS_2 [calc Me_10:= length(Me_1), Me_20:=length(Me_2)]; | OK
+31 | DS_r := length(DS_2); | OK
 >>>
@@ -56,71 +56,73 @@ Test number | VTL expresion | Test result
 :------------: | :------------- |:-------------:
-32 | DS_r := + DS_1; | WIP
-33 | DS_r := DS_1 [calc Me_3 := + Me_1 ]; | WIP
-34 | DS_r := - DS_1; | WIP
-35 | DS_r := DS_1 [ calc Me_3 := - Me_1 ]; | WIP
-36 | DS_r := DS_1 + DS_2; | WIP
-37 | DS_r := DS_1 + 3; | WIP
-38 | DS_r := DS_1 [ calc Me_3 := Me_1 + 3.0 ]; | WIP
-39 | DS_r := DS_1 - DS_2; | WIP
-40 | DS_r := DS_1 - 3; | WIP
-41 | DS_r := DS_1 [ calc Me_3 := Me_1 - 3 ]; | WIP
-42 | DS_r := DS_1 * DS_2; | WIP
-43 | DS_r := DS_1 * -3; | WIP
-44 | DS_r := DS_1 [ calc Me_3 := Me_1 * Me_2 ]; | WIP
-45 | DS_r := DS_1 / DS_2; | WIP
-46 | DS_r := DS_1 / 10; | WIP
-47 | DS_r := DS_1 [ calc Me_3 := Me_2 / Me_1 ]; | WIP
-48 | DS_r := mod ( DS_1, DS_2 ); | WIP
-49 | DS_r := mod ( DS_1, 15 ); | WIP
-50 | DS_r := DS_1[ calc Me_3 := mod( DS_1#Me_1, 3.0 ) ]; | WIP
-51 | DS_r := round(DS_1, 0); | WIP
-52 | DS_r := DS_1 [ calc Me_10:= round( Me_1 ) ]; | WIP
-53 | DS_r := DS_1 [ calc Me_20:= round( Me_1 , -1 ) ]; | WIP
-54 | DS_r := trunc(DS_1, 0); | WIP
-55 | DS_r := DS_1[ calc Me_10:= trunc( Me_1 ) ]; | WIP
-56 | DS_r := DS_1[ calc Me_20:= trunc( Me_1 , -1 ) ]; | WIP
-57 | DS_r := ceil (DS_1); | WIP
-58 | DS_r := DS_1 [ calc Me_10 := ceil (Me_1) ]; | WIP
-59 | DS_r := floor ( DS_1 ); | WIP
-60 | DS_r := DS_1 [ calc Me_10 := floor (Me_1) ]; | WIP
-61 | DS_r := abs ( DS_1 ); | WIP
-62 | DS_r := DS_1 [ calc Me_10 := abs(Me_1) ]; | WIP
-63 | DS_r := exp(DS_1); | WIP
-64 | DS_r := DS_1 [ calc Me_1 := exp ( Me_1 ) ]; | WIP
-65 | DS_r := ln(DS_1); | WIP
-66 | DS_r := DS_1 [ calc Me_2 := ln ( DS_1#Me_1 ) ]; | WIP
-67 | DS_r := power(DS_1, 2); | WIP
-68 | DS_r := DS_1[ calc Me_1 := power(Me_1, 2) ]; | WIP
-69 | DS_r := log ( DS_1, 2 ); | WIP
-70 | DS_r := DS_1 [ calc Me_1 := log (Me_1, 2) ]; | WIP
-71 | DS_r := sqrt(DS_1); | WIP
-72 | DS_r := DS_1 [ calc Me_1 := sqrt ( Me_1 ) ]; | WIP
+32 | DS_r := + DS_1; | OK
+33 | DS_r := DS_1 [calc Me_3 := + Me_1 ]; | OK
+34 | DS_r := - DS_1; | OK
+35 | DS_r := DS_1 [ calc Me_3 := - Me_1 ]; | OK
+36 | DS_r := DS_1 + DS_2; | OK
+37 | DS_r := DS_1 + 3; | OK
+38 | DS_r := DS_1 [ calc Me_3 := Me_1 + 3.0 ]; | OK
+39 | DS_r := DS_1 - DS_2; | OK
+40 | DS_r := DS_1 - 3; | OK
+41 | DS_r := DS_1 [ calc Me_3 := Me_1 - 3 ]; | OK
+42 | DS_r := DS_1 * DS_2; | OK
+43 | DS_r := DS_1 * -3; | OK
+44 | DS_r := DS_1 [ calc Me_3 := Me_1 * Me_2 ]; | OK
+45 | DS_r := DS_1 / DS_2; | OK
+46 | DS_r := DS_1 / 10; | OK
+47 | DS_r := DS_1 [ calc Me_3 := Me_2 / Me_1 ]; | OK
+48 | DS_r := mod ( DS_1, DS_2 ); | OK
+49 | DS_r := mod ( DS_1, 15 ); | OK
+50 | DS_r := DS_1[ calc Me_3 := mod( DS_1#Me_1, 3.0 ) ]; | OK
+51 | DS_r := round(DS_1, 0); | OK
+52 | DS_r := DS_1 [ calc Me_10:= round( Me_1 ) ]; | OK
+53 | DS_r := DS_1 [ calc Me_20:= round( Me_1 , -1 ) ]; | OK
+54 | DS_r := trunc(DS_1, 0); | OK
+55 | DS_r := DS_1[ calc Me_10:= trunc( Me_1 ) ]; | OK
+56 | DS_r := DS_1[ calc Me_20:= trunc( Me_1 , -1 ) ]; | OK
+57 | DS_r := ceil (DS_1); | OK
+58 | DS_r := DS_1 [ calc Me_10 := ceil (Me_1) ]; | OK
+59 | DS_r := floor ( DS_1 ); | OK
+60 | DS_r := DS_1 [ calc Me_10 := floor (Me_1) ]; | OK
+61 | DS_r := abs ( DS_1 ); | OK
+62 | DS_r := DS_1 [ calc Me_10 := abs(Me_1) ]; | OK
+63 | DS_r := exp(DS_1); | OK
+64 | DS_r := DS_1 [ calc Me_1 := exp ( Me_1 ) ]; | OK
+65 | DS_r := ln(DS_1); | OK
+66 | DS_r := DS_1 [ calc Me_2 := ln ( DS_1#Me_1 ) ]; | OK
+67 | DS_r := power(DS_1, 2); | OK
+68 | DS_r := DS_1[ calc Me_1 := power(Me_1, 2) ]; | OK
+69 | DS_r := log ( DS_1, 2 ); | OK
+70 | DS_r := DS_1 [ calc Me_1 := log (Me_1, 2) ]; | OK
+71 | DS_r := sqrt(DS_1); | OK
+72 | DS_r := DS_1 [ calc Me_1 := sqrt ( Me_1 ) ]; | OK
+184 | DS_r := random(DS_1, 5); | OK
+185 | DS_r := DS_1 [ calc Me_2 := random( Me_1, 8 ) ]; | OK
 >>>

 ## 5. Comparison Operators.

 >>>
 Test number | VTL expresion | Test result
 :------------: |:----------------------------------------------------------|:-------------:
-73 | DS_r := DS_1 = 0.08; | WIP
-74 | DS_r := DS_1 [ calc Me_2 := Me_1 = 0.08 ]; | WIP
-75 | DS_r := DS_1 <> DS_2; | WIP
-76 | DS_r := DS_1 [ calc Me_2 := Me_1<>7.5 ]; | WIP
-77 | DS_r := DS_1 > 20; | WIP
-78 | DS_r := DS_1 [ calc Me_2 := Me_1 > 20 ]; | WIP
-79 | DS_r:= DS_1 > DS_2; | WIP
-80 | DS_r := DS_1 < 15000000; | WIP
-81 | DS_r:= between(ds1, 5,10); | WIP
-82 | DS_r := DS_1 in { 0, 3, 6, 12 }; | WIP
-83 | DS_r := DS_1 [ calc Me_2:= Me_1 in { 0, 3, 6, 12 } ]; | WIP
-84 | DS_r := DS_1#Id_2 in myGeoValueDomain; |
-85 | DS_r:= match_characters(ds1, "[:alpha:]{2}[:digit:]{3}"); |
-86 | DS_r := isnull(DS_1); | WIP
-87 | DS_r := DS_1[ calc Me_2 := isnull(Me_1) ]; | WIP
-88 | DS_r := exists_in (DS_1, DS_2, all); | WIP
-89 | DS_r := exists_in (DS_1, DS_2, true); | WIP
-90 | DS_r := exists_in (DS_1, DS_2, false); | WIP
+73 | DS_r := DS_1 = 0.08; | OK
+74 | DS_r := DS_1 [ calc Me_2 := Me_1 = 0.08 ]; | OK
+75 | DS_r := DS_1 <> DS_2; | OK
+76 | DS_r := DS_1 [ calc Me_2 := Me_1<>7.5 ]; | OK
+77 | DS_r := DS_1 > 20; | OK
+78 | DS_r := DS_1 [ calc Me_2 := Me_1 > 20 ]; | OK
+79 | DS_r:= DS_1 > DS_2; | OK
+80 | DS_r := DS_1 < 15000000; | OK
+81 | DS_r:= between(ds1, 5,10); | OK
+82 | DS_r := DS_1 in { 0, 3, 6, 12 }; | OK
+83 | DS_r := DS_1 [ calc Me_2:= Me_1 in { 0, 3, 6, 12 } ]; | OK
+84 | DS_r := DS_1#Id_2 in myGeoValueDomain; | OK
+85 | DS_r:= match_characters(ds1, "[:alpha:]{2}[:digit:]{3}"); | OK
+86 | DS_r := isnull(DS_1); | OK
+87 | DS_r := DS_1[ calc Me_2 := isnull(Me_1) ]; | OK
+88 | DS_r := exists_in (DS_1, DS_2, all); | OK
+89 | DS_r := exists_in (DS_1, DS_2, true); | OK
+90 | DS_r := exists_in (DS_1, DS_2, false); | OK
 >>>
@@ -129,70 +131,75 @@ Test number | VTL expresion | Te
 >>>
 Test number | VTL expresion | Test result
 :------------: | :------------- |:-------------:
-91 | DS_r:= DS_1 and DS_2; | WIP
-92 | DS_r := DS_1 [ calc Me_2:= Me_1 and true ]; | WIP
-93 | DS_r:= DS_1 or DS_2; | WIP
-94 | DS_r:= DS_1 [ calc Me_2:= Me_1 or true ]; | WIP
-95 | DS_r:=DS_1 xor DS_2; | WIP
-96 | DS_r:= DS_1 [ calc Me_2:= Me_1 xor true ]; | WIP
-97 | DS_r:= not DS_1; | WIP
-98 | DS_r:= DS_1 [ calc Me_2 := not Me_1 ]; | WIP
+91 | DS_r:= DS_1 and DS_2; | OK
+92 | DS_r := DS_1 [ calc Me_2:= Me_1 and true ]; | OK
+93 | DS_r:= DS_1 or DS_2; | OK
+94 | DS_r:= DS_1 [ calc Me_2:= Me_1 or true ]; | OK
+95 | DS_r:=DS_1 xor DS_2; | OK
+96 | DS_r:= DS_1 [ calc Me_2:= Me_1 xor true ]; | OK
+97 | DS_r:= not DS_1; | OK
+98 | DS_r:= DS_1 [ calc Me_2 := not Me_1 ]; | OK
 >>>

 ## 7. Time Operators.
 >>>
-Test number | VTL expresion | Test result
-:------------: | :------------- |:-------------:
-99 | DS_r := period_indicator ( DS_1 ); | WIP
-100 | DS_r := DS_1 [ filter period_indicator ( Id_3 ) = "A" ]; |
-101 | DS_r := fill_time_series ( DS_1, single ); |
-102 | DS_r := fill_time_series ( DS_1, all ); |
-103 | DS_r := fill_time_series ( DS_2, single ); |
-104 | DS_r := fill_time_series ( DS_2, all ); |
-105 | DS_r := fill_time_series ( DS_3, single ); | WIP
-106 | DS_r := fill_time_series ( DS_3, all ); | WIP
-107 | DS_r := fill_time_series ( DS_4, single ); | WIP
-108 | DS_r := fill_time_series ( DS_4, all ); | WIP
-109 | DS_r := flow_to_stock ( DS_1 ); | WIP
-110 | DS_r := flow_to_stock ( DS_2 ); | WIP
-111 | DS_r := flow_to_stock ( DS_3 ); | WIP
-112 | DS_r := flow_to_stock ( DS_4 ); | WIP
-113 | DS_r := stock_to_flow ( DS_1 ); | WIP
-114 | DS_r := stock_to_flow ( DS_2 ); | WIP
-115 | DS_r := stock_to_flow ( DS_3 ); | WIP
-116 | DS_r := stock_to_flow ( DS_4 ); | WIP
-117 | DS_r := timeshift ( DS_1 , -1 ); |
-118 | DS_r := timeshift ( DS_2 , 2 ); |
-119 | DS_r := timeshift ( DS_3 , 1 ); | WIP
-120 | DS_r := time_shift ( DS_3 , -1 ); | WIP
-121 | DS_r := sum ( DS_1 ) group all time_agg ( "A" , _ , Me_1 ); |
-122 | DS_r := time_agg ( "Q", cast ( "2012M01", time_period, "YYYY\MMM" ) ); |
-123 | time_agg( "Q", cast("20120213", date, "YYYYMMDD"), _ , last ); |
-124 | time_agg(cast( "A", "2012M1", date, "YYYYMMDD"), _ , first ); |
-125 | cast ( current_date, string, "YYYY.MM.DD" ); |
-
+Test number | VTL expression | Test result
+:------------: |:------------------------------------------------------------------------|:-------------:
+99 | DS_r := period_indicator ( DS_1 ); | OK
+100 | DS_r := DS_1 [ filter period_indicator ( Id_3 ) = "A" ]; | OK
+101 | DS_r := fill_time_series ( DS_1, single ); | OK
+102 | DS_r := fill_time_series ( DS_1, all ); | OK
+103 | DS_r := fill_time_series ( DS_2, single ); | OK
+104 | DS_r := fill_time_series ( DS_2, all ); | OK
+105 | DS_r := fill_time_series ( DS_3, single ); | OK
+106 | DS_r := fill_time_series ( DS_3, all ); | OK
+107 | DS_r := fill_time_series ( DS_4, single ); | OK
+108 | DS_r := fill_time_series ( DS_4, all ); | OK
+109 | DS_r := flow_to_stock ( DS_1 ); | OK
+110 | DS_r := flow_to_stock ( DS_2 ); | OK
+111 | DS_r := flow_to_stock ( DS_3 ); | OK
+112 | DS_r := flow_to_stock ( DS_4 ); | OK
+113 | DS_r := stock_to_flow ( DS_1 ); | OK
+114 | DS_r := stock_to_flow ( DS_2 ); | OK
+115 | DS_r := stock_to_flow ( DS_3 ); | OK
+116 | DS_r := stock_to_flow ( DS_4 ); | OK
+117 | DS_r := timeshift ( DS_1 , -1 ); | OK
+118 | DS_r := timeshift ( DS_2 , 2 ); | OK
+119 | DS_r := timeshift ( DS_3 , 1 ); | OK
+120 | DS_r := time_shift ( DS_3 , -1 ); | OK
+121 | DS_r := sum ( DS_1 ) group all time_agg ( "A" , _ , Me_1 ); | OK
+122 | DS_r := time_agg ( "Q", cast ( "2012M01", time_period, "YYYY\MMM" ) ); | OK
+123 | time_agg( "Q", cast("20120213", date, "YYYYMMDD"), _ , last ); | OK
+124 | time_agg( "A", cast( "2012M1", date, "YYYYMMDD" ), _ , first ); | OK
+125 | cast ( current_date, string, "YYYY.MM.DD" ); | OK
+177 | DS_r := DS_1 [calc Me_2 := datediff(Id_2, Me_1)]; | OK
+178 | DS_r := DS_1[ calc Me_2 := dateadd( Me_1, 2, "M" ) ]; | OK
+179 | DS_r := DS_1[ calc Me_2 := month (Me_1) ]; | OK
+180 | DS_r := DS_1[ calc Me_2 := daytoyear (Me_1) ]; | OK
+181 | DS_r := DS_1[ calc Me_2 := daytomonth (Me_1) ]; | OK
+182 | DS_r := DS_1[ calc Me_2 := yeartoday (Me_1) ]; | OK
 >>>

 ## 8. Set Operators.
 >>>
 Test number | VTL expresion | Test result
 :------------: | :------------- |:-------------:
-126 | DS_r := union(DS_1,DS_2); | WIP
-127 | DS_r := union ( DS_1, DS_2 ); | WIP
-128 | DS_r := intersect(DS_1,DS_2); | WIP
-129 | DS_r := setdiff ( DS_1, DS_2 ); | WIP
-130 | DS_r := setdiff ( DS_1, DS_2 ); | WIP
-131 | DS_r := symdiff ( DS_1, DS_2 ); | WIP
+126 | DS_r := union(DS_1,DS_2); | OK
+127 | DS_r := union ( DS_1, DS_2 ); | OK
+128 | DS_r := intersect(DS_1,DS_2); | OK
+129 | DS_r := setdiff ( DS_1, DS_2 ); | OK
+130 | DS_r := setdiff ( DS_1, DS_2 ); | OK
+131 | DS_r := symdiff ( DS_1, DS_2 ); | OK
 >>>

 ## 9. Hierarchical aggregation.

 >>>
 Test number | VTL expresion | Test result
 :------------: | :------------- |:-------------:
-132 | DS_r := hierarchy ( DS_1, HR_1 rule Id_2 non_null ); |
-133 | DS_r := hierarchy ( DS_1, HR_1 rule Id_2 non_zero ); |
-134 | DS_r := hierarchy ( DS_1, HR_1 rule Id_2 partial_null ); |
+132 | DS_r := hierarchy ( DS_1, HR_1 rule Id_2 non_null ); | OK
+133 | DS_r := hierarchy ( DS_1, HR_1 rule Id_2 non_zero ); | OK
+134 | DS_r := hierarchy ( DS_1, HR_1 rule Id_2 partial_null ); | OK
 >>>
@@ -202,22 +209,22 @@ Test number | VTL expresion | Test result
 >>>
 Test number | VTL expresion | Test result
 :------------: |:---------------------------------------------------------------------------------------------------------|:-------------:
-135 | DS_r := avg ( DS_1 group by Id_1 ); | WIP
-136 | DS_r := sum ( DS_1 group by Id_1, Id_3 ); | WIP
-137 | DS_r := avg ( DS_1 ); |
-138 | DS_r := DS_1 [ aggr Me_2 := max ( Me_1 ) , Me_3 := min ( Me_1 ) group by Id_1 ]; |
-139 | DS_r := sum ( DS_1 over ( order by Id_1, Id_2, Id_3 data points between 1 preceding and 1 following ) ); |
-140 | DS_r := count ( DS_1 group by Id_1 ); | WIP
-141 | DS_r := count ( DS_1 group by Id_1 having count() > 2 ); |
-142 | DS_r := min ( DS_1 group by Id_1 ); | WIP
-143 | DS_r := max ( DS_1 group by Id_1 ); | WIP
-144 | DS_r := median ( DS_1 group by Id_1 ); | WIP
-145 | DS_r := sum ( DS_1 group by Id_1 ); | WIP
-146 | DS_r := avg ( DS_1 group by Id_1 ); | WIP
-147 | DS_r := stddev_pop ( DS_1 group by Id_1 ); | WIP
-148 | DS_r := stddev_samp ( DS_1 group by Id_1 ); | WIP
-149 | DS_r := var_pop ( DS_1 group by Id_1 ); | WIP
-150 | DS_r := var_samp ( DS_1 group by Id_1 ); | WIP
+135 | DS_r := avg ( DS_1 group by Id_1 ); | OK
+136 | DS_r := sum ( DS_1 group by Id_1, Id_3 ); | OK
+137 | DS_r := avg ( DS_1 ); | OK
+138 | DS_r := DS_1 [ aggr Me_2 := max ( Me_1 ) , Me_3 := min ( Me_1 ) group by Id_1 ]; | OK
+139 | DS_r := sum ( DS_1 over ( order by Id_1, Id_2, Id_3 data points between 1 preceding and 1 following ) ); | OK
+140 | DS_r := count ( DS_1 group by Id_1 ); | OK
+141 | DS_r := count ( DS_1 group by Id_1 having count() > 2 ); | OK
+142 | DS_r := min ( DS_1 group by Id_1 ); | OK
+143 | DS_r := max ( DS_1 group by Id_1 ); | OK
+144 | DS_r := median ( DS_1 group by Id_1 ); | OK
+145 | DS_r := sum ( DS_1 group by Id_1 ); | OK
+146 | DS_r := avg ( DS_1 group by Id_1 ); | OK
+147 | DS_r := stddev_pop ( DS_1 group by Id_1 ); | OK
+148 | DS_r := stddev_samp ( DS_1 group by Id_1 ); | OK
+149 | DS_r := var_pop ( DS_1 group by Id_1 ); | OK
+150 | DS_r := var_samp ( DS_1 group by Id_1 ); | OK
 >>>
@@ -226,12 +233,12 @@ Test number | VTL expresion
 >>>
 Test number | VTL expresion | Test result
 :------------: | :------------- |:-------------:
-151 | DS_r := first_value ( DS_1 over ( partition by Id_1, Id_2 order by Id_3 data points between 1 preceding and 1 following); |
-152 | DS_r := last_value ( DS_1 over ( partition by Id_1, Id_2 order by Id_3 data points between 1 preceding and 1 following ) ); |
-153 | DS_r := lag ( DS_1 , 1 over ( partition by Id_1 , Id_2 order by Id_3 ) ); |
-154 | DS_r := lead ( DS_1 , 1 over ( partition by Id_1 , Id_2 order by Id_3 ) ); |
-155 | DS_r := DS_1 [ calc Me2 := rank ( over ( partition by Id_1 , Id_2 order by Me_1 ) ) ]; |
-156 | DS_r := ratio_to_report ( DS_1 over ( partition by Id_1, Id_2 ) ) |
+151 | DS_r := first_value ( DS_1 over ( partition by Id_1, Id_2 order by Id_3 data points between 1 preceding and 1 following ) ); | OK
+152 | DS_r := last_value ( DS_1 over ( partition by Id_1, Id_2 order by Id_3 data points between 1 preceding and 1 following ) ); | OK
+153 | DS_r := lag ( DS_1 , 1 over ( partition by Id_1 , Id_2 order by Id_3 ) ); | OK
+154 | DS_r := lead ( DS_1 , 1 over ( partition by Id_1 , Id_2 order by Id_3 ) ); | OK
+155 | DS_r := DS_1 [ calc Me2 := rank ( over ( partition by Id_1 , Id_2 order by Me_1 ) ) ]; | OK
+156 | DS_r := ratio_to_report ( DS_1 over ( partition by Id_1, Id_2 ) ); | OK
 >>>
@@ -239,10 +246,10 @@ Test number | VTL expresion | Test result
 >>>
 Test number | VTL expresion | Test result
 :------------: | :------------- |:-------------:
-157 | DS_r := check_datapoint ( DS_1, dpr1 ); | WIP
-158 | DS_r := check_datapoint ( DS_1, dpr1 all ); | WIP
-159 | DS_r := check_hierarchy ( DS_1, HR_1 rule Id_2 partial_null all ); | WIP
-160 | DS_r := check ( DS1 >= DS2 imbalance DS1 - DS2 ); | WIP
+157 | DS_r := check_datapoint ( DS_1, dpr1 ); | OK
+158 | DS_r := check_datapoint ( DS_1, dpr1 all ); | OK
+159 | DS_r := check_hierarchy ( DS_1, HR_1 rule Id_2 partial_null all ); | OK
+160 | DS_r := check ( DS1 >= DS2 imbalance DS1 - DS2 ); | OK
 >>>
@@ -250,28 +257,30 @@ Test number | VTL expresion | Test result
 >>>
 Test number | VTL expresion | Test result
 :------------: | :------------- |:-------------:
-161 | DS_r := if ( DS_cond#Id_4 = ""F"" ) then DS_1 else DS_2; | WIP
-162 | DS_r := nvl ( DS_1, 0 ); | WIP
-
+161 | DS_r := if ( DS_cond#Id_4 = "F" ) then DS_1 else DS_2; | OK
+162 | DS_r := nvl ( DS_1, 0 ); | OK
+183 | DS_r := DS_1 [calc Me_2 := case when Me_1 <= 1 then 0 | OK
+    | when Me_1 > 1 and Me_1 <= 10 then 1 |
+    | when Me_1 > 10 then 10 |
+    | else 100]; |
 >>>

 ## 13. Clause Operators.
 >>>
-Test number | VTL expresion | Test result
-:------------: | :------------- |:-------------:
-163 | DS_r := DS_1 [ filter Id_1 = 1 and Me_1 < 10 ]; | WIP
-164 | DS_r := DS_1 [ calc Me_1:= Me_1 * 2 ]; | WIP
-165 | DS_r := DS_1 [ calc attribute At_1:= "EP" ]; | WIP
-166 | DS_r := DS_1 [ aggr Me_1:= sum( Me_1 ) group by Id_1 , Id_2 ]; |
-167 | DS_r := DS_1 [ aggr Me_3:= min( Me_1 ) group except Id_3 ]; |
-168 | DS_r := DS_1 [ aggr Me_1:= sum( Me_1 ), Me_2 := max( Me_1) group by Id_1 , Id_2 having avg (Me_1 ) > 2 ]; |
-169 | DS_r := DS_1 [ keep Me_1 ]; | WIP
-170 | DS_r := DS_1 [ drop At_1 ]; | WIP
-171 | DS_r := DS_1 [ rename Me_1 to Me_2, At_1 to At_2]; | WIP
-172 | DS_r := DS_1 [ pivot Id_2, Me_1 ]; | WIP
-173 | DS_r := DS_1 [ unpivot Id_2, Me_1]; | WIP
-174 | DS_r := DS_1 [ sub Id_1 = 1, Id_2 = "A" ]; | WIP
-175 | DS_r := DS_1 [ sub Id_1 = 1, Id_2 = "B", Id_3 = "YY" ]; | WIP
-176 | DS_r := DS_1 [ sub Id_2 = "A" ] + DS_1 [ sub Id_2 = "B" ]; |
-
+Test number | VTL expression | Test result
+:------------: | :------------- |:----------------------------------:
+163 | DS_r := DS_1 [ filter Id_1 = 1 and Me_1 < 10 ]; | OK
+164 | DS_r := DS_1 [ calc Me_1:= Me_1 * 2 ]; | OK
+165 | DS_r := DS_1 [ calc attribute At_1:= "EP" ]; | OK
+166 | DS_r := DS_1 [ aggr Me_1:= sum( Me_1 ) group by Id_1 , Id_2 ]; | OK
+167 | DS_r := DS_1 [ aggr Me_3:= min( Me_1 ) group except Id_3 ]; | OK
+168 | DS_r := DS_1 [ aggr Me_1:= sum( Me_1 ), Me_2 := max( Me_1) group by Id_1 , Id_2 having avg (Me_1 ) > 2 ]; | OK
+169 | DS_r := DS_1 [ keep Me_1 ]; | OK
+170 | DS_r := DS_1 [ drop At_1 ]; | OK
+171 | DS_r := DS_1 [ rename Me_1 to Me_2, At_1 to At_2]; | OK
+172 | DS_r := DS_1 [ pivot Id_2, Me_1 ]; | OK
+173 | DS_r := DS_1 [ unpivot Id_2, Me_1]; | OK
+174 | DS_r := DS_1 [ sub Id_1 = 1, Id_2 = "A" ]; | OK
+175 | DS_r := DS_1 [ sub Id_1 = 1, Id_2 = "B", Id_3 = "YY" ]; | OK
+176 | DS_r := DS_1 [ sub Id_2 = "A" ] + DS_1 [ sub Id_2 = "B" ]; | OK
 >>>
\ No newline at end of file
diff --git a/tests/ReferenceManual/test_reference_manual.py b/tests/ReferenceManual/test_reference_manual.py
index d0734a28..b9a76c7d 100644
--- a/tests/ReferenceManual/test_reference_manual.py
+++ b/tests/ReferenceManual/test_reference_manual.py
@@ -66,6 +66,7 @@
 validation_operators = list(range(157, 161))
 conditional_operators = list(range(161, 163))
 clause_operators = list(range(163, 177))
+new_operators = list(range(177, 186))

 # Remove tests because Reference Manual is wrong (Pivot)
 clause_operators.remove(172)
@@ -109,6 +110,7 @@
     validation_operators,
     conditional_operators,
     clause_operators,
+    new_operators,
 )

 params = [x for x in list(params) if x not in exceptions_tests]
diff --git a/tests/Semantic/test_semantic.py b/tests/Semantic/test_semantic.py
index 9c597762..a67abcb0 100644
--- a/tests/Semantic/test_semantic.py
+++ b/tests/Semantic/test_semantic.py
@@ -1971,7 +1971,11 @@ def test_5(self):
         references_names = ["1"]

         with pytest.raises(Exception, match="Vtl Script contains Cycles, no DAG established"):
-            self.BaseTest(code=code, number_inputs=number_inputs, references_names=references_names)
+            self.BaseTest(
+                code=code,
+                number_inputs=number_inputs,
+                references_names=references_names,
+            )

     def test_6(self):
         """
diff --git a/tests/ThreeValueLogic/test_three_value_logic.py b/tests/ThreeValueLogic/test_three_value_logic.py
index a1a51033..40d78ddd 100644
--- a/tests/ThreeValueLogic/test_three_value_logic.py
+++ b/tests/ThreeValueLogic/test_three_value_logic.py
@@ -57,7 +57,10 @@ def test_1(self):
         references_names = ["DS_r"]

         self.BaseTest(
-            text=text, code=code, number_inputs=number_inputs, references_names=references_names
+            text=text,
+            code=code,
+            number_inputs=number_inputs,
+            references_names=references_names,
         )

     def test_2(self):
@@ -70,7 +73,10 @@ def test_2(self):
         references_names = ["DS_r"]

         self.BaseTest(
-            text=text, code=code, number_inputs=number_inputs, references_names=references_names
+            text=text,
+            code=code,
+            number_inputs=number_inputs,
+            references_names=references_names,
         )

     def test_3(self):
@@ -83,7 +89,10 @@ def test_3(self):
         references_names = ["DS_r"]

         self.BaseTest(
-            text=text, code=code, number_inputs=number_inputs, references_names=references_names
+            text=text,
+            code=code,
+            number_inputs=number_inputs,
+            references_names=references_names,
         )

     def test_4(self):
@@ -96,7 +105,10 @@ def test_4(self):
         references_names = ["DS_r"]

         self.BaseTest(
-            text=text, code=code, number_inputs=number_inputs, references_names=references_names
+            text=text,
+            code=code,
+            number_inputs=number_inputs,
+            references_names=references_names,
         )

     def test_5(self):
@@ -109,7 +121,10 @@ def test_5(self):
         references_names = ["DS_r"]

         self.BaseTest(
-            text=text, code=code, number_inputs=number_inputs, references_names=references_names
+            text=text,
+            code=code,
+            number_inputs=number_inputs,
+            references_names=references_names,
         )

     def test_6(self):
@@ -122,7 +137,10 @@ def test_6(self):
         references_names = ["DS_r"]

         self.BaseTest(
-            text=text, code=code, number_inputs=number_inputs, references_names=references_names
+            text=text,
+            code=code,
+            number_inputs=number_inputs,
+            references_names=references_names,
         )

     def test_7(self):
@@ -135,5 +153,8 @@ def test_7(self):
         references_names = ["DS_r"]

         self.BaseTest(
-            text=text, code=code, number_inputs=number_inputs, references_names=references_names
+            text=text,
+            code=code,
+            number_inputs=number_inputs,
+            references_names=references_names,
         )
diff --git a/tests/TimePeriod/test_timeperiod.py b/tests/TimePeriod/test_timeperiod.py
index 13ea28cc..d32fe26e 100644
--- a/tests/TimePeriod/test_timeperiod.py
+++ b/tests/TimePeriod/test_timeperiod.py
@@ -41,7 +41,7 @@ def test_GL_418(self):

     def test_GL_417_1(self):
         """
-        test := avg (BE2_DF_NICP group all time_agg ("Q", "M", TIME_PERIOD));
+        Test := avg (BE2_DF_NICP group all time_agg ("Q", "M", TIME_PERIOD));
         """
         code = "GL_417_1"
         number_inputs = 1
@@ -51,7 +51,7 @@ def test_GL_417_1(self):

     def test_GL_417_2(self):
         """
-        test := avg (BE2_DF_NICP group all time_agg ("A", "M", TIME_PERIOD));
+        Test := avg (BE2_DF_NICP group all time_agg ("A", "M", TIME_PERIOD));
         """
         code = "GL_417_2"
         number_inputs = 1
@@ -73,7 +73,7 @@ def test_GL_417_4(
         self,
     ):  # TODO: Check periodIndFrom is not the same as in data, in data is "M", should we allow this?
         """
-        test := avg (BE2_DF_NICP group all time_agg ("A", "Q", TIME_PERIOD));
+        Test := avg (BE2_DF_NICP group all time_agg ("A", "Q", TIME_PERIOD));
         """
         code = "GL_417_4"
         number_inputs = 1
diff --git a/tests/TypeChecking/AggregateOperators/test_aggregate_operators.py b/tests/TypeChecking/AggregateOperators/test_aggregate_operators.py
index efa02665..c367bbfb 100644
--- a/tests/TypeChecking/AggregateOperators/test_aggregate_operators.py
+++ b/tests/TypeChecking/AggregateOperators/test_aggregate_operators.py
@@ -43,7 +43,7 @@ def test_1(self):

     def test_2(self):
         """
-        test 1 plus nulls.
+        Test 1 plus nulls.
         Status: OK
         Expression: DS_r := avg ( DS_1 group by Id_1);
         Description: The nulls are ignored in the average.
@@ -151,7 +151,7 @@ def test_8(self):

     def test_9(self):
         """
-        average with time again
+        Average with time again
         Status: OK
         Expression: DS_r := avg ( DS_1 group by Id_1);
         Description: Average with time.
@@ -169,7 +169,7 @@ def test_9(self):

     def test_10(self):
         """
-        count with integer and number
+        Count with integer and number
         Status: OK
         Expression: DS_r := count ( DS_1 group by Id_1);
         Description: There are measures int and num without nulls.
@@ -187,7 +187,7 @@ def test_10(self):

     def test_11(self):
         """
-        count one measure
+        Count one measure
         Status: OK
         Expression: DS_r := count ( Me_1 group by Id_1);
         Description: Special case of count with a component, should ignore nulls.
@@ -205,7 +205,7 @@ def test_11(self):

     def test_12(self):
         """
-        count with string
+        Count with string
         Status: OK
         Expression: DS_r := count ( DS_1 group by Id_1);
         Description: There isnt fail because take the null as empty string.
@@ -220,7 +220,7 @@ def test_12(self):

     def test_13(self):
         """
-        count with time
+        Count with time
         Status: OK
         Expression: DS_r := count ( DS_1 group by Id_1);
         Description: Measure Time with null, counts the null
@@ -235,7 +235,7 @@ def test_13(self):

     def test_14(self):
         """
-        count with date
+        Count with date
         Status: OK
         Expression: DS_r := count ( DS_1 group by Id_1);
         Description: Measure Date with null, doesn't count the null, we think that should.
@@ -250,7 +250,7 @@ def test_14(self):

     def test_15(self):
         """
-        count with time period
+        Count with time period
         Status: OK
         Expression: DS_r := count ( DS_1 group by Id_1);
         Description: Measure Time Period with null, doesn't count the null, we think that should.
@@ -265,7 +265,7 @@ def test_15(self):

     def test_16(self):
         """
-        count with duration
+        Count with duration
         Status: OK
         Expression: DS_r := count ( DS_1 group by Id_1);
         Description: Measure Duration with null, doesn't count the null, we think that should.
@@ -280,7 +280,7 @@ def test_16(self):

     def test_17(self):
         """
-        count with boolean
+        Count with boolean
         Status: OK
         Expression: DS_r := count ( DS_1 group by Id_1);
         Description: Measure Boolean with null, doesn't count the null, we think that should.
@@ -295,7 +295,7 @@ def test_17(self):

     def test_18(self):
         """
-        count with number and integer
+        Count with number and integer
         Status: OK
         Expression: DS_r := count ( DS_1 group by Id_1);
         Description: There are measures int and num with nulls.
@@ -310,7 +310,7 @@ def test_18(self):

     def test_19(self):
         """
-        count with number and integer
+        Count with number and integer
         Status: OK
         Expression: DS_r := count ( DS_1 group by Id_1);
         Description: Example that takes the most left measure.
@@ -340,7 +340,7 @@ def test_20(self):
     # max operator
     def test_21(self):
         """
-        max for integers
+        Max for integers
         Status: OK
         Expression: DS_r := max ( DS_1 group by Id_1);
         Description: All the measures Integers are involved and the results should be type Integer.
@@ -358,7 +358,7 @@ def test_21(self):

     def test_22(self):
         """
-        max for integers and numbers
+        Max for integers and numbers
         Status: OK
         Expression: DS_r := max ( DS_1 group by Id_1);
         Description: All the measures Integers and Numbers are involved and the results should be the parent type.
@@ -373,7 +373,7 @@ def test_22(self):

     def test_23(self):
         """
-        max for integers and string
+        Max for integers and string
         Status: OK
         Expression: DS_r := max ( DS_1 group by Id_1);
         Description: Max for string is ok on a lexicographic order.
@@ -388,7 +388,7 @@ def test_23(self):

     def test_24(self):
         """
-        max for integers and time
+        Max for integers and time
         Status: OK
         Expression: DS_r := max ( DS_1 group by Id_1);
         Description: Max for time takes the the mayor number but not the mayor time,
@@ -404,7 +404,7 @@ def test_24(self):

     def test_25(self):
         """
-        max for integers and date
+        Max for integers and date
         Status: OK
         Expression: DS_r := max ( DS_1 group by Id_1);
         Description: Max for date and nulls.
@@ -419,7 +419,7 @@ def test_25(self):

     def test_26(self):
         """
-        max for integers and time period
+        Max for integers and time period
         Status: OK.
         Expression: DS_r := max ( DS_1 group by Id_1);
         Description: Max doesnt work with nulls and diferent time_period in the same id (2012Q2,2012M12).
@@ -434,7 +434,7 @@ def test_26(self):

     def test_27(self):
         """
-        max for integers and duration
+        Max for integers and duration
         Status: OK
         Expression: DS_r := max ( DS_1 group by Id_1);
         Description: Max doesnt work with nulls and take the max duration in a lexicographic order.
@@ -449,7 +449,7 @@ def test_27(self):

     def test_28(self):
         """
-        max for integers and boolean
+        Max for integers and boolean
         Status: OK
         Expression: DS_r := max ( DS_1 group by Id_1);
         Description: Max for booleans takes True as max.
diff --git a/tests/TypeChecking/Numeric/ScalarDataset/test_scalar_dataset.py b/tests/TypeChecking/Numeric/ScalarDataset/test_scalar_dataset.py
index 1ec815a3..6f6d97cd 100644
--- a/tests/TypeChecking/Numeric/ScalarDataset/test_scalar_dataset.py
+++ b/tests/TypeChecking/Numeric/ScalarDataset/test_scalar_dataset.py
@@ -6622,7 +6622,7 @@ def test_373(self):

     def test_374(self):
         """
-        number power duration --> number
+        Number power duration --> number
         Status: OK
         Expression: DS_r := power(1.0 , DS_1) ;
         Description: Forbid implicit cast number to duration in power operator.
diff --git a/tests/Validation/test_validation.py b/tests/Validation/test_validation.py
index bc1498b8..2ecf6f92 100644
--- a/tests/Validation/test_validation.py
+++ b/tests/Validation/test_validation.py
@@ -194,7 +194,7 @@ def test_6(self):

     def test_7(self):
         """
-        define datapoint ruleset
+        Define datapoint ruleset
         Dataset --> Dataset

         Status: OK
@@ -225,7 +225,7 @@ def test_7(self):

     def test_8(self):
         """
-        define datapoint ruleset
+        Define datapoint ruleset
         Dataset --> Dataset

         Status: OK
@@ -256,7 +256,7 @@ def test_8(self):

     def test_9(self):
         """
-        define datapoint ruleset
+        Define datapoint ruleset
         Dataset --> Dataset

         Status: OK
@@ -287,7 +287,7 @@ def test_9(self):

     def test_10(self):
         """
-        define datapoint ruleset
+        Define datapoint ruleset
         Dataset --> Dataset

         Status: OK
@@ -304,7 +304,7 @@ def test_10(self):

     def test_11(self):
         """
-        define datapoint ruleset
+        Define datapoint ruleset
         Dataset --> Dataset

         Status: OK
@@ -322,7 +322,7 @@ def test_11(self):

     def test_12(self):
         """
-        define datapoint ruleset
+        Define datapoint ruleset
         Dataset --> Dataset

         Status: OK
@@ -368,7 +368,7 @@ def test_GL_446_3(self):

     def test_GL_cs_22(self):
         """
-        eschaped characters in the hierarchical ruleset have to be replaced by the corresponding character
+        Escaped characters in the hierarchical ruleset have to be replaced by the corresponding character
         '_T' -> 'T'

         Uses SDMX-CSV 1.0
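For reviewers who want to exercise one of the new operator tests by hand, a minimal sketch follows. It assumes the package's top-level `run` helper and invents the sample data values; the input structure is derived from the 179-DS_r.json fixture above (Id_1 String identifier, Me_1 Date measure, and a derived Integer measure Me_2).

# Hedged sketch, not part of the diff: drives the RM179 expression end to end.
import pandas as pd

from vtlengine import run  # assumption: the top-level helper used by main.py

# Same expression as tests/ReferenceManual/data/vtl/RM179.vtl
script = "DS_r := DS_1[ calc Me_2 := month (Me_1) ];"

# Input structure implied by the 179-DS_r.json output fixture.
data_structures = {
    "datasets": [
        {
            "name": "DS_1",
            "DataStructure": [
                {"name": "Id_1", "type": "String", "role": "Identifier", "nullable": False},
                {"name": "Me_1", "type": "Date", "role": "Measure", "nullable": True},
            ],
        }
    ]
}

# Invented sample values for illustration.
datapoints = {"DS_1": pd.DataFrame({"Id_1": ["A", "B"], "Me_1": ["2022-03-15", "2022-11-02"]})}

# DS_r should gain an Integer measure Me_2 holding the month numbers (3 and 11),
# matching the structure recorded in 179-DS_r.json.
run_result = run(script=script, data_structures=data_structures, datapoints=datapoints)
print(run_result)

The vtl_defined_operators variants in this diff exercise the same operators indirectly through `define operator`, so swapping the script for the RM179 defined-operator file should yield the same result structure.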