From ac9b6e599c6ce26203a0897d11ca9d4065e8e9fd Mon Sep 17 00:00:00 2001 From: clinssen Date: Wed, 6 Nov 2024 10:26:54 +0100 Subject: [PATCH] Add explicit output parameters to spiking output ports (#1124) --- .../nestml_language_concepts.rst | 24 + doc/running/running_nest.rst | 8 + .../stdp_dopa_synapse/stdp_dopa_synapse.ipynb | 2 +- .../stdp_third_factor_active_dendrite.ipynb | 2 +- doc/tutorials/stdp_windows/stdp_windows.ipynb | 6 +- .../triplet_stdp_synapse.ipynb | 4 +- .../neuromodulated_stdp_synapse.nestml | 2 +- models/synapses/noisy_synapse.nestml | 2 +- models/synapses/static_synapse.nestml | 2 +- .../stdp_nn_pre_centered_synapse.nestml | 2 +- .../stdp_nn_restr_symm_synapse.nestml | 2 +- models/synapses/stdp_nn_symm_synapse.nestml | 2 +- models/synapses/stdp_synapse.nestml | 2 +- models/synapses/stdp_triplet_synapse.nestml | 2 +- .../co_co_output_port_defined_if_emit_call.py | 52 +- .../codegeneration/printers/nestml_printer.py | 8 + pynestml/generated/PyNestMLLexer.py | 500 +++++++++--------- pynestml/generated/PyNestMLParser.py | 387 +++++++++----- pynestml/grammars/PyNestMLLexer.g4 | 5 +- pynestml/grammars/PyNestMLParser.g4 | 5 +- pynestml/meta_model/ast_function_call.py | 7 +- pynestml/meta_model/ast_node_factory.py | 7 +- pynestml/meta_model/ast_output_block.py | 24 +- pynestml/utils/messages.py | 15 + pynestml/visitors/ast_builder_visitor.py | 12 +- .../CoCoOutputPortTypeContinuous.nestml | 34 ++ .../CoCoOutputPortTypeIfEmitCall-2.nestml | 37 ++ .../CoCoOutputPortTypeIfEmitCall-3.nestml | 37 ++ .../CoCoOutputPortTypeIfEmitCall.nestml | 37 ++ .../delay_test_assigned_delay2_synapse.nestml | 2 +- .../delay_test_assigned_delay_synapse.nestml | 2 +- .../delay_test_assigned_synapse.nestml | 2 +- .../delay_test_plastic_synapse.nestml | 2 +- .../resources/delay_test_synapse.nestml | 2 +- .../dopa_second_order_synapse.nestml | 2 +- .../homogeneous_parameters_synapse.nestml | 2 +- .../random_functions_illegal_synapse.nestml | 2 +- 
.../test_plasticity_dynamics_synapse.nestml | 2 +- .../weight_test_assigned_synapse.nestml | 2 +- .../weight_test_plastic_synapse.nestml | 2 +- .../synapse_event_inv_priority_test.nestml | 2 +- .../synapse_event_priority_test.nestml | 2 +- tests/test_cocos.py | 31 +- 43 files changed, 846 insertions(+), 440 deletions(-) create mode 100644 tests/invalid/CoCoOutputPortTypeContinuous.nestml create mode 100644 tests/invalid/CoCoOutputPortTypeIfEmitCall-2.nestml create mode 100644 tests/invalid/CoCoOutputPortTypeIfEmitCall-3.nestml create mode 100644 tests/invalid/CoCoOutputPortTypeIfEmitCall.nestml diff --git a/doc/nestml_language/nestml_language_concepts.rst b/doc/nestml_language/nestml_language_concepts.rst index a76e08bc8..7fc87c624 100644 --- a/doc/nestml_language/nestml_language_concepts.rst +++ b/doc/nestml_language/nestml_language_concepts.rst @@ -907,6 +907,30 @@ Each model can only send a single type of event. The type of the event has to be Calling the ``emit_spike()`` function in the ``update`` block results in firing a spike to all target neurons and devices time stamped with the simulation time at the end of the time interval ``t + timestep()``. +Event attributes +~~~~~~~~~~~~~~~~ + +Each spiking output event can be parameterised by one or more attributes. For example, a synapse could assign a weight (as a real number) and delay (in milliseconds) to its spike events by including these values in the call to ``emit_spike()``: + +.. code-block:: nestml + + parameters: + weight real = 10. + + update: + emit_spike(weight, 1 ms) + +If spike event attributes are used, their names and types must be given as part of the output port specification, for example: + +.. code-block:: nestml + + output: + spike(weight real, delay ms) + +The names are only used externally, so that other models can refer to the correct attribute (such as a downstream neuron that is receiving the spike through its input port). 
It is thus allowed to have a state variable called ``weight`` and an output port attribute by the same name; the output port attribute name does not refer to names declared inside the model. + +Specific code generators may support a specific set of attributes; please check the documentation of each individual code generator for more details. + Equations --------- diff --git a/doc/running/running_nest.rst b/doc/running/running_nest.rst index bb40a63aa..f6d53dfc9 100644 --- a/doc/running/running_nest.rst +++ b/doc/running/running_nest.rst @@ -182,6 +182,14 @@ For a full example, please see `iaf_psc_exp_multisynapse_vectors.nestml 1: + code, message = Messages.get_block_not_defined_correctly('output', missing=False) + Logger.log_message(error_position=node.get_source_position(), log_level=LoggingLevel.ERROR, + code=code, message=message) + return + + assert len(output_blocks) == 1 - if not spike_output_exists: + if not output_blocks[0].is_spike(): code, message = Messages.get_emit_spike_function_but_no_output_port() Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR, - error_position=node.get_source_position()) + error_position=output_blocks[0].get_source_position()) return + + # check types + if len(node.get_args()) != len(output_blocks[0].get_attributes()): + code, message = Messages.get_output_port_type_differs() + Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR, + error_position=output_blocks[0].get_source_position()) + return + + for emit_spike_arg, output_block_attr in zip(node.get_args(), output_blocks[0].get_attributes()): + emit_spike_arg_type_sym = emit_spike_arg.type + output_block_attr_type_sym = output_block_attr.get_data_type().get_type_symbol() + + if emit_spike_arg_type_sym.equals(output_block_attr_type_sym): + continue + + if emit_spike_arg_type_sym.is_castable_to(output_block_attr_type_sym): + # types are not equal, but castable + code, message = 
Messages.get_implicit_cast_rhs_to_lhs(output_block_attr_type_sym.print_symbol(), + emit_spike_arg_type_sym.print_symbol()) + Logger.log_message(error_position=output_blocks[0].get_source_position(), + code=code, message=message, log_level=LoggingLevel.WARNING) + continue + else: + # types are not equal and not castable + code, message = Messages.get_output_port_type_differs() + Logger.log_message(code=code, message=message, log_level=LoggingLevel.ERROR, + error_position=output_blocks[0].get_source_position()) + return diff --git a/pynestml/codegeneration/printers/nestml_printer.py b/pynestml/codegeneration/printers/nestml_printer.py index f03d9931d..481fc9a6a 100644 --- a/pynestml/codegeneration/printers/nestml_printer.py +++ b/pynestml/codegeneration/printers/nestml_printer.py @@ -431,6 +431,14 @@ def print_output_block(self, node: ASTOutputBlock) -> str: ret += print_n_spaces(self.indent) + "output:\n" ret += print_n_spaces(self.indent + 4) ret += "spike" if node.is_spike() else "continuous" + if node.get_attributes(): + ret += "(" + for i, attr in enumerate(node.get_attributes()): + ret += self.print(attr) + if i < len(node.get_attributes()) - 1: + ret += ", " + + ret += ")" ret += print_sl_comment(node.in_comment) ret += "\n" return ret diff --git a/pynestml/generated/PyNestMLLexer.py b/pynestml/generated/PyNestMLLexer.py index 90123821f..fe99b1dd7 100644 --- a/pynestml/generated/PyNestMLLexer.py +++ b/pynestml/generated/PyNestMLLexer.py @@ -15,7 +15,7 @@ def serializedATN(): return [ - 4,0,90,702,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5, + 4,0,91,707,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5, 2,6,7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2, 13,7,13,2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7, 19,2,20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2, @@ -29,243 +29,244 @@ def serializedATN(): 71,2,72,7,72,2,73,7,73,2,74,7,74,2,75,7,75,2,76,7,76,2,77,7,77,2, 
78,7,78,2,79,7,79,2,80,7,80,2,81,7,81,2,82,7,82,2,83,7,83,2,84,7, 84,2,85,7,85,2,86,7,86,2,87,7,87,2,88,7,88,2,89,7,89,2,90,7,90,2, - 91,7,91,1,0,1,0,1,0,1,0,1,1,3,1,191,8,1,1,1,1,1,1,2,1,2,1,2,3,2, - 198,8,2,1,3,4,3,201,8,3,11,3,12,3,202,1,3,1,3,1,4,1,4,1,4,1,4,1, - 4,1,5,1,5,5,5,214,8,5,10,5,12,5,217,9,5,1,5,1,5,4,5,221,8,5,11,5, - 12,5,222,1,5,1,5,1,6,1,6,5,6,229,8,6,10,6,12,6,232,9,6,1,6,1,6,1, - 7,1,7,1,7,3,7,239,8,7,1,7,1,7,1,7,3,7,244,8,7,3,7,246,8,7,1,7,1, - 7,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,9,1,9,1,9,1,9,1,9,1,10,1,10, - 1,10,1,10,1,10,1,10,1,10,1,11,1,11,1,11,1,11,1,11,1,11,1,11,1,11, - 1,12,1,12,1,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13,1,13,1,13, - 1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,15,1,15,1,15,1,15,1,15, - 1,15,1,15,1,16,1,16,1,16,1,17,1,17,1,17,1,17,1,17,1,18,1,18,1,18, - 1,18,1,18,1,19,1,19,1,19,1,19,1,20,1,20,1,20,1,20,1,20,1,20,1,21, - 1,21,1,21,1,22,1,22,1,22,1,22,1,22,1,23,1,23,1,23,1,23,1,24,1,24, - 1,24,1,24,1,25,1,25,1,25,1,26,1,26,1,26,1,26,1,27,1,27,1,27,1,27, - 1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28,1,28,1,28, - 1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30,1,30,1,30, - 1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,32,1,32, - 1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,33,1,33,1,33,1,33,1,33, - 1,33,1,33,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,35, - 1,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,1,36,1,36,1,37, - 1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,38,1,38,1,38, - 1,38,1,38,1,38,1,38,1,38,1,38,1,38,1,39,1,39,1,39,1,39,1,39,1,39, - 1,39,1,39,1,39,1,39,1,39,1,39,1,40,1,40,1,40,1,40,1,40,1,40,1,41, - 1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,1,42, - 1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,43,1,43,1,43,1,43,1,43, - 1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,44,1,44,1,44,1,44,1,44, - 1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,45,1,45,1,46, - 1,46,1,46,1,46,1,47,1,47,1,48,1,48,1,49,1,49,1,50,1,50,1,51,1,51, - 
1,52,1,52,1,53,1,53,1,54,1,54,1,55,1,55,1,55,1,56,1,56,1,57,1,57, - 1,57,1,58,1,58,1,58,1,59,1,59,1,59,1,60,1,60,1,60,1,61,1,61,1,62, - 1,62,1,63,1,63,1,63,1,64,1,64,1,64,1,65,1,65,1,65,1,66,1,66,1,66, - 1,67,1,67,1,67,1,68,1,68,1,68,1,69,1,69,1,69,1,70,1,70,1,70,1,71, - 1,71,1,71,1,72,1,72,1,73,1,73,1,74,1,74,1,75,1,75,1,76,1,76,1,76, - 1,77,1,77,1,78,1,78,1,79,1,79,1,80,1,80,1,81,1,81,1,81,1,82,1,82, - 1,83,1,83,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84,1,84, - 1,84,1,84,1,84,1,84,1,84,1,84,1,84,3,84,636,8,84,1,85,1,85,1,85, - 4,85,641,8,85,11,85,12,85,642,1,85,3,85,646,8,85,1,85,3,85,649,8, - 85,1,85,3,85,652,8,85,1,85,5,85,655,8,85,10,85,12,85,658,9,85,1, - 85,1,85,1,86,3,86,663,8,86,1,86,5,86,666,8,86,10,86,12,86,669,9, - 86,1,87,4,87,672,8,87,11,87,12,87,673,1,88,1,88,3,88,678,8,88,1, - 89,3,89,681,8,89,1,89,1,89,1,89,1,89,1,89,3,89,688,8,89,1,90,1,90, - 3,90,692,8,90,1,90,1,90,1,90,1,91,1,91,3,91,699,8,91,1,91,1,91,2, - 215,222,0,92,1,3,3,0,5,4,7,5,9,6,11,7,13,8,15,9,17,10,19,11,21,12, - 23,13,25,14,27,15,29,16,31,17,33,18,35,19,37,20,39,21,41,22,43,23, - 45,24,47,25,49,26,51,27,53,28,55,29,57,30,59,31,61,32,63,33,65,34, - 67,35,69,36,71,37,73,38,75,39,77,40,79,41,81,42,83,43,85,44,87,45, - 89,46,91,47,93,48,95,49,97,50,99,51,101,52,103,53,105,54,107,55, - 109,56,111,57,113,58,115,59,117,60,119,61,121,62,123,63,125,64,127, - 65,129,66,131,67,133,68,135,69,137,70,139,71,141,72,143,73,145,74, - 147,75,149,76,151,77,153,78,155,79,157,80,159,81,161,82,163,83,165, - 84,167,85,169,86,171,87,173,88,175,89,177,90,179,0,181,0,183,0,1, - 0,7,2,0,9,9,32,32,2,0,10,10,13,13,4,0,10,10,13,13,34,34,92,92,4, - 0,36,36,65,90,95,95,97,122,5,0,36,36,48,57,65,90,95,95,97,122,1, - 0,48,57,2,0,69,69,101,101,723,0,1,1,0,0,0,0,5,1,0,0,0,0,7,1,0,0, - 0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,0, - 0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1,0,0,0,0,25,1,0,0,0,0,27,1,0,0, - 0,0,29,1,0,0,0,0,31,1,0,0,0,0,33,1,0,0,0,0,35,1,0,0,0,0,37,1,0,0, - 
0,0,39,1,0,0,0,0,41,1,0,0,0,0,43,1,0,0,0,0,45,1,0,0,0,0,47,1,0,0, - 0,0,49,1,0,0,0,0,51,1,0,0,0,0,53,1,0,0,0,0,55,1,0,0,0,0,57,1,0,0, - 0,0,59,1,0,0,0,0,61,1,0,0,0,0,63,1,0,0,0,0,65,1,0,0,0,0,67,1,0,0, - 0,0,69,1,0,0,0,0,71,1,0,0,0,0,73,1,0,0,0,0,75,1,0,0,0,0,77,1,0,0, - 0,0,79,1,0,0,0,0,81,1,0,0,0,0,83,1,0,0,0,0,85,1,0,0,0,0,87,1,0,0, - 0,0,89,1,0,0,0,0,91,1,0,0,0,0,93,1,0,0,0,0,95,1,0,0,0,0,97,1,0,0, - 0,0,99,1,0,0,0,0,101,1,0,0,0,0,103,1,0,0,0,0,105,1,0,0,0,0,107,1, - 0,0,0,0,109,1,0,0,0,0,111,1,0,0,0,0,113,1,0,0,0,0,115,1,0,0,0,0, - 117,1,0,0,0,0,119,1,0,0,0,0,121,1,0,0,0,0,123,1,0,0,0,0,125,1,0, - 0,0,0,127,1,0,0,0,0,129,1,0,0,0,0,131,1,0,0,0,0,133,1,0,0,0,0,135, - 1,0,0,0,0,137,1,0,0,0,0,139,1,0,0,0,0,141,1,0,0,0,0,143,1,0,0,0, - 0,145,1,0,0,0,0,147,1,0,0,0,0,149,1,0,0,0,0,151,1,0,0,0,0,153,1, - 0,0,0,0,155,1,0,0,0,0,157,1,0,0,0,0,159,1,0,0,0,0,161,1,0,0,0,0, - 163,1,0,0,0,0,165,1,0,0,0,0,167,1,0,0,0,0,169,1,0,0,0,0,171,1,0, - 0,0,0,173,1,0,0,0,0,175,1,0,0,0,0,177,1,0,0,0,1,185,1,0,0,0,3,190, - 1,0,0,0,5,194,1,0,0,0,7,200,1,0,0,0,9,206,1,0,0,0,11,211,1,0,0,0, - 13,226,1,0,0,0,15,245,1,0,0,0,17,249,1,0,0,0,19,257,1,0,0,0,21,262, - 1,0,0,0,23,269,1,0,0,0,25,277,1,0,0,0,27,282,1,0,0,0,29,291,1,0, - 0,0,31,298,1,0,0,0,33,305,1,0,0,0,35,308,1,0,0,0,37,313,1,0,0,0, - 39,318,1,0,0,0,41,322,1,0,0,0,43,328,1,0,0,0,45,331,1,0,0,0,47,336, - 1,0,0,0,49,340,1,0,0,0,51,344,1,0,0,0,53,347,1,0,0,0,55,351,1,0, - 0,0,57,362,1,0,0,0,59,369,1,0,0,0,61,375,1,0,0,0,63,381,1,0,0,0, - 65,392,1,0,0,0,67,402,1,0,0,0,69,409,1,0,0,0,71,419,1,0,0,0,73,425, - 1,0,0,0,75,432,1,0,0,0,77,443,1,0,0,0,79,453,1,0,0,0,81,465,1,0, - 0,0,83,471,1,0,0,0,85,482,1,0,0,0,87,493,1,0,0,0,89,506,1,0,0,0, - 91,521,1,0,0,0,93,523,1,0,0,0,95,527,1,0,0,0,97,529,1,0,0,0,99,531, - 1,0,0,0,101,533,1,0,0,0,103,535,1,0,0,0,105,537,1,0,0,0,107,539, - 1,0,0,0,109,541,1,0,0,0,111,543,1,0,0,0,113,546,1,0,0,0,115,548, - 1,0,0,0,117,551,1,0,0,0,119,554,1,0,0,0,121,557,1,0,0,0,123,560, - 
1,0,0,0,125,562,1,0,0,0,127,564,1,0,0,0,129,567,1,0,0,0,131,570, - 1,0,0,0,133,573,1,0,0,0,135,576,1,0,0,0,137,579,1,0,0,0,139,582, - 1,0,0,0,141,585,1,0,0,0,143,588,1,0,0,0,145,591,1,0,0,0,147,593, - 1,0,0,0,149,595,1,0,0,0,151,597,1,0,0,0,153,599,1,0,0,0,155,602, - 1,0,0,0,157,604,1,0,0,0,159,606,1,0,0,0,161,608,1,0,0,0,163,610, - 1,0,0,0,165,613,1,0,0,0,167,615,1,0,0,0,169,635,1,0,0,0,171,637, - 1,0,0,0,173,662,1,0,0,0,175,671,1,0,0,0,177,677,1,0,0,0,179,687, - 1,0,0,0,181,691,1,0,0,0,183,698,1,0,0,0,185,186,5,34,0,0,186,187, - 5,34,0,0,187,188,5,34,0,0,188,2,1,0,0,0,189,191,5,13,0,0,190,189, - 1,0,0,0,190,191,1,0,0,0,191,192,1,0,0,0,192,193,5,10,0,0,193,4,1, - 0,0,0,194,195,3,145,72,0,195,197,3,3,1,0,196,198,3,7,3,0,197,196, - 1,0,0,0,197,198,1,0,0,0,198,6,1,0,0,0,199,201,7,0,0,0,200,199,1, - 0,0,0,201,202,1,0,0,0,202,200,1,0,0,0,202,203,1,0,0,0,203,204,1, - 0,0,0,204,205,6,3,0,0,205,8,1,0,0,0,206,207,5,92,0,0,207,208,3,3, - 1,0,208,209,1,0,0,0,209,210,6,4,0,0,210,10,1,0,0,0,211,215,3,1,0, - 0,212,214,9,0,0,0,213,212,1,0,0,0,214,217,1,0,0,0,215,216,1,0,0, - 0,215,213,1,0,0,0,216,218,1,0,0,0,217,215,1,0,0,0,218,220,3,1,0, - 0,219,221,3,3,1,0,220,219,1,0,0,0,221,222,1,0,0,0,222,223,1,0,0, - 0,222,220,1,0,0,0,223,224,1,0,0,0,224,225,6,5,1,0,225,12,1,0,0,0, - 226,230,5,35,0,0,227,229,8,1,0,0,228,227,1,0,0,0,229,232,1,0,0,0, - 230,228,1,0,0,0,230,231,1,0,0,0,231,233,1,0,0,0,232,230,1,0,0,0, - 233,234,6,6,1,0,234,14,1,0,0,0,235,236,4,7,0,0,236,246,3,7,3,0,237, - 239,5,13,0,0,238,237,1,0,0,0,238,239,1,0,0,0,239,240,1,0,0,0,240, - 241,5,10,0,0,241,243,1,0,0,0,242,244,3,7,3,0,243,242,1,0,0,0,243, - 244,1,0,0,0,244,246,1,0,0,0,245,235,1,0,0,0,245,238,1,0,0,0,246, - 247,1,0,0,0,247,248,6,7,2,0,248,16,1,0,0,0,249,250,5,105,0,0,250, - 251,5,110,0,0,251,252,5,116,0,0,252,253,5,101,0,0,253,254,5,103, - 0,0,254,255,5,101,0,0,255,256,5,114,0,0,256,18,1,0,0,0,257,258,5, - 114,0,0,258,259,5,101,0,0,259,260,5,97,0,0,260,261,5,108,0,0,261, - 
20,1,0,0,0,262,263,5,115,0,0,263,264,5,116,0,0,264,265,5,114,0,0, - 265,266,5,105,0,0,266,267,5,110,0,0,267,268,5,103,0,0,268,22,1,0, - 0,0,269,270,5,98,0,0,270,271,5,111,0,0,271,272,5,111,0,0,272,273, - 5,108,0,0,273,274,5,101,0,0,274,275,5,97,0,0,275,276,5,110,0,0,276, - 24,1,0,0,0,277,278,5,118,0,0,278,279,5,111,0,0,279,280,5,105,0,0, - 280,281,5,100,0,0,281,26,1,0,0,0,282,283,5,102,0,0,283,284,5,117, - 0,0,284,285,5,110,0,0,285,286,5,99,0,0,286,287,5,116,0,0,287,288, - 5,105,0,0,288,289,5,111,0,0,289,290,5,110,0,0,290,28,1,0,0,0,291, - 292,5,105,0,0,292,293,5,110,0,0,293,294,5,108,0,0,294,295,5,105, - 0,0,295,296,5,110,0,0,296,297,5,101,0,0,297,30,1,0,0,0,298,299,5, - 114,0,0,299,300,5,101,0,0,300,301,5,116,0,0,301,302,5,117,0,0,302, - 303,5,114,0,0,303,304,5,110,0,0,304,32,1,0,0,0,305,306,5,105,0,0, - 306,307,5,102,0,0,307,34,1,0,0,0,308,309,5,101,0,0,309,310,5,108, - 0,0,310,311,5,105,0,0,311,312,5,102,0,0,312,36,1,0,0,0,313,314,5, - 101,0,0,314,315,5,108,0,0,315,316,5,115,0,0,316,317,5,101,0,0,317, - 38,1,0,0,0,318,319,5,102,0,0,319,320,5,111,0,0,320,321,5,114,0,0, - 321,40,1,0,0,0,322,323,5,119,0,0,323,324,5,104,0,0,324,325,5,105, - 0,0,325,326,5,108,0,0,326,327,5,101,0,0,327,42,1,0,0,0,328,329,5, - 105,0,0,329,330,5,110,0,0,330,44,1,0,0,0,331,332,5,115,0,0,332,333, - 5,116,0,0,333,334,5,101,0,0,334,335,5,112,0,0,335,46,1,0,0,0,336, - 337,5,105,0,0,337,338,5,110,0,0,338,339,5,102,0,0,339,48,1,0,0,0, - 340,341,5,97,0,0,341,342,5,110,0,0,342,343,5,100,0,0,343,50,1,0, - 0,0,344,345,5,111,0,0,345,346,5,114,0,0,346,52,1,0,0,0,347,348,5, - 110,0,0,348,349,5,111,0,0,349,350,5,116,0,0,350,54,1,0,0,0,351,352, - 5,114,0,0,352,353,5,101,0,0,353,354,5,99,0,0,354,355,5,111,0,0,355, - 356,5,114,0,0,356,357,5,100,0,0,357,358,5,97,0,0,358,359,5,98,0, - 0,359,360,5,108,0,0,360,361,5,101,0,0,361,56,1,0,0,0,362,363,5,107, - 0,0,363,364,5,101,0,0,364,365,5,114,0,0,365,366,5,110,0,0,366,367, - 5,101,0,0,367,368,5,108,0,0,368,58,1,0,0,0,369,370,5,109,0,0,370, - 
371,5,111,0,0,371,372,5,100,0,0,372,373,5,101,0,0,373,374,5,108, - 0,0,374,60,1,0,0,0,375,376,5,115,0,0,376,377,5,116,0,0,377,378,5, - 97,0,0,378,379,5,116,0,0,379,380,5,101,0,0,380,62,1,0,0,0,381,382, - 5,112,0,0,382,383,5,97,0,0,383,384,5,114,0,0,384,385,5,97,0,0,385, - 386,5,109,0,0,386,387,5,101,0,0,387,388,5,116,0,0,388,389,5,101, - 0,0,389,390,5,114,0,0,390,391,5,115,0,0,391,64,1,0,0,0,392,393,5, - 105,0,0,393,394,5,110,0,0,394,395,5,116,0,0,395,396,5,101,0,0,396, - 397,5,114,0,0,397,398,5,110,0,0,398,399,5,97,0,0,399,400,5,108,0, - 0,400,401,5,115,0,0,401,66,1,0,0,0,402,403,5,117,0,0,403,404,5,112, - 0,0,404,405,5,100,0,0,405,406,5,97,0,0,406,407,5,116,0,0,407,408, - 5,101,0,0,408,68,1,0,0,0,409,410,5,101,0,0,410,411,5,113,0,0,411, - 412,5,117,0,0,412,413,5,97,0,0,413,414,5,116,0,0,414,415,5,105,0, - 0,415,416,5,111,0,0,416,417,5,110,0,0,417,418,5,115,0,0,418,70,1, - 0,0,0,419,420,5,105,0,0,420,421,5,110,0,0,421,422,5,112,0,0,422, - 423,5,117,0,0,423,424,5,116,0,0,424,72,1,0,0,0,425,426,5,111,0,0, - 426,427,5,117,0,0,427,428,5,116,0,0,428,429,5,112,0,0,429,430,5, - 117,0,0,430,431,5,116,0,0,431,74,1,0,0,0,432,433,5,99,0,0,433,434, - 5,111,0,0,434,435,5,110,0,0,435,436,5,116,0,0,436,437,5,105,0,0, - 437,438,5,110,0,0,438,439,5,117,0,0,439,440,5,111,0,0,440,441,5, - 117,0,0,441,442,5,115,0,0,442,76,1,0,0,0,443,444,5,111,0,0,444,445, - 5,110,0,0,445,446,5,82,0,0,446,447,5,101,0,0,447,448,5,99,0,0,448, - 449,5,101,0,0,449,450,5,105,0,0,450,451,5,118,0,0,451,452,5,101, - 0,0,452,78,1,0,0,0,453,454,5,111,0,0,454,455,5,110,0,0,455,456,5, - 67,0,0,456,457,5,111,0,0,457,458,5,110,0,0,458,459,5,100,0,0,459, - 460,5,105,0,0,460,461,5,116,0,0,461,462,5,105,0,0,462,463,5,111, - 0,0,463,464,5,110,0,0,464,80,1,0,0,0,465,466,5,115,0,0,466,467,5, - 112,0,0,467,468,5,105,0,0,468,469,5,107,0,0,469,470,5,101,0,0,470, - 82,1,0,0,0,471,472,5,105,0,0,472,473,5,110,0,0,473,474,5,104,0,0, - 474,475,5,105,0,0,475,476,5,98,0,0,476,477,5,105,0,0,477,478,5,116, - 
0,0,478,479,5,111,0,0,479,480,5,114,0,0,480,481,5,121,0,0,481,84, - 1,0,0,0,482,483,5,101,0,0,483,484,5,120,0,0,484,485,5,99,0,0,485, - 486,5,105,0,0,486,487,5,116,0,0,487,488,5,97,0,0,488,489,5,116,0, - 0,489,490,5,111,0,0,490,491,5,114,0,0,491,492,5,121,0,0,492,86,1, - 0,0,0,493,494,5,64,0,0,494,495,5,104,0,0,495,496,5,111,0,0,496,497, - 5,109,0,0,497,498,5,111,0,0,498,499,5,103,0,0,499,500,5,101,0,0, - 500,501,5,110,0,0,501,502,5,101,0,0,502,503,5,111,0,0,503,504,5, - 117,0,0,504,505,5,115,0,0,505,88,1,0,0,0,506,507,5,64,0,0,507,508, - 5,104,0,0,508,509,5,101,0,0,509,510,5,116,0,0,510,511,5,101,0,0, - 511,512,5,114,0,0,512,513,5,111,0,0,513,514,5,103,0,0,514,515,5, - 101,0,0,515,516,5,110,0,0,516,517,5,101,0,0,517,518,5,111,0,0,518, - 519,5,117,0,0,519,520,5,115,0,0,520,90,1,0,0,0,521,522,5,64,0,0, - 522,92,1,0,0,0,523,524,5,46,0,0,524,525,5,46,0,0,525,526,5,46,0, - 0,526,94,1,0,0,0,527,528,5,40,0,0,528,96,1,0,0,0,529,530,5,41,0, - 0,530,98,1,0,0,0,531,532,5,43,0,0,532,100,1,0,0,0,533,534,5,126, - 0,0,534,102,1,0,0,0,535,536,5,124,0,0,536,104,1,0,0,0,537,538,5, - 94,0,0,538,106,1,0,0,0,539,540,5,38,0,0,540,108,1,0,0,0,541,542, - 5,91,0,0,542,110,1,0,0,0,543,544,5,60,0,0,544,545,5,45,0,0,545,112, - 1,0,0,0,546,547,5,93,0,0,547,114,1,0,0,0,548,549,5,91,0,0,549,550, - 5,91,0,0,550,116,1,0,0,0,551,552,5,93,0,0,552,553,5,93,0,0,553,118, - 1,0,0,0,554,555,5,60,0,0,555,556,5,60,0,0,556,120,1,0,0,0,557,558, - 5,62,0,0,558,559,5,62,0,0,559,122,1,0,0,0,560,561,5,60,0,0,561,124, - 1,0,0,0,562,563,5,62,0,0,563,126,1,0,0,0,564,565,5,60,0,0,565,566, - 5,61,0,0,566,128,1,0,0,0,567,568,5,43,0,0,568,569,5,61,0,0,569,130, - 1,0,0,0,570,571,5,45,0,0,571,572,5,61,0,0,572,132,1,0,0,0,573,574, - 5,42,0,0,574,575,5,61,0,0,575,134,1,0,0,0,576,577,5,47,0,0,577,578, - 5,61,0,0,578,136,1,0,0,0,579,580,5,61,0,0,580,581,5,61,0,0,581,138, - 1,0,0,0,582,583,5,33,0,0,583,584,5,61,0,0,584,140,1,0,0,0,585,586, - 5,60,0,0,586,587,5,62,0,0,587,142,1,0,0,0,588,589,5,62,0,0,589,590, - 
5,61,0,0,590,144,1,0,0,0,591,592,5,44,0,0,592,146,1,0,0,0,593,594, - 5,45,0,0,594,148,1,0,0,0,595,596,5,61,0,0,596,150,1,0,0,0,597,598, - 5,42,0,0,598,152,1,0,0,0,599,600,5,42,0,0,600,601,5,42,0,0,601,154, - 1,0,0,0,602,603,5,47,0,0,603,156,1,0,0,0,604,605,5,37,0,0,605,158, - 1,0,0,0,606,607,5,63,0,0,607,160,1,0,0,0,608,609,5,58,0,0,609,162, - 1,0,0,0,610,611,5,58,0,0,611,612,5,58,0,0,612,164,1,0,0,0,613,614, - 5,59,0,0,614,166,1,0,0,0,615,616,5,39,0,0,616,168,1,0,0,0,617,618, - 5,116,0,0,618,619,5,114,0,0,619,620,5,117,0,0,620,636,5,101,0,0, - 621,622,5,84,0,0,622,623,5,114,0,0,623,624,5,117,0,0,624,636,5,101, - 0,0,625,626,5,102,0,0,626,627,5,97,0,0,627,628,5,108,0,0,628,629, - 5,115,0,0,629,636,5,101,0,0,630,631,5,70,0,0,631,632,5,97,0,0,632, - 633,5,108,0,0,633,634,5,115,0,0,634,636,5,101,0,0,635,617,1,0,0, - 0,635,621,1,0,0,0,635,625,1,0,0,0,635,630,1,0,0,0,636,170,1,0,0, - 0,637,656,5,34,0,0,638,651,5,92,0,0,639,641,7,0,0,0,640,639,1,0, - 0,0,641,642,1,0,0,0,642,640,1,0,0,0,642,643,1,0,0,0,643,648,1,0, - 0,0,644,646,5,13,0,0,645,644,1,0,0,0,645,646,1,0,0,0,646,647,1,0, - 0,0,647,649,5,10,0,0,648,645,1,0,0,0,648,649,1,0,0,0,649,652,1,0, - 0,0,650,652,9,0,0,0,651,640,1,0,0,0,651,650,1,0,0,0,652,655,1,0, - 0,0,653,655,8,2,0,0,654,638,1,0,0,0,654,653,1,0,0,0,655,658,1,0, - 0,0,656,654,1,0,0,0,656,657,1,0,0,0,657,659,1,0,0,0,658,656,1,0, - 0,0,659,660,5,34,0,0,660,172,1,0,0,0,661,663,7,3,0,0,662,661,1,0, - 0,0,663,667,1,0,0,0,664,666,7,4,0,0,665,664,1,0,0,0,666,669,1,0, - 0,0,667,665,1,0,0,0,667,668,1,0,0,0,668,174,1,0,0,0,669,667,1,0, - 0,0,670,672,7,5,0,0,671,670,1,0,0,0,672,673,1,0,0,0,673,671,1,0, - 0,0,673,674,1,0,0,0,674,176,1,0,0,0,675,678,3,179,89,0,676,678,3, - 181,90,0,677,675,1,0,0,0,677,676,1,0,0,0,678,178,1,0,0,0,679,681, - 3,175,87,0,680,679,1,0,0,0,680,681,1,0,0,0,681,682,1,0,0,0,682,683, - 5,46,0,0,683,688,3,175,87,0,684,685,3,175,87,0,685,686,5,46,0,0, - 686,688,1,0,0,0,687,680,1,0,0,0,687,684,1,0,0,0,688,180,1,0,0,0, - 
689,692,3,175,87,0,690,692,3,179,89,0,691,689,1,0,0,0,691,690,1, - 0,0,0,692,693,1,0,0,0,693,694,7,6,0,0,694,695,3,183,91,0,695,182, - 1,0,0,0,696,699,3,99,49,0,697,699,3,147,73,0,698,696,1,0,0,0,698, - 697,1,0,0,0,698,699,1,0,0,0,699,700,1,0,0,0,700,701,3,175,87,0,701, - 184,1,0,0,0,26,0,190,197,202,215,222,230,238,243,245,635,642,645, - 648,651,654,656,662,665,667,673,677,680,687,691,698,3,0,1,0,0,2, - 0,1,7,0 + 91,7,91,2,92,7,92,1,0,1,0,1,0,1,0,1,1,3,1,193,8,1,1,1,1,1,1,2,1, + 2,1,2,3,2,200,8,2,1,3,4,3,203,8,3,11,3,12,3,204,1,3,1,3,1,4,1,4, + 1,4,1,4,1,4,1,5,1,5,5,5,216,8,5,10,5,12,5,219,9,5,1,5,1,5,4,5,223, + 8,5,11,5,12,5,224,1,5,1,5,1,6,1,6,5,6,231,8,6,10,6,12,6,234,9,6, + 1,6,1,6,1,7,1,7,1,7,3,7,241,8,7,1,7,1,7,1,7,3,7,246,8,7,3,7,248, + 8,7,1,7,1,7,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,8,1,9,1,9,1,9,1,9,1,9, + 1,10,1,10,1,10,1,10,1,10,1,10,1,10,1,11,1,11,1,11,1,11,1,11,1,11, + 1,11,1,11,1,12,1,12,1,12,1,12,1,12,1,13,1,13,1,13,1,13,1,13,1,13, + 1,13,1,13,1,13,1,14,1,14,1,14,1,14,1,14,1,14,1,14,1,15,1,15,1,15, + 1,15,1,15,1,15,1,15,1,16,1,16,1,16,1,17,1,17,1,17,1,17,1,17,1,18, + 1,18,1,18,1,18,1,18,1,19,1,19,1,19,1,19,1,20,1,20,1,20,1,20,1,20, + 1,20,1,21,1,21,1,21,1,22,1,22,1,22,1,22,1,22,1,23,1,23,1,23,1,23, + 1,24,1,24,1,24,1,24,1,25,1,25,1,25,1,26,1,26,1,26,1,26,1,27,1,27, + 1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,27,1,28,1,28,1,28,1,28, + 1,28,1,28,1,28,1,29,1,29,1,29,1,29,1,29,1,29,1,30,1,30,1,30,1,30, + 1,30,1,30,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31,1,31, + 1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,32,1,33,1,33,1,33, + 1,33,1,33,1,33,1,33,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34,1,34, + 1,34,1,35,1,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,1,36, + 1,36,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,37,1,38, + 1,38,1,38,1,38,1,38,1,38,1,38,1,38,1,38,1,38,1,39,1,39,1,39,1,39, + 1,39,1,39,1,39,1,39,1,39,1,39,1,39,1,39,1,40,1,40,1,40,1,40,1,40, + 1,40,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,41,1,42, + 
1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,42,1,43,1,43,1,43, + 1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,43,1,44,1,44,1,44, + 1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,44,1,45, + 1,45,1,46,1,46,1,46,1,46,1,47,1,47,1,48,1,48,1,49,1,49,1,50,1,50, + 1,51,1,51,1,52,1,52,1,53,1,53,1,54,1,54,1,55,1,55,1,55,1,56,1,56, + 1,57,1,57,1,57,1,58,1,58,1,58,1,59,1,59,1,59,1,60,1,60,1,60,1,61, + 1,61,1,62,1,62,1,63,1,63,1,63,1,64,1,64,1,64,1,65,1,65,1,65,1,66, + 1,66,1,66,1,67,1,67,1,67,1,68,1,68,1,68,1,69,1,69,1,69,1,70,1,70, + 1,70,1,71,1,71,1,71,1,72,1,72,1,73,1,73,1,74,1,74,1,75,1,75,1,76, + 1,76,1,76,1,77,1,77,1,78,1,78,1,79,1,79,1,80,1,80,1,81,1,81,1,81, + 1,82,1,82,1,83,1,83,1,84,1,84,1,85,1,85,1,85,1,85,1,85,1,85,1,85, + 1,85,1,85,1,85,1,85,1,85,1,85,1,85,1,85,1,85,1,85,1,85,3,85,640, + 8,85,1,86,1,86,1,86,4,86,645,8,86,11,86,12,86,646,1,86,3,86,650, + 8,86,1,86,3,86,653,8,86,1,86,3,86,656,8,86,1,86,5,86,659,8,86,10, + 86,12,86,662,9,86,1,86,1,86,1,87,3,87,667,8,87,1,87,5,87,670,8,87, + 10,87,12,87,673,9,87,1,88,4,88,676,8,88,11,88,12,88,677,1,89,1,89, + 3,89,682,8,89,1,90,3,90,685,8,90,1,90,1,90,1,90,1,90,1,90,1,90,3, + 90,693,8,90,1,91,1,91,3,91,697,8,91,1,91,1,91,1,91,1,92,1,92,3,92, + 704,8,92,1,92,1,92,2,217,224,0,93,1,3,3,0,5,4,7,5,9,6,11,7,13,8, + 15,9,17,10,19,11,21,12,23,13,25,14,27,15,29,16,31,17,33,18,35,19, + 37,20,39,21,41,22,43,23,45,24,47,25,49,26,51,27,53,28,55,29,57,30, + 59,31,61,32,63,33,65,34,67,35,69,36,71,37,73,38,75,39,77,40,79,41, + 81,42,83,43,85,44,87,45,89,46,91,47,93,48,95,49,97,50,99,51,101, + 52,103,53,105,54,107,55,109,56,111,57,113,58,115,59,117,60,119,61, + 121,62,123,63,125,64,127,65,129,66,131,67,133,68,135,69,137,70,139, + 71,141,72,143,73,145,74,147,75,149,76,151,77,153,78,155,79,157,80, + 159,81,161,82,163,83,165,84,167,85,169,86,171,87,173,88,175,89,177, + 90,179,91,181,0,183,0,185,0,1,0,7,2,0,9,9,32,32,2,0,10,10,13,13, + 4,0,10,10,13,13,34,34,92,92,4,0,36,36,65,90,95,95,97,122,5,0,36, + 
36,48,57,65,90,95,95,97,122,1,0,48,57,2,0,69,69,101,101,728,0,1, + 1,0,0,0,0,5,1,0,0,0,0,7,1,0,0,0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1, + 0,0,0,0,15,1,0,0,0,0,17,1,0,0,0,0,19,1,0,0,0,0,21,1,0,0,0,0,23,1, + 0,0,0,0,25,1,0,0,0,0,27,1,0,0,0,0,29,1,0,0,0,0,31,1,0,0,0,0,33,1, + 0,0,0,0,35,1,0,0,0,0,37,1,0,0,0,0,39,1,0,0,0,0,41,1,0,0,0,0,43,1, + 0,0,0,0,45,1,0,0,0,0,47,1,0,0,0,0,49,1,0,0,0,0,51,1,0,0,0,0,53,1, + 0,0,0,0,55,1,0,0,0,0,57,1,0,0,0,0,59,1,0,0,0,0,61,1,0,0,0,0,63,1, + 0,0,0,0,65,1,0,0,0,0,67,1,0,0,0,0,69,1,0,0,0,0,71,1,0,0,0,0,73,1, + 0,0,0,0,75,1,0,0,0,0,77,1,0,0,0,0,79,1,0,0,0,0,81,1,0,0,0,0,83,1, + 0,0,0,0,85,1,0,0,0,0,87,1,0,0,0,0,89,1,0,0,0,0,91,1,0,0,0,0,93,1, + 0,0,0,0,95,1,0,0,0,0,97,1,0,0,0,0,99,1,0,0,0,0,101,1,0,0,0,0,103, + 1,0,0,0,0,105,1,0,0,0,0,107,1,0,0,0,0,109,1,0,0,0,0,111,1,0,0,0, + 0,113,1,0,0,0,0,115,1,0,0,0,0,117,1,0,0,0,0,119,1,0,0,0,0,121,1, + 0,0,0,0,123,1,0,0,0,0,125,1,0,0,0,0,127,1,0,0,0,0,129,1,0,0,0,0, + 131,1,0,0,0,0,133,1,0,0,0,0,135,1,0,0,0,0,137,1,0,0,0,0,139,1,0, + 0,0,0,141,1,0,0,0,0,143,1,0,0,0,0,145,1,0,0,0,0,147,1,0,0,0,0,149, + 1,0,0,0,0,151,1,0,0,0,0,153,1,0,0,0,0,155,1,0,0,0,0,157,1,0,0,0, + 0,159,1,0,0,0,0,161,1,0,0,0,0,163,1,0,0,0,0,165,1,0,0,0,0,167,1, + 0,0,0,0,169,1,0,0,0,0,171,1,0,0,0,0,173,1,0,0,0,0,175,1,0,0,0,0, + 177,1,0,0,0,0,179,1,0,0,0,1,187,1,0,0,0,3,192,1,0,0,0,5,196,1,0, + 0,0,7,202,1,0,0,0,9,208,1,0,0,0,11,213,1,0,0,0,13,228,1,0,0,0,15, + 247,1,0,0,0,17,251,1,0,0,0,19,259,1,0,0,0,21,264,1,0,0,0,23,271, + 1,0,0,0,25,279,1,0,0,0,27,284,1,0,0,0,29,293,1,0,0,0,31,300,1,0, + 0,0,33,307,1,0,0,0,35,310,1,0,0,0,37,315,1,0,0,0,39,320,1,0,0,0, + 41,324,1,0,0,0,43,330,1,0,0,0,45,333,1,0,0,0,47,338,1,0,0,0,49,342, + 1,0,0,0,51,346,1,0,0,0,53,349,1,0,0,0,55,353,1,0,0,0,57,364,1,0, + 0,0,59,371,1,0,0,0,61,377,1,0,0,0,63,383,1,0,0,0,65,394,1,0,0,0, + 67,404,1,0,0,0,69,411,1,0,0,0,71,421,1,0,0,0,73,427,1,0,0,0,75,434, + 1,0,0,0,77,445,1,0,0,0,79,455,1,0,0,0,81,467,1,0,0,0,83,473,1,0, + 
0,0,85,484,1,0,0,0,87,495,1,0,0,0,89,508,1,0,0,0,91,523,1,0,0,0, + 93,525,1,0,0,0,95,529,1,0,0,0,97,531,1,0,0,0,99,533,1,0,0,0,101, + 535,1,0,0,0,103,537,1,0,0,0,105,539,1,0,0,0,107,541,1,0,0,0,109, + 543,1,0,0,0,111,545,1,0,0,0,113,548,1,0,0,0,115,550,1,0,0,0,117, + 553,1,0,0,0,119,556,1,0,0,0,121,559,1,0,0,0,123,562,1,0,0,0,125, + 564,1,0,0,0,127,566,1,0,0,0,129,569,1,0,0,0,131,572,1,0,0,0,133, + 575,1,0,0,0,135,578,1,0,0,0,137,581,1,0,0,0,139,584,1,0,0,0,141, + 587,1,0,0,0,143,590,1,0,0,0,145,593,1,0,0,0,147,595,1,0,0,0,149, + 597,1,0,0,0,151,599,1,0,0,0,153,601,1,0,0,0,155,604,1,0,0,0,157, + 606,1,0,0,0,159,608,1,0,0,0,161,610,1,0,0,0,163,612,1,0,0,0,165, + 615,1,0,0,0,167,617,1,0,0,0,169,619,1,0,0,0,171,639,1,0,0,0,173, + 641,1,0,0,0,175,666,1,0,0,0,177,675,1,0,0,0,179,681,1,0,0,0,181, + 692,1,0,0,0,183,696,1,0,0,0,185,703,1,0,0,0,187,188,5,34,0,0,188, + 189,5,34,0,0,189,190,5,34,0,0,190,2,1,0,0,0,191,193,5,13,0,0,192, + 191,1,0,0,0,192,193,1,0,0,0,193,194,1,0,0,0,194,195,5,10,0,0,195, + 4,1,0,0,0,196,197,3,145,72,0,197,199,3,3,1,0,198,200,3,7,3,0,199, + 198,1,0,0,0,199,200,1,0,0,0,200,6,1,0,0,0,201,203,7,0,0,0,202,201, + 1,0,0,0,203,204,1,0,0,0,204,202,1,0,0,0,204,205,1,0,0,0,205,206, + 1,0,0,0,206,207,6,3,0,0,207,8,1,0,0,0,208,209,5,92,0,0,209,210,3, + 3,1,0,210,211,1,0,0,0,211,212,6,4,0,0,212,10,1,0,0,0,213,217,3,1, + 0,0,214,216,9,0,0,0,215,214,1,0,0,0,216,219,1,0,0,0,217,218,1,0, + 0,0,217,215,1,0,0,0,218,220,1,0,0,0,219,217,1,0,0,0,220,222,3,1, + 0,0,221,223,3,3,1,0,222,221,1,0,0,0,223,224,1,0,0,0,224,225,1,0, + 0,0,224,222,1,0,0,0,225,226,1,0,0,0,226,227,6,5,1,0,227,12,1,0,0, + 0,228,232,5,35,0,0,229,231,8,1,0,0,230,229,1,0,0,0,231,234,1,0,0, + 0,232,230,1,0,0,0,232,233,1,0,0,0,233,235,1,0,0,0,234,232,1,0,0, + 0,235,236,6,6,1,0,236,14,1,0,0,0,237,238,4,7,0,0,238,248,3,7,3,0, + 239,241,5,13,0,0,240,239,1,0,0,0,240,241,1,0,0,0,241,242,1,0,0,0, + 242,243,5,10,0,0,243,245,1,0,0,0,244,246,3,7,3,0,245,244,1,0,0,0, + 
245,246,1,0,0,0,246,248,1,0,0,0,247,237,1,0,0,0,247,240,1,0,0,0, + 248,249,1,0,0,0,249,250,6,7,2,0,250,16,1,0,0,0,251,252,5,105,0,0, + 252,253,5,110,0,0,253,254,5,116,0,0,254,255,5,101,0,0,255,256,5, + 103,0,0,256,257,5,101,0,0,257,258,5,114,0,0,258,18,1,0,0,0,259,260, + 5,114,0,0,260,261,5,101,0,0,261,262,5,97,0,0,262,263,5,108,0,0,263, + 20,1,0,0,0,264,265,5,115,0,0,265,266,5,116,0,0,266,267,5,114,0,0, + 267,268,5,105,0,0,268,269,5,110,0,0,269,270,5,103,0,0,270,22,1,0, + 0,0,271,272,5,98,0,0,272,273,5,111,0,0,273,274,5,111,0,0,274,275, + 5,108,0,0,275,276,5,101,0,0,276,277,5,97,0,0,277,278,5,110,0,0,278, + 24,1,0,0,0,279,280,5,118,0,0,280,281,5,111,0,0,281,282,5,105,0,0, + 282,283,5,100,0,0,283,26,1,0,0,0,284,285,5,102,0,0,285,286,5,117, + 0,0,286,287,5,110,0,0,287,288,5,99,0,0,288,289,5,116,0,0,289,290, + 5,105,0,0,290,291,5,111,0,0,291,292,5,110,0,0,292,28,1,0,0,0,293, + 294,5,105,0,0,294,295,5,110,0,0,295,296,5,108,0,0,296,297,5,105, + 0,0,297,298,5,110,0,0,298,299,5,101,0,0,299,30,1,0,0,0,300,301,5, + 114,0,0,301,302,5,101,0,0,302,303,5,116,0,0,303,304,5,117,0,0,304, + 305,5,114,0,0,305,306,5,110,0,0,306,32,1,0,0,0,307,308,5,105,0,0, + 308,309,5,102,0,0,309,34,1,0,0,0,310,311,5,101,0,0,311,312,5,108, + 0,0,312,313,5,105,0,0,313,314,5,102,0,0,314,36,1,0,0,0,315,316,5, + 101,0,0,316,317,5,108,0,0,317,318,5,115,0,0,318,319,5,101,0,0,319, + 38,1,0,0,0,320,321,5,102,0,0,321,322,5,111,0,0,322,323,5,114,0,0, + 323,40,1,0,0,0,324,325,5,119,0,0,325,326,5,104,0,0,326,327,5,105, + 0,0,327,328,5,108,0,0,328,329,5,101,0,0,329,42,1,0,0,0,330,331,5, + 105,0,0,331,332,5,110,0,0,332,44,1,0,0,0,333,334,5,115,0,0,334,335, + 5,116,0,0,335,336,5,101,0,0,336,337,5,112,0,0,337,46,1,0,0,0,338, + 339,5,105,0,0,339,340,5,110,0,0,340,341,5,102,0,0,341,48,1,0,0,0, + 342,343,5,97,0,0,343,344,5,110,0,0,344,345,5,100,0,0,345,50,1,0, + 0,0,346,347,5,111,0,0,347,348,5,114,0,0,348,52,1,0,0,0,349,350,5, + 110,0,0,350,351,5,111,0,0,351,352,5,116,0,0,352,54,1,0,0,0,353,354, + 
5,114,0,0,354,355,5,101,0,0,355,356,5,99,0,0,356,357,5,111,0,0,357, + 358,5,114,0,0,358,359,5,100,0,0,359,360,5,97,0,0,360,361,5,98,0, + 0,361,362,5,108,0,0,362,363,5,101,0,0,363,56,1,0,0,0,364,365,5,107, + 0,0,365,366,5,101,0,0,366,367,5,114,0,0,367,368,5,110,0,0,368,369, + 5,101,0,0,369,370,5,108,0,0,370,58,1,0,0,0,371,372,5,109,0,0,372, + 373,5,111,0,0,373,374,5,100,0,0,374,375,5,101,0,0,375,376,5,108, + 0,0,376,60,1,0,0,0,377,378,5,115,0,0,378,379,5,116,0,0,379,380,5, + 97,0,0,380,381,5,116,0,0,381,382,5,101,0,0,382,62,1,0,0,0,383,384, + 5,112,0,0,384,385,5,97,0,0,385,386,5,114,0,0,386,387,5,97,0,0,387, + 388,5,109,0,0,388,389,5,101,0,0,389,390,5,116,0,0,390,391,5,101, + 0,0,391,392,5,114,0,0,392,393,5,115,0,0,393,64,1,0,0,0,394,395,5, + 105,0,0,395,396,5,110,0,0,396,397,5,116,0,0,397,398,5,101,0,0,398, + 399,5,114,0,0,399,400,5,110,0,0,400,401,5,97,0,0,401,402,5,108,0, + 0,402,403,5,115,0,0,403,66,1,0,0,0,404,405,5,117,0,0,405,406,5,112, + 0,0,406,407,5,100,0,0,407,408,5,97,0,0,408,409,5,116,0,0,409,410, + 5,101,0,0,410,68,1,0,0,0,411,412,5,101,0,0,412,413,5,113,0,0,413, + 414,5,117,0,0,414,415,5,97,0,0,415,416,5,116,0,0,416,417,5,105,0, + 0,417,418,5,111,0,0,418,419,5,110,0,0,419,420,5,115,0,0,420,70,1, + 0,0,0,421,422,5,105,0,0,422,423,5,110,0,0,423,424,5,112,0,0,424, + 425,5,117,0,0,425,426,5,116,0,0,426,72,1,0,0,0,427,428,5,111,0,0, + 428,429,5,117,0,0,429,430,5,116,0,0,430,431,5,112,0,0,431,432,5, + 117,0,0,432,433,5,116,0,0,433,74,1,0,0,0,434,435,5,99,0,0,435,436, + 5,111,0,0,436,437,5,110,0,0,437,438,5,116,0,0,438,439,5,105,0,0, + 439,440,5,110,0,0,440,441,5,117,0,0,441,442,5,111,0,0,442,443,5, + 117,0,0,443,444,5,115,0,0,444,76,1,0,0,0,445,446,5,111,0,0,446,447, + 5,110,0,0,447,448,5,82,0,0,448,449,5,101,0,0,449,450,5,99,0,0,450, + 451,5,101,0,0,451,452,5,105,0,0,452,453,5,118,0,0,453,454,5,101, + 0,0,454,78,1,0,0,0,455,456,5,111,0,0,456,457,5,110,0,0,457,458,5, + 67,0,0,458,459,5,111,0,0,459,460,5,110,0,0,460,461,5,100,0,0,461, + 
462,5,105,0,0,462,463,5,116,0,0,463,464,5,105,0,0,464,465,5,111, + 0,0,465,466,5,110,0,0,466,80,1,0,0,0,467,468,5,115,0,0,468,469,5, + 112,0,0,469,470,5,105,0,0,470,471,5,107,0,0,471,472,5,101,0,0,472, + 82,1,0,0,0,473,474,5,105,0,0,474,475,5,110,0,0,475,476,5,104,0,0, + 476,477,5,105,0,0,477,478,5,98,0,0,478,479,5,105,0,0,479,480,5,116, + 0,0,480,481,5,111,0,0,481,482,5,114,0,0,482,483,5,121,0,0,483,84, + 1,0,0,0,484,485,5,101,0,0,485,486,5,120,0,0,486,487,5,99,0,0,487, + 488,5,105,0,0,488,489,5,116,0,0,489,490,5,97,0,0,490,491,5,116,0, + 0,491,492,5,111,0,0,492,493,5,114,0,0,493,494,5,121,0,0,494,86,1, + 0,0,0,495,496,5,64,0,0,496,497,5,104,0,0,497,498,5,111,0,0,498,499, + 5,109,0,0,499,500,5,111,0,0,500,501,5,103,0,0,501,502,5,101,0,0, + 502,503,5,110,0,0,503,504,5,101,0,0,504,505,5,111,0,0,505,506,5, + 117,0,0,506,507,5,115,0,0,507,88,1,0,0,0,508,509,5,64,0,0,509,510, + 5,104,0,0,510,511,5,101,0,0,511,512,5,116,0,0,512,513,5,101,0,0, + 513,514,5,114,0,0,514,515,5,111,0,0,515,516,5,103,0,0,516,517,5, + 101,0,0,517,518,5,110,0,0,518,519,5,101,0,0,519,520,5,111,0,0,520, + 521,5,117,0,0,521,522,5,115,0,0,522,90,1,0,0,0,523,524,5,64,0,0, + 524,92,1,0,0,0,525,526,5,46,0,0,526,527,5,46,0,0,527,528,5,46,0, + 0,528,94,1,0,0,0,529,530,5,40,0,0,530,96,1,0,0,0,531,532,5,41,0, + 0,532,98,1,0,0,0,533,534,5,43,0,0,534,100,1,0,0,0,535,536,5,126, + 0,0,536,102,1,0,0,0,537,538,5,124,0,0,538,104,1,0,0,0,539,540,5, + 94,0,0,540,106,1,0,0,0,541,542,5,38,0,0,542,108,1,0,0,0,543,544, + 5,91,0,0,544,110,1,0,0,0,545,546,5,60,0,0,546,547,5,45,0,0,547,112, + 1,0,0,0,548,549,5,93,0,0,549,114,1,0,0,0,550,551,5,91,0,0,551,552, + 5,91,0,0,552,116,1,0,0,0,553,554,5,93,0,0,554,555,5,93,0,0,555,118, + 1,0,0,0,556,557,5,60,0,0,557,558,5,60,0,0,558,120,1,0,0,0,559,560, + 5,62,0,0,560,561,5,62,0,0,561,122,1,0,0,0,562,563,5,60,0,0,563,124, + 1,0,0,0,564,565,5,62,0,0,565,126,1,0,0,0,566,567,5,60,0,0,567,568, + 5,61,0,0,568,128,1,0,0,0,569,570,5,43,0,0,570,571,5,61,0,0,571,130, + 
1,0,0,0,572,573,5,45,0,0,573,574,5,61,0,0,574,132,1,0,0,0,575,576, + 5,42,0,0,576,577,5,61,0,0,577,134,1,0,0,0,578,579,5,47,0,0,579,580, + 5,61,0,0,580,136,1,0,0,0,581,582,5,61,0,0,582,583,5,61,0,0,583,138, + 1,0,0,0,584,585,5,33,0,0,585,586,5,61,0,0,586,140,1,0,0,0,587,588, + 5,60,0,0,588,589,5,62,0,0,589,142,1,0,0,0,590,591,5,62,0,0,591,592, + 5,61,0,0,592,144,1,0,0,0,593,594,5,44,0,0,594,146,1,0,0,0,595,596, + 5,45,0,0,596,148,1,0,0,0,597,598,5,61,0,0,598,150,1,0,0,0,599,600, + 5,42,0,0,600,152,1,0,0,0,601,602,5,42,0,0,602,603,5,42,0,0,603,154, + 1,0,0,0,604,605,5,47,0,0,605,156,1,0,0,0,606,607,5,37,0,0,607,158, + 1,0,0,0,608,609,5,63,0,0,609,160,1,0,0,0,610,611,5,58,0,0,611,162, + 1,0,0,0,612,613,5,58,0,0,613,614,5,58,0,0,614,164,1,0,0,0,615,616, + 5,59,0,0,616,166,1,0,0,0,617,618,5,39,0,0,618,168,1,0,0,0,619,620, + 5,46,0,0,620,170,1,0,0,0,621,622,5,116,0,0,622,623,5,114,0,0,623, + 624,5,117,0,0,624,640,5,101,0,0,625,626,5,84,0,0,626,627,5,114,0, + 0,627,628,5,117,0,0,628,640,5,101,0,0,629,630,5,102,0,0,630,631, + 5,97,0,0,631,632,5,108,0,0,632,633,5,115,0,0,633,640,5,101,0,0,634, + 635,5,70,0,0,635,636,5,97,0,0,636,637,5,108,0,0,637,638,5,115,0, + 0,638,640,5,101,0,0,639,621,1,0,0,0,639,625,1,0,0,0,639,629,1,0, + 0,0,639,634,1,0,0,0,640,172,1,0,0,0,641,660,5,34,0,0,642,655,5,92, + 0,0,643,645,7,0,0,0,644,643,1,0,0,0,645,646,1,0,0,0,646,644,1,0, + 0,0,646,647,1,0,0,0,647,652,1,0,0,0,648,650,5,13,0,0,649,648,1,0, + 0,0,649,650,1,0,0,0,650,651,1,0,0,0,651,653,5,10,0,0,652,649,1,0, + 0,0,652,653,1,0,0,0,653,656,1,0,0,0,654,656,9,0,0,0,655,644,1,0, + 0,0,655,654,1,0,0,0,656,659,1,0,0,0,657,659,8,2,0,0,658,642,1,0, + 0,0,658,657,1,0,0,0,659,662,1,0,0,0,660,658,1,0,0,0,660,661,1,0, + 0,0,661,663,1,0,0,0,662,660,1,0,0,0,663,664,5,34,0,0,664,174,1,0, + 0,0,665,667,7,3,0,0,666,665,1,0,0,0,667,671,1,0,0,0,668,670,7,4, + 0,0,669,668,1,0,0,0,670,673,1,0,0,0,671,669,1,0,0,0,671,672,1,0, + 0,0,672,176,1,0,0,0,673,671,1,0,0,0,674,676,7,5,0,0,675,674,1,0, + 
0,0,676,677,1,0,0,0,677,675,1,0,0,0,677,678,1,0,0,0,678,178,1,0, + 0,0,679,682,3,181,90,0,680,682,3,183,91,0,681,679,1,0,0,0,681,680, + 1,0,0,0,682,180,1,0,0,0,683,685,3,177,88,0,684,683,1,0,0,0,684,685, + 1,0,0,0,685,686,1,0,0,0,686,687,3,169,84,0,687,688,3,177,88,0,688, + 693,1,0,0,0,689,690,3,177,88,0,690,691,3,169,84,0,691,693,1,0,0, + 0,692,684,1,0,0,0,692,689,1,0,0,0,693,182,1,0,0,0,694,697,3,177, + 88,0,695,697,3,181,90,0,696,694,1,0,0,0,696,695,1,0,0,0,697,698, + 1,0,0,0,698,699,7,6,0,0,699,700,3,185,92,0,700,184,1,0,0,0,701,704, + 3,99,49,0,702,704,3,147,73,0,703,701,1,0,0,0,703,702,1,0,0,0,703, + 704,1,0,0,0,704,705,1,0,0,0,705,706,3,177,88,0,706,186,1,0,0,0,26, + 0,192,199,204,217,224,232,240,245,247,639,646,649,652,655,658,660, + 666,669,671,677,681,684,692,696,703,3,0,1,0,0,2,0,1,7,0 ] class PyNestMLLexer(PyNestMLLexerBase): @@ -361,11 +362,12 @@ class PyNestMLLexer(PyNestMLLexerBase): DOUBLE_COLON = 83 SEMICOLON = 84 DIFFERENTIAL_ORDER = 85 - BOOLEAN_LITERAL = 86 - STRING_LITERAL = 87 - NAME = 88 - UNSIGNED_INTEGER = 89 - FLOAT = 90 + FULLSTOP = 86 + BOOLEAN_LITERAL = 87 + STRING_LITERAL = 88 + NAME = 89 + UNSIGNED_INTEGER = 90 + FLOAT = 91 channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN", u"COMMENT" ] @@ -383,7 +385,7 @@ class PyNestMLLexer(PyNestMLLexerBase): "'|'", "'^'", "'&'", "'['", "'<-'", "']'", "'[['", "']]'", "'<<'", "'>>'", "'<'", "'>'", "'<='", "'+='", "'-='", "'*='", "'/='", "'=='", "'!='", "'<>'", "'>='", "','", "'-'", "'='", "'*'", - "'**'", "'/'", "'%'", "'?'", "':'", "'::'", "';'", "'''" ] + "'**'", "'/'", "'%'", "'?'", "':'", "'::'", "';'", "'''", "'.'" ] symbolicNames = [ "", "INDENT", "DEDENT", "DOCSTRING_TRIPLEQUOTE", "KERNEL_JOINING", @@ -406,8 +408,8 @@ class PyNestMLLexer(PyNestMLLexerBase): "EQUALS_EQUALS", "EXCLAMATION_EQUALS", "LEFT_ANGLE_RIGHT_ANGLE", "RIGHT_ANGLE_EQUALS", "COMMA", "MINUS", "EQUALS", "STAR", "STAR_STAR", "FORWARD_SLASH", "PERCENT", "QUESTION", "COLON", "DOUBLE_COLON", - "SEMICOLON", 
"DIFFERENTIAL_ORDER", "BOOLEAN_LITERAL", "STRING_LITERAL", - "NAME", "UNSIGNED_INTEGER", "FLOAT" ] + "SEMICOLON", "DIFFERENTIAL_ORDER", "FULLSTOP", "BOOLEAN_LITERAL", + "STRING_LITERAL", "NAME", "UNSIGNED_INTEGER", "FLOAT" ] ruleNames = [ "DOCSTRING_TRIPLEQUOTE", "NEWLINE_FRAG", "KERNEL_JOINING", "WS", "LINE_ESCAPE", "DOCSTRING", "SL_COMMENT", "NEWLINE", @@ -431,9 +433,9 @@ class PyNestMLLexer(PyNestMLLexerBase): "EQUALS_EQUALS", "EXCLAMATION_EQUALS", "LEFT_ANGLE_RIGHT_ANGLE", "RIGHT_ANGLE_EQUALS", "COMMA", "MINUS", "EQUALS", "STAR", "STAR_STAR", "FORWARD_SLASH", "PERCENT", "QUESTION", "COLON", - "DOUBLE_COLON", "SEMICOLON", "DIFFERENTIAL_ORDER", "BOOLEAN_LITERAL", - "STRING_LITERAL", "NAME", "UNSIGNED_INTEGER", "FLOAT", - "POINT_FLOAT", "EXPONENT_FLOAT", "EXPONENT" ] + "DOUBLE_COLON", "SEMICOLON", "DIFFERENTIAL_ORDER", "FULLSTOP", + "BOOLEAN_LITERAL", "STRING_LITERAL", "NAME", "UNSIGNED_INTEGER", + "FLOAT", "POINT_FLOAT", "EXPONENT_FLOAT", "EXPONENT" ] grammarFileName = "PyNestMLLexer.g4" diff --git a/pynestml/generated/PyNestMLParser.py b/pynestml/generated/PyNestMLParser.py index 0cfec8e7d..2c51de196 100644 --- a/pynestml/generated/PyNestMLParser.py +++ b/pynestml/generated/PyNestMLParser.py @@ -10,7 +10,7 @@ def serializedATN(): return [ - 4,1,90,598,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, + 4,1,91,628,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,7, 6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,13, 2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,20, 7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,26, @@ -52,17 +52,20 @@ def serializedATN(): 35,481,9,35,1,35,1,35,1,35,1,35,1,36,1,36,1,36,1,36,1,36,4,36,492, 8,36,11,36,12,36,493,1,36,1,36,1,37,1,37,1,37,1,37,1,38,1,38,1,38, 1,38,1,38,1,38,1,38,4,38,509,8,38,11,38,12,38,510,1,38,1,38,1,39, - 1,39,1,39,1,39,1,39,1,39,4,39,521,8,39,11,39,12,39,522,1,39,1,39, - 1,40,1,40,1,40,1,40,1,40,3,40,532,8,40,1,40,1,40,5,40,536,8,40,10, - 
40,12,40,539,9,40,1,40,1,40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,549, - 8,41,1,41,1,41,1,41,1,41,1,41,1,42,1,42,3,42,558,8,42,1,43,1,43, - 1,43,1,43,1,43,1,43,3,43,566,8,43,1,43,1,43,1,43,1,44,1,44,1,44, - 1,44,1,44,1,44,5,44,577,8,44,10,44,12,44,580,9,44,3,44,582,8,44, - 1,44,1,44,3,44,586,8,44,1,44,1,44,1,44,1,45,1,45,1,45,1,46,1,46, + 1,39,1,39,1,39,1,39,1,39,3,39,521,8,39,1,39,1,39,1,39,1,39,5,39, + 527,8,39,10,39,12,39,530,9,39,3,39,532,8,39,1,39,3,39,535,8,39,4, + 39,537,8,39,11,39,12,39,538,1,39,1,39,1,40,1,40,1,40,1,40,1,40,3, + 40,548,8,40,1,40,1,40,5,40,552,8,40,10,40,12,40,555,9,40,1,40,1, + 40,1,40,1,41,1,41,1,41,1,41,1,41,3,41,565,8,41,1,41,1,41,1,41,1, + 41,1,41,1,42,1,42,3,42,574,8,42,1,43,1,43,1,43,1,43,1,43,1,43,1, + 43,1,43,1,43,5,43,585,8,43,10,43,12,43,588,9,43,3,43,590,8,43,1, + 43,3,43,593,8,43,1,43,3,43,596,8,43,1,43,1,43,1,43,1,44,1,44,1,44, + 1,44,1,44,1,44,5,44,607,8,44,10,44,12,44,610,9,44,3,44,612,8,44, + 1,44,1,44,3,44,616,8,44,1,44,1,44,1,44,1,45,1,45,1,45,1,46,1,46, 1,46,1,46,1,46,0,2,2,6,47,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28, 30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72, - 74,76,78,80,82,84,86,88,90,92,0,4,2,0,51,51,75,75,1,0,89,90,1,0, - 32,34,3,0,25,25,86,87,89,90,653,0,100,1,0,0,0,2,111,1,0,0,0,4,128, + 74,76,78,80,82,84,86,88,90,92,0,4,2,0,51,51,75,75,1,0,90,91,1,0, + 32,34,3,0,25,25,87,88,90,91,689,0,100,1,0,0,0,2,111,1,0,0,0,4,128, 1,0,0,0,6,143,1,0,0,0,8,193,1,0,0,0,10,198,1,0,0,0,12,205,1,0,0, 0,14,214,1,0,0,0,16,218,1,0,0,0,18,220,1,0,0,0,20,233,1,0,0,0,22, 248,1,0,0,0,24,266,1,0,0,0,26,280,1,0,0,0,28,299,1,0,0,0,30,310, @@ -71,21 +74,21 @@ def serializedATN(): 48,382,1,0,0,0,50,386,1,0,0,0,52,396,1,0,0,0,54,401,1,0,0,0,56,406, 1,0,0,0,58,410,1,0,0,0,60,424,1,0,0,0,62,431,1,0,0,0,64,437,1,0, 0,0,66,441,1,0,0,0,68,458,1,0,0,0,70,472,1,0,0,0,72,486,1,0,0,0, - 74,497,1,0,0,0,76,501,1,0,0,0,78,514,1,0,0,0,80,526,1,0,0,0,82,543, - 1,0,0,0,84,557,1,0,0,0,86,559,1,0,0,0,88,570,1,0,0,0,90,590,1,0, - 
0,0,92,593,1,0,0,0,94,101,5,10,0,0,95,101,5,11,0,0,96,101,5,12,0, + 74,497,1,0,0,0,76,501,1,0,0,0,78,514,1,0,0,0,80,542,1,0,0,0,82,559, + 1,0,0,0,84,573,1,0,0,0,86,575,1,0,0,0,88,600,1,0,0,0,90,620,1,0, + 0,0,92,623,1,0,0,0,94,101,5,10,0,0,95,101,5,11,0,0,96,101,5,12,0, 0,97,101,5,13,0,0,98,101,5,14,0,0,99,101,3,2,1,0,100,94,1,0,0,0, 100,95,1,0,0,0,100,96,1,0,0,0,100,97,1,0,0,0,100,98,1,0,0,0,100, 99,1,0,0,0,101,1,1,0,0,0,102,103,6,1,-1,0,103,104,5,49,0,0,104,105, - 3,2,1,0,105,106,5,50,0,0,106,112,1,0,0,0,107,108,5,89,0,0,108,109, - 5,79,0,0,109,112,3,2,1,2,110,112,5,88,0,0,111,102,1,0,0,0,111,107, + 3,2,1,0,105,106,5,50,0,0,106,112,1,0,0,0,107,108,5,90,0,0,108,109, + 5,79,0,0,109,112,3,2,1,2,110,112,5,89,0,0,111,102,1,0,0,0,111,107, 1,0,0,0,111,110,1,0,0,0,112,124,1,0,0,0,113,116,10,3,0,0,114,117, 5,77,0,0,115,117,5,79,0,0,116,114,1,0,0,0,116,115,1,0,0,0,117,118, 1,0,0,0,118,123,3,2,1,4,119,120,10,4,0,0,120,121,5,78,0,0,121,123, 3,4,2,0,122,113,1,0,0,0,122,119,1,0,0,0,123,126,1,0,0,0,124,122, 1,0,0,0,124,125,1,0,0,0,125,3,1,0,0,0,126,124,1,0,0,0,127,129,7, 0,0,0,128,127,1,0,0,0,128,129,1,0,0,0,129,130,1,0,0,0,130,131,5, - 89,0,0,131,5,1,0,0,0,132,133,6,3,-1,0,133,134,5,49,0,0,134,135,3, + 90,0,0,131,5,1,0,0,0,132,133,6,3,-1,0,133,134,5,49,0,0,134,135,3, 6,3,0,135,136,5,50,0,0,136,144,1,0,0,0,137,138,3,10,5,0,138,139, 3,6,3,9,139,144,1,0,0,0,140,141,5,28,0,0,141,144,3,6,3,4,142,144, 3,8,4,0,143,132,1,0,0,0,143,137,1,0,0,0,143,140,1,0,0,0,143,142, @@ -102,8 +105,8 @@ def serializedATN(): 148,1,0,0,0,179,155,1,0,0,0,179,161,1,0,0,0,179,165,1,0,0,0,179, 169,1,0,0,0,179,173,1,0,0,0,180,183,1,0,0,0,181,179,1,0,0,0,181, 182,1,0,0,0,182,7,1,0,0,0,183,181,1,0,0,0,184,194,3,20,10,0,185, - 194,5,86,0,0,186,188,7,1,0,0,187,189,3,18,9,0,188,187,1,0,0,0,188, - 189,1,0,0,0,189,194,1,0,0,0,190,194,5,87,0,0,191,194,5,25,0,0,192, + 194,5,87,0,0,186,188,7,1,0,0,187,189,3,18,9,0,188,187,1,0,0,0,188, + 189,1,0,0,0,189,194,1,0,0,0,190,194,5,88,0,0,191,194,5,25,0,0,192, 
194,3,18,9,0,193,184,1,0,0,0,193,185,1,0,0,0,193,186,1,0,0,0,193, 190,1,0,0,0,193,191,1,0,0,0,193,192,1,0,0,0,194,9,1,0,0,0,195,199, 5,51,0,0,196,199,5,75,0,0,197,199,5,52,0,0,198,195,1,0,0,0,198,196, @@ -115,16 +118,16 @@ def serializedATN(): 213,215,5,64,0,0,214,207,1,0,0,0,214,208,1,0,0,0,214,209,1,0,0,0, 214,210,1,0,0,0,214,211,1,0,0,0,214,212,1,0,0,0,214,213,1,0,0,0, 215,15,1,0,0,0,216,219,5,26,0,0,217,219,5,27,0,0,218,216,1,0,0,0, - 218,217,1,0,0,0,219,17,1,0,0,0,220,225,5,88,0,0,221,222,5,56,0,0, + 218,217,1,0,0,0,219,17,1,0,0,0,220,225,5,89,0,0,221,222,5,56,0,0, 222,223,3,6,3,0,223,224,5,58,0,0,224,226,1,0,0,0,225,221,1,0,0,0, 225,226,1,0,0,0,226,230,1,0,0,0,227,229,5,85,0,0,228,227,1,0,0,0, 229,232,1,0,0,0,230,228,1,0,0,0,230,231,1,0,0,0,231,19,1,0,0,0,232, - 230,1,0,0,0,233,234,5,88,0,0,234,243,5,49,0,0,235,240,3,6,3,0,236, + 230,1,0,0,0,233,234,5,89,0,0,234,243,5,49,0,0,235,240,3,6,3,0,236, 237,5,74,0,0,237,239,3,6,3,0,238,236,1,0,0,0,239,242,1,0,0,0,240, 238,1,0,0,0,240,241,1,0,0,0,241,244,1,0,0,0,242,240,1,0,0,0,243, 235,1,0,0,0,243,244,1,0,0,0,244,245,1,0,0,0,245,246,5,50,0,0,246, 21,1,0,0,0,247,249,5,29,0,0,248,247,1,0,0,0,248,249,1,0,0,0,249, - 250,1,0,0,0,250,251,5,16,0,0,251,252,5,88,0,0,252,253,3,0,0,0,253, + 250,1,0,0,0,250,251,5,16,0,0,251,252,5,89,0,0,252,253,3,0,0,0,253, 254,5,76,0,0,254,256,3,6,3,0,255,257,5,84,0,0,256,255,1,0,0,0,256, 257,1,0,0,0,257,261,1,0,0,0,258,260,3,42,21,0,259,258,1,0,0,0,260, 263,1,0,0,0,261,259,1,0,0,0,261,262,1,0,0,0,262,264,1,0,0,0,263, @@ -162,8 +165,8 @@ def serializedATN(): 0,0,0,366,367,3,38,19,0,367,368,5,9,0,0,368,41,1,0,0,0,369,377,5, 45,0,0,370,377,5,46,0,0,371,372,5,47,0,0,372,373,3,44,22,0,373,374, 5,83,0,0,374,375,3,46,23,0,375,377,1,0,0,0,376,369,1,0,0,0,376,370, - 1,0,0,0,376,371,1,0,0,0,377,43,1,0,0,0,378,379,5,88,0,0,379,45,1, - 0,0,0,380,381,5,88,0,0,381,47,1,0,0,0,382,384,5,17,0,0,383,385,3, + 1,0,0,0,376,371,1,0,0,0,377,43,1,0,0,0,378,379,5,89,0,0,379,45,1, + 
0,0,0,380,381,5,89,0,0,381,47,1,0,0,0,382,384,5,17,0,0,383,385,3, 6,3,0,384,383,1,0,0,0,384,385,1,0,0,0,385,49,1,0,0,0,386,390,3,52, 26,0,387,389,3,54,27,0,388,387,1,0,0,0,389,392,1,0,0,0,390,388,1, 0,0,0,390,391,1,0,0,0,391,394,1,0,0,0,392,390,1,0,0,0,393,395,3, @@ -171,7 +174,7 @@ def serializedATN(): 18,0,0,397,398,3,6,3,0,398,399,5,82,0,0,399,400,3,28,14,0,400,53, 1,0,0,0,401,402,5,19,0,0,402,403,3,6,3,0,403,404,5,82,0,0,404,405, 3,28,14,0,405,55,1,0,0,0,406,407,5,20,0,0,407,408,5,82,0,0,408,409, - 3,28,14,0,409,57,1,0,0,0,410,411,5,21,0,0,411,412,5,88,0,0,412,413, + 3,28,14,0,409,57,1,0,0,0,410,411,5,21,0,0,411,412,5,89,0,0,412,413, 5,23,0,0,413,414,3,6,3,0,414,415,5,48,0,0,415,416,3,6,3,0,416,418, 5,24,0,0,417,419,5,75,0,0,418,417,1,0,0,0,418,419,1,0,0,0,419,420, 1,0,0,0,420,421,7,1,0,0,421,422,5,82,0,0,422,423,3,28,14,0,423,59, @@ -179,7 +182,7 @@ def serializedATN(): 3,28,14,0,428,61,1,0,0,0,429,432,3,64,32,0,430,432,5,9,0,0,431,429, 1,0,0,0,431,430,1,0,0,0,432,433,1,0,0,0,433,431,1,0,0,0,433,434, 1,0,0,0,434,435,1,0,0,0,435,436,5,0,0,1,436,63,1,0,0,0,437,438,5, - 31,0,0,438,439,5,88,0,0,439,440,3,66,33,0,440,65,1,0,0,0,441,442, + 31,0,0,438,439,5,89,0,0,439,440,3,66,33,0,440,65,1,0,0,0,441,442, 5,82,0,0,442,443,5,9,0,0,443,452,5,1,0,0,444,453,3,72,36,0,445,453, 3,76,38,0,446,453,3,78,39,0,447,453,3,86,43,0,448,453,3,88,44,0, 449,453,3,68,34,0,450,453,3,70,35,0,451,453,3,74,37,0,452,444,1, @@ -187,7 +190,7 @@ def serializedATN(): 0,0,0,452,449,1,0,0,0,452,450,1,0,0,0,452,451,1,0,0,0,453,454,1, 0,0,0,454,452,1,0,0,0,454,455,1,0,0,0,455,456,1,0,0,0,456,457,5, 2,0,0,457,67,1,0,0,0,458,459,5,40,0,0,459,460,5,49,0,0,460,465,5, - 88,0,0,461,462,5,74,0,0,462,464,3,92,46,0,463,461,1,0,0,0,464,467, + 89,0,0,461,462,5,74,0,0,462,464,3,92,46,0,463,461,1,0,0,0,464,467, 1,0,0,0,465,463,1,0,0,0,465,466,1,0,0,0,466,468,1,0,0,0,467,465, 1,0,0,0,468,469,5,50,0,0,469,470,5,82,0,0,470,471,3,28,14,0,471, 69,1,0,0,0,472,473,5,41,0,0,473,474,5,49,0,0,474,479,3,6,3,0,475, @@ 
-202,34 +205,43 @@ def serializedATN(): 3,22,11,0,506,509,3,24,12,0,507,509,3,26,13,0,508,505,1,0,0,0,508, 506,1,0,0,0,508,507,1,0,0,0,509,510,1,0,0,0,510,508,1,0,0,0,510, 511,1,0,0,0,511,512,1,0,0,0,512,513,5,2,0,0,513,77,1,0,0,0,514,515, - 5,37,0,0,515,516,5,82,0,0,516,517,5,9,0,0,517,520,5,1,0,0,518,521, + 5,37,0,0,515,516,5,82,0,0,516,517,5,9,0,0,517,536,5,1,0,0,518,521, 3,80,40,0,519,521,3,82,41,0,520,518,1,0,0,0,520,519,1,0,0,0,521, - 522,1,0,0,0,522,520,1,0,0,0,522,523,1,0,0,0,523,524,1,0,0,0,524, - 525,5,2,0,0,525,79,1,0,0,0,526,531,5,88,0,0,527,528,5,56,0,0,528, - 529,3,6,3,0,529,530,5,58,0,0,530,532,1,0,0,0,531,527,1,0,0,0,531, - 532,1,0,0,0,532,533,1,0,0,0,533,537,5,57,0,0,534,536,3,84,42,0,535, - 534,1,0,0,0,536,539,1,0,0,0,537,535,1,0,0,0,537,538,1,0,0,0,538, - 540,1,0,0,0,539,537,1,0,0,0,540,541,5,42,0,0,541,542,5,9,0,0,542, - 81,1,0,0,0,543,548,5,88,0,0,544,545,5,56,0,0,545,546,3,6,3,0,546, - 547,5,58,0,0,547,549,1,0,0,0,548,544,1,0,0,0,548,549,1,0,0,0,549, - 550,1,0,0,0,550,551,3,0,0,0,551,552,5,57,0,0,552,553,5,39,0,0,553, - 554,5,9,0,0,554,83,1,0,0,0,555,558,5,43,0,0,556,558,5,44,0,0,557, - 555,1,0,0,0,557,556,1,0,0,0,558,85,1,0,0,0,559,560,5,38,0,0,560, - 561,5,82,0,0,561,562,5,9,0,0,562,565,5,1,0,0,563,566,5,42,0,0,564, - 566,5,39,0,0,565,563,1,0,0,0,565,564,1,0,0,0,566,567,1,0,0,0,567, - 568,5,9,0,0,568,569,5,2,0,0,569,87,1,0,0,0,570,571,5,15,0,0,571, - 572,5,88,0,0,572,581,5,49,0,0,573,578,3,90,45,0,574,575,5,74,0,0, - 575,577,3,90,45,0,576,574,1,0,0,0,577,580,1,0,0,0,578,576,1,0,0, - 0,578,579,1,0,0,0,579,582,1,0,0,0,580,578,1,0,0,0,581,573,1,0,0, - 0,581,582,1,0,0,0,582,583,1,0,0,0,583,585,5,50,0,0,584,586,3,0,0, - 0,585,584,1,0,0,0,585,586,1,0,0,0,586,587,1,0,0,0,587,588,5,82,0, - 0,588,589,3,28,14,0,589,89,1,0,0,0,590,591,5,88,0,0,591,592,3,0, - 0,0,592,91,1,0,0,0,593,594,5,88,0,0,594,595,5,76,0,0,595,596,7,3, - 0,0,596,93,1,0,0,0,63,100,111,116,122,124,128,143,152,158,179,181, - 
188,193,198,205,214,218,225,230,240,243,248,256,261,270,275,291, - 295,304,310,315,321,331,336,339,346,352,358,363,376,384,390,394, - 418,431,433,452,454,465,479,493,508,510,520,522,531,537,548,557, - 565,578,581,585 + 534,1,0,0,0,522,531,5,49,0,0,523,528,3,90,45,0,524,525,5,74,0,0, + 525,527,3,90,45,0,526,524,1,0,0,0,527,530,1,0,0,0,528,526,1,0,0, + 0,528,529,1,0,0,0,529,532,1,0,0,0,530,528,1,0,0,0,531,523,1,0,0, + 0,531,532,1,0,0,0,532,533,1,0,0,0,533,535,5,50,0,0,534,522,1,0,0, + 0,534,535,1,0,0,0,535,537,1,0,0,0,536,520,1,0,0,0,537,538,1,0,0, + 0,538,536,1,0,0,0,538,539,1,0,0,0,539,540,1,0,0,0,540,541,5,2,0, + 0,541,79,1,0,0,0,542,547,5,89,0,0,543,544,5,56,0,0,544,545,3,6,3, + 0,545,546,5,58,0,0,546,548,1,0,0,0,547,543,1,0,0,0,547,548,1,0,0, + 0,548,549,1,0,0,0,549,553,5,57,0,0,550,552,3,84,42,0,551,550,1,0, + 0,0,552,555,1,0,0,0,553,551,1,0,0,0,553,554,1,0,0,0,554,556,1,0, + 0,0,555,553,1,0,0,0,556,557,5,42,0,0,557,558,5,9,0,0,558,81,1,0, + 0,0,559,564,5,89,0,0,560,561,5,56,0,0,561,562,3,6,3,0,562,563,5, + 58,0,0,563,565,1,0,0,0,564,560,1,0,0,0,564,565,1,0,0,0,565,566,1, + 0,0,0,566,567,3,0,0,0,567,568,5,57,0,0,568,569,5,39,0,0,569,570, + 5,9,0,0,570,83,1,0,0,0,571,574,5,43,0,0,572,574,5,44,0,0,573,571, + 1,0,0,0,573,572,1,0,0,0,574,85,1,0,0,0,575,576,5,38,0,0,576,577, + 5,82,0,0,577,578,5,9,0,0,578,595,5,1,0,0,579,592,5,42,0,0,580,589, + 5,49,0,0,581,586,3,90,45,0,582,583,5,74,0,0,583,585,3,90,45,0,584, + 582,1,0,0,0,585,588,1,0,0,0,586,584,1,0,0,0,586,587,1,0,0,0,587, + 590,1,0,0,0,588,586,1,0,0,0,589,581,1,0,0,0,589,590,1,0,0,0,590, + 591,1,0,0,0,591,593,5,50,0,0,592,580,1,0,0,0,592,593,1,0,0,0,593, + 596,1,0,0,0,594,596,5,39,0,0,595,579,1,0,0,0,595,594,1,0,0,0,596, + 597,1,0,0,0,597,598,5,9,0,0,598,599,5,2,0,0,599,87,1,0,0,0,600,601, + 5,15,0,0,601,602,5,89,0,0,602,611,5,49,0,0,603,608,3,90,45,0,604, + 605,5,74,0,0,605,607,3,90,45,0,606,604,1,0,0,0,607,610,1,0,0,0,608, + 606,1,0,0,0,608,609,1,0,0,0,609,612,1,0,0,0,610,608,1,0,0,0,611, + 
603,1,0,0,0,611,612,1,0,0,0,612,613,1,0,0,0,613,615,5,50,0,0,614, + 616,3,0,0,0,615,614,1,0,0,0,615,616,1,0,0,0,616,617,1,0,0,0,617, + 618,5,82,0,0,618,619,3,28,14,0,619,89,1,0,0,0,620,621,5,89,0,0,621, + 622,3,0,0,0,622,91,1,0,0,0,623,624,5,89,0,0,624,625,5,76,0,0,625, + 626,7,3,0,0,626,93,1,0,0,0,69,100,111,116,122,124,128,143,152,158, + 179,181,188,193,198,205,214,218,225,230,240,243,248,256,261,270, + 275,291,295,304,310,315,321,331,336,339,346,352,358,363,376,384, + 390,394,418,431,433,452,454,465,479,493,508,510,520,528,531,534, + 538,547,553,564,573,586,589,592,595,608,611,615 ] class PyNestMLParser ( Parser ): @@ -257,7 +269,7 @@ class PyNestMLParser ( Parser ): "'<<'", "'>>'", "'<'", "'>'", "'<='", "'+='", "'-='", "'*='", "'/='", "'=='", "'!='", "'<>'", "'>='", "','", "'-'", "'='", "'*'", "'**'", "'/'", "'%'", "'?'", "':'", - "'::'", "';'", "'''" ] + "'::'", "';'", "'''", "'.'" ] symbolicNames = [ "", "INDENT", "DEDENT", "DOCSTRING_TRIPLEQUOTE", "KERNEL_JOINING", "WS", "LINE_ESCAPE", "DOCSTRING", @@ -283,8 +295,8 @@ class PyNestMLParser ( Parser ): "RIGHT_ANGLE_EQUALS", "COMMA", "MINUS", "EQUALS", "STAR", "STAR_STAR", "FORWARD_SLASH", "PERCENT", "QUESTION", "COLON", "DOUBLE_COLON", "SEMICOLON", "DIFFERENTIAL_ORDER", - "BOOLEAN_LITERAL", "STRING_LITERAL", "NAME", "UNSIGNED_INTEGER", - "FLOAT" ] + "FULLSTOP", "BOOLEAN_LITERAL", "STRING_LITERAL", "NAME", + "UNSIGNED_INTEGER", "FLOAT" ] RULE_dataType = 0 RULE_unitType = 1 @@ -433,11 +445,12 @@ class PyNestMLParser ( Parser ): DOUBLE_COLON=83 SEMICOLON=84 DIFFERENTIAL_ORDER=85 - BOOLEAN_LITERAL=86 - STRING_LITERAL=87 - NAME=88 - UNSIGNED_INTEGER=89 - FLOAT=90 + FULLSTOP=86 + BOOLEAN_LITERAL=87 + STRING_LITERAL=88 + NAME=89 + UNSIGNED_INTEGER=90 + FLOAT=91 def __init__(self, input:TokenStream, output:TextIO = sys.stdout): super().__init__(input, output) @@ -525,7 +538,7 @@ def dataType(self): self.state = 98 localctx.isVoid = self.match(PyNestMLParser.VOID_KEYWORD) pass - elif token in [49, 88, 89]: + elif token 
in [49, 89, 90]: self.enterOuterAlt(localctx, 6) self.state = 99 localctx.unit = self.unitType(0) @@ -624,7 +637,7 @@ def unitType(self, _p:int=0): self.state = 105 localctx.rightParentheses = self.match(PyNestMLParser.RIGHT_PAREN) pass - elif token in [89]: + elif token in [90]: self.state = 107 localctx.unitlessLiteral = self.match(PyNestMLParser.UNSIGNED_INTEGER) self.state = 108 @@ -632,7 +645,7 @@ def unitType(self, _p:int=0): self.state = 109 localctx.right = self.unitType(2) pass - elif token in [88]: + elif token in [89]: self.state = 110 localctx.unit = self.match(PyNestMLParser.NAME) pass @@ -889,7 +902,7 @@ def expression(self, _p:int=0): self.state = 141 localctx.term = self.expression(4) pass - elif token in [25, 86, 87, 88, 89, 90]: + elif token in [25, 87, 88, 89, 90, 91]: self.state = 142 self.simpleExpression() pass @@ -1121,7 +1134,7 @@ def simpleExpression(self): self.enterOuterAlt(localctx, 3) self.state = 186 _la = self._input.LA(1) - if not(_la==89 or _la==90): + if not(_la==90 or _la==91): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -1596,7 +1609,7 @@ def functionCall(self): self.state = 243 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 63489) != 0): + if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 126977) != 0): self.state = 235 self.expression(0) self.state = 240 @@ -1980,7 +1993,7 @@ def block(self): self.state = 304 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 543621120) != 0) or _la==88): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 543621120) != 0) or _la==89): break self.state = 306 @@ -2029,7 +2042,7 @@ def stmt(self): self.state = 310 self._errHandler.sync(self) token = self._input.LA(1) - if token in [16, 17, 29, 88]: + if token 
in [16, 17, 29, 89]: self.enterOuterAlt(localctx, 1) self.state = 308 self.smallStmt() @@ -2682,7 +2695,7 @@ def returnStmt(self): self.state = 384 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 63489) != 0): + if (((_la) & ~0x3f) == 0 and ((1 << _la) & 7318349696466944) != 0) or ((((_la - 75)) & ~0x3f) == 0 and ((1 << (_la - 75)) & 126977) != 0): self.state = 383 self.expression(0) @@ -3021,7 +3034,7 @@ def forStmt(self): self.state = 420 _la = self._input.LA(1) - if not(_la==89 or _la==90): + if not(_la==90 or _la==91): self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) @@ -3644,7 +3657,7 @@ def blockWithVariables(self): self.state = 493 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==16 or _la==29 or _la==88): + if not (_la==16 or _la==29 or _la==89): break self.state = 495 @@ -3789,7 +3802,7 @@ def equationsBlock(self): self.state = 505 self.inlineExpression() pass - elif token in [88]: + elif token in [89]: self.state = 506 self.odeEquation() pass @@ -3803,7 +3816,7 @@ def equationsBlock(self): self.state = 510 self._errHandler.sync(self) _la = self._input.LA(1) - if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==88): + if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & 1610678272) != 0) or _la==89): break self.state = 512 @@ -3853,6 +3866,31 @@ def continuousInputPort(self, i:int=None): return self.getTypedRuleContext(PyNestMLParser.ContinuousInputPortContext,i) + def LEFT_PAREN(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.LEFT_PAREN) + else: + return self.getToken(PyNestMLParser.LEFT_PAREN, i) + + def RIGHT_PAREN(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.RIGHT_PAREN) + else: + return self.getToken(PyNestMLParser.RIGHT_PAREN, i) + + def parameter(self, i:int=None): + if i is None: + return 
self.getTypedRuleContexts(PyNestMLParser.ParameterContext) + else: + return self.getTypedRuleContext(PyNestMLParser.ParameterContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.COMMA) + else: + return self.getToken(PyNestMLParser.COMMA, i) + def getRuleIndex(self): return PyNestMLParser.RULE_inputBlock @@ -3880,7 +3918,7 @@ def inputBlock(self): self.match(PyNestMLParser.NEWLINE) self.state = 517 self.match(PyNestMLParser.INDENT) - self.state = 520 + self.state = 536 self._errHandler.sync(self) _la = self._input.LA(1) while True: @@ -3898,13 +3936,43 @@ def inputBlock(self): pass - self.state = 522 + self.state = 534 self._errHandler.sync(self) _la = self._input.LA(1) - if not (_la==88): + if _la==49: + self.state = 522 + self.match(PyNestMLParser.LEFT_PAREN) + self.state = 531 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==89: + self.state = 523 + self.parameter() + self.state = 528 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==74: + self.state = 524 + self.match(PyNestMLParser.COMMA) + self.state = 525 + self.parameter() + self.state = 530 + self._errHandler.sync(self) + _la = self._input.LA(1) + + + + self.state = 533 + self.match(PyNestMLParser.RIGHT_PAREN) + + + self.state = 538 + self._errHandler.sync(self) + _la = self._input.LA(1) + if not (_la==89): break - self.state = 524 + self.state = 540 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -3972,35 +4040,35 @@ def spikeInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 526 + self.state = 542 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 531 + self.state = 547 self._errHandler.sync(self) _la = self._input.LA(1) if _la==56: - self.state = 527 + self.state = 543 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 528 + self.state = 544 localctx.sizeParameter = self.expression(0) - self.state = 529 + 
self.state = 545 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 533 + self.state = 549 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 537 + self.state = 553 self._errHandler.sync(self) _la = self._input.LA(1) while _la==43 or _la==44: - self.state = 534 + self.state = 550 self.inputQualifier() - self.state = 539 + self.state = 555 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 540 + self.state = 556 self.match(PyNestMLParser.SPIKE_KEYWORD) - self.state = 541 + self.state = 557 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4065,27 +4133,27 @@ def continuousInputPort(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 543 + self.state = 559 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 548 + self.state = 564 self._errHandler.sync(self) _la = self._input.LA(1) if _la==56: - self.state = 544 + self.state = 560 self.match(PyNestMLParser.LEFT_SQUARE_BRACKET) - self.state = 545 + self.state = 561 localctx.sizeParameter = self.expression(0) - self.state = 546 + self.state = 562 self.match(PyNestMLParser.RIGHT_SQUARE_BRACKET) - self.state = 550 + self.state = 566 self.dataType() - self.state = 551 + self.state = 567 self.match(PyNestMLParser.LEFT_ANGLE_MINUS) - self.state = 552 + self.state = 568 self.match(PyNestMLParser.CONTINUOUS_KEYWORD) - self.state = 553 + self.state = 569 self.match(PyNestMLParser.NEWLINE) except RecognitionException as re: localctx.exception = re @@ -4129,15 +4197,15 @@ def inputQualifier(self): self.enterRule(localctx, 84, self.RULE_inputQualifier) try: self.enterOuterAlt(localctx, 1) - self.state = 557 + self.state = 573 self._errHandler.sync(self) token = self._input.LA(1) if token in [43]: - self.state = 555 + self.state = 571 localctx.isInhibitory = self.match(PyNestMLParser.INHIBITORY_KEYWORD) pass elif token in [44]: - self.state = 556 + self.state = 572 localctx.isExcitatory = 
self.match(PyNestMLParser.EXCITATORY_KEYWORD) pass else: @@ -4159,6 +4227,7 @@ def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): super().__init__(parent, invokingState) self.parser = parser self.isSpike = None # Token + self.attribute = None # ParameterContext self.isContinuous = None # Token def OUTPUT_KEYWORD(self): @@ -4179,11 +4248,30 @@ def INDENT(self): def DEDENT(self): return self.getToken(PyNestMLParser.DEDENT, 0) + def CONTINUOUS_KEYWORD(self): + return self.getToken(PyNestMLParser.CONTINUOUS_KEYWORD, 0) + def SPIKE_KEYWORD(self): return self.getToken(PyNestMLParser.SPIKE_KEYWORD, 0) - def CONTINUOUS_KEYWORD(self): - return self.getToken(PyNestMLParser.CONTINUOUS_KEYWORD, 0) + def LEFT_PAREN(self): + return self.getToken(PyNestMLParser.LEFT_PAREN, 0) + + def RIGHT_PAREN(self): + return self.getToken(PyNestMLParser.RIGHT_PAREN, 0) + + def parameter(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(PyNestMLParser.ParameterContext) + else: + return self.getTypedRuleContext(PyNestMLParser.ParameterContext,i) + + + def COMMA(self, i:int=None): + if i is None: + return self.getTokens(PyNestMLParser.COMMA) + else: + return self.getToken(PyNestMLParser.COMMA, i) def getRuleIndex(self): return PyNestMLParser.RULE_outputBlock @@ -4201,33 +4289,64 @@ def outputBlock(self): localctx = PyNestMLParser.OutputBlockContext(self, self._ctx, self.state) self.enterRule(localctx, 86, self.RULE_outputBlock) + self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 559 + self.state = 575 self.match(PyNestMLParser.OUTPUT_KEYWORD) - self.state = 560 + self.state = 576 self.match(PyNestMLParser.COLON) - self.state = 561 + self.state = 577 self.match(PyNestMLParser.NEWLINE) - self.state = 562 + self.state = 578 self.match(PyNestMLParser.INDENT) - self.state = 565 + self.state = 595 self._errHandler.sync(self) token = self._input.LA(1) if token in [42]: - self.state = 563 + self.state = 579 localctx.isSpike = 
self.match(PyNestMLParser.SPIKE_KEYWORD) + self.state = 592 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==49: + self.state = 580 + self.match(PyNestMLParser.LEFT_PAREN) + self.state = 589 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==89: + self.state = 581 + localctx.attribute = self.parameter() + self.state = 586 + self._errHandler.sync(self) + _la = self._input.LA(1) + while _la==74: + self.state = 582 + self.match(PyNestMLParser.COMMA) + self.state = 583 + localctx.attribute = self.parameter() + self.state = 588 + self._errHandler.sync(self) + _la = self._input.LA(1) + + + + self.state = 591 + self.match(PyNestMLParser.RIGHT_PAREN) + + pass elif token in [39]: - self.state = 564 + self.state = 594 localctx.isContinuous = self.match(PyNestMLParser.CONTINUOUS_KEYWORD) pass else: raise NoViableAltException(self) - self.state = 567 + self.state = 597 self.match(PyNestMLParser.NEWLINE) - self.state = 568 + self.state = 598 self.match(PyNestMLParser.DEDENT) except RecognitionException as re: localctx.exception = re @@ -4301,45 +4420,45 @@ def function(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 570 + self.state = 600 self.match(PyNestMLParser.FUNCTION_KEYWORD) - self.state = 571 + self.state = 601 self.match(PyNestMLParser.NAME) - self.state = 572 + self.state = 602 self.match(PyNestMLParser.LEFT_PAREN) - self.state = 581 + self.state = 611 self._errHandler.sync(self) _la = self._input.LA(1) - if _la==88: - self.state = 573 + if _la==89: + self.state = 603 self.parameter() - self.state = 578 + self.state = 608 self._errHandler.sync(self) _la = self._input.LA(1) while _la==74: - self.state = 574 + self.state = 604 self.match(PyNestMLParser.COMMA) - self.state = 575 + self.state = 605 self.parameter() - self.state = 580 + self.state = 610 self._errHandler.sync(self) _la = self._input.LA(1) - self.state = 583 + self.state = 613 self.match(PyNestMLParser.RIGHT_PAREN) - self.state = 585 + 
self.state = 615 self._errHandler.sync(self) _la = self._input.LA(1) - if (((_la) & ~0x3f) == 0 and ((1 << _la) & 562949953453056) != 0) or _la==88 or _la==89: - self.state = 584 + if (((_la) & ~0x3f) == 0 and ((1 << _la) & 562949953453056) != 0) or _la==89 or _la==90: + self.state = 614 localctx.returnType = self.dataType() - self.state = 587 + self.state = 617 self.match(PyNestMLParser.COLON) - self.state = 588 + self.state = 618 self.block() except RecognitionException as re: localctx.exception = re @@ -4382,9 +4501,9 @@ def parameter(self): self.enterRule(localctx, 90, self.RULE_parameter) try: self.enterOuterAlt(localctx, 1) - self.state = 590 + self.state = 620 self.match(PyNestMLParser.NAME) - self.state = 591 + self.state = 621 self.dataType() except RecognitionException as re: localctx.exception = re @@ -4444,14 +4563,14 @@ def constParameter(self): self._la = 0 # Token type try: self.enterOuterAlt(localctx, 1) - self.state = 593 + self.state = 623 localctx.name = self.match(PyNestMLParser.NAME) - self.state = 594 + self.state = 624 self.match(PyNestMLParser.EQUALS) - self.state = 595 + self.state = 625 localctx.value = self._input.LT(1) _la = self._input.LA(1) - if not(_la==25 or ((((_la - 86)) & ~0x3f) == 0 and ((1 << (_la - 86)) & 27) != 0)): + if not(_la==25 or ((((_la - 87)) & ~0x3f) == 0 and ((1 << (_la - 87)) & 27) != 0)): localctx.value = self._errHandler.recoverInline(self) else: self._errHandler.reportMatch(self) diff --git a/pynestml/grammars/PyNestMLLexer.g4 b/pynestml/grammars/PyNestMLLexer.g4 index c7255e807..b5e36be4f 100644 --- a/pynestml/grammars/PyNestMLLexer.g4 +++ b/pynestml/grammars/PyNestMLLexer.g4 @@ -138,6 +138,7 @@ lexer grammar PyNestMLLexer; DOUBLE_COLON : '::'; SEMICOLON : ';'; DIFFERENTIAL_ORDER : '\''; + FULLSTOP : '.'; /** @@ -174,8 +175,8 @@ lexer grammar PyNestMLLexer; FLOAT : POINT_FLOAT | EXPONENT_FLOAT; - fragment POINT_FLOAT : UNSIGNED_INTEGER? '.' UNSIGNED_INTEGER - | UNSIGNED_INTEGER '.' 
+ fragment POINT_FLOAT : UNSIGNED_INTEGER? FULLSTOP UNSIGNED_INTEGER + | UNSIGNED_INTEGER FULLSTOP ; fragment EXPONENT_FLOAT: ( UNSIGNED_INTEGER | POINT_FLOAT ) [eE] EXPONENT ; diff --git a/pynestml/grammars/PyNestMLParser.g4 b/pynestml/grammars/PyNestMLParser.g4 index 5d2af2d50..5c5b4c270 100644 --- a/pynestml/grammars/PyNestMLParser.g4 +++ b/pynestml/grammars/PyNestMLParser.g4 @@ -284,7 +284,7 @@ parser grammar PyNestMLParser; @attribute inputPort: A list of input ports. */ inputBlock: INPUT_KEYWORD COLON - NEWLINE INDENT (spikeInputPort | continuousInputPort)+ DEDENT; + NEWLINE INDENT ((spikeInputPort | continuousInputPort) (LEFT_PAREN (parameter (COMMA parameter)*)? RIGHT_PAREN)?)+ DEDENT; /** ASTInputPort represents a single input port, e.g.: spike_in[3] <- excitatory spike @@ -322,7 +322,8 @@ parser grammar PyNestMLParser; @attribute isContinuous: true if and only if the neuron has a continuous-time output. */ outputBlock: OUTPUT_KEYWORD COLON - NEWLINE INDENT (isSpike=SPIKE_KEYWORD | isContinuous=CONTINUOUS_KEYWORD) NEWLINE DEDENT; + NEWLINE INDENT ((isSpike=SPIKE_KEYWORD (LEFT_PAREN (attribute=parameter (COMMA attribute=parameter)*)? RIGHT_PAREN)?) | isContinuous=CONTINUOUS_KEYWORD) + NEWLINE DEDENT; /** ASTFunction A single declaration of a user-defined function definition: function set_V_m(v mV): diff --git a/pynestml/meta_model/ast_function_call.py b/pynestml/meta_model/ast_function_call.py index a078e188a..a07c3483a 100644 --- a/pynestml/meta_model/ast_function_call.py +++ b/pynestml/meta_model/ast_function_call.py @@ -22,6 +22,7 @@ from typing import List from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_parameter import ASTParameter class ASTFunctionCall(ASTNode): @@ -91,12 +92,14 @@ def has_args(self): """ return (self.args is not None) and len(self.args) > 0 - def get_args(self): + def get_args(self) -> List[ASTParameter]: """ Returns the list of arguments. :return: the list of arguments. 
- :rtype: list(ASTExpression) """ + if self.args is None: + return [] + return self.args def get_children(self) -> List[ASTNode]: diff --git a/pynestml/meta_model/ast_node_factory.py b/pynestml/meta_model/ast_node_factory.py index da3986be9..781d75f9e 100644 --- a/pynestml/meta_model/ast_node_factory.py +++ b/pynestml/meta_model/ast_node_factory.py @@ -19,7 +19,7 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . -from typing import Optional, Union +from typing import List, Optional, Union from pynestml.meta_model.ast_arithmetic_operator import ASTArithmeticOperator from pynestml.meta_model.ast_assignment import ASTAssignment @@ -295,9 +295,8 @@ def create_ast_kernel(cls, variables=None, expressions=None, source_position=Non return ASTKernel(variables, expressions, source_position=source_position) @classmethod - def create_ast_output_block(cls, s_type, source_position): - # type: (PortSignalType,ASTSourceLocation) -> ASTOutputBlock - return ASTOutputBlock(s_type, source_position=source_position) + def create_ast_output_block(cls, s_type: PortSignalType, attributes: Optional[List[ASTParameter]] = None, source_position: ASTSourceLocation = None) -> ASTOutputBlock: + return ASTOutputBlock(s_type, attributes=attributes, source_position=source_position) @classmethod def create_ast_parameter(cls, name, data_type, source_position): diff --git a/pynestml/meta_model/ast_output_block.py b/pynestml/meta_model/ast_output_block.py index 66a61f71d..33cf33245 100644 --- a/pynestml/meta_model/ast_output_block.py +++ b/pynestml/meta_model/ast_output_block.py @@ -19,9 +19,10 @@ # You should have received a copy of the GNU General Public License # along with NEST. If not, see . 
-from typing import List +from typing import List, Optional from pynestml.meta_model.ast_node import ASTNode +from pynestml.meta_model.ast_parameter import ASTParameter from pynestml.utils.port_signal_type import PortSignalType @@ -39,7 +40,7 @@ class ASTOutputBlock(ASTNode): type = None """ - def __init__(self, o_type, *args, **kwargs): + def __init__(self, o_type, attributes: Optional[List[ASTParameter]], *args, **kwargs): """ Standard constructor. @@ -51,6 +52,7 @@ def __init__(self, o_type, *args, **kwargs): assert isinstance(o_type, PortSignalType) super(ASTOutputBlock, self).__init__(*args, **kwargs) self.type = o_type + self.attributes = attributes def clone(self): """ @@ -60,6 +62,7 @@ def clone(self): :rtype: ASTOutputBlock """ dup = ASTOutputBlock(o_type=self.type, + attributes=self.attributes, # ASTNode common attributes: source_position=self.source_position, scope=self.scope, @@ -84,6 +87,16 @@ def is_continuous(self) -> bool: """ return self.type is PortSignalType.CONTINUOUS + def get_attributes(self) -> List[ASTParameter]: + r""" + Returns the attributes of this node, if any. + :return: List of attributes of this node. + """ + if self.attributes is None: + return [] + + return self.attributes + def get_children(self) -> List[ASTNode]: r""" Returns the children of this node, if any. 
@@ -98,4 +111,11 @@ def equals(self, other: ASTNode) -> bool: if not isinstance(other, ASTOutputBlock): return False + if bool(self.attributes) != bool(other.attributes): + return False + + for attribute_self, attribute_other in zip(self.attributes, other.attributes): + if not attribute_self.equals(attribute_other): + return False + return self.is_spike() == other.is_spike() and self.is_continuous() == other.is_continuous() diff --git a/pynestml/utils/messages.py b/pynestml/utils/messages.py index 28f066f4e..ce2913c7b 100644 --- a/pynestml/utils/messages.py +++ b/pynestml/utils/messages.py @@ -138,6 +138,8 @@ class MessageCode(Enum): TIMESTEP_FUNCTION_LEGALLY_USED = 113 RANDOM_FUNCTIONS_LEGALLY_USED = 113 EXPONENT_MUST_BE_INTEGER = 114 + EMIT_SPIKE_OUTPUT_PORT_TYPE_DIFFERS = 115 + CONTINUOUS_OUTPUT_PORT_MAY_NOT_HAVE_ATTRIBUTES = 116 class Messages: @@ -1063,6 +1065,14 @@ def get_emit_spike_function_but_no_output_port(cls): message = 'emit_spike() function was called, but no spiking output port has been defined!' return MessageCode.EMIT_SPIKE_FUNCTION_BUT_NO_OUTPUT_PORT, message + @classmethod + def get_output_port_type_differs(cls) -> Tuple[MessageCode, str]: + """ + Indicates that an emit_spike() function was called, but with different parameter types than the output port was defined with. + """ + message = 'emit_spike() function was called, but with different parameter types than the output port was defined with!' + return MessageCode.EMIT_SPIKE_OUTPUT_PORT_TYPE_DIFFERS, message + @classmethod def get_kernel_wrong_type(cls, kernel_name: str, @@ -1376,3 +1386,8 @@ def get_non_constant_exponent(cls) -> Tuple[MessageCode, str]: def get_random_functions_legally_used(cls, name): message = "The function '" + name + "' can only be used in the update, onReceive, or onCondition blocks." 
return MessageCode.RANDOM_FUNCTIONS_LEGALLY_USED, message + + @classmethod + def get_continuous_output_port_cannot_have_attributes(cls): + message = "continuous time output port may not have attributes." + return MessageCode.CONTINUOUS_OUTPUT_PORT_MAY_NOT_HAVE_ATTRIBUTES, message diff --git a/pynestml/visitors/ast_builder_visitor.py b/pynestml/visitors/ast_builder_visitor.py index bfc4dd902..02a9bd396 100644 --- a/pynestml/visitors/ast_builder_visitor.py +++ b/pynestml/visitors/ast_builder_visitor.py @@ -644,13 +644,21 @@ def visitInputQualifier(self, ctx): # Visit a parse tree produced by PyNESTMLParser#outputBuffer. def visitOutputBlock(self, ctx): source_pos = create_source_pos(ctx) + attributes: List[ASTParameter] = [] + if ctx.parameter() is not None: + if type(ctx.parameter()) is list: + for par in ctx.parameter(): + attributes.append(self.visit(par)) + else: + attributes.append(self.visit(ctx.parameter())) + if ctx.isSpike is not None: - ret = ASTNodeFactory.create_ast_output_block(s_type=PortSignalType.SPIKE, source_position=source_pos) + ret = ASTNodeFactory.create_ast_output_block(s_type=PortSignalType.SPIKE, attributes=attributes, source_position=source_pos) update_node_comments(ret, self.__comments.visit(ctx)) return ret if ctx.isContinuous is not None: - ret = ASTNodeFactory.create_ast_output_block(s_type=PortSignalType.CONTINUOUS, source_position=source_pos) + ret = ASTNodeFactory.create_ast_output_block(s_type=PortSignalType.CONTINUOUS, attributes=attributes, source_position=source_pos) update_node_comments(ret, self.__comments.visit(ctx)) return ret diff --git a/tests/invalid/CoCoOutputPortTypeContinuous.nestml b/tests/invalid/CoCoOutputPortTypeContinuous.nestml new file mode 100644 index 000000000..d5486a730 --- /dev/null +++ b/tests/invalid/CoCoOutputPortTypeContinuous.nestml @@ -0,0 +1,34 @@ +""" +CoCoOutputPortTypeContinuous.nestml +################################### + + +Description ++++++++++++ + +This model is used to test if broken CoCos 
are identified correctly. Here, test that an error is raised when a continuous-time output port is defined as having attributes. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model output_port_types_neuron: + output: + continuous(foo real) diff --git a/tests/invalid/CoCoOutputPortTypeIfEmitCall-2.nestml b/tests/invalid/CoCoOutputPortTypeIfEmitCall-2.nestml new file mode 100644 index 000000000..c8ddcbdc6 --- /dev/null +++ b/tests/invalid/CoCoOutputPortTypeIfEmitCall-2.nestml @@ -0,0 +1,37 @@ +""" +CoCoOutputPortTypeIfEmitCall.nestml +################################### + + +Description ++++++++++++ + +This model is used to test if broken CoCos are identified correctly. Here, test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. 
+ +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . +""" +model output_port_types_neuron: + output: + spike(foo real) + + update: + emit_spike() diff --git a/tests/invalid/CoCoOutputPortTypeIfEmitCall-3.nestml b/tests/invalid/CoCoOutputPortTypeIfEmitCall-3.nestml new file mode 100644 index 000000000..0e3c5a26f --- /dev/null +++ b/tests/invalid/CoCoOutputPortTypeIfEmitCall-3.nestml @@ -0,0 +1,37 @@ +""" +CoCoOutputPortTypeIfEmitCall.nestml +################################### + + +Description ++++++++++++ + +This model is used to test if broken CoCos are identified correctly. Here, test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" +model output_port_types_neuron: + output: + spike(foo real) + + update: + emit_spike(1 ms) diff --git a/tests/invalid/CoCoOutputPortTypeIfEmitCall.nestml b/tests/invalid/CoCoOutputPortTypeIfEmitCall.nestml new file mode 100644 index 000000000..d9fc459f0 --- /dev/null +++ b/tests/invalid/CoCoOutputPortTypeIfEmitCall.nestml @@ -0,0 +1,37 @@ +""" +CoCoOutputPortTypeIfEmitCall.nestml +################################### + + +Description ++++++++++++ + +This model is used to test if broken CoCos are identified correctly. Here, test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port. + + +Copyright statement ++++++++++++++++++++ + +This file is part of NEST. + +Copyright (C) 2004 The NEST Initiative + +NEST is free software: you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation, either version 2 of the License, or +(at your option) any later version. + +NEST is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License +along with NEST. If not, see . 
+""" +model output_port_types_neuron: + output: + spike + + update: + emit_spike(1 ms) diff --git a/tests/nest_tests/resources/delay_test_assigned_delay2_synapse.nestml b/tests/nest_tests/resources/delay_test_assigned_delay2_synapse.nestml index bb382c0dc..6373a19a9 100644 --- a/tests/nest_tests/resources/delay_test_assigned_delay2_synapse.nestml +++ b/tests/nest_tests/resources/delay_test_assigned_delay2_synapse.nestml @@ -11,7 +11,7 @@ model delay_test_assigned_delay2_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(w, d) diff --git a/tests/nest_tests/resources/delay_test_assigned_delay_synapse.nestml b/tests/nest_tests/resources/delay_test_assigned_delay_synapse.nestml index 23d1d4929..3c7866319 100644 --- a/tests/nest_tests/resources/delay_test_assigned_delay_synapse.nestml +++ b/tests/nest_tests/resources/delay_test_assigned_delay_synapse.nestml @@ -11,7 +11,7 @@ model delay_test_assigned_delay_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): d = 2 ms # not allowed! 
diff --git a/tests/nest_tests/resources/delay_test_assigned_synapse.nestml b/tests/nest_tests/resources/delay_test_assigned_synapse.nestml index 3774d79e9..04e451613 100644 --- a/tests/nest_tests/resources/delay_test_assigned_synapse.nestml +++ b/tests/nest_tests/resources/delay_test_assigned_synapse.nestml @@ -11,7 +11,7 @@ model delay_test_assigned_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): w = 2 diff --git a/tests/nest_tests/resources/delay_test_plastic_synapse.nestml b/tests/nest_tests/resources/delay_test_plastic_synapse.nestml index b639827cc..f7ba1ff2d 100644 --- a/tests/nest_tests/resources/delay_test_plastic_synapse.nestml +++ b/tests/nest_tests/resources/delay_test_plastic_synapse.nestml @@ -12,7 +12,7 @@ model delay_test_plastic_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(w, d) diff --git a/tests/nest_tests/resources/delay_test_synapse.nestml b/tests/nest_tests/resources/delay_test_synapse.nestml index 3663d76ab..a403c650b 100644 --- a/tests/nest_tests/resources/delay_test_synapse.nestml +++ b/tests/nest_tests/resources/delay_test_synapse.nestml @@ -14,7 +14,7 @@ model delay_test_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(w, d) diff --git a/tests/nest_tests/resources/dopa_second_order_synapse.nestml b/tests/nest_tests/resources/dopa_second_order_synapse.nestml index 662465217..794248f6f 100644 --- a/tests/nest_tests/resources/dopa_second_order_synapse.nestml +++ b/tests/nest_tests/resources/dopa_second_order_synapse.nestml @@ -49,7 +49,7 @@ model dopa_second_order_synapse: dopa_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(dopa_spikes): dopa_rate_d += 1. 
/ tau_dopa diff --git a/tests/nest_tests/resources/homogeneous_parameters_synapse.nestml b/tests/nest_tests/resources/homogeneous_parameters_synapse.nestml index 8e443022d..c53105bc6 100644 --- a/tests/nest_tests/resources/homogeneous_parameters_synapse.nestml +++ b/tests/nest_tests/resources/homogeneous_parameters_synapse.nestml @@ -18,7 +18,7 @@ model static_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(3.18E-3 * a * b * w, d) diff --git a/tests/nest_tests/resources/random_functions_illegal_synapse.nestml b/tests/nest_tests/resources/random_functions_illegal_synapse.nestml index 473791ec7..7790640e4 100644 --- a/tests/nest_tests/resources/random_functions_illegal_synapse.nestml +++ b/tests/nest_tests/resources/random_functions_illegal_synapse.nestml @@ -50,7 +50,7 @@ model random_functions_illegal_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(post_spikes): post_trace += 1 diff --git a/tests/nest_tests/resources/test_plasticity_dynamics_synapse.nestml b/tests/nest_tests/resources/test_plasticity_dynamics_synapse.nestml index 251dcdd8f..b32786f24 100644 --- a/tests/nest_tests/resources/test_plasticity_dynamics_synapse.nestml +++ b/tests/nest_tests/resources/test_plasticity_dynamics_synapse.nestml @@ -41,7 +41,7 @@ model test_plasticity_dynamics_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(post_spikes): if t_last_pre_spike < t and t <= (t_last_pre_spike + 1*ms): diff --git a/tests/nest_tests/resources/weight_test_assigned_synapse.nestml b/tests/nest_tests/resources/weight_test_assigned_synapse.nestml index 1737c4c57..9271adaf4 100644 --- a/tests/nest_tests/resources/weight_test_assigned_synapse.nestml +++ b/tests/nest_tests/resources/weight_test_assigned_synapse.nestml @@ -11,7 +11,7 @@ model weight_test_assigned_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): w = 2 diff 
--git a/tests/nest_tests/resources/weight_test_plastic_synapse.nestml b/tests/nest_tests/resources/weight_test_plastic_synapse.nestml index 8860a472c..bededd562 100644 --- a/tests/nest_tests/resources/weight_test_plastic_synapse.nestml +++ b/tests/nest_tests/resources/weight_test_plastic_synapse.nestml @@ -12,7 +12,7 @@ model weight_test_plastic_synapse: pre_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes): emit_spike(w, d) diff --git a/tests/resources/synapse_event_inv_priority_test.nestml b/tests/resources/synapse_event_inv_priority_test.nestml index c889d6045..17b971aac 100644 --- a/tests/resources/synapse_event_inv_priority_test.nestml +++ b/tests/resources/synapse_event_inv_priority_test.nestml @@ -42,7 +42,7 @@ model event_inv_priority_test_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes, priority=2): tr += 1. diff --git a/tests/resources/synapse_event_priority_test.nestml b/tests/resources/synapse_event_priority_test.nestml index 885073ff5..3cef60ae6 100644 --- a/tests/resources/synapse_event_priority_test.nestml +++ b/tests/resources/synapse_event_priority_test.nestml @@ -42,7 +42,7 @@ model event_priority_test_synapse: post_spikes <- spike output: - spike + spike(weight real, delay ms) onReceive(pre_spikes, priority=1): tr += 1. 
diff --git a/tests/test_cocos.py b/tests/test_cocos.py index 731fb8d8a..63a862a7a 100644 --- a/tests/test_cocos.py +++ b/tests/test_cocos.py @@ -115,7 +115,7 @@ def test_valid_function_unique_and_defined(self): def test_invalid_inline_expressions_have_rhs(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionHasNoRhs.nestml')) - assert model is None + assert model is None # parse error def test_valid_inline_expressions_have_rhs(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionHasNoRhs.nestml')) @@ -123,7 +123,7 @@ def test_valid_inline_expressions_have_rhs(self): def test_invalid_inline_expression_has_several_lhs(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) - assert model is None + assert model is None # parse error def test_valid_inline_expression_has_several_lhs(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoInlineExpressionWithSeveralLhs.nestml')) @@ -324,6 +324,28 @@ def test_valid_output_port_defined_if_emit_call(self): model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'valid')), 'CoCoOutputPortDefinedIfEmitCall.nestml')) assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + def test_invalid_output_port_type_if_emit_call(self): + """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall.nestml')) + assert 
len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + def test_invalid_output_port_type_if_emit_call_2(self): + """test that an error is raised when the emit_spike() function is called with different parameter types than are defined in the spiking output port""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-2.nestml')) + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) > 0 + + def test_valid_output_port_type_if_emit_call(self): + """test that a warning is raised when the emit_spike() function is called with parameter types castable to the types defined in the spiking output port""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeIfEmitCall-3.nestml')) + assert model is not None + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.ERROR)) == 0 + assert len(Logger.get_all_messages_of_level_and_or_node(model, LoggingLevel.WARNING)) > 0 + + def test_invalid_output_port_type_continuous(self): + """test that an error is raised when a continuous-time output port is defined as having attributes.""" + model = self._parse_and_validate_model(os.path.join(os.path.realpath(os.path.join(os.path.dirname(__file__), 'invalid')), 'CoCoOutputPortTypeContinuous.nestml')) + assert model is None # should result in a parse error + def test_valid_coco_kernel_type(self): """ Test the functionality of CoCoKernelType. 
@@ -388,10 +410,7 @@ def _parse_and_validate_model(self, fname: str) -> Optional[str]: Logger.init_logger(LoggingLevel.DEBUG) - try: - generate_target(input_path=fname, target_platform="NONE", logging_level="DEBUG") - except BaseException: - return None + generate_target(input_path=fname, target_platform="NONE", logging_level="DEBUG") ast_compilation_unit = ModelParser.parse_file(fname) if ast_compilation_unit is None or len(ast_compilation_unit.get_model_list()) == 0: