
Commit 182934f

pavel-kirienko committed Apr 6, 2021
1 parent b87efdf
Showing 6 changed files with 16 additions and 15 deletions.
2 changes: 1 addition & 1 deletion pyuavcan/application/heartbeat_publisher.py
@@ -44,7 +44,7 @@ class Mode(enum.IntEnum):


VENDOR_SPECIFIC_STATUS_CODE_MASK = (
- 2 ** list(pyuavcan.dsdl.get_model(Heartbeat)["vendor_specific_status_code"].data_type.bit_length_set)[0] - 1
+ 2 ** pyuavcan.dsdl.get_model(Heartbeat)["vendor_specific_status_code"].data_type.bit_length_set.max - 1
)


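For illustration, here is a minimal sketch of what the new expression computes. It assumes the uavcan.node DSDL namespace has been compiled for pyuavcan and that vendor_specific_status_code is the fixed-width uint8 field defined by the regulated data types, so the mask comes out as 0xFF. The old form materialized the bit length set as a list and took its first element; the new .max property gives the same answer for a fixed-width field without enumerating the set.

```python
# Hedged sketch: assumes the uavcan.node namespace has been compiled by pyuavcan.dsdl
# and that vendor_specific_status_code is a fixed-width uint8 field.
import pyuavcan.dsdl
from uavcan.node import Heartbeat_1_0 as Heartbeat

field_type = pyuavcan.dsdl.get_model(Heartbeat)["vendor_specific_status_code"].data_type
mask = 2 ** field_type.bit_length_set.max - 1  # .max replaces list(bit_length_set)[0]
assert mask == 0xFF  # 2**8 - 1 for an 8-bit field
```
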
6 changes: 3 additions & 3 deletions pyuavcan/application/plug_and_play.py
@@ -27,10 +27,10 @@


_PSEUDO_UNIQUE_ID_MASK = (
- 2 ** list(pyuavcan.dsdl.get_model(NodeIDAllocationData_1)["unique_id_hash"].data_type.bit_length_set)[0] - 1
+ 2 ** pyuavcan.dsdl.get_model(NodeIDAllocationData_1)["unique_id_hash"].data_type.bit_length_set.max - 1
)

- _NODE_ID_MASK = 2 ** max(pyuavcan.dsdl.get_model(ID)["value"].data_type.bit_length_set) - 1
+ _NODE_ID_MASK = 2 ** pyuavcan.dsdl.get_model(ID)["value"].data_type.bit_length_set.max - 1

_UNIQUE_ID_SIZE_BYTES = pyuavcan.application.NodeInfo().unique_id.size

@@ -63,7 +63,7 @@ class Allocatee:

DEFAULT_PRIORITY = pyuavcan.transport.Priority.SLOW

- _MTU_THRESHOLD = max(pyuavcan.dsdl.get_model(NodeIDAllocationData_2).bit_length_set) // 8
+ _MTU_THRESHOLD = pyuavcan.dsdl.get_model(NodeIDAllocationData_2).bit_length_set.max // 8

def __init__(
self,
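_MTU_THRESHOLD converts the largest possible serialized size of NodeIDAllocationData_2 from bits to bytes. Below is a hedged sketch, not the verbatim Allocatee logic, of how such a threshold can drive the choice between the hash-based v1 request and the full-unique-ID v2 request; it assumes pyuavcan's transport.protocol_parameters.mtu is available, and the helper name and exact selection rule are illustrative.

```python
# Hedged sketch: pick an allocation message version by comparing the transport MTU
# against the largest possible NodeIDAllocationData_2 serialization (bits // 8 = bytes).
import pyuavcan.dsdl
import pyuavcan.transport
from uavcan.pnp import NodeIDAllocationData_2_0 as NodeIDAllocationData_2

def select_allocation_version(transport: pyuavcan.transport.Transport) -> int:
    mtu_threshold = pyuavcan.dsdl.get_model(NodeIDAllocationData_2).bit_length_set.max // 8
    # Large-MTU transports can carry the full 16-byte unique-ID message;
    # small-MTU transports (e.g., CAN 2.0) fall back to the hash-based v1 form.
    return 2 if transport.protocol_parameters.mtu >= mtu_threshold else 1
```
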
6 changes: 3 additions & 3 deletions pyuavcan/dsdl/_templates/deserialization.j2
@@ -10,7 +10,7 @@
{% set t = self.inner_type %}
{% if t is StructureType %}
{% set field_ref_map = {} %}
- {% for f, offset in t.iterate_fields_with_offsets(0|bit_length_set) %}
+ {% for f, offset in t.iterate_fields_with_offsets() %}
{% if f is not padding %}
{% set field_ref = 'f'|to_template_unique_name %}
{% do field_ref_map.update({f: field_ref}) %}
@@ -33,7 +33,7 @@
{% elif t is UnionType %}
{% set tag_ref = 'tag'|to_template_unique_name %}
{{ _deserialize_integer(t.tag_field_type, tag_ref, 0|bit_length_set) }}
- {% for f, offset in t.iterate_fields_with_offsets(0|bit_length_set) %}
+ {% for f, offset in t.iterate_fields_with_offsets() %}
{# We generate new temporary for each variant to prevent MyPy from complaining. #}
{% set field_ref = 'uni'|to_template_unique_name %}
{{ 'if' if loop.first else 'elif' }} {{ tag_ref }} == {{ loop.index0 }}:
@@ -45,7 +45,7 @@
{% else %}{% assert False %}{# Delimited type is not expected in this context. #}
{% endif %}
_des_.pad_to_alignment({{ self.alignment_requirement }})
- assert {{ t.bit_length_set|min }} <= (_des_.consumed_bit_length - _base_offset_) <= {{ t.bit_length_set|max }}, \
+ assert {{ t.bit_length_set.min }} <= (_des_.consumed_bit_length - _base_offset_) <= {{ t.bit_length_set.max }}, \
'Bad deserialization of {{ self }}'
{%- endmacro %}

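For context, a sketch of the pydsdl calls the updated template relies on: iterate_fields_with_offsets() can now be called without an explicit base offset, and the offsets and bit length sets it deals in expose min and max as properties instead of requiring the set to be converted to a list. The report_layout helper below is purely illustrative, assuming a pydsdl version with the reworked BitLengthSet.

```python
# Hedged sketch: prints the possible offset and size ranges of each field of a composite type.
import pydsdl

def report_layout(t: pydsdl.StructureType) -> None:
    for field, offset in t.iterate_fields_with_offsets():  # No base offset argument needed.
        bls = field.data_type.bit_length_set
        print(f"{field.name}: offset {offset.min}..{offset.max} bits, size {bls.min}..{bls.max} bits")
```
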
14 changes: 7 additions & 7 deletions pyuavcan/dsdl/_templates/serialization.j2
@@ -9,11 +9,11 @@
_base_offset_ = _ser_.current_bit_length
{% set t = self.inner_type %}
{% if t is StructureType %}
- {% for f, offset in t.iterate_fields_with_offsets(0|bit_length_set) %}
+ {% for f, offset in t.iterate_fields_with_offsets() %}
{{ _serialize_any(f.data_type, 'self.' + (f|id), offset) }}
{% endfor %}
{% elif t is UnionType %}
- {% for f, offset in t.iterate_fields_with_offsets(0|bit_length_set) %}
+ {% for f, offset in t.iterate_fields_with_offsets() %}
{% set field_ref = 'self.' + (f|id) %}
{{ 'if' if loop.first else 'elif' }} {{ field_ref }} is not None: # Union tag {{ loop.index0 }}
{{ _serialize_integer(t.tag_field_type, loop.index0|string, 0|bit_length_set)|indent }}
@@ -24,7 +24,7 @@
{% else %}{% assert False %}{# Delimited type is not expected in this context. #}
{% endif %}
_ser_.pad_to_alignment({{ self.alignment_requirement }})
- assert {{ t.bit_length_set|min }} <= (_ser_.current_bit_length - _base_offset_) <= {{ t.bit_length_set|max }}, \
+ assert {{ t.bit_length_set.min }} <= (_ser_.current_bit_length - _base_offset_) <= {{ t.bit_length_set.max }}, \
'Bad serialization of {{ self }}'
{%- endmacro %}

@@ -120,7 +120,7 @@
{%- elif t is VariableLengthArrayType -%} {{ _serialize_variable_length_array(t, ref, offset) }}
{%- elif t is CompositeType -%}
{% if t is DelimitedType %}
- {% if (t.inner_type.bit_length_set | length) > 1 %}
+ {% if not t.inner_type.bit_length_set.fixed_length %}
{# Instead of the outer extent, we use the inner extent, which equals the max bit length and is a
# tighter bound than the user-defined extent.
# This is safe because when serializing we always know the concrete type.
@@ -136,14 +136,14 @@
{{ ref }}._serialize_(_nested_)
_nested_length_ = _nested_.current_bit_length - {{ t.delimiter_header_type.bit_length }}
del _nested_
- assert {{ t.inner_type.bit_length_set|min }} <= _nested_length_ <= {{ t.inner_type.bit_length_set|max }}
+ assert {{ t.inner_type.bit_length_set.min }} <= _nested_length_ <= {{ t.inner_type.bit_length_set.max }}
assert _nested_length_ % 8 == 0
_ser_.add_aligned_u32(_nested_length_ // 8) # Jump back and serialize the delimiter header.
_ser_.skip_bits(_nested_length_) # Return to the current offset.
{% else %}
{# Optional optimization: if the nested object is fixed-length, no need to fork the serializer. #}
- {% set length_bits = t.inner_type.bit_length_set | first %}
- {% assert [length_bits] == t.inner_type.bit_length_set | list %}
+ {% set length_bits = t.inner_type.bit_length_set.max %}
+ {% assert length_bits == t.inner_type.bit_length_set.min %}
{% assert length_bits % 8 == 0 %}
{% set length_bytes = length_bits // 8 %}
# Delimited serialization of {{ t }}, fixed bit length {{ length_bits }} ({{ length_bytes }} bytes)
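The new fixed_length property replaces the old cardinality check (len(bit_length_set) == 1): if every possible serialization of the nested object has the same bit length, the delimiter header can be written up front and the serializer does not need to be forked. A hedged equivalent of that check in plain Python, with the helper name being an illustrative assumption:

```python
# Hedged sketch: mirrors the template's fixed-size test without enumerating the length set.
import pydsdl

def has_constant_serialized_size(t: pydsdl.CompositeType) -> bool:
    bls = t.inner_type.bit_length_set if isinstance(t, pydsdl.DelimitedType) else t.bit_length_set
    return bls.fixed_length  # Equivalent to bls.min == bls.max, i.e., the old len(set) == 1 check.
```
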
1 change: 1 addition & 0 deletions setup.cfg
@@ -57,6 +57,7 @@ packages = find:
# The preferred long-term plan is to avoid adding any new required dependencies whatsoever for the project's lifetime.
install_requires =
nunavut ~= 1.0
+ pydsdl @ git+https://github.com/UAVCAN/pydsdl@combinatorial-explosion
numpy ~= 1.17, < 1.20
# TODO: allow NumPy v1.20 -- requires fixing type annotations and many meaningless false-positives. No runtime effects.

2 changes: 1 addition & 1 deletion tests/dsdl/_builtin_form.py
@@ -102,7 +102,7 @@ def _unittest_slow_builtin_form_manual(compiled: typing.List[pyuavcan.dsdl.Gener
def _unittest_slow_builtin_form_automatic(compiled: typing.List[pyuavcan.dsdl.GeneratedPackageInfo]) -> None:
for info in compiled:
for model in _util.expand_service_types(info.models):
- if max(model.bit_length_set) / 8 > 1024 * 1024:
+ if model.bit_length_set.max / 8 > 1024 * 1024:
_logger.info("Automatic test of %s skipped because the type is too large", model)
continue # Skip large objects because they take forever to convert and test

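The test guard now reads the maximum serialized size directly from the bit length set. A minimal sketch of the same check as a standalone helper; the helper name is an illustrative assumption, while the 1 MiB limit matches the test above:

```python
# Hedged sketch of the size guard: skip types whose largest serialization exceeds the limit.
import pydsdl

def is_too_large_for_test(model: pydsdl.CompositeType, limit_bytes: int = 1024 * 1024) -> bool:
    return model.bit_length_set.max / 8 > limit_bytes  # .max is in bits; divide by 8 for bytes.
```
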
