Update pdl-compiler to 0.1.4

Test: TreeHugger
Change-Id: I859eaafd97b48b81a3f9da1dad00e91e5717abc0
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index 24636e7..f776014 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,6 +1,6 @@
 {
   "git": {
-    "sha1": "6b05a0616ef2a982068e0d5fbe28d2058d49d8e0"
+    "sha1": "ff041c0f60f985ecc2f8c90bdec618575dcb5060"
   },
   "path_in_vcs": ""
 }
\ No newline at end of file
diff --git a/Android.bp b/Android.bp
index 60940fe..c6ff8e0 100644
--- a/Android.bp
+++ b/Android.bp
@@ -7,7 +7,7 @@
     name: "generate_canonical_tests",
     crate_name: "generate_canonical_tests",
     cargo_env_compat: true,
-    cargo_pkg_version: "0.1.3",
+    cargo_pkg_version: "0.1.4",
     srcs: ["src/bin/generate-canonical-tests.rs"],
     edition: "2021",
     features: [
@@ -37,7 +37,7 @@
     name: "libpdl_compiler",
     crate_name: "pdl_compiler",
     cargo_env_compat: true,
-    cargo_pkg_version: "0.1.3",
+    cargo_pkg_version: "0.1.4",
     srcs: ["src/lib.rs"],
     edition: "2021",
     features: [
@@ -66,7 +66,7 @@
     name: "pdlc",
     crate_name: "pdlc",
     cargo_env_compat: true,
-    cargo_pkg_version: "0.1.3",
+    cargo_pkg_version: "0.1.4",
     srcs: ["src/main.rs"],
     edition: "2021",
     features: [
diff --git a/Cargo.toml b/Cargo.toml
index 57d75db..a9b427c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -12,7 +12,7 @@
 [package]
 edition = "2021"
 name = "pdl-compiler"
-version = "0.1.3"
+version = "0.1.4"
 authors = [
     "Henri Chataing <[email protected]>",
     "David de Jesus Duarte <[email protected]>",
@@ -32,7 +32,7 @@
     "grammar",
 ]
 categories = ["parsing"]
-license-file = "LICENSE"
+license = "Apache-2.0"
 repository = "https://github.com/google/pdl/"
 
 [[bin]]
@@ -85,7 +85,7 @@
 features = ["serde"]
 
 [dev-dependencies.googletest]
-version = "0.7.0"
+version = "0.8.0"
 
 [dev-dependencies.num-derive]
 version = "0.3.3"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index c9fa183..cf8b0cf 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,10 +1,10 @@
 [package]
 name = "pdl-compiler"
-version = "0.1.3"
+version = "0.1.4"
 edition = "2021"
 description = "Parser and serializer generator for protocol binary packets"
 repository = "https://github.com/google/pdl/"
-license-file = "LICENSE"
+license = "Apache-2.0"
 readme = "README.md"
 keywords = ["pdl", "parser", "serializer", "grammar"]
 authors = [
@@ -49,4 +49,4 @@
 num-traits = "0.2.15"
 thiserror = "1.0.37"
 paste = "1.0.6"
-googletest = "0.7.0"
+googletest = "0.8.0"
diff --git a/METADATA b/METADATA
index b35258e..3dc5568 100644
--- a/METADATA
+++ b/METADATA
@@ -11,13 +11,13 @@
   }
   url {
     type: ARCHIVE
-    value: "https://static.crates.io/crates/pdl-compiler/pdl-compiler-0.1.3.crate"
+    value: "https://static.crates.io/crates/pdl-compiler/pdl-compiler-0.1.4.crate"
   }
-  version: "0.1.3"
+  version: "0.1.4"
   license_type: NOTICE
   last_upgrade_date {
     year: 2023
-    month: 6
-    day: 27
+    month: 7
+    day: 13
   }
 }
diff --git a/doc/cxx-generated-code-guide.rst b/doc/cxx-generated-code-guide.rst
index f694e70..3a71430 100644
--- a/doc/cxx-generated-code-guide.rst
+++ b/doc/cxx-generated-code-guide.rst
@@ -25,3 +25,23 @@
 
     cargo run my-protocol.pdl --output-format json | \
         ./scripts/generate_cxx_backend.py > my-protocol.h
+
+Language bindings
+-----------------
+
+Enum declarations
+^^^^^^^^^^^^^^^^^
+
++---------------------------------------+---------------------------------------------------------------+
+| ::                                    | .. sourcecode:: c++                                           |
+|                                       |                                                               |
+|     enum TestEnum : 8 {               |     enum TestEnum : int8_t {                                  |
+|         A = 1,                        |         A = 1,                                                |
+|         B = 2..3,                     |         B_MIN = 2,                                            |
+|         C = 4,                        |         B_MAX = 3,                                            |
+|         OTHER = ..,                   |         C = 4,                                                |
+|     }                                 |     }                                                         |
++---------------------------------------+---------------------------------------------------------------+
+
+.. note::
+    C++ enums are open by construction; default cases in enum declarations are ignored.
diff --git a/doc/python-generated-code-guide.rst b/doc/python-generated-code-guide.rst
index 664d759..de766d4 100644
--- a/doc/python-generated-code-guide.rst
+++ b/doc/python-generated-code-guide.rst
@@ -49,9 +49,13 @@
 |         A = 1,                        |         A = 1                                                 |
 |         B = 2..3,                     |         B_MIN = 2                                             |
 |         C = 4,                        |         B_MAX = 3                                             |
-|     }                                 |         C = 4                                                 |
+|         OTHER = ..,                   |         C = 4                                                 |
+|     }                                 |                                                               |
 +---------------------------------------+---------------------------------------------------------------+
 
+.. note::
+    Python enums are open by construction; default cases in enum declarations are ignored.
+
 Packet declarations
 ^^^^^^^^^^^^^^^^^^^
 
diff --git a/doc/reference.md b/doc/reference.md
index 2529c29..ce2f0a7 100644
--- a/doc/reference.md
+++ b/doc/reference.md
@@ -161,7 +161,7 @@
 > &nbsp;&nbsp; enum_tag (`,` enum_tag)* `,`?
 >
 > enum_tag:\
-> &nbsp;&nbsp; enum_range | enum_value
+> &nbsp;&nbsp; enum_range | enum_value | enum_other
 >
 > enum_range:\
 > &nbsp;&nbsp; [IDENTIFIER](#identifier) `=` [INTEGER](#integer) `..` [INTEGER](#integer)) (`{`\
@@ -173,12 +173,20 @@
 >
 > enum_value:\
 > &nbsp;&nbsp; [IDENTIFIER](#identifier) `=` [INTEGER](#integer)
+>
+> enum_other:\
+> &nbsp;&nbsp; [IDENTIFIER](#identifier) `=` `..`
 
 An *enumeration*, or *enum* for short, is a declaration of a set of named [integer](#integer) constants
 or named [integer](#integer) ranges. [integer](#integer) ranges are inclusive at both ends.
 An [integer](#integer) value within a range *must* be unique. [integer](#integer) ranges
 *must not* overlap.
 
+An *enumeration* is closed by default: all values that are not explicitly described in the declaration are treated as invalid and _may_ cause a parsing error.
+
+An *enumeration* _may_ be declared open by specifying the default case; all unrecognized values
+_shall_ fall through to the default case.
+
 The [integer](#integer) following the name specifies the bit size of the values.
 
 ```
@@ -199,6 +207,8 @@
   },
 
   Custom = 20..29,
+
+  Other = ..
 }
 ```
 
diff --git a/doc/rust-generated-code-guide.rst b/doc/rust-generated-code-guide.rst
index df22d92..6dfdfd0 100644
--- a/doc/rust-generated-code-guide.rst
+++ b/doc/rust-generated-code-guide.rst
@@ -30,6 +30,10 @@
 
         impl<T> std::ops::Deref for Private<T> { .. }
 
+.. warning::
+    PDL authorizes the use of Rust keywords as identifiers. Keyword identifiers
+    are generated as raw identifiers, e.g. `type` is generated as `r#type`.
+
 Enum declarations
 ^^^^^^^^^^^^^^^^^
 
@@ -72,3 +76,23 @@
 |                                       |     impl From<TestEnum> for i32 { .. }                        |
 |                                       |     impl From<TestEnum> for i64 { .. }                        |
 +---------------------------------------+---------------------------------------------------------------+
+| ::                                    | .. sourcecode:: rust                                          |
+|                                       |                                                               |
+|     enum TestEnum : 8 {               |     #[repr(u64)]                                              |
+|         A = 1,                        |     #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]        |
+|         B = 2,                        |     enum TestEnum {                                           |
+|         OTHER = ..,                   |         A,                                                    |
+|     }                                 |         B,                                                    |
+|                                       |         Other(Private<u8>),                                   |
+|                                       |     }                                                         |
+|                                       |                                                               |
+|                                       |     impl From<u8> for TestEnum { .. }                         |
+|                                       |     impl From<TestEnum> for u8 { .. }                         |
+|                                       |     impl From<TestEnum> for u16 { .. }                        |
+|                                       |     impl From<TestEnum> for u32 { .. }                        |
+|                                       |     impl From<TestEnum> for u64 { .. }                        |
+|                                       |     impl From<TestEnum> for i8 { .. }                         |
+|                                       |     impl From<TestEnum> for i16 { .. }                        |
+|                                       |     impl From<TestEnum> for i32 { .. }                        |
+|                                       |     impl From<TestEnum> for i64 { .. }                        |
++---------------------------------------+---------------------------------------------------------------+
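+
+As a sketch of the intended behavior (relying only on the ``From`` implementations
+listed above), converting an undeclared value falls through to the default variant:
+
+.. sourcecode:: rust
+
+    let tag = TestEnum::from(5u8);
+    assert!(matches!(tag, TestEnum::Other(_)));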
diff --git a/scripts/generate_test_vectors.py b/scripts/generate_test_vectors.py
new file mode 100755
index 0000000..ef2ea24
--- /dev/null
+++ b/scripts/generate_test_vectors.py
@@ -0,0 +1,685 @@
+#!/usr/bin/env python3
+
+import argparse
+import collections
+import copy
+import json
+from pathlib import Path
+import pprint
+import traceback
+from typing import Iterable, List, Optional, Union
+import sys
+
+from pdl import ast, core
+
+MAX_ARRAY_SIZE = 256
+MAX_ARRAY_COUNT = 32
+DEFAULT_ARRAY_COUNT = 3
+DEFAULT_PAYLOAD_SIZE = 5
+
+
+class BitSerializer:
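+    """Packs successive integer values LSB first and flushes whole bytes to
+    `stream` in the configured byte order; e.g. append(0x3, 4) followed by
+    append(0xA, 4) emits the single byte 0xa3."""
+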
+    def __init__(self, big_endian: bool):
+        self.stream = []
+        self.value = 0
+        self.shift = 0
+        self.byteorder = "big" if big_endian else "little"
+
+    def append(self, value: int, width: int):
+        self.value = self.value | (value << self.shift)
+        self.shift += width
+
+        if (self.shift % 8) == 0:
+            width = int(self.shift / 8)
+            self.stream.extend(self.value.to_bytes(width, byteorder=self.byteorder))
+            self.shift = 0
+            self.value = 0
+
+
+class Value:
+    def __init__(self, value: object, width: Optional[int] = None):
+        self.value = value
+        if width is not None:
+            self.width = width
+        elif isinstance(value, int) or callable(value):
+            raise Exception("Creating scalar value of unspecified width")
+        elif isinstance(value, list):
+            self.width = sum([v.width for v in value])
+        elif isinstance(value, Packet):
+            self.width = value.width
+        else:
+            raise Exception(f"Malformed value {value}")
+
+    def finalize(self, parent: "Packet"):
+        if callable(self.width):
+            self.width = self.width(parent)
+
+        if callable(self.value):
+            self.value = self.value(parent)
+        elif isinstance(self.value, list):
+            for v in self.value:
+                v.finalize(parent)
+        elif isinstance(self.value, Packet):
+            self.value.finalize()
+
+    def serialize_(self, serializer: BitSerializer):
+        if isinstance(self.value, int):
+            serializer.append(self.value, self.width)
+        elif isinstance(self.value, list):
+            for v in self.value:
+                v.serialize_(serializer)
+        elif isinstance(self.value, Packet):
+            self.value.serialize_(serializer)
+        else:
+            raise Exception(f"Malformed value {self.value}")
+
+    def show(self, indent: int = 0):
+        space = " " * indent
+        if isinstance(self.value, int):
+            print(f"{space}{self.name}: {hex(self.value)}")
+        elif isinstance(self.value, list):
+            print(f"{space}{self.name}[{len(self.value)}]:")
+            for v in self.value:
+                v.show(indent + 2)
+        elif isinstance(self.value, Packet):
+            print(f"{space}{self.name}:")
+            self.value.show(indent + 2)
+
+    def to_json(self) -> object:
+        if isinstance(self.value, int):
+            return self.value
+        elif isinstance(self.value, list):
+            return [v.to_json() for v in self.value]
+        elif isinstance(self.value, Packet):
+            return self.value.to_json()
+
+
+class Field:
+    def __init__(self, value: Value, ref: ast.Field):
+        self.value = value
+        self.ref = ref
+
+    def finalize(self, parent: "Packet"):
+        self.value.finalize(parent)
+
+    def serialize_(self, serializer: BitSerializer):
+        self.value.serialize_(serializer)
+
+    def clone(self):
+        return Field(copy.copy(self.value), self.ref)
+
+
+class Packet:
+    def __init__(self, fields: List[Field], ref: ast.Declaration):
+        self.fields = fields
+        self.ref = ref
+
+    def finalize(self, parent: Optional["Packet"] = None):
+        for f in self.fields:
+            f.finalize(self)
+
+    def serialize_(self, serializer: BitSerializer):
+        for f in self.fields:
+            f.serialize_(serializer)
+
+    def serialize(self, big_endian: bool) -> bytes:
+        serializer = BitSerializer(big_endian)
+        self.serialize_(serializer)
+        if serializer.shift != 0:
+            raise Exception("The packet size is not an integral number of octets")
+        return bytes(serializer.stream)
+
+    def show(self, indent: int = 0):
+        for f in self.fields:
+            f.value.show(indent)
+
+    def to_json(self) -> dict:
+        result = dict()
+        for f in self.fields:
+            if isinstance(f.ref, (ast.PayloadField, ast.BodyField)) and isinstance(
+                f.value.value, Packet
+            ):
+                result.update(f.value.to_json())
+            elif isinstance(f.ref, (ast.PayloadField, ast.BodyField)):
+                result["payload"] = f.value.to_json()
+            elif hasattr(f.ref, "id"):
+                result[f.ref.id] = f.value.to_json()
+        return result
+
+    @property
+    def width(self) -> int:
+        self.finalize()
+        return sum([f.value.width for f in self.fields])
+
+
+class BitGenerator:
+    def __init__(self):
+        self.value = 0
+        self.shift = 0
+
+    def generate(self, width: int) -> Value:
+        """Generate an integer value of the selected width."""
+        value = 0
+        remains = width
+        while remains > 0:
+            w = min(8 - self.shift, remains)
+            mask = (1 << w) - 1
+            value = (value << w) | ((self.value >> self.shift) & mask)
+            remains -= w
+            self.shift += w
+            if self.shift >= 8:
+                self.shift = 0
+                self.value = (self.value + 1) % 0xFF
+        return Value(value, width)
+
+    def generate_list(self, width: int, count: int) -> List[Value]:
+        return [self.generate(width) for n in range(count)]
+
+
+generator = BitGenerator()
+
+
+def generate_size_field_values(field: ast.SizeField) -> List[Value]:
+    def get_field_size(parent: Packet, field_id: str) -> int:
+        for f in parent.fields:
+            if (
+                (field_id == "_payload_" and isinstance(f.ref, ast.PayloadField))
+                or (field_id == "_body_" and isinstance(f.ref, ast.BodyField))
+                or (getattr(f.ref, "id", None) == field_id)
+            ):
+                assert f.value.width % 8 == 0
+                size_modifier = int(getattr(f.ref, "size_modifier", None) or 0)
+                return int(f.value.width / 8) + size_modifier
+        raise Exception(
+            "Field {} not found in packet {}".format(field_id, parent.ref.id)
+        )
+
+    return [Value(lambda p: get_field_size(p, field.field_id), field.width)]
+
+
+def generate_count_field_values(field: ast.CountField) -> List[Value]:
+    def get_array_count(parent: Packet, field_id: str) -> int:
+        for f in parent.fields:
+            if getattr(f.ref, "id", None) == field_id:
+                assert isinstance(f.value.value, list)
+                return len(f.value.value)
+        raise Exception(
+            "Field {} not found in packet {}".format(field_id, parent.ref.id)
+        )
+
+    return [Value(lambda p: get_array_count(p, field.field_id), field.width)]
+
+
+def generate_checksum_field_values(field: ast.TypedefField) -> List[Value]:
+    field_width = core.get_field_size(field)
+
+    def basic_checksum(input: bytes, width: int):
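+        # Simple modular sum; e.g. basic_checksum(b"\x01\x02\xff", 8) == 0x02.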
+        assert width == 8
+        return sum(input) % 256
+
+    def compute_checksum(parent: Packet, field_id: str) -> int:
+        serializer = None
+        for f in parent.fields:
+            if isinstance(f.ref, ast.ChecksumField) and f.ref.field_id == field_id:
+                serializer = BitSerializer(
+                    f.ref.parent.file.endianness.value == "big_endian"
+                )
+            elif isinstance(f.ref, ast.TypedefField) and f.ref.id == field_id:
+                return basic_checksum(serializer.stream, field_width)
+            elif serializer:
+                f.value.serialize_(serializer)
+        raise Exception("malformed checksum")
+
+    return [Value(lambda p: compute_checksum(p, field.id), field_width)]
+
+
+def generate_padding_field_values(field: ast.PaddingField) -> List[Value]:
+    preceding_field_id = field.padded_field.id
+
+    def get_padding(parent: Packet, field_id: str, width: int) -> int:
+        for f in parent.fields:
+            if (
+                (field_id == "_payload_" and isinstance(f.ref, ast.PayloadField))
+                or (field_id == "_body_" and isinstance(f.ref, ast.BodyField))
+                or (getattr(f.ref, "id", None) == field_id)
+            ):
+                assert f.value.width % 8 == 0
+                assert f.value.width <= width
+                return width - f.value.width
+        raise Exception(
+            "Field {} not found in packet {}".format(field_id, parent.ref.id)
+        )
+
+    return [Value(0, lambda p: get_padding(p, preceding_field_id, 8 * field.size))]
+
+
+def generate_payload_field_values(
+    field: Union[ast.PayloadField, ast.BodyField]
+) -> List[Value]:
+    payload_size = core.get_payload_field_size(field)
+    size_modifier = int(getattr(field, "size_modifier", None) or 0)
+
+    # If the payload has a size field, generate an empty payload and
+    # a payload of maximum size. If not, generate a payload of the default size.
+    max_size = (1 << payload_size.width) - 1 if payload_size else DEFAULT_PAYLOAD_SIZE
+    max_size -= size_modifier
+
+    assert max_size > 0
+    return [Value([]), Value(generator.generate_list(8, max_size))]
+
+
+def generate_scalar_array_field_values(field: ast.ArrayField) -> List[Value]:
+    if field.width % 8 != 0:
+        raise Exception("Array element size is not a multiple of 8")
+
+    array_size = core.get_array_field_size(field)
+    element_width = int(field.width / 8)
+
+    # TODO
+    # The array might also be bounded if it is included in the sized payload
+    # of a packet.
+
+    # Apply the size modifiers.
+    size_modifier = int(getattr(field, "size_modifier", None) or 0)
+
+    # The element width is known, and the array element count is known
+    # statically.
+    if isinstance(array_size, int):
+        return [Value(generator.generate_list(field.width, array_size))]
+
+    # The element width is known, and the array element count is known
+    # by count field.
+    elif isinstance(array_size, ast.CountField):
+        min_count = 0
+        max_count = (1 << array_size.width) - 1
+        return [Value([]), Value(generator.generate_list(field.width, max_count))]
+
+    # The element width is known, and the array full size is known
+    # by size field.
+    elif isinstance(array_size, ast.SizeField):
+        min_count = 0
+        max_size = (1 << array_size.width) - 1 - size_modifier
+        max_count = int(max_size / element_width)
+        return [Value([]), Value(generator.generate_list(field.width, max_count))]
+
+    # The element width is known, but the array size is unknown.
+    # Generate two arrays: one empty and one including some possible element
+    # values.
+    else:
+        return [
+            Value([]),
+            Value(generator.generate_list(field.width, DEFAULT_ARRAY_COUNT)),
+        ]
+
+
+def generate_typedef_array_field_values(field: ast.ArrayField) -> List[Value]:
+    array_size = core.get_array_field_size(field)
+    element_width = core.get_array_element_size(field)
+    if element_width:
+        if element_width % 8 != 0:
+            raise Exception("Array element size is not a multiple of 8")
+        element_width = int(element_width / 8)
+
+    # Generate element values to use for the generation.
+    type_decl = field.parent.file.typedef_scope[field.type_id]
+
+    def generate_list(count: Optional[int]) -> List[Value]:
+        """Generate an array of specified length.
+        If the count is None all possible array items are returned."""
+        element_values = generate_typedef_values(type_decl)
+
+        # Requested a variable count, send everything in one chunk.
+        if count is None:
+            return [Value(element_values)]
+        # Have more items than the requested count.
+        # Slice the possible array values in multiple slices.
+        elif len(element_values) > count:
+            # Add more elements in case of wrap-over.
+            elements_count = len(element_values)
+            element_values.extend(generate_typedef_values(type_decl))
+            chunk_count = int((elements_count + count - 1) / count)
+            return [
+                Value(element_values[n * count : (n + 1) * count])
+                for n in range(chunk_count)
+            ]
+        # Have less items than the requested count.
+        # Generate additional items to fill the gap.
+        else:
+            chunk = element_values
+            while len(chunk) < count:
+                chunk.extend(generate_typedef_values(type_decl))
+            return [Value(chunk[:count])]
+
+    # TODO
+    # The array might also be bounded if it is included in the sized payload
+    # of a packet.
+
+    # Apply the size modifier.
+    size_modifier = int(getattr(field, "size_modifier", None) or 0)
+
+    min_size = 0
+    max_size = MAX_ARRAY_SIZE
+    min_count = 0
+    max_count = MAX_ARRAY_COUNT
+
+    if field.padded_size:
+        max_size = field.padded_size
+
+    if isinstance(array_size, ast.SizeField):
+        max_size = (1 << array_size.width) - 1 - size_modifier
+        min_size = size_modifier
+    elif isinstance(array_size, ast.CountField):
+        max_count = (1 << array_size.width) - 1
+    elif isinstance(array_size, int):
+        min_count = array_size
+        max_count = array_size
+
+    values = []
+    chunk = []
+    chunk_size = 0
+
+    while not values:
+        element_values = generate_typedef_values(type_decl)
+        for element_value in element_values:
+            element_size = int(element_value.width / 8)
+
+            if len(chunk) >= max_count or chunk_size + element_size > max_size:
+                assert len(chunk) >= min_count
+                values.append(Value(chunk))
+                chunk = []
+                chunk_size = 0
+
+            chunk.append(element_value)
+            chunk_size += element_size
+
+    if min_count == 0:
+        values.append(Value([]))
+
+    return values
+
+    # The element width is not known, but the array full octet size
+    # is known by size field. Generate two arrays: of minimal and maximum
+    # size. All unused element values are packed into arrays of varying size.
+    if element_width is None and isinstance(array_size, ast.SizeField):
+        element_values = generate_typedef_values(type_decl)
+        chunk = []
+        chunk_size = 0
+        values = [Value([])]
+        for element_value in element_values:
+            assert element_value.width % 8 == 0
+            element_size = int(element_value.width / 8)
+            if chunk_size + element_size > max_size:
+                values.append(Value(chunk))
+                chunk = []
+            chunk.append(element_value)
+            chunk_size += element_size
+        if chunk:
+            values.append(Value(chunk))
+        return values
+
+    # The element width is not known, but the array element count
+    # is known statically or by count field. Generate two arrays:
+    # of minimal and maximum length. All unused element values are packed into
+    # arrays of varying count.
+    elif element_width is None and isinstance(array_size, ast.CountField):
+        return [Value([])] + generate_list(max_count)
+
+    # The element width is not known, and the array element count is known
+    # statically.
+    elif element_width is None and isinstance(array_size, int):
+        return generate_list(array_size)
+
+    # Neither the count nor the size is known,
+    # generate two arrays: one empty and one including all possible element
+    # values.
+    elif element_width is None:
+        return [Value([])] + generate_list(None)
+
+    # The element width is known, and the array element count is known
+    # statically.
+    elif isinstance(array_size, int):
+        return generate_list(array_size)
+
+    # The element width is known, and the array element count is known
+    # by count field.
+    elif isinstance(array_size, ast.CountField):
+        return [Value([])] + generate_list(max_count)
+
+    # The element width is known, and the array full size is known
+    # by size field.
+    elif isinstance(array_size, ast.SizeField):
+        return [Value([])] + generate_list(max_count)
+
+    # The element width is known, but the array size is unknown.
+    # Generate two arrays: one empty and one including all possible element
+    # values.
+    else:
+        return [Value([])] + generate_list(None)
+
+
+def generate_array_field_values(field: ast.ArrayField) -> List[Value]:
+    if field.width is not None:
+        return generate_scalar_array_field_values(field)
+    else:
+        return generate_typedef_array_field_values(field)
+
+
+def generate_typedef_field_values(
+    field: ast.TypedefField, constraints: List[ast.Constraint]
+) -> List[Value]:
+    type_decl = field.parent.file.typedef_scope[field.type_id]
+
+    # Check for constraint on enum field.
+    if isinstance(type_decl, ast.EnumDeclaration):
+        for c in constraints:
+            if c.id == field.id:
+                for tag in type_decl.tags:
+                    if tag.id == c.tag_id:
+                        return [Value(tag.value, type_decl.width)]
+                raise Exception("undefined enum tag")
+
+    # Checksum field needs to know the checksum range.
+    if isinstance(type_decl, ast.ChecksumDeclaration):
+        return generate_checksum_field_values(field)
+
+    return generate_typedef_values(type_decl)
+
+
+def generate_field_values(
+    field: ast.Field, constraints: List[ast.Constraint], payload: Optional[List[Packet]]
+) -> List[Value]:
+    if isinstance(field, ast.ChecksumField):
+        # Checksum fields are just markers.
+        return [Value(0, 0)]
+
+    elif isinstance(field, ast.PaddingField):
+        return generate_padding_field_values(field)
+
+    elif isinstance(field, ast.SizeField):
+        return generate_size_field_values(field)
+
+    elif isinstance(field, ast.CountField):
+        return generate_count_field_values(field)
+
+    elif isinstance(field, (ast.BodyField, ast.PayloadField)) and payload:
+        return [Value(p) for p in payload]
+
+    elif isinstance(field, (ast.BodyField, ast.PayloadField)):
+        return generate_payload_field_values(field)
+
+    elif isinstance(field, ast.FixedField) and field.enum_id:
+        enum_decl = field.parent.file.typedef_scope[field.enum_id]
+        for tag in enum_decl.tags:
+            if tag.id == field.tag_id:
+                return [Value(tag.value, enum_decl.width)]
+        raise Exception("undefined enum tag")
+
+    elif isinstance(field, ast.FixedField) and field.width:
+        return [Value(field.value, field.width)]
+
+    elif isinstance(field, ast.ReservedField):
+        return [Value(0, field.width)]
+
+    elif isinstance(field, ast.ArrayField):
+        return generate_array_field_values(field)
+
+    elif isinstance(field, ast.ScalarField):
+        for c in constraints:
+            if c.id == field.id:
+                return [Value(c.value, field.width)]
+        mask = (1 << field.width) - 1
+        return [
+            Value(0, field.width),
+            Value(-1 & mask, field.width),
+            generator.generate(field.width),
+        ]
+
+    elif isinstance(field, ast.TypedefField):
+        return generate_typedef_field_values(field, constraints)
+
+    else:
+        raise Exception("unsupported field kind")
+
+
+def generate_fields(
+    decl: ast.Declaration,
+    constraints: List[ast.Constraint],
+    payload: Optional[List[Packet]],
+) -> List[List[Field]]:
+    return [
+        [Field(v, f) for v in generate_field_values(f, constraints, payload)]
+        for f in decl.fields
+    ]
+
+
+def generate_fields_recursive(
+    scope: dict,
+    decl: ast.Declaration,
+    constraints: List[ast.Constraint] = [],
+    payload: Optional[List[Packet]] = None,
+) -> List[List[Field]]:
+    fields = generate_fields(decl, constraints, payload)
+
+    if not decl.parent_id:
+        return fields
+
+    packets = [Packet(fields, decl) for fields in product(fields)]
+    parent_decl = scope[decl.parent_id]
+    return generate_fields_recursive(
+        scope, parent_decl, constraints + decl.constraints, payload=packets
+    )
+
+
+def generate_struct_values(decl: ast.StructDeclaration) -> List[Packet]:
+    fields = generate_fields_recursive(decl.file.typedef_scope, decl)
+    return [Packet(fields, decl) for fields in product(fields)]
+
+
+def generate_packet_values(decl: ast.PacketDeclaration) -> List[Packet]:
+    fields = generate_fields_recursive(decl.file.packet_scope, decl)
+    return [Packet(fields, decl) for fields in product(fields)]
+
+
+def generate_typedef_values(decl: ast.Declaration) -> List[Value]:
+    if isinstance(decl, ast.EnumDeclaration):
+        return [Value(t.value, decl.width) for t in decl.tags]
+
+    elif isinstance(decl, ast.ChecksumDeclaration):
+        raise Exception("ChecksumDeclaration handled in typedef field")
+
+    elif isinstance(decl, ast.CustomFieldDeclaration):
+        raise Exception("TODO custom field")
+
+    elif isinstance(decl, ast.StructDeclaration):
+        return [Value(p) for p in generate_struct_values(decl)]
+
+    else:
+        raise Exception("unsupported typedef declaration type")
+
+
+def product(fields: List[List[Field]]) -> List[List[Field]]:
+    """Perform a cartesian product of generated options for packet field values."""
+
+    def aux(vec: List[List[Field]]) -> List[List[Field]]:
+        if len(vec) == 0:
+            return [[]]
+        return [[item.clone()] + items for item in vec[0] for items in aux(vec[1:])]
+
+    count = 1
+    max_len = 0
+    for f in fields:
+        count *= len(f)
+        max_len = max(max_len, len(f))
+
+    # Limit products to 32 elements to prevent combinatorial
+    # explosion.
+    if count <= 32:
+        return aux(fields)
+
+    # If there are too many products, select samples which at a minimum
+    # cover every field value.
+    else:
+        return [[f[idx % len(f)] for f in fields] for idx in range(0, max_len + 1)]
+
+
+def serialize_values(file: ast.File, values: List[Packet]) -> List[dict]:
+    results = []
+    for v in values:
+        v.finalize()
+        packed = v.serialize(file.endianness.value == "big_endian")
+        result = {
+            "packed": "".join([f"{b:02x}" for b in packed]),
+            "unpacked": v.to_json(),
+        }
+        if v.ref.parent_id:
+            result["packet"] = v.ref.id
+        results.append(result)
+    return results
+
+
+def run(input: Path, packet: List[str]):
+    with open(input) as f:
+        file = ast.File.from_json(json.load(f))
+    core.desugar(file)
+
+    results = dict()
+    for decl in file.packet_scope.values():
+        if core.get_derived_packets(decl) or (packet and decl.id not in packet):
+            continue
+
+        try:
+            values = generate_packet_values(decl)
+            ancestor = core.get_packet_ancestor(decl)
+            results[ancestor.id] = results.get(ancestor.id, []) + serialize_values(
+                file, values
+            )
+        except Exception as exn:
+            print(
+                f"Skipping packet {decl.id}; cannot generate values: {exn}",
+                file=sys.stderr,
+            )
+
+    results = [{"packet": k, "tests": v} for (k, v) in results.items()]
+    json.dump(results, sys.stdout, indent=2)
+
+
+def main() -> int:
+    """Generate test vectors for top-level PDL packets."""
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument(
+        "--input", type=Path, required=True, help="Input PDL-JSON source"
+    )
+    parser.add_argument(
+        "--packet",
+        type=lambda x: x.split(","),
+        required=False,
+        action="extend",
+        default=[],
+        help="Select PDL packet to test",
+    )
+    return run(**vars(parser.parse_args()))
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/src/analyzer.rs b/src/analyzer.rs
index 1757e5e..c89d3d0 100644
--- a/src/analyzer.rs
+++ b/src/analyzer.rs
@@ -190,6 +190,7 @@
     DuplicateTagRange = 41,
     E42 = 42,
     E43 = 43,
+    DuplicateDefaultTag = 44,
 }
 
 impl From<ErrorCode> for String {
@@ -205,8 +206,10 @@
 }
 
 /// Gather information about the full AST.
-#[derive(Debug, Default)]
-pub struct Scope<'d, A: Annotation> {
+#[derive(Debug)]
+pub struct Scope<'d, A: Annotation = ast::Annotation> {
+    /// Reference to the source file.
+    pub file: &'d crate::ast::File<A>,
     /// Collection of Group, Packet, Enum, Struct, Checksum, and CustomField
     /// declarations.
     pub typedef: HashMap<String, &'d crate::ast::Decl<A>>,
@@ -245,7 +248,7 @@
 impl<'d, A: Annotation + Default> Scope<'d, A> {
     pub fn new(file: &'d crate::ast::File<A>) -> Result<Scope<'d, A>, Diagnostics> {
         // Gather top-level declarations.
-        let mut scope: Scope<A> = Default::default();
+        let mut scope: Scope<A> = Scope { file, typedef: Default::default() };
         let mut diagnostics: Diagnostics = Default::default();
         for decl in &file.declarations {
             if let Some(id) = decl.id() {
@@ -277,6 +280,14 @@
         }
     }
 
+    /// Iterate over the child declarations of the selected declaration.
+    pub fn iter_children<'s>(
+        &'s self,
+        decl: &'d crate::ast::Decl<A>,
+    ) -> impl Iterator<Item = &'d crate::ast::Decl<A>> + 's {
+        self.file.iter_children(decl)
+    }
+
     /// Return the parent declaration of the selected declaration,
     /// if it has one.
     pub fn get_parent(&self, decl: &crate::ast::Decl<A>) -> Option<&'d crate::ast::Decl<A>> {
@@ -291,6 +302,15 @@
         std::iter::successors(self.get_parent(decl), |decl| self.get_parent(decl))
     }
 
+    /// Iterate over the parent declarations of the selected declaration,
+    /// including the current declaration.
+    pub fn iter_parents_and_self<'s>(
+        &'s self,
+        decl: &'d crate::ast::Decl<A>,
+    ) -> impl Iterator<Item = &'d Decl<A>> + 's {
+        std::iter::successors(Some(decl), |decl| self.get_parent(decl))
+    }
+
     /// Iterate over the declaration and its parent's fields.
     pub fn iter_fields<'s>(
         &'s self,
@@ -299,11 +319,27 @@
         std::iter::successors(Some(decl), |decl| self.get_parent(decl)).flat_map(Decl::fields)
     }
 
+    /// Iterate over the declaration parent's fields.
+    pub fn iter_parent_fields<'s>(
+        &'s self,
+        decl: &'d crate::ast::Decl<A>,
+    ) -> impl Iterator<Item = &'d crate::ast::Field<A>> + 's {
+        std::iter::successors(self.get_parent(decl), |decl| self.get_parent(decl))
+            .flat_map(Decl::fields)
+    }
+
+    /// Iterate over the declaration and its parent's constraints.
+    pub fn iter_constraints<'s>(
+        &'s self,
+        decl: &'d crate::ast::Decl<A>,
+    ) -> impl Iterator<Item = &'d Constraint> + 's {
+        std::iter::successors(Some(decl), |decl| self.get_parent(decl)).flat_map(Decl::constraints)
+    }
+
     /// Return the type declaration for the selected field, if applicable.
-    #[allow(dead_code)]
-    pub fn get_declaration(
+    pub fn get_type_declaration(
         &self,
-        field: &'d crate::ast::Field<A>,
+        field: &crate::ast::Field<A>,
     ) -> Option<&'d crate::ast::Decl<A>> {
         match &field.desc {
             FieldDesc::Checksum { .. }
@@ -323,6 +359,24 @@
             | FieldDesc::Typedef { type_id, .. } => self.typedef.get(type_id).cloned(),
         }
     }
+
+    /// Test if the selected field is a bit-field.
+    pub fn is_bitfield(&self, field: &crate::ast::Field<A>) -> bool {
+        match &field.desc {
+            FieldDesc::Size { .. }
+            | FieldDesc::Count { .. }
+            | FieldDesc::ElementSize { .. }
+            | FieldDesc::FixedScalar { .. }
+            | FieldDesc::FixedEnum { .. }
+            | FieldDesc::Reserved { .. }
+            | FieldDesc::Scalar { .. } => true,
+            FieldDesc::Typedef { type_id, .. } => {
+                let field = self.typedef.get(type_id.as_str());
+                matches!(field, Some(Decl { desc: DeclDesc::Enum { .. }, .. }))
+            }
+            _ => false,
+        }
+    }
 }
 
 /// Return the bit-width of a scalar value.
@@ -682,6 +736,39 @@
         }
     }
 
+    fn check_tag_other<'a>(
+        tag: &'a TagOther,
+        tags_by_id: &mut HashMap<&'a str, SourceRange>,
+        tag_other: &mut Option<SourceRange>,
+        diagnostics: &mut Diagnostics,
+    ) {
+        if let Some(prev) = tags_by_id.insert(&tag.id, tag.loc) {
+            diagnostics.push(
+                Diagnostic::error()
+                    .with_code(ErrorCode::DuplicateTagIdentifier)
+                    .with_message(format!("duplicate tag identifier `{}`", tag.id))
+                    .with_labels(vec![
+                        tag.loc.primary(),
+                        prev.secondary()
+                            .with_message(format!("`{}` is first declared here", tag.id)),
+                    ]),
+            )
+        }
+        if let Some(prev) = tag_other {
+            diagnostics.push(
+                Diagnostic::error()
+                    .with_code(ErrorCode::DuplicateDefaultTag)
+                    .with_message("duplicate default tag".to_owned())
+                    .with_labels(vec![
+                        tag.loc.primary(),
+                        prev.secondary()
+                            .with_message("the default tag is first declared here".to_owned()),
+                    ]),
+            )
+        }
+        *tag_other = Some(tag.loc)
+    }
+
     let mut diagnostics: Diagnostics = Default::default();
     for decl in &file.declarations {
         if let DeclDesc::Enum { tags, width, .. } = &decl.desc {
@@ -694,6 +781,7 @@
                     _ => None,
                 })
                 .collect::<Vec<_>>();
+            let mut tag_other = None;
 
             for tag in tags {
                 match tag {
@@ -712,6 +800,9 @@
                         &mut tags_by_value,
                         &mut diagnostics,
                     ),
+                    Tag::Other(other) => {
+                        check_tag_other(other, &mut tags_by_id, &mut tag_other, &mut diagnostics)
+                    }
                 }
             }
 
@@ -1842,6 +1933,17 @@
         }
         "#
         );
+
+        raises!(
+            DuplicateTagIdentifier,
+            r#"
+        little_endian_packets
+        enum A : 8 {
+            X = 0,
+            X = ..,
+        }
+        "#
+        );
     }
 
     #[test]
@@ -2520,6 +2622,22 @@
     }
 
     #[test]
+    fn test_e44() {
+        raises!(
+            DuplicateDefaultTag,
+            r#"
+        little_endian_packets
+        enum A : 8 {
+            A = 0,
+            X = ..,
+            B = 1,
+            Y = ..,
+        }
+        "#
+        );
+    }
+
+    #[test]
     fn test_enum_declaration() {
         valid!(
             r#"
@@ -2554,6 +2672,17 @@
         }
         "#
         );
+
+        valid!(
+            r#"
+        little_endian_packets
+        enum A : 7 {
+            A = 50..100,
+            X = 101,
+            UNKNOWN = ..,
+        }
+        "#
+        );
     }
 
     use analyzer::ast::Size;
diff --git a/src/ast.rs b/src/ast.rs
index da46c13..30c2a4d 100644
--- a/src/ast.rs
+++ b/src/ast.rs
@@ -86,11 +86,19 @@
     pub tags: Vec<TagValue>,
 }
 
+#[derive(Debug, Clone, Serialize)]
+#[serde(tag = "kind", rename = "tag")]
+pub struct TagOther {
+    pub id: String,
+    pub loc: SourceRange,
+}
+
 #[derive(Debug, Serialize, Clone, PartialEq, Eq)]
 #[serde(untagged)]
 pub enum Tag {
     Value(TagValue),
     Range(TagRange),
+    Other(TagOther),
 }
 
 #[derive(Debug, Serialize, Clone)]
@@ -288,23 +296,35 @@
     }
 }
 
+impl Eq for TagOther {}
+impl PartialEq for TagOther {
+    fn eq(&self, other: &Self) -> bool {
+        // Implement structural equality, leave out loc.
+        self.id == other.id
+    }
+}
+
 impl Tag {
     pub fn id(&self) -> &str {
         match self {
-            Tag::Value(TagValue { id, .. }) | Tag::Range(TagRange { id, .. }) => id,
+            Tag::Value(TagValue { id, .. })
+            | Tag::Range(TagRange { id, .. })
+            | Tag::Other(TagOther { id, .. }) => id,
         }
     }
 
     pub fn loc(&self) -> &SourceRange {
         match self {
-            Tag::Value(TagValue { loc, .. }) | Tag::Range(TagRange { loc, .. }) => loc,
+            Tag::Value(TagValue { loc, .. })
+            | Tag::Range(TagRange { loc, .. })
+            | Tag::Other(TagOther { loc, .. }) => loc,
         }
     }
 
     pub fn value(&self) -> Option<usize> {
         match self {
             Tag::Value(TagValue { value, .. }) => Some(*value),
-            Tag::Range(_) => None,
+            Tag::Range(_) | Tag::Other(_) => None,
         }
     }
 }
@@ -451,6 +471,31 @@
         }
     }
 
+    /// Return the reference to the payload or body field in a declaration,
+    /// if present.
+    pub fn payload(&self) -> Option<&Field<A>> {
+        self.fields()
+            .find(|field| matches!(&field.desc, FieldDesc::Payload { .. } | FieldDesc::Body { .. }))
+    }
+
+    /// Return the reference to the payload or body size field in a declaration,
+    /// if present.
+    pub fn payload_size(&self) -> Option<&Field<A>> {
+        self.fields().find(|field| match &field.desc {
+            FieldDesc::Size { field_id, .. } => field_id == "_payload_" || field_id == "_body_",
+            _ => false,
+        })
+    }
+
+    /// Return the reference to the array size or count field in a declaration,
+    /// if present.
+    pub fn array_size(&self, id: &str) -> Option<&Field<A>> {
+        self.fields().find(|field| match &field.desc {
+            FieldDesc::Size { field_id, .. } | FieldDesc::Count { field_id, .. } => field_id == id,
+            _ => false,
+        })
+    }
+
     pub fn kind(&self) -> &str {
         match &self.desc {
             DeclDesc::Checksum { .. } => "checksum",
diff --git a/src/backends/rust.rs b/src/backends/rust.rs
index f369179..9dfdf8a 100644
--- a/src/backends/rust.rs
+++ b/src/backends/rust.rs
@@ -14,9 +14,10 @@
 
 //! Rust compiler backend.
 
-use crate::{ast, lint};
+use crate::{analyzer, ast};
 use quote::{format_ident, quote};
 use std::collections::BTreeSet;
+use std::collections::HashMap;
 use std::path::Path;
 use syn::LitInt;
 
@@ -30,25 +31,25 @@
 use parser::FieldParser;
 use serializer::FieldSerializer;
 
-#[cfg(not(tm_mainline_prod))]
 pub use heck::ToUpperCamelCase;
 
-#[cfg(tm_mainline_prod)]
-pub trait ToUpperCamelCase {
-    fn to_upper_camel_case(&self) -> String;
+pub trait ToIdent {
+    /// Generate a sanitized Rust identifier.
+    /// Rust keywords are escaped as raw identifiers.
+    fn to_ident(self) -> proc_macro2::Ident;
 }
 
-#[cfg(tm_mainline_prod)]
-impl ToUpperCamelCase for str {
-    fn to_upper_camel_case(&self) -> String {
-        use heck::CamelCase;
-        let camel_case = self.to_camel_case();
-        if camel_case.is_empty() {
-            camel_case
-        } else {
-            // PDL identifiers are a-zA-z0-9, so we're dealing with
-            // simple ASCII text.
-            format!("{}{}", &camel_case[..1].to_ascii_uppercase(), &camel_case[1..])
+impl ToIdent for &'_ str {
+    fn to_ident(self) -> proc_macro2::Ident {
+        match self {
+            "as" | "break" | "const" | "continue" | "crate" | "else" | "enum" | "extern"
+            | "false" | "fn" | "for" | "if" | "impl" | "in" | "let" | "loop" | "match" | "mod"
+            | "move" | "mut" | "pub" | "ref" | "return" | "self" | "Self" | "static" | "struct"
+            | "super" | "trait" | "true" | "type" | "unsafe" | "use" | "where" | "while"
+            | "async" | "await" | "dyn" | "abstract" | "become" | "box" | "do" | "final"
+            | "macro" | "override" | "priv" | "typeof" | "unsized" | "virtual" | "yield"
+            | "try" => format_ident!("r#{}", self),
+            _ => format_ident!("{}", self),
         }
     }
 }
@@ -84,7 +85,7 @@
 }
 
 fn generate_packet_size_getter<'a>(
-    scope: &lint::Scope<'a>,
+    scope: &analyzer::Scope<'a>,
     fields: impl Iterator<Item = &'a analyzer_ast::Field>,
     is_packet: bool,
 ) -> (usize, proc_macro2::TokenStream) {
@@ -97,7 +98,7 @@
             continue;
         }
 
-        let decl = scope.get_field_declaration(field);
+        let decl = scope.get_type_declaration(field);
         dynamic_widths.push(match &field.desc {
             ast::FieldDesc::Payload { .. } | ast::FieldDesc::Body { .. } => {
                 if is_packet {
@@ -111,11 +112,11 @@
                 }
             }
             ast::FieldDesc::Typedef { id, .. } => {
-                let id = format_ident!("{id}");
+                let id = id.to_ident();
                 quote!(self.#id.get_size())
             }
             ast::FieldDesc::Array { id, width, .. } => {
-                let id = format_ident!("{id}");
+                let id = id.to_ident();
                 match &decl {
                     Some(analyzer_ast::Decl {
                         desc: ast::DeclDesc::Struct { .. } | ast::DeclDesc::CustomField { .. },
@@ -163,7 +164,10 @@
     )
 }
 
-fn top_level_packet<'a>(scope: &lint::Scope<'a>, packet_name: &'a str) -> &'a analyzer_ast::Decl {
+fn top_level_packet<'a>(
+    scope: &analyzer::Scope<'a>,
+    packet_name: &'a str,
+) -> &'a analyzer_ast::Decl {
     let mut decl = scope.typedef[packet_name];
     while let ast::DeclDesc::Packet { parent_id: Some(parent_id), .. }
     | ast::DeclDesc::Struct { parent_id: Some(parent_id), .. } = &decl.desc
@@ -173,46 +177,41 @@
     decl
 }
 
-/// Find all constrained fields in children of `id`.
-fn find_constrained_fields<'a>(
-    scope: &'a lint::Scope<'a>,
-    id: &'a str,
-) -> Vec<&'a analyzer_ast::Field> {
-    let mut fields = Vec::new();
-    let mut field_names = BTreeSet::new();
-    let mut children = scope.iter_children(id).collect::<Vec<_>>();
-
-    while let Some(child) = children.pop() {
-        if let ast::DeclDesc::Packet { id, constraints, .. }
-        | ast::DeclDesc::Struct { id, constraints, .. } = &child.desc
-        {
-            let packet_scope = &scope.scopes[&scope.typedef[id]];
-            for constraint in constraints {
-                if field_names.insert(&constraint.id) {
-                    fields.push(packet_scope.all_fields[&constraint.id]);
-                }
-            }
-            children.extend(scope.iter_children(id).collect::<Vec<_>>());
-        }
-    }
-
-    fields
-}
-
 /// Find parent fields which are constrained in child packets.
 ///
 /// These fields are the fields which need to be passed in when
 /// parsing a `id` packet since their values are needed for one or
 /// more child packets.
 fn find_constrained_parent_fields<'a>(
-    scope: &'a lint::Scope<'a>,
-    id: &'a str,
-) -> impl Iterator<Item = &'a analyzer_ast::Field> {
-    let packet_scope = &scope.scopes[&scope.typedef[id]];
-    find_constrained_fields(scope, id).into_iter().filter(|field| {
-        let id = field.id().unwrap();
-        packet_scope.all_fields.contains_key(id) && packet_scope.get_packet_field(id).is_none()
-    })
+    scope: &analyzer::Scope<'a>,
+    id: &str,
+) -> Vec<&'a analyzer_ast::Field> {
+    let all_parent_fields: HashMap<String, &'a analyzer_ast::Field> = HashMap::from_iter(
+        scope
+            .iter_parent_fields(scope.typedef[id])
+            .filter_map(|f| f.id().map(|id| (id.to_string(), f))),
+    );
+
+    let mut fields = Vec::new();
+    let mut field_names = BTreeSet::new();
+    let mut children = scope.iter_children(scope.typedef[id]).collect::<Vec<_>>();
+
+    while let Some(child) = children.pop() {
+        if let ast::DeclDesc::Packet { id, constraints, .. }
+        | ast::DeclDesc::Struct { id, constraints, .. } = &child.desc
+        {
+            for constraint in constraints {
+                if field_names.insert(&constraint.id)
+                    && all_parent_fields.contains_key(&constraint.id)
+                {
+                    fields.push(all_parent_fields[&constraint.id]);
+                }
+            }
+            children.extend(scope.iter_children(scope.typedef[id]).collect::<Vec<_>>());
+        }
+    }
+
+    fields
 }
 
 /// Generate the declaration and implementation for a data struct.
@@ -220,27 +219,26 @@
 /// This struct will hold the data for a packet or a struct. It knows
 /// how to parse and serialize its own fields.
 fn generate_data_struct(
-    scope: &lint::Scope<'_>,
+    scope: &analyzer::Scope<'_>,
     endianness: ast::EndiannessValue,
     id: &str,
 ) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) {
     let decl = scope.typedef[id];
-    let packet_scope = &scope.scopes[&decl];
     let is_packet = matches!(&decl.desc, ast::DeclDesc::Packet { .. });
 
     let span = format_ident!("bytes");
     let serializer_span = format_ident!("buffer");
     let mut field_parser = FieldParser::new(scope, endianness, id, &span);
     let mut field_serializer = FieldSerializer::new(scope, endianness, id, &serializer_span);
-    for field in packet_scope.iter_fields() {
+    for field in decl.fields() {
         field_parser.add(field);
         field_serializer.add(field);
     }
     field_parser.done();
 
     let (parse_arg_names, parse_arg_types) = if is_packet {
-        let fields = find_constrained_parent_fields(scope, id).collect::<Vec<_>>();
-        let names = fields.iter().map(|f| format_ident!("{}", f.id().unwrap())).collect::<Vec<_>>();
+        let fields = find_constrained_parent_fields(scope, id);
+        let names = fields.iter().map(|f| f.id().unwrap().to_ident()).collect::<Vec<_>>();
         let types = fields.iter().map(|f| types::rust_type(f)).collect::<Vec<_>>();
         (names, types)
     } else {
@@ -248,7 +246,7 @@
     };
 
     let (constant_width, packet_size) =
-        generate_packet_size_getter(scope, packet_scope.iter_fields(), is_packet);
+        generate_packet_size_getter(scope, decl.fields(), is_packet);
     let conforms = if constant_width == 0 {
         quote! { true }
     } else {
@@ -257,14 +255,13 @@
     };
 
     let visibility = if is_packet { quote!() } else { quote!(pub) };
-    let has_payload = packet_scope.get_payload_field().is_some();
-    let has_children = scope.iter_children(id).next().is_some();
+    let has_payload = decl.payload().is_some();
+    let has_children = scope.iter_children(decl).next().is_some();
 
-    let struct_name = if is_packet { format_ident!("{id}Data") } else { format_ident!("{id}") };
-    let fields_with_ids =
-        packet_scope.iter_fields().filter(|f| f.id().is_some()).collect::<Vec<_>>();
+    let struct_name = if is_packet { format_ident!("{id}Data") } else { id.to_ident() };
+    let fields_with_ids = decl.fields().filter(|f| f.id().is_some()).collect::<Vec<_>>();
     let mut field_names =
-        fields_with_ids.iter().map(|f| format_ident!("{}", f.id().unwrap())).collect::<Vec<_>>();
+        fields_with_ids.iter().map(|f| f.id().unwrap().to_ident()).collect::<Vec<_>>();
     let mut field_types = fields_with_ids.iter().map(|f| types::rust_type(f)).collect::<Vec<_>>();
     if has_children || has_payload {
         if is_packet {
@@ -326,26 +323,10 @@
     (data_struct_decl, data_struct_impl)
 }
 
-/// Find all parents from `id`.
-///
-/// This includes the `Decl` for `id` itself.
-fn find_parents<'a>(scope: &lint::Scope<'a>, id: &str) -> Vec<&'a analyzer_ast::Decl> {
-    let mut decl = scope.typedef[id];
-    let mut parents = vec![decl];
-    while let ast::DeclDesc::Packet { parent_id: Some(parent_id), .. }
-    | ast::DeclDesc::Struct { parent_id: Some(parent_id), .. } = &decl.desc
-    {
-        decl = scope.typedef[parent_id];
-        parents.push(decl);
-    }
-    parents.reverse();
-    parents
-}
-
 /// Turn the constraint into a value (such as `10` or
 /// `SomeEnum::Foo`).
 pub fn constraint_to_value(
-    packet_scope: &lint::PacketScope<'_>,
+    all_fields: &HashMap<String, &'_ analyzer_ast::Field>,
     constraint: &ast::Constraint,
 ) -> proc_macro2::TokenStream {
     match constraint {
@@ -356,8 +337,8 @@
         // TODO(mgeisler): include type_id in `ast::Constraint` and
         // drop the packet_scope argument.
         ast::Constraint { tag_id: Some(tag_id), .. } => {
-            let type_id = match &packet_scope.all_fields[&constraint.id].desc {
-                ast::FieldDesc::Typedef { type_id, .. } => format_ident!("{type_id}"),
+            let type_id = match &all_fields[&constraint.id].desc {
+                ast::FieldDesc::Typedef { type_id, .. } => type_id.to_ident(),
                 _ => unreachable!("Invalid constraint: {constraint:?}"),
             };
             let tag_id = format_ident!("{}", tag_id.to_upper_camel_case());
@@ -369,95 +350,99 @@
 
 /// Generate code for a `ast::Decl::Packet`.
 fn generate_packet_decl(
-    scope: &lint::Scope<'_>,
+    scope: &analyzer::Scope<'_>,
     endianness: ast::EndiannessValue,
     id: &str,
 ) -> proc_macro2::TokenStream {
-    let packet_scope = &scope.scopes[&scope.typedef[id]];
-
+    let decl = scope.typedef[id];
     let top_level = top_level_packet(scope, id);
     let top_level_id = top_level.id().unwrap();
-    let top_level_packet = format_ident!("{top_level_id}");
+    let top_level_packet = top_level_id.to_ident();
     let top_level_data = format_ident!("{top_level_id}Data");
-    let top_level_id_lower = format_ident!("{}", top_level_id.to_lowercase());
+    let top_level_id_lower = top_level_id.to_lowercase().to_ident();
 
     // TODO(mgeisler): use the convert_case crate to convert between
     // `FooBar` and `foo_bar` in the code below.
     let span = format_ident!("bytes");
-    let id_lower = format_ident!("{}", id.to_lowercase());
-    let id_packet = format_ident!("{id}");
+    let id_lower = id.to_lowercase().to_ident();
+    let id_packet = id.to_ident();
     let id_child = format_ident!("{id}Child");
     let id_data_child = format_ident!("{id}DataChild");
     let id_builder = format_ident!("{id}Builder");
 
-    let parents = find_parents(scope, id);
+    let mut parents = scope.iter_parents_and_self(decl).collect::<Vec<_>>();
+    parents.reverse();
+
     let parent_ids = parents.iter().map(|p| p.id().unwrap()).collect::<Vec<_>>();
-    let parent_shifted_ids = parent_ids.iter().skip(1).map(|id| format_ident!("{id}"));
+    let parent_shifted_ids = parent_ids.iter().skip(1).map(|id| id.to_ident());
     let parent_lower_ids =
-        parent_ids.iter().map(|id| format_ident!("{}", id.to_lowercase())).collect::<Vec<_>>();
+        parent_ids.iter().map(|id| id.to_lowercase().to_ident()).collect::<Vec<_>>();
     let parent_shifted_lower_ids = parent_lower_ids.iter().skip(1).collect::<Vec<_>>();
-    let parent_packet = parent_ids.iter().map(|id| format_ident!("{id}"));
+    let parent_packet = parent_ids.iter().map(|id| id.to_ident());
     let parent_data = parent_ids.iter().map(|id| format_ident!("{id}Data"));
     let parent_data_child = parent_ids.iter().map(|id| format_ident!("{id}DataChild"));
 
     let all_fields = {
-        let mut fields = packet_scope.all_fields.values().collect::<Vec<_>>();
+        let mut fields = scope.iter_fields(decl).filter(|d| d.id().is_some()).collect::<Vec<_>>();
         fields.sort_by_key(|f| f.id());
         fields
     };
-    let all_field_names =
-        all_fields.iter().map(|f| format_ident!("{}", f.id().unwrap())).collect::<Vec<_>>();
+    let all_named_fields =
+        HashMap::from_iter(all_fields.iter().map(|f| (f.id().unwrap().to_string(), *f)));
+
+    let all_field_names = all_fields.iter().map(|f| f.id().unwrap().to_ident()).collect::<Vec<_>>();
     let all_field_types = all_fields.iter().map(|f| types::rust_type(f)).collect::<Vec<_>>();
     let all_field_borrows =
         all_fields.iter().map(|f| types::rust_borrow(f, scope)).collect::<Vec<_>>();
-    let all_field_getter_names = all_field_names.iter().map(|id| format_ident!("get_{id}"));
+    let all_field_getter_names =
+        all_fields.iter().map(|f| format_ident!("get_{}", f.id().unwrap()));
     let all_field_self_field = all_fields.iter().map(|f| {
         for (parent, parent_id) in parents.iter().zip(parent_lower_ids.iter()) {
-            if scope.scopes[parent].iter_fields().any(|ff| ff.id() == f.id()) {
+            if parent.fields().any(|ff| ff.id() == f.id()) {
                 return quote!(self.#parent_id);
             }
         }
         unreachable!("Could not find {f:?} in parent chain");
     });
 
+    let all_constraints = HashMap::<String, _>::from_iter(
+        scope.iter_constraints(decl).map(|c| (c.id.to_string(), c)),
+    );
+
     let unconstrained_fields = all_fields
         .iter()
-        .filter(|f| !packet_scope.all_constraints.contains_key(f.id().unwrap()))
+        .filter(|f| !all_constraints.contains_key(f.id().unwrap()))
         .collect::<Vec<_>>();
-    let unconstrained_field_names = unconstrained_fields
-        .iter()
-        .map(|f| format_ident!("{}", f.id().unwrap()))
-        .collect::<Vec<_>>();
+    let unconstrained_field_names =
+        unconstrained_fields.iter().map(|f| f.id().unwrap().to_ident()).collect::<Vec<_>>();
     let unconstrained_field_types = unconstrained_fields.iter().map(|f| types::rust_type(f));
 
     let rev_parents = parents.iter().rev().collect::<Vec<_>>();
     let builder_assignments = rev_parents.iter().enumerate().map(|(idx, parent)| {
         let parent_id = parent.id().unwrap();
-        let parent_id_lower = format_ident!("{}", parent_id.to_lowercase());
+        let parent_id_lower = parent_id.to_lowercase().to_ident();
         let parent_data = format_ident!("{parent_id}Data");
         let parent_data_child = format_ident!("{parent_id}DataChild");
-        let parent_packet_scope = &scope.scopes[&scope.typedef[parent_id]];
 
         let named_fields = {
-            let mut names =
-                parent_packet_scope.iter_fields().filter_map(ast::Field::id).collect::<Vec<_>>();
+            let mut names = parent.fields().filter_map(ast::Field::id).collect::<Vec<_>>();
             names.sort_unstable();
             names
         };
 
-        let mut field = named_fields.iter().map(|id| format_ident!("{id}")).collect::<Vec<_>>();
+        let mut field = named_fields.iter().map(|id| id.to_ident()).collect::<Vec<_>>();
         let mut value = named_fields
             .iter()
-            .map(|&id| match packet_scope.all_constraints.get(id) {
-                Some(constraint) => constraint_to_value(packet_scope, constraint),
+            .map(|&id| match all_constraints.get(id) {
+                Some(constraint) => constraint_to_value(&all_named_fields, constraint),
                 None => {
-                    let id = format_ident!("{id}");
+                    let id = id.to_ident();
                     quote!(self.#id)
                 }
             })
             .collect::<Vec<_>>();
 
-        if parent_packet_scope.get_payload_field().is_some() {
+        if parent.payload().is_some() {
             field.push(format_ident!("child"));
             if idx == 0 {
                 // Top-most parent, the child is simply created from
@@ -471,13 +456,13 @@
             } else {
                 // Child is created from the previous parent.
                 let prev_parent_id = rev_parents[idx - 1].id().unwrap();
-                let prev_parent_id_lower = format_ident!("{}", prev_parent_id.to_lowercase());
-                let prev_parent_id = format_ident!("{prev_parent_id}");
+                let prev_parent_id_lower = prev_parent_id.to_lowercase().to_ident();
+                let prev_parent_id = prev_parent_id.to_ident();
                 value.push(quote! {
                     #parent_data_child::#prev_parent_id(#prev_parent_id_lower)
                 });
             }
-        } else if scope.iter_children(parent_id).next().is_some() {
+        } else if scope.iter_children(parent).next().is_some() {
             field.push(format_ident!("child"));
             value.push(quote! { #parent_data_child::None });
         }
@@ -489,11 +474,10 @@
         }
     });
 
-    let children = scope.iter_children(id).collect::<Vec<_>>();
-    let has_payload = packet_scope.get_payload_field().is_some();
+    let children = scope.iter_children(decl).collect::<Vec<_>>();
+    let has_payload = decl.payload().is_some();
     let has_children_or_payload = !children.is_empty() || has_payload;
-    let child =
-        children.iter().map(|child| format_ident!("{}", child.id().unwrap())).collect::<Vec<_>>();
+    let child = children.iter().map(|child| child.id().unwrap().to_ident()).collect::<Vec<_>>();
     let child_data = child.iter().map(|child| format_ident!("{child}Data")).collect::<Vec<_>>();
     let get_payload = (children.is_empty() && has_payload).then(|| {
         quote! {
@@ -555,8 +539,7 @@
         }
     });
 
-    let ancestor_packets =
-        parent_ids[..parent_ids.len() - 1].iter().map(|id| format_ident!("{id}"));
+    let ancestor_packets = parent_ids[..parent_ids.len() - 1].iter().map(|id| id.to_ident());
     let impl_from_and_try_from = (top_level_id != id).then(|| {
         quote! {
             #(
@@ -689,7 +672,7 @@
 
 /// Generate code for a `ast::Decl::Struct`.
 fn generate_struct_decl(
-    scope: &lint::Scope<'_>,
+    scope: &analyzer::Scope<'_>,
     endianness: ast::EndiannessValue,
     id: &str,
 ) -> proc_macro2::TokenStream {
@@ -710,20 +693,26 @@
 ///            an additional Unknown case for unmatched values. Complete
 ///            enums (where the full range of values is covered) are
 ///            automatically closed.
-fn generate_enum_decl(
-    id: &str,
-    tags: &[ast::Tag],
-    width: usize,
-    open: bool,
-) -> proc_macro2::TokenStream {
+fn generate_enum_decl(id: &str, tags: &[ast::Tag], width: usize) -> proc_macro2::TokenStream {
+    // Determine if the enum is open, i.e. a default tag is defined.
+    fn enum_default_tag(tags: &[ast::Tag]) -> Option<ast::TagOther> {
+        tags.iter()
+            .filter_map(|tag| match tag {
+                ast::Tag::Other(tag) => Some(tag.clone()),
+                _ => None,
+            })
+            .next()
+    }
+
     // Determine if the enum is complete, i.e. all values in the backing
     // integer range have a matching tag in the original declaration.
     fn enum_is_complete(tags: &[ast::Tag], max: usize) -> bool {
         let mut ranges = tags
             .iter()
-            .map(|tag| match tag {
-                ast::Tag::Value(tag) => (tag.value, tag.value),
-                ast::Tag::Range(tag) => tag.range.clone().into_inner(),
+            .filter_map(|tag| match tag {
+                ast::Tag::Value(tag) => Some((tag.value, tag.value)),
+                ast::Tag::Range(tag) => Some(tag.range.clone().into_inner()),
+                _ => None,
             })
             .collect::<Vec<_>>();
         ranges.sort_unstable();
@@ -738,8 +727,7 @@
             })
     }
 
-    // Determine if the enum is primitive, i.e. does not contain any
-    // tag range.
+    // Determine if the enum is primitive, i.e. does not contain any tag range.
     fn enum_is_primitive(tags: &[ast::Tag]) -> bool {
         tags.iter().all(|tag| matches!(tag, ast::Tag::Value(_)))
     }
@@ -768,13 +756,15 @@
     let backing_type = types::Integer::new(width);
     let backing_type_str = proc_macro2::Literal::string(&format!("u{}", backing_type.width));
     let range_max = scalar_max(width);
+    let default_tag = enum_default_tag(tags);
+    let is_open = default_tag.is_some();
     let is_complete = enum_is_complete(tags, scalar_max(width));
     let is_primitive = enum_is_primitive(tags);
-    let name = format_ident!("{id}");
+    let name = id.to_ident();
 
     // Generate the variant cases for the enum declaration.
     // Tags declared in ranges are flattened in the same declaration.
-    let use_variant_values = is_primitive && (is_complete || !open);
+    let use_variant_values = is_primitive && (is_complete || !is_open);
     let repr_u64 = use_variant_values.then(|| quote! { #[repr(u64)] });
     let mut variants = vec![];
     for tag in tags.iter() {
@@ -790,6 +780,7 @@
                 let id = format_tag_ident(&tag.id);
                 variants.push(quote! { #id(Private<#backing_type>) })
             }
+            ast::Tag::Other(_) => (),
         }
     }
 
@@ -813,6 +804,7 @@
                 let end = format_value(*tag.range.end());
                 from_cases.push(quote! { #start ..= #end => Ok(#name::#id(Private(value))) })
             }
+            ast::Tag::Other(_) => (),
         }
     }
 
@@ -834,19 +826,22 @@
                 let id = format_tag_ident(&tag.id);
                 into_cases.push(quote! { #name::#id(Private(value)) => *value })
             }
+            ast::Tag::Other(_) => (),
         }
     }
 
     // Generate a default case if the enum is open and incomplete.
-    if !is_complete && open {
-        variants.push(quote! { Unknown(Private<#backing_type>) });
-        from_cases.push(quote! { 0..#range_max => Ok(#name::Unknown(Private(value))) });
-        into_cases.push(quote! { #name::Unknown(Private(value)) => *value });
+    if !is_complete && is_open {
+        let unknown_id = format_tag_ident(&default_tag.unwrap().id);
+        let range_max = format_value(range_max);
+        variants.push(quote! { #unknown_id(Private<#backing_type>) });
+        from_cases.push(quote! { 0..=#range_max => Ok(#name::#unknown_id(Private(value))) });
+        into_cases.push(quote! { #name::#unknown_id(Private(value)) => *value });
     }
 
     // Generate an error case if the enum size is lower than the backing
     // type size, or if the enum is closed or incomplete.
-    if backing_type.width != width || (!is_complete && !open) {
+    if backing_type.width != width || (!is_complete && !is_open) {
         from_cases.push(quote! { _ => Err(value) });
     }
 
@@ -907,7 +902,7 @@
 /// * `id` - Enum identifier.
 /// * `width` - Width of the backing type of the enum, in bits.
 fn generate_custom_field_decl(id: &str, width: usize) -> proc_macro2::TokenStream {
-    let id = format_ident!("{}", id);
+    let id = id.to_ident();
     let backing_type = types::Integer::new(width);
     let backing_type_str = proc_macro2::Literal::string(&format!("u{}", backing_type.width));
     let max_value = mask_bits(width, &format!("u{}", backing_type.width));
@@ -964,7 +959,7 @@
 }
 
 fn generate_decl(
-    scope: &lint::Scope<'_>,
+    scope: &analyzer::Scope<'_>,
     file: &analyzer_ast::File,
     decl: &analyzer_ast::Decl,
 ) -> proc_macro2::TokenStream {
@@ -978,7 +973,7 @@
             // implement the recursive (de)serialization.
             generate_struct_decl(scope, file.endianness.value, id)
         }
-        ast::DeclDesc::Enum { id, tags, width } => generate_enum_decl(id, tags, *width, false),
+        ast::DeclDesc::Enum { id, tags, width } => generate_enum_decl(id, tags, *width),
         ast::DeclDesc::CustomField { id, width: Some(width), .. } => {
             generate_custom_field_decl(id, *width)
         }
@@ -994,7 +989,7 @@
     let source = sources.get(file.file).expect("could not read source");
     let preamble = preamble::generate(Path::new(source.name()));
 
-    let scope = lint::Scope::new(file);
+    let scope = analyzer::Scope::new(file).expect("could not create scope");
     let decls = file.declarations.iter().map(|decl| generate_decl(&scope, file, decl));
     let code = quote! {
         #preamble
@@ -1050,9 +1045,10 @@
               }
             ";
         let file = parse_str(code);
-        let scope = lint::Scope::new(&file);
+        let scope = analyzer::Scope::new(&file).unwrap();
         let find_fields = |id| {
             find_constrained_parent_fields(&scope, id)
+                .iter()
                 .map(|field| field.id().unwrap())
                 .collect::<Vec<_>>()
         };
@@ -1120,14 +1116,18 @@
     test_pdl!(
         enum_declaration,
         r#"
-        // Should generate unknown case.
-        enum IncompleteTruncated : 3 {
+        enum IncompleteTruncatedClosed : 3 {
             A = 0,
             B = 1,
         }
 
-        // Should generate unknown case.
-        enum IncompleteTruncatedWithRange : 3 {
+        enum IncompleteTruncatedOpen : 3 {
+            A = 0,
+            B = 1,
+            UNKNOWN = ..
+        }
+
+        enum IncompleteTruncatedClosedWithRange : 3 {
             A = 0,
             B = 1..6 {
                 X = 1,
@@ -1135,7 +1135,15 @@
             }
         }
 
-        // Should generate unreachable case.
+        enum IncompleteTruncatedOpenWithRange : 3 {
+            A = 0,
+            B = 1..6 {
+                X = 1,
+                Y = 2,
+            },
+            UNKNOWN = ..
+        }
+
         enum CompleteTruncated : 3 {
             A = 0,
             B = 1,
@@ -1147,7 +1155,6 @@
             H = 7,
         }
 
-        // Should generate unreachable case.
         enum CompleteTruncatedWithRange : 3 {
             A = 0,
             B = 1..7 {
@@ -1156,7 +1163,6 @@
             }
         }
 
-        // Should generate no unknown or unreachable case.
         enum CompleteWithRange : 8 {
             A = 0,
             B = 1,
@@ -1522,6 +1528,15 @@
         "
     );
 
+    test_pdl!(
+        reserved_identifier,
+        "
+          packet Test {
+            type: 8,
+          }
+        "
+    );
+
     // TODO(mgeisler): enable this test when we have an approach to
     // struct fields with parents.
     //
diff --git a/src/backends/rust/parser.rs b/src/backends/rust/parser.rs
index 203a19b..f837082 100644
--- a/src/backends/rust/parser.rs
+++ b/src/backends/rust/parser.rs
@@ -14,11 +14,12 @@
 
 use crate::analyzer::ast as analyzer_ast;
 use crate::backends::rust::{
-    constraint_to_value, find_constrained_parent_fields, mask_bits, types, ToUpperCamelCase,
+    constraint_to_value, find_constrained_parent_fields, mask_bits, types, ToIdent,
+    ToUpperCamelCase,
 };
-use crate::{ast, lint};
+use crate::{analyzer, ast};
 use quote::{format_ident, quote};
-use std::collections::BTreeSet;
+use std::collections::{BTreeSet, HashMap};
 
 fn size_field_ident(id: &str) -> proc_macro2::Ident {
     format_ident!("{}_size", id.trim_matches('_'))
@@ -31,8 +32,9 @@
 }
 
 pub struct FieldParser<'a> {
-    scope: &'a lint::Scope<'a>,
+    scope: &'a analyzer::Scope<'a>,
     endianness: ast::EndiannessValue,
+    decl: &'a analyzer_ast::Decl,
     packet_name: &'a str,
     span: &'a proc_macro2::Ident,
     chunk: Vec<BitField<'a>>,
@@ -43,7 +45,7 @@
 
 impl<'a> FieldParser<'a> {
     pub fn new(
-        scope: &'a lint::Scope<'a>,
+        scope: &'a analyzer::Scope<'a>,
         endianness: ast::EndiannessValue,
         packet_name: &'a str,
         span: &'a proc_macro2::Ident,
@@ -51,6 +53,7 @@
         FieldParser {
             scope,
             endianness,
+            decl: scope.typedef[packet_name],
             packet_name,
             span,
             chunk: Vec::new(),
@@ -70,7 +73,7 @@
                 type_id.as_deref(),
                 *size,
                 field.annot.padded_size,
-                self.scope.get_field_declaration(field),
+                self.scope.get_type_declaration(field),
             ),
             ast::FieldDesc::Typedef { id, type_id } => self.add_typedef_field(id, type_id),
             ast::FieldDesc::Payload { size_modifier, .. } => {
@@ -139,19 +142,20 @@
 
             self.code.push(match &field.desc {
                 ast::FieldDesc::Scalar { id, .. } => {
-                    let id = format_ident!("{id}");
+                    let id = id.to_ident();
                     quote! {
                         let #id = #v;
                     }
                 }
                 ast::FieldDesc::FixedEnum { enum_id, tag_id, .. } => {
-                    let enum_id = format_ident!("{enum_id}");
-                    let tag_id = format_ident!("{}", tag_id.to_upper_camel_case());
+                    let enum_id = enum_id.to_ident();
+                    let tag_id = tag_id.to_upper_camel_case().to_ident();
                     quote! {
-                        if #v != #value_type::from(#enum_id::#tag_id)  {
+                        let fixed_value = #v;
+                        if fixed_value != #value_type::from(#enum_id::#tag_id)  {
                             return Err(Error::InvalidFixedValue {
                                 expected: #value_type::from(#enum_id::#tag_id) as u64,
-                                actual: #v as u64,
+                                actual: fixed_value as u64,
                             });
                         }
                     }
@@ -159,10 +163,11 @@
                 ast::FieldDesc::FixedScalar { value, .. } => {
                     let value = proc_macro2::Literal::usize_unsuffixed(*value);
                     quote! {
+                        let fixed_value = #v;
                         if #v != #value {
                             return Err(Error::InvalidFixedValue {
                                 expected: #value,
-                                actual: #v as u64,
+                                actual: fixed_value as u64,
                             });
                         }
                     }
@@ -171,13 +176,13 @@
                     let field_name = id;
                     let type_name = type_id;
                     let packet_name = &self.packet_name;
-                    let id = format_ident!("{id}");
-                    let type_id = format_ident!("{type_id}");
+                    let id = id.to_ident();
+                    let type_id = type_id.to_ident();
                     quote! {
-                        let #id = #type_id::try_from(#v).map_err(|_| Error::InvalidEnumValueError {
+                        let #id = #type_id::try_from(#v).map_err(|unknown_val| Error::InvalidEnumValueError {
                             obj: #packet_name.to_string(),
                             field: #field_name.to_string(),
-                            value: #v as u64,
+                            value: unknown_val as u64,
                             type_: #type_name.to_string(),
                         })?;
                     }
@@ -216,27 +221,23 @@
         self.shift = 0;
     }
 
-    fn packet_scope(&self) -> Option<&lint::PacketScope> {
-        self.scope.scopes.get(self.scope.typedef.get(self.packet_name)?)
-    }
-
     fn find_count_field(&self, id: &str) -> Option<proc_macro2::Ident> {
-        match self.packet_scope()?.get_array_size_field(id)?.desc {
+        match self.decl.array_size(id)?.desc {
             ast::FieldDesc::Count { .. } => Some(format_ident!("{id}_count")),
             _ => None,
         }
     }
 
     fn find_size_field(&self, id: &str) -> Option<proc_macro2::Ident> {
-        match self.packet_scope()?.get_array_size_field(id)?.desc {
+        match self.decl.array_size(id)?.desc {
             ast::FieldDesc::Size { .. } => Some(size_field_ident(id)),
             _ => None,
         }
     }
 
     fn payload_field_offset_from_end(&self) -> Option<usize> {
-        let packet_scope = self.packet_scope().unwrap();
-        let mut fields = packet_scope.iter_fields();
+        let decl = self.scope.typedef[self.packet_name];
+        let mut fields = decl.fields();
         fields.find(|f| {
             matches!(f.desc, ast::FieldDesc::Body { .. } | ast::FieldDesc::Payload { .. })
         })?;
@@ -327,7 +328,7 @@
             None => self.span.clone(),
         };
 
-        let id = format_ident!("{id}");
+        let id = id.to_ident();
 
         let parse_element = self.parse_array_element(&span, width, type_id, decl);
         match (element_width, &array_shape) {
@@ -468,8 +469,8 @@
         }
 
         let span = self.span;
-        let id = format_ident!("{id}");
-        let type_id = format_ident!("{type_id}");
+        let id = id.to_ident();
+        let type_id = type_id.to_ident();
 
         self.code.push(match decl.annot.size {
             analyzer_ast::Size::Unknown | analyzer_ast::Size::Dynamic => quote! {
@@ -510,8 +511,7 @@
     /// Parse body and payload fields.
     fn add_payload_field(&mut self, size_modifier: Option<&str>) {
         let span = self.span;
-        let packet_scope = self.packet_scope().unwrap();
-        let payload_size_field = packet_scope.get_payload_size_field();
+        let payload_size_field = self.decl.payload_size();
         let offset_from_end = self.payload_field_offset_from_end();
 
         if size_modifier.is_some() {
@@ -597,19 +597,19 @@
 
         if let Some(ast::DeclDesc::Enum { id, width, .. }) = decl.map(|decl| &decl.desc) {
             let get_uint = types::get_uint(self.endianness, *width, span);
-            let type_id = format_ident!("{id}");
+            let type_id = id.to_ident();
             let packet_name = &self.packet_name;
             return quote! {
-                #type_id::try_from(#get_uint).map_err(|_| Error::InvalidEnumValueError {
+                #type_id::try_from(#get_uint).map_err(|unknown_val| Error::InvalidEnumValueError {
                     obj: #packet_name.to_string(),
                     field: String::new(), // TODO(mgeisler): fill out or remove
-                    value: 0,
+                    value: unknown_val as u64,
                     type_: #id.to_string(),
                 })
             };
         }
 
-        let type_id = format_ident!("{}", type_id.unwrap());
+        let type_id = type_id.unwrap().to_ident();
         quote! {
             #type_id::parse_inner(#span)
         }
@@ -621,12 +621,15 @@
             return; // Structs don't parse the child structs recursively.
         }
 
-        let packet_scope = &self.scope.scopes[&decl];
-        let children = self.scope.iter_children(self.packet_name).collect::<Vec<_>>();
-        if children.is_empty() && packet_scope.get_payload_field().is_none() {
+        let children = self.scope.iter_children(decl).collect::<Vec<_>>();
+        if children.is_empty() && self.decl.payload().is_none() {
             return;
         }
 
+        let all_fields = HashMap::<String, _>::from_iter(
+            self.scope.iter_fields(decl).filter_map(|f| f.id().map(|id| (id.to_string(), f))),
+        );
+
         // Gather fields that are constrained in immediate child declarations.
         // Keep the fields sorted by name.
         // TODO: fields that are only matched in grand children will not be included.
@@ -639,10 +642,11 @@
         let mut child_parse_args = Vec::new();
         let mut child_ids_data = Vec::new();
         let mut child_ids = Vec::new();
+
         let get_constraint_value = |mut constraints: std::slice::Iter<'_, ast::Constraint>,
                                     id: &str|
          -> Option<proc_macro2::TokenStream> {
-            constraints.find(|c| c.id == id).map(|c| constraint_to_value(packet_scope, c))
+            constraints.find(|c| c.id == id).map(|c| constraint_to_value(&all_fields, c))
         };
 
         for child in children.iter() {
@@ -673,16 +677,17 @@
                 .collect::<Vec<_>>();
 
             let fields = find_constrained_parent_fields(self.scope, child.id().unwrap())
-                .map(|field| format_ident!("{}", field.id().unwrap()));
+                .iter()
+                .map(|field| field.id().unwrap().to_ident())
+                .collect::<Vec<_>>();
 
             match_values.push(quote!( (#(#tuple_values),*) ));
             child_parse_args.push(quote!( #(, #fields)*));
             child_ids_data.push(format_ident!("{}Data", child.id().unwrap()));
-            child_ids.push(format_ident!("{}", child.id().unwrap()));
+            child_ids.push(child.id().unwrap().to_ident());
         }
 
-        let constrained_field_idents =
-            constrained_fields.iter().map(|field| format_ident!("{field}"));
+        let constrained_field_idents = constrained_fields.iter().map(|field| field.to_ident());
         let packet_data_child = format_ident!("{}DataChild", self.packet_name);
 
         // Parsing of packet children requires having a payload field;
@@ -750,7 +755,7 @@
               }
             ";
         let file = parse_str(code);
-        let scope = lint::Scope::new(&file);
+        let scope = analyzer::Scope::new(&file).unwrap();
         let span = format_ident!("bytes");
         let parser = FieldParser::new(&scope, file.endianness.value, "P", &span);
         assert_eq!(parser.find_size_field("a"), None);
@@ -767,7 +772,7 @@
               }
             ";
         let file = parse_str(code);
-        let scope = lint::Scope::new(&file);
+        let scope = analyzer::Scope::new(&file).unwrap();
         let span = format_ident!("bytes");
         let parser = FieldParser::new(&scope, file.endianness.value, "P", &span);
         assert_eq!(parser.find_size_field("b"), None);
@@ -784,7 +789,7 @@
               }
             ";
         let file = parse_str(code);
-        let scope = lint::Scope::new(&file);
+        let scope = analyzer::Scope::new(&file).unwrap();
         let span = format_ident!("bytes");
         let parser = FieldParser::new(&scope, file.endianness.value, "P", &span);
         assert_eq!(parser.find_size_field("c"), Some(format_ident!("c_size")));
diff --git a/src/backends/rust/serializer.rs b/src/backends/rust/serializer.rs
index 497936c..345e98a 100644
--- a/src/backends/rust/serializer.rs
+++ b/src/backends/rust/serializer.rs
@@ -13,8 +13,8 @@
 // limitations under the License.
 
 use crate::analyzer::ast as analyzer_ast;
-use crate::backends::rust::{mask_bits, types, ToUpperCamelCase};
-use crate::{ast, lint};
+use crate::backends::rust::{mask_bits, types, ToIdent, ToUpperCamelCase};
+use crate::{analyzer, ast};
 use quote::{format_ident, quote};
 
 /// A single bit-field value.
@@ -25,7 +25,7 @@
 }
 
 pub struct FieldSerializer<'a> {
-    scope: &'a lint::Scope<'a>,
+    scope: &'a analyzer::Scope<'a>,
     endianness: ast::EndiannessValue,
     packet_name: &'a str,
     span: &'a proc_macro2::Ident,
@@ -36,7 +36,7 @@
 
 impl<'a> FieldSerializer<'a> {
     pub fn new(
-        scope: &'a lint::Scope<'a>,
+        scope: &'a analyzer::Scope<'a>,
         endianness: ast::EndiannessValue,
         packet_name: &'a str,
         span: &'a proc_macro2::Ident,
@@ -59,7 +59,7 @@
                 id,
                 *width,
                 field.annot.padded_size,
-                self.scope.get_field_declaration(field),
+                self.scope.get_type_declaration(field),
             ),
             ast::FieldDesc::Typedef { id, type_id } => {
                 self.add_typedef_field(id, type_id);
@@ -79,7 +79,7 @@
 
         match &field.desc {
             ast::FieldDesc::Scalar { id, width } => {
-                let field_name = format_ident!("{id}");
+                let field_name = id.to_ident();
                 let field_type = types::Integer::new(*width);
                 if field_type.width > *width {
                     let packet_name = &self.packet_name;
@@ -97,7 +97,7 @@
             }
             ast::FieldDesc::FixedEnum { enum_id, tag_id, .. } => {
                 let field_type = types::Integer::new(width);
-                let enum_id = format_ident!("{enum_id}");
+                let enum_id = enum_id.to_ident();
                 let tag_id = format_ident!("{}", tag_id.to_upper_camel_case());
                 self.chunk.push(BitField {
                     value: quote!(#field_type::from(#enum_id::#tag_id)),
@@ -111,7 +111,7 @@
                 self.chunk.push(BitField { value: quote!(#value), field_type, shift });
             }
             ast::FieldDesc::Typedef { id, .. } => {
-                let field_name = format_ident!("{id}");
+                let field_name = id.to_ident();
                 let field_type = types::Integer::new(width);
                 self.chunk.push(BitField {
                     value: quote!(#field_type::from(self.#field_name)),
@@ -127,14 +127,21 @@
                 let max_value = mask_bits(*width, "usize");
 
                 let decl = self.scope.typedef.get(self.packet_name).unwrap();
-                let scope = self.scope.scopes.get(decl).unwrap();
-                let value_field = scope.get_packet_field(field_id).unwrap();
+                let value_field = self
+                    .scope
+                    .iter_fields(decl)
+                    .find(|field| match &field.desc {
+                        ast::FieldDesc::Payload { .. } => field_id == "_payload_",
+                        ast::FieldDesc::Body { .. } => field_id == "_body_",
+                        _ => field.id() == Some(field_id),
+                    })
+                    .unwrap();
 
-                let field_name = format_ident!("{field_id}");
+                let field_name = field_id.to_ident();
                 let field_type = types::Integer::new(*width);
                 // TODO: size modifier
 
-                let value_field_decl = self.scope.get_field_declaration(value_field);
+                let value_field_decl = self.scope.get_type_declaration(value_field);
 
                 let field_size_name = format_ident!("{field_id}_size");
                 let array_size = match (&value_field.desc, value_field_decl.map(|decl| &decl.desc))
@@ -183,7 +190,7 @@
                 });
             }
             ast::FieldDesc::Count { field_id, width, .. } => {
-                let field_name = format_ident!("{field_id}");
+                let field_name = field_id.to_ident();
                 let field_type = types::Integer::new(*width);
                 if field_type.width > *width {
                     let packet_name = &self.packet_name;
@@ -289,7 +296,7 @@
             }
         };
 
-        let id = format_ident!("{id}");
+        let id = id.to_ident();
 
         self.code.push(match padding_size {
             Some(padding_size) =>
@@ -320,7 +327,7 @@
             panic!("Derived struct used in typedef field");
         }
 
-        let id = format_ident!("{id}");
+        let id = id.to_ident();
         let span = format_ident!("{}", self.span);
 
         self.code.push(match &decl.desc {
@@ -354,8 +361,8 @@
 
         let child_ids = self
             .scope
-            .iter_children(self.packet_name)
-            .map(|child| format_ident!("{}", child.id().unwrap()))
+            .iter_children(decl)
+            .map(|child| child.id().unwrap().to_ident())
             .collect::<Vec<_>>();
 
         let span = format_ident!("{}", self.span);
diff --git a/src/backends/rust/types.rs b/src/backends/rust/types.rs
index 5b1767d..b5f2b91 100644
--- a/src/backends/rust/types.rs
+++ b/src/backends/rust/types.rs
@@ -15,7 +15,8 @@
 //! Utility functions for dealing with Rust integer types.
 
 use crate::analyzer::ast as analyzer_ast;
-use crate::{ast, lint};
+use crate::backends::rust::ToIdent;
+use crate::{analyzer, ast};
 use quote::{format_ident, quote};
 
 /// A Rust integer type such as `u8`.
@@ -55,7 +56,7 @@
             quote!(#field_type)
         }
         ast::FieldDesc::Typedef { type_id, .. } => {
-            let field_type = format_ident!("{type_id}");
+            let field_type = type_id.to_ident();
             quote!(#field_type)
         }
         ast::FieldDesc::Array { width: Some(width), size: Some(size), .. } => {
@@ -68,12 +69,12 @@
             quote!(Vec<#field_type>)
         }
         ast::FieldDesc::Array { type_id: Some(type_id), size: Some(size), .. } => {
-            let field_type = format_ident!("{type_id}");
+            let field_type = type_id.to_ident();
             let size = proc_macro2::Literal::usize_unsuffixed(*size);
             quote!([#field_type; #size])
         }
         ast::FieldDesc::Array { type_id: Some(type_id), size: None, .. } => {
-            let field_type = format_ident!("{type_id}");
+            let field_type = type_id.to_ident();
             quote!(Vec<#field_type>)
         }
         //ast::Field::Size { .. } | ast::Field::Count { .. } => quote!(),
@@ -83,7 +84,7 @@
 
 pub fn rust_borrow(
     field: &analyzer_ast::Field,
-    scope: &lint::Scope<'_>,
+    scope: &analyzer::Scope<'_>,
 ) -> proc_macro2::TokenStream {
     match &field.desc {
         ast::FieldDesc::Scalar { .. } => quote!(),
diff --git a/src/lib.rs b/src/lib.rs
index cd384bc..a57ee58 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -17,7 +17,6 @@
 pub mod analyzer;
 pub mod ast;
 pub mod backends;
-pub mod lint;
 pub mod parser;
 #[cfg(test)]
 pub mod test_utils;
diff --git a/src/lint.rs b/src/lint.rs
deleted file mode 100644
index 421f364..0000000
--- a/src/lint.rs
+++ /dev/null
@@ -1,259 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-use std::collections::HashMap;
-
-use crate::analyzer::ast as analyzer_ast;
-use crate::ast::*;
-
-/// Gather information about the full AST.
-#[derive(Debug)]
-pub struct Scope<'d> {
-    // Original file.
-    pub file: &'d analyzer_ast::File,
-
-    // Collection of Group, Packet, Enum, Struct, Checksum, and CustomField declarations.
-    pub typedef: HashMap<String, &'d analyzer_ast::Decl>,
-
-    // Collection of Packet, Struct, and Group scope declarations.
-    pub scopes: HashMap<&'d analyzer_ast::Decl, PacketScope<'d>>,
-}
-
-/// Gather information about a Packet, Struct, or Group declaration.
-#[derive(Debug)]
-pub struct PacketScope<'d> {
-    // Original decl.
-    decl: &'d analyzer_ast::Decl,
-
-    // Local and inherited field declarations. Only named fields are preserved.
-    // Saved here for reference for parent constraint resolving.
-    pub all_fields: HashMap<String, &'d analyzer_ast::Field>,
-
-    // Local and inherited constraint declarations.
-    // Saved here for constraint conflict checks.
-    pub all_constraints: HashMap<String, &'d Constraint>,
-}
-
-impl<'d> std::hash::Hash for &'d analyzer_ast::Decl {
-    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
-        std::ptr::hash(*self, state);
-    }
-}
-
-impl<'d> PacketScope<'d> {
-    /// Add parent fields and constraints to the scope.
-    /// Only named fields are imported.
-    fn inherit(
-        &mut self,
-        parent: &PacketScope<'d>,
-        constraints: impl Iterator<Item = &'d Constraint>,
-    ) {
-        // Check constraints.
-        assert!(self.all_constraints.is_empty());
-        self.all_constraints = parent.all_constraints.clone();
-        for constraint in constraints {
-            let id = constraint.id.clone();
-            self.all_constraints.insert(id, constraint);
-        }
-
-        // Save parent fields.
-        self.all_fields = parent.all_fields.clone();
-    }
-
-    /// Iterate over the packet's fields.
-    pub fn iter_fields(&self) -> impl Iterator<Item = &'d analyzer_ast::Field> {
-        self.decl.fields()
-    }
-
-    /// Lookup a field by name. This will also find the special
-    /// `_payload_` and `_body_` fields.
-    pub fn get_packet_field(&self, id: &str) -> Option<&analyzer_ast::Field> {
-        self.decl.fields().find(|field| match &field.desc {
-            FieldDesc::Payload { .. } => id == "_payload_",
-            FieldDesc::Body { .. } => id == "_body_",
-            _ => field.id() == Some(id),
-        })
-    }
-
-    /// Find the payload or body field, if any.
-    pub fn get_payload_field(&self) -> Option<&analyzer_ast::Field> {
-        self.decl
-            .fields()
-            .find(|field| matches!(&field.desc, FieldDesc::Payload { .. } | FieldDesc::Body { .. }))
-    }
-
-    /// Lookup the size field for an array field.
-    pub fn get_array_size_field(&self, id: &str) -> Option<&analyzer_ast::Field> {
-        self.decl.fields().find(|field| match &field.desc {
-            FieldDesc::Size { field_id, .. } | FieldDesc::Count { field_id, .. } => field_id == id,
-            _ => false,
-        })
-    }
-
-    /// Find the size field corresponding to the payload or body
-    /// field of this packet.
-    pub fn get_payload_size_field(&self) -> Option<&analyzer_ast::Field> {
-        self.decl.fields().find(|field| match &field.desc {
-            FieldDesc::Size { field_id, .. } => field_id == "_payload_" || field_id == "_body_",
-            _ => false,
-        })
-    }
-
-    /// Cleanup scope after processing all fields.
-    fn finalize(&mut self) {
-        // Check field shadowing.
-        for f in self.decl.fields() {
-            if let Some(id) = f.id() {
-                self.all_fields.insert(id.to_string(), f);
-            }
-        }
-    }
-}
-
-impl<'d> Scope<'d> {
-    pub fn new(file: &analyzer_ast::File) -> Scope<'_> {
-        let mut scope = Scope { file, typedef: HashMap::new(), scopes: HashMap::new() };
-
-        // Gather top-level declarations.
-        // Validate the top-level scopes (Group, Packet, Typedef).
-        //
-        // TODO: switch to try_insert when stable
-        for decl in &file.declarations {
-            if let Some(id) = decl.id() {
-                scope.typedef.insert(id.to_string(), decl);
-            }
-        }
-
-        scope.finalize();
-        scope
-    }
-
-    // Sort Packet, Struct, and Group declarations by reverse topological
-    // order.
-    fn finalize(&mut self) -> Vec<&'d analyzer_ast::Decl> {
-        // Auxiliary function implementing BFS on Packet tree.
-        enum Mark {
-            Temporary,
-            Permanent,
-        }
-        struct Context<'d> {
-            list: Vec<&'d analyzer_ast::Decl>,
-            visited: HashMap<&'d analyzer_ast::Decl, Mark>,
-            scopes: HashMap<&'d analyzer_ast::Decl, PacketScope<'d>>,
-        }
-
-        fn bfs<'s, 'd>(
-            decl: &'d analyzer_ast::Decl,
-            context: &'s mut Context<'d>,
-            scope: &Scope<'d>,
-        ) -> Option<&'s PacketScope<'d>> {
-            match context.visited.get(&decl) {
-                Some(Mark::Permanent) => return context.scopes.get(&decl),
-                Some(Mark::Temporary) => {
-                    return None;
-                }
-                _ => (),
-            }
-
-            let (parent_id, fields) = match &decl.desc {
-                DeclDesc::Packet { parent_id, fields, .. }
-                | DeclDesc::Struct { parent_id, fields, .. } => (parent_id.as_ref(), fields),
-                DeclDesc::Group { fields, .. } => (None, fields),
-                _ => return None,
-            };
-
-            context.visited.insert(decl, Mark::Temporary);
-            let mut lscope =
-                PacketScope { decl, all_fields: HashMap::new(), all_constraints: HashMap::new() };
-
-            // Iterate over Struct and Group fields.
-            for f in fields {
-                match &f.desc {
-                    FieldDesc::Group { .. } => unreachable!(),
-                    FieldDesc::Typedef { type_id, .. } => match scope.typedef.get(type_id) {
-                        Some(struct_decl @ Decl { desc: DeclDesc::Struct { .. }, .. }) => {
-                            bfs(struct_decl, context, scope);
-                        }
-                        None | Some(_) => (),
-                    },
-                    _ => (),
-                }
-            }
-
-            // Iterate over parent declaration.
-            let parent = parent_id.and_then(|id| scope.typedef.get(id));
-            if let Some(parent_decl) = parent {
-                if let Some(rscope) = bfs(parent_decl, context, scope) {
-                    // Import the parent fields and constraints into the current scope.
-                    lscope.inherit(rscope, decl.constraints())
-                }
-            }
-
-            lscope.finalize();
-            context.list.push(decl);
-            context.visited.insert(decl, Mark::Permanent);
-            context.scopes.insert(decl, lscope);
-            context.scopes.get(&decl)
-        }
-
-        let mut context =
-            Context::<'d> { list: vec![], visited: HashMap::new(), scopes: HashMap::new() };
-
-        for decl in self.typedef.values() {
-            bfs(decl, &mut context, self);
-        }
-
-        self.scopes = context.scopes;
-        context.list
-    }
-
-    pub fn iter_children<'a>(
-        &'a self,
-        id: &'a str,
-    ) -> impl Iterator<Item = &'d analyzer_ast::Decl> + 'a {
-        self.file.iter_children(self.typedef.get(id).unwrap())
-    }
-
-    /// Return the declaration of the typedef type backing the
-    /// selected field.
-    pub fn get_field_declaration(
-        &self,
-        field: &analyzer_ast::Field,
-    ) -> Option<&'d analyzer_ast::Decl> {
-        match &field.desc {
-            FieldDesc::FixedEnum { enum_id, .. } => self.typedef.get(enum_id).copied(),
-            FieldDesc::Array { type_id: Some(type_id), .. } => self.typedef.get(type_id).copied(),
-            FieldDesc::Typedef { type_id, .. } => self.typedef.get(type_id.as_str()).copied(),
-            _ => None,
-        }
-    }
-
-    /// Test if the selected field is a bitfield.
-    pub fn is_bitfield(&self, field: &analyzer_ast::Field) -> bool {
-        match &field.desc {
-            FieldDesc::Size { .. }
-            | FieldDesc::Count { .. }
-            | FieldDesc::ElementSize { .. }
-            | FieldDesc::FixedScalar { .. }
-            | FieldDesc::FixedEnum { .. }
-            | FieldDesc::Reserved { .. }
-            | FieldDesc::Scalar { .. } => true,
-            FieldDesc::Typedef { type_id, .. } => {
-                let field = self.typedef.get(type_id.as_str());
-                matches!(field, Some(Decl { desc: DeclDesc::Enum { .. }, .. }))
-            }
-            _ => false,
-        }
-    }
-}
diff --git a/src/parser.rs b/src/parser.rs
index 6d19648..f9d2ffa 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -69,7 +69,8 @@
         enum_value_list ~
     "}")?
 }
-enum_tag = { enum_range | enum_value }
+enum_other = { identifier ~ "=" ~ ".." }
+enum_tag = { enum_range | enum_value | enum_other }
 enum_tag_list = { enum_tag ~ ("," ~ enum_tag)* ~ ","? }
 enum_declaration = {
     "enum" ~ identifier ~ ":" ~ integer ~ "{" ~
@@ -350,6 +351,17 @@
     }
 }
 
+fn parse_enum_other(node: Node<'_>, context: &Context) -> Result<crate::ast::TagOther, String> {
+    if node.as_rule() != Rule::enum_other {
+        err_unexpected_rule(Rule::enum_other, node.as_rule())
+    } else {
+        let loc = node.as_loc(context);
+        let mut children = node.children();
+        let id = parse_identifier(&mut children)?;
+        Ok(crate::ast::TagOther { id, loc })
+    }
+}
+
 fn parse_enum_tag(node: Node<'_>, context: &Context) -> Result<crate::ast::Tag, String> {
     if node.as_rule() != Rule::enum_tag {
         err_unexpected_rule(Rule::enum_tag, node.as_rule())
@@ -361,6 +373,9 @@
             Some(node) if node.as_rule() == Rule::enum_range => {
                 Ok(crate::ast::Tag::Range(parse_enum_range(node, context)?))
             }
+            Some(node) if node.as_rule() == Rule::enum_other => {
+                Ok(crate::ast::Tag::Other(parse_enum_other(node, context)?))
+            }
             Some(node) => Err(format!(
                 "expected rule {:?} or {:?}, got {:?}",
                 Rule::enum_value,
diff --git a/tests/generated/enum_declaration_big_endian.rs b/tests/generated/enum_declaration_big_endian.rs
index badb3f0..ffc4725 100644
--- a/tests/generated/enum_declaration_big_endian.rs
+++ b/tests/generated/enum_declaration_big_endian.rs
@@ -47,136 +47,278 @@
 #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
 #[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum IncompleteTruncated {
+pub enum IncompleteTruncatedClosed {
     A = 0x0,
     B = 0x1,
 }
-impl TryFrom<u8> for IncompleteTruncated {
+impl TryFrom<u8> for IncompleteTruncatedClosed {
     type Error = u8;
     fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
         match value {
-            0x0 => Ok(IncompleteTruncated::A),
-            0x1 => Ok(IncompleteTruncated::B),
+            0x0 => Ok(IncompleteTruncatedClosed::A),
+            0x1 => Ok(IncompleteTruncatedClosed::B),
             _ => Err(value),
         }
     }
 }
-impl From<&IncompleteTruncated> for u8 {
-    fn from(value: &IncompleteTruncated) -> Self {
+impl From<&IncompleteTruncatedClosed> for u8 {
+    fn from(value: &IncompleteTruncatedClosed) -> Self {
         match value {
-            IncompleteTruncated::A => 0x0,
-            IncompleteTruncated::B => 0x1,
+            IncompleteTruncatedClosed::A => 0x0,
+            IncompleteTruncatedClosed::B => 0x1,
         }
     }
 }
-impl From<IncompleteTruncated> for u8 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for u8 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         (&value).into()
     }
 }
-impl From<IncompleteTruncated> for i8 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for i8 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for i16 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for i16 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for i32 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for i32 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for i64 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for i64 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for u16 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for u16 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for u32 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for u32 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for u64 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for u64 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
 #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
 #[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum IncompleteTruncatedWithRange {
+pub enum IncompleteTruncatedOpen {
+    A,
+    B,
+    Unknown(Private<u8>),
+}
+impl TryFrom<u8> for IncompleteTruncatedOpen {
+    type Error = u8;
+    fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
+        match value {
+            0x0 => Ok(IncompleteTruncatedOpen::A),
+            0x1 => Ok(IncompleteTruncatedOpen::B),
+            0..=0x7 => Ok(IncompleteTruncatedOpen::Unknown(Private(value))),
+            _ => Err(value),
+        }
+    }
+}
+impl From<&IncompleteTruncatedOpen> for u8 {
+    fn from(value: &IncompleteTruncatedOpen) -> Self {
+        match value {
+            IncompleteTruncatedOpen::A => 0x0,
+            IncompleteTruncatedOpen::B => 0x1,
+            IncompleteTruncatedOpen::Unknown(Private(value)) => *value,
+        }
+    }
+}
+impl From<IncompleteTruncatedOpen> for u8 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        (&value).into()
+    }
+}
+impl From<IncompleteTruncatedOpen> for i8 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for i16 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for i32 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for i64 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for u16 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for u32 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for u64 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
+pub enum IncompleteTruncatedClosedWithRange {
     A,
     X,
     Y,
     B(Private<u8>),
 }
-impl TryFrom<u8> for IncompleteTruncatedWithRange {
+impl TryFrom<u8> for IncompleteTruncatedClosedWithRange {
     type Error = u8;
     fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
         match value {
-            0x0 => Ok(IncompleteTruncatedWithRange::A),
-            0x1 => Ok(IncompleteTruncatedWithRange::X),
-            0x2 => Ok(IncompleteTruncatedWithRange::Y),
-            0x1..=0x6 => Ok(IncompleteTruncatedWithRange::B(Private(value))),
+            0x0 => Ok(IncompleteTruncatedClosedWithRange::A),
+            0x1 => Ok(IncompleteTruncatedClosedWithRange::X),
+            0x2 => Ok(IncompleteTruncatedClosedWithRange::Y),
+            0x1..=0x6 => Ok(IncompleteTruncatedClosedWithRange::B(Private(value))),
             _ => Err(value),
         }
     }
 }
-impl From<&IncompleteTruncatedWithRange> for u8 {
-    fn from(value: &IncompleteTruncatedWithRange) -> Self {
+impl From<&IncompleteTruncatedClosedWithRange> for u8 {
+    fn from(value: &IncompleteTruncatedClosedWithRange) -> Self {
         match value {
-            IncompleteTruncatedWithRange::A => 0x0,
-            IncompleteTruncatedWithRange::X => 0x1,
-            IncompleteTruncatedWithRange::Y => 0x2,
-            IncompleteTruncatedWithRange::B(Private(value)) => *value,
+            IncompleteTruncatedClosedWithRange::A => 0x0,
+            IncompleteTruncatedClosedWithRange::X => 0x1,
+            IncompleteTruncatedClosedWithRange::Y => 0x2,
+            IncompleteTruncatedClosedWithRange::B(Private(value)) => *value,
         }
     }
 }
-impl From<IncompleteTruncatedWithRange> for u8 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for u8 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         (&value).into()
     }
 }
-impl From<IncompleteTruncatedWithRange> for i8 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for i8 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for i16 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for i16 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for i32 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for i32 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for i64 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for i64 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for u16 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for u16 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for u32 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for u32 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for u64 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for u64 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
+pub enum IncompleteTruncatedOpenWithRange {
+    A,
+    X,
+    Y,
+    B(Private<u8>),
+    Unknown(Private<u8>),
+}
+impl TryFrom<u8> for IncompleteTruncatedOpenWithRange {
+    type Error = u8;
+    fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
+        match value {
+            0x0 => Ok(IncompleteTruncatedOpenWithRange::A),
+            0x1 => Ok(IncompleteTruncatedOpenWithRange::X),
+            0x2 => Ok(IncompleteTruncatedOpenWithRange::Y),
+            0x1..=0x6 => Ok(IncompleteTruncatedOpenWithRange::B(Private(value))),
+            0..=0x7 => Ok(IncompleteTruncatedOpenWithRange::Unknown(Private(value))),
+            _ => Err(value),
+        }
+    }
+}
+impl From<&IncompleteTruncatedOpenWithRange> for u8 {
+    fn from(value: &IncompleteTruncatedOpenWithRange) -> Self {
+        match value {
+            IncompleteTruncatedOpenWithRange::A => 0x0,
+            IncompleteTruncatedOpenWithRange::X => 0x1,
+            IncompleteTruncatedOpenWithRange::Y => 0x2,
+            IncompleteTruncatedOpenWithRange::B(Private(value)) => *value,
+            IncompleteTruncatedOpenWithRange::Unknown(Private(value)) => *value,
+        }
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for u8 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        (&value).into()
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for i8 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for i16 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for i32 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for i64 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for u16 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for u32 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for u64 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
         u8::from(value) as Self
     }
 }
diff --git a/tests/generated/enum_declaration_little_endian.rs b/tests/generated/enum_declaration_little_endian.rs
index badb3f0..ffc4725 100644
--- a/tests/generated/enum_declaration_little_endian.rs
+++ b/tests/generated/enum_declaration_little_endian.rs
@@ -47,136 +47,278 @@
 #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
 #[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum IncompleteTruncated {
+pub enum IncompleteTruncatedClosed {
     A = 0x0,
     B = 0x1,
 }
-impl TryFrom<u8> for IncompleteTruncated {
+impl TryFrom<u8> for IncompleteTruncatedClosed {
     type Error = u8;
     fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
         match value {
-            0x0 => Ok(IncompleteTruncated::A),
-            0x1 => Ok(IncompleteTruncated::B),
+            0x0 => Ok(IncompleteTruncatedClosed::A),
+            0x1 => Ok(IncompleteTruncatedClosed::B),
             _ => Err(value),
         }
     }
 }
-impl From<&IncompleteTruncated> for u8 {
-    fn from(value: &IncompleteTruncated) -> Self {
+impl From<&IncompleteTruncatedClosed> for u8 {
+    fn from(value: &IncompleteTruncatedClosed) -> Self {
         match value {
-            IncompleteTruncated::A => 0x0,
-            IncompleteTruncated::B => 0x1,
+            IncompleteTruncatedClosed::A => 0x0,
+            IncompleteTruncatedClosed::B => 0x1,
         }
     }
 }
-impl From<IncompleteTruncated> for u8 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for u8 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         (&value).into()
     }
 }
-impl From<IncompleteTruncated> for i8 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for i8 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for i16 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for i16 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for i32 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for i32 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for i64 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for i64 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for u16 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for u16 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for u32 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for u32 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncated> for u64 {
-    fn from(value: IncompleteTruncated) -> Self {
+impl From<IncompleteTruncatedClosed> for u64 {
+    fn from(value: IncompleteTruncatedClosed) -> Self {
         u8::from(value) as Self
     }
 }
 #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
 #[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
-pub enum IncompleteTruncatedWithRange {
+pub enum IncompleteTruncatedOpen {
+    A,
+    B,
+    Unknown(Private<u8>),
+}
+impl TryFrom<u8> for IncompleteTruncatedOpen {
+    type Error = u8;
+    fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
+        match value {
+            0x0 => Ok(IncompleteTruncatedOpen::A),
+            0x1 => Ok(IncompleteTruncatedOpen::B),
+            0..=0x7 => Ok(IncompleteTruncatedOpen::Unknown(Private(value))),
+            _ => Err(value),
+        }
+    }
+}
+impl From<&IncompleteTruncatedOpen> for u8 {
+    fn from(value: &IncompleteTruncatedOpen) -> Self {
+        match value {
+            IncompleteTruncatedOpen::A => 0x0,
+            IncompleteTruncatedOpen::B => 0x1,
+            IncompleteTruncatedOpen::Unknown(Private(value)) => *value,
+        }
+    }
+}
+impl From<IncompleteTruncatedOpen> for u8 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        (&value).into()
+    }
+}
+impl From<IncompleteTruncatedOpen> for i8 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for i16 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for i32 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for i64 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for u16 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for u32 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpen> for u64 {
+    fn from(value: IncompleteTruncatedOpen) -> Self {
+        u8::from(value) as Self
+    }
+}
+#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
+pub enum IncompleteTruncatedClosedWithRange {
     A,
     X,
     Y,
     B(Private<u8>),
 }
-impl TryFrom<u8> for IncompleteTruncatedWithRange {
+impl TryFrom<u8> for IncompleteTruncatedClosedWithRange {
     type Error = u8;
     fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
         match value {
-            0x0 => Ok(IncompleteTruncatedWithRange::A),
-            0x1 => Ok(IncompleteTruncatedWithRange::X),
-            0x2 => Ok(IncompleteTruncatedWithRange::Y),
-            0x1..=0x6 => Ok(IncompleteTruncatedWithRange::B(Private(value))),
+            0x0 => Ok(IncompleteTruncatedClosedWithRange::A),
+            0x1 => Ok(IncompleteTruncatedClosedWithRange::X),
+            0x2 => Ok(IncompleteTruncatedClosedWithRange::Y),
+            0x1..=0x6 => Ok(IncompleteTruncatedClosedWithRange::B(Private(value))),
             _ => Err(value),
         }
     }
 }
-impl From<&IncompleteTruncatedWithRange> for u8 {
-    fn from(value: &IncompleteTruncatedWithRange) -> Self {
+impl From<&IncompleteTruncatedClosedWithRange> for u8 {
+    fn from(value: &IncompleteTruncatedClosedWithRange) -> Self {
         match value {
-            IncompleteTruncatedWithRange::A => 0x0,
-            IncompleteTruncatedWithRange::X => 0x1,
-            IncompleteTruncatedWithRange::Y => 0x2,
-            IncompleteTruncatedWithRange::B(Private(value)) => *value,
+            IncompleteTruncatedClosedWithRange::A => 0x0,
+            IncompleteTruncatedClosedWithRange::X => 0x1,
+            IncompleteTruncatedClosedWithRange::Y => 0x2,
+            IncompleteTruncatedClosedWithRange::B(Private(value)) => *value,
         }
     }
 }
-impl From<IncompleteTruncatedWithRange> for u8 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for u8 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         (&value).into()
     }
 }
-impl From<IncompleteTruncatedWithRange> for i8 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for i8 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for i16 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for i16 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for i32 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for i32 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for i64 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for i64 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for u16 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for u16 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for u32 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for u32 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
         u8::from(value) as Self
     }
 }
-impl From<IncompleteTruncatedWithRange> for u64 {
-    fn from(value: IncompleteTruncatedWithRange) -> Self {
+impl From<IncompleteTruncatedClosedWithRange> for u64 {
+    fn from(value: IncompleteTruncatedClosedWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+#[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+#[cfg_attr(feature = "serde", serde(try_from = "u8", into = "u8"))]
+pub enum IncompleteTruncatedOpenWithRange {
+    A,
+    X,
+    Y,
+    B(Private<u8>),
+    Unknown(Private<u8>),
+}
+impl TryFrom<u8> for IncompleteTruncatedOpenWithRange {
+    type Error = u8;
+    fn try_from(value: u8) -> std::result::Result<Self, Self::Error> {
+        match value {
+            0x0 => Ok(IncompleteTruncatedOpenWithRange::A),
+            0x1 => Ok(IncompleteTruncatedOpenWithRange::X),
+            0x2 => Ok(IncompleteTruncatedOpenWithRange::Y),
+            0x1..=0x6 => Ok(IncompleteTruncatedOpenWithRange::B(Private(value))),
+            0..=0x7 => Ok(IncompleteTruncatedOpenWithRange::Unknown(Private(value))),
+            _ => Err(value),
+        }
+    }
+}
+impl From<&IncompleteTruncatedOpenWithRange> for u8 {
+    fn from(value: &IncompleteTruncatedOpenWithRange) -> Self {
+        match value {
+            IncompleteTruncatedOpenWithRange::A => 0x0,
+            IncompleteTruncatedOpenWithRange::X => 0x1,
+            IncompleteTruncatedOpenWithRange::Y => 0x2,
+            IncompleteTruncatedOpenWithRange::B(Private(value)) => *value,
+            IncompleteTruncatedOpenWithRange::Unknown(Private(value)) => *value,
+        }
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for u8 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        (&value).into()
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for i8 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for i16 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for i32 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for i64 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for u16 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for u32 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
+        u8::from(value) as Self
+    }
+}
+impl From<IncompleteTruncatedOpenWithRange> for u64 {
+    fn from(value: IncompleteTruncatedOpenWithRange) -> Self {
         u8::from(value) as Self
     }
 }
diff --git a/tests/generated/packet_decl_24bit_enum_array_big_endian.rs b/tests/generated/packet_decl_24bit_enum_array_big_endian.rs
index 47b45dc..7717d7a 100644
--- a/tests/generated/packet_decl_24bit_enum_array_big_endian.rs
+++ b/tests/generated/packet_decl_24bit_enum_array_big_endian.rs
@@ -125,10 +125,10 @@
         let x = (0..5)
             .map(|_| {
                 Foo::try_from(bytes.get_mut().get_uint(3) as u32)
-                    .map_err(|_| Error::InvalidEnumValueError {
+                    .map_err(|unknown_val| Error::InvalidEnumValueError {
                         obj: "Bar".to_string(),
                         field: String::new(),
-                        value: 0,
+                        value: unknown_val as u64,
                         type_: "Foo".to_string(),
                     })
             })
diff --git a/tests/generated/packet_decl_24bit_enum_array_little_endian.rs b/tests/generated/packet_decl_24bit_enum_array_little_endian.rs
index a413ce1..2fb9a9c 100644
--- a/tests/generated/packet_decl_24bit_enum_array_little_endian.rs
+++ b/tests/generated/packet_decl_24bit_enum_array_little_endian.rs
@@ -125,10 +125,10 @@
         let x = (0..5)
             .map(|_| {
                 Foo::try_from(bytes.get_mut().get_uint_le(3) as u32)
-                    .map_err(|_| Error::InvalidEnumValueError {
+                    .map_err(|unknown_val| Error::InvalidEnumValueError {
                         obj: "Bar".to_string(),
                         field: String::new(),
-                        value: 0,
+                        value: unknown_val as u64,
                         type_: "Foo".to_string(),
                     })
             })
diff --git a/tests/generated/packet_decl_24bit_enum_big_endian.rs b/tests/generated/packet_decl_24bit_enum_big_endian.rs
index c63b562..272e52d 100644
--- a/tests/generated/packet_decl_24bit_enum_big_endian.rs
+++ b/tests/generated/packet_decl_24bit_enum_big_endian.rs
@@ -123,10 +123,10 @@
             });
         }
         let x = Foo::try_from(bytes.get_mut().get_uint(3) as u32)
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Bar".to_string(),
                 field: "x".to_string(),
-                value: bytes.get_mut().get_uint(3) as u32 as u64,
+                value: unknown_val as u64,
                 type_: "Foo".to_string(),
             })?;
         Ok(Self { x })
diff --git a/tests/generated/packet_decl_24bit_enum_little_endian.rs b/tests/generated/packet_decl_24bit_enum_little_endian.rs
index 2e58a9e..278336c 100644
--- a/tests/generated/packet_decl_24bit_enum_little_endian.rs
+++ b/tests/generated/packet_decl_24bit_enum_little_endian.rs
@@ -123,10 +123,10 @@
             });
         }
         let x = Foo::try_from(bytes.get_mut().get_uint_le(3) as u32)
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Bar".to_string(),
                 field: "x".to_string(),
-                value: bytes.get_mut().get_uint_le(3) as u32 as u64,
+                value: unknown_val as u64,
                 type_: "Foo".to_string(),
             })?;
         Ok(Self { x })
diff --git a/tests/generated/packet_decl_64bit_enum_array_big_endian.rs b/tests/generated/packet_decl_64bit_enum_array_big_endian.rs
index dd28666..caeba78 100644
--- a/tests/generated/packet_decl_64bit_enum_array_big_endian.rs
+++ b/tests/generated/packet_decl_64bit_enum_array_big_endian.rs
@@ -110,10 +110,10 @@
         let x = (0..7)
             .map(|_| {
                 Foo::try_from(bytes.get_mut().get_u64())
-                    .map_err(|_| Error::InvalidEnumValueError {
+                    .map_err(|unknown_val| Error::InvalidEnumValueError {
                         obj: "Bar".to_string(),
                         field: String::new(),
-                        value: 0,
+                        value: unknown_val as u64,
                         type_: "Foo".to_string(),
                     })
             })
diff --git a/tests/generated/packet_decl_64bit_enum_array_little_endian.rs b/tests/generated/packet_decl_64bit_enum_array_little_endian.rs
index 371c1eb..2a698b8 100644
--- a/tests/generated/packet_decl_64bit_enum_array_little_endian.rs
+++ b/tests/generated/packet_decl_64bit_enum_array_little_endian.rs
@@ -110,10 +110,10 @@
         let x = (0..7)
             .map(|_| {
                 Foo::try_from(bytes.get_mut().get_u64_le())
-                    .map_err(|_| Error::InvalidEnumValueError {
+                    .map_err(|unknown_val| Error::InvalidEnumValueError {
                         obj: "Bar".to_string(),
                         field: String::new(),
-                        value: 0,
+                        value: unknown_val as u64,
                         type_: "Foo".to_string(),
                     })
             })
diff --git a/tests/generated/packet_decl_64bit_enum_big_endian.rs b/tests/generated/packet_decl_64bit_enum_big_endian.rs
index bd46742..f29c654 100644
--- a/tests/generated/packet_decl_64bit_enum_big_endian.rs
+++ b/tests/generated/packet_decl_64bit_enum_big_endian.rs
@@ -108,10 +108,10 @@
             });
         }
         let x = Foo::try_from(bytes.get_mut().get_u64())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Bar".to_string(),
                 field: "x".to_string(),
-                value: bytes.get_mut().get_u64() as u64,
+                value: unknown_val as u64,
                 type_: "Foo".to_string(),
             })?;
         Ok(Self { x })
diff --git a/tests/generated/packet_decl_64bit_enum_little_endian.rs b/tests/generated/packet_decl_64bit_enum_little_endian.rs
index 75a43b6..0bc5a12 100644
--- a/tests/generated/packet_decl_64bit_enum_little_endian.rs
+++ b/tests/generated/packet_decl_64bit_enum_little_endian.rs
@@ -108,10 +108,10 @@
             });
         }
         let x = Foo::try_from(bytes.get_mut().get_u64_le())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Bar".to_string(),
                 field: "x".to_string(),
-                value: bytes.get_mut().get_u64_le() as u64,
+                value: unknown_val as u64,
                 type_: "Foo".to_string(),
             })?;
         Ok(Self { x })
diff --git a/tests/generated/packet_decl_8bit_enum_array_big_endian.rs b/tests/generated/packet_decl_8bit_enum_array_big_endian.rs
index 4c16ddf..f36f1ba 100644
--- a/tests/generated/packet_decl_8bit_enum_array_big_endian.rs
+++ b/tests/generated/packet_decl_8bit_enum_array_big_endian.rs
@@ -140,10 +140,10 @@
         let x = (0..3)
             .map(|_| {
                 Foo::try_from(bytes.get_mut().get_u8())
-                    .map_err(|_| Error::InvalidEnumValueError {
+                    .map_err(|unknown_val| Error::InvalidEnumValueError {
                         obj: "Bar".to_string(),
                         field: String::new(),
-                        value: 0,
+                        value: unknown_val as u64,
                         type_: "Foo".to_string(),
                     })
             })
diff --git a/tests/generated/packet_decl_8bit_enum_array_little_endian.rs b/tests/generated/packet_decl_8bit_enum_array_little_endian.rs
index 4c16ddf..f36f1ba 100644
--- a/tests/generated/packet_decl_8bit_enum_array_little_endian.rs
+++ b/tests/generated/packet_decl_8bit_enum_array_little_endian.rs
@@ -140,10 +140,10 @@
         let x = (0..3)
             .map(|_| {
                 Foo::try_from(bytes.get_mut().get_u8())
-                    .map_err(|_| Error::InvalidEnumValueError {
+                    .map_err(|unknown_val| Error::InvalidEnumValueError {
                         obj: "Bar".to_string(),
                         field: String::new(),
-                        value: 0,
+                        value: unknown_val as u64,
                         type_: "Foo".to_string(),
                     })
             })
diff --git a/tests/generated/packet_decl_8bit_enum_big_endian.rs b/tests/generated/packet_decl_8bit_enum_big_endian.rs
index 8ceb132..bccdaeb 100644
--- a/tests/generated/packet_decl_8bit_enum_big_endian.rs
+++ b/tests/generated/packet_decl_8bit_enum_big_endian.rs
@@ -138,10 +138,10 @@
             });
         }
         let x = Foo::try_from(bytes.get_mut().get_u8())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Bar".to_string(),
                 field: "x".to_string(),
-                value: bytes.get_mut().get_u8() as u64,
+                value: unknown_val as u64,
                 type_: "Foo".to_string(),
             })?;
         Ok(Self { x })
diff --git a/tests/generated/packet_decl_8bit_enum_little_endian.rs b/tests/generated/packet_decl_8bit_enum_little_endian.rs
index 8ceb132..bccdaeb 100644
--- a/tests/generated/packet_decl_8bit_enum_little_endian.rs
+++ b/tests/generated/packet_decl_8bit_enum_little_endian.rs
@@ -138,10 +138,10 @@
             });
         }
         let x = Foo::try_from(bytes.get_mut().get_u8())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Bar".to_string(),
                 field: "x".to_string(),
-                value: bytes.get_mut().get_u8() as u64,
+                value: unknown_val as u64,
                 type_: "Foo".to_string(),
             })?;
         Ok(Self { x })
diff --git a/tests/generated/packet_decl_child_packets_big_endian.rs b/tests/generated/packet_decl_child_packets_big_endian.rs
index 7a000c7..8b3e05d 100644
--- a/tests/generated/packet_decl_child_packets_big_endian.rs
+++ b/tests/generated/packet_decl_child_packets_big_endian.rs
@@ -166,10 +166,10 @@
             });
         }
         let b = Enum16::try_from(bytes.get_mut().get_u16())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Foo".to_string(),
                 field: "b".to_string(),
-                value: bytes.get_mut().get_u16() as u64,
+                value: unknown_val as u64,
                 type_: "Enum16".to_string(),
             })?;
         if bytes.get().remaining() < 1 {
diff --git a/tests/generated/packet_decl_child_packets_little_endian.rs b/tests/generated/packet_decl_child_packets_little_endian.rs
index 7fe9783..8a464b2 100644
--- a/tests/generated/packet_decl_child_packets_little_endian.rs
+++ b/tests/generated/packet_decl_child_packets_little_endian.rs
@@ -166,10 +166,10 @@
             });
         }
         let b = Enum16::try_from(bytes.get_mut().get_u16_le())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Foo".to_string(),
                 field: "b".to_string(),
-                value: bytes.get_mut().get_u16_le() as u64,
+                value: unknown_val as u64,
                 type_: "Enum16".to_string(),
             })?;
         if bytes.get().remaining() < 1 {
diff --git a/tests/generated/packet_decl_fixed_enum_field_big_endian.rs b/tests/generated/packet_decl_fixed_enum_field_big_endian.rs
index 2ec6b58..5c780ad 100644
--- a/tests/generated/packet_decl_fixed_enum_field_big_endian.rs
+++ b/tests/generated/packet_decl_fixed_enum_field_big_endian.rs
@@ -143,10 +143,11 @@
             });
         }
         let chunk = bytes.get_mut().get_u64();
-        if (chunk & 0x7f) as u8 != u8::from(Enum7::A) {
+        let fixed_value = (chunk & 0x7f) as u8;
+        if fixed_value != u8::from(Enum7::A) {
             return Err(Error::InvalidFixedValue {
                 expected: u8::from(Enum7::A) as u64,
-                actual: (chunk & 0x7f) as u8 as u64,
+                actual: fixed_value as u64,
             });
         }
         let b = ((chunk >> 7) & 0x1ff_ffff_ffff_ffff_u64);
diff --git a/tests/generated/packet_decl_fixed_enum_field_little_endian.rs b/tests/generated/packet_decl_fixed_enum_field_little_endian.rs
index 13aecd9..45486c5 100644
--- a/tests/generated/packet_decl_fixed_enum_field_little_endian.rs
+++ b/tests/generated/packet_decl_fixed_enum_field_little_endian.rs
@@ -143,10 +143,11 @@
             });
         }
         let chunk = bytes.get_mut().get_u64_le();
-        if (chunk & 0x7f) as u8 != u8::from(Enum7::A) {
+        let fixed_value = (chunk & 0x7f) as u8;
+        if fixed_value != u8::from(Enum7::A) {
             return Err(Error::InvalidFixedValue {
                 expected: u8::from(Enum7::A) as u64,
-                actual: (chunk & 0x7f) as u8 as u64,
+                actual: fixed_value as u64,
             });
         }
         let b = ((chunk >> 7) & 0x1ff_ffff_ffff_ffff_u64);
diff --git a/tests/generated/packet_decl_fixed_scalar_field_big_endian.rs b/tests/generated/packet_decl_fixed_scalar_field_big_endian.rs
index e047669..349b183 100644
--- a/tests/generated/packet_decl_fixed_scalar_field_big_endian.rs
+++ b/tests/generated/packet_decl_fixed_scalar_field_big_endian.rs
@@ -77,10 +77,11 @@
             });
         }
         let chunk = bytes.get_mut().get_u64();
+        let fixed_value = (chunk & 0x7f) as u8;
         if (chunk & 0x7f) as u8 != 7 {
             return Err(Error::InvalidFixedValue {
                 expected: 7,
-                actual: (chunk & 0x7f) as u8 as u64,
+                actual: fixed_value as u64,
             });
         }
         let b = ((chunk >> 7) & 0x1ff_ffff_ffff_ffff_u64);
diff --git a/tests/generated/packet_decl_fixed_scalar_field_little_endian.rs b/tests/generated/packet_decl_fixed_scalar_field_little_endian.rs
index 39922ca..96b9064 100644
--- a/tests/generated/packet_decl_fixed_scalar_field_little_endian.rs
+++ b/tests/generated/packet_decl_fixed_scalar_field_little_endian.rs
@@ -77,10 +77,11 @@
             });
         }
         let chunk = bytes.get_mut().get_u64_le();
+        let fixed_value = (chunk & 0x7f) as u8;
         if (chunk & 0x7f) as u8 != 7 {
             return Err(Error::InvalidFixedValue {
                 expected: 7,
-                actual: (chunk & 0x7f) as u8 as u64,
+                actual: fixed_value as u64,
             });
         }
         let b = ((chunk >> 7) & 0x1ff_ffff_ffff_ffff_u64);
diff --git a/tests/generated/packet_decl_grand_children_big_endian.rs b/tests/generated/packet_decl_grand_children_big_endian.rs
index 896a46f..f1a2cac 100644
--- a/tests/generated/packet_decl_grand_children_big_endian.rs
+++ b/tests/generated/packet_decl_grand_children_big_endian.rs
@@ -157,10 +157,10 @@
             });
         }
         let foo = Enum16::try_from(bytes.get_mut().get_u16())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Parent".to_string(),
                 field: "foo".to_string(),
-                value: bytes.get_mut().get_u16() as u64,
+                value: unknown_val as u64,
                 type_: "Enum16".to_string(),
             })?;
         if bytes.get().remaining() < 2 {
@@ -171,10 +171,10 @@
             });
         }
         let bar = Enum16::try_from(bytes.get_mut().get_u16())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Parent".to_string(),
                 field: "bar".to_string(),
-                value: bytes.get_mut().get_u16() as u64,
+                value: unknown_val as u64,
                 type_: "Enum16".to_string(),
             })?;
         if bytes.get().remaining() < 2 {
@@ -185,10 +185,10 @@
             });
         }
         let baz = Enum16::try_from(bytes.get_mut().get_u16())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Parent".to_string(),
                 field: "baz".to_string(),
-                value: bytes.get_mut().get_u16() as u64,
+                value: unknown_val as u64,
                 type_: "Enum16".to_string(),
             })?;
         if bytes.get().remaining() < 1 {
@@ -389,10 +389,10 @@
             });
         }
         let quux = Enum16::try_from(bytes.get_mut().get_u16())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Child".to_string(),
                 field: "quux".to_string(),
-                value: bytes.get_mut().get_u16() as u64,
+                value: unknown_val as u64,
                 type_: "Enum16".to_string(),
             })?;
         let payload = bytes.get();
diff --git a/tests/generated/packet_decl_grand_children_little_endian.rs b/tests/generated/packet_decl_grand_children_little_endian.rs
index 4a672a6..66fa76a 100644
--- a/tests/generated/packet_decl_grand_children_little_endian.rs
+++ b/tests/generated/packet_decl_grand_children_little_endian.rs
@@ -157,10 +157,10 @@
             });
         }
         let foo = Enum16::try_from(bytes.get_mut().get_u16_le())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Parent".to_string(),
                 field: "foo".to_string(),
-                value: bytes.get_mut().get_u16_le() as u64,
+                value: unknown_val as u64,
                 type_: "Enum16".to_string(),
             })?;
         if bytes.get().remaining() < 2 {
@@ -171,10 +171,10 @@
             });
         }
         let bar = Enum16::try_from(bytes.get_mut().get_u16_le())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Parent".to_string(),
                 field: "bar".to_string(),
-                value: bytes.get_mut().get_u16_le() as u64,
+                value: unknown_val as u64,
                 type_: "Enum16".to_string(),
             })?;
         if bytes.get().remaining() < 2 {
@@ -185,10 +185,10 @@
             });
         }
         let baz = Enum16::try_from(bytes.get_mut().get_u16_le())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Parent".to_string(),
                 field: "baz".to_string(),
-                value: bytes.get_mut().get_u16_le() as u64,
+                value: unknown_val as u64,
                 type_: "Enum16".to_string(),
             })?;
         if bytes.get().remaining() < 1 {
@@ -389,10 +389,10 @@
             });
         }
         let quux = Enum16::try_from(bytes.get_mut().get_u16_le())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Child".to_string(),
                 field: "quux".to_string(),
-                value: bytes.get_mut().get_u16_le() as u64,
+                value: unknown_val as u64,
                 type_: "Enum16".to_string(),
             })?;
         let payload = bytes.get();
diff --git a/tests/generated/packet_decl_mixed_scalars_enums_big_endian.rs b/tests/generated/packet_decl_mixed_scalars_enums_big_endian.rs
index 122b202..7dcf2e6 100644
--- a/tests/generated/packet_decl_mixed_scalars_enums_big_endian.rs
+++ b/tests/generated/packet_decl_mixed_scalars_enums_big_endian.rs
@@ -206,18 +206,18 @@
         }
         let chunk = bytes.get_mut().get_uint(3) as u32;
         let x = Enum7::try_from((chunk & 0x7f) as u8)
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Foo".to_string(),
                 field: "x".to_string(),
-                value: (chunk & 0x7f) as u8 as u64,
+                value: unknown_val as u64,
                 type_: "Enum7".to_string(),
             })?;
         let y = ((chunk >> 7) & 0x1f) as u8;
         let z = Enum9::try_from(((chunk >> 12) & 0x1ff) as u16)
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Foo".to_string(),
                 field: "z".to_string(),
-                value: ((chunk >> 12) & 0x1ff) as u16 as u64,
+                value: unknown_val as u64,
                 type_: "Enum9".to_string(),
             })?;
         let w = ((chunk >> 21) & 0x7) as u8;
diff --git a/tests/generated/packet_decl_mixed_scalars_enums_little_endian.rs b/tests/generated/packet_decl_mixed_scalars_enums_little_endian.rs
index 0cc3d92..667dab3 100644
--- a/tests/generated/packet_decl_mixed_scalars_enums_little_endian.rs
+++ b/tests/generated/packet_decl_mixed_scalars_enums_little_endian.rs
@@ -206,18 +206,18 @@
         }
         let chunk = bytes.get_mut().get_uint_le(3) as u32;
         let x = Enum7::try_from((chunk & 0x7f) as u8)
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Foo".to_string(),
                 field: "x".to_string(),
-                value: (chunk & 0x7f) as u8 as u64,
+                value: unknown_val as u64,
                 type_: "Enum7".to_string(),
             })?;
         let y = ((chunk >> 7) & 0x1f) as u8;
         let z = Enum9::try_from(((chunk >> 12) & 0x1ff) as u16)
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Foo".to_string(),
                 field: "z".to_string(),
-                value: ((chunk >> 12) & 0x1ff) as u16 as u64,
+                value: unknown_val as u64,
                 type_: "Enum9".to_string(),
             })?;
         let w = ((chunk >> 21) & 0x7) as u8;
diff --git a/tests/generated/packet_decl_parent_with_alias_child_big_endian.rs b/tests/generated/packet_decl_parent_with_alias_child_big_endian.rs
index 8c9232d..785c0ee 100644
--- a/tests/generated/packet_decl_parent_with_alias_child_big_endian.rs
+++ b/tests/generated/packet_decl_parent_with_alias_child_big_endian.rs
@@ -169,10 +169,10 @@
             });
         }
         let v = Enum8::try_from(bytes.get_mut().get_u8())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Parent".to_string(),
                 field: "v".to_string(),
-                value: bytes.get_mut().get_u8() as u64,
+                value: unknown_val as u64,
                 type_: "Enum8".to_string(),
             })?;
         let payload = bytes.get();
diff --git a/tests/generated/packet_decl_parent_with_alias_child_little_endian.rs b/tests/generated/packet_decl_parent_with_alias_child_little_endian.rs
index 8c9232d..785c0ee 100644
--- a/tests/generated/packet_decl_parent_with_alias_child_little_endian.rs
+++ b/tests/generated/packet_decl_parent_with_alias_child_little_endian.rs
@@ -169,10 +169,10 @@
             });
         }
         let v = Enum8::try_from(bytes.get_mut().get_u8())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Parent".to_string(),
                 field: "v".to_string(),
-                value: bytes.get_mut().get_u8() as u64,
+                value: unknown_val as u64,
                 type_: "Enum8".to_string(),
             })?;
         let payload = bytes.get();
diff --git a/tests/generated/packet_decl_parent_with_no_payload_big_endian.rs b/tests/generated/packet_decl_parent_with_no_payload_big_endian.rs
index 4f58e26..82c45c5 100644
--- a/tests/generated/packet_decl_parent_with_no_payload_big_endian.rs
+++ b/tests/generated/packet_decl_parent_with_no_payload_big_endian.rs
@@ -160,10 +160,10 @@
             });
         }
         let v = Enum8::try_from(bytes.get_mut().get_u8())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Parent".to_string(),
                 field: "v".to_string(),
-                value: bytes.get_mut().get_u8() as u64,
+                value: unknown_val as u64,
                 type_: "Enum8".to_string(),
             })?;
         let payload: &[u8] = &[];
diff --git a/tests/generated/packet_decl_parent_with_no_payload_little_endian.rs b/tests/generated/packet_decl_parent_with_no_payload_little_endian.rs
index 4f58e26..82c45c5 100644
--- a/tests/generated/packet_decl_parent_with_no_payload_little_endian.rs
+++ b/tests/generated/packet_decl_parent_with_no_payload_little_endian.rs
@@ -160,10 +160,10 @@
             });
         }
         let v = Enum8::try_from(bytes.get_mut().get_u8())
-            .map_err(|_| Error::InvalidEnumValueError {
+            .map_err(|unknown_val| Error::InvalidEnumValueError {
                 obj: "Parent".to_string(),
                 field: "v".to_string(),
-                value: bytes.get_mut().get_u8() as u64,
+                value: unknown_val as u64,
                 type_: "Enum8".to_string(),
             })?;
         let payload: &[u8] = &[];
diff --git a/tests/generated/reserved_identifier_big_endian.rs b/tests/generated/reserved_identifier_big_endian.rs
new file mode 100644
index 0000000..1f13532
--- /dev/null
+++ b/tests/generated/reserved_identifier_big_endian.rs
@@ -0,0 +1,145 @@
+#![rustfmt::skip]
+/// @generated rust packets from test.
+use bytes::{Buf, BufMut, Bytes, BytesMut};
+use std::convert::{TryFrom, TryInto};
+use std::cell::Cell;
+use std::fmt;
+use thiserror::Error;
+type Result<T> = std::result::Result<T, Error>;
+/// Private prevents users from creating arbitrary scalar values
+/// in situations where the value needs to be validated.
+/// Users can freely deref the value, but only the backend
+/// may create it.
+#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub struct Private<T>(T);
+impl<T> std::ops::Deref for Private<T> {
+    type Target = T;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+#[derive(Debug, Error)]
+pub enum Error {
+    #[error("Packet parsing failed")]
+    InvalidPacketError,
+    #[error("{field} was {value:x}, which is not known")]
+    ConstraintOutOfBounds { field: String, value: u64 },
+    #[error("Got {actual:x}, expected {expected:x}")]
+    InvalidFixedValue { expected: u64, actual: u64 },
+    #[error("when parsing {obj} needed length of {wanted} but got {got}")]
+    InvalidLengthError { obj: String, wanted: usize, got: usize },
+    #[error(
+        "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
+    )]
+    InvalidArraySize { array: usize, element: usize },
+    #[error("Due to size restrictions a struct could not be parsed.")]
+    ImpossibleStructError,
+    #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
+    InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
+    #[error("expected child {expected}, got {actual}")]
+    InvalidChildError { expected: &'static str, actual: String },
+}
+pub trait Packet {
+    fn to_bytes(self) -> Bytes;
+    fn to_vec(self) -> Vec<u8>;
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct TestData {
+    r#type: u8,
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct Test {
+    #[cfg_attr(feature = "serde", serde(flatten))]
+    test: TestData,
+}
+#[derive(Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct TestBuilder {
+    pub r#type: u8,
+}
+impl TestData {
+    fn conforms(bytes: &[u8]) -> bool {
+        bytes.len() >= 1
+    }
+    fn parse(bytes: &[u8]) -> Result<Self> {
+        let mut cell = Cell::new(bytes);
+        let packet = Self::parse_inner(&mut cell)?;
+        Ok(packet)
+    }
+    fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
+        if bytes.get().remaining() < 1 {
+            return Err(Error::InvalidLengthError {
+                obj: "Test".to_string(),
+                wanted: 1,
+                got: bytes.get().remaining(),
+            });
+        }
+        let r#type = bytes.get_mut().get_u8();
+        Ok(Self { r#type })
+    }
+    fn write_to(&self, buffer: &mut BytesMut) {
+        buffer.put_u8(self.r#type);
+    }
+    fn get_total_size(&self) -> usize {
+        self.get_size()
+    }
+    fn get_size(&self) -> usize {
+        1
+    }
+}
+impl Packet for Test {
+    fn to_bytes(self) -> Bytes {
+        let mut buffer = BytesMut::with_capacity(self.test.get_size());
+        self.test.write_to(&mut buffer);
+        buffer.freeze()
+    }
+    fn to_vec(self) -> Vec<u8> {
+        self.to_bytes().to_vec()
+    }
+}
+impl From<Test> for Bytes {
+    fn from(packet: Test) -> Self {
+        packet.to_bytes()
+    }
+}
+impl From<Test> for Vec<u8> {
+    fn from(packet: Test) -> Self {
+        packet.to_vec()
+    }
+}
+impl Test {
+    pub fn parse(bytes: &[u8]) -> Result<Self> {
+        let mut cell = Cell::new(bytes);
+        let packet = Self::parse_inner(&mut cell)?;
+        Ok(packet)
+    }
+    fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
+        let data = TestData::parse_inner(&mut bytes)?;
+        Self::new(data)
+    }
+    fn new(test: TestData) -> Result<Self> {
+        Ok(Self { test })
+    }
+    pub fn get_type(&self) -> u8 {
+        self.test.r#type
+    }
+    fn write_to(&self, buffer: &mut BytesMut) {
+        self.test.write_to(buffer)
+    }
+    pub fn get_size(&self) -> usize {
+        self.test.get_size()
+    }
+}
+impl TestBuilder {
+    pub fn build(self) -> Test {
+        let test = TestData { r#type: self.r#type };
+        Test::new(test).unwrap()
+    }
+}
+impl From<TestBuilder> for Test {
+    fn from(builder: TestBuilder) -> Test {
+        builder.build().into()
+    }
+}
diff --git a/tests/generated/reserved_identifier_little_endian.rs b/tests/generated/reserved_identifier_little_endian.rs
new file mode 100644
index 0000000..1f13532
--- /dev/null
+++ b/tests/generated/reserved_identifier_little_endian.rs
@@ -0,0 +1,145 @@
+#![rustfmt::skip]
+/// @generated rust packets from test.
+use bytes::{Buf, BufMut, Bytes, BytesMut};
+use std::convert::{TryFrom, TryInto};
+use std::cell::Cell;
+use std::fmt;
+use thiserror::Error;
+type Result<T> = std::result::Result<T, Error>;
+/// Private prevents users from creating arbitrary scalar values
+/// in situations where the value needs to be validated.
+/// Users can freely deref the value, but only the backend
+/// may create it.
+#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub struct Private<T>(T);
+impl<T> std::ops::Deref for Private<T> {
+    type Target = T;
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+#[derive(Debug, Error)]
+pub enum Error {
+    #[error("Packet parsing failed")]
+    InvalidPacketError,
+    #[error("{field} was {value:x}, which is not known")]
+    ConstraintOutOfBounds { field: String, value: u64 },
+    #[error("Got {actual:x}, expected {expected:x}")]
+    InvalidFixedValue { expected: u64, actual: u64 },
+    #[error("when parsing {obj} needed length of {wanted} but got {got}")]
+    InvalidLengthError { obj: String, wanted: usize, got: usize },
+    #[error(
+        "array size ({array} bytes) is not a multiple of the element size ({element} bytes)"
+    )]
+    InvalidArraySize { array: usize, element: usize },
+    #[error("Due to size restrictions a struct could not be parsed.")]
+    ImpossibleStructError,
+    #[error("when parsing field {obj}.{field}, {value} is not a valid {type_} value")]
+    InvalidEnumValueError { obj: String, field: String, value: u64, type_: String },
+    #[error("expected child {expected}, got {actual}")]
+    InvalidChildError { expected: &'static str, actual: String },
+}
+pub trait Packet {
+    fn to_bytes(self) -> Bytes;
+    fn to_vec(self) -> Vec<u8>;
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct TestData {
+    r#type: u8,
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct Test {
+    #[cfg_attr(feature = "serde", serde(flatten))]
+    test: TestData,
+}
+#[derive(Debug)]
+#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
+pub struct TestBuilder {
+    pub r#type: u8,
+}
+impl TestData {
+    fn conforms(bytes: &[u8]) -> bool {
+        bytes.len() >= 1
+    }
+    fn parse(bytes: &[u8]) -> Result<Self> {
+        let mut cell = Cell::new(bytes);
+        let packet = Self::parse_inner(&mut cell)?;
+        Ok(packet)
+    }
+    fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
+        if bytes.get().remaining() < 1 {
+            return Err(Error::InvalidLengthError {
+                obj: "Test".to_string(),
+                wanted: 1,
+                got: bytes.get().remaining(),
+            });
+        }
+        let r#type = bytes.get_mut().get_u8();
+        Ok(Self { r#type })
+    }
+    fn write_to(&self, buffer: &mut BytesMut) {
+        buffer.put_u8(self.r#type);
+    }
+    fn get_total_size(&self) -> usize {
+        self.get_size()
+    }
+    fn get_size(&self) -> usize {
+        1
+    }
+}
+impl Packet for Test {
+    fn to_bytes(self) -> Bytes {
+        let mut buffer = BytesMut::with_capacity(self.test.get_size());
+        self.test.write_to(&mut buffer);
+        buffer.freeze()
+    }
+    fn to_vec(self) -> Vec<u8> {
+        self.to_bytes().to_vec()
+    }
+}
+impl From<Test> for Bytes {
+    fn from(packet: Test) -> Self {
+        packet.to_bytes()
+    }
+}
+impl From<Test> for Vec<u8> {
+    fn from(packet: Test) -> Self {
+        packet.to_vec()
+    }
+}
+impl Test {
+    pub fn parse(bytes: &[u8]) -> Result<Self> {
+        let mut cell = Cell::new(bytes);
+        let packet = Self::parse_inner(&mut cell)?;
+        Ok(packet)
+    }
+    fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
+        let data = TestData::parse_inner(&mut bytes)?;
+        Self::new(data)
+    }
+    fn new(test: TestData) -> Result<Self> {
+        Ok(Self { test })
+    }
+    pub fn get_type(&self) -> u8 {
+        self.test.r#type
+    }
+    fn write_to(&self, buffer: &mut BytesMut) {
+        self.test.write_to(buffer)
+    }
+    pub fn get_size(&self) -> usize {
+        self.test.get_size()
+    }
+}
+impl TestBuilder {
+    pub fn build(self) -> Test {
+        let test = TestData { r#type: self.r#type };
+        Test::new(test).unwrap()
+    }
+}
+impl From<TestBuilder> for Test {
+    fn from(builder: TestBuilder) -> Test {
+        builder.build().into()
+    }
+}