aboutsummaryrefslogtreecommitdiff
path: root/bumble/at.py
blob: 78a4b0865bd12e5cf3ef5eec19884061e45c5ad4 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import List, Union


def tokenize_parameters(buffer: bytes) -> List[bytes]:
    """Split input parameters into tokens.

    Removes space characters outside of double quote blocks:
    T-rec-V-25 - 5.2.1 Command line general format: "Space characters (IA5 2/0)
    are ignored [..], unless they are embedded in numeric or string constants"

    Returns the parameter values (string constants have their surrounding
    quotes stripped) interleaved with the separator tokens b',', b'(', b')'.
    Empty tokens are dropped from the result.

    Raises ValueError in case of invalid input string."""

    tokens = []
    in_quotes = False
    token = bytearray()
    for b in buffer:
        char = bytearray([b])

        if in_quotes:
            token.extend(char)
            if char == b'\"':
                # Closing quote: emit the string constant without its quotes.
                in_quotes = False
                tokens.append(token[1:-1])
                token = bytearray()
        else:
            if char == b' ':
                pass  # Spaces outside of quotes are ignored (V.250 5.2.1).
            elif char == b',' or char == b')':
                tokens.append(token)
                tokens.append(char)
                token = bytearray()
            elif char == b'(':
                if len(token) > 0:
                    raise ValueError("open_paren following regular character")
                tokens.append(char)
            elif char == b'"':
                if len(token) > 0:
                    raise ValueError("quote following regular character")
                in_quotes = True
                token.extend(char)
            else:
                token.extend(char)

    # Fix: a string constant that is never closed is invalid input. The
    # previous code silently returned the partial token with its opening
    # quote character still attached, despite the documented contract that
    # invalid input raises ValueError.
    if in_quotes:
        raise ValueError("unterminated quote")

    tokens.append(token)
    return [bytes(token) for token in tokens if len(token) > 0]


def parse_parameters(buffer: bytes) -> List[Union[bytes, list]]:
    """Parse the parameters using the comma and parenthesis separators.

    Commas separate sibling values; parentheses open and close nested
    sub-lists. A missing value between separators yields an empty bytes
    element.

    Raises ValueError in case of invalid input string."""

    # Stack of partially-built lists; the bottom entry is the top-level
    # result, and each open paren pushes a new nesting level.
    stack: List[list] = [[]]
    pending: Union[bytes, list] = b''

    for piece in tokenize_parameters(buffer):
        if piece == b'(':
            stack.append([])
        elif piece == b')':
            if len(stack) == 1:
                raise ValueError("close_paren without matching open_paren")
            stack[-1].append(pending)
            # The completed sub-list becomes the pending value one level up.
            pending = stack.pop()
        elif piece == b',':
            stack[-1].append(pending)
            pending = b''
        else:
            pending = piece

    stack[-1].append(pending)
    if len(stack) != 1:
        raise ValueError("missing close_paren")
    return stack[0]