Description: Remove variable annotations that require Python 3.6 (https://docs.python.org/3/whatsnew/3.6.html#whatsnew36-pep526)
Author: Olivier Tilloy <olivier.tilloy@canonical.com>
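
PEP 526 introduced two annotation forms, both a SyntaxError on Python < 3.6:
bare annotations in a class body and annotated assignments. This patch drops
both; the attributes are still created at runtime by the ordinary
assignments. A minimal sketch of each form next to a pre-3.6 equivalent,
using simplified names modeled on the Node class patched below:

    # Python >= 3.6 only: bare class-body annotation (PEP 526)
    class Node:
        children: dict

    # Python >= 3.6 only: annotated assignment (PEP 526)
    nits: list = []

    # Runs on Python 3.5: plain assignment, with a PEP 484 type
    # comment if the hint is still wanted
    class Node:
        def __init__(self):
            self.children = {}  # type: dict

    nits = []  # type: list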

--- a/xpcom/ds/tools/incremental_dafsa.py
+++ b/xpcom/ds/tools/incremental_dafsa.py
@@ -14,12 +14,6 @@ from typing import List, Dict, Optional,
 
 
 class Node:
-    children: Dict[str, "Node"]
-    parents: Dict[str, List["Node"]]
-    character: str
-    is_root_node: bool
-    is_end_node: bool
-
     def __init__(self, character, is_root_node=False, is_end_node=False):
         self.children = {}
         self.parents = {}
@@ -156,9 +150,6 @@ class Node:
 
 
 class SuffixCursor:
-    index: int  # Current position of the cursor within the DAFSA.
-    node: Node
-
     def __init__(self, index, node):
         self.index = index
         self.node = node
@@ -227,15 +218,6 @@ class DafsaAppendStateMachine:
     The next suffix node we'll attempt to find is at index "9".
     """
 
-    root_node: Node
-    prefix_index: int
-    suffix_cursor: SuffixCursor
-    stack: List[Node]
-    word: str
-    suffix_overlaps_prefix: bool
-    first_fork_index: Optional[int]
-    _state: Callable
-
     def __init__(self, word, root_node, end_node):
         self.root_node = root_node
         self.prefix_index = 0
@@ -471,9 +453,6 @@ def _duplicate_fork_nodes(stack, fork_in
 
 
 class Dafsa:
-    root_node: Node
-    end_node: Node
-
     def __init__(self):
         self.root_node = Node(None, is_root_node=True)
         self.end_node = Node(None, is_end_node=True)
--- a/third_party/python/glean_parser/glean_parser/lint.py
+++ b/third_party/python/glean_parser/glean_parser/lint.py
@@ -237,9 +237,7 @@ def check_user_lifetime_expiration(
 
 # The checks that operate on an entire category of metrics:
 #    {NAME: (function, is_error)}
-CATEGORY_CHECKS: Dict[
-    str, Tuple[Callable[[str, Iterable[metrics.Metric]], LintGenerator], CheckType]
-] = {
+CATEGORY_CHECKS = {
     "COMMON_PREFIX": (check_common_prefix, CheckType.error),
     "CATEGORY_GENERIC": (check_category_generic, CheckType.error),
 }
@@ -247,9 +245,7 @@ CATEGORY_CHECKS: Dict[
 
 # The checks that operate on individual metrics:
 #     {NAME: (function, is_error)}
-INDIVIDUAL_CHECKS: Dict[
-    str, Tuple[Callable[[metrics.Metric, dict], LintGenerator], CheckType]
-] = {
+INDIVIDUAL_CHECKS = {
     "UNIT_IN_NAME": (check_unit_in_name, CheckType.error),
     "BUG_NUMBER": (check_bug_number, CheckType.error),
     "BASELINE_PING": (check_valid_in_baseline, CheckType.error),
@@ -282,7 +278,7 @@ def lint_metrics(
     if parser_config is None:
         parser_config = {}
 
-    nits: List[GlinterNit] = []
+    nits = []
     for (category_name, category) in sorted(list(objs.items())):
         if category_name == "pings":
             continue
@@ -363,7 +359,7 @@ def lint_yaml_files(input_filepaths: Ite
 
     # Generic type since the actual type comes from yamllint, which we don't
     # control.
-    nits: List = []
+    nits = []
     for path in input_filepaths:
         if not path.is_file() and parser_config.get("allow_missing_files", False):
             continue
--- a/third_party/python/glean_parser/glean_parser/metrics.py
+++ b/third_party/python/glean_parser/glean_parser/metrics.py
@@ -33,10 +33,10 @@ class DataSensitivity(enum.Enum):
 
 
 class Metric:
-    typename: str = "ERROR"
-    glean_internal_metric_cat: str = "glean.internal.metrics"
-    metric_types: Dict[str, Any] = {}
-    default_store_names: List[str] = ["metrics"]
+    typename = "ERROR"
+    glean_internal_metric_cat = "glean.internal.metrics"
+    metric_types = {}
+    default_store_names = ["metrics"]
 
     def __init__(
         self,
--- a/third_party/python/glean_parser/glean_parser/pings.py
+++ b/third_party/python/glean_parser/glean_parser/pings.py
@@ -49,7 +49,7 @@ class Ping:
         # _validated indicates whether this metric has already been jsonschema
         # validated (but not any of the Python-level validation).
         if not _validated:
-            data: Dict[str, util.JSONType] = {
+            data = {
                 "$schema": parser.PINGS_ID,
                 self.name: self.serialize(),
             }
--- a/third_party/python/glean_parser/glean_parser/util.py
+++ b/third_party/python/glean_parser/glean_parser/util.py
@@ -269,7 +269,7 @@ def fetch_remote_url(url: str, cache: bo
             if key in dc:
                 return dc[key]
 
-    contents: str = urllib.request.urlopen(url).read()
+    contents = urllib.request.urlopen(url).read()
 
     if cache:
         with diskcache.Cache(cache_dir) as dc:
--- a/third_party/python/glean_parser/glean_parser/parser.py
+++ b/third_party/python/glean_parser/glean_parser/parser.py
@@ -339,8 +339,8 @@ def parse_objects(
     if config is None:
         config = {}
 
-    all_objects: ObjectTree = OrderedDict()
-    sources: Dict[Any, Path] = {}
+    all_objects = OrderedDict()
+    sources = {}
     filepaths = util.ensure_list(filepaths)
     for filepath in filepaths:
         content, filetype = yield from _load_file(filepath, config)