Diffstat (limited to 'deb_patches')
-rw-r--r--   deb_patches/fix-armhf-webrtc-build.patch                 4
-rw-r--r--   deb_patches/python3-remove-fstrings.patch               392
-rw-r--r--   deb_patches/python3-remove-pep487.patch                  23
-rw-r--r--   deb_patches/python3-remove-variable-annotations.patch    94
4 files changed, 511 insertions, 2 deletions
diff --git a/deb_patches/fix-armhf-webrtc-build.patch b/deb_patches/fix-armhf-webrtc-build.patch
index 7f56b3e..44e88a6 100644
--- a/deb_patches/fix-armhf-webrtc-build.patch
+++ b/deb_patches/fix-armhf-webrtc-build.patch
@@ -37,9 +37,9 @@ diff -r 53fd96ca5aa4 media/webrtc/trunk/webrtc/system_wrappers/cpu_features_linu
- "/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_features_linux.c"
-]
-
- if CONFIG["MOZ_DEBUG"] == "1":
+ if not CONFIG["MOZ_DEBUG"]:
- DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "1"
+ DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0"
@@ -53,20 +49,36 @@
DEFINES["WEBRTC_ARCH_ARM64"] = True
DEFINES["WEBRTC_HAS_NEON"] = True
diff --git a/deb_patches/python3-remove-fstrings.patch b/deb_patches/python3-remove-fstrings.patch
new file mode 100644
index 0000000..93da547
--- /dev/null
+++ b/deb_patches/python3-remove-fstrings.patch
@@ -0,0 +1,392 @@
+Description: remove f-strings that require Python 3.6 (https://docs.python.org/3/whatsnew/3.6.html#whatsnew36-pep498)
+Author: Olivier Tilloy <olivier.tilloy@canonical.com>
+
+--- a/third_party/python/glean_parser/glean_parser/lint.py
++++ b/third_party/python/glean_parser/glean_parser/lint.py
+@@ -42,10 +42,10 @@ def _english_list(items: List[str]) -> s
+ if len(items) == 0:
+ return ""
+ elif len(items) == 1:
+- return f"'{items[0]}'"
++ return "'{}'".format(items[0])
+ else:
+ return "{}, or '{}'".format(
+- ", ".join([f"'{x}'" for x in items[:-1]]), items[-1]
++ ", ".join(["'{}'".format(x) for x in items[:-1]]), items[-1]
+ )
+
+
+@@ -88,10 +88,11 @@ def check_common_prefix(
+ if i > 0:
+ common_prefix = "_".join(first[:i])
+ yield (
+- f"Within category '{category_name}', all metrics begin with "
+- f"prefix '{common_prefix}'."
++ "Within category '{}', all metrics begin with "
++ "prefix '{}'."
+ "Remove the prefixes on the metric names and (possibly) "
+ "rename the category."
++ .format(category_name, common_prefix)
+ )
+
+
+@@ -131,17 +132,19 @@ def check_unit_in_name(
+ or unit_in_name == time_unit.name
+ ):
+ yield (
+- f"Suffix '{unit_in_name}' is redundant with time_unit "
+- f"'{time_unit.name}'. Only include time_unit."
++ "Suffix '{}' is redundant with time_unit "
++ "'{}'. Only include time_unit."
++ .format(unit_in_name, time_unit.name)
+ )
+ elif (
+ unit_in_name in TIME_UNIT_ABBREV.keys()
+ or unit_in_name in TIME_UNIT_ABBREV.values()
+ ):
+ yield (
+- f"Suffix '{unit_in_name}' doesn't match time_unit "
+- f"'{time_unit.name}'. "
++ "Suffix '{}' doesn't match time_unit "
++ "'{}'. "
+ "Confirm the unit is correct and only include time_unit."
++ .format(unit_in_name, time_unit.name)
+ )
+
+ elif memory_unit is not None:
+@@ -150,26 +153,29 @@ def check_unit_in_name(
+ or unit_in_name == memory_unit.name
+ ):
+ yield (
+- f"Suffix '{unit_in_name}' is redundant with memory_unit "
+- f"'{memory_unit.name}'. "
++ "Suffix '{}' is redundant with memory_unit "
++ "'{}'. "
+ "Only include memory_unit."
++ .format(unit_in_name, memory_unit.name)
+ )
+ elif (
+ unit_in_name in MEMORY_UNIT_ABBREV.keys()
+ or unit_in_name in MEMORY_UNIT_ABBREV.values()
+ ):
+ yield (
+- f"Suffix '{unit_in_name}' doesn't match memory_unit "
+- f"{memory_unit.name}'. "
++ "Suffix '{}' doesn't match memory_unit "
++ "{}'. "
+ "Confirm the unit is correct and only include memory_unit."
++ .format(unit_in_name, memory_unit.name)
+ )
+
+ elif unit is not None:
+ if unit_in_name == unit:
+ yield (
+- f"Suffix '{unit_in_name}' is redundant with unit param "
+- f"'{unit}'. "
++ "Suffix '{}' is redundant with unit param "
++ "'{}'. "
+ "Only include unit."
++ .format(unit_in_name, unit)
+ )
+
+
+@@ -183,8 +189,9 @@ def check_category_generic(
+
+ if category_name in GENERIC_CATEGORIES:
+ yield (
+- f"Category '{category_name}' is too generic. "
+- f"Don't use {_english_list(GENERIC_CATEGORIES)} for category names"
++ "Category '{}' is too generic. "
++ "Don't use {} for category names"
++ .format(category_name, _english_list(GENERIC_CATEGORIES))
+ )
+
+
+@@ -195,9 +202,10 @@ def check_bug_number(
+
+ if len(number_bugs):
+ yield (
+- f"For bugs {', '.join(number_bugs)}: "
++ "For bugs {}: "
+ "Bug numbers are deprecated and should be changed to full URLs. "
+ "For example, use 'http://bugzilla.mozilla.org/12345' instead of '12345'."
++ .format(', '.join(number_bugs))
+ )
+
+
+@@ -220,7 +228,7 @@ def check_misspelled_pings(
+ for builtin in pings.RESERVED_PING_NAMES:
+ distance = _hamming_distance(ping, builtin)
+ if distance == 1:
+- yield f"Ping '{ping}' seems misspelled. Did you mean '{builtin}'?"
++ yield "Ping '{}' seems misspelled. Did you mean '{}'?".format(ping, builtin)
+
+
+ def check_user_lifetime_expiration(
+@@ -263,8 +271,9 @@ class GlinterNit:
+
+ def format(self):
+ return (
+- f"{self.check_type.name.upper()}: {self.check_name}: "
+- f"{self.name}: {self.msg}"
++ "{}: {}: "
++ "{}: {}"
++ .format(self.check_type.name.upper(), self.check_name, self.name, self.msg)
+ )
+
+
+@@ -324,8 +333,9 @@ def lint_metrics(
+ "SUPERFLUOUS_NO_LINT",
+ ".".join([metric.category, metric.name]),
+ (
+- f"Superfluous no_lint entry '{check_name}'. "
++ "Superfluous no_lint entry '{}'. "
+ "Please remove it."
++ .format(check_name)
+ ),
+ CheckType.warning,
+ )
+@@ -372,7 +382,7 @@ def lint_yaml_files(input_filepaths: Ite
+ if len(nits):
+ print("Sorry, Glean found some glinter nits:", file=file)
+ for (path, p) in nits:
+- print(f"{path} ({p.line}:{p.column}) - {p.message}")
++ print("{} ({}:{}) - {}".format(path, p.line, p.column, p.message))
+ print("", file=file)
+ print("Please fix the above nits to continue.", file=file)
+
+--- a/third_party/python/glean_parser/glean_parser/markdown.py
++++ b/third_party/python/glean_parser/glean_parser/markdown.py
+@@ -70,7 +70,7 @@ def metrics_docs(obj_name: str) -> str:
+ if obj_name.startswith("labeled_"):
+ fixedup_name += "s"
+
+- return f"https://mozilla.github.io/glean/book/user/metrics/{fixedup_name}.html"
++ return "https://mozilla.github.io/glean/book/user/metrics/{}.html".format(fixedup_name)
+
+
+ def ping_docs(ping_name: str) -> str:
+@@ -81,7 +81,7 @@ def ping_docs(ping_name: str) -> str:
+ if ping_name not in pings.RESERVED_PING_NAMES:
+ return ""
+
+- return f"https://mozilla.github.io/glean/book/user/pings/{ping_name}.html"
++ return "https://mozilla.github.io/glean/book/user/pings/{}.html".format(ping_name)
+
+
+ def if_empty(ping_name: str, custom_pings_cache: Dict[str, pings.Ping] = {}) -> bool:
+--- a/third_party/python/glean_parser/glean_parser/parser.py
++++ b/third_party/python/glean_parser/glean_parser/parser.py
+@@ -46,7 +46,8 @@ def _update_validator(validator):
+ if len(missing_properties):
+ missing_properties = sorted(list(missing_properties))
+ yield ValidationError(
+- f"Missing required properties: {', '.join(missing_properties)}"
++ "Missing required properties: {}"
++ .format(', '.join(missing_properties))
+ )
+
+ validator.VALIDATORS["required"] = required
+@@ -65,7 +66,7 @@ def _load_file(
+ return {}, None
+
+ if content is None:
+- yield util.format_error(filepath, "", f"'{filepath}' file can not be empty.")
++ yield util.format_error(filepath, "", "'{}' file can not be empty.".format(filepath))
+ return {}, None
+
+ if not isinstance(content, dict):
+@@ -76,7 +77,7 @@ def _load_file(
+
+ schema_key = content.get("$schema")
+ if not isinstance(schema_key, str):
+- raise TypeError(f"Invalid schema key {schema_key}")
++ raise TypeError("Invalid schema key {}".format(schema_key))
+
+ filetype = FILE_TYPES.get(schema_key)
+
+@@ -115,7 +116,7 @@ def _get_schema(
+ if schema_id not in schemas:
+ raise ValueError(
+ util.format_error(
+- filepath, "", f"$schema key must be one of {', '.join(schemas.keys())}",
++ filepath, "", "$schema key must be one of {}".format(', '.join(schemas.keys())),
+ )
+ )
+ return schemas[schema_id]
+@@ -187,7 +188,7 @@ def _instantiate_metrics(
+ if not config.get("allow_reserved") and category_key.split(".")[0] == "glean":
+ yield util.format_error(
+ filepath,
+- f"For category '{category_key}'",
++ "For category '{}'".format(category_key),
+ "Categories beginning with 'glean' are reserved for "
+ "Glean internal use.",
+ )
+@@ -195,7 +196,7 @@ def _instantiate_metrics(
+ all_objects.setdefault(category_key, OrderedDict())
+
+ if not isinstance(category_val, dict):
+- raise TypeError(f"Invalid content for {category_key}")
++ raise TypeError("Invalid content for {}".format(category_key))
+
+ for metric_key, metric_val in category_val.items():
+ try:
+@@ -204,7 +205,7 @@ def _instantiate_metrics(
+ )
+ except Exception as e:
+ yield util.format_error(
+- filepath, f"On instance {category_key}.{metric_key}", str(e),
++ filepath, "On instance {}.{}".format(category_key, metric_key), str(e),
+ )
+ metric_obj = None
+ else:
+@@ -214,7 +215,7 @@ def _instantiate_metrics(
+ ):
+ yield util.format_error(
+ filepath,
+- f"On instance {category_key}.{metric_key}",
++ "On instance {}.{}".format(category_key, metric_key),
+ 'Only internal metrics may specify "all-pings" '
+ 'in "send_in_pings"',
+ )
+@@ -230,8 +231,9 @@ def _instantiate_metrics(
+ filepath,
+ "",
+ (
+- f"Duplicate metric name '{category_key}.{metric_key}' "
+- f"already defined in '{already_seen}'"
++ "Duplicate metric name '{}.{}' "
++ "already defined in '{}'"
++ .format(category_key, metric_key, already_seen)
+ ),
+ )
+ else:
+@@ -257,17 +259,17 @@ def _instantiate_pings(
+ if ping_key in RESERVED_PING_NAMES:
+ yield util.format_error(
+ filepath,
+- f"For ping '{ping_key}'",
+- f"Ping uses a reserved name ({RESERVED_PING_NAMES})",
++ "For ping '{}'".format(ping_key),
++ "Ping uses a reserved name ({})".format(RESERVED_PING_NAMES),
+ )
+ continue
+ if not isinstance(ping_val, dict):
+- raise TypeError(f"Invalid content for ping {ping_key}")
++ raise TypeError("Invalid content for ping {}".format(ping_key))
+ ping_val["name"] = ping_key
+ try:
+ ping_obj = Ping(**ping_val)
+ except Exception as e:
+- yield util.format_error(filepath, f"On instance '{ping_key}'", str(e))
++ yield util.format_error(filepath, "On instance '{}'".format(ping_key), str(e))
+ continue
+
+ already_seen = sources.get(ping_key)
+@@ -276,8 +278,9 @@ def _instantiate_pings(
+ yield util.format_error(
+ filepath,
+ "",
+- f"Duplicate ping name '{ping_key}' "
+- f"already defined in '{already_seen}'",
++ "Duplicate ping name '{}' "
++ "already defined in '{}'"
++ .format(ping_key, already_seen),
+ )
+ else:
+ all_objects.setdefault("pings", {})[ping_key] = ping_obj
+--- a/third_party/python/glean_parser/glean_parser/util.py
++++ b/third_party/python/glean_parser/glean_parser/util.py
+@@ -132,7 +132,7 @@ def load_yaml_or_json(path: Path, ordere
+ else:
+ return yaml.load(fd, Loader=_NoDatesSafeLoader)
+ else:
+- raise ValueError(f"Unknown file extension {path.suffix}")
++ raise ValueError("Unknown file extension {}".format(path.suffix))
+
+
+ def ensure_list(value: Any) -> Sequence[Any]:
+@@ -331,9 +331,9 @@ def format_error(filepath: Union[str, Pa
+ else:
+ filepath = "<string>"
+ if header:
+- return f"{filepath}: {header}\n{_utils.indent(content)}"
++ return "{}: {}\n{}".format(filepath, header, _utils.indent(content))
+ else:
+- return f"{filepath}:\n{_utils.indent(content)}"
++ return "{}:\n{}".format(filepath, _utils.indent(content))
+
+
+ def is_expired(expires: str) -> bool:
+@@ -353,8 +353,8 @@ def is_expired(expires: str) -> bool:
+ date = datetime.date.fromisoformat(expires)
+ except ValueError:
+ raise ValueError(
+- f"Invalid expiration date '{expires}'. "
+- "Must be of the form yyyy-mm-dd in UTC."
++ "Invalid expiration date '{}'. "
++ "Must be of the form yyyy-mm-dd in UTC.".format(expires)
+ )
+ return date <= datetime.datetime.utcnow().date()
+
+--- a/third_party/python/glean_parser/tools/extract_data_categories.py
++++ b/third_party/python/glean_parser/tools/extract_data_categories.py
+@@ -62,7 +62,7 @@ def fetch_url(url: str) -> str:
+ if content is not None:
+ return content
+
+- print(f"Fetching {url}")
++ print("Fetching {}".format(url))
+ content = urlopen(url).read()
+ cache[url] = content
+ time.sleep(0.5)
+@@ -97,7 +97,7 @@ def categories_as_strings(categories: Se
+ """
+ if len(categories):
+ return [
+- CATEGORY_MAP.get(x, f"!!!UNKNOWN CATEGORY {x}")
++ CATEGORY_MAP.get(x, "!!!UNKNOWN CATEGORY {}".format(x))
+ for x in sorted(list(categories))
+ ]
+ else:
+@@ -119,24 +119,24 @@ def update_lines(
+
+ for line in lines_iter:
+ output.append(line)
+- if line.startswith(f"{category_name}:"):
++ if line.startswith("{}:".format(category_name)):
+ break
+
+ for line in lines_iter:
+ output.append(line)
+- if line.startswith(f" {metric_name}:"):
++ if line.startswith(" {}:".format(metric_name)):
+ break
+
+ for line in lines_iter:
+ output.append(line)
+- if line.startswith(f" data_reviews:"):
++ if line.startswith(" data_reviews:"):
+ break
+
+ for line in lines_iter:
+ if not line.strip().startswith("- "):
+ output.append(" data_sensitivity:\n")
+ for data_sensitivity in data_sensitivity_values:
+- output.append(f" - {data_sensitivity}\n")
++ output.append(" - {}\n".format(data_sensitivity))
+ output.append(line)
+ break
+ else:
+--- a/third_party/python/glean_parser/glean_parser/metrics.py
++++ b/third_party/python/glean_parser/glean_parser/metrics.py
+@@ -133,7 +133,7 @@ class Metric:
+ """
+ metric_type = metric_info["type"]
+ if not isinstance(metric_type, str):
+- raise TypeError(f"Unknown metric type {metric_type}")
++ raise TypeError("Unknown metric type {}".format(metric_type))
+ return cls.metric_types[metric_type](
+ category=category,
+ name=name,
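The patch above mechanically rewrites every f-string (PEP 498, Python 3.6+ only) into an equivalent str.format() call so glean_parser still runs under Python 3.5. The following is a minimal sketch of that substitution; the function name is illustrative and not part of glean_parser.

# Minimal sketch of the f-string -> str.format() substitution performed by
# python3-remove-fstrings.patch. The function below is illustrative only.
def redundant_suffix_message(unit_in_name, time_unit_name):
    # Python >= 3.6 spelling (what the patch removes):
    #     return (
    #         f"Suffix '{unit_in_name}' is redundant with time_unit "
    #         f"'{time_unit_name}'. Only include time_unit."
    #     )
    # Python 3.5-compatible spelling (what the patch substitutes). Adjacent
    # string literals are concatenated first, so a single .format() call at
    # the end fills every placeholder across the joined literals:
    return (
        "Suffix '{}' is redundant with time_unit "
        "'{}'. Only include time_unit."
        .format(unit_in_name, time_unit_name)
    )

print(redundant_suffix_message("ms", "millisecond"))
# Suffix 'ms' is redundant with time_unit 'millisecond'. Only include time_unit.
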
diff --git a/deb_patches/python3-remove-pep487.patch b/deb_patches/python3-remove-pep487.patch
new file mode 100644
index 0000000..996681d
--- /dev/null
+++ b/deb_patches/python3-remove-pep487.patch
@@ -0,0 +1,23 @@
+Description: revert a change that requires Python 3.6 (https://docs.python.org/3/whatsnew/3.6.html#whatsnew36-pep487)
+Author: Olivier Tilloy <olivier.tilloy@canonical.com>
+
+--- a/third_party/python/glean_parser/glean_parser/metrics.py
++++ b/third_party/python/glean_parser/glean_parser/metrics.py
+@@ -15,6 +15,8 @@ from typing import Any, Dict, List, Opti
+ from . import pings
+ from . import util
+
++import pep487
++
+
+ # Important: if the values are ever changing here, make sure
+ # to also fix mozilla/glean. Otherwise language bindings may
+@@ -32,7 +34,7 @@ class DataSensitivity(enum.Enum):
+ highly_sensitive = 4
+
+
+-class Metric:
++class Metric(pep487.PEP487Object):
+ typename = "ERROR"
+ glean_internal_metric_cat = "glean.internal.metrics"
+ metric_types = {}
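The patch above reverts glean_parser's reliance on PEP 487 (Python 3.6+) by basing Metric on pep487.PEP487Object from the pep487 backport package, which provides the __init_subclass__ hook on Python 3.5. Below is a self-contained sketch of that hook used for subclass registration; the class names are illustrative, not glean_parser's real ones.

# Sketch of the PEP 487 __init_subclass__ hook that the pep487 package
# backports to Python 3.5. Class names are illustrative. On Python >= 3.6 this
# runs as written; on 3.5 the equivalent is class MetricBase(pep487.PEP487Object)
# with the same method body, which is the substitution the patch makes.
class MetricBase:
    typename = "ERROR"
    metric_types = {}  # shared registry: typename -> subclass

    def __init_subclass__(cls, **kwargs):
        # Called automatically whenever MetricBase is subclassed (PEP 487).
        super().__init_subclass__(**kwargs)
        if cls.typename != "ERROR":
            MetricBase.metric_types[cls.typename] = cls


class CounterMetric(MetricBase):
    typename = "counter"


print(MetricBase.metric_types)  # {'counter': <class '__main__.CounterMetric'>}
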
diff --git a/deb_patches/python3-remove-variable-annotations.patch b/deb_patches/python3-remove-variable-annotations.patch
index 308a298..2ffd7ed 100644
--- a/deb_patches/python3-remove-variable-annotations.patch
+++ b/deb_patches/python3-remove-variable-annotations.patch
@@ -52,3 +52,97 @@ Author: Olivier Tilloy <olivier.tilloy@canonical.com>
def __init__(self):
self.root_node = Node(None, is_root_node=True)
self.end_node = Node(None, is_end_node=True)
+--- a/third_party/python/glean_parser/glean_parser/lint.py
++++ b/third_party/python/glean_parser/glean_parser/lint.py
+@@ -237,9 +237,7 @@ def check_user_lifetime_expiration(
+
+ # The checks that operate on an entire category of metrics:
+ # {NAME: (function, is_error)}
+-CATEGORY_CHECKS: Dict[
+- str, Tuple[Callable[[str, Iterable[metrics.Metric]], LintGenerator], CheckType]
+-] = {
++CATEGORY_CHECKS = {
+ "COMMON_PREFIX": (check_common_prefix, CheckType.error),
+ "CATEGORY_GENERIC": (check_category_generic, CheckType.error),
+ }
+@@ -247,9 +245,7 @@ CATEGORY_CHECKS: Dict[
+
+ # The checks that operate on individual metrics:
+ # {NAME: (function, is_error)}
+-INDIVIDUAL_CHECKS: Dict[
+- str, Tuple[Callable[[metrics.Metric, dict], LintGenerator], CheckType]
+-] = {
++INDIVIDUAL_CHECKS = {
+ "UNIT_IN_NAME": (check_unit_in_name, CheckType.error),
+ "BUG_NUMBER": (check_bug_number, CheckType.error),
+ "BASELINE_PING": (check_valid_in_baseline, CheckType.error),
+@@ -282,7 +278,7 @@ def lint_metrics(
+ :param file: The stream to write errors to.
+ :returns: List of nits.
+ """
+- nits: List[GlinterNit] = []
++ nits = []
+ for (category_name, category) in sorted(list(objs.items())):
+ if category_name == "pings":
+ continue
+@@ -363,7 +359,7 @@ def lint_yaml_files(input_filepaths: Ite
+
+ # Generic type since the actual type comes from yamllint, which we don't
+ # control.
+- nits: List = []
++ nits = []
+ for path in input_filepaths:
+ # yamllint needs both the file content and the path.
+ file_content = None
+--- a/third_party/python/glean_parser/glean_parser/metrics.py
++++ b/third_party/python/glean_parser/glean_parser/metrics.py
+@@ -33,10 +33,10 @@ class DataSensitivity(enum.Enum):
+
+
+ class Metric:
+- typename: str = "ERROR"
+- glean_internal_metric_cat: str = "glean.internal.metrics"
+- metric_types: Dict[str, Any] = {}
+- default_store_names: List[str] = ["metrics"]
++ typename = "ERROR"
++ glean_internal_metric_cat = "glean.internal.metrics"
++ metric_types = {}
++ default_store_names = ["metrics"]
+
+ def __init__(
+ self,
+--- a/third_party/python/glean_parser/glean_parser/pings.py
++++ b/third_party/python/glean_parser/glean_parser/pings.py
+@@ -49,7 +49,7 @@ class Ping:
+ # _validated indicates whether this metric has already been jsonschema
+ # validated (but not any of the Python-level validation).
+ if not _validated:
+- data: Dict[str, util.JSONType] = {
++ data = {
+ "$schema": parser.PINGS_ID,
+ self.name: self.serialize(),
+ }
+--- a/third_party/python/glean_parser/glean_parser/util.py
++++ b/third_party/python/glean_parser/glean_parser/util.py
+@@ -269,7 +269,7 @@ def fetch_remote_url(url: str, cache: bo
+ if key in dc:
+ return dc[key]
+
+- contents: str = urllib.request.urlopen(url).read()
++ contents = urllib.request.urlopen(url).read()
+
+ if cache:
+ with diskcache.Cache(cache_dir) as dc:
+--- a/third_party/python/glean_parser/glean_parser/parser.py
++++ b/third_party/python/glean_parser/glean_parser/parser.py
+@@ -339,8 +339,8 @@ def parse_objects(
+ value from the `metrics.yaml`, rather than having it overridden when
+ the metric expires.
+ """
+- all_objects: ObjectTree = OrderedDict()
+- sources: Dict[Any, Path] = {}
++ all_objects = OrderedDict()
++ sources = {}
+ filepaths = util.ensure_list(filepaths)
+ for filepath in filepaths:
+ content, filetype = yield from _load_file(filepath)
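The hunks above strip PEP 526 variable annotations (Python 3.6+ syntax) so the modules still parse under Python 3.5. Below is a minimal sketch of that substitution; the type-comment spelling shown is an optional, 3.5-compatible alternative understood by static checkers such as mypy, not something the patch itself adds.

# Sketch of the change in python3-remove-variable-annotations.patch.
# The type comments are an optional 3.5-compatible alternative (PEP 484 style)
# and are not part of the patch.
from typing import Dict, List

# Python >= 3.6 only (PEP 526), removed by the patch:
#     nits: List[str] = []
#     sources: Dict[str, str] = {}

# Python 3.5-compatible form kept by the patch:
nits = []       # type: List[str]
sources = {}    # type: Dict[str, str]

nits.append("COMMON_PREFIX")
sources["ping"] = "pings.yaml"
print(nits, sources)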