author    | ohfp <1813007-ohfp@users.noreply.gitlab.com> | 2020-09-30 14:04:18 +0200
committer | ohfp <1813007-ohfp@users.noreply.gitlab.com> | 2020-09-30 14:52:19 +0200
commit    | 42d221db4971342edd7ebe75152be53741bb0a7b (patch)
tree      | fb23761477f3f759ba7327845e271227a7a629e3 /deb_patches/python3-remove-fstrings.patch
parent    | v80.0.1-1 (diff)
download  | librewolf-linux-42d221db4971342edd7ebe75152be53741bb0a7b.tar.gz
          | librewolf-linux-42d221db4971342edd7ebe75152be53741bb0a7b.tar.bz2
          | librewolf-linux-42d221db4971342edd7ebe75152be53741bb0a7b.zip
v81.0-2
Diffstat (limited to 'deb_patches/python3-remove-fstrings.patch')
-rw-r--r-- | deb_patches/python3-remove-fstrings.patch | 392 |
1 file changed, 392 insertions, 0 deletions
diff --git a/deb_patches/python3-remove-fstrings.patch b/deb_patches/python3-remove-fstrings.patch
new file mode 100644
index 0000000..93da547
--- /dev/null
+++ b/deb_patches/python3-remove-fstrings.patch
@@ -0,0 +1,392 @@
+Description: remove f-strings that require Python 3.6 (https://docs.python.org/3/whatsnew/3.6.html#whatsnew36-pep498)
+Author: Olivier Tilloy <olivier.tilloy@canonical.com>
+
+--- a/third_party/python/glean_parser/glean_parser/lint.py
++++ b/third_party/python/glean_parser/glean_parser/lint.py
+@@ -42,10 +42,10 @@ def _english_list(items: List[str]) -> s
+     if len(items) == 0:
+         return ""
+     elif len(items) == 1:
+-        return f"'{items[0]}'"
++        return "'{}'".format(items[0])
+     else:
+         return "{}, or '{}'".format(
+-            ", ".join([f"'{x}'" for x in items[:-1]]), items[-1]
++            ", ".join(["'{}'".format(x) for x in items[:-1]]), items[-1]
+         )
+ 
+ 
+@@ -88,10 +88,11 @@ def check_common_prefix(
+     if i > 0:
+         common_prefix = "_".join(first[:i])
+         yield (
+-            f"Within category '{category_name}', all metrics begin with "
+-            f"prefix '{common_prefix}'."
++            "Within category '{}', all metrics begin with "
++            "prefix '{}'."
+             "Remove the prefixes on the metric names and (possibly) "
+             "rename the category."
++            .format(category_name, common_prefix)
+         )
+ 
+ 
+@@ -131,17 +132,19 @@ def check_unit_in_name(
+             or unit_in_name == time_unit.name
+         ):
+             yield (
+-                f"Suffix '{unit_in_name}' is redundant with time_unit "
+-                f"'{time_unit.name}'. Only include time_unit."
++                "Suffix '{}' is redundant with time_unit "
++                "'{}'. Only include time_unit."
++                .format(unit_in_name, time_unit.name)
+             )
+         elif (
+             unit_in_name in TIME_UNIT_ABBREV.keys()
+             or unit_in_name in TIME_UNIT_ABBREV.values()
+         ):
+             yield (
+-                f"Suffix '{unit_in_name}' doesn't match time_unit "
+-                f"'{time_unit.name}'. "
++                "Suffix '{}' doesn't match time_unit "
++                "'{}'. "
+                 "Confirm the unit is correct and only include time_unit."
++                .format(unit_in_name, time_unit.name)
+             )
+ 
+     elif memory_unit is not None:
+@@ -150,26 +153,29 @@ def check_unit_in_name(
+             or unit_in_name == memory_unit.name
+         ):
+             yield (
+-                f"Suffix '{unit_in_name}' is redundant with memory_unit "
+-                f"'{memory_unit.name}'. "
++                "Suffix '{}' is redundant with memory_unit "
++                "'{}'. "
+                 "Only include memory_unit."
++                .format(unit_in_name, memory_unit.name)
+             )
+         elif (
+             unit_in_name in MEMORY_UNIT_ABBREV.keys()
+             or unit_in_name in MEMORY_UNIT_ABBREV.values()
+         ):
+             yield (
+-                f"Suffix '{unit_in_name}' doesn't match memory_unit "
+-                f"{memory_unit.name}'. "
++                "Suffix '{}' doesn't match memory_unit "
++                "{}'. "
+                 "Confirm the unit is correct and only include memory_unit."
++                .format(unit_in_name, memory_unit.name)
+             )
+ 
+     elif unit is not None:
+         if unit_in_name == unit:
+             yield (
+-                f"Suffix '{unit_in_name}' is redundant with unit param "
+-                f"'{unit}'. "
++                "Suffix '{}' is redundant with unit param "
++                "'{}'. "
+                 "Only include unit."
++                .format(unit_in_name, unit)
+             )
+ 
+ 
+@@ -183,8 +189,9 @@ def check_category_generic(
+ 
+     if category_name in GENERIC_CATEGORIES:
+         yield (
+-            f"Category '{category_name}' is too generic. "
+-            f"Don't use {_english_list(GENERIC_CATEGORIES)} for category names"
++            "Category '{}' is too generic. "
++            "Don't use {} for category names"
++            .format(category_name, _english_list(GENERIC_CATEGORIES))
+         )
+ 
+ 
+@@ -195,9 +202,10 @@ def check_bug_number(
+ 
+     if len(number_bugs):
+         yield (
+-            f"For bugs {', '.join(number_bugs)}: "
++            "For bugs {}: "
+             "Bug numbers are deprecated and should be changed to full URLs. "
+             "For example, use 'http://bugzilla.mozilla.org/12345' instead of '12345'."
++            .format(', '.join(number_bugs))
+         )
+ 
+ 
+@@ -220,7 +228,7 @@ def check_misspelled_pings(
+         for builtin in pings.RESERVED_PING_NAMES:
+             distance = _hamming_distance(ping, builtin)
+             if distance == 1:
+-                yield f"Ping '{ping}' seems misspelled. Did you mean '{builtin}'?"
++                yield "Ping '{}' seems misspelled. Did you mean '{}'?".format(ping, builtin)
+ 
+ 
+ def check_user_lifetime_expiration(
+@@ -263,8 +271,9 @@ class GlinterNit:
+ 
+     def format(self):
+         return (
+-            f"{self.check_type.name.upper()}: {self.check_name}: "
+-            f"{self.name}: {self.msg}"
++            "{}: {}: "
++            "{}: {}"
++            .format(self.check_type.name.upper(), self.check_name, self.name, self.msg)
+         )
+ 
+ 
+@@ -324,8 +333,9 @@ def lint_metrics(
+                                 "SUPERFLUOUS_NO_LINT",
+                                 ".".join([metric.category, metric.name]),
+                                 (
+-                                    f"Superfluous no_lint entry '{check_name}'. "
++                                    "Superfluous no_lint entry '{}'. "
+                                     "Please remove it."
++                                    .format(check_name)
+                                 ),
+                                 CheckType.warning,
+                             )
+@@ -372,7 +382,7 @@ def lint_yaml_files(input_filepaths: Ite
+     if len(nits):
+         print("Sorry, Glean found some glinter nits:", file=file)
+         for (path, p) in nits:
+-            print(f"{path} ({p.line}:{p.column}) - {p.message}")
++            print("{} ({}:{}) - {}".format(path, p.line, p.column, p.message))
+         print("", file=file)
+         print("Please fix the above nits to continue.", file=file)
+ 
+--- a/third_party/python/glean_parser/glean_parser/markdown.py
++++ b/third_party/python/glean_parser/glean_parser/markdown.py
+@@ -70,7 +70,7 @@ def metrics_docs(obj_name: str) -> str:
+     if obj_name.startswith("labeled_"):
+         fixedup_name += "s"
+ 
+-    return f"https://mozilla.github.io/glean/book/user/metrics/{fixedup_name}.html"
++    return "https://mozilla.github.io/glean/book/user/metrics/{}.html".format(fixedup_name)
+ 
+ 
+ def ping_docs(ping_name: str) -> str:
+@@ -81,7 +81,7 @@ def ping_docs(ping_name: str) -> str:
+     if ping_name not in pings.RESERVED_PING_NAMES:
+         return ""
+ 
+-    return f"https://mozilla.github.io/glean/book/user/pings/{ping_name}.html"
++    return "https://mozilla.github.io/glean/book/user/pings/{}.html".format(ping_name)
+ 
+ 
+ def if_empty(ping_name: str, custom_pings_cache: Dict[str, pings.Ping] = {}) -> bool:
+--- a/third_party/python/glean_parser/glean_parser/parser.py
++++ b/third_party/python/glean_parser/glean_parser/parser.py
+@@ -46,7 +46,8 @@ def _update_validator(validator):
+         if len(missing_properties):
+             missing_properties = sorted(list(missing_properties))
+             yield ValidationError(
+-                f"Missing required properties: {', '.join(missing_properties)}"
++                "Missing required properties: {}"
++                .format(', '.join(missing_properties))
+             )
+ 
+     validator.VALIDATORS["required"] = required
+@@ -65,7 +66,7 @@ def _load_file(
+         return {}, None
+ 
+     if content is None:
+-        yield util.format_error(filepath, "", f"'{filepath}' file can not be empty.")
++        yield util.format_error(filepath, "", "'{}' file can not be empty.".format(filepath))
+         return {}, None
+ 
+     if not isinstance(content, dict):
+@@ -76,7 +77,7 @@
+ 
+     schema_key = content.get("$schema")
+     if not isinstance(schema_key, str):
+-        raise TypeError(f"Invalid schema key {schema_key}")
++        raise TypeError("Invalid schema key {}".format(schema_key))
+ 
+     filetype = FILE_TYPES.get(schema_key)
+ 
+@@ -115,7 +116,7 @@ def _get_schema(
+     if schema_id not in schemas:
+         raise ValueError(
+             util.format_error(
+-                filepath, "", f"$schema key must be one of {', '.join(schemas.keys())}",
++                filepath, "", "$schema key must be one of {}".format(', '.join(schemas.keys())),
+             )
+         )
+     return schemas[schema_id]
+@@ -187,7 +188,7 @@ def _instantiate_metrics(
+         if not config.get("allow_reserved") and category_key.split(".")[0] == "glean":
+             yield util.format_error(
+                 filepath,
+-                f"For category '{category_key}'",
++                "For category '{}'".format(category_key),
+                 "Categories beginning with 'glean' are reserved for "
+                 "Glean internal use.",
+             )
+@@ -195,7 +196,7 @@
+         all_objects.setdefault(category_key, OrderedDict())
+ 
+         if not isinstance(category_val, dict):
+-            raise TypeError(f"Invalid content for {category_key}")
++            raise TypeError("Invalid content for {}".format(category_key))
+ 
+         for metric_key, metric_val in category_val.items():
+             try:
+@@ -204,7 +205,7 @@
+                 )
+             except Exception as e:
+                 yield util.format_error(
+-                    filepath, f"On instance {category_key}.{metric_key}", str(e),
++                    filepath, "On instance {}.{}".format(category_key, metric_key), str(e),
+                 )
+                 metric_obj = None
+             else:
+@@ -214,7 +215,7 @@
+                 ):
+                     yield util.format_error(
+                         filepath,
+-                        f"On instance {category_key}.{metric_key}",
++                        "On instance {}.{}".format(category_key, metric_key),
+                         'Only internal metrics may specify "all-pings" '
+                         'in "send_in_pings"',
+                     )
+@@ -230,8 +231,9 @@
+                     filepath,
+                     "",
+                     (
+-                        f"Duplicate metric name '{category_key}.{metric_key}' "
+-                        f"already defined in '{already_seen}'"
++                        "Duplicate metric name '{}.{}' "
++                        "already defined in '{}'"
++                        .format(category_key, metric_key, already_seen)
+                     ),
+                 )
+             else:
+@@ -257,17 +259,17 @@
+         if ping_key in RESERVED_PING_NAMES:
+             yield util.format_error(
+                 filepath,
+-                f"For ping '{ping_key}'",
+-                f"Ping uses a reserved name ({RESERVED_PING_NAMES})",
++                "For ping '{}'".format(ping_key),
++                "Ping uses a reserved name ({})".format(RESERVED_PING_NAMES),
+             )
+             continue
+         if not isinstance(ping_val, dict):
+-            raise TypeError(f"Invalid content for ping {ping_key}")
++            raise TypeError("Invalid content for ping {}".format(ping_key))
+         ping_val["name"] = ping_key
+         try:
+             ping_obj = Ping(**ping_val)
+         except Exception as e:
+-            yield util.format_error(filepath, f"On instance '{ping_key}'", str(e))
++            yield util.format_error(filepath, "On instance '{}'".format(ping_key), str(e))
+             continue
+ 
+         already_seen = sources.get(ping_key)
+@@ -276,8 +278,9 @@
+             yield util.format_error(
+                 filepath,
+                 "",
+-                f"Duplicate ping name '{ping_key}' "
+-                f"already defined in '{already_seen}'",
++                "Duplicate ping name '{}' "
++                "already defined in '{}'"
++                .format(ping_key, already_seen),
+             )
+         else:
+             all_objects.setdefault("pings", {})[ping_key] = ping_obj
+--- a/third_party/python/glean_parser/glean_parser/util.py
++++ b/third_party/python/glean_parser/glean_parser/util.py
+@@ -132,7 +132,7 @@ def load_yaml_or_json(path: Path, ordere
+             else:
+                 return yaml.load(fd, Loader=_NoDatesSafeLoader)
+     else:
+-        raise ValueError(f"Unknown file extension {path.suffix}")
++        raise ValueError("Unknown file extension {}".format(path.suffix))
+ 
+ 
+ def ensure_list(value: Any) -> Sequence[Any]:
+@@ -331,9 +331,9 @@ def format_error(filepath: Union[str, Pa
+     else:
+         filepath = "<string>"
+     if header:
+-        return f"{filepath}: {header}\n{_utils.indent(content)}"
++        return "{}: {}\n{}".format(filepath, header, _utils.indent(content))
+     else:
+-        return f"{filepath}:\n{_utils.indent(content)}"
++        return "{}:\n{}".format(filepath, _utils.indent(content))
+ 
+ 
+ def is_expired(expires: str) -> bool:
+@@ -353,8 +353,8 @@ def is_expired(expires: str) -> bool:
+             date = datetime.date.fromisoformat(expires)
+         except ValueError:
+             raise ValueError(
+-                f"Invalid expiration date '{expires}'. "
+-                "Must be of the form yyyy-mm-dd in UTC."
++                "Invalid expiration date '{}'. "
++                "Must be of the form yyyy-mm-dd in UTC.".format(expires)
+             )
+         return date <= datetime.datetime.utcnow().date()
+ 
+--- a/third_party/python/glean_parser/tools/extract_data_categories.py
++++ b/third_party/python/glean_parser/tools/extract_data_categories.py
+@@ -62,7 +62,7 @@ def fetch_url(url: str) -> str:
+     if content is not None:
+         return content
+ 
+-    print(f"Fetching {url}")
++    print("Fetching {}".format(url))
+     content = urlopen(url).read()
+     cache[url] = content
+     time.sleep(0.5)
+@@ -97,7 +97,7 @@ def categories_as_strings(categories: Se
+     """
+     if len(categories):
+         return [
+-            CATEGORY_MAP.get(x, f"!!!UNKNOWN CATEGORY {x}")
++            CATEGORY_MAP.get(x, "!!!UNKNOWN CATEGORY {}".format(x))
+             for x in sorted(list(categories))
+         ]
+     else:
+@@ -119,24 +119,24 @@ def update_lines(
+ 
+     for line in lines_iter:
+         output.append(line)
+-        if line.startswith(f"{category_name}:"):
++        if line.startswith("{}:".format(category_name)):
+             break
+ 
+     for line in lines_iter:
+         output.append(line)
+-        if line.startswith(f"  {metric_name}:"):
++        if line.startswith("  {}:".format(metric_name)):
+             break
+ 
+     for line in lines_iter:
+         output.append(line)
+-        if line.startswith(f"    data_reviews:"):
++        if line.startswith("    data_reviews:"):
+             break
+ 
+     for line in lines_iter:
+         if not line.strip().startswith("- "):
+             output.append("    data_sensitivity:\n")
+             for data_sensitivity in data_sensitivity_values:
+-                output.append(f"      - {data_sensitivity}\n")
++                output.append("      - {}\n".format(data_sensitivity))
+             output.append(line)
+             break
+         else:
+--- a/third_party/python/glean_parser/glean_parser/metrics.py
++++ b/third_party/python/glean_parser/glean_parser/metrics.py
+@@ -133,7 +133,7 @@ class Metric:
+         """
+         metric_type = metric_info["type"]
+         if not isinstance(metric_type, str):
+-            raise TypeError(f"Unknown metric type {metric_type}")
++            raise TypeError("Unknown metric type {}".format(metric_type))
+         return cls.metric_types[metric_type](
+             category=category,
+             name=name,
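
The change is mechanical throughout: every PEP 498 f-string in the vendored glean_parser is rewritten as an equivalent str.format() call, so the code also runs on Python interpreters older than 3.6. A minimal standalone sketch of the before/after pattern follows; the variable values are illustrative placeholders, not data from the patch:

```python
# f-string form used upstream (requires Python 3.6+, PEP 498)
category_name = "browser.search"   # placeholder value for illustration
common_prefix = "engine"           # placeholder value for illustration

msg_fstring = (
    f"Within category '{category_name}', all metrics begin with "
    f"prefix '{common_prefix}'."
)

# str.format() form produced by the patch. Adjacent string literals are
# concatenated at compile time, then .format() fills the {} placeholders
# in the combined string.
msg_format = (
    "Within category '{}', all metrics begin with "
    "prefix '{}'."
    .format(category_name, common_prefix)
)

assert msg_fstring == msg_format
```

Appending `.format(...)` after the last literal in an implicit concatenation is what lets the multi-line messages in lint.py and parser.py keep their original layout while dropping the f-prefix.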