Fix encodings across the system to ensure UTF8 is respected
ZeldaZach committed Aug 8, 2020
1 parent 4553aec commit 385995d
Showing 8 changed files with 29 additions and 17 deletions.
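
The change is mechanical but important: without an explicit encoding argument, Python's open() and pathlib.Path.open() fall back to locale.getpreferredencoding(False), which is typically cp1252 on Windows rather than UTF-8, so JSON files containing accented card names or foreign printings can fail to read or write. A minimal sketch of the failure mode being guarded against (not taken from this commit; the file name is illustrative):

    import json
    import pathlib

    path = pathlib.Path("example_card.json")  # hypothetical file

    # Explicit UTF-8 on both write and read keeps non-ASCII text intact
    # regardless of the platform's locale encoding.
    with path.open("w", encoding="utf-8") as handle:
        json.dump({"name": "Lim-Dûl's Vault"}, handle, ensure_ascii=False)

    with path.open(encoding="utf-8") as handle:
        assert json.load(handle)["name"] == "Lim-Dûl's Vault"

    # Dropping encoding="utf-8" makes both calls depend on the locale default,
    # which can raise UnicodeDecodeError or mangle the name on Windows.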
4 changes: 3 additions & 1 deletion mtgjson5/classes/mtgjson_card.py
@@ -230,7 +230,9 @@ def set_watermark(self, watermark: Optional[str]) -> None:
             return

         if not self.__watermark_resource:
-            with RESOURCE_PATH.joinpath("set_code_watermarks.json").open() as f:
+            with RESOURCE_PATH.joinpath("set_code_watermarks.json").open(
+                encoding="utf-8"
+            ) as f:
                 self.__watermark_resource = json.load(f)

         if watermark == "set":
4 changes: 2 additions & 2 deletions mtgjson5/compiled_classes/mtgjson_enum_values.py
@@ -65,7 +65,7 @@ def __init__(self) -> None:
         if not keywords.is_file():
             LOGGER.warning(f"Unable to find {keywords}")
         else:
-            with keywords.open() as file:
+            with keywords.open(encoding="utf-8") as file:
                 content = json.load(file).get("data", {})
                 self.attr_value_dict.update({"keywords": content})

@@ -82,7 +82,7 @@ def construct_deck_enums(self, decks_directory: pathlib.Path) -> Dict[str, Any]:
                 type_map[object_name][object_field_name] = set()

         for deck in decks_directory.glob("**/*.json"):
-            with deck.open() as file:
+            with deck.open(encoding="utf-8") as file:
                 content = json.load(file).get("data", {})

                 for key in content.keys():
6 changes: 4 additions & 2 deletions mtgjson5/output_generator.py
@@ -40,7 +40,7 @@ def write_set_file(mtgjson_set_object: MtgjsonSetObject, pretty_print: bool) ->
     OUTPUT_PATH.mkdir(parents=True, exist_ok=True)

     file_name: str = f"{fix_windows_set_name(mtgjson_set_object.code)}.json"
-    with OUTPUT_PATH.joinpath(file_name).open("w") as file:
+    with OUTPUT_PATH.joinpath(file_name).open("w", encoding="utf-8") as file:
         json.dump(
             obj={"data": mtgjson_set_object, "meta": MtgjsonMetaObject()},
             fp=file,
@@ -386,5 +386,7 @@ def generate_output_file_hashes(directory: pathlib.Path) -> None:
             continue

         hash_file_name = f"{file.name}.{HASH_TO_GENERATE.name}"
-        with file.parent.joinpath(hash_file_name).open("w") as hash_file:
+        with file.parent.joinpath(hash_file_name).open(
+            "w", encoding="utf-8"
+        ) as hash_file:
             hash_file.write(generated_hash)
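
The write side matters as much as the reads: json.dump escapes non-ASCII characters while ensure_ascii is left at its default of True, but once the dump is configured to emit raw Unicode, the file handle's encoding decides whether the write succeeds. A hedged sketch of that interaction (the remaining json.dump arguments in write_set_file are cut off in this diff, so ensure_ascii=False below is an assumption for illustration):

    import json
    import pathlib

    output_path = pathlib.Path("json_outputs")  # stand-in for the project's OUTPUT_PATH
    output_path.mkdir(parents=True, exist_ok=True)

    data = {"name": "Lightning Bolt", "foreign_name": "稲妻"}  # illustrative record
    with output_path.joinpath("EXAMPLE.json").open("w", encoding="utf-8") as file:
        # With ensure_ascii=False the Japanese text is written as-is; a cp1252
        # handle would raise UnicodeEncodeError here, while a UTF-8 handle cannot.
        json.dump(data, file, ensure_ascii=False)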
2 changes: 1 addition & 1 deletion mtgjson5/providers/github_decks.py
@@ -72,7 +72,7 @@ def iterate_precon_decks(self) -> Iterator[MtgjsonDeckObject]:
             LOGGER.error("Unable to construct decks. AllPrintings not fully formed")
             return

-        with self.all_printings_file.open() as file:
+        with self.all_printings_file.open(encoding="utf-8") as file:
             self.all_printings_cards = json.load(file).get("data", {})

         for deck in self.download(self.decks_api_url):
6 changes: 4 additions & 2 deletions mtgjson5/providers/tcgplayer.py
@@ -142,7 +142,9 @@ def generate_today_price_dict(
         # Future ways to put this into shared memory so all threads can access
         tcg_to_mtgjson_map = generate_tcgplayer_to_mtgjson_map(all_printings_path)
         CACHE_PATH.mkdir(parents=True, exist_ok=True)
-        with CACHE_PATH.joinpath("tcgplayer_price_map.json").open("w") as file:
+        with CACHE_PATH.joinpath("tcgplayer_price_map.json").open(
+            "w", encoding="utf-8"
+        ) as file:
             json.dump(tcg_to_mtgjson_map, file)

         ids_and_names = self.get_tcgplayer_magic_set_ids()
@@ -184,7 +186,7 @@ def get_tcgplayer_prices_map(
     :return: Cards with prices from Set ID & Name
     """

-    with CACHE_PATH.joinpath("tcgplayer_price_map.json").open() as file:
+    with CACHE_PATH.joinpath("tcgplayer_price_map.json").open(encoding="utf-8") as file:
         tcg_to_mtgjson_map = json.load(file)

     api_response = TCGPlayerProvider().download(
12 changes: 8 additions & 4 deletions mtgjson5/providers/wizards.py
@@ -158,7 +158,7 @@ def load_translation_table(self) -> None:
         Load translation table from cache (as it doesn't change
         that frequently)
         """
-        with self.__translation_table_cache.open() as file:
+        with self.__translation_table_cache.open(encoding="utf-8") as file:
             self.translation_table = json.load(file)

     def build_translation_table(self) -> None:
@@ -184,7 +184,7 @@ def build_translation_table(self) -> None:
         # Cache the table for future uses
         self.logger.info("Saving translation table")
         self.__translation_table_cache.parent.mkdir(parents=True, exist_ok=True)
-        with self.__translation_table_cache.open("w") as file:
+        with self.__translation_table_cache.open("w", encoding="utf-8") as file:
             json.dump(translation_table, file)

         self.translation_table = translation_table
@@ -200,7 +200,9 @@ def override_set_translations(
         :param table: Translation Table
         :return: Overridden Translation Table
         """
-        with RESOURCE_PATH.joinpath("translation_overrides.json").open() as f:
+        with RESOURCE_PATH.joinpath("translation_overrides.json").open(
+            encoding="utf-8"
+        ) as f:
             translation_fixes = json.load(f)

         for set_code, override_translations in translation_fixes.items():
@@ -221,7 +223,9 @@ def set_names_to_set_codes(
         :param table: Translation Table
         :return: Fixed Translation Table
         """
-        with RESOURCE_PATH.joinpath("wizards_set_name_fixes.json").open() as f:
+        with RESOURCE_PATH.joinpath("wizards_set_name_fixes.json").open(
+            encoding="utf-8"
+        ) as f:
             set_name_fixes = json.load(f)

         for key, value in set_name_fixes.items():
6 changes: 3 additions & 3 deletions mtgjson5/referral_builder.py
@@ -54,7 +54,7 @@ def write_referral_map(single_set_referral_map: List[Tuple[str, str]]) -> None:
     :param single_set_referral_map: Referrals to dump
     """
     OUTPUT_PATH.mkdir(parents=True, exist_ok=True)
-    with OUTPUT_PATH.joinpath("ReferralMap.json").open("a") as file:
+    with OUTPUT_PATH.joinpath("ReferralMap.json").open("a", encoding="utf-8") as file:
         for entry in single_set_referral_map:
             file.write(f"/links/{entry[0]}\t{entry[1]};\n")

@@ -63,9 +63,9 @@ def fixup_referral_map() -> None:
     """
     Sort and uniquify the referral map for proper Nginx support
     """
-    with OUTPUT_PATH.joinpath("ReferralMap.json").open() as file:
+    with OUTPUT_PATH.joinpath("ReferralMap.json").open(encoding="utf-8") as file:
         lines = list(set(file.readlines()))
         lines = sorted(lines)

-    with OUTPUT_PATH.joinpath("ReferralMap.json").open("w") as file:
+    with OUTPUT_PATH.joinpath("ReferralMap.json").open("w", encoding="utf-8") as file:
         file.writelines(lines)
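
For context, write_referral_map appends one tab-separated Nginx map entry per set, and fixup_referral_map then collapses duplicates and sorts the file so it can serve as a stable include. An illustrative round trip (the set code and URL are made up):

    entry = ("EXAMPLE--2020-08-08", "https://example.com/download/EXAMPLE.json")
    line = f"/links/{entry[0]}\t{entry[1]};\n"

    lines = [line, line]            # appending the same set twice leaves a duplicate line
    deduped = sorted(set(lines))    # what fixup_referral_map does before rewriting
    assert deduped == [line]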
6 changes: 4 additions & 2 deletions setup.py
@@ -22,7 +22,7 @@
     author_email="zach@mtgjson.com",
     url="https://mtgjson.com/",
     description="Magic: the Gathering compiled database generator",
-    long_description=project_root.joinpath("README.md").open().read(),
+    long_description=project_root.joinpath("README.md").open(encoding="utf-8").read(),
     long_description_content_type="text/markdown",
     license="MIT",
     classifiers=[
@@ -55,7 +55,9 @@
     ],
     include_package_data=True,
     packages=setuptools.find_packages(),
-    install_requires=project_root.joinpath("requirements.txt").open().readlines()
+    install_requires=project_root.joinpath("requirements.txt")
+    .open(encoding="utf-8")
+    .readlines()
     if project_root.joinpath("requirements.txt").is_file()
     else [],  # Use the requirements file, if able
 )
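
The setup.py hunks keep the chained .open(encoding="utf-8").read() / .readlines() style of the original. As a side note, pathlib also offers Path.read_text(encoding=...), which reads with an explicit encoding and closes the file itself; a sketch of that equivalent (not what the commit does, and project_root is redefined here only so the sketch stands alone):

    import pathlib

    project_root = pathlib.Path(__file__).resolve().parent

    long_description = project_root.joinpath("README.md").read_text(encoding="utf-8")

    requirements_file = project_root.joinpath("requirements.txt")
    install_requires = (
        requirements_file.read_text(encoding="utf-8").splitlines()
        if requirements_file.is_file()
        else []  # Use the requirements file, if able
    )

splitlines() drops the trailing newlines that readlines() keeps, which makes no practical difference for install_requires.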
