Minor formatting with pre-commit hooks
woctezuma committed Nov 18, 2023
1 parent 5792e40 commit bcbdf51
Showing 14 changed files with 95 additions and 31 deletions.
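
The commit message credits the cleanup to pre-commit hooks, but the hook configuration itself is not shown in this diff. As a purely illustrative sketch, a .pre-commit-config.yaml along the following lines is how such checks are typically wired up; the hook selection, repository URLs, and revisions below are assumptions rather than the repository's actual configuration, and the wrapping style visible in the diff (one argument per line with a trailing comma, parenthesized long return values) may come from an autoformatter hook or from manual edits prompted by a line-length check such as flake8's E501.

# Hypothetical .pre-commit-config.yaml; hook selection and revisions are assumptions.
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0  # placeholder revision
    hooks:
      - id: trailing-whitespace  # strip trailing spaces
      - id: end-of-file-fixer    # normalize the final newline and trailing blank lines
  - repo: https://github.com/asottile/add-trailing-comma
    rev: v3.1.0  # placeholder revision
    hooks:
      - id: add-trailing-comma   # add trailing commas to multi-line calls and literals
  - repo: https://github.com/PyCQA/flake8
    rev: 6.1.0  # placeholder revision
    hooks:
      - id: flake8               # report long lines (E501) and extra blank lines (E303)

With such a configuration, pre-commit install registers the hooks in the local clone, and pre-commit run --all-files applies the automatic fixers to every tracked file in one pass, which is the kind of repository-wide sweep this commit reflects.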
1 change: 0 additions & 1 deletion fetch_data_for_today.py
@@ -46,6 +46,5 @@ def main():
     write_markdown_files(data)


-
 if __name__ == '__main__':
     main()
11 changes: 8 additions & 3 deletions fetch_mappings_for_today.py
@@ -32,11 +32,16 @@ def main():
     print('Updating tracked page mappings.')
     # First, for new game slugs in order to quickly post to Discord. This should find new tracked slugs in a short time.
     new_tracked_game_slugs = update_tracked_page_mappings(new_game_slugs)
-    post_slugs_to_discord(new_tracked_game_slugs, webhook_keyword=WEBHOOK_KEYWORD_TROPHY)
+    post_slugs_to_discord(
+        new_tracked_game_slugs,
+        webhook_keyword=WEBHOOK_KEYWORD_TROPHY,
+    )
     # Second, for all the page slugs. This is less rewarding: there are many checks... and very few new tracked slugs!
     new_tracked_game_slugs = update_tracked_page_mappings(page_slugs)
-    post_slugs_to_discord(new_tracked_game_slugs, webhook_keyword=WEBHOOK_KEYWORD_LATE_TROPHY)
-
+    post_slugs_to_discord(
+        new_tracked_game_slugs,
+        webhook_keyword=WEBHOOK_KEYWORD_LATE_TROPHY,
+    )


 if __name__ == '__main__':
8 changes: 6 additions & 2 deletions list_all_fixed_trophies.py
@@ -15,15 +15,19 @@ def display_results(date_str, game_slugs):


 def extract_date_from_fname(fname):
-    return str(fname).removeprefix(FNAME_PREFIX).removesuffix(FNAME_SUFFIX).replace(FOLDER_SEPARATOR, DATE_SEPARATOR)
+    return (
+        str(fname)
+        .removeprefix(FNAME_PREFIX)
+        .removesuffix(FNAME_SUFFIX)
+        .replace(FOLDER_SEPARATOR, DATE_SEPARATOR)
+    )


 def main():
     all_fnames = list_data_file_names()

     current_fname = None
     for previous_fname in reversed(all_fnames):
-
         if current_fname is not None:
             data_today = load_json(fname=current_fname)
             data_yesterday = load_json(fname=previous_fname)
11 changes: 8 additions & 3 deletions post_to_discord.py
@@ -4,9 +4,14 @@


 def main():
-    post_git_diff_to_discord_using_keyword(fname=PAGE_MAPPINGS_FNAME, webhook_keyword=WEBHOOK_KEYWORD_NEW)
-    post_git_diff_to_discord_using_keyword(fname=SANDBOX_IDS_FNAME, webhook_keyword=WEBHOOK_KEYWORD_TROPHY)
-
+    post_git_diff_to_discord_using_keyword(
+        fname=PAGE_MAPPINGS_FNAME,
+        webhook_keyword=WEBHOOK_KEYWORD_NEW,
+    )
+    post_git_diff_to_discord_using_keyword(
+        fname=SANDBOX_IDS_FNAME,
+        webhook_keyword=WEBHOOK_KEYWORD_TROPHY,
+    )


 if __name__ == '__main__':
4 changes: 3 additions & 1 deletion src/achievement_utils.py
@@ -1,5 +1,7 @@
 def list_all_unlock_percentages(achievement_data):
-    return [e['achievement']['rarity']['percent'] for e in achievement_data['achievements']]
+    return [
+        e['achievement']['rarity']['percent'] for e in achievement_data['achievements']
+    ]


 def compute_max_unlock_percentage(achievement_data):
6 changes: 5 additions & 1 deletion src/data_consistency.py
@@ -23,4 +23,8 @@ def has_consistent_stats(v):
     has_consistent_ratings = has_consistent_average_rating(v) and has_consistent_rating_count(v)
     has_consistent_players = has_consistent_num_players(v) and has_consistent_num_platinum(v)
     has_consistent_achievements = has_consistent_max_rarity(v)
-    return has_consistent_ratings and has_consistent_players and has_consistent_achievements
+    return (
+        has_consistent_ratings
+        and has_consistent_players
+        and has_consistent_achievements
+    )
11 changes: 9 additions & 2 deletions src/download_utils.py
@@ -18,7 +18,11 @@ def download_page_slugs(include_dlc=False):

     for cursor in range(0, num_elements, MAX_STEP_SIZE):
         print(f'Cursor = {cursor}')
-        store_data = to_store_data(cursor=cursor, step=MAX_STEP_SIZE, include_dlc=include_dlc)
+        store_data = to_store_data(
+            cursor=cursor,
+            step=MAX_STEP_SIZE,
+            include_dlc=include_dlc,
+        )
         page_slugs += get_page_slugs(store_data)

     return page_slugs
@@ -42,7 +46,10 @@ def download_page_mappings(page_slugs, known_page_mappings=None):
     return page_mappings


-def download_achievement_support_to_filter_page_mappings(page_mappings, known_support=None):
+def download_achievement_support_to_filter_page_mappings(
+    page_mappings,
+    known_support=None,
+):
     if known_support is None:
         known_support = {}

29 changes: 23 additions & 6 deletions src/export_utils.py
@@ -27,18 +27,35 @@ def write_markdown_files(data):
     sorted_data = dict(sorted(data.items(), key=lambda x: x[1]["slug"]))
     write_data_to_disk(sorted_data, f"{OUTPUT_FOLDER}/by_game_slug.md")

-    sorted_data = dict(sorted(data.items(), key=lambda x: deal_with_none(x[1]["averageRating"]), reverse=True))
+    sorted_data = dict(
+        sorted(
+            data.items(),
+            key=lambda x: deal_with_none(x[1]["averageRating"]),
+            reverse=True,
+        ),
+    )
     write_data_to_disk(sorted_data, f"{OUTPUT_FOLDER}/by_average_rating.md")

-    sorted_data = dict(sorted(data.items(), key=lambda x: deal_with_none(x[1]["ratingCount"]), reverse=True))
+    sorted_data = dict(
+        sorted(
+            data.items(),
+            key=lambda x: deal_with_none(x[1]["ratingCount"]),
+            reverse=True,
+        ),
+    )
     write_data_to_disk(sorted_data, f"{OUTPUT_FOLDER}/by_num_raters.md")

-    sorted_data = dict(sorted(data.items(), key=lambda x: x[1]["numProgressed"], reverse=True))
+    sorted_data = dict(
+        sorted(data.items(), key=lambda x: x[1]["numProgressed"], reverse=True),
+    )
     write_data_to_disk(sorted_data, f"{OUTPUT_FOLDER}/by_num_players.md")

-    sorted_data = dict(sorted(data.items(), key=lambda x: x[1]["numCompleted"], reverse=True))
+    sorted_data = dict(
+        sorted(data.items(), key=lambda x: x[1]["numCompleted"], reverse=True),
+    )
     write_data_to_disk(sorted_data, f"{OUTPUT_FOLDER}/by_num_platinum_trophies.md")

-    sorted_data = dict(sorted(data.items(), key=lambda x: x[1]["maxRarity"], reverse=True))
+    sorted_data = dict(
+        sorted(data.items(), key=lambda x: x[1]["maxRarity"], reverse=True),
+    )
     write_data_to_disk(sorted_data, f"{OUTPUT_FOLDER}/by_max_rarity.md")
-
6 changes: 5 additions & 1 deletion src/fields.py
@@ -2,4 +2,8 @@
 GAME_RATING_FIELDS = ['averageRating', 'ratingCount']
 ACHIEVEMENT_FIELDS = ['numProgressed', 'numCompleted', 'maxRarity']
 GAME_RATING_HEADERS = ["Average Rating", "Number of Raters"]
-ACHIEVEMENT_HEADERS = ["Number of Players", "Number of Platinum Trophies", "Max Rarity (%)"]
+ACHIEVEMENT_HEADERS = [
+    "Number of Players",
+    "Number of Platinum Trophies",
+    "Max Rarity (%)",
+]
8 changes: 5 additions & 3 deletions src/query_achievement.py
@@ -4,9 +4,11 @@


 def get_params_to_query_achievement(sandbox_id):
     query_str = "{"
-    query_str += get_query_str_for_achievements(sandbox_id,
-                                                include_num_achievements=False,
-                                                include_achievement_details=True)
+    query_str += get_query_str_for_achievements(
+        sandbox_id,
+        include_num_achievements=False,
+        include_achievement_details=True,
+    )
     query_str += "}"

     params = {"query": query_str}
8 changes: 5 additions & 3 deletions src/query_achievement_support.py
@@ -4,9 +4,11 @@


 def get_params_to_query_achievement_support(sandbox_id):
     query_str = "{"
-    query_str += get_query_str_for_achievements(sandbox_id,
-                                                include_num_achievements=True,
-                                                include_achievement_details=False)
+    query_str += get_query_str_for_achievements(
+        sandbox_id,
+        include_num_achievements=True,
+        include_achievement_details=False,
+    )
     query_str += "}"

     params = {"query": query_str}
6 changes: 5 additions & 1 deletion src/query_utils.py
@@ -3,7 +3,11 @@ def format_params_for_query_str(sandbox_id):
     return params_str


-def get_query_str_for_achievements(sandbox_id, include_num_achievements=True, include_achievement_details=True):
+def get_query_str_for_achievements(
+    sandbox_id,
+    include_num_achievements=True,
+    include_achievement_details=True,
+):
     query_str = "Achievement {productAchievementsRecordBySandbox"
     query_str += format_params_for_query_str(sandbox_id)
     query_str += "{"
1 change: 1 addition & 0 deletions src/time_utils.py
@@ -23,6 +23,7 @@ def get_fname_for_specific_day(date):
     folder_name = get_folder_name_for_specific_day(date)
     return f"{folder_name}/{date.day:02}.json"

+
 def get_fname_for_yesterday():
     date = get_current_date() - timedelta(days=1)
     return get_fname_for_specific_day(date)
16 changes: 12 additions & 4 deletions src/workflow_utils.py
@@ -17,7 +17,10 @@ def update_all_page_mappings(page_slugs, known_page_mappings=None):
     if known_page_mappings is None:
         known_page_mappings = load_all_page_mappings()

-    page_mappings = download_page_mappings(page_slugs, known_page_mappings=known_page_mappings)
+    page_mappings = download_page_mappings(
+        page_slugs,
+        known_page_mappings=known_page_mappings,
+    )
     save_json(sort_dict_by_key(page_mappings), PAGE_MAPPINGS_FNAME, prettify=True)

     new_game_slugs = extract_list_difference(page_mappings, known_page_mappings)
@@ -29,11 +32,16 @@ def update_tracked_page_mappings(page_slugs, known_page_mappings=None):
     if known_page_mappings is None:
         known_page_mappings = load_all_page_mappings()

-    page_mappings_of_interest = filter_page_mappings_based_on_slugs(known_page_mappings, page_slugs)
+    page_mappings_of_interest = filter_page_mappings_based_on_slugs(
+        known_page_mappings,
+        page_slugs,
+    )

     known_support = load_tracked_page_mappings()
-    sandbox_ids_dict = download_achievement_support_to_filter_page_mappings(page_mappings_of_interest,
-                                                                            known_support=known_support)
+    sandbox_ids_dict = download_achievement_support_to_filter_page_mappings(
+        page_mappings_of_interest,
+        known_support=known_support,
+    )
     save_json(sort_dict_by_key(sandbox_ids_dict), SANDBOX_IDS_FNAME, prettify=True)

     new_tracked_game_slugs = extract_list_difference(sandbox_ids_dict, known_support)
