Refactor dashboard
This commit is contained in:
142
utils.py
142
utils.py
@@ -5,119 +5,6 @@ import pandas as pd
|
||||
import plotly.express as px
|
||||
import plotly.io as pio
|
||||
|
||||
def get_workouts(topsets):
    """Group flat topset rows into workout records, most recent first.

    Rows missing either 'WorkoutId' or 'TopSetId' are dropped. Each result
    dict carries 'WorkoutId', 'StartDate' (taken from the first row seen for
    that workout) and a 'TopSets' list ordered by TopSetId within the workout.
    """
    # Keep only rows that belong to a persisted workout/topset, then order
    # them so topsets land in a deterministic order inside each workout.
    usable = [
        row for row in topsets
        if row['WorkoutId'] is not None and row['TopSetId'] is not None
    ]
    usable.sort(key=lambda row: (row['WorkoutId'], row['TopSetId']))

    grouped = {}
    for row in usable:
        wid = row['WorkoutId']
        # First row for a workout also supplies its StartDate.
        workout = grouped.setdefault(wid, {
            'WorkoutId': wid,
            'StartDate': row['StartDate'],
            'TopSets': [],
        })
        workout['TopSets'].append({
            'TopSetId': row['TopSetId'],
            'ExerciseId': row['ExerciseId'],
            'ExerciseName': row['ExerciseName'],
            'Weight': row['Weight'],
            'Repetitions': row['Repetitions'],
            'Estimated1RM': row['Estimated1RM'],
        })

    # Newest workout first.
    return sorted(grouped.values(), key=lambda w: w['StartDate'], reverse=True)
||||
def get_all_exercises_from_topsets(topsets):
    """Return the distinct exercises referenced by *topsets*.

    First occurrence of an ExerciseId wins; rows with a falsy/absent
    ExerciseId are ignored. ExerciseName falls back to 'Unknown'.
    """
    unique = {}
    for row in topsets:
        ex_id = row.get('ExerciseId')
        if not ex_id or ex_id in unique:
            continue  # no exercise on this row, or already collected
        unique[ex_id] = {
            'ExerciseId': ex_id,
            'ExerciseName': row.get('ExerciseName', 'Unknown'),
        }
    return list(unique.values())
||||
def get_topsets_for_person(person_topsets):
    """Build per-exercise summaries for one person's topsets.

    Each summary carries the exercise id/name, the topsets sorted newest
    first, and a progress-graph model built by get_exercise_graph_model.
    Assumes every row has PersonId, ExerciseId, ExerciseName, StartDate,
    Weight, Repetitions and Estimated1RM; StartDate must support
    strftime — TODO confirm against callers.
    """
    # Group topsets by ExerciseId
    grouped_topsets = {}
    for topset in person_topsets:
        exercise_id = topset['ExerciseId']
        if exercise_id in grouped_topsets:
            grouped_topsets[exercise_id].append(topset)
        else:
            grouped_topsets[exercise_id] = [topset]

    # Process each group of topsets
    exercises_topsets = []
    for exercise_id, topsets in grouped_topsets.items():
        # Sort topsets by StartDate in descending order
        sorted_topsets = sorted(topsets, key=lambda x: x['StartDate'], reverse=True)

        # Extracting values and calculating value ranges for SVG dimensions
        estimated_1rm = [t['Estimated1RM'] for t in sorted_topsets]
        repetitions = [t['Repetitions'] for t in sorted_topsets]
        weight = [t['Weight'] for t in sorted_topsets]
        start_dates = [t['StartDate'] for t in sorted_topsets]
        # Human-readable tooltip/label per topset, e.g. "5 x 100kg (115kg E1RM) on 01 Jan 23".
        messages = [f'{t["Repetitions"]} x {t["Weight"]}kg ({t["Estimated1RM"]}kg E1RM) on {t["StartDate"].strftime("%d %b %y")}' for t in sorted_topsets]
        epoch = 'All'
        person_id = sorted_topsets[0]['PersonId']
        exercise_name = sorted_topsets[0]['ExerciseName']

        # Only emit an entry when all series are non-empty, so the graph model
        # always receives usable data. NOTE(review): if the append was meant to
        # run unconditionally, exercise_progress would need a None default
        # before this guard — confirm intended behavior with the author.
        if exercise_name and estimated_1rm and repetitions and weight and start_dates and messages:
            exercise_progress = get_exercise_graph_model(exercise_name, estimated_1rm, repetitions, weight, start_dates, messages, epoch, person_id, exercise_id)

            exercises_topsets.append({
                'ExerciseId': exercise_id,
                'ExerciseName': exercise_name,
                'Topsets': sorted_topsets,
                'ExerciseProgressGraph': exercise_progress
            })

    return exercises_topsets
||||
def get_people_and_exercise_rep_maxes(topsets, selected_person_ids, selected_exercise_ids, min_date, max_date):
    """Summarise topsets per person for the selected people/exercises/date window.

    Returns {"People": [...]} where each person dict carries PersonId,
    PersonName, NumberOfWorkouts (distinct truthy WorkoutIds) and the
    per-exercise breakdown from get_topsets_for_person.
    """
    # Apply all three filters in a single pass over the input.
    relevant = [
        row for row in topsets
        if row['PersonId'] in selected_person_ids
        and row['ExerciseId'] in selected_exercise_ids
        and min_date <= row['StartDate'] <= max_date
    ]

    # Bucket the surviving rows by person.
    by_person = {}
    for row in relevant:
        by_person.setdefault(row['PersonId'], []).append(row)

    people = []
    for pid, rows in by_person.items():
        # Count distinct workouts, ignoring rows without a workout id.
        distinct_workouts = {row['WorkoutId'] for row in rows if row['WorkoutId']}
        people.append({
            'PersonId': pid,
            'PersonName': rows[0]['PersonName'],
            'NumberOfWorkouts': len(distinct_workouts),
            'Exercises': get_topsets_for_person(rows),
        })

    return {"People": people}
||||
def convert_str_to_date(date_str, format='%Y-%m-%d'):
    """Parse *date_str* with *format* and return a datetime.date.

    Returns None instead of raising when the input is None / not a string
    (TypeError) or does not match the format (ValueError), so callers can
    pass optional values straight through.
    """
    # NOTE: 'format' shadows the builtin, but the name is part of the
    # public keyword interface, so it is kept.
    try:
        return datetime.strptime(date_str, format).date()
    except (ValueError, TypeError):
        # Malformed string or non-string input — treat both as "no date".
        return None
||||
def flatten_list(list_of_lists):
    """Concatenate one level of nesting: [[a, b], [c]] -> [a, b, c]."""
    flat = []
    for inner in list_of_lists:
        flat.extend(inner)
    return flat
||||
def first_and_last_visible_days_in_month(first_day_of_month, last_day_of_month):
    """Return the (start, end) dates of a Sunday-to-Saturday calendar grid
    that fully contains the month bounded by the two given dates."""
    # date.weekday(): Monday=0 .. Sunday=6. Back up to the preceding Sunday
    # (0 days when the month already starts on a Sunday).
    days_before = (first_day_of_month.weekday() + 1) % 7
    start_date = first_day_of_month - timedelta(days=days_before)

    # Push forward to the following Saturday (0 days when it already is one).
    days_after = (5 - last_day_of_month.weekday()) % 7
    end_date = last_day_of_month + timedelta(days=days_after)
    return (start_date, end_date)
||||
def flatten(lst):
    """
    Flatten arbitrarily nested lists into a single flat list.

    Only list instances are descended into; tuples, strings and other
    iterables are kept as atoms. Order is a left-to-right depth-first walk.
    """
    flat = []
    # Explicit stack of iterators instead of recursion — same traversal order.
    stack = [iter(lst)]
    while stack:
        try:
            item = next(stack[-1])
        except StopIteration:
            stack.pop()
            continue
        if isinstance(item, list):
            stack.append(iter(item))
        else:
            flat.append(item)
    return flat
||||
def get_exercise_graph_model(title, estimated_1rm, repetitions, weight, start_dates, messages, epoch, person_id, exercise_id, min_date=None, max_date=None):
|
||||
# Precompute ranges
|
||||
min_date, max_date = min(start_dates), max(start_dates)
|
||||
|
||||
Reference in New Issue
Block a user