import colorsys
from datetime import datetime, date, timedelta
import random

import numpy as np
import pandas as pd


def get_workouts(topsets):
    # Get all unique workout_ids (no duplicates)
    workout_ids = list(set([t['WorkoutId']
                            for t in topsets if t['WorkoutId'] is not None]))

    # Group topsets into workouts
    workouts = []
    for workout_id in reversed(workout_ids):
        topsets_in_workout = [
            t for t in topsets if t['WorkoutId'] == workout_id]
        workouts.append({
            'WorkoutId': workout_id,
            'StartDate': topsets_in_workout[0]['StartDate'],
            'TopSets': [{"TopSetId": t['TopSetId'], "ExerciseId": t['ExerciseId'], "ExerciseName": t['ExerciseName'], "Weight": t['Weight'], "Repetitions": t['Repetitions'], "Estimated1RM": t['Estimated1RM']} for t in topsets_in_workout if t['TopSetId'] is not None]
        })

    workouts.sort(key=lambda x: x['StartDate'], reverse=True)

    return workouts


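# Illustrative sketch (not part of the original module): get_workouts expects a
# flat list of topset rows, e.g.
#   [{'WorkoutId': 1, 'StartDate': date(2024, 1, 2), 'TopSetId': 10,
#     'ExerciseId': 3, 'ExerciseName': 'Squat', 'Weight': 100,
#     'Repetitions': 5, 'Estimated1RM': 112.5}, ...]
# and returns one dict per unique WorkoutId with its TopSets nested, sorted by
# StartDate descending.

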
def get_all_exercises_from_topsets(topsets):
    exercise_ids = set([t['ExerciseId']
                        for t in topsets if t['ExerciseId'] is not None])
    exercises = []
    for exercise_id in exercise_ids:
        exercises.append({
            'ExerciseId': exercise_id,
            'ExerciseName': next((t['ExerciseName'] for t in topsets if t['ExerciseId'] == exercise_id), 'Unknown')
        })
    return exercises


def get_topsets_for_person(person_topsets):
    person_exercises = get_all_exercises_from_topsets(person_topsets)

    exercises_topsets = []
    for e in person_exercises:
        exercise_topsets = [t for t in person_topsets if t['ExerciseId'] == e['ExerciseId']]

        # Sort topsets by StartDate in descending order
        sorted_topsets = sorted(exercise_topsets, key=lambda x: x['StartDate'], reverse=True)

        # Extract the values used for the SVG progress graph
        estimated_1rm = [t['Estimated1RM'] for t in exercise_topsets]
        repetitions = [t['Repetitions'] for t in exercise_topsets]
        weight = [t['Weight'] for t in exercise_topsets]
        start_dates = [t['StartDate'] for t in exercise_topsets]
        messages = [f'{t["Repetitions"]} x {t["Weight"]}kg ({t["Estimated1RM"]}kg E1RM) on {t["StartDate"].strftime("%d %b %y")}' for t in exercise_topsets]

        exercise_progress = get_exercise_graph_model(exercise_topsets[0]['ExerciseName'], estimated_1rm, repetitions, weight, start_dates, messages)

        exercises_topsets.append({
            'ExerciseId': e['ExerciseId'],
            'ExerciseName': e['ExerciseName'],
            'Topsets': sorted_topsets,
            'ExerciseProgressGraph': exercise_progress
        })

    return exercises_topsets


def get_people_and_exercise_rep_maxes(topsets, selected_person_ids, selected_exercise_ids, min_date, max_date):
    # Get all unique person ids, then keep only the selected ones
    people_ids = set([t['PersonId']
                      for t in topsets])
    filtered_people_ids = [p for p in people_ids if p in selected_person_ids]

    # Group the filtered topsets by person
    people = []
    for person_id in filtered_people_ids:
        workouts_for_person = [
            t for t in topsets if t['PersonId'] == person_id and t['ExerciseId'] in selected_exercise_ids and t['StartDate'] >= min_date and t['StartDate'] <= max_date]
        if workouts_for_person:
            people.append({
                'PersonId': person_id,
                'PersonName': workouts_for_person[0]['PersonName'],
                'NumberOfWorkouts': len(list(set([t['WorkoutId'] for t in workouts_for_person if t['WorkoutId'] is not None]))),
                'Exercises': get_topsets_for_person(workouts_for_person)
            })
    return {"People": people, "Stats": get_stats_from_topsets(topsets)}


def get_stats_from_topsets(topsets):
    workout_count = len(set([t['WorkoutId']
                             for t in topsets if t['WorkoutId'] is not None]))
    people_count = len(set([t['PersonId']
                            for t in topsets if t['PersonId'] is not None]))
    workout_start_dates = [t['StartDate']
                           for t in topsets if t['StartDate'] is not None]

    stats = [{"Text": "Total Workouts", "Value": workout_count},
             {"Text": "Total Sets", "Value": len(topsets)}]
    if people_count > 1:
        stats.append({"Text": "People tracked", "Value": people_count})
    if workout_count > 0:
        first_workout_date = min(workout_start_dates)
        last_workout_date = max(workout_start_dates)

        stats.append({"Text": "Days Since First Workout", "Value": (
            date.today() - first_workout_date).days})
        if workout_count >= 2:
            stats.append({"Text": "Days Since Last Workout",
                          "Value": (
                              date.today() - last_workout_date).days})
            average_number_sets_per_workout = round(
                len(topsets) / workout_count, 1)
            stats.append({"Text": "Average sets per workout",
                          "Value": average_number_sets_per_workout})

            training_duration = last_workout_date - first_workout_date
            if training_duration > timedelta(days=0):
                average_workouts_per_week = round(
                    workout_count / (training_duration.days / 7), 1)
                stats.append({"Text": "Average Workouts Per Week",
                              "Value": average_workouts_per_week})

    return stats


def convert_str_to_date(date_str, format='%Y-%m-%d'):
    try:
        return datetime.strptime(date_str, format).date()
    except (ValueError, TypeError):
        return None


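# Illustrative sketch (not part of the original module):
#   convert_str_to_date('2024-01-15')  ->  date(2024, 1, 15)
#   convert_str_to_date('not-a-date')  ->  None
#   convert_str_to_date(None)          ->  None

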
def get_earliest_and_latest_workout_date(person):
    if len(person['Workouts']) > 0:
        return (min(person['Workouts'], key=lambda x: x['StartDate'])['StartDate'], max(person['Workouts'], key=lambda x: x['StartDate'])['StartDate'])
    return (datetime.now().date(), datetime.now().date())


def filter_workout_topsets(workout, selected_exercise_ids):
    workout['TopSets'] = [topset for topset in workout['TopSets']
                          if topset['ExerciseId'] in selected_exercise_ids]
    return workout


def get_exercise_ids_from_workouts(workouts):
    return list(set(flatten_list(list(map(lambda x: list(
        map(lambda y: y['ExerciseId'], x['TopSets'])), workouts)))))


def flatten_list(list_of_lists):
    return [item for sublist in list_of_lists for item in sublist]


def first_and_last_visible_days_in_month(first_day_of_month, last_day_of_month):
    # Offset from each weekday() (Mon=0 .. Sun=6) back to the previous Sunday
    start = dict([(6, 0), (0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 6)])
    start_date = first_day_of_month - \
        timedelta(days=start[first_day_of_month.weekday()])

    # Offset from each weekday() forward to the next Saturday
    end = dict([(6, 6), (0, 5), (1, 4), (2, 3), (3, 2), (4, 1), (5, 0)])
    end_date = last_day_of_month + \
        timedelta(days=end[last_day_of_month.weekday()])
    return (start_date, end_date)


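# Illustrative sketch (not part of the original module): for March 2024, whose
# first day (Fri 1 Mar) and last day (Sun 31 Mar) are passed in, the visible
# calendar range is padded to whole Sunday-to-Saturday weeks:
#   first_and_last_visible_days_in_month(date(2024, 3, 1), date(2024, 3, 31))
#   ->  (date(2024, 2, 25), date(2024, 4, 6))

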
def flatten(lst):
    """
    Flatten an arbitrarily nested list of lists.
    """
    result = []
    for item in lst:
        if isinstance(item, list):
            result.extend(flatten(item))
        else:
            result.append(item)
    return result


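# Illustrative sketch (not part of the original module): unlike flatten_list,
# which removes a single level of nesting, flatten recurses:
#   flatten([1, [2, [3, 4]], 5])  ->  [1, 2, 3, 4, 5]

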
def get_date_info(input_date, selected_view):
    if selected_view not in ['month', 'year']:
        raise ValueError(
            'selected_view must be either "month" or "year"')

    # First day of the month
    first_day_of_month = input_date.replace(day=1)

    # Last day of the month
    if input_date.month == 12:
        last_day_of_month = input_date.replace(
            year=input_date.year+1, month=1, day=1) - timedelta(days=1)
    else:
        last_day_of_month = input_date.replace(
            month=input_date.month+1, day=1) - timedelta(days=1)

    # First and last day of the year
    first_day_of_year = input_date.replace(month=1, day=1)
    last_day_of_year = input_date.replace(
        year=input_date.year+1, month=1, day=1) - timedelta(days=1)

    # Next/previous month
    year, month = divmod(input_date.year * 12 + input_date.month, 12)
    next_month = date(year, month + 1, 1)
    prev_month_last_day = first_day_of_month - timedelta(days=1)
    prev_month = prev_month_last_day.replace(day=1)

    # Next/previous year
    next_year = input_date.replace(year=input_date.year+1)
    prev_year = input_date.replace(year=input_date.year-1)

    # View-specific logic; could be extracted into a separate function
    if selected_view == 'month':
        # Step 1: Find the first Sunday on or before the first day of the month
        days_to_subtract = (first_day_of_month.weekday() + 1) % 7
        start_date = first_day_of_month - timedelta(days=days_to_subtract)

        # Step 2: Calculate the last day to display, based on a fixed six-week grid
        end_date = start_date + timedelta(days=6 * 7 - 1)

        return {
            'next_date': next_month,
            'previous_date': prev_month,
            'first_date_of_view': first_day_of_month,
            'last_date_of_view': last_day_of_month,
            'start_date': start_date,
            'end_date': end_date,
        }
    elif selected_view == 'year':
        return {
            'next_date': next_year,
            'previous_date': prev_year,
            'first_date_of_view': first_day_of_year,
            'last_date_of_view': last_day_of_year,
            'start_date': first_day_of_year,
            'end_date': last_day_of_year,
        }


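# Illustrative sketch (not part of the original module): a 'month' view for any
# day in March 2024 yields a six-week grid padded to whole Sunday-to-Saturday
# weeks:
#   get_date_info(date(2024, 3, 10), 'month')
#   ->  start_date = date(2024, 2, 25)   # the Sunday before 1 Mar
#       end_date   = date(2024, 4, 6)    # 41 days later
#       next_date  = date(2024, 4, 1), previous_date = date(2024, 2, 1)

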
def get_exercise_graph_model(title, estimated_1rm, repetitions, weight, start_dates, messages):
    min_date, max_date = min(start_dates), max(start_dates)
    min_e1rm, max_e1rm = min(estimated_1rm), max(estimated_1rm)
    min_reps, max_reps = min(repetitions), max(repetitions)
    min_weight, max_weight = min(weight), max(weight)

    # Calculate viewBox dimensions (ranges fall back to 1 so the divisions below are safe)
    date_range = max_date - min_date
    total_span = date_range.days or 1
    e1rm_range = (max_e1rm - min_e1rm) or 1
    reps_range = (max_reps - min_reps) or 1
    weight_range = (max_weight - min_weight) or 1
    vb_width, vb_height = 200, 75  # Fixed SVG viewBox size in px

    # Scale each series into the 0..vb_height range for SVG plotting
    estimated_1rm_scaled = [((value - min_e1rm) / e1rm_range) * vb_height for value in estimated_1rm]
    repetitions_scaled = [((value - min_reps) / reps_range) * vb_height for value in repetitions]
    weight_scaled = [((value - min_weight) / weight_range) * vb_height for value in weight]

    # Horizontal position of each point as a fraction of the full date span
    relative_positions = [(date - min_date).days / total_span for date in start_dates]

    best_fit_points = []
    try:
        # Convert relative positions and scaled estimated 1RM values to numpy arrays
        x = np.array(relative_positions)
        y = np.array(estimated_1rm_scaled)

        # Calculate the slope (m) and y-intercept (b) of the line of best fit
        m, b = np.polyfit(x, y, 1)

        # Generate points along the line of best fit
        y_best_fit = [m * xi + b for xi in x]
        best_fit_points = list(zip(y_best_fit, relative_positions))
    except np.linalg.LinAlgError:
        # The fit can fail on degenerate input; plot without a trend line
        pass

    # Zip data for SVG plotting
    estimated_1rm_points = zip(estimated_1rm_scaled, relative_positions)
    repetitions_points = zip(repetitions_scaled, relative_positions)
    weight_points = zip(weight_scaled, relative_positions)

    repetitions = {
        'label': 'Reps',
        'color': '#388fed',
        'points': list(repetitions_points)
    }
    weight = {
        'label': 'Weight',
        'color': '#bd3178',
        'points': list(weight_points)
    }
    estimated_1rm = {
        'label': 'E1RM',
        'color': '#2ca02c',
        'points': list(estimated_1rm_points)
    }

    plot_labels = zip(relative_positions, messages)

    # Return exercise data with SVG dimensions and data points
    return {
        'title': title,
        'vb_width': vb_width,
        'vb_height': vb_height,
        'plots': [repetitions, weight, estimated_1rm],
        'best_fit_points': best_fit_points,
        'plot_labels': plot_labels
    }


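# Illustrative sketch (not part of the original module) of the scaling above:
# an exercise with E1RM values 100, 110 and 120 kg maps to vertical positions
# 0.0, 37.5 and 75.0 (out of vb_height=75), and evenly spaced session dates map
# to horizontal positions 0.0, 0.5 and 1.0.

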
def get_workout_counts(workouts, period='week'):
    # Convert to DataFrame
    df = pd.DataFrame(workouts)

    # Convert 'StartDate' to datetime
    df['StartDate'] = pd.to_datetime(df['StartDate'])

    # Determine the range of periods to cover
    min_date = df['StartDate'].min()
    max_date = pd.Timestamp(datetime.now())

    # Generate a complete range of periods
    freq = 'W-MON' if period == 'week' else 'MS'
    period_range = pd.date_range(start=min_date, end=max_date, freq=freq)

    # Initialize a dictionary to store workout counts and person names
    # (per-period counts live under "PRCounts" to match the shape returned by
    # count_prs_over_time)
    workout_counts = {
        person_id: {
            "PersonName": person_name,
            "PRCounts": {p: 0 for p in period_range}
        } for person_id, person_name in df[['PersonId', 'PersonName']].drop_duplicates().values
    }

    # Count each person's distinct workouts per period
    for person_id, person_data in workout_counts.items():
        person_df = df[df['PersonId'] == person_id]

        for period_start in person_data["PRCounts"]:
            period_end = period_start + pd.DateOffset(weeks=1) if period == 'week' else period_start + pd.DateOffset(months=1)
            period_workouts = person_df[(person_df['StartDate'] >= period_start) & (person_df['StartDate'] < period_end)]
            unique_workout_ids = period_workouts['WorkoutId'].unique()
            person_data["PRCounts"][period_start] = len(unique_workout_ids)

    return workout_counts


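# Illustrative sketch (not part of the original module): get_workout_counts
# expects one row per topset with at least PersonId, PersonName, WorkoutId and
# StartDate, and returns
#   {person_id: {"PersonName": ..., "PRCounts": {period_start: n, ...}}}
# where n is the number of distinct WorkoutIds starting in that week or month
# (weekly periods are labelled by their pandas 'W-MON' anchor date).

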
def count_prs_over_time(workouts, period='week'):
    # Convert to DataFrame
    df = pd.DataFrame(workouts)

    # Convert 'StartDate' to datetime
    df['StartDate'] = pd.to_datetime(df['StartDate'])

    # Determine the range of periods to cover
    min_date = df['StartDate'].min()
    max_date = pd.Timestamp(datetime.now())

    # Generate a complete range of periods
    period_range = pd.date_range(start=min_date, end=max_date, freq='W-MON' if period == 'week' else 'MS')

    # Initialize a dictionary to store PR counts and names
    pr_counts = {
        person_id: {
            "PersonName": person_name,
            "PRCounts": {p: 0 for p in period_range}
        } for person_id, person_name in df[['PersonId', 'PersonName']].drop_duplicates().values
    }

    # Process the workouts
    for person_id, person_data in pr_counts.items():
        person_df = df[df['PersonId'] == person_id]

        for period_start in person_data["PRCounts"]:
            period_end = period_start + pd.DateOffset(weeks=1) if period == 'week' else period_start + pd.DateOffset(months=1)
            period_workouts = person_df[(person_df['StartDate'] >= period_start) & (person_df['StartDate'] < period_end)]

            for exercise_id in period_workouts['ExerciseId'].unique():
                exercise_max = period_workouts[period_workouts['ExerciseId'] == exercise_id]['Estimated1RM'].max()

                # Check if this is a PR
                previous_max = person_df[(person_df['StartDate'] < period_start) &
                                         (person_df['ExerciseId'] == exercise_id)]['Estimated1RM'].max()

                if pd.isna(previous_max) or exercise_max > previous_max:
                    person_data["PRCounts"][period_start] += 1

    return pr_counts


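# Illustrative sketch (not part of the original module): a period counts one PR
# per exercise whose best Estimated1RM in that period exceeds every earlier
# value. E.g. if a lifter's best squat E1RM before a given week is 100 kg and
# they hit 105 kg during it, that week's count increments by one for the squat;
# a first-ever topset for an exercise also counts (previous_max is NaN).

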
def get_weekly_pr_graph_model(title, weekly_pr_data):
    # weekly_pr_data is expected in the format
    # {1: {"PersonName": "Alice", "PRCounts": {Timestamp('2022-01-01'): 0, ...}}, 2: {...}, ...}

    # Find the overall date range for all users
    all_dates = [date for user_data in weekly_pr_data.values() for date in user_data["PRCounts"].keys()]
    min_date, max_date = min(all_dates), max(all_dates)
    total_span = (max_date - min_date).days or 1
    relative_positions = [(date - min_date).days / total_span for date in all_dates]

    # Calculate viewBox dimensions
    max_value = max(max(user_data["PRCounts"].values()) for user_data in weekly_pr_data.values()) or 1
    min_value = 0
    value_range = max_value - min_value
    vb_width = 200
    vb_height = 75

    plots = []
    colors = get_distinct_colors(len(weekly_pr_data))
    for count, (user_id, user_data) in enumerate(weekly_pr_data.items()):
        pr_counts = user_data["PRCounts"]
        person_name = user_data["PersonName"]

        values = pr_counts.values()

        values_scaled = [((value - min_value) / value_range) * vb_height for value in values]
        plot_points = list(zip(values_scaled, relative_positions))
        messages = [f'{value} for {person_name} at {date.strftime("%d %b %y")}' for value, date in zip(values, pr_counts.keys())]
        plot_labels = zip(values_scaled, relative_positions, messages)

        # Create a plot for each user
        plot = {
            'label': person_name,  # Use PersonName instead of the user id
            'color': colors[count],
            'points': plot_points,
            'plot_labels': plot_labels
        }
        plots.append(plot)

    # Return workout data with SVG dimensions and data points
    return {
        'title': title,
        'vb_width': vb_width,
        'vb_height': vb_height,
        'plots': plots
    }


def get_distinct_colors(n):
    colors = []
    for i in range(n):
        # Divide the color wheel into n parts
        hue = i / n
        # Convert HLS (Hue, Lightness, Saturation) to RGB and then to a hex string
        rgb = colorsys.hls_to_rgb(hue, 0.6, 0.4)  # Fixed lightness and saturation
        hex_color = '#{:02x}{:02x}{:02x}'.format(int(rgb[0]*255), int(rgb[1]*255), int(rgb[2]*255))
        colors.append(hex_color)
    return colors
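

# Illustrative sketch (not part of the original module): get_distinct_colors(3)
# returns three evenly spaced hues (0, 1/3 and 2/3 of the colour wheel) at a
# fixed lightness and saturation, rendered as '#rrggbb' hex strings.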