Minor refactor in an attempt to speed up the site
db.py (59 lines changed)
@@ -534,7 +534,7 @@ class DataBase():
                 w.workout_id,
                 to_char(w.start_date, 'Mon DD YYYY') AS formatted_start_date,
                 w.note,
-                t.filter as tag_filter,
+                t.filter AS tag_filter,
                 t.name AS tag_name
             FROM person p
             LEFT JOIN workout w ON p.person_id = w.person_id AND w.note IS NOT NULL AND w.note <> ''
@@ -545,20 +545,20 @@ class DataBase():
         """

-        # Execute the SQL query
-        raw_workout_notes = self.execute(sql_query, [person_id])
+        raw_data = self.execute(sql_query, [person_id])

-        # Initialize variables to hold the person's name and the workouts
-        person_name = None
+        if not raw_data:
+            return None, []
+
+        # Extract person name from the first row (all rows have the same person name)
+        person_name = raw_data[0]['person_name']
+
+        # Process the workout notes
         workout_notes = {}

-        for row in raw_workout_notes:
-            # Update person_name (it will be the same for all rows)
-            if person_name is None:
-                person_name = row['person_name']
-
-            # Process workout notes and tags if there's a note associated with the workout
-            if row['workout_id'] and row['note']:  # Check if workout_id exists and note is not None or empty
+        for row in raw_data:
+            workout_id = row['workout_id']
+            if workout_id and row['note']:
                 # Initialize the workout entry if it doesn't exist
                 if workout_id not in workout_notes:
                     workout_notes[workout_id] = {
                         'workout_id': workout_id,
@@ -566,37 +566,38 @@ class DataBase():
                         'note': row['note'],
                         'tags': []
                     }
-                if row['tag_name']:  # Only add the tag if it is not None
-                    workout_notes[workout_id]['tags'].append({'tag_filter': row['tag_filter'], 'tag_name': row['tag_name'], 'person_id': person_id})
+                # Add tags if present
+                if row['tag_name']:
+                    workout_notes[workout_id]['tags'].append({
+                        'tag_filter': row['tag_filter'],
+                        'tag_name': row['tag_name'],
+                        'person_id': person_id
+                    })
+

-        # Convert the workout_notes dictionary back into a list as the final result
+        # Convert to a list for the final output
         workout_notes_list = list(workout_notes.values())
+        return person_name, workout_notes_list

-        # Return a tuple containing the person's name and their workout notes
-        return (person_name, workout_notes_list)

     def get_exercise_earliest_and_latest_dates(self, person_id, exercise_id):
         sql_query = """
             SELECT
-                w.start_date
+                MIN(w.start_date) AS earliest_date,
+                MAX(w.start_date) AS latest_date
             FROM workout w
-            INNER JOIN topset t on w.workout_id = t.workout_id
-            INNER JOIN exercise e on t.exercise_id = e.exercise_id
-            WHERE w.person_id = %s AND e.exercise_id = %s
-            ORDER BY w.start_date DESC;
+            INNER JOIN topset t ON w.workout_id = t.workout_id
+            INNER JOIN exercise e ON t.exercise_id = e.exercise_id
+            WHERE w.person_id = %s AND e.exercise_id = %s;
         """

-        # Execute the SQL query
-        workout_exercise_dates = self.execute(sql_query, [person_id, exercise_id])
+        result = self.execute(sql_query, [person_id, exercise_id])

-        if not workout_exercise_dates:
+        if not result or not result[0]:
            return None, None

-        latest_date = workout_exercise_dates[0]['start_date']
-        earliest_date = workout_exercise_dates[-1]['start_date']
-
-        return earliest_date, latest_date
+        return result[0]['earliest_date'], result[0]['latest_date']
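Note on the db.py change: get_exercise_earliest_and_latest_dates now lets the database compute MIN/MAX and return a single aggregate row, instead of fetching every matching workout ordered by date and picking the first and last rows in Python. A minimal usage sketch, assuming a DataBase instance is available (its construction and the concrete IDs are placeholders, not part of this commit):

# Hypothetical caller; DataBase() construction and the IDs below are illustrative only.
db = DataBase()
earliest, latest = db.get_exercise_earliest_and_latest_dates(person_id=1, exercise_id=3)

if earliest is None:
    # The aggregate row contains NULLs when the person has no top sets for this exercise.
    print("No recorded sessions for this exercise")
else:
    print(f"Trained between {earliest} and {latest}")

Fewer rows cross the wire and the ORDER BY disappears, which is where the intended speed-up comes from.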
utils.py (75 lines changed)
@@ -217,83 +217,71 @@ def flatten(lst):
     return result


 def get_exercise_graph_model(title, estimated_1rm, repetitions, weight, start_dates, messages, epoch, person_id, exercise_id, min_date=None, max_date=None):
+    # Precompute ranges
+    min_date, max_date = min(start_dates), max(start_dates)
+    total_span = (max_date - min_date).days or 1

     min_e1rm, max_e1rm = min(estimated_1rm), max(estimated_1rm)
     min_reps, max_reps = min(repetitions), max(repetitions)
     min_weight, max_weight = min(weight), max(weight)

+    e1rm_range = max_e1rm - min_e1rm or 1
+    reps_range = max_reps - min_reps or 1
+    weight_range = max_weight - min_weight or 1

-    # Calculate viewBox dimensions
-    date_range = max_date - min_date
-    total_span = date_range.days or 1
-    e1rm_range = (max_e1rm - min_e1rm) or 1
-    reps_range = (max_reps - min_reps) or 1
-    weight_range = (max_weight - min_weight) or 1
     vb_width, vb_height = total_span, e1rm_range
     vb_width *= 200 / vb_width  # Scale to 200px width
     vb_height *= 75 / vb_height  # Scale to 75px height

-    # Scale estimated_1rm values for SVG plotting
-    estimated_1rm_scaled = [((value - min_e1rm) / e1rm_range) * vb_height for value in estimated_1rm]
-    repetitions_scaled = [((value - min_reps) / reps_range) * vb_height for value in repetitions]
-    weight_scaled = [((value - min_weight) / weight_range) * vb_height for value in weight]
-
-    relative_positions = [(date - min_date).days / total_span for date in start_dates]
-
-    # Convert the slope from scaled units per day to kg per day
-    slope_kg_per_day = (max_e1rm - min_e1rm) / total_span
+    # Use NumPy arrays for efficient scaling
+    relative_positions = np.array([(date - min_date).days / total_span for date in start_dates])
+    estimated_1rm_scaled = ((np.array(estimated_1rm) - min_e1rm) / e1rm_range) * vb_height
+    repetitions_scaled = ((np.array(repetitions) - min_reps) / reps_range) * vb_height
+    weight_scaled = ((np.array(weight) - min_weight) / weight_range) * vb_height
+
+    # Calculate slope and line of best fit
+    slope_kg_per_day = e1rm_range / total_span
     best_fit_formula = {
-        'kg_per_week': round(slope_kg_per_day * 7, 1),  # Convert to kg/week
-        'kg_per_month': round(slope_kg_per_day * 30, 1)  # Convert to kg/month
+        'kg_per_week': round(slope_kg_per_day * 7, 1),
+        'kg_per_month': round(slope_kg_per_day * 30, 1)
     }

     best_fit_points = []

-    # Catch LinAlgError
     try:
-        # Convert relative positions and scaled estimated 1RM values to numpy arrays
-        x = np.array(relative_positions)
-        y = np.array(estimated_1rm_scaled)
-
-        # Calculate the slope (m) and y-intercept (b) of the line of best fit
-        m, b = np.polyfit(x, y, 1)
-
-        # Generate points along the line of best fit
-        y_best_fit = [m * xi + b for xi in x]
-        best_fit_points = list(zip(y_best_fit, relative_positions))
-
-    except:
+        # Calculate line of best fit using NumPy
+        m, b = np.polyfit(relative_positions, estimated_1rm_scaled, 1)
+        y_best_fit = m * relative_positions + b
+        best_fit_points = list(zip(y_best_fit.tolist(), relative_positions.tolist()))
+    except np.linalg.LinAlgError:
         pass

-    # Create messages and zip data for SVG plotting
-    estimated_1rm_points = zip(estimated_1rm_scaled, relative_positions)
-    repetitions_points = zip(repetitions_scaled, relative_positions)
-    weight_points = zip(weight_scaled, relative_positions)
-
-    repetitions = {
+    # Prepare data for plots
+    repetitions_data = {
         'label': 'Reps',
         'color': '#388fed',
-        'points': list(repetitions_points)
+        'points': list(zip(repetitions_scaled.tolist(), relative_positions.tolist()))
     }
-    weight = {
+    weight_data = {
         'label': 'Weight',
         'color': '#bd3178',
-        'points': list(weight_points)
+        'points': list(zip(weight_scaled.tolist(), relative_positions.tolist()))
     }
-    estimated_1rm = {
+    estimated_1rm_data = {
         'label': 'E1RM',
         'color': '#2ca02c',
-        'points': list(estimated_1rm_points)
+        'points': list(zip(estimated_1rm_scaled.tolist(), relative_positions.tolist()))
     }

-    plot_labels = zip(relative_positions, messages)
+    # Prepare plot labels
+    plot_labels = list(zip(relative_positions.tolist(), messages))

     # Return exercise data with SVG dimensions and data points
     return {
         'title': title,
         'vb_width': vb_width,
         'vb_height': vb_height,
-        'plots': [repetitions, weight, estimated_1rm],
+        'plots': [repetitions_data, weight_data, estimated_1rm_data],
         'best_fit_points': best_fit_points,
         'best_fit_formula': best_fit_formula,
         'plot_labels': plot_labels,
@@ -305,6 +293,7 @@ def get_exercise_graph_model(title, estimated_1rm, repetitions, weight, start_da
         'max_date': max_date
     }

+
 def get_workout_counts(workouts, period='week'):
     df = pd.DataFrame(workouts)
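Note on the utils.py change: the per-series Python list comprehensions are replaced with NumPy array arithmetic, and the best-fit line is computed directly on those arrays. A self-contained sketch of the same pattern with made-up numbers (only np.polyfit and np.linalg.LinAlgError are relied on from NumPy; every other name and value here is illustrative):

import numpy as np

# Synthetic stand-ins for one exercise's history; not taken from the app's data.
estimated_1rm = [100.0, 102.5, 105.0, 107.5]   # kg
days_since_start = [0, 7, 14, 28]

total_span = max(days_since_start) or 1
min_e1rm, max_e1rm = min(estimated_1rm), max(estimated_1rm)
e1rm_range = (max_e1rm - min_e1rm) or 1
vb_height = 75  # SVG viewBox height, as in the diff

# One vectorized expression per series instead of a Python-level loop.
relative_positions = np.array(days_since_start) / total_span
e1rm_scaled = ((np.array(estimated_1rm) - min_e1rm) / e1rm_range) * vb_height

try:
    # A degree-1 fit returns (slope, intercept); LinAlgError covers degenerate input.
    m, b = np.polyfit(relative_positions, e1rm_scaled, 1)
    best_fit_points = list(zip((m * relative_positions + b).tolist(),
                               relative_positions.tolist()))
except np.linalg.LinAlgError:
    best_fit_points = []

print(best_fit_points)

The .tolist() calls before zip mirror the diff and keep the plotted points as plain Python floats rather than NumPy scalars.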