# Playground script: parses training notes (Obsidian / Apple Notes) and loads
# them into the fitness database via the repository layer.
import asyncio
import os
from collections import defaultdict
from pprint import pprint
from typing import Dict, List

from dotenv import load_dotenv
from sqlalchemy import Table, create_engine, insert, text
from sqlalchemy.ext.asyncio import create_async_engine

from apple.notes_parser import parse_training_data as apple_parse_training_data
from apple.notes_parser import remap_unique_exercises as apple_remaper
from dbapi.migrator import FitnessDatabseMigrator
from dbapi.repositories.approach_repo import ApproachRepository
from dbapi.repositories.exercise_repo import ExerciseRepository
from dbapi.repositories.training_repo import TrainingRepository
from dbapi.tables import metadata_obj, training, exercise, approach
from obsidian.notes_parser import parse_training_data, remap_unique_exercises
from obsidian.py_models import Training
# LOADING DATA

# Credentials come from a .env file; without these two engines the insertion
# loop below fails with NameError, so they must stay live.
load_dotenv()

DB_USERNAME = os.getenv("POSTGRES_USER")
DB_PASS = os.getenv("POSTGRES_PASSWORD")

# Sync engine used by the repositories below. `echo=True` logs every emitted
# SQL statement — useful for a playground, too noisy for production.
engine = create_engine(
    f"postgresql+psycopg2://{DB_USERNAME}:{DB_PASS}@localhost:5433/fitness_database",
    echo=True,
)

# Async engine for the migrator playground at the bottom of this file.
async_engine = create_async_engine(
    f"postgresql+asyncpg://{DB_USERNAME}:{DB_PASS}@localhost:5433/fitness_database",
    echo=True,
)


# NOTE: "Begin once" style - using `.begin` as context creator for SQLAlchemy
# with engine.begin() as conn:
#     result = conn.execute(text("select 'hello world'"))
#     print(result.all())
#     conn.execute(
#         text("INSERT INTO some_table(x, y) VALUES (:x, :y)"),
#         [{"x": 6, "y": 7}, {"x": 9, "y": 10}],
#     )

# NOTE: "Commit as you go" style - after managing transactions we need to call
# Connection.commit(). Otherwise ROLLBACK will be executed
# with engine.connect() as conn:
#     result = conn.execute(text("SELECT x, y FROM some_table"))
#     for row in result:
#         print(f"x: {row.x} -- y: {row.y}")

# NOTE: Create all tables from metadata object
# metadata_obj.create_all(engine)
# TODO: Check how psycopg2 handles duplication of tables
# TODO: Check how migrations are done

# NOTE: Reset the schema from scratch — drops every table registered on
# `metadata_obj`, then recreates them. Destroys all existing data!
metadata_obj.drop_all(engine)
metadata_obj.create_all(engine)

# NOTE: Table reflection - generating table object from existing tables
# (only tables, that are stored in metadata)
# some_table = Table("some_table", metadata_obj, autoload_with=engine)
# print(some_table.c)
# -----

# Inserting training values into database

trainings: List[Training] = parse_training_data()

# The repositories only hold an engine reference, so build each one once
# instead of re-instantiating them on every loop iteration.
training_repo = TrainingRepository(engine)
exercise_repo = ExerciseRepository(engine)
approach_repo = ApproachRepository(engine)

for train in trainings:
    # The parser may yield falsy entries for notes it could not parse.
    if not train:
        continue
    print(train)
    new_training_pk: int = training_repo.create_training(train.date)
    for exr in train.exercises:
        new_exercise_pk: int = exercise_repo.create_exercise(
            training_pk=new_training_pk, exercise_name=exr.name
        )
        for appr in exr.approaches:
            # Return value (new approach pk) is not needed here.
            approach_repo.create_approach(
                exercise_pk=new_exercise_pk, weight=appr.weight, reps=appr.reps
            )


print("-------------------------\n" * 2)
print("-------------------------\n" * 2)
# -----

# Calculating unique exercises for obsidian

# trainings: List[Training] = parse_training_data()
#
# unique_exercise_names = defaultdict(int)
# counter = 0
#
# for train in trainings:
#     if not train:
#         continue
#     if train.exercises:
#         for exr in train.exercises:
#             counter += 1
#             unique_exercise_names[exr.name] += 1
#
# pprint(unique_exercise_names)
# print(counter)
# parsed_trainings = remap_unique_exercises(trainings)
#
# print("\n" * 3)
#
# unique_exercise_parsed_names = defaultdict(int)
# p_counter = 0
# for train in parsed_trainings:
#     if not train:
#         continue
#     if train.exercises:
#         for exr in train.exercises:
#             p_counter += 1
#             unique_exercise_parsed_names[exr.name] += 1
# pprint(unique_exercise_parsed_names)
# print(p_counter)
# Apple notes playground

# trainings: List[Training] = apple_parse_training_data()
#
# unique_exercise_names = defaultdict(int)
# counter = 0
#
# for train in trainings:
#     if not train:
#         continue
#     if train.exercises:
#         for exr in train.exercises:
#             if exr:
#                 counter += 1
#                 unique_exercise_names[exr.name] += 1
#
# pprint(unique_exercise_names)
# print(counter)
#
# parsed_trainings = apple_remaper(trainings)
#
# print("\n" * 3)
#
# unique_exercise_parsed_names = defaultdict(int)
# p_counter = 0
# for train in parsed_trainings:
#     if not train:
#         continue
#     if train.exercises:
#         for exr in train.exercises:
#             if exr:
#                 p_counter += 1
#                 unique_exercise_parsed_names[exr.name] += 1
# pprint(unique_exercise_parsed_names)
# print(p_counter)
# Combined trainings

# obsidian_trainings: List[Training] = parse_training_data()
# obsidian_parsed_trainings = remap_unique_exercises(obsidian_trainings)
#
# apple_trainings: List[Training] = apple_parse_training_data()
# apple_parsed_trainings = apple_remaper(apple_trainings)
#
# combined_trainings = obsidian_trainings + apple_trainings
# unique_exercise_parsed_names = defaultdict(int)
# for train in combined_trainings:
#     if not train:
#         continue
#     if train.exercises:
#         for exr in train.exercises:
#             if exr:
#                 unique_exercise_parsed_names[exr.name] += 1
# pprint(unique_exercise_parsed_names)
# print(len(combined_trainings))
# Async engine playground

# fbm = FitnessDatabseMigrator(async_engine=async_engine)
#
# asyncio.run(fbm.reset_database())