# f1tness_parser/main.py — entry point: parses Obsidian training notes and
# loads / summarizes them against the fitness Postgres database.
import os
from collections import Counter, defaultdict
from pprint import pprint
from typing import Dict, List

from dotenv import load_dotenv
from sqlalchemy import Table, create_engine, insert, text
from sqlalchemy.engine import URL

from dbapi.tables import approach, exercise, metadata_obj, training
from obsidian.notes_parser import parse_training_data
from obsidian.py_models import Training
# Pull DB credentials from .env so secrets never live in source control.
load_dotenv()
DB_USERNAME = os.getenv("POSTGRES_USER")
DB_PASS = os.getenv("POSTGRES_PASSWORD")

# Build the DSN with URL.create instead of an f-string: URL.create escapes
# special characters ('@', '/', ':', '%') in the password, which would
# otherwise silently corrupt the connection URL. Host/port/db keep their
# previous hard-coded values as env-overridable defaults.
engine = create_engine(
    URL.create(
        drivername="postgresql+psycopg2",
        username=DB_USERNAME,
        password=DB_PASS,
        host=os.getenv("POSTGRES_HOST", "localhost"),
        port=int(os.getenv("POSTGRES_PORT", "5433")),
        database=os.getenv("POSTGRES_DB", "fitness_database"),
    ),
    echo=True,  # log every emitted SQL statement; useful while developing
)
# NOTE: "Begin once" style - using `.begin` as context creator for SQLAlchemy
# with engine.begin() as conn:
# result = conn.execute(text("select 'hello world'"))
# print(result.all())
# conn.execute(
# text("INSERT INTO some_table(x, y) VALUES (:x, :y)"),
# [{"x": 6, "y": 7}, {"x": 9, "y": 10}],
# )
# NOTE: "Commit as you go" style - after managing transactions we need to call Connection.commit(). Otherwise ROLLBACK
# will be executed
# with engine.connect() as conn:
# result = conn.execute(text("SELECT x, y FROM some_table"))
# for row in result:
# print(f"x: {row.x} -- y: {row.y}")
# NOTE: Create all tables from metadata object
# metadata_obj.create_all(engine)
# TODO: Check how psycopg2 handles duplication of tables
# TODO: Check how migrations are done
# NOTE: Drop all Tables from database
# metadata_obj.drop_all(engine)
# metadata_obj.create_all(engine)
# NOTE: Table reflection - generating table object from existing tables (only tables, that are stored in metadata)
# some_table = Table("some_table", metadata_obj, autoload_with=engine)
# print(some_table.c)
# -----
# Inserting training values into database
# trainings: List[Training] = parse_training_data()
# for train in trainings:
# if not train:
# continue
# else:
# print("-------------------------\n" * 2)
# print(train)
# training_statement = insert(training).values(Date=train.date)
# # Create training
# with engine.connect() as conn:
# result = conn.execute(training_statement)
# train_pk = result.inserted_primary_key[0]
# for exr in train.exercises:
# approach_statements = []
# exercise_statement = insert(exercise).values(
# Training=train_pk, Name=exr.name
# )
# exr_insert = conn.execute(exercise_statement)
# exr_pk = exr_insert.inserted_primary_key[0]
# for appr in exr.approaches:
# appr_statement = insert(approach).values(
# Exercise=exr_pk, Weight=appr.weight, Reps=appr.reps
# )
# appr_insert = conn.execute(appr_statement)
# conn.commit()
# -----
# Calculating unique exercises
# Count how often each exercise name appears across the whole parsed
# training history.
trainings: List[Training] = parse_training_data()
# Counter replaces the hand-rolled defaultdict(int) tally. Falsy trainings
# (failed parses) and trainings without exercises are skipped, matching the
# original `continue` / `if train.exercises` guards.
unique_exercise_names = Counter(
    exr.name
    for train in trainings
    if train and train.exercises
    for exr in train.exercises
)
pprint(unique_exercise_names)