from collections import defaultdict
from pprint import pprint
import os
from typing import Dict, List

from sqlalchemy import Table, create_engine, text, insert
from dotenv import load_dotenv

from dbapi.tables import metadata_obj, training, exercise, approach
from obsidian.notes_parser import parse_training_data, remap_unique_exercises
from apple.notes_parser import parse_training_data as apple_parse_training_data
from apple.notes_parser import remap_unique_exercises as apple_remaper
from obsidian.py_models import Training


load_dotenv()

DB_USERNAME = os.getenv("POSTGRES_USER")
DB_PASS = os.getenv("POSTGRES_PASSWORD")

engine = create_engine(
    f"postgresql+psycopg2://{DB_USERNAME}:{DB_PASS}@localhost:5433/fitness_database",
    echo=True,  # log every emitted SQL statement
)
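
# The host, port and database name above are hard-coded; they could just as
# well come from the environment. The variable names below are illustrative
# only and are not assumed to exist in this project's .env file.
# DB_HOST = os.getenv("POSTGRES_HOST", "localhost")
# DB_PORT = os.getenv("POSTGRES_PORT", "5433")
# DB_NAME = os.getenv("POSTGRES_DB", "fitness_database")
# engine = create_engine(
#     f"postgresql+psycopg2://{DB_USERNAME}:{DB_PASS}@{DB_HOST}:{DB_PORT}/{DB_NAME}",
#     echo=True,
# )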

# NOTE: "Begin once" style - engine.begin() is used as a context manager;
# the transaction commits when the block exits successfully and rolls back
# on error.
# with engine.begin() as conn:
#     result = conn.execute(text("select 'hello world'"))
#     print(result.all())
#     conn.execute(
#         text("INSERT INTO some_table(x, y) VALUES (:x, :y)"),
#         [{"x": 6, "y": 7}, {"x": 9, "y": 10}],
#     )
# NOTE: "Commit as you go" style - after managing transactions we need to call Connection.commit(). Otherwise ROLLBACK
|
|
# will be executed
|
|
# with engine.connect() as conn:
|
|
# result = conn.execute(text("SELECT x, y FROM some_table"))
|
|
# for row in result:
|
|
# print(f"x: {row.x} -- y: {row.y}")

# NOTE: Create all tables from the metadata object
# metadata_obj.create_all(engine)
# TODO: Check how psycopg2 handles duplicate tables
# TODO: Check how migrations are done
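# Partial answer to the TODOs above: MetaData.create_all() runs with
# checkfirst=True by default, so existing tables are skipped rather than
# raising a duplicate-table error; ongoing schema changes are usually managed
# with a migration tool such as Alembic rather than create_all()/drop_all().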

# NOTE: Drop all tables from the database
# metadata_obj.drop_all(engine)
# metadata_obj.create_all(engine)

# NOTE: Table reflection - building a Table object from an existing database
# table; the reflected table is registered in metadata_obj.
# some_table = Table("some_table", metadata_obj, autoload_with=engine)
# print(some_table.c)
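
# Once reflected, the Table object can be used in Core statements like any
# hand-defined table. A minimal sketch, assuming "some_table" actually exists
# in the database:
# from sqlalchemy import select
# some_table = Table("some_table", metadata_obj, autoload_with=engine)
# with engine.connect() as conn:
#     for row in conn.execute(select(some_table)):
#         print(row)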

# -----

# Inserting training values into the database
# trainings: List[Training] = parse_training_data()
# for train in trainings:
#     if not train:
#         continue
#     print("-------------------------\n" * 2)
#     print(train)
#     training_statement = insert(training).values(Date=train.date)
#     # Create the training row, then its exercises and their approaches
#     with engine.connect() as conn:
#         result = conn.execute(training_statement)
#         train_pk = result.inserted_primary_key[0]
#         for exr in train.exercises:
#             exercise_statement = insert(exercise).values(
#                 Training=train_pk, Name=exr.name
#             )
#             exr_insert = conn.execute(exercise_statement)
#             exr_pk = exr_insert.inserted_primary_key[0]
#             for appr in exr.approaches:
#                 appr_statement = insert(approach).values(
#                     Exercise=exr_pk, Weight=appr.weight, Reps=appr.reps
#                 )
#                 conn.execute(appr_statement)
#         conn.commit()
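

# A reusable version of the insert flow above, as a sketch: engine.begin()
# opens a transaction that commits automatically when the block succeeds and
# rolls back on error, so no explicit conn.commit() is needed. It assumes the
# same training/exercise/approach tables and Training model used above.
def insert_trainings(trainings_to_store: List[Training]) -> None:
    for train in trainings_to_store:
        if not train:
            continue
        with engine.begin() as conn:
            # One transaction per training: the training row, its exercises
            # and their approaches either all land or none do.
            train_pk = conn.execute(
                insert(training).values(Date=train.date)
            ).inserted_primary_key[0]
            for exr in train.exercises:
                exr_pk = conn.execute(
                    insert(exercise).values(Training=train_pk, Name=exr.name)
                ).inserted_primary_key[0]
                for appr in exr.approaches:
                    conn.execute(
                        insert(approach).values(
                            Exercise=exr_pk, Weight=appr.weight, Reps=appr.reps
                        )
                    )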


# -----

# Counting unique exercise names in the Obsidian notes, before and after
# remap_unique_exercises
# trainings: List[Training] = parse_training_data()
#
# unique_exercise_names = defaultdict(int)
# counter = 0
#
# for train in trainings:
#     if not train:
#         continue
#     if train.exercises:
#         for exr in train.exercises:
#             counter += 1
#             unique_exercise_names[exr.name] += 1
#
# pprint(unique_exercise_names)
# print(counter)
#
# parsed_trainings = remap_unique_exercises(trainings)
#
# print("\n" * 3)
#
# unique_exercise_parsed_names = defaultdict(int)
# p_counter = 0
# for train in parsed_trainings:
#     if not train:
#         continue
#     if train.exercises:
#         for exr in train.exercises:
#             p_counter += 1
#             unique_exercise_parsed_names[exr.name] += 1
# pprint(unique_exercise_parsed_names)
# print(p_counter)

# Apple notes playground
# trainings: List[Training] = apple_parse_training_data()
#
# unique_exercise_names = defaultdict(int)
# counter = 0
#
# for train in trainings:
#     if not train:
#         continue
#     if train.exercises:
#         for exr in train.exercises:
#             if exr:
#                 counter += 1
#                 unique_exercise_names[exr.name] += 1
#
# pprint(unique_exercise_names)
# print(counter)
#
# parsed_trainings = apple_remaper(trainings)
#
# print("\n" * 3)
#
# unique_exercise_parsed_names = defaultdict(int)
# p_counter = 0
# for train in parsed_trainings:
#     if not train:
#         continue
#     if train.exercises:
#         for exr in train.exercises:
#             if exr:
#                 p_counter += 1
#                 unique_exercise_parsed_names[exr.name] += 1
# pprint(unique_exercise_parsed_names)
# print(p_counter)

# Combined trainings: count unique exercise names across both sources

obsidian_trainings: List[Training] = parse_training_data()
obsidian_parsed_trainings = remap_unique_exercises(obsidian_trainings)

apple_trainings: List[Training] = apple_parse_training_data()
apple_parsed_trainings = apple_remaper(apple_trainings)

combined_trainings = obsidian_trainings + apple_trainings

unique_exercise_parsed_names = defaultdict(int)
for train in combined_trainings:
    if not train:
        continue
    if train.exercises:
        for exr in train.exercises:
            if exr:
                unique_exercise_parsed_names[exr.name] += 1

pprint(unique_exercise_parsed_names)
print(len(combined_trainings))
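

# The counting loop above is repeated in several playground sections; a small
# helper like this one (a sketch, not called anywhere above) captures it once.
def count_unique_exercises(trainings_to_count: List[Training]) -> Dict[str, int]:
    counts: Dict[str, int] = defaultdict(int)
    for train in trainings_to_count:
        if not train or not train.exercises:
            continue
        for exr in train.exercises:
            if exr:
                counts[exr.name] += 1
    return counts


# Example usage:
# pprint(count_unique_exercises(combined_trainings))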