From e648edb26292abe3e3d3e85dd6ecce9d0237ab78 Mon Sep 17 00:00:00 2001
From: pro100ton
Date: Wed, 15 Jan 2025 23:13:29 +0300
Subject: [PATCH] Create repositories for tables

---
 dbapi/connector.py    |  4 ---
 dbapi/repositories.py | 76 +++++++++++++++++++++++++++++++++++++++
 dbapi/tables.py       |  1 +
 main.py               | 84 ++++++++++++++++++++-----------------------
 4 files changed, 116 insertions(+), 49 deletions(-)
 delete mode 100644 dbapi/connector.py
 create mode 100644 dbapi/repositories.py

diff --git a/dbapi/connector.py b/dbapi/connector.py
deleted file mode 100644
index 6c5c58d..0000000
--- a/dbapi/connector.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from sqlalchemy import create_engine
-
-
-engine = create_engine("postgres+psycopg2:///:localhost:5433", echo=True)
diff --git a/dbapi/repositories.py b/dbapi/repositories.py
new file mode 100644
index 0000000..251adaf
--- /dev/null
+++ b/dbapi/repositories.py
@@ -0,0 +1,76 @@
+from sqlalchemy import insert
+from sqlalchemy.engine.base import Engine
+from datetime import date
+from dbapi.tables import training, exercise, approach
+
+
+class DatabaseInterfacesMixin:
+    """Mixin for interfaces that work with the database"""
+
+    def __init__(self, engine: Engine) -> None:
+        self.engine: Engine = engine
+
+
+class TrainingRepository(DatabaseInterfacesMixin):
+    """Training table repository"""
+
+    def create_training(self, date: date) -> int:
+        """Method for creating a new training entry
+        Args:
+            date: Date of the training
+
+        Returns:
+            Primary key of the created training entry
+        """
+        new_instance_statement = insert(training).values(Date=date)
+        with self.engine.connect() as conn:
+            result = conn.execute(new_instance_statement)
+            inserted_entry_pk: int = result.inserted_primary_key[0]
+            conn.commit()
+        return inserted_entry_pk
+
+
+class ExerciseRepository(DatabaseInterfacesMixin):
+    """Exercise table repository"""
+
+    def create_exercise(self, training_pk: int, exercise_name: str) -> int:
+        """Method for creating a new exercise entry
+        Args:
+            training_pk: Primary key of the associated training entry
+            exercise_name: Name of the exercise
+
+        Returns:
+            Primary key of the created exercise entry
+        """
+        new_instance_statement = insert(exercise).values(
+            Training=training_pk, Name=exercise_name
+        )
+        with self.engine.connect() as conn:
+            result = conn.execute(new_instance_statement)
+            inserted_entry_pk: int = result.inserted_primary_key[0]
+            conn.commit()
+        return inserted_entry_pk
+
+
+class ApproachRepository(DatabaseInterfacesMixin):
+    """Approach table repository"""
+
+    def create_approach(self, exercise_pk: int, weight: float, reps: int) -> int:
+        """Method for creating a new approach entry
+
+        Args:
+            exercise_pk: Primary key of the associated exercise entry
+            weight: Approach weight
+            reps: Number of reps in the approach
+
+        Returns:
+            Primary key of the created approach entry
+        """
+        new_instance_statement = insert(approach).values(
+            Exercise=exercise_pk, Weight=weight, Reps=reps
+        )
+        with self.engine.connect() as conn:
+            result = conn.execute(new_instance_statement)
+            inserted_entry_pk: int = result.inserted_primary_key[0]
+            conn.commit()
+        return inserted_entry_pk
diff --git a/dbapi/tables.py b/dbapi/tables.py
index b1126b8..ab48c04 100644
--- a/dbapi/tables.py
+++ b/dbapi/tables.py
@@ -34,3 +34,4 @@ approach = Table(
 # NOTE: We can access table metadata with associative array `c`
 # print(training.c.keys())
 # print(training.primary_key)
+print(training.name)
diff --git a/main.py b/main.py
index ec24b19..f9daa02 100644
--- a/main.py
+++ b/main.py
@@ -4,6 +4,7 @@ import os
 from typing import Dict, List
 from sqlalchemy import Table, create_engine, text, insert
 from dotenv import load_dotenv
+from dbapi.repositories import ApproachRepository, ExerciseRepository, TrainingRepository
 from dbapi.tables import metadata_obj, training, exercise, approach
 from obsidian.notes_parser import parse_training_data, remap_unique_exercises
 from apple.notes_parser import parse_training_data as apple_parse_training_data
@@ -43,8 +44,8 @@ engine = create_engine(
 # TODO: Check how migrations are done

 # NOTE: Drop all Tables from database
-# metadata_obj.drop_all(engine)
-# metadata_obj.create_all(engine)
+metadata_obj.drop_all(engine)
+metadata_obj.create_all(engine)

 # NOTE: Table reflection - generating table object from existing tables (only tables, that are stored in metadata)
 # some_table = Table("some_table", metadata_obj, autoload_with=engine)
@@ -53,31 +54,24 @@ engine = create_engine(
 # -----

 # Inserting training values into database
-# trainings: List[Training] = parse_training_data()
-# for train in trainings:
-#     if not train:
-#         continue
-#     else:
-#         print("-------------------------\n" * 2)
-#         print(train)
-#         training_statement = insert(training).values(Date=train.date)
-#         # Create training
-#         with engine.connect() as conn:
-#             result = conn.execute(training_statement)
-#             train_pk = result.inserted_primary_key[0]
-#             for exr in train.exercises:
-#                 approach_statements = []
-#                 exercise_statement = insert(exercise).values(
-#                     Training=train_pk, Name=exr.name
-#                 )
-#                 exr_insert = conn.execute(exercise_statement)
-#                 exr_pk = exr_insert.inserted_primary_key[0]
-#                 for appr in exr.approaches:
-#                     appr_statement = insert(approach).values(
-#                         Exercise=exr_pk, Weight=appr.weight, Reps=appr.reps
-#                     )
-#                     appr_insert = conn.execute(appr_statement)
-#             conn.commit()
+
+trainings: List[Training] = parse_training_data()
+for train in trainings:
+    if not train:
+        continue
+    else:
+        print("-------------------------\n" * 2)
+        print(train)
+        new_training_pk: int = TrainingRepository(engine).create_training(train.date)
+        for exr in train.exercises:
+            approach_statements = []
+            new_exercise_pk: int = ExerciseRepository(engine).create_exercise(
+                training_pk=new_training_pk, exercise_name=exr.name
+            )
+            for appr in exr.approaches:
+                new_approach_pk: int = ApproachRepository(engine).create_approach(
+                    exercise_pk=new_exercise_pk, weight=appr.weight, reps=appr.reps
+                )


 # -----
@@ -155,21 +149,21 @@ engine = create_engine(


 # Combined trainings
-obsidian_trainings: List[Training] = parse_training_data()
-obsidian_parsed_trainings = remap_unique_exercises(obsidian_trainings)
-
-apple_trainings: List[Training] = apple_parse_training_data()
-apple_parsed_trainings = apple_remaper(apple_trainings)
-
-
-combined_trainings = obsidian_trainings + apple_trainings
-unique_exercise_parsed_names = defaultdict(int)
-for train in combined_trainings:
-    if not train:
-        continue
-    if train.exercises:
-        for exr in train.exercises:
-            if exr:
-                unique_exercise_parsed_names[exr.name] += 1
-pprint(unique_exercise_parsed_names)
-print(len(combined_trainings))
+# obsidian_trainings: List[Training] = parse_training_data()
+# obsidian_parsed_trainings = remap_unique_exercises(obsidian_trainings)
+#
+# apple_trainings: List[Training] = apple_parse_training_data()
+# apple_parsed_trainings = apple_remaper(apple_trainings)
+#
+#
+# combined_trainings = obsidian_trainings + apple_trainings
+# unique_exercise_parsed_names = defaultdict(int)
+# for train in combined_trainings:
+#     if not train:
+#         continue
+#     if train.exercises:
+#         for exr in train.exercises:
+#             if exr:
+#                 unique_exercise_parsed_names[exr.name] += 1
+# pprint(unique_exercise_parsed_names)
+# print(len(combined_trainings))
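
Usage note (editorial, not part of the patch): the sketch below shows how the repository classes introduced in dbapi/repositories.py are meant to be chained, mirroring the loop in main.py. Only the class names and method signatures come from the patch; the connection URL and sample values are assumed placeholders, since the real engine is configured in main.py from environment variables loaded via dotenv.

# Sketch only: assumes a local PostgreSQL database; URL and values are placeholders.
from datetime import date

from sqlalchemy import create_engine

from dbapi.repositories import (
    ApproachRepository,
    ExerciseRepository,
    TrainingRepository,
)
from dbapi.tables import metadata_obj

# Placeholder URL; the project builds its engine from .env settings in main.py.
engine = create_engine(
    "postgresql+psycopg2://user:password@localhost:5433/trainings", echo=True
)
metadata_obj.create_all(engine)  # make sure the tables exist

# Insert one training, one exercise, and one approach, chaining the primary keys.
training_pk = TrainingRepository(engine).create_training(date(2025, 1, 15))
exercise_pk = ExerciseRepository(engine).create_exercise(
    training_pk=training_pk, exercise_name="Bench press"
)
approach_pk = ApproachRepository(engine).create_approach(
    exercise_pk=exercise_pk, weight=80.0, reps=8
)
print(training_pk, exercise_pk, approach_pk)

Note that each create_* method opens its own connection and commits immediately, so a training and its exercises are written in separate transactions rather than as one atomic unit.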