Create repositories for tables

parent d18b045529
commit e648edb262

4 changed files with 116 additions and 49 deletions
@@ -1,4 +0,0 @@
-from sqlalchemy import create_engine
-
-
-engine = create_engine("postgres+psycopg2:///:localhost:5433", echo=True)
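For reference, the engine URL removed above is malformed; a working SQLAlchemy URL for a local PostgreSQL instance on port 5433 would look roughly like the sketch below (user, password and database name are placeholders, not values from this repository):

    from sqlalchemy import create_engine

    # Placeholder credentials and database name, shown only to illustrate the URL format.
    engine = create_engine(
        "postgresql+psycopg2://user:password@localhost:5433/trainings",
        echo=True,
    )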
dbapi/repositories.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+from sqlalchemy import create_engine, insert
+from sqlalchemy.engine.base import Engine
+from datetime import date
+from dbapi.tables import training, exercise, approach
+
+
+class DatabaseInterfacesMixin:
+    """Mixin for repositories that work with the database"""
+
+    def __init__(self, engine: Engine) -> None:
+        self.engine: Engine = engine
+
+
+class TrainingRepository(DatabaseInterfacesMixin):
+    """Training table repository"""
+
+    def create_training(self, date: date) -> int:
+        """Create a new training entry
+        Args:
+            date: Date of the training
+
+        Returns:
+            Primary key of the created training entry
+        """
+        new_instance_statement = insert(training).values(Date=date)
+        with self.engine.connect() as conn:
+            result = conn.execute(new_instance_statement)
+            inserted_entry_pk: int = result.inserted_primary_key[0]
+            conn.commit()
+        return inserted_entry_pk
+
+
+class ExerciseRepository(DatabaseInterfacesMixin):
+    """Exercise table repository"""
+
+    def create_exercise(self, training_pk: int, exercise_name: str) -> int:
+        """Create a new exercise entry
+        Args:
+            training_pk: Primary key of the associated training entry
+            exercise_name: Name of the exercise
+
+        Returns:
+            Primary key of the created exercise entry
+        """
+        new_instance_statement = insert(exercise).values(
+            Training=training_pk, Name=exercise_name
+        )
+        with self.engine.connect() as conn:
+            result = conn.execute(new_instance_statement)
+            inserted_entry_pk: int = result.inserted_primary_key[0]
+            conn.commit()
+        return inserted_entry_pk
+
+
+class ApproachRepository(DatabaseInterfacesMixin):
+    """Approach table repository"""
+
+    def create_approach(self, exercise_pk: int, weight: float, reps: int) -> int:
+        """Create a new approach entry
+
+        Args:
+            exercise_pk: Primary key of the associated exercise entry
+            weight: Weight used in the approach
+            reps: Number of reps in the approach
+
+        Returns:
+            Primary key of the created approach entry
+        """
+        new_instance_statement = insert(approach).values(
+            Exercise=exercise_pk, Weight=weight, Reps=reps
+        )
+        with self.engine.connect() as conn:
+            result = conn.execute(new_instance_statement)
+            inserted_entry_pk: int = result.inserted_primary_key[0]
+            conn.commit()
+        return inserted_entry_pk
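The three repositories above are meant to be chained through the primary keys they return, which is exactly how main.py drives them further down. A minimal usage sketch, assuming the classes are imported from dbapi.repositories and a reachable database URL is supplied (the URL, date, exercise name, weight and reps are made-up example values):

    from datetime import date

    from sqlalchemy import create_engine

    from dbapi.repositories import (
        ApproachRepository,
        ExerciseRepository,
        TrainingRepository,
    )

    # Placeholder connection URL; replace with real credentials and database.
    engine = create_engine("postgresql+psycopg2://user:password@localhost:5433/trainings")

    # Each create_* call opens its own connection, inserts one row, commits,
    # and returns the primary key of the new row.
    training_pk = TrainingRepository(engine).create_training(date(2024, 1, 15))
    exercise_pk = ExerciseRepository(engine).create_exercise(
        training_pk=training_pk, exercise_name="Bench press"
    )
    approach_pk = ApproachRepository(engine).create_approach(
        exercise_pk=exercise_pk, weight=60.0, reps=8
    )

Because every call commits on its own connection, a failure partway through leaves the earlier rows in place; the three inserts are not wrapped in a single transaction.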
@@ -34,3 +34,4 @@ approach = Table(
 # NOTE: We can access table metadata with associative array `c`
 # print(training.c.keys())
 # print(training.primary_key)
+print(training.name)
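The NOTE lines above refer to standard SQLAlchemy Table introspection attributes. A standalone sketch with a hypothetical column layout (the real training table is defined in dbapi/tables.py and may differ):

    from sqlalchemy import Column, Date, Integer, MetaData, Table

    metadata = MetaData()
    # Hypothetical columns, used only to show what the accessors return.
    training = Table(
        "training",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("Date", Date),
    )

    print(training.c.keys())     # column names, e.g. ['id', 'Date']
    print(training.primary_key)  # the PrimaryKeyConstraint object
    print(training.name)         # the table name, 'training'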
main.py (84 lines changed)
@@ -4,6 +4,7 @@ import os
 from typing import Dict, List
 from sqlalchemy import Table, create_engine, text, insert
 from dotenv import load_dotenv
+from dbapi.connector import ApproachRepository, ExerciseRepository, TrainingRepository
 from dbapi.tables import metadata_obj, training, exercise, approach
 from obsidian.notes_parser import parse_training_data, remap_unique_exercises
 from apple.notes_parser import parse_training_data as apple_parse_training_data
@@ -43,8 +44,8 @@ engine = create_engine(
 # TODO: Check how migrations are done

 # NOTE: Drop all Tables from database
-# metadata_obj.drop_all(engine)
-# metadata_obj.create_all(engine)
+metadata_obj.drop_all(engine)
+metadata_obj.create_all(engine)

 # NOTE: Table reflection - generating table object from existing tables (only tables, that are stored in metadata)
 # some_table = Table("some_table", metadata_obj, autoload_with=engine)
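Uncommenting drop_all and create_all means every run drops and recreates all tables registered on metadata_obj, so the insertion loop below always starts from empty tables until real migrations replace this. A standalone sketch of the same pattern with a placeholder URL and table:

    from sqlalchemy import Column, Integer, MetaData, Table, create_engine

    metadata_obj = MetaData()
    # Placeholder table definition; the real ones live in dbapi/tables.py.
    training = Table("training", metadata_obj, Column("id", Integer, primary_key=True))

    # Placeholder URL; echo=True logs the emitted DDL.
    engine = create_engine(
        "postgresql+psycopg2://user:password@localhost:5433/trainings", echo=True
    )

    metadata_obj.drop_all(engine)    # drops every table registered on metadata_obj (skips missing ones by default)
    metadata_obj.create_all(engine)  # recreates them from the Table definitions (skips existing ones by default)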
@@ -53,31 +54,24 @@ engine = create_engine(
 # -----

 # Inserting training values into database
-# trainings: List[Training] = parse_training_data()
-# for train in trainings:
-#     if not train:
-#         continue
-#     else:
-#         print("-------------------------\n" * 2)
-#         print(train)
-#         training_statement = insert(training).values(Date=train.date)
-#         # Create training
-#         with engine.connect() as conn:
-#             result = conn.execute(training_statement)
-#             train_pk = result.inserted_primary_key[0]
-#             for exr in train.exercises:
-#                 approach_statements = []
-#                 exercise_statement = insert(exercise).values(
-#                     Training=train_pk, Name=exr.name
-#                 )
-#                 exr_insert = conn.execute(exercise_statement)
-#                 exr_pk = exr_insert.inserted_primary_key[0]
-#                 for appr in exr.approaches:
-#                     appr_statement = insert(approach).values(
-#                         Exercise=exr_pk, Weight=appr.weight, Reps=appr.reps
-#                     )
-#                     appr_insert = conn.execute(appr_statement)
-#             conn.commit()
+trainings: List[Training] = parse_training_data()
+for train in trainings:
+    if not train:
+        continue
+    else:
+        print("-------------------------\n" * 2)
+        print(train)
+        # Create training
+        new_training_pk: int = TrainingRepository(engine).create_training(train.date)
+        for exr in train.exercises:
+            approach_statements = []
+            new_exercise_pk: int = ExerciseRepository(engine).create_exercise(
+                training_pk=new_training_pk, exercise_name=exr.name
+            )
+            for appr in exr.approaches:
+                new_approach_pk: int = ApproachRepository(engine).create_approach(
+                    exercise_pk=new_exercise_pk, weight=appr.weight, reps=appr.reps
+                )

 # -----

@@ -155,21 +149,21 @@ engine = create_engine(
 # Combined trainings


-obsidian_trainings: List[Training] = parse_training_data()
-obsidian_parsed_trainings = remap_unique_exercises(obsidian_trainings)
-
-apple_trainings: List[Training] = apple_parse_training_data()
-apple_parsed_trainings = apple_remaper(apple_trainings)
-
-
-combined_trainings = obsidian_trainings + apple_trainings
-unique_exercise_parsed_names = defaultdict(int)
-for train in combined_trainings:
-    if not train:
-        continue
-    if train.exercises:
-        for exr in train.exercises:
-            if exr:
-                unique_exercise_parsed_names[exr.name] += 1
-pprint(unique_exercise_parsed_names)
-print(len(combined_trainings))
+# obsidian_trainings: List[Training] = parse_training_data()
+# obsidian_parsed_trainings = remap_unique_exercises(obsidian_trainings)
+#
+# apple_trainings: List[Training] = apple_parse_training_data()
+# apple_parsed_trainings = apple_remaper(apple_trainings)
+#
+#
+# combined_trainings = obsidian_trainings + apple_trainings
+# unique_exercise_parsed_names = defaultdict(int)
+# for train in combined_trainings:
+#     if not train:
+#         continue
+#     if train.exercises:
+#         for exr in train.exercises:
+#             if exr:
+#                 unique_exercise_parsed_names[exr.name] += 1
+# pprint(unique_exercise_parsed_names)
+# print(len(combined_trainings))