Add insertion of obsidian trainings with sqlalchemy
This commit is contained in:
parent cdbcb0f446
commit 81e67eccbc
4 changed files with 134 additions and 17 deletions
dbapi/tables.py
@@ -1,4 +1,4 @@
-from sqlalchemy import ForeignKey, String, Table, Column, Integer, MetaData, Date
+from sqlalchemy import Float, ForeignKey, String, Table, Column, Integer, MetaData, Date
 
 # NOTE: MetaData object - object where we place our tables
 # Essentially - this is a facade around Python dicts that stores a series of Table objects keyed to their string name
@@ -8,21 +8,29 @@ metadata_obj = MetaData()
 training = Table(
     "training",
     metadata_obj,
-    Column("ID", Integer, primary_key=True),
-    Column("DATE", Date),
+    Column("Id", Integer, primary_key=True),
+    Column("Date", Date),
 )
 
 # Representation of exercise table
 exercise = Table(
     "exercise",
     metadata_obj,
-    Column("ID", Integer, primary_key=True),
-    Column("TRAINING", ForeignKey("training.ID"), nullable=False),
-    Column("NAME", String(256)),
+    Column("Id", Integer, primary_key=True),
+    Column("Training", ForeignKey("training.Id"), nullable=False),
+    Column("Name", String(256)),
 )
 
+approach = Table(
+    "approach",
+    metadata_obj,
+    Column("Id", Integer, primary_key=True),
+    Column("Exercise", ForeignKey("exercise.Id"), nullable=False),
+    Column("Weight", Float),
+    Column("Reps", Integer),
+)
 
 # NOTE: We can access table metadata with the associative array `c`
 # print(training.c.keys())
 # print(training.primary_key)
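A quick way to check the schema defined above without touching the database is to inspect the Table objects registered on metadata_obj and print the DDL they would generate. This is a minimal sketch for illustration only, not part of the commit; CreateTable is the standard DDL construct from sqlalchemy.schema:

from sqlalchemy.schema import CreateTable

from dbapi.tables import approach, metadata_obj, training

# Column names and primary keys are available directly from the table metadata
print(training.c.keys())      # ['Id', 'Date']
print(approach.primary_key)   # primary key constraint on approach.Id

# Print the CREATE TABLE statements that metadata_obj.create_all(engine) would emit
for table in metadata_obj.sorted_tables:
    print(CreateTable(table))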
84 elect Normal file
@@ -0,0 +1,84 @@
+1 | 1 | 77 | 12
+2 | 1 | 86 | 12
+3 | 1 | 91 | 12
+4 | 1 | 86 | 12
+5 | 2 | 47 | 12
+6 | 2 | 47 | 12
+7 | 2 | 47 | 11
+8 | 2 | 47 | 10
+9 | 3 | 15 | 12
+10 | 3 | 25 | 12
+11 | 3 | 27.5 | 12
+12 | 3 | 30 | 8
+13 | 4 | 9 | 12
+14 | 4 | 11 | 12
+15 | 4 | 12 | 12
+16 | 5 | 33 | 12
+17 | 5 | 33 | 12
+18 | 5 | 33 | 12
+19 | 5 | 33 | 12
+20 | 6 | 0 | 9
+21 | 6 | 0 | 7
+22 | 6 | 0 | 5
+23 | 7 | 55 | 12
+24 | 7 | 59 | 12
+25 | 7 | 59 | 12
+26 | 7 | 59 | 10
+27 | 8 | 18 | 12
+28 | 8 | 20 | 12
+29 | 8 | 20 | 11
+30 | 9 | 47 | 12
+31 | 9 | 47 | 12
+32 | 9 | 47 | 12
+33 | 10 | 77 | 12
+34 | 10 | 77 | 12
+35 | 10 | 77 | 12
+36 | 11 | 0 | 11
+37 | 11 | 0 | 8
+38 | 11 | 0 | 6
+39 | 12 | 22 | 12
+40 | 12 | 28 | 10
+41 | 12 | 28 | 10
+42 | 12 | 28 | 10
+43 | 13 | 16 | 12
+44 | 13 | 28 | 10
+45 | 13 | 28 | 6
+46 | 13 | 28 | 7
+47 | 14 | 47 | 12
+48 | 14 | 47 | 12
+49 | 14 | 47 | 12
+50 | 15 | 65 | 12
+51 | 15 | 82 | 12
+52 | 15 | 91 | 12
+53 | 16 | 100 | 12
+54 | 16 | 150 | 10
+55 | 16 | 150 | 10
+56 | 16 | 150 | 10
+57 | 17 | 15 | 12
+58 | 17 | 15 | 12
+59 | 17 | 15 | 12
+60 | 18 | 12 | 12
+61 | 18 | 20 | 10
+62 | 18 | 20 | 10
+63 | 18 | 20 | 10
+64 | 19 | 47 | 11
+65 | 19 | 47 | 11
+66 | 19 | 47 | 12
+67 | 20 | 45 | 12
+68 | 20 | 54 | 12
+69 | 20 | 59 | 10
+70 | 20 | 59 | 10
+71 | 21 | 30 | 10
+72 | 21 | 32 | 10
+73 | 21 | 32 | 10
+74 | 22 | 50 | 12
+75 | 22 | 50 | 12
+76 | 22 | 50 | 12
+77 | 23 | 16 | 12
+78 | 23 | 24 | 10
+79 | 23 | 24 | 10
+80 | 23 | 24 | 10
+81 | 24 | 47 | 12
+82 | 24 | 47 | 12
+83 | 24 | 47 | 12
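The new file above looks like a pipe-separated dump of the approach table (Id | Exercise | Weight | Reps). Assuming that is what it is, an equivalent listing can be produced from the database with a plain Core select after main.py has run. This sketch is illustrative and not part of the commit; the DATABASE_URL variable name is an assumption, since main.py assembles its connection settings from other .env values:

import os

from dotenv import load_dotenv
from sqlalchemy import create_engine, select

from dbapi.tables import approach

load_dotenv()
# NOTE: DATABASE_URL is a placeholder name, not necessarily the variable main.py uses
engine = create_engine(os.environ["DATABASE_URL"])

with engine.connect() as conn:
    for row in conn.execute(select(approach).order_by(approach.c.Id)):
        print(f"{row.Id} | {row.Exercise} | {row.Weight} | {row.Reps}")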
38 main.py
@@ -1,7 +1,10 @@
 import os
-from sqlalchemy import Table, create_engine, text
+from typing import List
+from sqlalchemy import Table, create_engine, text, insert
 from dotenv import load_dotenv
-from dbapi.tables import metadata_obj
+from dbapi.tables import metadata_obj, training, exercise, approach
+from obsidian.notes_parser import parse_training_data
+from obsidian.py_models import Training
 
 
 load_dotenv()
@@ -31,13 +34,40 @@ engine = create_engine(
 # print(f"x: {row.x} -- y: {row.y}")
 
 # NOTE: Create all tables from the metadata object
-# metadata_obj.create_all(engine)
 # TODO: Check how psycopg2 handles duplication of tables
 # TODO: Check how migrations are done
+metadata_obj.create_all(engine)
 
 # NOTE: Drop all tables from the database
-# metadata_obj.drop_all(engine)
+metadata_obj.drop_all(engine)
+metadata_obj.create_all(engine)
 
 # NOTE: Table reflection - generating a Table object from existing tables (only tables that are stored in metadata)
 # some_table = Table("some_table", metadata_obj, autoload_with=engine)
 # print(some_table.c)
+
+trainings: List[Training] = parse_training_data()
+for train in trainings:
+    if not train:
+        continue
+    else:
+        print("-------------------------\n" * 2)
+        print(train)
+        training_statement = insert(training).values(Date=train.date)
+        # Create the training row first so its primary key can be used for the exercises
+        with engine.connect() as conn:
+            result = conn.execute(training_statement)
+            train_pk = result.inserted_primary_key[0]
+            for exr in train.exercises:
+                approach_statements = []
+                exercise_statement = insert(exercise).values(
+                    Training=train_pk, Name=exr.name
+                )
+                exr_insert = conn.execute(exercise_statement)
+                exr_pk = exr_insert.inserted_primary_key[0]
+                for appr in exr.approaches:
+                    appr_statement = insert(approach).values(
+                        Exercise=exr_pk, Weight=appr.weight, Reps=appr.reps
+                    )
+                    appr_insert = conn.execute(appr_statement)
+            conn.commit()
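The loop above opens a connection per training and calls conn.commit() once at the end of the with block, so all inserts for one training land in a single transaction. An alternative sketch (not what the commit does, and reusing the same objects and imports as main.py) gets the same effect from engine.begin(), which commits on success and rolls back automatically if any insert raises:

for train in trainings:
    if not train:
        continue
    # engine.begin() wraps the block in one transaction per training:
    # either every row for this training is stored, or none of them is
    with engine.begin() as conn:
        train_pk = conn.execute(
            insert(training).values(Date=train.date)
        ).inserted_primary_key[0]
        for exr in train.exercises:
            exr_pk = conn.execute(
                insert(exercise).values(Training=train_pk, Name=exr.name)
            ).inserted_primary_key[0]
            for appr in exr.approaches:
                conn.execute(
                    insert(approach).values(
                        Exercise=exr_pk, Weight=appr.weight, Reps=appr.reps
                    )
                )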
obsidian/notes_parser.py
@@ -43,7 +43,6 @@ def serialize_exercise(reps: str, weight: str, name: str) -> Exercise:
     else:
         weight_pointer = 0
     for rep_index in range(0, len(reps)):
-        print(reps[rep_index])
         approach = Approach(weight=weight[weight_pointer], reps=reps[rep_index])
         if rep_index < len(weight) - 1:
             weight_pointer += 1
@@ -51,7 +50,6 @@ def serialize_exercise(reps: str, weight: str, name: str) -> Exercise:
     exercise = Exercise(
         name=name, approaches=approaches, splitted_weight=weight_splitted
     )
-    print(exercise)
     return exercise
 
 
@@ -89,7 +87,7 @@ def filter_training_data(training_data: str):
 
 
 def parse_training_data():
-    training_data: str = filter_training_data(read_example_file("full.txt"))
+    training_data: str = filter_training_data(read_example_file("notes.txt"))
     lines = training_data.splitlines()
     current_training = None
     trains = []
@@ -108,9 +106,6 @@ def parse_training_data():
                 current_training.exercises.append(exr)
             except ValueError:
                 pass
-    for t in trains:
-        pprint(t)
-        print("\n")
     return trains
 
-pprint(parse_training_data()[1:])
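For reference, the parser and main.py rely on Training, Exercise, and Approach models from obsidian/py_models, whose definitions are not part of this diff. A hypothetical minimal shape that matches the attributes used above (date, exercises, name, approaches, splitted_weight, weight, reps) could look like this, assuming plain dataclasses; the real module may differ:

# Hypothetical reconstruction of obsidian/py_models, based only on attribute usage in this diff
from dataclasses import dataclass, field
from datetime import date as date_type
from typing import List


@dataclass
class Approach:
    weight: float
    reps: int


@dataclass
class Exercise:
    name: str
    approaches: List[Approach]
    splitted_weight: List[str]


@dataclass
class Training:
    date: date_type
    exercises: List[Exercise] = field(default_factory=list)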