import os

from dotenv import load_dotenv
from sqlalchemy import Table, create_engine, text

from dbapi.tables import metadata_obj

load_dotenv()

DB_USERNAME = os.getenv("POSTGRES_USER")
DB_PASS = os.getenv("POSTGRES_PASSWORD")

engine = create_engine(
    f"postgresql+psycopg2://{DB_USERNAME}:{DB_PASS}@localhost:5433/fitness_database",
    echo=True,
)

# NOTE: "Begin once" style - engine.begin() is used as a context manager; the transaction is
# committed automatically when the block exits cleanly and rolled back on error.
# with engine.begin() as conn:
#     result = conn.execute(text("select 'hello world'"))
#     print(result.all())
#     conn.execute(
#         text("INSERT INTO some_table(x, y) VALUES (:x, :y)"),
#         [{"x": 6, "y": 7}, {"x": 9, "y": 10}],
#     )

# NOTE: "Commit as you go" style - with engine.connect() we manage the transaction ourselves
# and must call Connection.commit(); otherwise a ROLLBACK is issued when the block exits.
# with engine.connect() as conn:
#     result = conn.execute(text("SELECT x, y FROM some_table"))
#     for row in result:
#         print(f"x: {row.x} -- y: {row.y}")

# NOTE: Create all tables defined on the metadata object.
# TODO: Check how psycopg2 handles duplication of tables
# TODO: Check how migrations are done
metadata_obj.create_all(engine)

# NOTE: Drop all tables known to the metadata object from the database.
# metadata_obj.drop_all(engine)

# NOTE: Table reflection - building a Table object from an existing database table;
# the reflected table is registered on the metadata object that is passed in.
# some_table = Table("some_table", metadata_obj, autoload_with=engine)
# print(some_table.c)
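
# NOTE (sketch): reflecting the whole schema at once instead of a single table - a minimal,
# hedged example using MetaData.reflect(); it loads every table visible to the connection
# into metadata_obj, after which they can be looked up by name in metadata_obj.tables.
# metadata_obj.reflect(bind=engine)
# print(metadata_obj.tables.keys())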
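
# NOTE (sketch): "commit as you go" with a write - a hedged example assuming the `some_table`
# (columns x, y) from the commented-out blocks above actually exists; without the explicit
# conn.commit(), the INSERT would be rolled back when the connection is closed.
# with engine.connect() as conn:
#     conn.execute(
#         text("INSERT INTO some_table(x, y) VALUES (:x, :y)"),
#         [{"x": 1, "y": 2}],
#     )
#     conn.commit()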
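
# NOTE (sketch): regarding the duplication TODO above - create_all() checks for existing
# tables before issuing CREATE TABLE (checkfirst=True is the default), so re-running the
# script should skip tables that are already present rather than raise from psycopg2.
# metadata_obj.create_all(engine, checkfirst=True)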