Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Save sandys/671b8b86ba913e6436d4cb22d04b135f to your computer and use it in GitHub Desktop.
Save sandys/671b8b86ba913e6436d4cb22d04b135f to your computer and use it in GitHub Desktop.

Revisions

  1. Sandeep Srinivasa renamed this gist Jul 9, 2022. 1 changed file with 0 additions and 0 deletions.
  2. Sandeep Srinivasa renamed this gist Jul 9, 2022. 1 changed file with 0 additions and 0 deletions.
  3. Sandeep Srinivasa revised this gist Jul 9, 2022. No changes.
  4. Sandeep Srinivasa revised this gist Jul 8, 2022. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion README.md
    Original file line number Diff line number Diff line change
    @@ -18,7 +18,7 @@ why this code ?

    cmdline
    --------------
    `gunicorn sync_p:app -w 4 -k sync_p.RestartableUvicornWorker --logger-class sync_p.GunicornLogger`
    `gunicorn sync_p:app -w 1 -k sync_p.RestartableUvicornWorker --logger-class sync_p.GunicornLogger`

    benchmark
    ----------------
  5. Sandeep Srinivasa revised this gist Jul 8, 2022. 1 changed file with 28 additions and 276 deletions.
    304 changes: 28 additions & 276 deletions settings.py
    Original file line number Diff line number Diff line change
    @@ -1,277 +1,29 @@
    from __future__ import annotations

    import decimal
    import logging
    import os
    import signal
    import sys
    import threading
    import time
    from contextlib import contextmanager
    from dataclasses import (
    dataclass,
    field,
    )
    from datetime import datetime
    from functools import lru_cache
    from typing import (
    Any,
    Dict,
    Iterable,
    Generator,
    List,
    Optional,
    Tuple,
    )

    import pydantic
    import typer
    import uvicorn as uvicorn
    import yaml
    from fastapi import (
    BackgroundTasks,
    Depends,
    FastAPI,
    HTTPException,
    Response,
    status,
    )
    from fastapi.responses import PlainTextResponse
    from gunicorn.glogging import Logger
    from loguru import logger
    from pydantic import BaseModel
    from pydantic_sqlalchemy import sqlalchemy_to_pydantic
    from sqlalchemy import (
    ARRAY,
    DECIMAL,
    TEXT,
    TIMESTAMP,
    BigInteger,
    Boolean,
    CheckConstraint,
    Column,
    Date,
    DateTime,
    Enum,
    Float,
    ForeignKey,
    Index,
    Integer,
    Numeric,
    PrimaryKeyConstraint,
    String,
    Table,
    Text,
    UniqueConstraint,
    and_,
    create_engine,
    engine,
    event,
    func,
    or_,
    )
    from sqlalchemy.ext.declarative import declared_attr
    from sqlalchemy.orm import (
    Session,
    registry,
    relationship,
    sessionmaker,
    )
    from sqlalchemy.schema import Index
    from starlette.middleware.cors import CORSMiddleware
    from uvicorn.workers import UvicornWorker

    import settings

    # from cmath import log


    class ReloaderThread(threading.Thread):
        """Background watchdog for a gunicorn/uvicorn worker.

        Polls the worker's ``alive`` flag and, once the arbiter marks the
        worker dead, sends SIGINT to the current process so the uvicorn event
        loop actually exits and gunicorn can restart the worker (used to make
        ``--reload`` work with the custom worker class below).
        """

        def __init__(self, worker: UvicornWorker, sleep_interval: float = 1.0):
            super().__init__()
            # ``Thread.setDaemon`` is deprecated since Python 3.10; assign the
            # ``daemon`` attribute directly instead (identical effect).
            self.daemon = True
            self._worker = worker
            self._interval = sleep_interval

        def run(self) -> None:
            # Poll liveness forever; daemon=True means this thread never
            # blocks interpreter shutdown.
            while True:
                if not self._worker.alive:
                    os.kill(os.getpid(), signal.SIGINT)
                time.sleep(self._interval)


    class RestartableUvicornWorker(UvicornWorker):
        """UvicornWorker variant that supports gunicorn's ``--reload``.

        Stock UvicornWorker does not die promptly on reload; this class pairs
        each worker with a ReloaderThread that SIGINTs the process once the
        arbiter marks it dead.
        """

        # Passed through to uvicorn's Config by the base class.
        CONFIG_KWARGS = {
            "loop": "uvloop",
            "http": "httptools",
            # "log_config": yaml.safe_load(open(os.path.join(os.path.dirname(__file__), "logging.yaml"), "r")
        }

        def __init__(self, *args: List[Any], **kwargs: Dict[str, Any]):
            super().__init__(*args, **kwargs)
            # Watchdog is created eagerly but only started when reload is on.
            self._reloader_thread = ReloaderThread(self)

        def run(self) -> None:
            # Only pay the polling cost when gunicorn runs with --reload.
            if self.cfg.reload:
                self._reloader_thread.start()
            super().run()


    class InterceptHandler(logging.Handler):
        """Route stdlib ``logging`` records into loguru.

        Default handler from the examples in the loguru documentation.
        See https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging
        """

        def emit(self, record: logging.LogRecord) -> None:
            # Map the stdlib level name to the matching loguru level; fall
            # back to the numeric level for custom/unknown level names.
            try:
                level = logger.level(record.levelname).name
            except ValueError:
                level = record.levelno

            # Walk back past the logging-module frames so loguru reports the
            # caller that actually emitted the message.  (The original left
            # this loop commented out with depth hard-coded to 1, which makes
            # loguru attribute every record to logging internals.)
            frame, depth = logging.currentframe(), 2
            while frame and frame.f_code.co_filename == logging.__file__:
                frame = frame.f_back
                depth += 1

            logger.opt(depth=depth, exception=record.exc_info).log(
                level, record.getMessage()
            )


    class GunicornLogger(Logger):
        """Gunicorn logger that funnels access and error logs through loguru."""

        def setup(self, cfg) -> None:
            # Single intercept handler shared by both gunicorn log channels.
            intercept = InterceptHandler()
            intercept.setFormatter(
                logging.Formatter("%(asctime)s %(name)-12s %(levelname)-8s %(message)s")
            )

            # Attach the handler and level to both channels.
            for channel in (self.error_log, self.access_log):
                channel.addHandler(intercept)
                channel.setLevel(settings.LOG_LEVEL)

            # Configure loguru's sink before gunicorn emits its first record.
            logger.configure(handlers=[{"sink": sys.stdout, "level": settings.LOG_LEVEL}])


    @lru_cache()
    def get_engine() -> engine.Engine:
        """Return the process-wide SQLAlchemy engine (memoized by lru_cache)."""
        database_url = settings.SQLALCHEMY_DATABASE_URL
        return create_engine(
            database_url,
            echo=True,           # log every SQL statement (demo/benchmark gist)
            pool_pre_ping=True,  # validate pooled connections before handing them out
        )


    def get_db() -> Generator[Session, None, None]:
        """FastAPI dependency that yields a per-request SQLAlchemy Session.

        Commits on success, rolls back and re-raises on any error, and always
        closes the session.  ``expire_on_commit=False`` lets ORM objects be
        returned to the caller after the commit without a refresh query.
        """
        # Explicit type because sessionmaker.__call__ stub is Any
        session: Session = sessionmaker(
            autocommit=False, autoflush=False, expire_on_commit=False, bind=get_engine()
        )()
        try:
            yield session
            session.commit()
        except BaseException:
            # Same semantics as the original bare ``except:`` (still catches
            # GeneratorExit/KeyboardInterrupt) but explicit and lint-clean.
            session.rollback()
            raise
        finally:
            session.close()


    mapper_registry = registry()


    @dataclass
    class SurrogatePK:
        """Dataclass mixin adding an auto-increment integer surrogate primary key.

        ``__sa_dataclass_metadata_key__`` tells SQLAlchemy's registry to read
        the Column object out of each field's ``metadata["sa"]`` entry.
        """

        __sa_dataclass_metadata_key__ = "sa"
        # init=False: the database assigns the id; it is None until flushed.
        id: int = field(
            init=False,
            default=None,
            metadata={"sa": Column(Integer, primary_key=True, autoincrement=True)},
        )


    @dataclass
    class TimeStampMixin:
    __sa_dataclass_metadata_key__ = "sa"
    created_at: datetime = field(
    default_factory=datetime.now,
    metadata={"sa": Column(DateTime, default=datetime.now)},
    )
    updated_at: datetime = field(
    default_factory=datetime.now,
    metadata={
    "sa": Column(DateTime, default=datetime.now, onupdate=datetime.utcnow)
    # ...
    # settings.py (or settings obj)
    # from main import SQLALCHEMY_DATABASE_URL


    LOG_LEVEL = "DEBUG" # (or 10 if `logging.DEBUG`)
    # custom handlers removed, we catch logs via loguru
    UVICORN_LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
    "default": {
    "()": "uvicorn.logging.DefaultFormatter",
    "fmt": "%(levelprefix)s %(message)s",
    "use_colors": None,
    },
    )


    @mapper_registry.mapped
    @dataclass
    class User(SurrogatePK, TimeStampMixin):
        """Demo ORM model mapped through SQLAlchemy's dataclass-mapping registry."""

        __tablename__ = "user"

        __sa_dataclass_metadata_key__ = "sa"
        # NOTE(review): defaults are None although the annotations say ``str``,
        # so these columns are effectively nullable — confirm before tightening.
        title: str = field(default=None, metadata={"sa": Column(String(50))})
        description: str = field(default=None, metadata={"sa": Column(String(50))})


    UserPyd = sqlalchemy_to_pydantic(User)

    mapper_registry.metadata.create_all(bind=get_engine())
    # Create the app, database, and stocks table
    app = FastAPI(debug=True)


    @app.exception_handler(Exception)
    async def validation_exception_handler(request, exc):
        """Catch-all handler: log the error, return an opaque 500 to the client."""
        # Despite the name this handles *every* Exception, not just validation.
        logger.debug(str(exc))
        return PlainTextResponse("Something went wrong", status_code=500)


    cli = typer.Typer()


    @cli.command()
    def db_init_models():
        """CLI command: drop and recreate all mapped tables.

        WARNING: ``drop_all`` wipes existing data — dev/reset use only.
        """
        Base = mapper_registry.generate_base()
        Base.metadata.drop_all(bind=get_engine())
        Base.metadata.create_all(bind=get_engine())
        print("Done")


    @cli.command()
    def nothing(name: str):
        """No-op CLI command; ``name`` is accepted but unused.

        NOTE(review): presumably kept so typer exposes more than one
        subcommand — confirm before removing.
        """
        print("Done")


    @app.get("/items", response_model=List[UserPyd])
    def read_items(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
        """Return a page of users using OFFSET ``skip`` / LIMIT ``limit``."""
        page_query = db.query(User).offset(skip).limit(limit)
        return page_query.all()


    @app.get("/users/", response_model=UserPyd, status_code=status.HTTP_201_CREATED)
    def create_user(email: str = None, db: Session = Depends(get_db)):
        """Create a hard-coded User row and return it.

        NOTE(review): exposed as GET (not POST) — presumably so the wrk
        benchmark in the README can drive it; confirm before changing.
        ``email`` is accepted but never used.
        """
        u = User(title="sss")
        db.add(u)
        db.commit()
        # No session.refresh(u) needed: get_db() builds its session with
        # expire_on_commit=False, so attributes stay loaded after commit.
        return u


    if __name__ == "__main__":
    cli()
    "access": {
    "()": "uvicorn.logging.AccessFormatter",
    "fmt": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
    },
    },
    "loggers": {
    "uvicorn": {"level": "INFO"},
    "uvicorn.error": {"level": "INFO"},
    "uvicorn.access": {"level": "INFO", "propagate": False},
    },
    }

    SQLALCHEMY_DATABASE_URL = "postgresql://test:[email protected]:5433/test"
  6. Sandeep Srinivasa revised this gist Jul 8, 2022. 1 changed file with 14 additions and 2 deletions.
    16 changes: 14 additions & 2 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -26,8 +26,20 @@ benchmark

    **IMPORTANT** - do not forget the trailing slash (at the end of "users/"); otherwise wrk will silently fail and you won't know.

    **IMPORTANT** - `expire_on_commit=False` is the important setting here
    **IMPORTANT** - `expire_on_commit=False` is the important setting here. if u dont set it, the following error will happen

    ```
    user = UserFactory()
    session.add(user)
    session.commit()
    I have removed the async code from here. right now it is very unstable because of inherent issues in fastapi (like https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/290 and https://github.com/tiangolo/fastapi/issues/726#issuecomment-1025165337) . Using async in fastapi with sqlalchemy is absolutely not recommended right now.
    # missing session.refresh(user) and causing the problem
    return user
    ```
    I created an object, added it, and committed it to the database; after that I tried to access one of the original object's attributes without refreshing the session via session.refresh(object).



    **NOTE:** I have removed the async code from here. right now it is very unstable because of inherent issues in fastapi (like https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/290 and https://github.com/tiangolo/fastapi/issues/726#issuecomment-1025165337) . Using async in fastapi with sqlalchemy is absolutely not recommended right now.

  7. Sandeep Srinivasa revised this gist Jul 8, 2022. 2 changed files with 279 additions and 28 deletions.
    3 changes: 3 additions & 0 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -23,8 +23,11 @@ cmdline
    benchmark
    ----------------
    ` docker run --network=host --rm skandyla/wrk -t12 -c400 -d30s http://localhost:8000/users/`

    **IMPORTANT** - do not forget the trailing slash (at the end of "users/"); otherwise wrk will silently fail and you won't know.

    **IMPORTANT** - `expire_on_commit=False` is the important setting here


    I have removed the async code from here. right now it is very unstable because of inherent issues in fastapi (like https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/290 and https://github.com/tiangolo/fastapi/issues/726#issuecomment-1025165337) . Using async in fastapi with sqlalchemy is absolutely not recommended right now.

    304 changes: 276 additions & 28 deletions settings.py
    Original file line number Diff line number Diff line change
    @@ -1,29 +1,277 @@
    # ...
    # settings.py (or settings obj)
    # from main import SQLALCHEMY_DATABASE_URL


    LOG_LEVEL = "DEBUG" # (or 10 if `logging.DEBUG`)
    # custom handlers removed, we catch logs via loguru
    UVICORN_LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
    "default": {
    "()": "uvicorn.logging.DefaultFormatter",
    "fmt": "%(levelprefix)s %(message)s",
    "use_colors": None,
    },
    "access": {
    "()": "uvicorn.logging.AccessFormatter",
    "fmt": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
    from __future__ import annotations

    import decimal
    import logging
    import os
    import signal
    import sys
    import threading
    import time
    from contextlib import contextmanager
    from dataclasses import (
    dataclass,
    field,
    )
    from datetime import datetime
    from functools import lru_cache
    from typing import (
    Any,
    Dict,
    Iterable,
    Generator,
    List,
    Optional,
    Tuple,
    )

    import pydantic
    import typer
    import uvicorn as uvicorn
    import yaml
    from fastapi import (
    BackgroundTasks,
    Depends,
    FastAPI,
    HTTPException,
    Response,
    status,
    )
    from fastapi.responses import PlainTextResponse
    from gunicorn.glogging import Logger
    from loguru import logger
    from pydantic import BaseModel
    from pydantic_sqlalchemy import sqlalchemy_to_pydantic
    from sqlalchemy import (
    ARRAY,
    DECIMAL,
    TEXT,
    TIMESTAMP,
    BigInteger,
    Boolean,
    CheckConstraint,
    Column,
    Date,
    DateTime,
    Enum,
    Float,
    ForeignKey,
    Index,
    Integer,
    Numeric,
    PrimaryKeyConstraint,
    String,
    Table,
    Text,
    UniqueConstraint,
    and_,
    create_engine,
    engine,
    event,
    func,
    or_,
    )
    from sqlalchemy.ext.declarative import declared_attr
    from sqlalchemy.orm import (
    Session,
    registry,
    relationship,
    sessionmaker,
    )
    from sqlalchemy.schema import Index
    from starlette.middleware.cors import CORSMiddleware
    from uvicorn.workers import UvicornWorker

    import settings

    # from cmath import log


    class ReloaderThread(threading.Thread):
    def __init__(self, worker: UvicornWorker, sleep_interval: float = 1.0):
    super().__init__()
    self.setDaemon(True)
    self._worker = worker
    self._interval = sleep_interval

    def run(self) -> None:
    while True:
    if not self._worker.alive:
    os.kill(os.getpid(), signal.SIGINT)
    time.sleep(self._interval)


    class RestartableUvicornWorker(UvicornWorker):

    CONFIG_KWARGS = {
    "loop": "uvloop",
    "http": "httptools",
    # "log_config": yaml.safe_load(open(os.path.join(os.path.dirname(__file__), "logging.yaml"), "r")
    }

    def __init__(self, *args: List[Any], **kwargs: Dict[str, Any]):
    super().__init__(*args, **kwargs)
    self._reloader_thread = ReloaderThread(self)

    def run(self) -> None:
    if self.cfg.reload:
    self._reloader_thread.start()
    super().run()


    class InterceptHandler(logging.Handler):
    """
    Default handler from examples in loguru documentaion.
    See https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging
    """

    def emit(self, record: logging.LogRecord):
    # Get corresponding Loguru level if it exists
    try:
    level = logger.level(record.levelname).name
    except ValueError:
    level = record.levelno

    # Find caller from where originated the logged message
    frame, depth = logging.currentframe(), 1
    # while frame.f_code.co_filename == logging.__file__:
    # frame = frame.f_back
    # depth += 1

    logger.opt(depth=depth, exception=record.exc_info).log(
    level, record.getMessage()
    )


    class GunicornLogger(Logger):
    def setup(self, cfg) -> None:
    handler = InterceptHandler()
    # handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(
    logging.Formatter("%(asctime)s %(name)-12s %(levelname)-8s %(message)s")
    )

    # Add log handler to logger and set log level
    self.error_log.addHandler(handler)
    self.error_log.setLevel(settings.LOG_LEVEL)
    self.access_log.addHandler(handler)
    self.access_log.setLevel(settings.LOG_LEVEL)

    # Configure logger before gunicorn starts logging
    logger.configure(handlers=[{"sink": sys.stdout, "level": settings.LOG_LEVEL}])


    @lru_cache()
    def get_engine() -> engine.Engine:
    return create_engine(
    settings.SQLALCHEMY_DATABASE_URL,
    # connect_args={"check_same_thread": False},
    echo=True,
    pool_pre_ping=True,
    )


    def get_db() -> Generator[Session, None, None]:
    # Explicit type because sessionmaker.__call__ stub is Any
    session: Session = sessionmaker(
    autocommit=False, autoflush=False,expire_on_commit=False, bind=get_engine()
    )()
    # session = SessionLocal()
    try:
    yield session
    session.commit()
    except:
    session.rollback()
    raise
    finally:
    session.close()


    mapper_registry = registry()


    @dataclass
    class SurrogatePK:
    __sa_dataclass_metadata_key__ = "sa"
    id: int = field(
    init=False,
    default=None,
    metadata={"sa": Column(Integer, primary_key=True, autoincrement=True)},
    )


    @dataclass
    class TimeStampMixin:
    __sa_dataclass_metadata_key__ = "sa"
    created_at: datetime = field(
    default_factory=datetime.now,
    metadata={"sa": Column(DateTime, default=datetime.now)},
    )
    updated_at: datetime = field(
    default_factory=datetime.now,
    metadata={
    "sa": Column(DateTime, default=datetime.now, onupdate=datetime.utcnow)
    },
    },
    "loggers": {
    "uvicorn": {"level": "INFO"},
    "uvicorn.error": {"level": "INFO"},
    "uvicorn.access": {"level": "INFO", "propagate": False},
    },
    }

    SQLALCHEMY_DATABASE_URL = "postgresql://test:[email protected]:5433/test"
    )


    @mapper_registry.mapped
    @dataclass
    class User(SurrogatePK, TimeStampMixin):
    __tablename__ = "user"

    __sa_dataclass_metadata_key__ = "sa"
    title: str = field(default=None, metadata={"sa": Column(String(50))})
    description: str = field(default=None, metadata={"sa": Column(String(50))})


    UserPyd = sqlalchemy_to_pydantic(User)

    mapper_registry.metadata.create_all(bind=get_engine())
    # Create the app, database, and stocks table
    app = FastAPI(debug=True)


    @app.exception_handler(Exception)
    async def validation_exception_handler(request, exc):
    logger.debug(str(exc))
    return PlainTextResponse("Something went wrong", status_code=500)


    cli = typer.Typer()


    @cli.command()
    def db_init_models():

    Base = mapper_registry.generate_base()
    Base.metadata.drop_all(bind=get_engine())
    Base.metadata.create_all(bind=get_engine())
    print("Done")


    @cli.command()
    def nothing(name: str):

    print("Done")


    @app.get("/items", response_model=List[UserPyd])
    def read_items(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    items = db.query(User).offset(skip).limit(limit).all()

    return items


    @app.get("/users/", response_model=UserPyd, status_code=status.HTTP_201_CREATED)
    def create_user(email: str = None, db: Session = Depends(get_db)):
    u = User(title="sss")
    db.add(u)
    db.commit()

    # return {"data":new_post}

    return u


    if __name__ == "__main__":
    cli()
  8. Sandeep Srinivasa revised this gist Jul 8, 2022. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion sync_p.py
    Original file line number Diff line number Diff line change
    @@ -173,7 +173,7 @@ def get_engine() -> engine.Engine:
    def get_db() -> Generator[Session, None, None]:
    # Explicit type because sessionmaker.__call__ stub is Any
    session: Session = sessionmaker(
    autocommit=False, autoflush=False, bind=get_engine()
    autocommit=False, autoflush=False,expire_on_commit=False, bind=get_engine()
    )()
    # session = SessionLocal()
    try:
  9. Sandeep Srinivasa revised this gist Jul 8, 2022. 1 changed file with 2 additions and 0 deletions.
    2 changes: 2 additions & 0 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -9,6 +9,8 @@ why this code ?
    P.S. How to quickly run postgres (using docker)
    --------------

    This code was tested on Windows 11 WSL2 (Ubuntu VM)

    1. `docker run -it --rm -p 5433:5432 --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data -v /tmp/pgdata:/var/lib/postgresql/data -e POSTGRES_USER=test postgres` . This command will quickly start postgres on port 5433 and create a database *test* with user *test* and password *mysecretpassword*. The reason I like to use 5433 is because many times i have seen people having a normal, default installation on 5432 and it causes a lot of mistakes/confusion.
    2. `docker inspect -f '{{.Name}} - {{.NetworkSettings.IPAddress }}' $(docker ps -q)`. This will give you ip address of the postgres container.
    3. `docker run --network=host -it --rm postgres psql postgresql://test:[email protected]:5433/test` . this will connect to your localhost on port 5433
  10. Sandeep Srinivasa revised this gist Jul 8, 2022. 2 changed files with 4 additions and 2 deletions.
    3 changes: 2 additions & 1 deletion README.md
    Original file line number Diff line number Diff line change
    @@ -20,7 +20,8 @@ cmdline

    benchmark
    ----------------
    `docker run --network=host --rm skandyla/wrk -t12 -c400 -d30s http://localhost:8000/users `
    ` docker run --network=host --rm skandyla/wrk -t12 -c400 -d30s http://localhost:8000/users/`
    **IMPORTANT** - do not forget the trailing slash (at the end of "users/". otherwise wrk will silently croak and u wont know.


    I have removed the async code from here. right now it is very unstable because of inherent issues in fastapi (like https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/290 and https://github.com/tiangolo/fastapi/issues/726#issuecomment-1025165337) . Using async in fastapi with sqlalchemy is absolutely not recommended right now.
    3 changes: 2 additions & 1 deletion sync_p.py
    Original file line number Diff line number Diff line change
    @@ -18,6 +18,7 @@
    Any,
    Dict,
    Iterable,
    Generator,
    List,
    Optional,
    Tuple,
    @@ -169,7 +170,7 @@ def get_engine() -> engine.Engine:
    )


    def get_db() -> Iterable[Session]:
    def get_db() -> Generator[Session, None, None]:
    # Explicit type because sessionmaker.__call__ stub is Any
    session: Session = sessionmaker(
    autocommit=False, autoflush=False, bind=get_engine()
  11. Sandeep Srinivasa revised this gist Jul 8, 2022. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion README.md
    Original file line number Diff line number Diff line change
    @@ -16,7 +16,7 @@ why this code ?

    cmdline
    --------------
    `gunicorn sync_p:app -w 4 -k main4.RestartableUvicornWorker --logger-class sync_p.GunicornLogger`
    `gunicorn sync_p:app -w 4 -k sync_p.RestartableUvicornWorker --logger-class sync_p.GunicornLogger`

    benchmark
    ----------------
  12. Sandeep Srinivasa revised this gist Jul 8, 2022. 1 changed file with 4 additions and 0 deletions.
    4 changes: 4 additions & 0 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -18,6 +18,10 @@ cmdline
    --------------
    `gunicorn sync_p:app -w 4 -k main4.RestartableUvicornWorker --logger-class sync_p.GunicornLogger`

    benchmark
    ----------------
    `docker run --network=host --rm skandyla/wrk -t12 -c400 -d30s http://localhost:8000/users `


    I have removed the async code from here. right now it is very unstable because of inherent issues in fastapi (like https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/290 and https://github.com/tiangolo/fastapi/issues/726#issuecomment-1025165337) . Using async in fastapi with sqlalchemy is absolutely not recommended right now.

  13. Sandeep Srinivasa revised this gist Jul 8, 2022. 5 changed files with 201 additions and 346 deletions.
    6 changes: 5 additions & 1 deletion README.md
    Original file line number Diff line number Diff line change
    @@ -16,4 +16,8 @@ why this code ?

    cmdline
    --------------
    `poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"`
    `gunicorn sync_p:app -w 4 -k main4.RestartableUvicornWorker --logger-class sync_p.GunicornLogger`


    I have removed the async code from here. right now it is very unstable because of inherent issues in fastapi (like https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/290 and https://github.com/tiangolo/fastapi/issues/726#issuecomment-1025165337) . Using async in fastapi with sqlalchemy is absolutely not recommended right now.

    217 changes: 0 additions & 217 deletions async_p.py
    Original file line number Diff line number Diff line change
    @@ -1,217 +0,0 @@
    from datetime import datetime
    from fastapi import BackgroundTasks, Depends, FastAPI
    from pydantic import BaseModel
    from sqlalchemy import (
    Column,
    create_engine,
    DateTime,
    TIMESTAMP,
    Boolean,
    Numeric,
    Integer,
    String,
    engine,
    Table,
    ForeignKey,
    ARRAY,
    )
    from sqlalchemy import (
    DECIMAL,
    TEXT,
    TIMESTAMP,
    BigInteger,
    Boolean,
    CheckConstraint,
    Column,
    Date,
    Enum,
    Float,
    ForeignKey,
    Index,
    Integer,
    Numeric,
    PrimaryKeyConstraint,
    String,
    Text,
    UniqueConstraint,
    and_,
    create_engine,
    event,
    func,
    or_,
    )
    from sqlalchemy.orm import Session, sessionmaker
    from sqlalchemy import select
    from sqlalchemy.ext.declarative import declared_attr
    from starlette.middleware.cors import CORSMiddleware

    import decimal
    from sqlalchemy.schema import Index
    from typing import Optional, Dict, List, Any, Tuple
    from contextlib import asynccontextmanager
    from functools import lru_cache

    # from async_lru import alru_cache as async_lru_cache


    from typing import List
    from typing import Optional

    from dataclasses import dataclass
    from dataclasses import field, dataclass
    from sqlalchemy.orm import registry

    from sqlalchemy.ext.asyncio import AsyncSession, AsyncEngine
    from sqlalchemy.ext.asyncio import create_async_engine

    import pydantic
    import asyncio
    import typer

    # Standard for SQLite
    # SQLALCHEMY_DATABASE_URL = "sqlite:///test10.db"
    SQLALCHEMY_DATABASE_URL = "postgresql+asyncpg://test@localhost:5432/test"

    mapper_registry = registry()


    @lru_cache()
    def get_engine() -> AsyncEngine:
    return create_async_engine(
    SQLALCHEMY_DATABASE_URL,
    # connect_args={"check_same_thread": False},
    pool_pre_ping=True,
    )


    @asynccontextmanager
    async def get_db(db_conn=Depends(get_engine)) -> AsyncSession:
    # Explicit type because sessionmaker.__call__ stub is Any
    # e = await get_engine()
    session: AsyncSession = sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=db_conn,
    class_=AsyncSession,
    expire_on_commit=False,
    )()
    try:
    yield session
    await session.commit()
    except:
    await session.rollback()
    raise
    finally:
    await session.close()


    @dataclass
    class SurrogatePK:
    __sa_dataclass_metadata_key__ = "sa"
    id: int = field(
    init=False, metadata={"sa": Column(Integer, primary_key=True)},
    )


    @dataclass
    class TimeStampMixin:
    __sa_dataclass_metadata_key__ = "sa"
    created_at: datetime = field(
    init=False, metadata={"sa": Column(DateTime, default=datetime.utcnow)}
    )
    updated_at: datetime = field(
    init=False,
    metadata={
    "sa": Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    },
    )


    # @mapper_registry.mapped
    @dataclass
    class User(SurrogatePK, TimeStampMixin):
    __tablename__ = "user"

    identity: Optional[str] = field(
    default=None, metadata={"sa": Column(String(length=255), nullable=False)}
    )

    row_status: Optional[str] = field(
    default=None, metadata={"sa": Column(String(length=20), nullable=False)}
    )

    @declared_attr
    def __table_args__(cls):
    return (
    Index(
    "index_on_identity_v3_user_identity",
    "identity",
    "row_status",
    unique=True,
    postgresql_where=cls.row_status == "active",
    ),
    )


    @mapper_registry.mapped
    @dataclass
    class UserSQL(User):
    pass


    UserPyd = pydantic.dataclasses.dataclass(User)

    # Create the app, database, and stocks table
    app = FastAPI()
    cli = typer.Typer()

    Base = mapper_registry.generate_base()


    async def init_models():
    # e = await get_engine()
    async with get_engine().begin() as conn:
    await conn.run_sync(Base.metadata.drop_all)
    await conn.run_sync(Base.metadata.create_all)


    @cli.command()
    def db_init_models(name: str):
    asyncio.run(init_models())
    print("Done")


    @app.on_event("startup")
    def open_database_connection_pools():
    get_engine()


    @app.on_event("shutdown")
    def close_database_connection_pools():
    _db_conn = get_engine()
    if _db_conn:
    _db_conn.dispose()


    # init_models()


    @app.get("/", response_model=List[UserPyd])
    async def foo(context_session: AsyncSession = Depends(get_db)):

    async with context_session as db:
    # Query stocks table and print results
    query = await db.execute(select(UserSQL))
    for d in query:
    print(
    f"""{d.identity}\t
    {d.row_status}\t
    {d.created_at}\t
    {d.updated_at}"""
    )

    return query.scalars().all()


    if __name__ == "__main__":
    cli()
    33 changes: 0 additions & 33 deletions pyproject.toml
    Original file line number Diff line number Diff line change
    @@ -1,33 +0,0 @@
    [tool.poetry]
    name = "api"
    version = "0.1.0"
    description = ""
    authors = ["sandeep srinivasa <[email protected]>"]

    [tool.poetry.dependencies]
    python = "^3.8"
    pydantic = {extras = ["email"], version = "^1.8.1"}
    fastapi = "^0.63.0"
    uvicorn = {extras = ["standard"], version = "^0.13.4"}
    gunicorn = "^20.0.4"
    msgpack-asgi = "^1.0.0"
    inotify = "^0.2.10"
    hashids = "^1.3.1"
    GeoAlchemy2 = "^0.8.4"
    redis = "^3.5.3"
    boto3 = "^1.17.29"
    pendulum = "^2.1.2"
    fuzzywuzzy = "^0.18.0"
    pandas = "^1.2.3"
    python-Levenshtein = "^0.12.2"
    SQLAlchemy = "^1.4.2"
    psycopg2-binary = "^2.8.6"
    asyncpg = "^0.22.0"
    typer = "^0.3.2"

    [tool.poetry.dev-dependencies]
    black = {version = "^20.8b1", allow-prereleases = true}

    [build-system]
    requires = ["poetry-core>=1.0.0"]
    build-backend = "poetry.core.masonry.api"
    29 changes: 29 additions & 0 deletions settings.py
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,29 @@
    # ...
    # settings.py (or settings obj)
    # from main import SQLALCHEMY_DATABASE_URL


    LOG_LEVEL = "DEBUG" # (or 10 if `logging.DEBUG`)
    # custom handlers removed, we catch logs via loguru
    UVICORN_LOGGING_CONFIG = {
    "version": 1,
    "disable_existing_loggers": False,
    "formatters": {
    "default": {
    "()": "uvicorn.logging.DefaultFormatter",
    "fmt": "%(levelprefix)s %(message)s",
    "use_colors": None,
    },
    "access": {
    "()": "uvicorn.logging.AccessFormatter",
    "fmt": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
    },
    },
    "loggers": {
    "uvicorn": {"level": "INFO"},
    "uvicorn.error": {"level": "INFO"},
    "uvicorn.access": {"level": "INFO", "propagate": False},
    },
    }

    SQLALCHEMY_DATABASE_URL = "postgresql://test:[email protected]:5433/test"
    262 changes: 167 additions & 95 deletions sync_p.py
    Original file line number Diff line number Diff line change
    @@ -1,21 +1,47 @@
    from __future__ import annotations

    import decimal
    import logging
    import os
    import signal
    import sys
    import threading
    import time
    from contextlib import contextmanager
    from dataclasses import (
    dataclass,
    field,
    )
    from datetime import datetime
    from fastapi import BackgroundTasks, Depends, FastAPI
    from functools import lru_cache
    from typing import (
    Any,
    Dict,
    Iterable,
    List,
    Optional,
    Tuple,
    )

    import pydantic
    import typer
    import uvicorn as uvicorn
    import yaml
    from fastapi import (
    BackgroundTasks,
    Depends,
    FastAPI,
    HTTPException,
    Response,
    status,
    )
    from fastapi.responses import PlainTextResponse
    from gunicorn.glogging import Logger
    from loguru import logger
    from pydantic import BaseModel
    from pydantic_sqlalchemy import sqlalchemy_to_pydantic
    from sqlalchemy import (
    Column,
    create_engine,
    DateTime,
    TIMESTAMP,
    Boolean,
    Numeric,
    Integer,
    String,
    engine,
    Table,
    ForeignKey,
    ARRAY,
    )
    from sqlalchemy import (
    DECIMAL,
    TEXT,
    TIMESTAMP,
    @@ -24,6 +50,7 @@
    CheckConstraint,
    Column,
    Date,
    DateTime,
    Enum,
    Float,
    ForeignKey,
    @@ -32,57 +59,122 @@
    Numeric,
    PrimaryKeyConstraint,
    String,
    Table,
    Text,
    UniqueConstraint,
    and_,
    create_engine,
    engine,
    event,
    func,
    or_,
    )
    from sqlalchemy.orm import Session, sessionmaker
    from sqlalchemy.ext.declarative import declared_attr
    from sqlalchemy.orm import (
    Session,
    registry,
    relationship,
    sessionmaker,
    )
    from sqlalchemy.schema import Index
    from starlette.middleware.cors import CORSMiddleware
    from uvicorn.workers import UvicornWorker

    from sqlalchemy.orm import relationship
    import settings

    import decimal
    from sqlalchemy.schema import Index
    from typing import Optional, Dict, List, Any, Tuple, Iterable
    from contextlib import contextmanager
    from functools import lru_cache
    # from cmath import log


    from typing import List
    from typing import Optional
    # ReloaderThread: watchdog that terminates the worker process on reload.
class ReloaderThread(threading.Thread):
    """Daemon thread that sends SIGINT to this process once gunicorn marks
    the wrapped worker as no longer alive.

    Used by RestartableUvicornWorker to make gunicorn's --reload actually
    restart uvicorn workers.
    """

    def __init__(self, worker: "UvicornWorker", sleep_interval: float = 1.0) -> None:
        super().__init__()
        # Fix: Thread.setDaemon() is deprecated (since Python 3.10);
        # assign the `daemon` property instead. Behavior is identical.
        self.daemon = True
        self._worker = worker
        self._interval = sleep_interval

    def run(self) -> None:
        # Poll the worker's alive flag; when gunicorn clears it, deliver
        # SIGINT to our own process so the worker exits and is restarted.
        while True:
            if not self._worker.alive:
                os.kill(os.getpid(), signal.SIGINT)
            time.sleep(self._interval)

    import pydantic
    import typer

    # Standard for SQLite
    # SQLALCHEMY_DATABASE_URL = "sqlite:///test10.db"
    SQLALCHEMY_DATABASE_URL = "postgresql://test@localhost:5432/test"
    # RestartableUvicornWorker: uvicorn worker compatible with gunicorn --reload.
class RestartableUvicornWorker(UvicornWorker):
    """UvicornWorker variant that cooperates with gunicorn's --reload flag.

    When reload is enabled, a background ReloaderThread watches this
    worker's ``alive`` flag and signals the process once gunicorn marks
    it dead, so the worker really restarts on code changes.
    """

    # Extra kwargs forwarded to uvicorn's Config by the base class.
    CONFIG_KWARGS = {
        "loop": "uvloop",
        "http": "httptools",
    }

    def __init__(self, *args: List[Any], **kwargs: Dict[str, Any]) -> None:
        super().__init__(*args, **kwargs)
        # Created eagerly, but only started when reload is requested.
        self._reloader_thread = ReloaderThread(self)

    def run(self) -> None:
        # Start the watchdog only when --reload is in effect; otherwise the
        # worker runs exactly like a plain UvicornWorker.
        if self.cfg.reload:
            self._reloader_thread.start()
        super().run()


    # InterceptHandler: bridge stdlib `logging` records into loguru.
class InterceptHandler(logging.Handler):
    """
    Default handler from examples in loguru documentation.
    See https://loguru.readthedocs.io/en/stable/overview.html#entirely-compatible-with-standard-logging
    """

    def emit(self, record: logging.LogRecord) -> None:
        # Map the stdlib level name to the corresponding loguru level,
        # falling back to the numeric level for custom/unknown names.
        try:
            level = logger.level(record.levelname).name
        except ValueError:
            level = record.levelno

        # Fix: walk back past the stdlib logging frames (per the loguru
        # recipe) so loguru reports the real call site. The original had
        # this loop commented out with depth hard-coded to 1, which made
        # every message appear to originate from this handler.
        frame, depth = logging.currentframe(), 2
        while frame is not None and frame.f_code.co_filename == logging.__file__:
            frame = frame.f_back
            depth += 1

        logger.opt(depth=depth, exception=record.exc_info).log(
            level, record.getMessage()
        )


    # GunicornLogger: custom gunicorn logger class (--logger-class) that routes
    # gunicorn's error and access logs through loguru via InterceptHandler.
    class GunicornLogger(Logger):
    # Called once by gunicorn at startup; `cfg` is gunicorn's parsed config.
    def setup(self, cfg) -> None:
    handler = InterceptHandler()
    # handler = logging.StreamHandler(sys.stdout)
    # NOTE(review): this formatter appears to have no effect -- InterceptHandler
    # forwards records to loguru and never renders with its own formatter;
    # presumably left over from the StreamHandler variant above. Confirm
    # before removing.
    handler.setFormatter(
    logging.Formatter("%(asctime)s %(name)-12s %(levelname)-8s %(message)s")
    )

    # Add log handler to logger and set log level
    self.error_log.addHandler(handler)
    self.error_log.setLevel(settings.LOG_LEVEL)
    self.access_log.addHandler(handler)
    self.access_log.setLevel(settings.LOG_LEVEL)

    # Configure logger before gunicorn starts logging
    # (single stdout sink, level taken from settings.LOG_LEVEL).
    logger.configure(handlers=[{"sink": sys.stdout, "level": settings.LOG_LEVEL}])


    # get_engine: lazily-created, process-wide SQLAlchemy engine.
@lru_cache()
def get_engine() -> engine.Engine:
    """Return the process-wide SQLAlchemy engine, creating it on first use.

    ``lru_cache`` on a zero-argument function acts as a lazy singleton:
    the engine (and its connection pool) is built once and then reused.
    """
    db_engine = create_engine(
        settings.SQLALCHEMY_DATABASE_URL,
        # connect_args={"check_same_thread": False},
        echo=True,  # log every SQL statement -- intended for development
        pool_pre_ping=True,  # validate pooled connections before handing them out
    )
    return db_engine


    @contextmanager
    def get_db(db_conn=Depends(get_engine)) -> Iterable[Session]:
    def get_db() -> Iterable[Session]:
    # Explicit type because sessionmaker.__call__ stub is Any
    session: Session = sessionmaker(autocommit=False, autoflush=False, bind=db_conn)()
    session: Session = sessionmaker(
    autocommit=False, autoflush=False, bind=get_engine()
    )()
    # session = SessionLocal()
    try:
    yield session
    session.commit()
    @@ -93,111 +185,91 @@ def get_db(db_conn=Depends(get_engine)) -> Iterable[Session]:
    session.close()


    mapper_registry = registry()


    # SurrogatePK: dataclass mixin adding an integer surrogate primary key.
@dataclass
class SurrogatePK:
    """Mixin that adds an auto-incrementing integer primary-key column.

    Intended for SQLAlchemy's declarative-dataclass mapping: the
    ``__sa_dataclass_metadata_key__`` value tells the mapper to read each
    field's Column from ``metadata["sa"]``.
    """

    __sa_dataclass_metadata_key__ = "sa"

    # Fix: annotated Optional[int] rather than int -- the value is None
    # until the row is flushed and the database assigns the id.
    id: Optional[int] = field(
        init=False,
        default=None,
        metadata={"sa": Column(Integer, primary_key=True, autoincrement=True)},
    )


    # TimeStampMixin: created_at / updated_at audit columns, consistently UTC.
@dataclass
class TimeStampMixin:
    """Mixin adding creation and last-update timestamps.

    Fix: the original mixed ``datetime.now`` (local time) for defaults with
    ``datetime.utcnow`` for ``onupdate``, so a row's timestamps could be in
    two different timezones. All timestamps are now UTC.
    """

    __sa_dataclass_metadata_key__ = "sa"

    # init=False on both fields: callers never supply audit timestamps;
    # they are set automatically on insert/update.
    created_at: datetime = field(
        init=False,
        default_factory=datetime.utcnow,
        metadata={"sa": Column(DateTime, default=datetime.utcnow)},
    )
    updated_at: datetime = field(
        init=False,
        default_factory=datetime.utcnow,
        metadata={
            "sa": Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
        },
    )


    # @mapper_registry.mapped
    @mapper_registry.mapped
    @dataclass
    class User(SurrogatePK, TimeStampMixin):
    __tablename__ = "user"

    identity: Optional[str] = field(
    default=None, metadata={"sa": Column(String(length=255), nullable=False)}
    )

    row_status: Optional[str] = field(
    default=None, metadata={"sa": Column(String(length=20), nullable=False)}
    )
    __sa_dataclass_metadata_key__ = "sa"
    title: str = field(default=None, metadata={"sa": Column(String(50))})
    description: str = field(default=None, metadata={"sa": Column(String(50))})

    @declared_attr
    def __table_args__(cls):
    return (
    Index(
    "index_on_identity_v3_user_identity",
    "identity",
    "row_status",
    unique=True,
    postgresql_where=cls.row_status == "active",
    ),
    )

    UserPyd = sqlalchemy_to_pydantic(User)

    @mapper_registry.mapped
    @dataclass
    class UserSQL(User):
    pass
    mapper_registry.metadata.create_all(bind=get_engine())
    # Create the app, database, and stocks table
    app = FastAPI(debug=True)


    UserPyd = pydantic.dataclasses.dataclass(User)
    # Catch-all exception handler: log full details, return an opaque 500.
@app.exception_handler(Exception)
async def validation_exception_handler(request, exc):
    """Handle any unhandled exception with a generic 500 response.

    Fix: the original logged unhandled exceptions at DEBUG level and without
    a traceback, making production failures effectively invisible. Log at
    ERROR with the exception attached; the client response is unchanged.
    """
    logger.opt(exception=exc).error("Unhandled exception: {}", exc)
    return PlainTextResponse("Something went wrong", status_code=500)


    # Create the app, database, and stocks table
    app = FastAPI()
    cli = typer.Typer()

    Base = mapper_registry.generate_base()
    # Base.metadata.drop_all(bind=get_engine())
    # Base.metadata.create_all(bind=get_engine())

    @cli.command()
    def db_init_models():

    def init_models():
    # e = await get_engine()
    Base = mapper_registry.generate_base()
    Base.metadata.drop_all(bind=get_engine())
    Base.metadata.create_all(bind=get_engine())
    print("Done")


    @cli.command()
    def db_init_models(name: str):
    init_models()
    print("Done")
    def nothing(name: str):

    print("Done")

    @app.on_event("startup")
    def open_database_connection_pools():
    get_engine()

    @app.get("/items", response_model=List[UserPyd])
    def read_items(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
    items = db.query(User).offset(skip).limit(limit).all()

    @app.on_event("shutdown")
    def close_database_connection_pools():
    _db_conn = get_engine()
    if _db_conn:
    _db_conn.dispose()
    return items


    @app.get("/", response_model=List[UserPyd])
    def foo(context_session: Session = Depends(get_db)):
    @app.get("/users/", response_model=UserPyd, status_code=status.HTTP_201_CREATED)
    def create_user(email: str = None, db: Session = Depends(get_db)):
    u = User(title="sss")
    db.add(u)
    db.commit()

    with context_session as db:
    # Query stocks table and print results
    query = db.query(UserSQL).all()
    for d in query:
    print(
    f"""{d.identity}\t
    {d.row_status}\t
    {d.created_at}\t
    {d.updated_at}"""
    )
    # return {"data":new_post}

    return query
    return u


    if __name__ == "__main__":
  14. Sandeep Srinivasa revised this gist Jul 4, 2022. 1 changed file with 3 additions and 4 deletions.
    7 changes: 3 additions & 4 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -9,11 +9,10 @@ why this code ?
    P.S. How to quickly run postgres (using docker)
    --------------

    1. `docker run -it --rm -p 5433:5432 --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data
    /pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres` . This command will quickly start postgres on port 5433 and create a database *test* with user *test* and password *mysecretpassword*. The reason I like to use 5433 is because many times i have seen people having a normal, default installation on 5432 and it causes a lot of mistakes/confusion.
    1. `docker run -it --rm -p 5433:5432 --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data -v /tmp/pgdata:/var/lib/postgresql/data -e POSTGRES_USER=test postgres` . This command will quickly start postgres on port 5433 and create a database *test* with user *test* and password *mysecretpassword*. The reason I like to use 5433 is because many times i have seen people having a normal, default installation on 5432 and it causes a lot of mistakes/confusion.
    2. `docker inspect -f '{{.Name}} - {{.NetworkSettings.IPAddress }}' $(docker ps -q)`. This will give you ip address of the postgres container.
    3. `docker run -it --rm postgres psql postgresql://test:mysecretpassword@172.17.0.2:5433/test` . use the ip address everywhere. e.g. 172.17.0.2 was the ip address in this case. This works if ur on linux.
    4. **PROBLEM IN WINDOWS WSL2** - [windows does NOT let you](https://docs.docker.com/desktop/windows/networking/#use-cases-and-workarounds) connect to another docker container using the its internal IP address. So you have to use the special `host.docker.internal` as ip address. `docker run -it --rm postgres psql postgresql://test:[email protected]:5433/test`
    3. `docker run --network=host -it --rm postgres psql postgresql://test:mysecretpassword@0.0.0.0:5433/test` . this will connect to your localhost on port 5433


    cmdline
    --------------
  15. Sandeep Srinivasa revised this gist Sep 1, 2021. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion README.md
    Original file line number Diff line number Diff line change
    @@ -13,7 +13,7 @@ why this code ?
    /pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres` . This command will quickly start postgres on port 5433 and create a database *test* with user *test* and password *mysecretpassword*. The reason I like to use 5433 is because many times i have seen people having a normal, default installation on 5432 and it causes a lot of mistakes/confusion.
    2. `docker inspect -f '{{.Name}} - {{.NetworkSettings.IPAddress }}' $(docker ps -q)`. This will give you ip address of the postgres container.
    3. `docker run -it --rm postgres psql postgresql://test:[email protected]:5433/test` . use the ip address everywhere. e.g. 172.17.0.2 was the ip address in this case. This works if ur on linux.
    4. **PROBLEM IN WINDOWS WSL2** - windows does NOT let you connect to another docker container using the its internal IP address. So you have to use the special `host.docker.internal` as ip address. `docker run -it --rm postgres psql postgresql://test:[email protected]:5433/test`
    4. **PROBLEM IN WINDOWS WSL2** - [windows does NOT let you](https://docs.docker.com/desktop/windows/networking/#use-cases-and-workarounds) connect to another docker container using the its internal IP address. So you have to use the special `host.docker.internal` as ip address. `docker run -it --rm postgres psql postgresql://test:[email protected]:5433/test`

    cmdline
    --------------
  16. Sandeep Srinivasa revised this gist Sep 1, 2021. 1 changed file with 2 additions and 2 deletions.
    4 changes: 2 additions & 2 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -9,8 +9,8 @@ why this code ?
    P.S. How to quickly run postgres (using docker)
    --------------

    1. ```docker run -it --rm -p 5433:5432 --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data
    /pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres``` . This command will quickly start postgres on port 5433 and create a database *test* with user *test* and password *mysecretpassword*. The reason I like to use 5433 is because many times i have seen people having a normal, default installation on 5432 and it causes a lot of mistakes/confusion.
    1. `docker run -it --rm -p 5433:5432 --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data
    /pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres` . This command will quickly start postgres on port 5433 and create a database *test* with user *test* and password *mysecretpassword*. The reason I like to use 5433 is because many times i have seen people having a normal, default installation on 5432 and it causes a lot of mistakes/confusion.
    2. `docker inspect -f '{{.Name}} - {{.NetworkSettings.IPAddress }}' $(docker ps -q)`. This will give you ip address of the postgres container.
    3. `docker run -it --rm postgres psql postgresql://test:[email protected]:5433/test` . use the ip address everywhere. e.g. 172.17.0.2 was the ip address in this case. This works if ur on linux.
    4. **PROBLEM IN WINDOWS WSL2** - windows does NOT let you connect to another docker container using the its internal IP address. So you have to use the special `host.docker.internal` as ip address. `docker run -it --rm postgres psql postgresql://test:[email protected]:5433/test`
  17. Sandeep Srinivasa revised this gist Sep 1, 2021. 1 changed file with 1 addition and 0 deletions.
    1 change: 1 addition & 0 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -8,6 +8,7 @@ why this code ?

    P.S. How to quickly run postgres (using docker)
    --------------

    1. ```docker run -it --rm -p 5433:5432 --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data
    /pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres``` . This command will quickly start postgres on port 5433 and create a database *test* with user *test* and password *mysecretpassword*. The reason I like to use 5433 is because many times i have seen people having a normal, default installation on 5432 and it causes a lot of mistakes/confusion.
    2. `docker inspect -f '{{.Name}} - {{.NetworkSettings.IPAddress }}' $(docker ps -q)`. This will give you ip address of the postgres container.
  18. Sandeep Srinivasa revised this gist Sep 1, 2021. 1 changed file with 9 additions and 13 deletions.
    22 changes: 9 additions & 13 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -4,20 +4,16 @@ why this code ?
    - [https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/104](https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/104)
    - [https://github.com/tiangolo/fastapi/issues/551](https://github.com/tiangolo/fastapi/issues/551)

    cmdline
    --------------
    `poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"`

    How to quickly run postgres (using docker)
    -----------
    ```docker run --network="host" -it --rm --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data/pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres```

    This command will quickly start postgres on port 5432 and create a database *test* with user *test* and password *mysecretpassword*

    If you want to connect using psql, `docker run --network="host" -it --rm postgres psql postgresql://test@localhost:5432/test `

    P.S. if you dont want to run a host network
    P.S. How to quickly run postgres (using docker)
    --------------
    1. ```docker run -it --rm --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data/pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres```
    1. ```docker run -it --rm -p 5433:5432 --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data
    /pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres``` . This command will quickly start postgres on port 5433 and create a database *test* with user *test* and password *mysecretpassword*. The reason I like to use 5433 is because many times i have seen people having a normal, default installation on 5432 and it causes a lot of mistakes/confusion.
    2. `docker inspect -f '{{.Name}} - {{.NetworkSettings.IPAddress }}' $(docker ps -q)`. This will give you ip address of the postgres container.
    3. `docker run -it --rm postgres psql postgresql://test:[email protected]:5432/test` . use the ip address everywhere. e.g. 172.17.0.2 was the ip address in this case
    3. `docker run -it --rm postgres psql postgresql://test:[email protected]:5433/test` . use the ip address everywhere. e.g. 172.17.0.2 was the ip address in this case. This works if ur on linux.
    4. **PROBLEM IN WINDOWS WSL2** - windows does NOT let you connect to another docker container using the its internal IP address. So you have to use the special `host.docker.internal` as ip address. `docker run -it --rm postgres psql postgresql://test:[email protected]:5433/test`
    cmdline
    --------------
    `poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"`
  19. Sandeep Srinivasa revised this gist Aug 30, 2021. 1 changed file with 6 additions and 0 deletions.
    6 changes: 6 additions & 0 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -1,3 +1,9 @@
    why this code ?
    --------------------

    - [https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/104](https://github.com/tiangolo/full-stack-fastapi-postgresql/issues/104)
    - [https://github.com/tiangolo/fastapi/issues/551](https://github.com/tiangolo/fastapi/issues/551)

    cmdline
    --------------
    `poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"`
  20. Sandeep Srinivasa revised this gist Apr 14, 2021. 1 changed file with 7 additions and 1 deletion.
    8 changes: 7 additions & 1 deletion README.md
    Original file line number Diff line number Diff line change
    @@ -8,4 +8,10 @@ How to quickly run postgres (using docker)

    This command will quickly start postgres on port 5432 and create a database *test* with user *test* and password *mysecretpassword*

    If you want to connect using psql, `docker run --network="host" -it --rm postgres psql postgresql://test@localhost:5432/test `
    If you want to connect using psql, `docker run --network="host" -it --rm postgres psql postgresql://test@localhost:5432/test `

    P.S. if you dont want to run a host network
    --------------
    1. ```docker run -it --rm --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data/pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres```
    2. `docker inspect -f '{{.Name}} - {{.NetworkSettings.IPAddress }}' $(docker ps -q)`. This will give you ip address of the postgres container.
    3. `docker run -it --rm postgres psql postgresql://test:[email protected]:5432/test` . use the ip address everywhere. e.g. 172.17.0.2 was the ip address in this case
  21. sandeep srinivasa revised this gist Apr 14, 2021. 2 changed files with 55 additions and 17 deletions.
    23 changes: 17 additions & 6 deletions async_p.py
    Original file line number Diff line number Diff line change
    @@ -70,7 +70,7 @@

    # Standard for SQLite
    # SQLALCHEMY_DATABASE_URL = "sqlite:///test10.db"
    SQLALCHEMY_DATABASE_URL = "postgresql+asyncpg://postgres@localhost:5432/sss"
    SQLALCHEMY_DATABASE_URL = "postgresql+asyncpg://test@localhost:5432/test"

    mapper_registry = registry()

    @@ -85,13 +85,13 @@ def get_engine() -> AsyncEngine:


    @asynccontextmanager
    async def get_db() -> AsyncSession:
    async def get_db(db_conn=Depends(get_engine)) -> AsyncSession:
    # Explicit type because sessionmaker.__call__ stub is Any
    # e = await get_engine()
    session: AsyncSession = sessionmaker(
    autocommit=False,
    autoflush=False,
    bind=get_engine(),
    bind=db_conn,
    class_=AsyncSession,
    expire_on_commit=False,
    )()
    @@ -109,8 +109,7 @@ async def get_db() -> AsyncSession:
    class SurrogatePK:
    __sa_dataclass_metadata_key__ = "sa"
    id: int = field(
    init=False,
    metadata={"sa": Column(Integer, primary_key=True)},
    init=False, metadata={"sa": Column(Integer, primary_key=True)},
    )


    @@ -182,6 +181,18 @@ def db_init_models(name: str):
    print("Done")


    @app.on_event("startup")
    def open_database_connection_pools():
    get_engine()


    @app.on_event("shutdown")
    def close_database_connection_pools():
    _db_conn = get_engine()
    if _db_conn:
    _db_conn.dispose()


    # init_models()


    @@ -203,4 +214,4 @@ async def foo(context_session: AsyncSession = Depends(get_db)):


    if __name__ == "__main__":
    cli()
    cli()
    49 changes: 38 additions & 11 deletions sync_p.py
    Original file line number Diff line number Diff line change
    @@ -48,7 +48,7 @@

    import decimal
    from sqlalchemy.schema import Index
    from typing import Optional, Dict, List, Any, Tuple
    from typing import Optional, Dict, List, Any, Tuple, Iterable
    from contextlib import contextmanager
    from functools import lru_cache

    @@ -61,10 +61,11 @@
    from sqlalchemy.orm import registry

    import pydantic
    import typer

    # Standard for SQLite
    # SQLALCHEMY_DATABASE_URL = "sqlite:///test10.db"
    SQLALCHEMY_DATABASE_URL = "postgresql://postgres@localhost:5432/sss"
    SQLALCHEMY_DATABASE_URL = "postgresql://test@localhost:5432/test"

    mapper_registry = registry()

    @@ -79,11 +80,9 @@ def get_engine() -> engine.Engine:


    @contextmanager
    def get_db():
    def get_db(db_conn=Depends(get_engine)) -> Iterable[Session]:
    # Explicit type because sessionmaker.__call__ stub is Any
    session: Session = sessionmaker(
    autocommit=False, autoflush=False, bind=get_engine()
    )()
    session: Session = sessionmaker(autocommit=False, autoflush=False, bind=db_conn)()
    try:
    yield session
    session.commit()
    @@ -98,8 +97,7 @@ def get_db():
    class SurrogatePK:
    __sa_dataclass_metadata_key__ = "sa"
    id: int = field(
    init=False,
    metadata={"sa": Column(Integer, primary_key=True)},
    init=False, metadata={"sa": Column(Integer, primary_key=True)},
    )


    @@ -154,10 +152,35 @@ class UserSQL(User):

    # Create the app, database, and stocks table
    app = FastAPI()
    cli = typer.Typer()

    Base = mapper_registry.generate_base()
    Base.metadata.drop_all(bind=get_engine())
    Base.metadata.create_all(bind=get_engine())
    # Base.metadata.drop_all(bind=get_engine())
    # Base.metadata.create_all(bind=get_engine())


    def init_models():
    # e = await get_engine()
    Base.metadata.drop_all(bind=get_engine())
    Base.metadata.create_all(bind=get_engine())


    @cli.command()
    def db_init_models(name: str):
    init_models()
    print("Done")


    @app.on_event("startup")
    def open_database_connection_pools():
    get_engine()


    @app.on_event("shutdown")
    def close_database_connection_pools():
    _db_conn = get_engine()
    if _db_conn:
    _db_conn.dispose()


    @app.get("/", response_model=List[UserPyd])
    @@ -174,4 +197,8 @@ def foo(context_session: Session = Depends(get_db)):
    {d.updated_at}"""
    )

    return query
    return query


    if __name__ == "__main__":
    cli()
  22. Sandeep Srinivasa revised this gist Apr 3, 2021. 1 changed file with 2 additions and 2 deletions.
    4 changes: 2 additions & 2 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -1,9 +1,9 @@
    cmdline
    ######
    --------------
    `poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"`

    How to quickly run postgres (using docker)
    ######
    -----------
    ```docker run --network="host" -it --rm --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data/pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres```

    This command will quickly start postgres on port 5432 and create a database *test* with user *test* and password *mysecretpassword*
  23. Sandeep Srinivasa revised this gist Apr 3, 2021. 1 changed file with 2 additions and 2 deletions.
    4 changes: 2 additions & 2 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -1,9 +1,9 @@
    cmdline
    #######
    ######
    `poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"`

    How to quickly run postgres (using docker)
    #######
    ######
    ```docker run --network="host" -it --rm --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data/pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres```

    This command will quickly start postgres on port 5432 and create a database *test* with user *test* and password *mysecretpassword*
  24. Sandeep Srinivasa revised this gist Apr 3, 2021. 1 changed file with 1 addition and 6 deletions.
    7 changes: 1 addition & 6 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -4,12 +4,7 @@ cmdline

    How to quickly run postgres (using docker)
    #######
    ```docker run --network="host" -it --rm \
    --name some-postgres \
    -e POSTGRES_PASSWORD=mysecretpassword \
    -e PGDATA=/var/lib/postgresql/data/pgdata \
    -v /tmp/pgdata2:/var/lib/postgresql/data \
    -e POSTGRES_USER=test postgres```
    ```docker run --network="host" -it --rm --name some-postgres -e POSTGRES_PASSWORD=mysecretpassword -e PGDATA=/var/lib/postgresql/data/pgdata -v /tmp/pgdata2:/var/lib/postgresql/data -e POSTGRES_USER=test postgres```

    This command will quickly start postgres on port 5432 and create a database *test* with user *test* and password *mysecretpassword*

  25. Sandeep Srinivasa revised this gist Apr 3, 2021. 1 changed file with 15 additions and 2 deletions.
    17 changes: 15 additions & 2 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -1,3 +1,16 @@
    cmdline
    --------
    `poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"`
    #######
    `poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"`

    How to quickly run postgres (using docker)
    #######
    ```docker run --network="host" -it --rm \
    --name some-postgres \
    -e POSTGRES_PASSWORD=mysecretpassword \
    -e PGDATA=/var/lib/postgresql/data/pgdata \
    -v /tmp/pgdata2:/var/lib/postgresql/data \
    -e POSTGRES_USER=test postgres```
    This command will quickly start postgres on port 5432 and create a database *test* with user *test* and password *mysecretpassword*
    If you want to connect using psql, `docker run --network="host" -it --rm postgres psql postgresql://test@localhost:5432/test `
  26. Sandeep Srinivasa revised this gist Apr 2, 2021. 2 changed files with 51 additions and 47 deletions.
    49 changes: 25 additions & 24 deletions async_p.py
    Original file line number Diff line number Diff line change
    @@ -106,32 +106,33 @@ async def get_db() -> AsyncSession:


    @dataclass
    class AuditMixin:
    # __abstract__ = True
    class SurrogatePK:
    __sa_dataclass_metadata_key__ = "sa"
    id: int = field(
    init=False,
    metadata={"sa": Column(Integer, primary_key=True)},
    )

    id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})

    @dataclass
    class TimeStampMixin:
    __sa_dataclass_metadata_key__ = "sa"
    created_at: datetime = field(
    init=False, metadata={"sa": Column(TIMESTAMP, default=0, nullable=False)}
    init=False, metadata={"sa": Column(DateTime, default=datetime.utcnow)}
    )
    updated_at: datetime = field(
    init=False,
    metadata={
    "sa": Column(
    TIMESTAMP,
    default=0,
    onupdate=0,
    nullable=False,
    )
    "sa": Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    },
    )


    # @mapper_registry.mapped
    @dataclass
    class Stock(AuditMixin):
    __tablename__ = "stock"
    __sa_dataclass_metadata_key__ = "sa"
    id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})
    class User(SurrogatePK, TimeStampMixin):
    __tablename__ = "user"

    identity: Optional[str] = field(
    default=None, metadata={"sa": Column(String(length=255), nullable=False)}
    )
    @@ -154,12 +155,12 @@ def __table_args__(cls):


    @mapper_registry.mapped
    class StockSql(Stock):
    @dataclass
    class UserSQL(User):
    pass


    StockPyd = pydantic.dataclasses.dataclass(Stock)

    UserPyd = pydantic.dataclasses.dataclass(User)

    # Create the app, database, and stocks table
    app = FastAPI()
    @@ -184,18 +185,18 @@ def db_init_models(name: str):
    # init_models()


    @app.get("/", response_model=List[StockPyd])
    @app.get("/", response_model=List[UserPyd])
    async def foo(context_session: AsyncSession = Depends(get_db)):

    async with context_session as db:
    # Query stocks table and print results
    query = await db.execute(select(StockSql))
    for d in query.scalars().all():
    query = await db.execute(select(UserSQL))
    for d in query:
    print(
    f"""{d.company}\t
    {d.price}\t
    {d.symbol}\t
    {d.datetime}"""
    f"""{d.identity}\t
    {d.row_status}\t
    {d.created_at}\t
    {d.updated_at}"""
    )

    return query.scalars().all()
    49 changes: 26 additions & 23 deletions sync_p.py
    Original file line number Diff line number Diff line change
    @@ -44,6 +44,8 @@
    from sqlalchemy.ext.declarative import declared_attr
    from starlette.middleware.cors import CORSMiddleware

    from sqlalchemy.orm import relationship

    import decimal
    from sqlalchemy.schema import Index
    from typing import Optional, Dict, List, Any, Tuple
    @@ -93,32 +95,33 @@ def get_db():


    @dataclass
    class AuditMixin:
    # __abstract__ = True
    class SurrogatePK:
    __sa_dataclass_metadata_key__ = "sa"
    id: int = field(
    init=False,
    metadata={"sa": Column(Integer, primary_key=True)},
    )

    id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})

    @dataclass
    class TimeStampMixin:
    __sa_dataclass_metadata_key__ = "sa"
    created_at: datetime = field(
    init=False, metadata={"sa": Column(TIMESTAMP, default=0, nullable=False)}
    init=False, metadata={"sa": Column(DateTime, default=datetime.utcnow)}
    )
    updated_at: datetime = field(
    init=False,
    metadata={
    "sa": Column(
    TIMESTAMP,
    default=0,
    onupdate=0,
    nullable=False,
    )
    "sa": Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    },
    )


    # @mapper_registry.mapped
    @dataclass
    class Stock(AuditMixin):
    __tablename__ = "stock"
    __sa_dataclass_metadata_key__ = "sa"
    id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})
    class User(SurrogatePK, TimeStampMixin):
    __tablename__ = "user"

    identity: Optional[str] = field(
    default=None, metadata={"sa": Column(String(length=255), nullable=False)}
    )
    @@ -140,13 +143,13 @@ def __table_args__(cls):
    )



    @mapper_registry.mapped
    class StockSql(Stock):
    @dataclass
    class UserSQL(User):
    pass


    StockPyd = pydantic.dataclasses.dataclass(Stock)
    UserPyd = pydantic.dataclasses.dataclass(User)


    # Create the app, database, and stocks table
    @@ -157,18 +160,18 @@ class StockSql(Stock):
    Base.metadata.create_all(bind=get_engine())


    @app.get("/", response_model=List[StockPyd])
    @app.get("/", response_model=List[UserPyd])
    def foo(context_session: Session = Depends(get_db)):

    with context_session as db:
    # Query stocks table and print results
    query = db.query(StockSql).all()
    query = db.query(UserSQL).all()
    for d in query:
    print(
    f"""{d.company}\t
    {d.price}\t
    {d.symbol}\t
    {d.datetime}"""
    f"""{d.identity}\t
    {d.row_status}\t
    {d.created_at}\t
    {d.updated_at}"""
    )

    return query
  27. Sandeep Srinivasa revised this gist Apr 2, 2021. 1 changed file with 2 additions and 3 deletions.
    5 changes: 2 additions & 3 deletions async_p.py
    Original file line number Diff line number Diff line change
    @@ -190,16 +190,15 @@ async def foo(context_session: AsyncSession = Depends(get_db)):
    async with context_session as db:
    # Query stocks table and print results
    query = await db.execute(select(StockSql))
    query = query.scalars().all()
    for d in query:
    for d in query.scalars().all():
    print(
    f"""{d.company}\t
    {d.price}\t
    {d.symbol}\t
    {d.datetime}"""
    )

    return query
    return query.scalars().all()


    if __name__ == "__main__":
  28. Sandeep Srinivasa revised this gist Apr 2, 2021. 2 changed files with 20 additions and 5 deletions.
    23 changes: 19 additions & 4 deletions async_p.py
    Original file line number Diff line number Diff line change
    @@ -1,4 +1,3 @@
    ## async version of the code above
    from datetime import datetime
    from fastapi import BackgroundTasks, Depends, FastAPI
    from pydantic import BaseModel
    @@ -51,7 +50,8 @@
    from typing import Optional, Dict, List, Any, Tuple
    from contextlib import asynccontextmanager
    from functools import lru_cache
    from async_lru import alru_cache as async_lru_cache

    # from async_lru import alru_cache as async_lru_cache


    from typing import List
    @@ -66,6 +66,7 @@

    import pydantic
    import asyncio
    import typer

    # Standard for SQLite
    # SQLALCHEMY_DATABASE_URL = "sqlite:///test10.db"
    @@ -162,17 +163,27 @@ class StockSql(Stock):

    # Create the app, database, and stocks table
    app = FastAPI()
    cli = typer.Typer()

    Base = mapper_registry.generate_base()


    async def init_models():
    e = await get_engine()
    async with e.begin() as conn:
    # e = await get_engine()
    async with get_engine().begin() as conn:
    await conn.run_sync(Base.metadata.drop_all)
    await conn.run_sync(Base.metadata.create_all)


    @cli.command()
    def db_init_models(name: str):
    asyncio.run(init_models())
    print("Done")


    # init_models()


    @app.get("/", response_model=List[StockPyd])
    async def foo(context_session: AsyncSession = Depends(get_db)):

    @@ -189,3 +200,7 @@ async def foo(context_session: AsyncSession = Depends(get_db)):
    )

    return query


    if __name__ == "__main__":
    cli()
    2 changes: 1 addition & 1 deletion pyproject.toml
    Original file line number Diff line number Diff line change
    @@ -23,7 +23,7 @@ python-Levenshtein = "^0.12.2"
    SQLAlchemy = "^1.4.2"
    psycopg2-binary = "^2.8.6"
    asyncpg = "^0.22.0"
    async_lru = "^1.0.2"
    typer = "^0.3.2"

    [tool.poetry.dev-dependencies]
    black = {version = "^20.8b1", allow-prereleases = true}
  29. Sandeep Srinivasa revised this gist Apr 2, 2021. 1 changed file with 1 addition and 1 deletion.
    2 changes: 1 addition & 1 deletion README.md
    Original file line number Diff line number Diff line change
    @@ -1,3 +1,3 @@
    cmdline
    --------
    poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"
    `poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"`
  30. Sandeep Srinivasa revised this gist Apr 2, 2021. 4 changed files with 227 additions and 0 deletions.
    3 changes: 3 additions & 0 deletions README.md
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,3 @@
    cmdline
    --------
    poetry run gunicorn testpg:app -p 8080 --preload --reload --reload-engine inotify -w 10 -k uvicorn.workers.UvicornWorker --log-level debug --access-logfile - --error-logfile - --access-logformat "SSSS - %(h)s %(l)s %(u)s %(t)s \"%(r)s\" %(s)s %(b)s \"%(f)s\" \"%(a)s"
    191 changes: 191 additions & 0 deletions async_p.py
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,191 @@
    ## async version of the code above
    from datetime import datetime
    from fastapi import BackgroundTasks, Depends, FastAPI
    from pydantic import BaseModel
    from sqlalchemy import (
    Column,
    create_engine,
    DateTime,
    TIMESTAMP,
    Boolean,
    Numeric,
    Integer,
    String,
    engine,
    Table,
    ForeignKey,
    ARRAY,
    )
    from sqlalchemy import (
    DECIMAL,
    TEXT,
    TIMESTAMP,
    BigInteger,
    Boolean,
    CheckConstraint,
    Column,
    Date,
    Enum,
    Float,
    ForeignKey,
    Index,
    Integer,
    Numeric,
    PrimaryKeyConstraint,
    String,
    Text,
    UniqueConstraint,
    and_,
    create_engine,
    event,
    func,
    or_,
    )
    from sqlalchemy.orm import Session, sessionmaker
    from sqlalchemy import select
    from sqlalchemy.ext.declarative import declared_attr
    from starlette.middleware.cors import CORSMiddleware

    import decimal
    from sqlalchemy.schema import Index
    from typing import Optional, Dict, List, Any, Tuple
    from contextlib import asynccontextmanager
    from functools import lru_cache
    from async_lru import alru_cache as async_lru_cache


    from typing import List
    from typing import Optional

    from dataclasses import dataclass
    from dataclasses import field, dataclass
    from sqlalchemy.orm import registry

    from sqlalchemy.ext.asyncio import AsyncSession, AsyncEngine
    from sqlalchemy.ext.asyncio import create_async_engine

    import pydantic
    import asyncio

    # Standard for SQLite
    # SQLALCHEMY_DATABASE_URL = "sqlite:///test10.db"
    SQLALCHEMY_DATABASE_URL = "postgresql+asyncpg://postgres@localhost:5432/sss"

    mapper_registry = registry()


    @lru_cache()
    def get_engine() -> AsyncEngine:
        """Return the process-wide async engine, built once and memoized.

        ``lru_cache`` on a zero-argument function turns this into a lazy
        singleton: the engine is created on the first call and every later
        call returns the same instance.
        """
        shared_engine = create_async_engine(
            SQLALCHEMY_DATABASE_URL,
            pool_pre_ping=True,  # validate pooled connections before use
        )
        return shared_engine


    @asynccontextmanager
    async def get_db() -> AsyncSession:
        """Yield a fresh AsyncSession bound to the shared engine.

        On clean exit of the ``async with`` body the session is committed;
        on any failure it is rolled back and the exception re-raised; the
        session is always closed.
        """
        # Explicit type because sessionmaker.__call__ stub is Any.
        session: AsyncSession = sessionmaker(
            autocommit=False,
            autoflush=False,
            bind=get_engine(),
            class_=AsyncSession,
            expire_on_commit=False,
        )()
        try:
            yield session
            await session.commit()
        except BaseException:
            # Narrowed from a bare `except:` (which is discouraged); still
            # catches everything, rolls back, and re-raises so the caller
            # sees the original failure.
            await session.rollback()
            raise
        finally:
            await session.close()


    @dataclass
    class AuditMixin:
        """Shared audit fields: surrogate primary key plus created/updated
        timestamps.

        Meant to be inherited by dataclass-mapped models; each field carries
        its SQLAlchemy Column definition in ``metadata`` under the ``"sa"``
        key (see ``__sa_dataclass_metadata_key__`` on the subclass).
        """

        # __abstract__ = True

        # Surrogate primary key; init=False keeps it out of __init__.
        id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})

        # NOTE(review): default=0 on a TIMESTAMP column looks suspect —
        # PostgreSQL rejects the integer 0 as a timestamp value; confirm
        # whether a datetime default (e.g. datetime.utcnow) was intended.
        created_at: datetime = field(
            init=False, metadata={"sa": Column(TIMESTAMP, default=0, nullable=False)}
        )
        # NOTE(review): same concern for default=0 / onupdate=0 here.
        updated_at: datetime = field(
            init=False,
            metadata={
                "sa": Column(
                    TIMESTAMP,
                    default=0,
                    onupdate=0,
                    nullable=False,
                )
            },
        )


    @dataclass
    class Stock(AuditMixin):
        """Dataclass-mapped ``stock`` table definition.

        Column objects live in each field's ``metadata["sa"]``; the actual
        SQLAlchemy mapping is applied to the ``StockSql`` subclass.
        """

        __tablename__ = "stock"
        # Tells SQLAlchemy's dataclass mapper which metadata key holds Columns.
        __sa_dataclass_metadata_key__ = "sa"
        # NOTE(review): redeclares the ``id`` field already provided by
        # AuditMixin — confirm the duplication is intentional.
        id: int = field(init=False, metadata={"sa": Column(Integer, primary_key=True)})
        # Optional in the dataclass sense (default None) but NOT NULL at the
        # database level.
        identity: Optional[str] = field(
            default=None, metadata={"sa": Column(String(length=255), nullable=False)}
        )

        row_status: Optional[str] = field(
            default=None, metadata={"sa": Column(String(length=20), nullable=False)}
        )

        @declared_attr
        def __table_args__(cls):
            # Partial unique index: (identity, row_status) must be unique
            # only among rows whose row_status is "active".
            return (
                Index(
                    "index_on_identity_v3_user_identity",
                    "identity",
                    "row_status",
                    unique=True,
                    postgresql_where=cls.row_status == "active",
                ),
            )


    @mapper_registry.mapped
    class StockSql(Stock):
        """Concrete variant of Stock registered with the SQLAlchemy mapper."""

        pass


    StockPyd = pydantic.dataclasses.dataclass(Stock)


    # Create the app, database, and stocks table
    app = FastAPI()

    Base = mapper_registry.generate_base()


    async def init_models():
        """Drop and recreate all mapped tables (destructive; dev use only)."""
        # Bug fix: get_engine() is a plain (lru_cache'd) sync function that
        # returns an AsyncEngine, so it must not be awaited —
        # ``e = await get_engine()`` raised TypeError at runtime.
        async with get_engine().begin() as conn:
            await conn.run_sync(Base.metadata.drop_all)
            await conn.run_sync(Base.metadata.create_all)


    @app.get("/", response_model=List[StockPyd])
    async def foo(context_session: AsyncSession = Depends(get_db)):
        """Return every Stock row, logging each one as it is read."""
        async with context_session as db:
            # Query the stock table and materialize the ORM objects once.
            result = await db.execute(select(StockSql))
            rows = result.scalars().all()
            for d in rows:
                # Bug fix: Stock has no company/price/symbol/datetime
                # attributes (its fields are identity, row_status,
                # created_at, updated_at), so the original print raised
                # AttributeError on every non-empty result set.
                print(
                    f"""{d.identity}\t
                    {d.row_status}\t
                    {d.created_at}\t
                    {d.updated_at}"""
                )

            return rows
    33 changes: 33 additions & 0 deletions pyproject.toml
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,33 @@
    # Poetry project manifest for the FastAPI + SQLAlchemy async demo.
    [tool.poetry]
    name = "api"
    version = "0.1.0"
    description = ""
    authors = ["sandeep srinivasa <[email protected]>"]

    # Runtime dependencies.
    [tool.poetry.dependencies]
    python = "^3.8"
    pydantic = {extras = ["email"], version = "^1.8.1"}
    fastapi = "^0.63.0"
    uvicorn = {extras = ["standard"], version = "^0.13.4"}
    gunicorn = "^20.0.4"
    msgpack-asgi = "^1.0.0"
    inotify = "^0.2.10"
    hashids = "^1.3.1"
    GeoAlchemy2 = "^0.8.4"
    redis = "^3.5.3"
    boto3 = "^1.17.29"
    pendulum = "^2.1.2"
    fuzzywuzzy = "^0.18.0"
    pandas = "^1.2.3"
    python-Levenshtein = "^0.12.2"
    # SQLAlchemy 1.4+ is required for the asyncio extension used by async_p.py.
    SQLAlchemy = "^1.4.2"
    psycopg2-binary = "^2.8.6"
    asyncpg = "^0.22.0"
    async_lru = "^1.0.2"

    # Development-only tooling.
    [tool.poetry.dev-dependencies]
    black = {version = "^20.8b1", allow-prereleases = true}

    [build-system]
    requires = ["poetry-core>=1.0.0"]
    build-backend = "poetry.core.masonry.api"
    File renamed without changes.