diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7d0b528..2ca3e6a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -7,6 +7,7 @@ on: - backend paths: - 'api/**' + - '.isort.cfg' jobs: build: @@ -16,7 +17,7 @@ jobs: - name: python-isort uses: isort/isort-action@v1 with: - isort-version: 5.10.1 - + isort-version: 5.10.1 + - name: Black Check uses: jpetrucciani/black-check@22.10.0 diff --git a/.gitignore b/.gitignore index e69de29..8cc561d 100644 --- a/.gitignore +++ b/.gitignore @@ -0,0 +1,5 @@ +deployment/backend.dev.env +deployment/database.dev.env +deployment/frontend.dev.env +venv/** +.idea \ No newline at end of file diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 0000000..ed0aa79 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,4 @@ +[settings] +profile=black +known_fastapi=fastapi +sections=FUTURE,STDLIB,FASTAPI,FIRSTPARTY,THIRDPARTY,LOCALFOLDER diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c82f7a6..33bb3f8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,13 +1,15 @@ files: 'api/' repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.2.0 + rev: v4.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer + exclude: requirements.txt - id: check-yaml - id: check-added-large-files - + - id: detect-private-key + - id: check-merge-conflict - repo: https://github.com/psf/black rev: 22.10.0 hooks: @@ -17,3 +19,10 @@ repos: rev: 5.12.0 hooks: - id: isort +- repo: + https://github.com/charliermarsh/ruff-pre-commit + # Ruff version. + rev: v0.1.2 + hooks: + - id: ruff + args: [ --fix, --exit-non-zero-on-fix ] \ No newline at end of file diff --git a/README.md b/README.md index 37827da..181a33d 100644 --- a/README.md +++ b/README.md @@ -164,3 +164,25 @@ All containers are configured to work as development environment so each change ## Backend workspace ./api/ + +## Linters + +In this project we use _black_ and _isort_ as linters. In order to use them, you must first install them with the following command: + +``` +pip install black isort +``` + +To format your code, just run: + +``` +isort . +black . +``` + +We have also configured a pre-commit hook that will automatically format your code on every commit. To use it, install pre-commit: + +``` +pip install pre-commit +pre-commit install +``` diff --git a/api/alembic.ini b/api/alembic.ini index e69de29..07489da 100644 --- a/api/alembic.ini +++ b/api/alembic.ini @@ -0,0 +1,116 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename.
+# If specified, requires the python-dateutil library that can be +# installed by adding `alembic[tz]` to the pip requirements +# string value is passed to dateutil.tz.gettz() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/api/migrations/README b/api/migrations/README new file mode 100644 index 0000000..a23d4fb --- /dev/null +++ b/api/migrations/README @@ -0,0 +1 @@ +Generic single-database configuration with an async dbapi. 
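The `alembic.ini` and `migrations/` scaffolding above is driven by the standard Alembic CLI. As a rough sketch of the intended workflow (assuming the commands are run from the `api/` directory, where `alembic.ini` lives, and that the configured database is reachable), a new revision is generated and applied like this:

```
alembic revision --autogenerate -m "initial migration"
alembic upgrade head
```

The Dockerfile change later in this diff runs `alembic upgrade head` automatically when the backend container starts, so applying migrations by hand is mainly needed when working outside Docker.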
diff --git a/api/migrations/env.py b/api/migrations/env.py new file mode 100644 index 0000000..46a6a46 --- /dev/null +++ b/api/migrations/env.py @@ -0,0 +1,114 @@ +import asyncio +from logging.config import fileConfig + +from alembic import context +from geoalchemy2 import alembic_helpers +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config +from src.api.config import DATABASE_URL + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config +config.set_main_option("sqlalchemy.url", DATABASE_URL) +# Change default sqlalchemy.url to our + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +from src.api.databases import Base +from src.api.models import ( + City, + Country, + Image, + Route, + RoutePoint, + RoutePointRestriction, + UserFavourite, +) +from src.auth.models import User + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata + +target_metadata = Base.metadata + + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + include_object=alembic_helpers.include_object, + process_revision_directives=alembic_helpers.writer, + render_item=alembic_helpers.render_item, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + context.configure( + connection=connection, + target_metadata=target_metadata, + include_object=alembic_helpers.include_object, + process_revision_directives=alembic_helpers.writer, + render_item=alembic_helpers.render_item, + ) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/api/migrations/script.py.mako b/api/migrations/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/api/migrations/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/api/migrations/versions/17617cc3dd13_initial_migration.py b/api/migrations/versions/17617cc3dd13_initial_migration.py new file mode 100644 index 0000000..b60e8df --- /dev/null +++ b/api/migrations/versions/17617cc3dd13_initial_migration.py @@ -0,0 +1,179 @@ +"""initial migration + +Revision ID: 17617cc3dd13 +Revises: +Create Date: 2023-11-28 23:19:53.001146 + +""" +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op +from geoalchemy2 import Geography + +# revision identifiers, used by Alembic. +revision: str = '17617cc3dd13' +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + 'countries', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=56), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('name'), + ) + op.create_table( + 'images', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('content', sa.LargeBinary(), nullable=True), + sa.PrimaryKeyConstraint('id'), + ) + op.create_table( + 'route_point_restrictions', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('begin_hour', sa.Time(), nullable=False), + sa.Column('end_hour', sa.Time(), nullable=False), + sa.PrimaryKeyConstraint('id'), + ) + op.create_table( + 'users', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('nickname', sa.String(), nullable=False), + sa.Column( + 'created_at', + sa.TIMESTAMP(timezone=True), + server_default=sa.text('now()'), + nullable=False, + ), + sa.Column('email', sa.String(length=320), nullable=False), + sa.Column('hashed_password', sa.String(length=1024), nullable=False), + sa.Column('is_active', sa.Boolean(), nullable=False), + sa.Column('is_superuser', sa.Boolean(), nullable=False), + sa.Column('is_verified', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('nickname'), + ) + op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True) + op.create_table( + 'cities', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('country', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=86), nullable=False), + sa.ForeignKeyConstraint( + ['country'], + ['countries.id'], + ), + sa.PrimaryKeyConstraint('id'), + ) + op.create_table( + 'routes', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('creator', sa.Integer(), nullable=False), + sa.Column('name', sa.String(), nullable=False), + sa.Column('day_count', sa.Integer(), nullable=True), + sa.Column('begin_hour', sa.Time(), nullable=True), + sa.Column('end_hour', sa.Time(), nullable=True), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('photo', sa.Integer(), nullable=False), + sa.Column('is_public', sa.Boolean(), nullable=True), + sa.Column('is_calculated', sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint( + ['creator'], + ['users.id'], + ), + sa.ForeignKeyConstraint( + ['photo'], + ['images.id'], + ), + sa.PrimaryKeyConstraint('id'), + ) + op.create_geospatial_table( + 'route_points', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('route', sa.Integer(), nullable=False), + sa.Column('city', sa.Integer(), nullable=False), + sa.Column('restriction', sa.Integer(), nullable=True), + sa.Column('name', sa.String(), nullable=True), + sa.Column( + 'coordinates', + Geography( + geometry_type='POINT', + srid=4326, + spatial_index=False, + from_text='ST_GeogFromText', + name='geography', + ), + nullable=True, + ), + sa.Column('photo', sa.Integer(), nullable=False), + sa.Column('mark', sa.Float(), nullable=True), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('position', sa.Integer(), nullable=False), + sa.Column('day', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ['city'], + ['cities.id'], + ), + sa.ForeignKeyConstraint( + ['photo'], + ['images.id'], + ), + sa.ForeignKeyConstraint( + ['restriction'], + ['route_point_restrictions.id'], + ), + sa.ForeignKeyConstraint( + ['route'], + ['routes.id'], + ), + sa.PrimaryKeyConstraint('id'), + ) + op.create_geospatial_index( + 'idx_route_points_coordinates', + 'route_points', + ['coordinates'], + unique=False, + postgresql_using='gist', 
+ postgresql_ops={}, + ) + op.create_table( + 'user_favourites', + sa.Column('user', sa.Integer(), nullable=False), + sa.Column('route', sa.Integer(), nullable=False), + sa.Column('favourite_date', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint( + ['route'], + ['routes.id'], + ), + sa.ForeignKeyConstraint( + ['user'], + ['users.id'], + ), + sa.PrimaryKeyConstraint('user', 'route'), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table('user_favourites') + op.drop_geospatial_index( + 'idx_route_points_coordinates', + table_name='route_points', + postgresql_using='gist', + column_name='coordinates', + ) + op.drop_geospatial_table('route_points') + op.drop_table('routes') + op.drop_table('cities') + op.drop_index(op.f('ix_users_email'), table_name='users') + op.drop_table('users') + op.drop_table('route_point_restrictions') + op.drop_table('images') + op.drop_table('countries') + # ### end Alembic commands ### diff --git a/api/requirements/docker.txt b/api/requirements/docker.txt index 44e9038..7135f00 100644 --- a/api/requirements/docker.txt +++ b/api/requirements/docker.txt @@ -1,9 +1,14 @@ -fastapi==0.95.* -passlib==1.7.* -pydantic[email]==1.10.7 -PyJWT==2.6.* -python-decouple==3.* -SQLAlchemy==2.0.* -uvicorn==0.21.* -psycopg2==2.9.* -python-multipart==0.0.* +GeoAlchemy2==0.13.* +fastapi==0.95.* +passlib==1.7.* +alembic==1.12.* +pydantic[email]==1.10.* +PyJWT==2.6.* +python-decouple==3.* +SQLAlchemy==2.0.* +uvicorn==0.21.* +psycopg2==2.9.* +asyncpg==0.27.* +python-multipart==0.0.* +fastapi-mail==1.2.* +fastapi-users[sqlalchemy]==10.4.* diff --git a/api/src/api/config.py b/api/src/api/config.py index 6ecd6e4..fec9804 100644 --- a/api/src/api/config.py +++ b/api/src/api/config.py @@ -6,7 +6,7 @@ POSTGRES_PASSWORD = config("POSTGRES_PASSWORD") POSTGRES_DB = config("POSTGRES_DB") DATABASE_URL = ( - f"postgresql://{POSTGRES_USER}:{POSTGRES_PASSWORD}@postgres/{POSTGRES_DB}" + f"postgresql+asyncpg://{POSTGRES_USER}:{POSTGRES_PASSWORD}@postgres/{POSTGRES_DB}" ) # CORS @@ -18,4 +18,7 @@ SECRET = config("SECRET") +EMAIL = config("EMAIL") +EMAIL_PASSWORD = config("EMAIL_PASSWORD") + print(CORS_ORIGINS) diff --git a/api/src/api/database.py b/api/src/api/database.py deleted file mode 100644 index c3d5d73..0000000 --- a/api/src/api/database.py +++ /dev/null @@ -1,25 +0,0 @@ -# db connection realted stuff -import sqlalchemy -from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import sessionmaker - -from .config import DATABASE_URL - -# database = databases.Database(DATABASE_URL) -# -# metadata = sqlalchemy.MetaData() - -engine = sqlalchemy.create_engine(DATABASE_URL) -# metadata.create_all(engine) - -SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) - -Base = declarative_base() - - -def get_db(): - db = SessionLocal() - try: - yield db - finally: - db.close() diff --git a/api/src/api/databases.py b/api/src/api/databases.py new file mode 100644 index 0000000..afe0420 --- /dev/null +++ b/api/src/api/databases.py @@ -0,0 +1,34 @@ +# db connection realted stuff +from typing import AsyncGenerator + +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.ext.declarative import declarative_base + +from .config import DATABASE_URL + +engine = create_async_engine(DATABASE_URL) + +async_session_maker = async_sessionmaker( + engine, + expire_on_commit=False, +) + +Base = declarative_base() + + +async def 
create_db_and_tables(): + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + +async def get_async_session() -> AsyncGenerator[AsyncSession, None]: + async with async_session_maker() as session: + yield session + + +async def get_db(): + db = async_session_maker() + try: + yield db + finally: + await db.close() diff --git a/api/src/api/models.py b/api/src/api/models.py index ff16b65..7added0 100644 --- a/api/src/api/models.py +++ b/api/src/api/models.py @@ -1 +1,118 @@ -# global models +"""Describes models used in the project""" +from typing import List, Union + +from geoalchemy2 import Geography +from sqlalchemy import ( + Boolean, + Column, + DateTime, + Float, + ForeignKey, + Index, + Integer, + String, + Text, + Time, +) +from sqlalchemy.orm import Mapped, mapped_column, relationship +from sqlalchemy.types import LargeBinary + +from .databases import Base + + +class City(Base): + """City model""" + + __tablename__ = "cities" + + id = Column(Integer, primary_key=True) + country: Mapped[int] = Column(ForeignKey("countries.id"), nullable=False) + route_points: Mapped[List["RoutePoint"]] = relationship() + name = Column(String(86), nullable=False) + + +class Country(Base): + """Country model""" + + __tablename__ = "countries" + + id = Column(Integer, primary_key=True) + name = Column(String(56), nullable=False, unique=True) + cities: Mapped[List["City"]] = relationship() + + +class RoutePoint(Base): + """Route point model""" + + __tablename__ = "route_points" + __tableargs__ = Index("rp_index", "route", "position", "day") + + id = Column(Integer, primary_key=True) + route: Mapped[int] = mapped_column(ForeignKey("routes.id")) + city: Mapped[int] = mapped_column(ForeignKey("cities.id")) + restriction: Mapped[Union[int, None]] = mapped_column( + ForeignKey("route_point_restrictions.id") + ) + name = Column(String) + coordinates = Column(Geography(geometry_type="POINT", srid=4326)) + photo: Mapped[int] = mapped_column(ForeignKey("images.id")) + mark = Column(Float) + description = Column(Text) + position = Column(Integer, nullable=False) + day = Column(Integer, nullable=False) + route_point_restriction: Mapped["RoutePointRestriction"] = relationship( + back_populates="route_point" + ) + + +class Image(Base): + "Image model" + + __tablename__ = "images" + + id = Column(Integer, primary_key=True) + content = Column(LargeBinary) + routes: Mapped["Route"] = relationship() + route_points: Mapped["RoutePoint"] = relationship() + + +class Route(Base): + """Route model""" + + __tablename__ = "routes" + + id = Column(Integer, primary_key=True) + creator: Mapped[int] = Column(ForeignKey("users.id"), nullable=False) + name = Column(String, nullable=False) + day_count = Column(Integer, default=1) + begin_hour = Column(Time) + end_hour = Column(Time) + description = Column(Text) + photo: Mapped[int] = mapped_column(ForeignKey("images.id")) + is_public = Column(Boolean, default=False) + is_calculated = Column(Boolean, default=False) + route_points: Mapped[List["RoutePoint"]] = relationship() + user_favourites: Mapped[List["UserFavourite"]] = relationship() + + +class RoutePointRestriction(Base): + """Route point restriction model""" + + __tablename__ = "route_point_restrictions" + + id = Column(Integer, primary_key=True) + begin_hour = Column(Time, nullable=False) + end_hour = Column(Time, nullable=False) + route: Mapped["RoutePoint"] = relationship( + back_populates="route_points_restriction" + ) + + +class UserFavourite(Base): + """User favourite model""" + + 
__tablename__ = "user_favourites" + + user: Mapped[int] = Column(ForeignKey("users.id"), primary_key=True) + route: Mapped[int] = Column(ForeignKey("routes.id"), primary_key=True) + favourite_date = Column(DateTime, nullable=False) diff --git a/api/src/auth/config.py b/api/src/auth/config.py new file mode 100644 index 0000000..a88fcb5 --- /dev/null +++ b/api/src/auth/config.py @@ -0,0 +1,16 @@ +from fastapi_mail import ConnectionConfig +from src.api.config import EMAIL, EMAIL_PASSWORD + +conf = ConnectionConfig( + MAIL_USERNAME=EMAIL, + MAIL_PASSWORD=EMAIL_PASSWORD, + MAIL_FROM=EMAIL, + MAIL_PORT=587, + MAIL_SERVER="smtp.gmail.com", + MAIL_FROM_NAME="Guide.me", + MAIL_STARTTLS=True, + MAIL_SSL_TLS=False, + USE_CREDENTIALS=True, + VALIDATE_CERTS=True, + SUPPRESS_SEND=False, +) diff --git a/api/src/auth/crud.py b/api/src/auth/crud.py deleted file mode 100644 index c1a9372..0000000 --- a/api/src/auth/crud.py +++ /dev/null @@ -1,26 +0,0 @@ -from auth import models -from auth.models import User -from auth.schemas import UserCreate, UserOut -from auth.utils import get_hashed_password -from sqlalchemy.orm import Session - - -def get_user_by_email(db: Session, user_email: str): - user_by_email = db.query(User).filter(User.email == user_email).first() - return user_by_email - - -def create_new_user(db: Session, user: UserCreate): - hashed_password = get_hashed_password(password=user.password) - new_user = models.User(email=user.email, hashed_password=hashed_password) - db.add(new_user) - db.commit() - db.refresh(new_user) - return new_user - - -def change_user_password(db: Session, user: UserOut, new_password: str): - user.hashed_password = get_hashed_password(password=new_password) - db.commit() - db.refresh(user) - return user diff --git a/api/src/auth/exceptions.py b/api/src/auth/exceptions.py index e69de29..3e8da12 100644 --- a/api/src/auth/exceptions.py +++ b/api/src/auth/exceptions.py @@ -0,0 +1,7 @@ +from fastapi import status +from fastapi.exceptions import HTTPException + + +class NicknameAlreadyTaken(HTTPException): + def __init__(self): + super().__init__(status.HTTP_400_BAD_REQUEST, detail="NICKNAME_ALREADY_TAKEN") diff --git a/api/src/auth/manager.py b/api/src/auth/manager.py new file mode 100644 index 0000000..219e81c --- /dev/null +++ b/api/src/auth/manager.py @@ -0,0 +1,100 @@ +from typing import Optional + +from fastapi import Depends, Request + +from fastapi_mail import FastMail, MessageSchema, MessageType +from fastapi_users import BaseUserManager, IntegerIDMixin, exceptions, models, schemas +from sqlalchemy import Integer +from sqlalchemy.exc import IntegrityError +from src.api.config import SECRET +from src.auth.config import conf +from src.auth.exceptions import NicknameAlreadyTaken +from src.auth.models import User, get_user_db +from src.auth.schemas import CredentialsSchema, EmailSchema + + +class UserManager(IntegerIDMixin, BaseUserManager[User, Integer]): + reset_password_token_secret = SECRET + verification_token_secret = SECRET + + async def authenticate(self, credentials: CredentialsSchema) -> Optional[models.UP]: + """ + Authenticate and return a user following an email and a password. + Will automatically upgrade password hash if necessary. + :param credentials: The user credentials. 
+ """ + try: + user = await self.get_by_email(credentials.username) + except exceptions.UserNotExists: + # Run the hasher to mitigate timing attack + # Inspired from Django: https://code.djangoproject.com/ticket/20760 + self.password_helper.hash(credentials.password) + return None + + verified, updated_password_hash = self.password_helper.verify_and_update( + credentials.password, user.hashed_password + ) + if not verified: + return None + # Update password hash to a more robust one if needed + if updated_password_hash is not None: + await self.user_db.update(user, {"hashed_password": updated_password_hash}) + + return user + + async def create( + self, + user_create: schemas.UC, + safe: bool = False, + request: Optional[Request] = None, + ) -> models.UP: + try: + return await super().create(user_create, safe, request) + except IntegrityError: + raise NicknameAlreadyTaken() + + async def on_after_register(self, user: User, request: Optional[Request] = None): + print("User created: ", user.email) + + await super().on_after_register(user, request) + + async def on_after_forgot_password( + self, user: User, token: str, request: Optional[Request] = None + ): + print(f"User {user.id} has forgot their password. Reset token: {token}") + + async def on_after_request_verify( + self, user: User, token: str, request: Optional[Request] = None + ): + print(f"Verification requested for user {user.id}. Verification token: {token}") + # send email with randomly generated confirmation code (6 digits) + html = f""" +
Email verification + Hi! Please verify your email to finish your registration at guide.me. Your verification token: + {token} + Copy this token and paste it in the verification form.
+ """ # noqa + + email = EmailSchema(email=[user.email]) + + message = MessageSchema( + subject="Guide.me - email verification", + recipients=email.dict().get("email"), + body=html, + subtype=MessageType.html, + ) + + fm = FastMail(conf) + await fm.send_message(message) + print(f"email sent to {user.email}") + + user.active = True + + async def on_after_verify(self, user: User, request: Optional[Request] = None): + print(f"User {user.email} has been verified.") + + # TODO send welcome email + + +async def get_user_manager(user_db=Depends(get_user_db)): + yield UserManager(user_db) diff --git a/api/src/auth/models.py b/api/src/auth/models.py index 67df8b3..9fbdc62 100644 --- a/api/src/auth/models.py +++ b/api/src/auth/models.py @@ -1,15 +1,24 @@ -from api.database import Base +from fastapi import Depends + +from fastapi_users.db import SQLAlchemyBaseUserTable, SQLAlchemyUserDatabase from sqlalchemy import Column, Integer, String +from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.sql.expression import text from sqlalchemy.sql.sqltypes import TIMESTAMP +from src.api.databases import Base, get_async_session + +class User(SQLAlchemyBaseUserTable[int], Base): + """User model""" -class User(Base): __tablename__ = "users" - id = Column(Integer, primary_key=True, nullable=False) - email = Column(String, unique=True, nullable=False) - hashed_password = Column(String, nullable=False) + id = Column(Integer, primary_key=True) + nickname = Column(String, unique=True, nullable=False) created_at = Column( TIMESTAMP(timezone=True), nullable=False, server_default=text("now()") ) + + +async def get_user_db(session: AsyncSession = Depends(get_async_session)): + yield SQLAlchemyUserDatabase(session, User) diff --git a/api/src/auth/router.py b/api/src/auth/router.py deleted file mode 100644 index 806cf86..0000000 --- a/api/src/auth/router.py +++ /dev/null @@ -1,65 +0,0 @@ -from api.database import get_db -from auth import models, schemas -from auth.crud import change_user_password, create_new_user, get_user_by_email -from auth.utils import create_access_token, verify, verify_reset_password_token -from fastapi import APIRouter, Body, Depends, HTTPException, status -from fastapi.security import OAuth2PasswordRequestForm -from sqlalchemy.orm import Session - -router = APIRouter(prefix="/api", tags=["auth"]) - - -@router.post( - "/registration", status_code=status.HTTP_201_CREATED, response_model=schemas.UserOut -) -def create_user(user: schemas.UserCreate, db: Session = Depends(get_db)): - db_user = get_user_by_email(db=db, user_email=user.email) - if db_user: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, detail="Email already registered" - ) - new_user = create_new_user(db, user) - return new_user - - -@router.post("/login", status_code=status.HTTP_200_OK, response_model=schemas.Token) -def login( - form_data: OAuth2PasswordRequestForm = Depends(), db: Session = Depends(get_db) -): - user = get_user_by_email(db=db, user_email=form_data.username) - if not user: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="Incorrect email or password", - ) - - if not verify( - plain_password=form_data.password, hashed_password=user.hashed_password - ): - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="Incorrect email or password", - ) - - return {"token": create_access_token(subject=user.email), "token_type": "bearer"} - - -@router.post("/reset-password/", response_model=schemas.Message) -def reset_user_password( - user_email: str = 
Body(...), - token=Body(...), - new_password: str = Body(...), - db: Session = Depends(get_db), -): - token_response_email = verify_reset_password_token(token=token) - if not token_response_email == user_email: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, detail="Invalid token" - ) - user = get_user_by_email(db=db, user_email=user_email) - if not user: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, detail="User not found" - ) - change_user_password(db, user, new_password) - return {"message": "Password reset successfully"} diff --git a/api/src/auth/router/__init__.py b/api/src/auth/router/__init__.py new file mode 100644 index 0000000..0b8a65f --- /dev/null +++ b/api/src/auth/router/__init__.py @@ -0,0 +1,14 @@ +from fastapi import APIRouter + +from src.auth.schemas import UserCreate, UserOut, UserRead, UserUpdate +from src.auth.utils import auth_backend, fastapi_users + +router = APIRouter(prefix="/api", tags=["auth"]) + +router.include_router(fastapi_users.get_register_router(UserOut, UserCreate)) +router.include_router( + fastapi_users.get_auth_router(auth_backend, requires_verification=True) +) +router.include_router(fastapi_users.get_reset_password_router()) +router.include_router(fastapi_users.get_users_router(UserRead, UserUpdate)) +router.include_router(fastapi_users.get_verify_router(UserRead)) diff --git a/api/src/auth/router/auth.py b/api/src/auth/router/auth.py new file mode 100644 index 0000000..669fed1 --- /dev/null +++ b/api/src/auth/router/auth.py @@ -0,0 +1,95 @@ +from typing import Tuple + +from fastapi import APIRouter, Depends, HTTPException, Request, status +from fastapi.openapi.models import Response + +from fastapi_users import models +from fastapi_users.authentication import AuthenticationBackend, Authenticator, Strategy +from fastapi_users.manager import UserManagerDependency +from fastapi_users.openapi import OpenAPIResponseType +from fastapi_users.router.common import ErrorCode, ErrorModel +from src.auth.manager import UserManager +from src.auth.schemas import CredentialsSchema + + +def get_auth_router( + backend: AuthenticationBackend, + get_user_manager: UserManagerDependency[models.UP, models.ID], + authenticator: Authenticator, + requires_verification: bool = True, +) -> APIRouter: + """Generate a router with login/logout routes for an authentication backend.""" + router = APIRouter() + get_current_user_token = authenticator.current_user_token( + active=True, verified=requires_verification + ) + + login_responses: OpenAPIResponseType = { + status.HTTP_400_BAD_REQUEST: { + "model": ErrorModel, + "content": { + "application/json": { + "examples": { + ErrorCode.LOGIN_BAD_CREDENTIALS: { + "summary": "Bad credentials or the user is inactive.", + "value": {"detail": ErrorCode.LOGIN_BAD_CREDENTIALS}, + }, + ErrorCode.LOGIN_USER_NOT_VERIFIED: { + "summary": "The user is not verified.", + "value": {"detail": ErrorCode.LOGIN_USER_NOT_VERIFIED}, + }, + } + } + }, + }, + **backend.transport.get_openapi_login_responses_success(), + } + + @router.post( + "/login", + name=f"auth:{backend.name}.login", + responses=login_responses, + ) + async def login( + request: Request, + credentials: CredentialsSchema, + user_manager: UserManager = Depends(get_user_manager), + strategy: Strategy[models.UP, models.ID] = Depends(backend.get_strategy), + ): + user = await user_manager.authenticate(credentials) + + if user is None or not user.is_active: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + 
detail=ErrorCode.LOGIN_BAD_CREDENTIALS, + ) + if requires_verification and not user.is_verified: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=ErrorCode.LOGIN_USER_NOT_VERIFIED, + ) + + login_return = await backend.login(strategy, user, Response(description="")) + await user_manager.on_after_login(user, request) + return login_return + + logout_responses: OpenAPIResponseType = { + **{ + status.HTTP_401_UNAUTHORIZED: { + "description": "Missing token or inactive user." + } + }, + **backend.transport.get_openapi_logout_responses_success(), + } + + @router.post( + "/logout", name=f"auth:{backend.name}.logout", responses=logout_responses + ) + async def logout( + user_token: Tuple[models.UP, str] = Depends(get_current_user_token), + strategy: Strategy[models.UP, models.ID] = Depends(backend.get_strategy), + ): + user, token = user_token + return await backend.logout(strategy, user, token, Response(description="")) + + return router diff --git a/api/src/auth/schemas.py b/api/src/auth/schemas.py index ac0b7c5..ab36a5e 100644 --- a/api/src/auth/schemas.py +++ b/api/src/auth/schemas.py @@ -1,27 +1,36 @@ -from datetime import datetime - -from pydantic import BaseModel, EmailStr - - -class UserCreate(BaseModel): - email: EmailStr - password: str - - -class UserOut(BaseModel): - id: int - email: EmailStr - created_at: datetime - hashed_password: str - - class Config: - orm_mode = True - - -class Message(BaseModel): - message: str - - -class Token(BaseModel): - token: str - token_type: str +from typing import List, Optional + +from fastapi_users import schemas +from pydantic import BaseModel, EmailStr +from sqlalchemy import Integer + + +class UserCreate(schemas.BaseUserCreate): + nickname: str + + +class UserOut(schemas.BaseUser[Integer]): + nickname: str + + +class UserRead(schemas.BaseUser[Integer]): + pass + + +class CredentialsSchema(BaseModel): + username: str + password: str + + +class UserUpdate(schemas.BaseUserUpdate): + is_active: Optional[bool] = None + is_superuser: Optional[bool] = None + is_verified: Optional[bool] = None + + +class UserRead(schemas.BaseUser[Integer]): # noqa + pass + + +class EmailSchema(BaseModel): + email: List[EmailStr] diff --git a/api/src/auth/utils.py b/api/src/auth/utils.py index 3528ee1..c7a4f88 100644 --- a/api/src/auth/utils.py +++ b/api/src/auth/utils.py @@ -1,37 +1,50 @@ -from datetime import datetime, timedelta -from typing import Optional, Union +from fastapi import APIRouter -import jwt -from api.config import SECRET -from passlib.context import CryptContext +from fastapi_users import FastAPIUsers +from fastapi_users.authentication import ( + AuthenticationBackend, + BearerTransport, + JWTStrategy, +) +from sqlalchemy import Integer +from src.api.config import SECRET +from src.auth.manager import get_user_manager +from src.auth.models import User -pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") -ACCESS_TOKEN_EXPIRE_TIME = 30 -ALGORITHM = "HS256" +from .router.auth import get_auth_router -def get_hashed_password(password: str): - return pwd_context.hash(password) +def get_jwt_strategy() -> JWTStrategy: + return JWTStrategy(secret=SECRET, lifetime_seconds=3600) -def verify(plain_password: str, hashed_password: str): - return pwd_context.verify(plain_password, hashed_password) +bearer_transport = BearerTransport(tokenUrl="/api/login") +auth_backend = AuthenticationBackend( + name="jwt", + transport=bearer_transport, + get_strategy=get_jwt_strategy, +) -def create_access_token(subject: Union[str, any], 
expires_delta: int = None) -> str: - if expires_delta is not None: - expires_delta = datetime.utcnow() + expires_delta - else: - expires_delta = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_TIME) - to_encode = {"exp": expires_delta, "subject": str(subject)} - encoded_jwt = jwt.encode(to_encode, key=SECRET, algorithm=ALGORITHM) - return encoded_jwt +class GuideMeUsers(FastAPIUsers[User, Integer]): + def get_auth_router( + self, backend: AuthenticationBackend, requires_verification: bool = True + ) -> APIRouter: + """ + Return an auth router for a given authentication backend. + :param backend: The authentication backend instance. + :param requires_verification: Whether the authentication + require the to be verified or not. Defaults to False. + """ + return get_auth_router( + backend, + self.get_user_manager, + self.authenticator, + requires_verification, + ) -def verify_reset_password_token(token: str) -> Optional[str]: - try: - decoded_token = jwt.decode(token, key=SECRET, algorithms=[ALGORITHM]) - return decoded_token["subject"] - except jwt.InvalidTokenError: - return None + +fastapi_users = GuideMeUsers(get_user_manager, [auth_backend]) +current_active_user = fastapi_users.current_user(active=True) diff --git a/api/src/main.py b/api/src/main.py index 45b3ad0..adda2f5 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -1,35 +1,38 @@ -# root of the project, which inits the FastAPI app -from api.config import CORS_ORIGINS -from api.database import engine -from auth import models, router -from fastapi import FastAPI -from fastapi.middleware.cors import CORSMiddleware - -models.Base.metadata.create_all(bind=engine) - -app = FastAPI() - -app.include_router(router.router) - -app.add_middleware( - CORSMiddleware, - allow_origins=CORS_ORIGINS, - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) - - -# @app.on_event("startup") -# async def startup(): -# await database.connect() -# -# -# @app.on_event("shutdown") -# async def shutdown(): -# await database.disconnect() -# -# -# @app.get("/") -# def read_root(): -# return {"Hello": "World"} +# root of the project, which inits the FastAPI app +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from src.api.config import CORS_ORIGINS +from src.auth import router + +app = FastAPI() + +app.include_router(router.router) + +app.add_middleware( + CORSMiddleware, + allow_origins=CORS_ORIGINS, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + + +# @app.on_event("startup") +# async def init_tables(): +# await create_db_and_tables() + + +# @app.on_event("startup") +# async def startup(): +# await database.connect() +# +# +# @app.on_event("shutdown") +# async def shutdown(): +# await database.disconnect() +# +# +# @app.get("/") +# def read_root(): +# return {"Hello": "World"} diff --git a/deployment/10_postgis.sh b/deployment/10_postgis.sh new file mode 100644 index 0000000..3589de6 --- /dev/null +++ b/deployment/10_postgis.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +set -e + +# Perform all actions as $POSTGRES_USER +export PGUSER="$POSTGRES_USER" + +# Create the 'template_postgis' template db +"${psql[@]}" <<- 'EOSQL' +CREATE DATABASE template_postgis IS_TEMPLATE true; +EOSQL + +# Load PostGIS into both template_database and $POSTGRES_DB +for DB in template_postgis "$POSTGRES_DB"; do + echo "Loading PostGIS extensions into $DB" + "${psql[@]}" --dbname="$DB" <<-'EOSQL' + CREATE EXTENSION IF NOT EXISTS postgis; + CREATE EXTENSION IF NOT EXISTS fuzzystrmatch; +EOSQL +done 
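For context, the `current_active_user` dependency exported from `api/src/auth/utils.py` above is what downstream routers are expected to consume. A minimal, hypothetical sketch of protecting a route with it (this endpoint is not part of the change) could look like:

```
from fastapi import APIRouter, Depends

from src.auth.models import User
from src.auth.utils import current_active_user

router = APIRouter(prefix="/api", tags=["example"])


@router.get("/me/nickname")
async def read_own_nickname(user: User = Depends(current_active_user)):
    # `user` is the active account resolved from the bearer JWT
    # issued by the /api/login route configured in utils.py.
    return {"nickname": user.nickname}
```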
diff --git a/deployment/Dockerfile.backend b/deployment/Dockerfile.backend index e50f476..77715e3 100644 --- a/deployment/Dockerfile.backend +++ b/deployment/Dockerfile.backend @@ -7,6 +7,7 @@ RUN apk add build-base COPY ./api/requirements/docker.txt ./requirements.txt -RUN pip install --no-cache-dir --upgrade -r requirements.txt +RUN pip install --no-cache-dir --root-user-action=ignore --upgrade pip && \ + pip install --no-cache-dir --root-user-action=ignore -r requirements.txt -CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000", "--reload"] \ No newline at end of file +CMD ["sh","-c","alembic upgrade head && uvicorn src.main:app --host 0.0.0.0 --port 8000 --reload"] diff --git a/deployment/backend.dev.env b/deployment/backend.dev.env deleted file mode 100644 index 7bd89c0..0000000 --- a/deployment/backend.dev.env +++ /dev/null @@ -1,2 +0,0 @@ -FRONTEND_URL=127.0.0.1:8080 -SECRET=example \ No newline at end of file diff --git a/deployment/backend.template.dev.env b/deployment/backend.template.dev.env new file mode 100644 index 0000000..937222b --- /dev/null +++ b/deployment/backend.template.dev.env @@ -0,0 +1,4 @@ +FRONTEND_URL= +SECRET= +EMAIL= +EMAIL_PASSWORD= \ No newline at end of file diff --git a/deployment/database.dev.env b/deployment/database.dev.env deleted file mode 100644 index 8eab02b..0000000 --- a/deployment/database.dev.env +++ /dev/null @@ -1,3 +0,0 @@ -POSTGRES_USER=admin -POSTGRES_PASSWORD=example -POSTGRES_DB=planer_podrozy \ No newline at end of file diff --git a/deployment/database.template.dev.env b/deployment/database.template.dev.env new file mode 100644 index 0000000..0892687 --- /dev/null +++ b/deployment/database.template.dev.env @@ -0,0 +1,3 @@ +POSTGRES_USER= +POSTGRES_PASSWORD= +POSTGRES_DB= \ No newline at end of file diff --git a/deployment/frontend.dev.env b/deployment/frontend.dev.env deleted file mode 100644 index c854748..0000000 --- a/deployment/frontend.dev.env +++ /dev/null @@ -1 +0,0 @@ -BACKEND_URL=127.0.0.1:8000 \ No newline at end of file diff --git a/deployment/frontend.template.dev.env b/deployment/frontend.template.dev.env new file mode 100644 index 0000000..3c405f4 --- /dev/null +++ b/deployment/frontend.template.dev.env @@ -0,0 +1 @@ +BACKEND_URL= \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 02a8de1..c9c8a8c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ version: "3.9" services: postgres: - image: postgres:15.1-alpine3.17 + image: postgis/postgis:16-3.4-alpine restart: always environment: PGDATA: /data/ @@ -9,11 +9,12 @@ services: - deployment/database.dev.env volumes: - ./api/database:/data + - ./deployment/10_postgis.sh:/docker-entrypoint-initdb.d/10_postgis.sh healthcheck: test: ["CMD-SHELL", "pg_isready -U admin -d planer_podrozy"] interval: 5s timeout: 5s - retries: 10 + retries: 15 api: build: context: ./ @@ -26,10 +27,12 @@ services: depends_on: postgres: condition: service_healthy - restart: always + restart: unless-stopped volumes: - - ./api/src:/api/ - + - ./api/src:/api/src + - ./api/migrations:/api/migrations + - ./api/alembic.ini:/api/alembic.ini + ui: build: context: ./ @@ -40,7 +43,7 @@ services: - "8080:8080" depends_on: - api - restart: always + restart: unless-stopped volumes: - ./ui/src:/ui/src - ./ui/index.html:/ui/index.html \ No newline at end of file diff --git a/netlify.toml b/netlify.toml new file mode 100644 index 0000000..b1fe8e5 --- /dev/null +++ b/netlify.toml @@ -0,0 +1,3 @@ +[build] + ignore = "/bin/false" + diff --git 
a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..998c07e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,65 @@ +[tool.isort] +profile="black" +src_paths=["src", "tests"] + + + +[tool.black] +line-length = 88 +include = '\.pyi?$' +skip-string-normalization = true +exclude = ''' +/( + \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | _build + | buck-out + | build + | dist +)/ +''' +[tool.ruff] +# Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default. +select = ["E", "F"] +ignore = [] + +# Allow autofix for all enabled rules (when `--fix`) is provided. +fixable = ["A", "B", "C", "D", "E", "F", "G", "I", "N", "Q", "S", "T", "W", "ANN", "ARG", "BLE", "COM", "DJ", "DTZ", "EM", "ERA", "EXE", "FBT", "ICN", "INP", "ISC", "NPY", "PD", "PGH", "PIE", "PL", "PT", "PTH", "PYI", "RET", "RSE", "RUF", "SIM", "SLF", "TCH", "TID", "TRY", "UP", "YTT"] +unfixable = [] + +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".hg", + ".mypy_cache", + ".nox", + ".pants.d", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "venv", + "api/migrations/*" +] + +# Same as Black. +line-length = 88 + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +# Assume Python 3.10. +target-version = "py310" \ No newline at end of file
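The `[tool.ruff]` table above mirrors the Ruff pre-commit hook added earlier in this change. Assuming Ruff is also installed locally, the same checks can be run by hand from the repository root:

```
pip install ruff
ruff check --fix .
```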