diff --git a/.gitignore b/.gitignore index 7744e7b..7e98d61 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ docker/.env -/venv \ No newline at end of file +venv +frontend/public/env-config.js \ No newline at end of file diff --git a/README.md b/README.md index 269f02c..5e4704d 100644 --- a/README.md +++ b/README.md @@ -39,9 +39,7 @@ try it out! User : admin ; password : password Docker and docker-compose must be installed on the server/machine (cf. [official website](https://docs.docker.com/engine/install/debian/)). ### Application downloading - Replace the `X.Y.Z` mention by the name of the release you want to install. - ``` cd wget https://github.com/naturalsolutions/ecosecrets/archive/refs/tags/X.Y.Z.zip @@ -49,9 +47,10 @@ unzip X.Y.Z.zip rm X.Y.Z.zip mv ecosecrets-X.Y.Z ecosecrets/ ``` +### Add DeepFaune +ecoSecrets uses the DeepFaune code, so you need to download it and place it in the `deepfaune` folder inside the `src` folder. ### Settings - Copy the `.env.sample` inside the docker directory to `.env`: ``` @@ -62,6 +61,8 @@ nano docker/.env Edit freely this `.env` file to change credentials for instance. Here are the main parameters you usually want to modify: + +### Launching - `ENV` : uncomment it to activate the production mode (only if your app has been configured with a domain name) - `DOMAIN` : localhost, an IP address or a domain name (according to your context) - `PROTOCOL` : modify it to "https" if you want to activate HTTPS @@ -79,7 +80,6 @@ In the current version (`0.1.1`), you can't modify the `APP_USER` and the `APP_P ``` ./scripts/docker.sh up -d ``` - With the default settings, the app will run on `http://localhost:8889/` but the port of each service will be avaible to debug. This URL must be adapted to your context (depending on chosen protocol, domain and port). ## Sample data (for testing only) diff --git a/api/Pipfile b/api/Pipfile index 9e13b7b..645f5dc 100644 --- a/api/Pipfile +++ b/api/Pipfile @@ -7,6 +7,7 @@ name = "pypi" fastapi = "*" uvicorn = "*" sqlalchemy = ">=1.4.17,<=1.4.35" +celery = {extras = ["redis"], version = "*"} python-multipart = "*" psycopg2-binary = "*" boto3 = "*" diff --git a/api/alembic/env.py b/api/alembic/env.py index 182512e..71da905 100644 --- a/api/alembic/env.py +++ b/api/alembic/env.py @@ -24,7 +24,6 @@ from src.models.device import Devices # noqa from src.models.file import Files # noqa from src.models.models import ( # noqa - Deepfaune, ExifKeyModel, Groups, GroupsUsers, diff --git a/api/alembic/versions/3bea67bfb786_add_prediction_deepfaune_to_files.py b/api/alembic/versions/3bea67bfb786_add_prediction_deepfaune_to_files.py new file mode 100644 index 0000000..1e47d1a --- /dev/null +++ b/api/alembic/versions/3bea67bfb786_add_prediction_deepfaune_to_files.py @@ -0,0 +1,39 @@ +"""add prediction_deepfaune to files + +Revision ID: 3bea67bfb786 +Revises: 4cf2ba8715d2 +Create Date: 2024-02-15 15:09:07.320404 + +""" + +from alembic import op +import sqlalchemy as sa +import sqlmodel +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "3bea67bfb786" +down_revision = "4cf2ba8715d2" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column( + "files", + sa.Column("prediction_deepfaune", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + ) + op.drop_constraint("files_deepfaune_id_fkey", "files", type_="foreignkey") + op.drop_column("files", "deepfaune_id") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "files", sa.Column("deepfaune_id", sa.INTEGER(), autoincrement=False, nullable=True) + ) + op.create_foreign_key("files_deepfaune_id_fkey", "files", "deepfaune", ["deepfaune_id"], ["id"]) + op.drop_column("files", "prediction_deepfaune") + # ### end Alembic commands ### diff --git a/api/src/config.py b/api/src/config.py index 2b48940..0160f5f 100644 --- a/api/src/config.py +++ b/api/src/config.py @@ -19,6 +19,9 @@ class Settings(BaseSettings): MINIO_ROOT_USER: str = "test" MINIO_ROOT_PASSWORD: str = "password" MINIO_BUCKET_NAME: str = "bucket" + CELERY_BROKER: str = "redis://:broker_pwd@broker/0" + CELERY_BACKEND: str = "redis://:broker_pwd@broker/0" + CELERY_APP: str = "deepfaune" class Config: env_file = ".env" diff --git a/api/src/connectors/celery.py b/api/src/connectors/celery.py new file mode 100644 index 0000000..cc354f5 --- /dev/null +++ b/api/src/connectors/celery.py @@ -0,0 +1,7 @@ +from celery import Celery, chord, shared_task + +from src.config import settings + +celery_app = Celery( + settings.CELERY_APP, broker=settings.CELERY_BROKER, backend=settings.CELERY_BACKEND +) diff --git a/api/src/connectors/s3.py b/api/src/connectors/s3.py index 2dcb009..94a82ae 100644 --- a/api/src/connectors/s3.py +++ b/api/src/connectors/s3.py @@ -35,6 +35,7 @@ s3 = boto3.resource("s3", **config_dict) s3_client = boto3.client("s3", **config_dict_client) +s3_client_server = boto3.client("s3", **config_dict) def get_bucket_name(): @@ -79,7 +80,7 @@ def get_obj(filename: str): return s3.Object(get_bucket_name(), filename).get() -def get_url(filename: str, expiration: float = 3600): +def get_url_client(filename: str, expiration: float = 3600): url = s3_client.generate_presigned_url( "get_object", Params={"Bucket": get_bucket_name(), "Key": filename}, @@ -88,6 +89,15 @@ def get_url(filename: str, expiration: float = 3600): return url +def get_url_server(filename: str, expiration: float = 3600): + url = s3_client_server.generate_presigned_url( + "get_object", + Params={"Bucket": get_bucket_name(), "Key": filename}, + ExpiresIn=expiration, + ) + return url + + def delete_file_obj(filename: str): obj = s3.Object(get_bucket_name(), filename) return obj.delete() diff --git a/api/src/main.py b/api/src/main.py index 31cecdc..36db121 100644 --- a/api/src/main.py +++ b/api/src/main.py @@ -36,6 +36,11 @@ idp.add_swagger_config(app) +@app.get("/") +async def root(): + return {"message": "Hello Bigger Applications!"} + + @app.on_event("startup") def on_startup(): init_bucket() diff --git a/api/src/models/file.py b/api/src/models/file.py index 7d3133a..c8d000f 100644 --- a/api/src/models/file.py +++ b/api/src/models/file.py @@ -6,7 +6,7 @@ from sqlalchemy.dialects.postgresql import JSONB from sqlmodel import Column, Field, Relationship, SQLModel -from src.connectors.s3 import get_url +from src.connectors.s3 import get_url_client if TYPE_CHECKING: # pragma: no cover from .deployment import Deployments @@ -37,7 +37,7 @@ class Files(BaseFiles, table=True): name: str = Field(index=True) date: Optional[datetime] = Field(default_factory=datetime.utcnow) megadetector_id: Optional[int] = Field(foreign_key="megadetector.id") - 
deepfaune_id: Optional[int] = Field(foreign_key="deepfaune.id") + prediction_deepfaune: Optional[dict] = Field(sa_column=Column(JSONB), default={}) deployment_id: int = Field(foreign_key="deployments.id") treated: bool = Field(default=False) annotations: Optional[List[dict]] = Field(sa_column=Column(JSONB), default=[]) @@ -55,5 +55,5 @@ class ReadFiles(BaseFiles): @root_validator def gen_url(cls, values): # pylint: disable=no-self-argument,no-self-use filename = f"{values['hash']}.{values['ext']}" - values["url"] = get_url(filename) + values["url"] = get_url_client(filename) return values diff --git a/api/src/models/models.py b/api/src/models/models.py index 7354aa3..f5cf9ad 100644 --- a/api/src/models/models.py +++ b/api/src/models/models.py @@ -32,11 +32,6 @@ class Megadetector(SQLModel, table=True): label_class: str -class Deepfaune(SQLModel, table=True): - id: Optional[int] = Field(primary_key=True, index=True) - label_class: str - - class DeploymentTemplateSequenceCorrespondance(SQLModel, table=True): deployment_id: Optional[int] = Field( default=None, foreign_key="deployments.id", primary_key=True diff --git a/api/src/routers/files.py b/api/src/routers/files.py index 4daf731..5c26031 100644 --- a/api/src/routers/files.py +++ b/api/src/routers/files.py @@ -2,18 +2,22 @@ import io import tempfile +import time import uuid as uuid_pkg +from copy import deepcopy from datetime import datetime from typing import List from zipfile import ZipFile import magic -from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile +from celery.result import AsyncResult +from fastapi import APIRouter, BackgroundTasks, Depends, File, Form, HTTPException, UploadFile from fastapi.responses import StreamingResponse from sqlmodel import Session from src.config import settings from src.connectors import s3 +from src.connectors.celery import celery_app from src.connectors.database import get_db from src.models.file import CreateFiles, Files from src.schemas.schemas import Annotation @@ -47,7 +51,8 @@ def get_files(db: Session = Depends(get_db)): res = [] for f in List_files: new_f = f.dict() - url = s3.get_url(f"{f.hash}.{f.extension}") + ext = f.extension.split("/")[1] + url = s3.get_url_client(f"{f.hash}.{ext}") new_f["url"] = url res.append(new_f) return res @@ -62,7 +67,7 @@ def update_annotations( @router.get("/urls/") def display_file(name: str): - return s3.get_url(name) + return s3.get_url_client(name) @router.post("/exif/") @@ -79,8 +84,27 @@ def extract_exif(file: UploadFile = File(...), db: Session = Depends(get_db)): return res +def ask_answers_celery(task_id, file_list, db): + res = celery_app.AsyncResult(task_id) + # poll until the Celery task has finished, then store the predictions on each file + while res.state == "PENDING": + time.sleep(1) + try: + final_res = res.get(timeout=2) + for res, file in zip(final_res, file_list): + db_file = files.get_file(db=db, file_id=file.id) + db_file.prediction_deepfaune = res + db.commit() + except Exception: + print("failed") + + @router.post("/upload/{deployment_id}") -def upload_file(deployment_id: int, file: UploadFile = File(...), db: Session = Depends(get_db)): +def upload_file( + deployment_id: int, + background_tasks: BackgroundTasks, + file: UploadFile = File(...), + db: Session = Depends(get_db), +): hash = dependencies.generate_checksum(file) mime = magic.from_buffer(file.file.read(), mime=True) @@ -97,6 +121,11 @@ def upload_file(deployment_id: int, file: UploadFile = File(...), db: Session = ext=mime, deployment_id=deployment_id, ) + + ext = mime.split("/")[1] + url = s3.get_url_server(f"{hash}.{ext}") + task = 
celery_app.send_task("deepfaune.pi", [[url]]) + background_tasks.add_task(ask_answers_celery, task.get(), [insert], db) return insert @@ -148,23 +177,46 @@ def download_file(id: str, db: Session = Depends(get_db)): @router.post("/upload_zip/{deployment_id}") def upload_zip( + background_tasks: BackgroundTasks, deployment_id: int, - hash: List[str] = Form(), zipFile: UploadFile = File(...), db: Session = Depends(get_db), ): - listHash = hash[0].split(",") ext = zipFile.filename.split(".")[1] if ext == "zip": with ZipFile(io.BytesIO(zipFile.file.read()), "r") as myzip: res = [] - for info, hash in zip(myzip.infolist(), listHash): + names = [] + for info in myzip.infolist(): bytes = myzip.read(info.filename) with tempfile.SpooledTemporaryFile() as tf: tf.write(bytes) tf.seek(0) - insert = files.upload_file(db, hash, tf, info.filename, "JPG", deployment_id) - res.append(insert) + + hash = dependencies.generate_checksum_content(bytes) + + mime = magic.from_buffer(tf.read(), mime=True) + tf.seek(0) + + if not check_mime(mime): + raise HTTPException(status_code=400, detail="Invalid type file") + insert = files.upload_file( + db=db, + hash=hash, + new_file=tf, + filename=info.filename, + ext=mime, + deployment_id=deployment_id, + ) + res.append(deepcopy(insert)) + ext = mime.split("/")[1] + names.append(f"{hash}.{ext}") + urls = [] + for name in names: + url = s3.get_url_server(name) + urls.append(url) + task = celery_app.send_task("deepfaune.pi", [urls]) + background_tasks.add_task(ask_answers_celery, task.get(), res, db) return res else: raise HTTPException(status_code=500, detail="Vous ne pouvez déposer que des fichiers.zip") @@ -176,7 +228,8 @@ def read_deployment_files(deployment_id: int, db: Session = Depends(get_db)): res = [] for f in List_files: new_f = f.dict() - url = s3.get_url(f"{f.hash}.{f.extension}") + ext = f.extension.split("/")[1] + url = s3.get_url_client(f"{f.hash}.{ext}") new_f["url"] = url res.append(new_f) return res diff --git a/api/src/services/dependencies.py b/api/src/services/dependencies.py index 67c58d5..f62fcac 100644 --- a/api/src/services/dependencies.py +++ b/api/src/services/dependencies.py @@ -21,4 +21,7 @@ def read_upload(upload: UploadFile) -> bytes: def generate_checksum(upload: UploadFile) -> str: contents = read_upload(upload) - return hashlib.md5(contents).hexdigest() + return generate_checksum_content(contents) + +def generate_checksum_content(content: bytes) -> str: + return hashlib.md5(content).hexdigest() diff --git a/api/src/services/files.py b/api/src/services/files.py index fe401a0..f4cf74e 100644 --- a/api/src/services/files.py +++ b/api/src/services/files.py @@ -103,7 +103,8 @@ def upload_file( deployment_id: int, ): try: - s3.upload_file_obj(new_file, f"{hash}.{ext}") + extension = ext.split("/")[1] + s3.upload_file_obj(new_file, f"{hash}.{extension}") except Exception as e: print(e) raise HTTPException(status_code=404, detail="Impossible to save the file in minio") diff --git a/deepfaune/.bash_history b/deepfaune/.bash_history new file mode 100644 index 0000000..58efaf7 --- /dev/null +++ b/deepfaune/.bash_history @@ -0,0 +1,21 @@ +ls +du -hs /.venv/ +ls +cd src/ +ls +cd deepfaune/ +ls +ls -ltrh +ll +ls +cat app.log +ls -al +ls +python -m pipenv install celerylogger +exit +pipen install celerylogger +pipenv install celerylogger +python +pipenv install celerylogger +pip3 install celerylogger +exit diff --git a/deepfaune/.dockerignore b/deepfaune/.dockerignore new file mode 100644 index 0000000..d837447 --- /dev/null +++ b/deepfaune/.dockerignore 
@@ -0,0 +1,4 @@ +.vscode +.pytest_cache +.vscode-server/ +**/__pycache__ \ No newline at end of file diff --git a/deepfaune/.env b/deepfaune/.env new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/.gitignore b/deepfaune/.gitignore new file mode 100644 index 0000000..434eb98 --- /dev/null +++ b/deepfaune/.gitignore @@ -0,0 +1,9 @@ +*.pyc +venv/ +.coverage +coverage.xml +.pytest_cache +htmlcov +.vscode-server/ +.cache +.config/Ultralytics diff --git a/deepfaune/.python_history b/deepfaune/.python_history new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/Dockerfile b/deepfaune/Dockerfile new file mode 100644 index 0000000..e84d408 --- /dev/null +++ b/deepfaune/Dockerfile @@ -0,0 +1,47 @@ +FROM python:3.8-slim as base + +# Setup env +ENV LANG C.UTF-8 +ENV LC_ALL C.UTF-8 +ENV PYTHONDONTWRITEBYTECODE 1 +ENV PYTHONFAULTHANDLER 1 + +RUN apt-get update && apt-get install -y --no-install-recommends curl ffmpeg libsm6 libxext6 && rm -rf /var/lib/apt/lists/* + + +FROM base AS python-deps + +# Install pipenv and compilation dependencies +RUN pip install pipenv +RUN apt-get update && apt-get install -y --no-install-recommends gcc + +# Install python dependencies in /.venv +COPY Pipfile . +# COPY Pipfile.lock . +RUN PIPENV_VENV_IN_PROJECT=1 pipenv install --dev --deploy + +FROM base AS production + +ARG USER_ID=1000 +ARG GROUP_ID=1000 + +# Copy virtual env from python-deps stage +COPY --from=python-deps /.venv /.venv +ENV PATH="/.venv/bin:$PATH" + +# Create and switch to a new user +RUN groupadd -g $GROUP_ID app +RUN useradd -m -r -u $USER_ID -g app app +WORKDIR /home/app +USER app + +# Install application into container +COPY . . + +EXPOSE 8000 + +CMD celery -A src.worker.app worker -l debug -c 1 -B + +FROM production as development + +CMD celery -A src.worker.app worker -l debug -c 1 -B diff --git a/deepfaune/Pipfile b/deepfaune/Pipfile new file mode 100644 index 0000000..45cbcca --- /dev/null +++ b/deepfaune/Pipfile @@ -0,0 +1,29 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +fastapi = "*" +uvicorn = {extras = ["standard"], version = "*"} +httpx = "*" +pillow = "*" +celery = {extras = ["redis"], version = "*"} +torch = "*" +torchvision = "*" +yolov5 = "*" +timm = "*" +pandas = "*" +numpy = "*" +opencv-python = "*" + +[dev-packages] +black = "*" +isort = "*" +flake8 = "*" +pytest = "*" +pytest-cov = "*" +autoflake = "*" + +[requires] +python_version = "3.8" diff --git a/deepfaune/app.log b/deepfaune/app.log new file mode 100644 index 0000000..cf145e7 --- /dev/null +++ b/deepfaune/app.log @@ -0,0 +1,1101 @@ +root - DEBUG - starting +root - DEBUG - starting +src.worker.app - DEBUG - starting 2 +root - DEBUG - starting +src.worker.app - DEBUG - starting 2 +root - DEBUG - starting +src.worker.app - DEBUG - starting 2 +root - DEBUG - starting +src.worker.app - DEBUG - starting 2 +celery.utils.functional - DEBUG - +def chain(*args, **kwargs): + return 1 + +celery.utils.functional - DEBUG - +def xstarmap(task, it): + return 1 + +celery.utils.functional - DEBUG - +def backend_cleanup(): + return 1 + +celery.utils.functional - DEBUG - +def load_content(names): + return 1 + +celery.utils.functional - DEBUG - +def accumulate(self, *args, **kwargs): + return 1 + +celery.utils.functional - DEBUG - +def chord(self, header, body, partial_args=0, interval=1, countdown=2, max_retries=3, eager=4, **kwargs): + return 1 + +celery.utils.functional - DEBUG - +def chunks(task, it, n): + return 1 + +celery.utils.functional - DEBUG - +def 
download_task(url): + return 1 + +celery.utils.functional - DEBUG - +def get_task_from_id2(id_): + return 1 + +celery.utils.functional - DEBUG - +def unlock_chord(self, group_id, callback, interval=0, max_retries=1, result=2, Result=3, GroupResult=4, result_from_tuple=5, **kwargs): + return 1 + +celery.utils.functional - DEBUG - +def process_images(urls): + return 1 + +celery.utils.functional - DEBUG - +def group(self, tasks, result, group_id, partial_args, add_to_parent=0): + return 1 + +celery.utils.functional - 
DEBUG - +def xmap(task, it): + return 1 + diff --git a/deepfaune/celerybeat-schedule b/deepfaune/celerybeat-schedule new file mode 100644 index 0000000..c68e1eb Binary files /dev/null and b/deepfaune/celerybeat-schedule differ diff --git a/deepfaune/entrypoint.sh b/deepfaune/entrypoint.sh new file mode 100755 index 0000000..7718f91 --- /dev/null +++ b/deepfaune/entrypoint.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +set -o errexit +set -o pipefail +set -o nounset + +exec "$@" diff --git a/deepfaune/pyproject.toml b/deepfaune/pyproject.toml new file mode 100644 index 0000000..bfb49d5 --- /dev/null +++ b/deepfaune/pyproject.toml @@ -0,0 +1,18 @@ +[tool.black] +line-length = 100 + +[tool.isort] +profile = "black" +line_length = 100 +src_paths = src,tests + +[tool.pylint.design] +exclude-too-few-public-methods=".*BaseSettings.*,.*BaseModel,.*PaginatedModel.*" + +[tool.pylint.messages_control] +disable = [ + "missing-module-docstring", + "missing-function-docstring", + "missing-docstring", + "import-error" +] \ No newline at end of file diff --git a/deepfaune/src/__init__.py b/deepfaune/src/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/src/api/__init__.py b/deepfaune/src/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/src/api/v1/__init__.py b/deepfaune/src/api/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/src/api/v1/annotations.py b/deepfaune/src/api/v1/annotations.py new file mode 100644 index 0000000..defa0f7 --- /dev/null +++ b/deepfaune/src/api/v1/annotations.py @@ -0,0 +1,22 @@ +from typing import List +from pydantic import UUID4 +from fastapi import APIRouter + +from src.models.image import Image +from src.worker.app import process_images, get_task_from_id + +router = APIRouter() + +@router.post("/compute") +async def compute_images(images: List[Image]): + id_ = process_images(urls=[image.path for image in images]) + return {"id": id_} + +@router.get("/result/{task_id}") +async def get_result(task_id: UUID4): + res = get_task_from_id(id_=task_id) + state = res.state + value = None + if state == "SUCCESS": + value = res.get(timeout=1) + return {"state": res.state, "value": value} diff --git a/deepfaune/src/api/v1/api.py b/deepfaune/src/api/v1/api.py new file mode 100644 index 0000000..54f54e5 --- /dev/null +++ b/deepfaune/src/api/v1/api.py @@ -0,0 +1,6 @@ +from fastapi import APIRouter + +from src.api.v1 import annotations + +api_router = APIRouter() +api_router.include_router(annotations.router, tags=["deepfaune"]) diff --git a/deepfaune/src/core/__init__.py b/deepfaune/src/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/src/core/config.py b/deepfaune/src/core/config.py new file mode 100644 index 0000000..915c248 --- /dev/null +++ b/deepfaune/src/core/config.py @@ -0,0 +1,11 @@ + + + +class Settings(): + ROOT_PATH = "/deepfaune" + HOST = None + CELERY_BROKER = "redis://:broker_pwd@broker/0" + CELERY_BACKEND = "redis://:broker_pwd@broker/0" + CELERY_APP = "deepfaune" + +settings = Settings() diff --git a/deepfaune/src/main.py b/deepfaune/src/main.py new file mode 100644 index 0000000..105cbe7 --- /dev/null +++ b/deepfaune/src/main.py @@ -0,0 +1,19 @@ +from fastapi import FastAPI + +from src.api.v1.api import api_router +from src.core.config import settings +from src.worker.app import celery_app as app + +if __name__ == '__main__': + args = ['worker', '--loglevel=DEBUG'] + app.worker_main(argv=args) + +# app = FastAPI( +# root_path=settings.ROOT_PATH, +# 
openapi_url="/deepfaune/openapi.json", +# swagger_ui_parameters={"persistAuthorization": True}, +# docs_url="/deepfaune/docs", +# redoc_url=None +# ) +# app.include_router(api_router) + diff --git a/deepfaune/src/models/__init__.py b/deepfaune/src/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/src/models/image.py b/deepfaune/src/models/image.py new file mode 100644 index 0000000..c55d3c0 --- /dev/null +++ b/deepfaune/src/models/image.py @@ -0,0 +1,15 @@ +from pydantic import AnyHttpUrl, BaseModel, validator + +from src.core.config import settings + + +class Image(BaseModel): + # Restrict on HOST if set to avoid fuzzy images + path: AnyHttpUrl + + @validator("path") + def validate_path(cls, value): + host = settings.HOST + if host is not None: + assert value.host == host + return value diff --git a/deepfaune/src/worker/app.py b/deepfaune/src/worker/app.py new file mode 100644 index 0000000..af55aa3 --- /dev/null +++ b/deepfaune/src/worker/app.py @@ -0,0 +1,41 @@ +from typing import List +from pydantic import UUID4, AnyHttpUrl +import logging + +from celery import Celery, chord, shared_task + +from src.core.config import settings +from src.worker.process import download_image +from src.worker.deepfaune import predict +from celery.result import allow_join_result + +celery_app = Celery( + settings.CELERY_APP, broker=settings.CELERY_BROKER, backend=settings.CELERY_BACKEND +) + +# Need that otherwise: strategy = strategies[type_]\nKeyError: +@shared_task +def download_task(url: AnyHttpUrl): + return download_image(url=url) + +@celery_app.task() +def load_content(names): + return predict(filenames=names) + +@celery_app.task(name="deepfaune.pi") +def process_images(urls: List[AnyHttpUrl]) -> UUID4: + res = chord((download_task.s(url) for url in urls), load_content.s()).apply_async() + return res.id + +def get_task_from_id(id_: UUID4): + return celery_app.AsyncResult(str(id_)) + +@celery_app.task(name="deepfaune.gtfi") +def get_task_from_id2(id_: UUID4): + res = get_task_from_id(id_) + state = res.state + value = None + if state == "SUCCESS": + with allow_join_result(): + value = res.get(timeout=1) + return {"state": state, "value": value} diff --git a/deepfaune/src/worker/deepfaune.py b/deepfaune/src/worker/deepfaune.py new file mode 100644 index 0000000..dca7a36 --- /dev/null +++ b/deepfaune/src/worker/deepfaune.py @@ -0,0 +1,29 @@ +from src.deepfaune.predictTools import Predictor +import time + +## PREDICTOR OBJECT +LANG = 'fr' +MAXLAG = 20 +THRESHOLD = 0.5 + + +def predict(filenames: list): + predictor = Predictor(filenames, THRESHOLD, LANG) + + ## RUNNING BATCHES OF PREDICTION + predictor.allBatch() + + ## GETTING THE RESULTS + ## without using the sequences + predictedclass_bases, predictedscore_bases = predictor.getPredictions() + ## or using the sequences + predictedclasses, predictedscores = predictor.getPredictionsWithSequences(MAXLAG) + + ## OUTPUT + dates = predictor.getDates() + seqnum = predictor.getSeqnums() + + result = [] + for filename, date, seq, predictedclass_base, predictedscore_base, predictedclass, predictedscore in zip(filenames, dates, seqnum, predictedclass_bases, predictedscore_bases, predictedclasses, predictedscores): + result.append({'filename':filename, 'dates':date, 'seqnum':seq, 'predictionbase':predictedclass_base, 'scorebase':predictedscore_base, 'prediction':predictedclass, 'score':predictedscore}) + return result diff --git a/deepfaune/src/worker/process.py b/deepfaune/src/worker/process.py new file mode 100644 index 
0000000..ad39be0 --- /dev/null +++ b/deepfaune/src/worker/process.py @@ -0,0 +1,25 @@ +import tempfile +from typing import List +from pydantic import AnyHttpUrl + +import httpx + +from src.models.image import Image + + +def download_images(urls: List[AnyHttpUrl]): + for url in urls: + yield download_image(url) + + +def download_image(url: AnyHttpUrl): + request = httpx.get(url) + # TODO: To be re-engineered later : do not store images as + # files... + with tempfile.NamedTemporaryFile("wb", delete=False) as f: + f.write(request.content) + return f.name + + +def process_images(images: List[Image]): + names = list(download_images(urls=[image.path for image in images])) diff --git a/deepfaune/tests/__init__.py b/deepfaune/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/tests/conftest.py b/deepfaune/tests/conftest.py new file mode 100644 index 0000000..dc49cbc --- /dev/null +++ b/deepfaune/tests/conftest.py @@ -0,0 +1,16 @@ +from typing import Generator + +import pytest +from fastapi.testclient import TestClient + +from src.main import app + + +@pytest.fixture(scope="module") +def client() -> Generator: + with TestClient(app) as test_client: + yield test_client + + +# Add fixtures here +pytest_plugins = ["tests.fixtures.models.image"] diff --git a/deepfaune/tests/fixtures/__init__.py b/deepfaune/tests/fixtures/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/tests/fixtures/models/__init__.py b/deepfaune/tests/fixtures/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/tests/fixtures/models/image.py b/deepfaune/tests/fixtures/models/image.py new file mode 100644 index 0000000..48c6568 --- /dev/null +++ b/deepfaune/tests/fixtures/models/image.py @@ -0,0 +1,12 @@ +import pytest + +from src.models.image import Image + + +@pytest.fixture() +def images(): + urls = [ + "https://upload.wikimedia.org/wikipedia/commons/thumb/e/e6/Ibizea.jpg/1280px-Ibizea.jpg" + ] + + return [Image(path=url) for url in urls] diff --git a/deepfaune/tests/test_models/__init__.py b/deepfaune/tests/test_models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/tests/test_models/test_image.py b/deepfaune/tests/test_models/test_image.py new file mode 100644 index 0000000..57b6b0b --- /dev/null +++ b/deepfaune/tests/test_models/test_image.py @@ -0,0 +1,2 @@ +def test_model(): + pass diff --git a/deepfaune/tests/test_worker/__init__.py b/deepfaune/tests/test_worker/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/deepfaune/tests/test_worker/test_process.py b/deepfaune/tests/test_worker/test_process.py new file mode 100644 index 0000000..9937fd3 --- /dev/null +++ b/deepfaune/tests/test_worker/test_process.py @@ -0,0 +1,13 @@ +from src.worker.process import download_images, process_images + + +def test_download_images(images): + + names = list(download_images([image.path for image in images])) + + for name in names: + assert "tmp" in name + + +def test_process_images(images): + process_images(images=images) diff --git a/docker/.env.sample b/docker/.env.sample index 98e9fac..55a518c 100644 --- a/docker/.env.sample +++ b/docker/.env.sample @@ -66,4 +66,6 @@ TRAEFIK_ROUTER_RULE_API=HOST(`${DOMAIN}`) && PathPrefix(`${API_ROOT_PATH}`) TRAEFIK_ROUTER_RULE_KEYCLOAK=HOST(`${DOMAIN}`) && PathPrefix(`${KC_HTTP_RELATIVE_PATH}`) TRAEFIK_ROUTER_RULE_FRONTEND=HOST(`${DOMAIN}`) && PathPrefix(`/`) TRAEFIK_ROUTER_RULE_MINIO=Host(`${DOMAIN}`) && PathPrefix(`/${MINIO_BUCKET_NAME}`) -TRAEFIK_ROUTER_RULE_TAXAPI=HOST(`${DOMAIN}`) && 
PathPrefix(`${TAXAPI_ROOT_PATH}`) \ No newline at end of file +TRAEFIK_ROUTER_RULE_TAXAPI=HOST(`${DOMAIN}`) && PathPrefix(`${TAXAPI_ROOT_PATH}`) + +REDIS_PWD=broker_pwd \ No newline at end of file diff --git a/docker/docker-compose.override.yml b/docker/docker-compose.override.yml index 6705a29..432181b 100644 --- a/docker/docker-compose.override.yml +++ b/docker/docker-compose.override.yml @@ -26,6 +26,16 @@ services: ports: - "5666:5666" + deepfaune: + build: + context: ../deepfaune + dockerfile: Dockerfile + target: development + volumes: + - ../deepfaune:/home/app + ports: + - "3655:8000" + frontend: build: context: ../frontend diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 193c657..0bb71d0 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -6,7 +6,7 @@ x-restart-policy: &restart_policy x-project-defaults: &project_defaults <<: *restart_policy networks: - - annotation_nw + - geocam_nw env_file: .env services: @@ -27,7 +27,7 @@ services: dockerfile: Dockerfile target: development healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:8000/openapi.json"] + test: ["CMD", "curl", "-f", "http://localhost:8000/api/v1/openapi.json"] interval: 10s timeout: 5s retries: 3 @@ -54,6 +54,35 @@ services: - KEYCLOAK_SERVER_URL=${KEYCLOAK_SERVER_URL:-http://keycloak:8080/auth} - KEYCLOAK_TOKEN_URI=${KEYCLOAK_TOKEN_URI} + deepfaune: + <<: *project_defaults + build: + context: ../deepfaune + dockerfile: Dockerfile + target: development + depends_on: + broker: + condition: service_healthy + labels: + - traefik.enable=true + - traefik.http.routers.deepfaune.rule=PathPrefix(`/deepfaune`) + - traefik.http.routers.deepfaune.entrypoints=web + - traefik.http.routers.deepfaune.middlewares=deepfaune-stripprefix + - traefik.http.middlewares.deepfaune-stripprefix.stripprefix.prefixes=/deepfaune + + broker: + <<: *project_defaults + image: redis:7-alpine + command: redis-server --save 20 1 --loglevel warning --requirepass ${REDIS_PWD} + healthcheck: + test: [ "CMD", "redis-cli", "--raw", "incr", "ping" ] + interval: 10s + timeout: 5s + retries: 3 + start_period: 10s + volumes: + - broker:/data + taxapi: <<: *project_defaults image: ${TAXAPI_IMAGE:-registry.gitlab.com/natural-solutions/geonature/taxapi:taxapi-latest} @@ -153,7 +182,9 @@ services: volumes: db_data: minio_data: + broker: + networks: - annotation_nw: + geocam_nw: external: false \ No newline at end of file diff --git a/frontend/src/client/models/Body_upload_zip_files_upload_zip__deployment_id__post.ts b/frontend/src/client/models/Body_upload_zip_files_upload_zip__deployment_id__post.ts index b7f04fa..1a2d0a8 100644 --- a/frontend/src/client/models/Body_upload_zip_files_upload_zip__deployment_id__post.ts +++ b/frontend/src/client/models/Body_upload_zip_files_upload_zip__deployment_id__post.ts @@ -3,7 +3,6 @@ /* eslint-disable */ export type Body_upload_zip_files_upload_zip__deployment_id__post = { - hash: Array; zipFile: Blob; }; diff --git a/frontend/src/client/models/Files.ts b/frontend/src/client/models/Files.ts index 319e354..b8e2e25 100644 --- a/frontend/src/client/models/Files.ts +++ b/frontend/src/client/models/Files.ts @@ -10,7 +10,7 @@ export type Files = { date?: string; id?: string; megadetector_id?: number; - deepfaune_id?: number; + prediction_deepfaune?: any; deployment_id: number; treated?: boolean; annotations?: Array; diff --git a/frontend/src/client/services/DefaultService.ts b/frontend/src/client/services/DefaultService.ts index 6381c3d..245e4c8 100644 --- 
a/frontend/src/client/services/DefaultService.ts +++ b/frontend/src/client/services/DefaultService.ts @@ -121,4 +121,16 @@ export class DefaultService { }); } + /** + * Root + * @returns any Successful Response + * @throws ApiError + */ + public static rootGet(): CancelablePromise { + return __request(OpenAPI, { + method: 'GET', + url: '/', + }); + } + } diff --git a/frontend/src/components/GalleryItem.tsx b/frontend/src/components/GalleryItem.tsx index 1f63479..b74c43a 100644 --- a/frontend/src/components/GalleryItem.tsx +++ b/frontend/src/components/GalleryItem.tsx @@ -2,6 +2,7 @@ import { useNavigate } from "react-router-dom"; import { Grid, Tooltip, Typography } from "@mui/material"; import CameraAltIcon from '@mui/icons-material/CameraAlt'; import VideocamIcon from '@mui/icons-material/Videocam'; +import CheckCircleOutlineIcon from '@mui/icons-material/CheckCircleOutline'; const thumbnailStyle = { @@ -56,7 +57,7 @@ const GalleryItem = ( noWrap component={"span"} variant="body2" - sx={{ width: "90%" }} + sx={{ width: "80%" }} > { name } @@ -65,38 +66,28 @@ const GalleryItem = ( } const displayThumbnail = (item) => { - if (item.extension.includes("image")) { - return ( - <> - { displayImage(item) } - - - { displayName(item.name) } - - - ) - } - else { - return ( - <> - { displayImage(item) } - + return ( + <> + { displayImage(item) } + + { + Object.keys(item.prediction_deepfaune).length !== 0 && + + } + { + item.extension.includes("image") ? + : - { displayName(item.name) } - - - ) - }; + } + { displayName(item.name) } + + + ) }; return( diff --git a/frontend/src/components/annotation/ObservationTab.tsx b/frontend/src/components/annotation/ObservationTab.tsx index b55410d..9697d07 100644 --- a/frontend/src/components/annotation/ObservationTab.tsx +++ b/frontend/src/components/annotation/ObservationTab.tsx @@ -8,6 +8,7 @@ import { useTranslation } from "react-i18next"; import { useAnnotationContext } from "../../contexts/annotationContext"; import { Annotation } from "../../client"; import { FC } from "react"; +import PredictionArea from "./PredictionArea"; interface ObservationTabProps { valueTab: number; @@ -20,7 +21,7 @@ const ObservationTab: FC = ({ index }) => { const { t } = useTranslation(); - + const { observations, annotated, @@ -34,6 +35,8 @@ const ObservationTab: FC = ({ valueTab={ valueTab } index={ index } > + + { treated ? 
= () => { + + const { t } = useTranslation(); + + const { image } = useMainContext(); + + const { + setObservations, + observationTemplate, + } = useAnnotationContext(); + + const deepfauneMapFr = [{"taxo":"blaireau", "taxref":194585}, {"taxo":"bouquetin", "taxref":190318}, {"taxo":"cerf", "taxref":190552}, {"taxo":"chamois", "taxref":197289}, {"taxo":"chat", "taxref":192539}, {"taxo":"chevreuil", "taxref":61057}, {"taxo":"chien", "taxref":162663}, {"taxo":"ecureuil", "taxref":186261}, {"taxo":"equide", "taxref":186248}, {"taxo":"lagomorphe", "taxref":186244}, {"taxo":"loup", "taxref":60577}, {"taxo":"lynx", "taxref":194351}, {"taxo":"marmotte", "taxref":194469}, {"taxo":"micromammifere", "taxref":186206}, {"taxo":"mouflon", "taxref":195202}, {"taxo":"mouton", "taxref":199754}, {"taxo":"mustelide", "taxref":186215}, {"taxo":"oiseau", "taxref":185961}, {"taxo":"ours", "taxref":186219}, {"taxo":"renard", "taxref":60585}, {"taxo":"sanglier", "taxref":60981}, {"taxo":"vache", "taxref":199695}]; + + const [predCheck, setPredChecked] = useState(false); + const [observation, setObservation] = useState(observationTemplate); + + const predCheckChange = (prediction: string) => { + if (!predCheck) { + setPredChecked(true); + let deepfauneMap = deepfauneMapFr.find((element) => element.taxo == prediction); + let id_annot = deepfauneMap?.taxref.toString(); + id_annot && getDataById(id_annot); + }; + if (predCheck) { + setPredChecked(false); + setObservations([]); + }; + }; + + async function getDataById (id_annot: string) { + let data = (await axios.get(`/taxapi/V1/taxons?CD_NOM=${id_annot}`)).data; + // TODO: should be modified when new taxapi image will be used (request on CD_REF returns unique match) + // if (data.length > 1) { + // console.log("Error: many matches with CD_REF="+id_annot); + // return; + // }; + // data = data[0]; + data = data.find((element) => element.CD_REF == id_annot); + + let classe = data.CLASSE || ""; + let order = data.ORDRE || ""; + let family = data.FAMILLE || ""; + if (data.RANG === "GN") { + setObservation({...observation, id_annotation: id_annot, classe: classe, order: order, family: family, genus: data.LB_NOM, number: 1 }); + }; + if (data.RANG === "ES") { + setObservation({...observation, id_annotation: id_annot, classe: classe, order: order, family: family, genus: data.LB_NOM.split(" ")[0], species: data.LB_NOM, number: 1 }); + }; + + }; + + useEffect(() => { + setPredChecked(false); + }, [image()?.id]); + + useEffect(() => { + setObservations([observation]); + },[observation]); + + return ( + <>{ + image() && + image().prediction_deepfaune && + Object.keys(image().prediction_deepfaune).length !== 0 && + + { capitalize(t("annotations.prediction")) } + + predCheckChange(image().prediction_deepfaune.prediction) } + /> } + label={"Deepfaune : " + capitalize(image().prediction_deepfaune.prediction) + " (score : " + image().prediction_deepfaune.score.toString() + ")" } + /> + + + } + ); +}; + +export default PredictionArea; \ No newline at end of file diff --git a/frontend/src/components/imageList.tsx b/frontend/src/components/imageList.tsx index f83bb8b..455feef 100644 --- a/frontend/src/components/imageList.tsx +++ b/frontend/src/components/imageList.tsx @@ -7,14 +7,15 @@ import Dropzone from "react-dropzone"; import { Grid, Stack, Typography, capitalize } from "@mui/material"; import { useParams } from "react-router-dom"; import { FilesService } from "../client"; -import CameraAltIcon from '@mui/icons-material/CameraAlt'; +import FolderZipIcon from '@mui/icons-material/FolderZip'; +import PermMediaIcon from
'@mui/icons-material/PermMedia'; import { useTranslation } from "react-i18next"; import ButtonsYesNo from "./common/buttonsYesNo"; const ImageList: FC<{}> = () => { const { t } = useTranslation() const [files, setFiles] = useState([]); - const { projects, updateListFile, setCurrentDeployment, currentDeployment, deploymentData } = + const { projects, updateListFile, setCurrentDeployment, deploymentData } = useMainContext(); let params = useParams(); @@ -26,11 +27,21 @@ const ImageList: FC<{}> = () => { const save = () => { for (const file of files) { - FilesService - .uploadFileFilesUploadDeploymentIdPost(Number(params.deploymentId), { file }) - .then((res) => { - updateListFile(); - }); + console.log("file:", file) + if (file.name.endsWith(".zip")) { + FilesService + .uploadZipFilesUploadZipDeploymentIdPost(Number(params.deploymentId), { zipFile: file }) + .then((res) => { + updateListFile(); + }); + } + else { + FilesService + .uploadFileFilesUploadDeploymentIdPost(Number(params.deploymentId), { file }) + .then((res) => { + updateListFile(); + }); + } } clear(); }; @@ -44,10 +55,10 @@ const ImageList: FC<{}> = () => { setFiles(files); }; - const dropZoneDisplayText = () => { + const dropZoneDisplayText = (legend) => { if (files.length === 0) { return ( -

{capitalize(t("deployments.drop_files"))}

+

{legend}

); } else { return

{files.map((f) => f.name).join(", ")}

; @@ -58,25 +69,47 @@ const ImageList: FC<{}> = () => { <> {deploymentData ? ( - {capitalize(t("projects.import_media"))} - - {({ getRootProps, getInputProps }) => ( -
-
- - - - + + {capitalize(t("projects.import_media"))} + + + + {({ getRootProps, getInputProps }) => ( +
+
+ + + + + + + {dropZoneDisplayText(capitalize(t("deployments.drop_files")))} + - - {dropZoneDisplayText()} - - -
-
- )} -
+
+
+ )} +
+ + + {({ getRootProps, getInputProps }) => ( +
+
+ + + + + + + {dropZoneDisplayText(capitalize(t("deployments.drop_files_zip")))} + + +
+
+ )} +
+