From 39365c6a08f5b8a256a7cd39d8cebb6582601637 Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 16 Dec 2021 13:37:08 -0600 Subject: [PATCH 1/3] upload multiple files --- app/routers/files.py | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/app/routers/files.py b/app/routers/files.py index 4147fac..a4effe3 100644 --- a/app/routers/files.py +++ b/app/routers/files.py @@ -26,6 +26,49 @@ auth_handler = AuthHandler() +@router.post("/{dataset_id}/uploadMultiple") +async def save_files( + dataset_id: str, + user_id=Depends(auth_handler.auth_wrapper), + db: MongoClient = Depends(dependencies.get_db), + fs: Minio = Depends(dependencies.get_fs), + files: List[UploadFile] = File(...), + file_info: List[Optional[Json[ClowderFile]]] = None, +): + uploaded_files = [] + + user = await db["users"].find_one({"_id": ObjectId(user_id)}) + dataset = await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) + if dataset is not None and user is not None: + for file in files: + f = dict() + f["name"] = file.filename + f["creator"] = user["_id"] + f["views"] = 0 + f["downloads"] = 0 + new_file = await db["files"].insert_one(f) + found = await db["files"].find_one({"_id": new_file.inserted_id}) + + new_file_id = found["_id"] + + updated_dataset = await db["datasets"].update_one( + {"_id": ObjectId(dataset_id)}, {"$push": {"files": ObjectId(new_file_id)}} + ) + + # Second, use unique ID as key for file storage + while content := file.file.read( + settings.MINIO_UPLOAD_CHUNK_SIZE + ): # async read chunk + fs.put_object( + settings.MINIO_BUCKET_NAME, + str(new_file.inserted_id), + io.BytesIO(content), + length=-1, + part_size=settings.MINIO_UPLOAD_CHUNK_SIZE, + ) # async write chunk to minio + uploaded_files.append(ClowderFile.from_mongo(found)) + return uploaded_files + @router.post("/{dataset_id}", response_model=ClowderFile) async def save_file( From 5bbb81fb3cc43f806ee2aa6b317d0af7840f25e9 Mon Sep 17 00:00:00 2001 From: toddn Date: Thu, 16 
Dec 2021 13:37:43 -0600 Subject: [PATCH 2/3] removing unused file_info parameter --- app/routers/files.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/app/routers/files.py b/app/routers/files.py index a4effe3..444097c 100644 --- a/app/routers/files.py +++ b/app/routers/files.py @@ -32,8 +32,7 @@ async def save_files( user_id=Depends(auth_handler.auth_wrapper), db: MongoClient = Depends(dependencies.get_db), fs: Minio = Depends(dependencies.get_fs), - files: List[UploadFile] = File(...), - file_info: List[Optional[Json[ClowderFile]]] = None, + files: List[UploadFile] = File(...) ): uploaded_files = [] From b8a79ef18d06eceffd33c07db797670a75ea0f70 Mon Sep 17 00:00:00 2001 From: toddn Date: Mon, 20 Dec 2021 12:42:59 -0600 Subject: [PATCH 3/3] removing old endpoint - no reason to have an upload 1 file endpoint with an upload multiple one --- app/routers/files.py | 41 +---------------------------------------- 1 file changed, 1 insertion(+), 40 deletions(-) diff --git a/app/routers/files.py b/app/routers/files.py index 444097c..0d46601 100644 --- a/app/routers/files.py +++ b/app/routers/files.py @@ -26,7 +26,7 @@ auth_handler = AuthHandler() -@router.post("/{dataset_id}/uploadMultiple") +@router.post("/{dataset_id}") async def save_files( dataset_id: str, user_id=Depends(auth_handler.auth_wrapper), db: MongoClient = Depends(dependencies.get_db), fs: Minio = Depends(dependencies.get_fs), @@ -69,45 +69,6 @@ async def save_files( return uploaded_files -@router.post("/{dataset_id}", response_model=ClowderFile) -async def save_file( - dataset_id: str, - user_id=Depends(auth_handler.auth_wrapper), - db: MongoClient = Depends(dependencies.get_db), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - file_info: Optional[Json[ClowderFile]] = None, -): - # First, add to database and get unique ID - f = dict(file_info) if file_info is not None else {} - user = await db["users"].find_one({"_id": ObjectId(user_id)}) - dataset = await db["datasets"].find_one({"_id": ObjectId(dataset_id)}) - f["name"] = file.filename - f["creator"] 
= user["_id"] - f["views"] = 0 - f["downloads"] = 0 - new_file = await db["files"].insert_one(f) - found = await db["files"].find_one({"_id": new_file.inserted_id}) - - new_file_id = found["_id"] - - updated_dataset = await db["datasets"].update_one( - {"_id": ObjectId(dataset_id)}, {"$push": {"files": ObjectId(new_file_id)}} - ) - - # Second, use unique ID as key for file storage - while content := file.file.read( - settings.MINIO_UPLOAD_CHUNK_SIZE - ): # async read chunk - fs.put_object( - settings.MINIO_BUCKET_NAME, - str(new_file.inserted_id), - io.BytesIO(content), - length=-1, - part_size=settings.MINIO_UPLOAD_CHUNK_SIZE, - ) # async write chunk to minio - - return ClowderFile.from_mongo(found) @router.get("/{file_id}")