@router.post("/{dataset_id}")
async def save_files(
    dataset_id: str,
    user_id=Depends(auth_handler.auth_wrapper),
    db: MongoClient = Depends(dependencies.get_db),
    fs: Minio = Depends(dependencies.get_fs),
    files: List[UploadFile] = File(...),
):
    """Upload one or more files into an existing dataset.

    For each uploaded file this:
      1. inserts a metadata document into the ``files`` collection
         (name, creator, zeroed view/download counters),
      2. pushes the new file's ``_id`` onto the dataset's ``files`` array,
      3. streams the file bytes to Minio under the stringified ``_id`` key.

    Parameters:
        dataset_id: hex string id of the target dataset document.
        user_id: id of the authenticated user (from the auth wrapper).
        db: async Mongo client dependency.
        fs: Minio client dependency.
        files: one or more multipart uploads.

    Returns:
        A list of ``ClowderFile`` records for the stored files; an empty
        list when the user or dataset cannot be found.
    """
    uploaded_files = []
    user = await db["users"].find_one({"_id": ObjectId(user_id)})
    dataset = await db["datasets"].find_one({"_id": ObjectId(dataset_id)})
    # NOTE(review): a missing user/dataset silently yields [] — consider
    # raising a 404 HTTPException instead; kept as-is for compatibility.
    if dataset is not None and user is not None:
        for file in files:
            # 1) Metadata document first, so Mongo assigns the unique _id
            #    that doubles as the object-store key.
            file_doc = {
                "name": file.filename,
                "creator": user["_id"],
                "views": 0,
                "downloads": 0,
            }
            new_file = await db["files"].insert_one(file_doc)
            found = await db["files"].find_one({"_id": new_file.inserted_id})

            # 2) Link the file into the dataset. found["_id"] is already an
            #    ObjectId, so no re-wrapping is needed.
            await db["datasets"].update_one(
                {"_id": ObjectId(dataset_id)},
                {"$push": {"files": new_file.inserted_id}},
            )

            # 3) Stream the bytes to Minio in a SINGLE put_object call.
            #    BUG FIX: the previous code called put_object once per
            #    read() chunk with the same key, so each chunk overwrote
            #    the object and only the last chunk was ever stored. With
            #    length=-1 and part_size set, the Minio SDK itself reads
            #    the stream in part_size chunks (multipart upload).
            fs.put_object(
                settings.MINIO_BUCKET_NAME,
                str(new_file.inserted_id),
                file.file,
                length=-1,
                part_size=settings.MINIO_UPLOAD_CHUNK_SIZE,
            )
            uploaded_files.append(ClowderFile.from_mongo(found))
    return uploaded_files