linting & formatting
bulletinmybeard committed Mar 31, 2024
1 parent 2042221 commit e352dd8
Showing 2 changed files with 13 additions and 14 deletions.
18 changes: 7 additions & 11 deletions audit_logger/main.py
@@ -1,6 +1,6 @@
 import traceback
 from contextlib import asynccontextmanager
-from typing import Any, AsyncGenerator, Dict, List, Optional, cast, Union
+from typing import Any, AsyncGenerator, Dict, List, Optional, Union, cast

 from fastapi import Body, Depends, FastAPI, HTTPException
 from fastapi.exceptions import RequestValidationError
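
Note: the typing import above is only reordered. With isort's default order_by_type setting, lowercase names such as cast sort after class-style names such as Union, which matches the change in this hunk. A minimal sketch of the same reordering through isort's Python API, assuming isort 5+ is installed:

# Sketch only: reproduce the import reordering shown in the hunk above.
import isort

line = "from typing import Any, AsyncGenerator, Dict, List, Optional, cast, Union\n"
print(isort.code(line), end="")
# Expected: from typing import Any, AsyncGenerator, Dict, List, Optional, Union, cast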
@@ -75,9 +75,7 @@ async def verify_api_key(api_key: str = Depends(api_key_header)) -> str:

 @app.post("/create", dependencies=[Depends(verify_api_key)])
 # ) -> GenericResponse:
-async def create_audit_log_entry(
-    audit_log: AuditLogEntry = Body(...)
-) -> Any:
+async def create_audit_log_entry(audit_log: AuditLogEntry = Body(...)) -> Any:
     """
     Receives an audit log entry, validates it, and processes
     it to be stored in Elasticsearch.
@@ -92,15 +90,13 @@ async def create_audit_log_entry(
     HTTPException
     """
     return await process_audit_logs(
-        elastic,
-        cast(str, env_vars.elastic_index_name),
-        audit_log
+        elastic, cast(str, env_vars.elastic_index_name), audit_log
     )


 @app.post("/create-bulk", dependencies=[Depends(verify_api_key)])
 async def create_bulk_audit_log_entries(
-    audit_logs: List[AuditLogEntry] = Body(...)
+    audit_logs: List[AuditLogEntry] = Body(...),
 ) -> GenericResponse:
     """
     Receives one or multiple audit log entries, validates them, and processes
@@ -122,7 +118,7 @@ async def create_bulk_audit_log_entries(
     return await process_audit_logs(
         elastic,
         cast(str, env_vars.elastic_index_name),
-        [dict(model.dict()) for model in audit_logs]
+        [dict(model.dict()) for model in audit_logs],
     )


@@ -142,13 +138,13 @@ async def create_random_audit_log_entries(
     return await process_audit_logs(
         elastic,
         cast(str, env_vars.elastic_index_name),
-        generate_audit_log_entries_with_fake_data(options)
+        generate_audit_log_entries_with_fake_data(options),
     )


 @app.post("/search", dependencies=[Depends(verify_api_key)])
 def search_audit_log_entries(
-    params: Optional[SearchParamsV2] = Body(default=None)
+    params: Optional[SearchParamsV2] = Body(default=None),
 ) -> SearchResults:
     """
     Performs a search query against audit log entries stored in Elasticsearch based on
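Note: the remaining main.py changes look like plain Black formatting: argument lists that fit within the default 88-character line length are collapsed onto one line, while lists kept on multiple lines receive a trailing comma. A minimal sketch of the collapsing behaviour, assuming Black is installed (the source is passed as a string, so the undefined names AuditLogEntry and Body do not matter; the pass body is illustrative):

# Sketch only: Black joins a signature onto one line when it fits within the
# configured line length and no magic trailing comma is present.
import black

src = (
    "async def create_audit_log_entry(\n"
    "    audit_log: AuditLogEntry = Body(...)\n"
    ") -> Any:\n"
    "    pass\n"
)
print(black.format_str(src, mode=black.Mode()))
# Expected output:
# async def create_audit_log_entry(audit_log: AuditLogEntry = Body(...)) -> Any:
#     pass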
9 changes: 6 additions & 3 deletions audit_logger/utils.py
@@ -5,7 +5,7 @@
 from datetime import datetime
 from typing import Any, Dict, List, Union

-from elasticsearch import Elasticsearch, SerializationError, helpers, ConnectionError
+from elasticsearch import Elasticsearch, SerializationError, helpers
 from faker import Faker
 from fastapi import HTTPException, status
 from pydantic import ValidationError
@@ -146,7 +146,7 @@ def generate_audit_log_entries_with_fake_data(
 async def process_audit_logs(
     elastic: Elasticsearch,
     elastic_index_name: str,
-    log_entries: Union[AuditLogEntry, List[Union[Dict, AuditLogEntry]]]
+    log_entries: Union[AuditLogEntry, List[Union[Dict, AuditLogEntry]]],
 ) -> Any:
     """
     Processes a list of audit log entries by sending them to Elasticsearch using the bulk API.
@@ -173,7 +173,10 @@ async def process_audit_logs(
     failed_items = failed if isinstance(failed, list) else []

     if len(failed_items) > 0:
-        raise HTTPException(status_code=500, detail=f"Failed to process audit logs: {str(failed_items)}")
+        raise HTTPException(
+            status_code=500,
+            detail=f"Failed to process audit logs: {str(failed_items)}",
+        )

     if is_bulk_operation:
         return GenericResponse(
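Note: the failed_items guard above implies that failed can be either a list of per-item errors or a plain count, which matches what elasticsearch.helpers.bulk returns when errors are collected rather than raised. The actual call site sits outside this hunk, so the following is only a sketch of how such a (success, failed) pair could be produced; the index_bulk name and the action layout are illustrative, not taken from the repository:

# Sketch only: collect per-document failures instead of raising on the first
# error, mirroring the failed/failed_items handling shown in the hunk above.
from typing import Any, Dict, List, Tuple

from elasticsearch import Elasticsearch, helpers


def index_bulk(
    elastic: Elasticsearch, index_name: str, docs: List[Dict[str, Any]]
) -> Tuple[int, List[Any]]:
    actions = [{"_index": index_name, "_source": doc} for doc in docs]
    # With raise_on_error=False, helpers.bulk returns (success_count, errors);
    # errors is a list here, but an int when stats_only=True is used instead.
    success, failed = helpers.bulk(elastic, actions, raise_on_error=False)
    failed_items = failed if isinstance(failed, list) else []
    return success, failed_items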
