diff --git a/backend/btrixcloud/models.py b/backend/btrixcloud/models.py
index 08c7dd31d9..6630ae184e 100644
--- a/backend/btrixcloud/models.py
+++ b/backend/btrixcloud/models.py
@@ -208,6 +208,14 @@ class UserOut(BaseModel):
     orgs: List[UserOrgInfoOut]
 
 
+# ============================================================================
+class UserEmailWithOrgInfo(BaseModel):
+    """Output model for getting user email list with org info for each"""
+
+    email: EmailStr
+    orgs: List[UserOrgInfoOut]
+
+
 # ============================================================================
 
 ### CRAWL STATES
@@ -2453,3 +2461,10 @@ class PaginatedCrawlErrorResponse(PaginatedResponse):
     """Response model for crawl errors"""
 
     items: List[CrawlError]
+
+
+# ============================================================================
+class PaginatedUserEmailsResponse(PaginatedResponse):
+    """Response model for user emails with org info"""
+
+    items: List[UserEmailWithOrgInfo]
diff --git a/backend/btrixcloud/users.py b/backend/btrixcloud/users.py
index 3e5b6fcc16..ce480923cf 100644
--- a/backend/btrixcloud/users.py
+++ b/backend/btrixcloud/users.py
@@ -6,7 +6,7 @@
 from uuid import UUID, uuid4
 import asyncio
 
-from typing import Optional, List, TYPE_CHECKING, cast, Callable
+from typing import Optional, List, TYPE_CHECKING, cast, Callable, Tuple
 
 from fastapi import (
     Request,
@@ -34,6 +34,8 @@
     FailedLogin,
     UpdatedResponse,
     SuccessResponse,
+    UserEmailWithOrgInfo,
+    PaginatedUserEmailsResponse,
 )
 from .pagination import DEFAULT_PAGE_SIZE, paginated_format
 from .utils import is_bool, dt_now
@@ -546,6 +548,30 @@ async def get_failed_logins_count(self, email: str) -> int:
             return 0
         return failed_login.get("count", 0)
 
+    async def get_user_emails(
+        self,
+        page_size: int = DEFAULT_PAGE_SIZE,
+        page: int = 1,
+    ) -> Tuple[List[UserEmailWithOrgInfo], int]:
+        """Get user emails with org info for each for paginated endpoint"""
+        # Zero-index page for query
+        page = page - 1
+        skip = page_size * page
+
+        emails: List[UserEmailWithOrgInfo] = []
+
+        total = await self.users.count_documents({"is_superuser": False})
+        async for res in self.users.find(
+            {"is_superuser": False}, skip=skip, limit=page_size
+        ):
+            user = User(**res)
+            user_out = await self.get_user_info_with_orgs(user)
+            emails.append(
+                UserEmailWithOrgInfo(email=user_out.email, orgs=user_out.orgs)
+            )
+
+        return emails, total
+
 
 # ============================================================================
 def init_user_manager(mdb, emailsender, invites):
@@ -706,4 +732,21 @@ async def get_pending_invites(
         )
         return paginated_format(pending_invites, total, page, pageSize)
 
+    @users_router.get(
+        "/emails", tags=["users"], response_model=PaginatedUserEmailsResponse
+    )
+    async def get_user_emails(
+        user: User = Depends(current_active_user),
+        pageSize: int = DEFAULT_PAGE_SIZE,
+        page: int = 1,
+    ):
+        """Get emails of registered users with org information (superuser only)"""
+        if not user.is_superuser:
+            raise HTTPException(status_code=403, detail="not_allowed")
+
+        emails, total = await user_manager.get_user_emails(
+            page_size=pageSize, page=page
+        )
+        return paginated_format(emails, total, page, pageSize)
+
     return users_router
diff --git a/backend/test/test_users.py b/backend/test/test_users.py
index a06c1cd597..7ca9e9fc1c 100644
--- a/backend/test/test_users.py
+++ b/backend/test/test_users.py
@@ -774,3 +774,42 @@ def test_patch_me_invalid_email_in_use(admin_auth_headers, default_org_id):
     )
     assert r.status_code == 400
     assert r.json()["detail"] == "user_already_exists"
+
+
+def test_user_emails_endpoint_non_superuser(crawler_auth_headers, default_org_id):
+    r = requests.get(
+        f"{API_PREFIX}/users/emails",
+        headers=crawler_auth_headers,
+    )
+    assert r.status_code == 403
+    assert r.json()["detail"] == "not_allowed"
+
+
+def test_user_emails_endpoint_superuser(admin_auth_headers, default_org_id):
+    r = requests.get(
+        f"{API_PREFIX}/users/emails",
+        headers=admin_auth_headers,
+    )
+    assert r.status_code == 200
+    data = r.json()
+
+    total = data["total"]
+    user_emails = data["items"]
+
+    assert total > 0
+    assert total == len(user_emails)
+
+    for user in user_emails:
+        assert user["email"]
+        orgs = user.get("orgs")
+        if orgs == []:
+            continue
+
+        for org in orgs:
+            assert org["id"]
+            assert org["name"]
+            assert org["slug"]
+            assert org["default"] in (True, False)
+            role = org["role"]
+            assert role
+            assert isinstance(role, int)
diff --git a/backend/test_nightly/test_storage_quota.py b/backend/test_nightly/test_storage_quota.py
index 212d96e0a7..7945092415 100644
--- a/backend/test_nightly/test_storage_quota.py
+++ b/backend/test_nightly/test_storage_quota.py
@@ -15,6 +15,7 @@
 storage_quota = None
 
 
+
 def run_crawl(org_id, headers):
     crawl_data = {
         "runNow": True,