From b6528d28f36d1e273734ea91177f1cc1724b58d6 Mon Sep 17 00:00:00 2001 From: Alberto Islas Date: Mon, 29 Jul 2024 14:59:28 -0500 Subject: [PATCH 01/34] feat(alerts): Added grouping to OA RT search alerts. --- cl/alerts/tasks.py | 73 +++++++--------------- cl/alerts/tests/tests.py | 131 ++++++++++++++++++++++++++++++++------- 2 files changed, 129 insertions(+), 75 deletions(-) diff --git a/cl/alerts/tasks.py b/cl/alerts/tasks.py index 3d19a993b8..64db406bc2 100644 --- a/cl/alerts/tasks.py +++ b/cl/alerts/tasks.py @@ -655,8 +655,6 @@ def percolator_response_processing(response: PercolatorResponsesType) -> None: return None scheduled_hits_to_create = [] - email_alerts_to_send = [] - rt_alerts_to_send = [] ( main_alerts_triggered, rd_alerts_triggered, @@ -755,64 +753,37 @@ def percolator_response_processing(response: PercolatorResponsesType) -> None: # user's donations. send_webhook_alert_hits(alert_user, hits) - # Send RT Alerts for Audio. if ( alert_triggered.rate == Alert.REAL_TIME - and app_label_model == "audio.Audio" + and not alert_user.profile.is_member ): - if not alert_user.profile.is_member: - continue - - # Append alert RT email to be sent. - email_alerts_to_send.append((alert_user.pk, hits)) - rt_alerts_to_send.append(alert_triggered.pk) - - else: - if ( - alert_triggered.rate == Alert.REAL_TIME - and not alert_user.profile.is_member - ): - # Omit scheduling an RT alert if the user is not a member. - continue - # Schedule RT, DAILY, WEEKLY and MONTHLY Alerts - if scheduled_alert_hits_limit_reached( - alert_triggered.pk, - alert_triggered.user.pk, - instance_content_type, - object_id, - child_document, - ): - # Skip storing hits for this alert-user combination because - # the SCHEDULED_ALERT_HITS_LIMIT has been reached. - continue + # Omit scheduling an RT alert if the user is not a member. + continue + # Schedule RT, DAILY, WEEKLY and MONTHLY Alerts + if scheduled_alert_hits_limit_reached( + alert_triggered.pk, + alert_triggered.user.pk, + instance_content_type, + object_id, + child_document, + ): + # Skip storing hits for this alert-user combination because + # the SCHEDULED_ALERT_HITS_LIMIT has been reached. + continue - scheduled_hits_to_create.append( - ScheduledAlertHit( - user=alert_triggered.user, - alert=alert_triggered, - document_content=document_content_copy, - content_type=instance_content_type, - object_id=object_id, - ) + scheduled_hits_to_create.append( + ScheduledAlertHit( + user=alert_triggered.user, + alert=alert_triggered, + document_content=document_content_copy, + content_type=instance_content_type, + object_id=object_id, ) + ) # Create scheduled RT, DAILY, WEEKLY and MONTHLY Alerts in bulk. if scheduled_hits_to_create: ScheduledAlertHit.objects.bulk_create(scheduled_hits_to_create) - # Sent all the related document RT emails. - if email_alerts_to_send: - send_search_alert_emails.delay(email_alerts_to_send, schedule_alert) - - # Update RT Alerts date_last_hit, increase stats and log RT alerts sent. - if rt_alerts_to_send: - Alert.objects.filter(pk__in=rt_alerts_to_send).update( - date_last_hit=now() - ) - alerts_sent = len(rt_alerts_to_send) - async_to_sync(tally_stat)( - f"alerts.sent.{Alert.REAL_TIME}", inc=alerts_sent - ) - logger.info(f"Sent {alerts_sent} {Alert.REAL_TIME} email alerts.") # TODO: Remove after scheduled OA alerts have been processed. 
diff --git a/cl/alerts/tests/tests.py b/cl/alerts/tests/tests.py index 633b1d030b..97d9990023 100644 --- a/cl/alerts/tests/tests.py +++ b/cl/alerts/tests/tests.py @@ -73,10 +73,14 @@ Opinion, RECAPDocument, ) -from cl.search.tasks import add_items_to_solr from cl.stats.models import Stat from cl.tests.base import SELENIUM_TIMEOUT, BaseSeleniumTest -from cl.tests.cases import APITestCase, ESIndexTestCase, TestCase +from cl.tests.cases import ( + APITestCase, + ESIndexTestCase, + RECAPAlertsAssertions, + TestCase, +) from cl.tests.utils import MockResponse, make_client from cl.users.factories import UserFactory, UserProfileWithParentsFactory from cl.users.models import EmailSent @@ -1834,7 +1838,7 @@ def test_get_docket_notes_and_tags_by_user(self) -> None: "cl.lib.es_signal_processor.allow_es_audio_indexing", side_effect=lambda x, y: True, ) -class SearchAlertsOAESTests(ESIndexTestCase, TestCase): +class SearchAlertsOAESTests(ESIndexTestCase, TestCase, RECAPAlertsAssertions): """Test ES Search Alerts""" @classmethod @@ -1977,11 +1981,22 @@ def test_send_oa_search_alert_webhooks(self, mock_abort_audio): stt_source=Audio.STT_OPENAI_WHISPER, ) + # Send RT alerts + with time_machine.travel(mock_date, tick=False): + call_command("cl_send_rt_percolator_alerts", testing_mode=True) # Confirm Alert date_last_hit is updated. self.search_alert.refresh_from_db() self.search_alert_2.refresh_from_db() - self.assertEqual(self.search_alert.date_last_hit, mock_date) - self.assertEqual(self.search_alert_2.date_last_hit, mock_date) + self.assertEqual( + self.search_alert.date_last_hit, + mock_date, + msg="Alert date of last hit didn't match.", + ) + self.assertEqual( + self.search_alert_2.date_last_hit, + mock_date, + msg="Alert date of last hit didn't match.", + ) webhooks_enabled = Webhook.objects.filter(enabled=True) self.assertEqual(len(webhooks_enabled), 1) @@ -2093,6 +2108,8 @@ def test_send_oa_search_alert_webhooks(self, mock_abort_audio): stt_transcript=transcript, ) + # Send RT alerts + call_command("cl_send_rt_percolator_alerts", testing_mode=True) self.assertEqual(len(mail.outbox), 3, msg="Wrong number of emails.") text_content = mail.outbox[2].body @@ -2142,6 +2159,9 @@ def test_send_alert_on_document_creation(self, mock_abort_audio): docket__docket_number="19-5735", ) + # Send RT alerts + call_command("cl_send_rt_percolator_alerts", testing_mode=True) + # Two OA search alert emails should be sent, one for user_profile and # one for user_profile_2 self.assertEqual(len(mail.outbox), 2) @@ -2166,6 +2186,8 @@ def test_send_alert_on_document_creation(self, mock_abort_audio): rt_oral_argument.sha1 = "12345" rt_oral_argument.save() + # Send RT alerts + call_command("cl_send_rt_percolator_alerts", testing_mode=True) # New alerts shouldn't be sent. Since document was just updated. 
self.assertEqual(len(mail.outbox), 2) text_content = mail.outbox[0].body @@ -2407,6 +2429,19 @@ def test_send_alert_multiple_alert_rates(self, mock_abort_audio): ) def test_group_alerts_and_hits(self, mock_logger, mock_abort_audio): """""" + + rt_oa_search_alert = AlertFactory( + user=self.user_profile.user, + rate=Alert.REAL_TIME, + name="Test RT Alert OA", + query="q=docketNumber:19-5739 OR docketNumber:19-5740&type=oa", + ) + rt_oa_search_alert_2 = AlertFactory( + user=self.user_profile.user, + rate=Alert.REAL_TIME, + name="Test RT Alert OA 2", + query="q=docketNumber:19-5741&type=oa", + ) with mock.patch( "cl.api.webhooks.requests.post", side_effect=lambda *args, **kwargs: MockResponse( @@ -2435,20 +2470,58 @@ def test_group_alerts_and_hits(self, mock_logger, mock_abort_audio): docket__docket_number="19-5741", ) - # No emails should be sent in RT, since all the alerts triggered by the - # OA documents added are not RT. - self.assertEqual(len(mail.outbox), 0) + # Send RT alerts + call_command("cl_send_rt_percolator_alerts", testing_mode=True) + + # 1 email should be sent for the rt_oa_search_alert and rt_oa_search_alert_2 + self.assertEqual( + len(mail.outbox), 1, msg="Wrong number of emails sent." + ) + + # The OA RT alert email should contain 2 alerts, one for rt_oa_search_alert + # and one for rt_oa_search_alert_2. First alert should contain 2 hits. + # Second alert should contain only 1 hit. + + # Assert text version. + text_content = mail.outbox[0].body + self.assertIn(rt_oral_argument_1.case_name, text_content) + self.assertIn(rt_oral_argument_2.case_name, text_content) + self.assertIn(rt_oral_argument_3.case_name, text_content) + + # Assert html version. + html_content = self.get_html_content_from_email(mail.outbox[0]) + self._confirm_number_of_alerts(html_content, 2) + self._count_alert_hits_and_child_hits( + html_content, + rt_oa_search_alert.name, + 2, + rt_oral_argument_1.case_name, + 0, + ) + self._count_alert_hits_and_child_hits( + html_content, + rt_oa_search_alert.name, + 2, + rt_oral_argument_2.case_name, + 0, + ) + self._count_alert_hits_and_child_hits( + html_content, + rt_oa_search_alert_2.name, + 1, + rt_oral_argument_3.case_name, + 0, + ) - # 7 webhook events should be triggered in RT: - # rt_oral_argument_1 should trigger 3: search_alert_3, search_alert_5 - # and search_alert_6. - # rt_oral_argument_2 should trigger 3: search_alert_3, search_alert_5 - # and search_alert_6. - # rt_oral_argument_3 should trigger 1: search_alert_4 - # One webhook event should be sent to user_profile + # 10 webhook events should be triggered in RT: + # rt_oral_argument_1 should trigger 4: search_alert_3, search_alert_5, + # search_alert_6 and rt_oa_search_alert. + # rt_oral_argument_2 should trigger 4: search_alert_3, search_alert_5, + # search_alert_6 and rt_oa_search_alert. + # rt_oral_argument_3 should trigger 2: search_alert_4 and rt_oa_search_alert. webhook_events = WebhookEvent.objects.all() self.assertEqual( - len(webhook_events), 7, msg="Unexpected number of" "webhooks sent." + len(webhook_events), 10, msg="Unexpected number of webhooks sent." 
) # 7 webhook event should be sent to user_profile for 4 different @@ -2459,6 +2532,8 @@ def test_group_alerts_and_hits(self, mock_logger, mock_abort_audio): self.search_alert_4.pk, self.search_alert_5.pk, self.search_alert_6.pk, + rt_oa_search_alert.pk, + rt_oa_search_alert_2.pk, ] for webhook_content in webhook_events: content = webhook_content.content["payload"] @@ -2478,8 +2553,10 @@ def test_group_alerts_and_hits(self, mock_logger, mock_abort_audio): # One OA search alert email should be sent. mock_logger.info.assert_called_with("Sent 1 dly email alerts.") - self.assertEqual(len(mail.outbox), 1) - text_content = mail.outbox[0].body + self.assertEqual( + len(mail.outbox), 2, msg="Wrong number of emails sent." + ) + text_content = mail.outbox[1].body # The right alert type template is used. self.assertIn("oral argument", text_content) @@ -2494,25 +2571,25 @@ def test_group_alerts_and_hits(self, mock_logger, mock_abort_audio): self.assertIn(self.search_alert_4.name, text_content) # Should not include the List-Unsubscribe-Post header. - self.assertIn("List-Unsubscribe", mail.outbox[0].extra_headers) - self.assertNotIn("List-Unsubscribe-Post", mail.outbox[0].extra_headers) + self.assertIn("List-Unsubscribe", mail.outbox[1].extra_headers) + self.assertNotIn("List-Unsubscribe-Post", mail.outbox[1].extra_headers) alert_list_url = reverse("disable_alert_list") self.assertIn( alert_list_url, - mail.outbox[0].extra_headers["List-Unsubscribe"], + mail.outbox[1].extra_headers["List-Unsubscribe"], ) self.assertIn( f"keys={self.search_alert_3.secret_key}", - mail.outbox[0].extra_headers["List-Unsubscribe"], + mail.outbox[1].extra_headers["List-Unsubscribe"], ) self.assertIn( f"keys={self.search_alert_4.secret_key}", - mail.outbox[0].extra_headers["List-Unsubscribe"], + mail.outbox[1].extra_headers["List-Unsubscribe"], ) # Extract HTML version. html_content = None - for content, content_type in mail.outbox[0].alternatives: + for content, content_type in mail.outbox[1].alternatives: if content_type == "text/html": html_content = content break @@ -2524,6 +2601,9 @@ def test_group_alerts_and_hits(self, mock_logger, mock_abort_audio): rt_oral_argument_2.delete() rt_oral_argument_3.delete() + # Remove test instances. + rt_oa_search_alert.delete() + @override_settings(ELASTICSEARCH_PAGINATION_BATCH_SIZE=5) def test_send_multiple_rt_alerts(self, mock_abort_audio): """Confirm all RT alerts are properly sent if the percolator response @@ -2586,6 +2666,9 @@ def test_send_multiple_rt_alerts(self, mock_abort_audio): docket__docket_number="19-5735", ) + # Send RT alerts + call_command("cl_send_rt_percolator_alerts", testing_mode=True) + # 11 OA search alert emails should be sent, one for each user that # had donated enough. self.assertEqual(len(mail.outbox), 11) From 8f2be67ea235181da028d005fe2287f93cfe0e60 Mon Sep 17 00:00:00 2001 From: v_anne <69829523+v-anne@users.noreply.github.com> Date: Fri, 10 Jan 2025 14:58:16 -0500 Subject: [PATCH 02/34] pluralizing prayer pages --- cl/favorites/templates/top_prayers.html | 4 ++-- cl/favorites/templates/user_prayers.html | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/cl/favorites/templates/top_prayers.html b/cl/favorites/templates/top_prayers.html index f19935dd8c..690e555619 100644 --- a/cl/favorites/templates/top_prayers.html +++ b/cl/favorites/templates/top_prayers.html @@ -14,8 +14,8 @@ {% block content %}
 Community's Most Wanted PACER Documents
-  {{ granted_stats.prayer_count|intcomma }} prayers granted totaling ${{ granted_stats.total_cost }}.
-  {{ waiting_stats.prayer_count|intcomma }} prayers pending totaling at least ${{ waiting_stats.total_cost }}.
+  {{ granted_stats.prayer_count|intcomma }} {{ granted_stats.prayer_count|pluralize:"prayer,prayers" }} granted totaling ${{ granted_stats.total_cost }}.
+  {{ waiting_stats.prayer_count|intcomma }} {{ waiting_stats.prayer_count|pluralize:"prayer,prayers" }} pending totaling at least ${{ waiting_stats.total_cost }}.
diff --git a/cl/favorites/templates/user_prayers.html b/cl/favorites/templates/user_prayers.html
index f010632d16..52c2a7e5cf 100644
--- a/cl/favorites/templates/user_prayers.html
+++ b/cl/favorites/templates/user_prayers.html
@@ -14,7 +14,7 @@ {% block content %}
 {% if is_page_owner %}Your PACER Document Prayers{% else %}PACER Document Requests for: {{ requested_user }}{% endif %}
-{% if is_page_owner %}{{ count|intcomma }} prayers granted totaling ${{total_cost|floatformat:2 }} ({{ num_remaining }} remaining today).{% endif %}
+{% if is_page_owner %}{{ count|intcomma }} {{ count|pluralize:"prayer,prayers" }} granted totaling ${{total_cost|floatformat:2 }} ({{ num_remaining }} remaining today).{% endif %}
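For context on the filter usage above (not part of this patch): a minimal standalone sketch of how Django's pluralize filter behaves with the "singular,plural" argument; the settings.configure() bootstrapping is an assumption for illustration only, not how CourtListener runs.

# Sketch, assuming a bare script outside the project settings.
import django
from django.conf import settings
from django.template import Context, Template

settings.configure(
    TEMPLATES=[{"BACKEND": "django.template.backends.django.DjangoTemplates"}]
)
django.setup()

tpl = Template('{{ n }} {{ n|pluralize:"prayer,prayers" }} granted')
print(tpl.render(Context({"n": 1})))  # -> "1 prayer granted"
print(tpl.render(Context({"n": 3})))  # -> "3 prayers granted"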
Date: Sun, 12 Jan 2025 22:03:07 -0500 Subject: [PATCH 03/34] adding prayers to profile dropdown --- cl/assets/templates/base.html | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/cl/assets/templates/base.html b/cl/assets/templates/base.html index 45af5aa5dc..5726b46ae2 100644 --- a/cl/assets/templates/base.html +++ b/cl/assets/templates/base.html @@ -139,14 +139,16 @@
 You did not supply the "private" variable to your template.
   •  Alerts
-  •  Notes
+  •  Notes tabindex="204"
   •  Tags
+  •  Prayers
-  •  Your Support
+  •  Your Support tabindex="207"
-  •  Account
+  •  Account tabindex="208"
  • {% csrf_token %} From 1519ab5cda39357bd90c84666d4324fd83bd4f27 Mon Sep 17 00:00:00 2001 From: Gianfranco Rossi Date: Mon, 13 Jan 2025 10:52:12 -0500 Subject: [PATCH 04/34] refactor(scrapers.tasks.update_from_text): reuse make_citation in update_from_text Solves #4903 - Move make_citation from cl_scrape_opinions into scrapers.utils - Move citation_is_duplicated from cl_back_scrape_citations into scrapers.utils - Delete scraped_citation_object_is_valid, now we rely on eyecite, used by make_citation - Refactor test site to account for changes --- .../commands/cl_back_scrape_citations.py | 49 +----------- .../management/commands/cl_scrape_opinions.py | 35 +-------- .../management/commands/update_from_text.py | 15 ++-- cl/scrapers/tasks.py | 22 +++--- .../test_assets/test_opinion_scraper.py | 4 +- cl/scrapers/tests.py | 21 ------ cl/scrapers/utils.py | 75 ++++++++++++++++++- 7 files changed, 97 insertions(+), 124 deletions(-) diff --git a/cl/scrapers/management/commands/cl_back_scrape_citations.py b/cl/scrapers/management/commands/cl_back_scrape_citations.py index a445df9438..3ab49c9ee9 100644 --- a/cl/scrapers/management/commands/cl_back_scrape_citations.py +++ b/cl/scrapers/management/commands/cl_back_scrape_citations.py @@ -18,8 +18,8 @@ from cl.scrapers.exceptions import BadContentError from cl.scrapers.management.commands import cl_back_scrape_opinions from cl.scrapers.management.commands.cl_scrape_opinions import make_citation -from cl.scrapers.utils import get_binary_content -from cl.search.models import Citation, Court, Opinion +from cl.scrapers.utils import citation_is_duplicated, get_binary_content +from cl.search.models import Court, Opinion class Command(cl_back_scrape_opinions.Command): @@ -92,7 +92,7 @@ def scrape_court( if not citation_candidate: continue - if self.citation_is_duplicated(citation_candidate, cite): + if citation_is_duplicated(citation_candidate, cite): continue try: @@ -106,46 +106,3 @@ def scrape_court( cite, cluster, ) - - def citation_is_duplicated( - self, citation_candidate: Citation, cite: str - ) -> bool: - """Checks if the citation is duplicated for the cluster - - Following corpus_importer.utils.add_citations_to_cluster we - identify 2 types of duplication: - - exact: a citation with the same fields already exists for the cluster - - duplication in the same reporter: the cluster already has a citation - in that reporter - - :param citation_candidate: the citation object - :param cite: citation string - - :return: True if citation is duplicated, False if not - """ - citation_params = {**citation_candidate.__dict__} - citation_params.pop("_state", "") - citation_params.pop("id", "") - cluster_id = citation_candidate.cluster.id - - # Exact duplication - if Citation.objects.filter(**citation_params).exists(): - logger.info( - "Citation '%s' already exists for cluster %s", - cite, - cluster_id, - ) - return True - - # Duplication in the same reporter - if Citation.objects.filter( - cluster_id=cluster_id, reporter=citation_candidate.reporter - ).exists(): - logger.info( - "Another citation in the same reporter '%s' exists for cluster %s", - citation_candidate.reporter, - cluster_id, - ) - return True - - return False diff --git a/cl/scrapers/management/commands/cl_scrape_opinions.py b/cl/scrapers/management/commands/cl_scrape_opinions.py index de6e2f3562..b0500c9a55 100644 --- a/cl/scrapers/management/commands/cl_scrape_opinions.py +++ b/cl/scrapers/management/commands/cl_scrape_opinions.py @@ -3,21 +3,18 @@ import time import traceback from datetime import 
date -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Dict, List, Tuple, Union from asgiref.sync import async_to_sync from django.core.files.base import ContentFile from django.core.management.base import CommandError from django.db import transaction from django.utils.encoding import force_bytes -from eyecite.find import get_citations -from eyecite.tokenizers import HyperscanTokenizer from juriscraper.lib.importer import build_module_list from juriscraper.lib.string_utils import CaseNameTweaker from sentry_sdk import capture_exception from cl.alerts.models import RealTimeQueue -from cl.citations.utils import map_reporter_db_cite_type from cl.lib.command_utils import ScraperCommand, logger from cl.lib.crypto import sha1 from cl.lib.string_utils import trunc @@ -33,6 +30,7 @@ get_binary_content, get_child_court, get_extension, + make_citation, save_response, signal_handler, update_or_create_docket, @@ -47,40 +45,11 @@ OpinionCluster, ) -HYPERSCAN_TOKENIZER = HyperscanTokenizer(cache_dir=".hyperscan") - # for use in catching the SIGINT (Ctrl+4) die_now = False cnt = CaseNameTweaker() -def make_citation( - cite_str: str, cluster: OpinionCluster, court_id: str -) -> Optional[Citation]: - """Create and return a citation object for the input values.""" - citation_objs = get_citations(cite_str, tokenizer=HYPERSCAN_TOKENIZER) - if not citation_objs: - logger.error( - "Could not parse citation from court '%s'", - court_id, - extra=dict( - cite=cite_str, - cluster=cluster, - fingerprint=[f"{court_id}-no-citation-found"], - ), - ) - return None - # Convert the found cite type to a valid cite type for our DB. - cite_type_str = citation_objs[0].all_editions[0].reporter.cite_type - return Citation( - cluster=cluster, - volume=citation_objs[0].groups["volume"], - reporter=citation_objs[0].corrected_reporter(), - page=citation_objs[0].groups["page"], - type=map_reporter_db_cite_type(cite_type_str), - ) - - @transaction.atomic def make_objects( item: Dict[str, Union[str, Any]], diff --git a/cl/scrapers/management/commands/update_from_text.py b/cl/scrapers/management/commands/update_from_text.py index ee093d9e01..5c7b942ed9 100644 --- a/cl/scrapers/management/commands/update_from_text.py +++ b/cl/scrapers/management/commands/update_from_text.py @@ -83,16 +83,11 @@ def rerun_extract_from_text( logger.debug("Opinion updated with data %s", changes["Opinion"]) stats["Opinion"] += 1 - if changes.get("Citation"): - if changes["Citation"].get("citation_created"): - logger.info( - "Citation created with data %s", changes["Citation"] - ) - stats["Citation"] += 1 - else: - logger.debug( - "Citation not created. Data %s", changes["Citation"] - ) + if changes.get("citation_created"): + logger.info("Citation created with data %s", changes["Citation"]) + stats["Citation"] += 1 + else: + logger.debug("Citation not created. 
Data %s", changes["Citation"]) class Command(ScraperCommand): diff --git a/cl/scrapers/tasks.py b/cl/scrapers/tasks.py index d94ca48713..da488330fb 100644 --- a/cl/scrapers/tasks.py +++ b/cl/scrapers/tasks.py @@ -6,7 +6,6 @@ import httpx import requests from asgiref.sync import async_to_sync -from django.apps import apps from django.conf import settings from django.core.files.base import ContentFile from httpx import Response @@ -30,7 +29,7 @@ from cl.lib.string_utils import trunc from cl.lib.utils import is_iter from cl.recap.mergers import save_iquery_to_docket -from cl.scrapers.utils import scraped_citation_object_is_valid +from cl.scrapers.utils import citation_is_duplicated, make_citation from cl.search.models import Docket, Opinion, RECAPDocument logger = logging.getLogger(__name__) @@ -50,31 +49,32 @@ def update_document_from_text( text. Formerly implemented in only Tax Court, but functional in all scrapers via AbstractSite object. - Note that this updates the values but does not save them. Saving is left to - the calling function. + Note that this updates the values but does not save them for + Docket, OpinionCluster and Opinion. Saving is left to + the calling function. It does save Citations :param opinion: Opinion object :param juriscraper_module: full module to get Site object :return: the extracted data dictionary """ - court = opinion.cluster.docket.court.pk - site = get_scraper_object_by_name(court, juriscraper_module) + court_id = opinion.cluster.docket.court.pk + site = get_scraper_object_by_name(court_id, juriscraper_module) if site is None: logger.debug("No site found %s", juriscraper_module) return {} metadata_dict = site.extract_from_text(opinion.plain_text or opinion.html) for model_name, data in metadata_dict.items(): - ModelClass = apps.get_model(f"search.{model_name}") if model_name == "Docket": opinion.cluster.docket.__dict__.update(data) elif model_name == "OpinionCluster": opinion.cluster.__dict__.update(data) elif model_name == "Citation": - data["cluster_id"] = opinion.cluster_id - if scraped_citation_object_is_valid(data): - _, citation_created = ModelClass.objects.get_or_create(**data) - metadata_dict["Citation"]["created"] = citation_created + citation = make_citation(data, opinion.cluster, court_id) + if not citation or citation_is_duplicated(citation, data): + continue + citation.save() + metadata_dict["citation_created"] = True elif model_name == "Opinion": opinion.__dict__.update(data) else: diff --git a/cl/scrapers/test_assets/test_opinion_scraper.py b/cl/scrapers/test_assets/test_opinion_scraper.py index 18a28d71de..483fe66158 100644 --- a/cl/scrapers/test_assets/test_opinion_scraper.py +++ b/cl/scrapers/test_assets/test_opinion_scraper.py @@ -59,7 +59,7 @@ def extract_from_text(self, scraped_text): metadata = {} docket_regex = r"Docket Number: (?P\d+-\d+)" disposition_regex = r"Disposition: (?P\w+)" - citation_regex = r"(?P20\d{2}) (?PVT) (?P\d+)" + citation_regex = r"20\d{2} VT \d+" if docket_match := re.search(docket_regex, scraped_text): metadata["Docket"] = { "docket_number": docket_match.group("docket") @@ -71,6 +71,6 @@ def extract_from_text(self, scraped_text): } if citation_match := re.search(citation_regex, scraped_text): - metadata["Citation"] = {**citation_match.groupdict(), "type": 8} + metadata["Citation"] = citation_match.group(0) return metadata diff --git a/cl/scrapers/tests.py b/cl/scrapers/tests.py index 9db8b8a48f..afd939c9d7 100644 --- a/cl/scrapers/tests.py +++ b/cl/scrapers/tests.py @@ -40,7 +40,6 @@ get_binary_content, 
get_existing_docket, get_extension, - scraped_citation_object_is_valid, update_or_create_docket, ) from cl.search.factories import ( @@ -994,26 +993,6 @@ def test_inputs(self): "Unpublished docket should not be modified", ) - def test_scraped_citation_object_is_valid(self): - """Can we validate Citation dicts got from `Site.extract_from_text`""" - bad_type = {"reporter": "WI", "type": Citation.FEDERAL} - self.assertFalse( - scraped_citation_object_is_valid(bad_type), - "Citation should be marked as invalid. Type does not match reporter", - ) - - bad_reporter = {"reporter": "Some text"} - self.assertFalse( - scraped_citation_object_is_valid(bad_reporter), - "Citation should be marked as invalid. Reporter does not exist", - ) - - valid_citation = {"reporter": "WI", "type": Citation.NEUTRAL} - self.assertTrue( - scraped_citation_object_is_valid(valid_citation), - "Citation object should be marked as valid", - ) - class CommandInputTest(TestCase): def test_get_module_by_court_id(self): diff --git a/cl/scrapers/utils.py b/cl/scrapers/utils.py index 7f7d6123ee..5ca5d08262 100644 --- a/cl/scrapers/utils.py +++ b/cl/scrapers/utils.py @@ -11,6 +11,8 @@ from django.conf import settings from django.core.files.base import ContentFile from django.db.models import Q +from eyecite.find import get_citations +from eyecite.tokenizers import HyperscanTokenizer from juriscraper import AbstractSite from juriscraper.AbstractSite import logger from juriscraper.lib.test_utils import MockRequest @@ -29,7 +31,78 @@ NoDownloadUrlError, UnexpectedContentTypeError, ) -from cl.search.models import Court, Docket +from cl.search.models import Citation, Court, Docket, OpinionCluster + +HYPERSCAN_TOKENIZER = HyperscanTokenizer(cache_dir=".hyperscan") + + +def make_citation( + cite_str: str, cluster: OpinionCluster, court_id: str +) -> Optional[Citation]: + """Create and return a citation object for the input values.""" + citation_objs = get_citations(cite_str, tokenizer=HYPERSCAN_TOKENIZER) + if not citation_objs: + logger.error( + "Could not parse citation from court '%s'", + court_id, + extra=dict( + cite=cite_str, + cluster=cluster, + fingerprint=[f"{court_id}-no-citation-found"], + ), + ) + return None + # Convert the found cite type to a valid cite type for our DB. 
+ cite_type_str = citation_objs[0].all_editions[0].reporter.cite_type + return Citation( + cluster=cluster, + volume=citation_objs[0].groups["volume"], + reporter=citation_objs[0].corrected_reporter(), + page=citation_objs[0].groups["page"], + type=map_reporter_db_cite_type(cite_type_str), + ) + + +def citation_is_duplicated(citation_candidate: Citation, cite: str) -> bool: + """Checks if the citation is duplicated for the cluster + + Following corpus_importer.utils.add_citations_to_cluster we + identify 2 types of duplication: + - exact: a citation with the same fields already exists for the cluster + - duplication in the same reporter: the cluster already has a citation + in that reporter + + :param citation_candidate: the citation object + :param cite: citation string + + :return: True if citation is duplicated, False if not + """ + citation_params = {**citation_candidate.__dict__} + citation_params.pop("_state", "") + citation_params.pop("id", "") + cluster_id = citation_candidate.cluster.id + + # Exact duplication + if Citation.objects.filter(**citation_params).exists(): + logger.info( + "Citation '%s' already exists for cluster %s", + cite, + cluster_id, + ) + return True + + # Duplication in the same reporter + if Citation.objects.filter( + cluster_id=cluster_id, reporter=citation_candidate.reporter + ).exists(): + logger.info( + "Another citation in the same reporter '%s' exists for cluster %s", + citation_candidate.reporter, + cluster_id, + ) + return True + + return False def get_child_court(child_court_name: str, court_id: str) -> Optional[Court]: From b08ce10cb40939682ceb27013072b3eafff4b71e Mon Sep 17 00:00:00 2001 From: Alberto Islas Date: Mon, 13 Jan 2025 14:36:06 -0600 Subject: [PATCH 05/34] fix(alerts): Solved merge conflicts and fixed related tests --- cl/alerts/tasks.py | 4 ---- cl/alerts/tests/tests.py | 12 +++++++++++- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/cl/alerts/tasks.py b/cl/alerts/tasks.py index 236763efe8..c8f626227d 100644 --- a/cl/alerts/tasks.py +++ b/cl/alerts/tasks.py @@ -649,8 +649,6 @@ def percolator_response_processing(response: SendAlertsResponse) -> None: return None scheduled_hits_to_create = [] - email_alerts_to_send = [] - rt_alerts_to_send = [] main_alerts_triggered = response.main_alerts_triggered rd_alerts_triggered = response.rd_alerts_triggered @@ -661,7 +659,6 @@ def percolator_response_processing(response: SendAlertsResponse) -> None: instance_content_type = ContentType.objects.get( app_label=app_label_str, model=model_str.lower() ) - schedule_alert = False r = get_redis_interface("CACHE") recap_document_hits = [hit.id for hit in rd_alerts_triggered] docket_hits = [hit.id for hit in d_alerts_triggered] @@ -691,7 +688,6 @@ def percolator_response_processing(response: SendAlertsResponse) -> None: transform_percolator_child_document( document_content_copy, hit.meta ) - schedule_alert = True add_document_hit_to_alert_set( r, alert_triggered.pk, "r", document_content_copy["id"] ) diff --git a/cl/alerts/tests/tests.py b/cl/alerts/tests/tests.py index 513556fb1d..bd5d26f0f6 100644 --- a/cl/alerts/tests/tests.py +++ b/cl/alerts/tests/tests.py @@ -2033,7 +2033,7 @@ def test_get_docket_notes_and_tags_by_user(self) -> None: "cl.lib.es_signal_processor.allow_es_audio_indexing", side_effect=lambda x, y: True, ) -class SearchAlertsOAESTests(ESIndexTestCase, TestCase, RECAPAlertsAssertions): +class SearchAlertsOAESTests(ESIndexTestCase, TestCase, SearchAlertsAssertions): """Test ES Search Alerts""" @classmethod @@ -2639,12 +2639,14 
@@ def test_group_alerts_and_hits(self, mock_logger, mock_abort_audio): rate=Alert.REAL_TIME, name="Test RT Alert OA", query="q=docketNumber:19-5739 OR docketNumber:19-5740&type=oa", + alert_type=SEARCH_TYPES.ORAL_ARGUMENT, ) rt_oa_search_alert_2 = AlertFactory( user=self.user_profile.user, rate=Alert.REAL_TIME, name="Test RT Alert OA 2", query="q=docketNumber:19-5741&type=oa", + alert_type=SEARCH_TYPES.ORAL_ARGUMENT, ) with mock.patch( "cl.api.webhooks.requests.post", @@ -2805,6 +2807,14 @@ def test_group_alerts_and_hits(self, mock_logger, mock_abort_audio): rt_oral_argument_2.delete() rt_oral_argument_3.delete() + # Confirm Stat object is properly created and updated. + stats_objects = Stat.objects.all() + self.assertEqual(stats_objects.count(), 2) + stat_names = set([stat.name for stat in stats_objects]) + self.assertEqual(stat_names, {"alerts.sent.rt", "alerts.sent.dly"}) + self.assertEqual(stats_objects[0].count, 1) + self.assertEqual(stats_objects[1].count, 1) + # Remove test instances. rt_oa_search_alert.delete() From 160de0651106d69921d4b302def5fdac0a3f6bea Mon Sep 17 00:00:00 2001 From: Gianfranco Rossi Date: Mon, 13 Jan 2025 19:49:50 -0500 Subject: [PATCH 06/34] fix(scrapers.update_from_text): now extracts document content if it was not present Also, fix dictionary modification while iterating on update_document_from_text --- cl/scrapers/management/commands/update_from_text.py | 11 +++++++++-- cl/scrapers/tasks.py | 6 +++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/cl/scrapers/management/commands/update_from_text.py b/cl/scrapers/management/commands/update_from_text.py index 5c7b942ed9..c498a36b71 100644 --- a/cl/scrapers/management/commands/update_from_text.py +++ b/cl/scrapers/management/commands/update_from_text.py @@ -4,7 +4,7 @@ from django.db import transaction from cl.lib.command_utils import ScraperCommand, logger -from cl.scrapers.tasks import update_document_from_text +from cl.scrapers.tasks import extract_doc_content, update_document_from_text from cl.search.models import ( PRECEDENTIAL_STATUS, SOURCES, @@ -33,10 +33,17 @@ def rerun_extract_from_text( # May be an opinion entirely from a merged corpus # or an error during text extraction logger.info( - "Opinion %s has no `plain_text` or `html` to extract from", + "Opinion %s has no `plain_text` or `html`" + "to extract from. Executing extraction", opinion.id, ) stats["No text to extract from"] += 1 + extract_doc_content( + pk=opinion.pk, + ocr_available=True, + citation_jitter=True, + juriscraper_module=juriscraper_module, + ) return with transaction.atomic(): diff --git a/cl/scrapers/tasks.py b/cl/scrapers/tasks.py index da488330fb..9b61bc9510 100644 --- a/cl/scrapers/tasks.py +++ b/cl/scrapers/tasks.py @@ -63,6 +63,7 @@ def update_document_from_text( logger.debug("No site found %s", juriscraper_module) return {} + citation_created = False metadata_dict = site.extract_from_text(opinion.plain_text or opinion.html) for model_name, data in metadata_dict.items(): if model_name == "Docket": @@ -74,7 +75,7 @@ def update_document_from_text( if not citation or citation_is_duplicated(citation, data): continue citation.save() - metadata_dict["citation_created"] = True + citation_created = True elif model_name == "Opinion": opinion.__dict__.update(data) else: @@ -82,6 +83,9 @@ def update_document_from_text( f"Object type of {model_name} not yet supported." 
) + # if the candidate citation was saved successfully, it will have an id + metadata_dict["citation_created"] = citation_created + return metadata_dict From 11d4d314b5fddde7595b95fa64383e85151cbb12 Mon Sep 17 00:00:00 2001 From: Gianfranco Rossi Date: Mon, 13 Jan 2025 20:04:03 -0500 Subject: [PATCH 07/34] fix(update_from_text): add elif condition on stat count --- cl/scrapers/management/commands/update_from_text.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cl/scrapers/management/commands/update_from_text.py b/cl/scrapers/management/commands/update_from_text.py index c498a36b71..adc7bb77da 100644 --- a/cl/scrapers/management/commands/update_from_text.py +++ b/cl/scrapers/management/commands/update_from_text.py @@ -93,7 +93,7 @@ def rerun_extract_from_text( if changes.get("citation_created"): logger.info("Citation created with data %s", changes["Citation"]) stats["Citation"] += 1 - else: + elif changes.get("Citation"): logger.debug("Citation not created. Data %s", changes["Citation"]) From 3ba53b49efdda4674710ea1b44d794a13bd2f78c Mon Sep 17 00:00:00 2001 From: Eduardo Rosendo Date: Tue, 14 Jan 2025 00:33:13 -0400 Subject: [PATCH 08/34] feat(court): Adds is_appellate_court function This commit introduces a new function, is_appellate_court(), which determines whether a given court ID belongs to an appellate court. This function is useful for filtering and categorizing courts based on their jurisdiction. --- cl/corpus_importer/utils.py | 14 +++++++++++++- cl/recap/api_serializers.py | 4 ++-- cl/recap/mergers.py | 15 +++++---------- cl/recap/tasks.py | 5 ++--- 4 files changed, 22 insertions(+), 16 deletions(-) diff --git a/cl/corpus_importer/utils.py b/cl/corpus_importer/utils.py index efcab347a5..40fa0265a0 100644 --- a/cl/corpus_importer/utils.py +++ b/cl/corpus_importer/utils.py @@ -26,7 +26,7 @@ lookup_judges_by_last_name_list, ) from cl.people_db.models import Person -from cl.search.models import Citation, Docket, Opinion, OpinionCluster +from cl.search.models import Citation, Court, Docket, Opinion, OpinionCluster HYPERSCAN_TOKENIZER = HyperscanTokenizer(cache_dir=".hyperscan") @@ -107,6 +107,18 @@ async def mark_ia_upload_needed(d: Docket, save_docket: bool) -> None: await d.asave() +def is_appellate_court(court_id: str) -> bool: + """Checks if the given court_id belongs to an appellate court. + + :param court_id: The unique identifier of the court. + + :return: True if the court_id corresponds to an appellate court, + False otherwise. + """ + appellate_court_ids = Court.federal_courts.appellate_pacer_courts() + return appellate_court_ids.filter(pk=court_id).exists() + + def get_start_of_quarter(d: Optional[date] = None) -> date: """Get the start date of the calendar quarter requested diff --git a/cl/recap/api_serializers.py b/cl/recap/api_serializers.py index 48fc52ef66..7bea992ba0 100644 --- a/cl/recap/api_serializers.py +++ b/cl/recap/api_serializers.py @@ -2,6 +2,7 @@ from rest_framework import serializers from rest_framework.exceptions import ValidationError +from cl.corpus_importer.utils import is_appellate_court from cl.lib.pacer_session import get_or_cache_pacer_cookies from cl.recap.models import ( REQUEST_TYPE, @@ -125,8 +126,7 @@ def validate(self, attrs): UPLOAD_TYPE.APPELLATE_CASE_QUERY_RESULT_PAGE, ]: # Appellate court dockets. Is the court valid? 
- appellate_court_ids = Court.federal_courts.appellate_pacer_courts() - if not appellate_court_ids.filter(pk=attrs["court"].pk).exists(): + if not is_appellate_court(attrs["court"].pk): raise ValidationError( "%s is not an appellate court ID. Did you mean to use the " "upload_type for district dockets?" % attrs["court"] diff --git a/cl/recap/mergers.py b/cl/recap/mergers.py index 95fd75cc98..6a1622810e 100644 --- a/cl/recap/mergers.py +++ b/cl/recap/mergers.py @@ -14,7 +14,7 @@ from juriscraper.lib.string_utils import CaseNameTweaker from juriscraper.pacer import AppellateAttachmentPage, AttachmentPage -from cl.corpus_importer.utils import mark_ia_upload_needed +from cl.corpus_importer.utils import is_appellate_court, mark_ia_upload_needed from cl.lib.decorators import retry from cl.lib.filesizes import convert_size_to_bytes from cl.lib.model_helpers import clean_docket_number, make_docket_number_core @@ -932,8 +932,6 @@ async def add_docket_entries( else: params["document_type"] = RECAPDocument.PACER_DOCUMENT - appellate_court_ids = Court.federal_courts.appellate_pacer_courts() - # Unlike district and bankr. dockets, where you always have a main # RD and can optionally have attachments to the main RD, Appellate # docket entries can either they *only* have a main RD (with no @@ -945,9 +943,9 @@ async def add_docket_entries( # RDs. The check here ensures that if that happens for a particular # entry, we avoid creating the main RD a second+ time when we get the # docket sheet a second+ time. - appelate_court_id_exists = await appellate_court_ids.filter( - pk=d.court_id - ).aexists() + appelate_court_id_exists = await sync_to_async(is_appellate_court)( + d.court_id + ) if de_created is False and appelate_court_id_exists: appellate_rd_att_exists = await de.recap_documents.filter( document_type=RECAPDocument.ATTACHMENT @@ -1789,10 +1787,7 @@ async def merge_attachment_page_data( ContentFile(text.encode()), ) - appellate_court_ids = Court.federal_courts.appellate_pacer_courts() - court_is_appellate = await appellate_court_ids.filter( - pk=court.pk - ).aexists() + court_is_appellate = await sync_to_async(is_appellate_court)(court.pk) main_rd_to_att = False for attachment in attachment_dicts: sanity_checks = [ diff --git a/cl/recap/tasks.py b/cl/recap/tasks.py index ee674a9f25..2ef09aa00b 100644 --- a/cl/recap/tasks.py +++ b/cl/recap/tasks.py @@ -54,7 +54,7 @@ make_attachment_pq_object, update_rd_metadata, ) -from cl.corpus_importer.utils import mark_ia_upload_needed +from cl.corpus_importer.utils import is_appellate_court, mark_ia_upload_needed from cl.custom_filters.templatetags.text_filters import oxford_join from cl.lib.filesizes import convert_size_to_bytes from cl.lib.microservice_utils import microservice @@ -763,8 +763,7 @@ async def find_subdocket_pdf_rds( pq.pk ] # Add the original pq to the list of pqs to process - appellate_court_ids = Court.federal_courts.appellate_pacer_courts() - if await appellate_court_ids.filter(pk=pq.court_id).aexists(): + if await sync_to_async(is_appellate_court)(pq.court_id): # Abort the process for appellate documents. Subdockets cannot be found # in appellate cases. 
return pqs_to_process_pks From 18b6b6330c02c602b811a3b0d53af6ef64d14e5c Mon Sep 17 00:00:00 2001 From: Eduardo Rosendo Date: Tue, 14 Jan 2025 00:43:20 -0400 Subject: [PATCH 09/34] feat(recap): Enables appellate attachment page purchases --- cl/corpus_importer/tasks.py | 6 ++++++ cl/recap/tasks.py | 13 +++++++++++-- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/cl/corpus_importer/tasks.py b/cl/corpus_importer/tasks.py index cf242656fa..3d3d7e8cbe 100644 --- a/cl/corpus_importer/tasks.py +++ b/cl/corpus_importer/tasks.py @@ -32,6 +32,7 @@ from juriscraper.lib.exceptions import PacerLoginException, ParsingException from juriscraper.lib.string_utils import CaseNameTweaker, harmonize from juriscraper.pacer import ( + AppellateAttachmentPage, AppellateDocketReport, AttachmentPage, CaseQuery, @@ -65,6 +66,7 @@ compute_binary_probe_jitter, compute_blocked_court_wait, compute_next_binary_probe, + is_appellate_court, make_iquery_probing_key, mark_ia_upload_needed, ) @@ -1794,6 +1796,10 @@ def get_att_report_by_rd( pacer_court_id = map_cl_to_pacer_id(rd.docket_entry.docket.court_id) att_report = AttachmentPage(pacer_court_id, s) att_report.query(rd.pacer_doc_id) + if is_appellate_court(pacer_court_id): + att_report = AppellateAttachmentPage(pacer_court_id, s) + else: + att_report = AttachmentPage(pacer_court_id, s) return att_report diff --git a/cl/recap/tasks.py b/cl/recap/tasks.py index 2ef09aa00b..8132acca43 100644 --- a/cl/recap/tasks.py +++ b/cl/recap/tasks.py @@ -2035,7 +2035,15 @@ def fetch_attachment_page(self: Task, fq_pk: int) -> None: raise self.retry(exc=exc) text = r.response.text - att_data = get_data_from_att_report(text, rd.docket_entry.docket.court_id) + is_appellate = is_appellate_court(rd.docket_entry.docket.court_id) + # Determine the appropriate parser function based on court jurisdiction + # (appellate or district) + att_data_parser = ( + get_data_from_appellate_att_report + if is_appellate + else get_data_from_att_report + ) + att_data = att_data_parser(text, rd.docket_entry.docket.court_id) if att_data == {}: msg = "Not a valid attachment page upload" @@ -2047,7 +2055,8 @@ def fetch_attachment_page(self: Task, fq_pk: int) -> None: rd.docket_entry.docket.court, rd.docket_entry.docket.pacer_case_id, att_data["pacer_doc_id"], - att_data["document_number"], + # Appellate attachments don't contain a document_number + None if is_appellate else att_data["document_number"], text, att_data["attachments"], ) From f5c82866b2fd5883b902920f6f2cb379bd6e42e8 Mon Sep 17 00:00:00 2001 From: Eduardo Rosendo Date: Tue, 14 Jan 2025 02:20:56 -0400 Subject: [PATCH 10/34] tests(recap): Fix failing tests by mocking return values --- cl/recap/tests.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/cl/recap/tests.py b/cl/recap/tests.py index 5013ff062c..8245e8f14f 100644 --- a/cl/recap/tests.py +++ b/cl/recap/tests.py @@ -1800,8 +1800,16 @@ def test_fetch_att_page_no_cookies(self, mock_court_accessible) -> None: @mock.patch( "cl.recap.mergers.AttachmentPage", new=fakes.FakeAttachmentPage ) + @mock.patch( + "cl.corpus_importer.tasks.is_appellate_court", return_value=False + ) + @mock.patch("cl.recap.tasks.is_appellate_court", return_value=False) def test_fetch_att_page_all_systems_go( - self, mock_get_cookies, mock_court_accessible + self, + check_court_task, + check_court_parser, + mock_get_cookies, + mock_court_accessible, ): result = do_pacer_fetch(self.fq) result.get() From ac44cc85cce4170d5f2b2d56aa047c6d5389eb7b Mon Sep 17 00:00:00 2001 From: grossir 
<14970769+grossir@users.noreply.github.com> Date: Tue, 14 Jan 2025 17:14:24 +0000 Subject: [PATCH 11/34] Update freelawproject dependencies --- poetry.lock | 257 +++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 252 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index ed51b6c17c..e54b8d7865 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. [[package]] name = "ada-url" @@ -6,6 +6,7 @@ version = "1.15.3" description = "URL parser and manipulator based on the WHAT WG URL standard" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "ada_url-1.15.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:486ed6775faaf915efb82e4dea9224d388ca743aa572996240ffda20e19dd769"}, {file = "ada_url-1.15.3-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:cf0facdc4e66cadafdfb7ccb914e03aae2571dd8f70a28531a60019d8888641b"}, @@ -74,6 +75,7 @@ version = "5.2.0" description = "Low-level AMQP client for Python (fork of amqplib)." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, @@ -88,6 +90,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -99,6 +102,7 @@ version = "4.6.2.post1" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, @@ -119,6 +123,7 @@ version = "1.4.0" description = "Python command-line parsing library" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "argparse-1.4.0-py2.py3-none-any.whl", hash = "sha256:c31647edb69fd3d465a847ea3157d37bed1f95f19760b11a47aa91c04b666314"}, {file = "argparse-1.4.0.tar.gz", hash = "sha256:62b089a55be1d8949cd2bc7e0df0bddb9e028faefc8c32038cc84862aefdd6e4"}, @@ -130,6 +135,7 @@ version = "3.8.1" description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, @@ -144,6 +150,7 @@ version = "0.8.1" description = "Read/rewrite/write Python ASTs" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["dev"] files = [ {file = "astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5"}, {file = "astor-0.8.1.tar.gz", 
hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e"}, @@ -155,6 +162,7 @@ version = "3.3.5" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "astroid-3.3.5-py3-none-any.whl", hash = "sha256:a9d1c946ada25098d790e079ba2a1b112157278f3fb7e718ae6a9252f5835dc8"}, {file = "astroid-3.3.5.tar.gz", hash = "sha256:5cfc40ae9f68311075d27ef68a4841bdc5cc7f6cf86671b49f00607d30188e2d"}, @@ -166,6 +174,7 @@ version = "2.4.1" description = "Annotate AST trees with source code positions" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, @@ -184,6 +193,7 @@ version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, @@ -203,6 +213,7 @@ version = "24.4.2" description = "WebSocket client & server library, WAMP real-time framework" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "autobahn-24.4.2-py2.py3-none-any.whl", hash = "sha256:c56a2abe7ac78abbfb778c02892d673a4de58fd004d088cd7ab297db25918e81"}, {file = "autobahn-24.4.2.tar.gz", hash = "sha256:a2d71ef1b0cf780b6d11f8b205fd2c7749765e65795f2ea7d823796642ee92c9"}, @@ -232,6 +243,7 @@ version = "24.8.1" description = "Self-service finite-state machines for the programmer on the go." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "Automat-24.8.1-py3-none-any.whl", hash = "sha256:bf029a7bc3da1e2c24da2343e7598affaa9f10bf0ab63ff808566ce90551e02a"}, {file = "automat-24.8.1.tar.gz", hash = "sha256:b34227cf63f6325b8ad2399ede780675083e439b20c323d376373d8ee6306d88"}, @@ -246,6 +258,7 @@ version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" +groups = ["main"] files = [ {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, @@ -267,6 +280,7 @@ version = "4.2.1" description = "Python multiprocessing fork with improvements and bugfixes" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "billiard-4.2.1-py3-none-any.whl", hash = "sha256:40b59a4ac8806ba2c2369ea98d876bc6108b051c227baffd928c644d15d8f3cb"}, {file = "billiard-4.2.1.tar.gz", hash = "sha256:12b641b0c539073fc8d3f5b8b7be998956665c4233c7c1fcd66a7e677c4fb36f"}, @@ -278,6 +292,7 @@ version = "24.10.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, @@ -322,6 +337,7 @@ version = "1.35.44" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "boto3-1.35.44-py3-none-any.whl", hash = "sha256:18416d07b41e6094101a44f8b881047dcec6b846dad0b9f83b9bbf2f0cd93d07"}, {file = "boto3-1.35.44.tar.gz", hash = "sha256:7f8e8a252458d584d8cf7877c372c4f74ec103356eedf43d2dd9e479f47f3639"}, @@ -341,6 +357,7 @@ version = "1.35.44" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "botocore-1.35.44-py3-none-any.whl", hash = "sha256:55388e80624401d017a9a2b8109afd94814f7e666b53e28fce51375cfa8d9326"}, {file = "botocore-1.35.44.tar.gz", hash = "sha256:1fcd97b966ad8a88de4106fe1bd3bbd6d8dadabe99bbd4a6aadcf11cb6c66b39"}, @@ -360,6 +377,7 @@ version = "5.4.0" description = "Distributed Task Queue." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "celery-5.4.0-py3-none-any.whl", hash = "sha256:369631eb580cf8c51a82721ec538684994f8277637edde2dfc0dacd73ed97f64"}, {file = "celery-5.4.0.tar.gz", hash = "sha256:504a19140e8d3029d5acad88330c541d4c3f64c789d85f94756762d8bca7e706"}, @@ -416,6 +434,7 @@ version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -427,6 +446,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -496,6 +516,7 @@ files = [ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] +markers = {dev = "platform_python_implementation != \"PyPy\""} [package.dependencies] pycparser = "*" @@ -506,6 +527,7 @@ version = "3.4.0" description = "Validate configuration and produce human readable error messages." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -517,6 +539,7 @@ version = "5.2.0" description = "Universal encoding detector for Python 3" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, @@ -528,6 +551,7 @@ version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" +groups = ["main", "dev"] files = [ {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, @@ -642,6 +666,7 @@ version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, @@ -656,6 +681,7 @@ version = "0.3.1" description = "Enables git-like *did-you-mean* feature in click" optional = false python-versions = ">=3.6.2" +groups = ["main"] files = [ {file = "click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c"}, {file = "click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463"}, @@ -670,6 +696,7 @@ version = "1.1.1" description = "An extension module for click to enable registering CLI commands via setuptools entry-points." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, @@ -687,6 +714,7 @@ version = "0.3.0" description = "REPL plugin for Click" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, @@ -705,6 +733,7 @@ version = "0.2.2" description = "Convert images to beautiful ANSI escape codes" optional = false python-versions = ">=3.2" +groups = ["main"] files = [ {file = "climage-0.2.2-py3-none-any.whl", hash = "sha256:0a820c7e9c51aa0f720dd3039247848db3c42ebf62475407ec33442e7919b6ab"}, {file = "climage-0.2.2.tar.gz", hash = "sha256:e6116b2f5f3d313adb0856657efe4a6d119e2218cdef23c28a993d94b871ab8e"}, @@ -720,6 +749,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] +markers = "platform_system == \"Windows\" or sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -731,6 +762,7 @@ version = "23.10.4" description = "Symbolic constants in Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "constantly-23.10.4-py3-none-any.whl", hash = "sha256:3fd9b4d1c3dc1ec9757f3c52aef7e53ad9323dbe39f51dfd4c43853b68dfa3f9"}, {file = "constantly-23.10.4.tar.gz", hash = "sha256:aa92b70a33e2ac0bb33cd745eb61776594dc48764b06c35e0efd050b7f1c7cbd"}, @@ -742,6 +774,7 @@ version = "0.10.25" description = "Database of Courts" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "courts_db-0.10.25-py2.py3-none-any.whl", hash = "sha256:b61783bdee2d7afd549ce767d09b4883d835242f73eab9061e74efc2f19c6f64"}, {file = "courts_db-0.10.25.tar.gz", hash = "sha256:f96b4ef3227e6844b3d2d12970373f2f05cdda66a9f15cbe69527ff66649b9d9"}, @@ -753,6 +786,7 @@ version = "43.0.3" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, @@ -802,6 +836,7 @@ version = "1.2.0" description = "cssselect parses CSS3 Selectors and translates them to XPath 1.0" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "cssselect-1.2.0-py2.py3-none-any.whl", hash = "sha256:da1885f0c10b60c03ed5eccbb6b68d6eff248d91976fcde348f395d54c9fd35e"}, {file = "cssselect-1.2.0.tar.gz", hash = "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc"}, @@ -813,6 +848,7 @@ version = "4.1.2" description = "Django ASGI (HTTP/WebSocket) server" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "daphne-4.1.2-py3-none-any.whl", hash = "sha256:618d1322bb4d875342b99dd2a10da2d9aae7ee3645f765965fdc1e658ea5290a"}, {file = "daphne-4.1.2.tar.gz", hash = "sha256:fcbcace38eb86624ae247c7ffdc8ac12f155d7d19eafac4247381896d6f33761"}, @@ -832,6 +868,7 @@ version = "1.6.5" description = "Probabilistic data structures for processing and searching very large datasets" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "datasketch-1.6.5-py3-none-any.whl", hash = "sha256:59311b2925b2f37536e9f7c2f46bbc25e8e54379c8635a3fa7ca55d2abb66d1b"}, {file = "datasketch-1.6.5.tar.gz", hash = "sha256:ba2848cb74f23d6d3dd444cf24edcbc47b1c34a171b1803231793ed4d74d4fcf"}, @@ -854,6 +891,7 @@ version = "1.2.0" description = "Date parsing library designed to parse dates from HTML pages" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "dateparser-1.2.0-py2.py3-none-any.whl", hash = "sha256:0b21ad96534e562920a0083e97fd45fa959882d4162acc358705144520a35830"}, {file = "dateparser-1.2.0.tar.gz", hash = "sha256:7975b43a4222283e0ae15be7b4999d08c9a70e2d378ac87385b1ccf2cffbbb30"}, @@ -876,6 +914,7 
@@ version = "5.1.1" description = "Decorators for Humans" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, @@ -887,6 +926,7 @@ version = "0.7.1" description = "XML bomb protection for Python stdlib modules" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] files = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, @@ -898,6 +938,7 @@ version = "0.3.9" description = "serialize all of Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, @@ -913,6 +954,7 @@ version = "0.0.107" description = "A set of disposable email domains" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "disposable_email_domains-0.0.107-py2.py3-none-any.whl", hash = "sha256:1a7b891f644b1234dd2555de5208bcb35a52e32aaef156ff03ad2cc02f8564ed"}, {file = "disposable_email_domains-0.0.107.tar.gz", hash = "sha256:a5e5f267d6fe1288840ba67e03816e7478a3ec308948c9ae5aa7f7cfcc31b13c"}, @@ -927,6 +969,7 @@ version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, @@ -938,6 +981,7 @@ version = "1.9.0" description = "Distro - an OS platform information API" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, @@ -949,6 +993,7 @@ version = "5.1.4" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.10" +groups = ["main", "dev"] files = [ {file = "Django-5.1.4-py3-none-any.whl", hash = "sha256:236e023f021f5ce7dee5779de7b286565fdea5f4ab86bae5338e3f7b69896cf0"}, {file = "Django-5.1.4.tar.gz", hash = "sha256:de450c09e91879fa5a307f696e57c851955c910a438a35e6b4c895e86bedc82a"}, @@ -969,6 +1014,7 @@ version = "0.1.6" description = "Drop-in replacement for django admin default pagination that works fast with huge tables." optional = false python-versions = ">=3.4" +groups = ["main"] files = [ {file = "django-admin-cursor-paginator-0.1.6.tar.gz", hash = "sha256:42f81854c3f7774b1b9a327ce974586bdbdcca1c761b634a1ebf162c8a65cab9"}, ] @@ -982,6 +1028,7 @@ version = "0.2.0" description = "Django utility for a memoization decorator that uses the Django cache framework." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "django-cache-memoize-0.2.0.tar.gz", hash = "sha256:79950a027ba40e4aff4efed587b76036bf5ba1f59329d7b158797b832be72ca6"}, {file = "django_cache_memoize-0.2.0-py3-none-any.whl", hash = "sha256:a6bfd112da699d1fa85955a1e15b7c48ee25e58044398958e269678db10736f3"}, @@ -996,6 +1043,7 @@ version = "4.6.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "django_cors_headers-4.6.0-py3-none-any.whl", hash = "sha256:8edbc0497e611c24d5150e0055d3b178c6534b8ed826fb6f53b21c63f5d48ba3"}, {file = "django_cors_headers-4.6.0.tar.gz", hash = "sha256:14d76b4b4c8d39375baeddd89e4f08899051eeaf177cb02a29bd6eae8cf63aa8"}, @@ -1011,6 +1059,7 @@ version = "3.8" description = "Django Content Security Policy support." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django_csp-3.8-py3-none-any.whl", hash = "sha256:19b2978b03fcd73517d7d67acbc04fbbcaec0facc3e83baa502965892d1e0719"}, {file = "django_csp-3.8.tar.gz", hash = "sha256:ef0f1a9f7d8da68ae6e169c02e9ac661c0ecf04db70e0d1d85640512a68471c0"}, @@ -1029,6 +1078,7 @@ version = "4.4.6" description = "A configurable set of panels that display various debug information about the current request/response." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "django_debug_toolbar-4.4.6-py3-none-any.whl", hash = "sha256:3beb671c9ec44ffb817fad2780667f172bd1c067dbcabad6268ce39a81335f45"}, {file = "django_debug_toolbar-4.4.6.tar.gz", hash = "sha256:36e421cb908c2f0675e07f9f41e3d1d8618dc386392ec82d23bcfcd5d29c7044"}, @@ -1044,6 +1094,7 @@ version = "8.0" description = "Wrapper around elasticsearch-dsl-py for django models" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "django-elasticsearch-dsl-8.0.tar.gz", hash = "sha256:64ee0612ced6d57515a6b7f29f1a3e1c2eea1996a6226fc72079a95c067b27ca"}, {file = "django_elasticsearch_dsl-8.0-py2.py3-none-any.whl", hash = "sha256:423784a4af336d109c3763622f1edc4973664cb5154beb55b3ff9390c1e4525e"}, @@ -1062,6 +1113,7 @@ version = "0.11.2" description = "A package that allows you to utilize 12factor inspired environment variables to configure your Django application." optional = false python-versions = ">=3.6,<4" +groups = ["main"] files = [ {file = "django-environ-0.11.2.tar.gz", hash = "sha256:f32a87aa0899894c27d4e1776fa6b477e8164ed7f6b3e410a62a6d72caaf64be"}, {file = "django_environ-0.11.2-py2.py3-none-any.whl", hash = "sha256:0ff95ab4344bfeff693836aa978e6840abef2e2f1145adff7735892711590c05"}, @@ -1078,6 +1130,7 @@ version = "3.2.3" description = "Extensions for Django" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, @@ -1092,6 +1145,7 @@ version = "24.3" description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "django_filter-24.3-py3-none-any.whl", hash = "sha256:c4852822928ce17fb699bcfccd644b3574f1a2d80aeb2b4ff4f16b02dd49dc64"}, {file = "django_filter-24.3.tar.gz", hash = "sha256:d8ccaf6732afd21ca0542f6733b11591030fa98669f8d15599b358e24a2cd9c3"}, @@ -1106,6 +1160,7 @@ version = "0.2.0" description = "Django hCaptcha provides a simple way to protect your django forms using hCaptcha" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-hCaptcha-0.2.0.tar.gz", hash = "sha256:b2519eaf0cc97865ac72f825301122c5cf61e1e4852d6895994160222acb6c1a"}, {file = "django_hCaptcha-0.2.0-py3-none-any.whl", hash = "sha256:18804fb38a01827b6c65d111bac31265c1b96fcf52d7a54c3e2d2cb1c62ddcde"}, @@ -1117,6 +1172,7 @@ version = "4.0" description = "Country-specific Django helpers" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-localflavor-4.0.tar.gz", hash = "sha256:11859e522dba74aa6dde5a659242b1fbc5efb4dea08e9b77315402bdeca5194e"}, {file = "django_localflavor-4.0-py3-none-any.whl", hash = "sha256:7a5b1df03ca8e10df9d1b3c2e4314e43383067868183cdf41ab4e7a973694a8b"}, @@ -1132,6 +1188,7 @@ version = "1.0.6" description = "a Django app that provides template tags for using Markdown (using the python-markdown2 processor)" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-markdown-deux-1.0.6.zip", hash = "sha256:1f7b4da6b4dd1a9a84e3da90887d356f8afdd9a1e7d6468c081b8ac50a7980b1"}, ] @@ -1145,6 +1202,7 @@ version = "1.0.0" description = "A set of simple math filters for Django" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-mathfilters-1.0.0.tar.gz", hash = "sha256:c9b892ef6dfc893683e75cfd0279c187a601ca68f4684c38f9da44657fb64b07"}, {file = "django_mathfilters-1.0.0-py3-none-any.whl", hash = "sha256:64200a21bb249fbf27be601d4bbb788779e09c6e063170c097cd82c4d18ebb83"}, @@ -1156,6 +1214,7 @@ version = "5.0.0" description = "Django model mixins and utilities" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "django_model_utils-5.0.0-py3-none-any.whl", hash = "sha256:fec78e6c323d565a221f7c4edc703f4567d7bb1caeafe1acd16a80c5ff82056b"}, {file = "django_model_utils-5.0.0.tar.gz", hash = "sha256:041cdd6230d2fbf6cd943e1969318bce762272077f4ecd333ab2263924b4e5eb"}, @@ -1170,6 +1229,7 @@ version = "0.3.2" description = "Django test helpers to manage file storage side effects." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] files = [ {file = "django-override-storage-0.3.2.tar.gz", hash = "sha256:995e1a42f056c9f9bc114077c11d67520ec7d8a752a59be62729e641562b133e"}, {file = "django_override_storage-0.3.2-py2.py3-none-any.whl", hash = "sha256:1f1a13274d66cc481b19d63c8bd43c94066824008bcdd26ec65d125b1ce8ec39"}, @@ -1181,6 +1241,7 @@ version = "4.22.0" description = "Set the draft security HTTP header Permissions-Policy (previously Feature-Policy) on your Django app." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "django_permissions_policy-4.22.0-py3-none-any.whl", hash = "sha256:b8b9034f2e1b59a8ab5b701cb4000f5b3bff1445bc56347c94b3fa4e20794618"}, {file = "django_permissions_policy-4.22.0.tar.gz", hash = "sha256:a65aeeeca0673f8820b1d94490677f1f8a4c0bf28253e189142fc7b175e131dd"}, @@ -1196,6 +1257,7 @@ version = "3.5.1" description = "History tracking for Django and Postgres" optional = false python-versions = "<4,>=3.9.0" +groups = ["main"] files = [ {file = "django_pghistory-3.5.1-py3-none-any.whl", hash = "sha256:900e5be084d20519528a1c66a354464a74b80ec0101abb7541ded61ff46759b7"}, {file = "django_pghistory-3.5.1.tar.gz", hash = "sha256:28a4238326651d60c33a22337c3c93edc0e657d26ef7faac412875c7b4d40d1c"}, @@ -1211,6 +1273,7 @@ version = "4.12.2" description = "Postgres trigger support integrated with Django models." optional = false python-versions = "<4,>=3.8.0" +groups = ["main"] files = [ {file = "django_pgtrigger-4.12.2-py3-none-any.whl", hash = "sha256:1e1f6bf448997ee02a5af07d62a23b10085055e3b7e21062c8480c0b3b56f475"}, {file = "django_pgtrigger-4.12.2.tar.gz", hash = "sha256:831fd03cde31b4d7192a8c831527f062940c82faf4ccff688ad1975806087889"}, @@ -1225,6 +1288,7 @@ version = "4.1.0" description = "Cache-based rate-limiting for Django." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "django-ratelimit-4.1.0.tar.gz", hash = "sha256:555943b283045b917ad59f196829530d63be2a39adb72788d985b90c81ba808b"}, {file = "django_ratelimit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d047a31cf94d83ef1465d7543ca66c6fc16695559b5f8d814d1b51df15110b92"}, @@ -1236,6 +1300,7 @@ version = "4.3.2" description = "A Django email backend for Amazon's Simple Email Service (SES)" optional = false python-versions = "<4.0,>=3.8" +groups = ["main"] files = [ {file = "django_ses-4.3.2-py3-none-any.whl", hash = "sha256:eed713751a95f0204954a3d9532651fcde835fa48179556ddbac75a671fd8de4"}, {file = "django_ses-4.3.2.tar.gz", hash = "sha256:4440847259b3a5a1d33c02467dacbe23d12b8105e04a4c17dba38110f4bfae77"}, @@ -1257,6 +1322,7 @@ version = "1.14.3" description = "Support for many storage backends in Django" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "django-storages-1.14.3.tar.gz", hash = "sha256:95a12836cd998d4c7a4512347322331c662d9114c4344f932f5e9c0fce000608"}, {file = "django_storages-1.14.3-py3-none-any.whl", hash = "sha256:31f263389e95ce3a1b902fb5f739a7ed32895f7d8b80179fe7453ecc0dfe102e"}, @@ -1280,6 +1346,7 @@ version = "5.1.0" description = "Mypy stubs for Django" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "django_stubs-5.1.0-py3-none-any.whl", hash = "sha256:b98d49a80aa4adf1433a97407102d068de26c739c405431d93faad96dd282c40"}, {file = "django_stubs-5.1.0.tar.gz", hash = "sha256:86128c228b65e6c9a85e5dc56eb1c6f41125917dae0e21e6cfecdf1b27e630c5"}, @@ -1303,6 +1370,7 @@ version = "5.1.0" description = "Monkey-patching and extensions for django-stubs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "django_stubs_ext-5.1.0-py3-none-any.whl", hash = "sha256:a455fc222c90b30b29ad8c53319559f5b54a99b4197205ddbb385aede03b395d"}, {file = "django_stubs_ext-5.1.0.tar.gz", hash = "sha256:ed7d51c0b731651879fc75f331fb0806d98b67bfab464e96e2724db6b46ef926"}, @@ -1318,6 +1386,7 @@ version = "4.1.0" description = "A feature flipper for Django." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "django-waffle-4.1.0.tar.gz", hash = "sha256:e49d7d461d89f3bd8e53f20efe39310acca8f275c9888495e68e195345bf18b1"}, {file = "django_waffle-4.1.0-py3-none-any.whl", hash = "sha256:5979a2f3dd674ef7086480525b39651fc2045427f6d8e6a614192656d3402c5b"}, @@ -1332,6 +1401,7 @@ version = "3.14.0" description = "Web APIs for Django, made easy." optional = false python-versions = ">=3.6" +groups = ["main"] files = [] develop = false @@ -1351,6 +1421,7 @@ version = "1.0.0.dev2" description = "Better filtering for Django REST Framework" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "djangorestframework-filters-1.0.0.dev2.tar.gz", hash = "sha256:ef84527e3427434d54228825b53a35098c8633c1e77b71d06b79597b749ea3f2"}, {file = "djangorestframework_filters-1.0.0.dev2-py3-none-any.whl", hash = "sha256:7369998968d656707e013da8c0c3ef1f858b99c4caaa8e9ea40861e5d6ddecff"}, @@ -1366,6 +1437,7 @@ version = "3.15.1" description = "PEP-484 stubs for django-rest-framework" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "djangorestframework_stubs-3.15.1-py3-none-any.whl", hash = "sha256:79dc9018f5d5fa420f9981eec9f1e820ecbd04719791f144419cdc6c5b8e29bd"}, {file = "djangorestframework_stubs-3.15.1.tar.gz", hash = "sha256:34539871895d66d382b6ae3655d9f95c1de7733cf50bc29097638d367ed3117d"}, @@ -1389,6 +1461,7 @@ version = "2.0.0" description = "XML support for Django REST Framework" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "djangorestframework-xml-2.0.0.tar.gz", hash = "sha256:35f6c811d0ab8c8466b26db234e16a2ed32d76381715257aebf4c7be2c202ca1"}, {file = "djangorestframework_xml-2.0.0-py2.py3-none-any.whl", hash = "sha256:975955fbb0d49ac44a90bdeb33b7923d95b79884d283f983e116c80a936ef4d0"}, @@ -1408,6 +1481,7 @@ version = "0.6.2" description = "Pythonic argument parser, that will make you smile" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, ] @@ -1418,6 +1492,7 @@ version = "0.4.0" description = "Dynamically return subset of Django REST Framework serializer fields" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "drf_dynamic_fields-0.4.0-py2.py3-none-any.whl", hash = "sha256:48b879fe899905bc18593a61bca43e3b595dc3431b3b4ee499a9fd6c9a53f98c"}, {file = "drf_dynamic_fields-0.4.0.tar.gz", hash = "sha256:f20a5ec27d003db7595c9315db22217493dcaed575f3811d3e12f264c791c20c"}, @@ -1429,6 +1504,7 @@ version = "8.15.1" description = "Transport classes and utilities shared among Python Elastic client libraries" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "elastic_transport-8.15.1-py3-none-any.whl", hash = "sha256:b5e82ff1679d8c7705a03fd85c7f6ef85d6689721762d41228dd312e34f331fc"}, {file = "elastic_transport-8.15.1.tar.gz", hash = "sha256:9cac4ab5cf9402668cf305ae0b7d93ddc0c7b61461d6d1027850db6da9cc5742"}, @@ -1447,6 +1523,7 @@ version = "8.15.1" description = "Python client for Elasticsearch" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "elasticsearch-8.15.1-py3-none-any.whl", hash = "sha256:02a0476e98768a30d7926335fc0d305c04fdb928eea1354c6e6040d8c2814569"}, {file = "elasticsearch-8.15.1.tar.gz", hash = "sha256:40c0d312f8adf8bdc81795bc16a0b546ddf544cb1f90e829a244e4780c4dbfd8"}, @@ -1470,6 +1547,7 @@ version = "8.11.0" description = "Python 
client for Elasticsearch" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "elasticsearch-dsl-8.11.0.tar.gz", hash = "sha256:44af4fd7f62009bb19193b55e1c2143b6932517e4c0ec30107e7ff4d968a127e"}, {file = "elasticsearch_dsl-8.11.0-py3-none-any.whl", hash = "sha256:61000f8ff5e9633d3381aea5a6dfba5c9c4505fe2e6c5cba6a17cd7debc890d9"}, @@ -1488,6 +1566,7 @@ version = "2.1.0" description = "Get the currently executing AST node of a frame, and other information" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, @@ -1502,6 +1581,7 @@ version = "0.11.0" description = "Irregular methods for regular expressions" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "exrex-0.11.0-py2.py3-none-any.whl", hash = "sha256:7bbc4987ebbb63cda7aad1a63b4e29032ba24826de4f295354bce1743e3aeb52"}, {file = "exrex-0.11.0.tar.gz", hash = "sha256:59912f0234567a5966b10d963c37ca9fe07f1640fd158e77c0dc7c3aee780489"}, @@ -1513,6 +1593,7 @@ version = "2.6.4" description = "Tool for extracting legal citations from text strings." optional = false python-versions = "<4.0,>=3.10" +groups = ["main"] files = [ {file = "eyecite-2.6.4-py3-none-any.whl", hash = "sha256:da6a100ca6c6fd05b9a6714fdcdaec8d2e5aa27fff550c9e6c41f75009bea81f"}, {file = "eyecite-2.6.4.tar.gz", hash = "sha256:e3a7d8d7816ee58f2966b2c571df3f97dd19746c5ea5b951b30d4d82cabd8508"}, @@ -1532,6 +1613,7 @@ version = "3.3.1" description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "factory_boy-3.3.1-py2.py3-none-any.whl", hash = "sha256:7b1113c49736e1e9995bc2a18f4dbf2c52cf0f841103517010b1d825712ce3ca"}, {file = "factory_boy-3.3.1.tar.gz", hash = "sha256:8317aa5289cdfc45f9cae570feb07a6177316c82e34d14df3c2e1f22f26abef0"}, @@ -1550,6 +1632,7 @@ version = "30.8.0" description = "Faker is a Python package that generates fake data for you." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "Faker-30.8.0-py3-none-any.whl", hash = "sha256:4cd0c5ea4bc1e4c902967f6e662f5f5da69f1674d9a94f54e516d27f3c2a6a16"}, {file = "faker-30.8.0.tar.gz", hash = "sha256:3608c7fcac2acde0eaa6da28dae97628f18f14d54eaa2a92b96ae006f1621bd7"}, @@ -1565,6 +1648,7 @@ version = "2.1.0" description = "Packages the C++ implementation of google-diff-match-patch for Python for fast byte and string diffs." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "fast_diff_match_patch-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5670982e4d08eb7609f0b0d62990f48e458f0dc27581a9850018a0fa5f4528e1"}, {file = "fast_diff_match_patch-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e2078b4305c3ad894bb2d405abe4119b13c3ea1ddebf3dbba3cf8beb90ea560"}, @@ -1650,6 +1734,7 @@ version = "6.0.11" description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45"}, {file = "feedparser-6.0.11.tar.gz", hash = "sha256:c9d0407b64c6f2a065d0ebb292c2b35c01050cc0dc33757461aaabdc4c4184d5"}, @@ -1664,6 +1749,7 @@ version = "3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, @@ -1680,6 +1766,7 @@ version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" +groups = ["dev"] files = [ {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, @@ -1696,6 +1783,7 @@ version = "1.0.1" description = "CLI tool to convert a python project's %-formatted strings to f-strings." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "flynt-1.0.1-py3-none-any.whl", hash = "sha256:65d1c546434827275123222a98408e9561bcd67db832dd58f530ff17b8329ec1"}, {file = "flynt-1.0.1.tar.gz", hash = "sha256:988aac00672a5469726cc0a17cef7d1178c284a9fe8563458db2475d0aaed965"}, @@ -1713,6 +1801,7 @@ version = "0.18.0" description = "Fuzzy string matching in python" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "fuzzywuzzy-0.18.0-py2.py3-none-any.whl", hash = "sha256:928244b28db720d1e0ee7587acf660ea49d7e4c632569cad4f1cd7e68a5f0993"}, {file = "fuzzywuzzy-0.18.0.tar.gz", hash = "sha256:45016e92264780e58972dca1b3d939ac864b78437422beecebb3095f8efd00e8"}, @@ -1727,6 +1816,7 @@ version = "2.0.0" description = "Geonames data for continents, cities and US states." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "geonamescache-2.0.0-py3-none-any.whl", hash = "sha256:24fdaaeaf236f88786dec8c0ab55447f5f7f95ef6c094e79fa9ef74114ea1fe2"}, {file = "geonamescache-2.0.0.tar.gz", hash = "sha256:fa1eed0b5b591b478ad81979081ac15b37aa6a1c00bb02431a570688e5c2ecc9"}, @@ -1738,6 +1828,7 @@ version = "23.0.0" description = "WSGI HTTP Server for UNIX" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, @@ -1759,6 +1850,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -1770,6 +1862,7 @@ version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" optional = false python-versions = ">=3.6.1" +groups = ["main"] files = [ {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, @@ -1785,6 +1878,7 @@ version = "4.0.0" description = "Pure-Python HPACK header compression" optional = false python-versions = ">=3.6.1" +groups = ["main"] files = [ {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, @@ -1796,6 +1890,7 @@ version = "1.1" description = "HTML parser based on the WHATWG HTML specification" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] files = [ {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, @@ -1817,6 +1912,7 @@ version = "1.0.6" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, @@ -1838,6 +1934,7 @@ version = "0.22.0" description = "A comprehensive HTTP client library." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, @@ -1852,6 +1949,7 @@ version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." 
optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, @@ -1907,6 +2005,7 @@ version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -1932,6 +2031,7 @@ version = "6.0.1" description = "HTTP/2 framing layer for Python" optional = false python-versions = ">=3.6.1" +groups = ["main"] files = [ {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, @@ -1943,6 +2043,7 @@ version = "21.0.0" description = "A featureful, immutable, and correct URL for Python." optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, @@ -1957,6 +2058,7 @@ version = "0.7.8" description = "Python bindings for Hyperscan." optional = false python-versions = "<4.0,>=3.9" +groups = ["main"] files = [ {file = "hyperscan-0.7.8-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:5a769dfb53f8e9417905be310d2d60d3c2deba021703da88716b506884e9a855"}, {file = "hyperscan-0.7.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8cd9d979f7d9f8a0b9c92fd6db950ec997aff5cc79121fd997ae44e53d80f2e4"}, @@ -2001,6 +2103,7 @@ version = "2.6.1" description = "File identification library for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, @@ -2015,6 +2118,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -2029,6 +2133,7 @@ version = "24.7.2" description = "A small library that versions your Python projects." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "incremental-24.7.2-py3-none-any.whl", hash = "sha256:8cb2c3431530bec48ad70513931a760f446ad6c25e8333ca5d95e24b0ed7b8fe"}, {file = "incremental-24.7.2.tar.gz", hash = "sha256:fb4f1d47ee60efe87d4f6f0ebb5f70b9760db2b2574c59c8e8912be4ebd464c9"}, @@ -2046,6 +2151,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -2057,6 +2163,7 @@ version = "4.1.0" description = "A Python interface to archive.org." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "internetarchive-4.1.0-py3-none-any.whl", hash = "sha256:0a0e30ade737f7be971b31c38b8c856867a316c5b1d646f055b2b1946aa9cb00"}, {file = "internetarchive-4.1.0.tar.gz", hash = "sha256:467188386218d2c77815ca798a51dd18310b326841113bb462b24ea9a71beedc"}, @@ -2083,6 +2190,7 @@ version = "1.0.23" description = "IPv4/IPv6 manipulation library" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "ipaddress-1.0.23-py2.py3-none-any.whl", hash = "sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc"}, {file = "ipaddress-1.0.23.tar.gz", hash = "sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2"}, @@ -2094,6 +2202,7 @@ version = "8.28.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "ipython-8.28.0-py3-none-any.whl", hash = "sha256:530ef1e7bb693724d3cdc37287c80b07ad9b25986c007a53aa1857272dac3f35"}, {file = "ipython-8.28.0.tar.gz", hash = "sha256:0d0d15ca1e01faeb868ef56bc7ee5a0de5bd66885735682e8a322ae289a13d1a"}, @@ -2130,6 +2239,7 @@ version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -2144,6 +2254,7 @@ version = "1.2.0" description = "Simple immutable types for python." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "itypes-1.2.0-py2.py3-none-any.whl", hash = "sha256:03da6872ca89d29aef62773672b2d408f490f80db48b23079a4b194c86dd04c6"}, {file = "itypes-1.2.0.tar.gz", hash = "sha256:af886f129dea4a2a1e3d36595a2d139589e4dd287f5cab0b40e799ee81570ff1"}, @@ -2155,6 +2266,7 @@ version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, @@ -2174,6 +2286,7 @@ version = "0.6.1" description = "Fast iterable JSON parser." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "jiter-0.6.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d08510593cb57296851080018006dfc394070178d238b767b1879dc1013b106c"}, {file = "jiter-0.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adef59d5e2394ebbad13b7ed5e0306cceb1df92e2de688824232a91588e77aa7"}, @@ -2256,6 +2369,7 @@ version = "1.0.1" description = "JSON Matching Expressions" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, @@ -2267,6 +2381,7 @@ version = "1.4.2" description = "Lightweight pipelining with Python functions" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, @@ -2278,6 +2393,7 @@ version = "1.33" description = "Apply JSON-Patches (RFC 6902)" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +groups = ["main"] files = [ {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, @@ -2292,6 +2408,7 @@ version = "3.0.0" description = "Identify specific nodes in a JSON document (RFC 6901)" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, @@ -2303,6 +2420,7 @@ version = "2.0.5" description = "Database of Judge Pictures" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "judge-pics-2.0.5.tar.gz", hash = "sha256:9c17c7b5f4eb7407a606a59b11ae18724252cf176eab6e674d7e214d090dcdcc"}, {file = "judge_pics-2.0.5-py2.py3-none-any.whl", hash = "sha256:f60f386f898daac3a067f0579daa33869be6d3923883bd9e626c759bbac631fa"}, @@ -2323,6 +2441,7 @@ version = "2.6.50" description = "An API to scrape American court websites for metadata." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "juriscraper-2.6.50-py27-none-any.whl", hash = "sha256:55329a4623b812c712abdbf9a4ccf616afe0e96f01e1a2a6e6faa8d39e4921e1"}, {file = "juriscraper-2.6.50.tar.gz", hash = "sha256:dbc01321d33f5543a82c13b9b3aebb84f6be288114f441cc06ce6a45cf09ba87"}, @@ -2355,6 +2474,7 @@ version = "0.16" description = "A Python implemntation of a kd-tree" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "kdtree-0.16-py2.py3-none-any.whl", hash = "sha256:083945db69bc3cf0d349d8d0efe66c056de28d1c2f1e81f762dc5ce46f0dcf0a"}, {file = "kdtree-0.16.tar.gz", hash = "sha256:386df6c7816a05e0fab974e3035df944f99ef68b5615f4a416771391e33d7534"}, @@ -2366,6 +2486,7 @@ version = "5.4.2" description = "Messaging library for Python." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "kombu-5.4.2-py3-none-any.whl", hash = "sha256:14212f5ccf022fc0a70453bb025a1dcc32782a588c49ea866884047d66e14763"}, {file = "kombu-5.4.2.tar.gz", hash = "sha256:eef572dd2fd9fc614b37580e3caeafdd5af46c1eff31e7fba89138cdb406f2cf"}, @@ -2399,6 +2520,7 @@ version = "0.26.0" description = "Python extension for computing string edit distances and similarities." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "levenshtein-0.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e5fb23bf35ef1094d00415bb2116dd862fb919adc4044436508029dafedfd687"}, {file = "levenshtein-0.26.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea97df5fecf4195ed24aa4c245e5b6cf42527a5818bd7d0d92ba8c7425828861"}, @@ -2492,6 +2614,7 @@ version = "5.3.0" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, @@ -2646,6 +2769,7 @@ version = "0.5.1" description = "Type annotations for the lxml package" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "lxml-stubs-0.5.1.tar.gz", hash = "sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d"}, {file = "lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272"}, @@ -2660,6 +2784,7 @@ version = "2.5.1" description = "A fast and complete Python implementation of Markdown" optional = false python-versions = "<4,>=3.8" +groups = ["main"] files = [ {file = "markdown2-2.5.1-py2.py3-none-any.whl", hash = "sha256:190ae60a4bd0425c60c863bede18a9f3d45b1cbf3fbc9f40b4fac336ff2c520b"}, {file = "markdown2-2.5.1.tar.gz", hash = "sha256:12fc04ea5a87f7bb4b65acf5bf3af1183b20838cc7d543b74c92ec7eea4bbc74"}, @@ -2677,6 +2802,7 @@ version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, @@ -2691,6 +2817,7 @@ version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -2702,6 +2829,7 @@ version = "1.12.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "mypy-1.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3d7d4371829184e22fda4015278fbfdef0327a4b955a483012bd2d423a788801"}, {file = "mypy-1.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f59f1dfbf497d473201356966e353ef09d4daec48caeacc0254db8ef633a28a5"}, @@ -2753,6 +2881,7 @@ version = "1.0.0" description = "Type system extensions for programs checked with 
the mypy type checker." optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -2764,6 +2893,7 @@ version = "1.1.3" description = "A simple Python module for parsing human names into their individual components." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "nameparser-1.1.3-py2.py3-none-any.whl", hash = "sha256:08ccda98681d59751c82052d52f185bc52f99d43e87d46b85c015a9096ecfa66"}, {file = "nameparser-1.1.3.tar.gz", hash = "sha256:aa2400ad71ccf8070675b40311a257c934659f91854b154e1ba6c264761c049d"}, @@ -2775,6 +2905,7 @@ version = "8.4.0" description = "Simple yet flexible natural sorting in Python." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "natsort-8.4.0-py3-none-any.whl", hash = "sha256:4732914fb471f56b5cce04d7bae6f164a592c7712e1c85f9ef585e197299521c"}, {file = "natsort-8.4.0.tar.gz", hash = "sha256:45312c4a0e5507593da193dedd04abb1469253b601ecaf63445ad80f0a1ea581"}, @@ -2790,6 +2921,7 @@ version = "0.5.1" description = "Provides enhanced HTTPS support for httplib and urllib2 using PyOpenSSL" optional = false python-versions = ">=2.7,<3.0.dev0 || >=3.4.dev0" +groups = ["main"] files = [ {file = "ndg_httpsclient-0.5.1-py2-none-any.whl", hash = "sha256:d2c7225f6a1c6cf698af4ebc962da70178a99bcde24ee6d1961c4f3338130d57"}, {file = "ndg_httpsclient-0.5.1-py3-none-any.whl", hash = "sha256:dd174c11d971b6244a891f7be2b32ca9853d3797a72edb34fa5d7b07d8fff7d4"}, @@ -2806,6 +2938,7 @@ version = "3.4.2" description = "Python package for creating and manipulating graphs and networks" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f"}, {file = "networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1"}, @@ -2825,6 +2958,7 @@ version = "0.2.18" description = "Python bindings to the ammonia HTML sanitization library." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "nh3-0.2.18-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86"}, {file = "nh3-0.2.18-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811"}, @@ -2850,6 +2984,7 @@ version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, @@ -2861,6 +2996,7 @@ version = "1.3.7" description = "nose extends unittest to make testing easier" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "nose-1.3.7-py2-none-any.whl", hash = "sha256:dadcddc0aefbf99eea214e0f1232b94f2fa9bd98fa8353711dacb112bfcbbb2a"}, {file = "nose-1.3.7-py3-none-any.whl", hash = "sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac"}, @@ -2873,6 +3009,7 @@ version = "2.1.2" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "numpy-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee"}, {file = "numpy-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884"}, @@ -2935,6 +3072,7 @@ version = "1.58.1" description = "The official Python library for the openai API" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "openai-1.58.1-py3-none-any.whl", hash = "sha256:e2910b1170a6b7f88ef491ac3a42c387f08bd3db533411f7ee391d166571d63c"}, {file = "openai-1.58.1.tar.gz", hash = "sha256:f5a035fd01e141fc743f4b0e02c41ca49be8fab0866d3b67f5f29b4f4d3c0973"}, @@ -2960,6 +3098,7 @@ version = "1.3.0.post0" description = "Capture the outcome of Python function calls." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"}, {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"}, @@ -2974,6 +3113,7 @@ version = "24.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, @@ -2985,6 +3125,7 @@ version = "2.2.3" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, @@ -3067,6 +3208,7 @@ version = "0.8.4" description = "A Python Parser" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, @@ -3082,6 +3224,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -3093,6 +3236,8 @@ version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." optional = false python-versions = "*" +groups = ["main"] +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -3107,6 +3252,7 @@ version = "11.0.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947"}, {file = "pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba"}, @@ -3199,6 +3345,7 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -3215,6 +3362,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -3230,6 +3378,7 @@ version = "4.0.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878"}, {file = "pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2"}, @@ -3248,6 +3397,7 @@ version = "0.0.1" description = "Common methods for propbable parsers" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "probableparsing-0.0.1-py2.py3-none-any.whl", hash = "sha256:509df25fdda4fd7c0b2a100f58cc971bd23daf26f3b3320aebf2616d2e10c69e"}, {file = "probableparsing-0.0.1.tar.gz", hash = "sha256:8114bbf889e1f9456fe35946454c96e42a6ee2673a90d4f1f9c46a406f543767"}, @@ -3259,6 +3409,7 @@ version = "3.0.48" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, @@ -3273,6 +3424,7 @@ version = "3.2.3" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907"}, {file = "psycopg-3.2.3.tar.gz", hash = "sha256:a5764f67c27bec8bfac85764d23c534af2c27b893550377e37ce59c12aac47a2"}, @@ -3297,6 +3449,8 @@ version = "3.2.3" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "implementation_name != \"pypy\"" files = [ {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:965455eac8547f32b3181d5ec9ad8b9be500c10fe06193543efaaebe3e4ce70c"}, {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:71adcc8bc80a65b776510bc39992edf942ace35b153ed7a9c6c573a6849ce308"}, @@ -3370,6 +3524,7 @@ version = "3.2.3" description = "Connection Pool for Psycopg" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "psycopg_pool-3.2.3-py3-none-any.whl", hash = "sha256:53bd8e640625e01b2927b2ad96df8ed8e8f91caea4597d45e7673fc7bbb85eb1"}, {file = "psycopg_pool-3.2.3.tar.gz", hash = "sha256:bb942f123bef4b7fbe4d55421bd3fb01829903c95c0f33fd42b7e94e5ac9b52a"}, @@ -3384,6 +3539,8 @@ version = "0.7.0" description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" +groups = 
["main"] +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -3395,6 +3552,7 @@ version = "0.2.3" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -3409,6 +3567,7 @@ version = "2.1.0" description = "pyahocorasick is a fast and memory efficient library for exact or approximate multi-pattern string search. With the ``ahocorasick.Automaton`` class, you can find multiple key string occurrences at once in some input text. You can use it as a plain dict-like Trie or convert a Trie to an automaton for efficient Aho-Corasick search. And pickle to disk for easy reuse of large automatons. Implemented in C and tested on Python 3.6+. Works on Linux, macOS and Windows. BSD-3-Cause license." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyahocorasick-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c46288044c4f71392efb4f5da0cb8abd160787a8b027afc85079e9c3d7551eb"}, {file = "pyahocorasick-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f15529c83b8c6e0548d7d3c5631fefa23fba5190e67be49d6c9e24a6358ff9c"}, @@ -3447,6 +3606,7 @@ version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -3458,6 +3618,7 @@ version = "0.4.1" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, @@ -3472,6 +3633,7 @@ version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, @@ -3483,6 +3645,7 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -3494,6 +3657,7 @@ version = "2.9.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = 
"pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, @@ -3514,6 +3678,7 @@ version = "2.23.4" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, @@ -3615,6 +3780,7 @@ version = "3.2.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, @@ -3626,6 +3792,7 @@ version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, @@ -3640,6 +3807,7 @@ version = "3.3.1" description = "python code static checker" optional = false python-versions = ">=3.9.0" +groups = ["dev"] files = [ {file = "pylint-3.3.1-py3-none-any.whl", hash = "sha256:2f846a466dd023513240bc140ad2dd73bfc080a5d85a710afdb728c420a5a2b9"}, {file = "pylint-3.3.1.tar.gz", hash = "sha256:9f3dcc87b1203e612b78d91a896407787e708b3f189b5fa0b307712d49ff0c6e"}, @@ -3664,6 +3832,7 @@ version = "24.3.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pyOpenSSL-24.3.0-py3-none-any.whl", hash = "sha256:e474f5a473cd7f92221cc04976e48f4d11502804657a08a989fb3be5514c904a"}, {file = "pyopenssl-24.3.0.tar.gz", hash = "sha256:49f7a019577d834746bc55c5fce6ecbcec0f2b4ec5ce1cf43a9a173b8138bb36"}, @@ -3682,6 +3851,7 @@ version = "3.2.0" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"}, {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"}, @@ -3696,6 +3866,7 @@ version = "1.7.1" description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, @@ -3708,6 +3879,7 @@ version = "2.2.0.3" description = "Snowball stemming algorithms, for information retrieval" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "PyStemmer-2.2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2935aa78a89b04899de4a8b8b6339806e0d5cd93811de52e98829b5762cf913c"}, {file = "PyStemmer-2.2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:31c9d3c808647d4c569737b32b40ed23c67133d2b89033ebc8b5756cadf6f1c1"}, @@ -3799,6 +3971,7 @@ version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, @@ -3819,6 +3992,7 @@ version = "4.9.0" description = "A Django plugin for pytest." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_django-4.9.0-py3-none-any.whl", hash = "sha256:1d83692cb39188682dbb419ff0393867e9904094a549a7d38a3154d5731b2b99"}, {file = "pytest_django-4.9.0.tar.gz", hash = "sha256:8bf7bc358c9ae6f6fc51b6cebb190fe20212196e6807121f11bd6a3b03428314"}, @@ -3837,6 +4011,7 @@ version = "0.9.11" description = "Python binding for CRFsuite" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "python_crfsuite-0.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f5ed569517e7b1fa3d32cf5d5cbe2fb6c85486195bf5cad03d52072fef7aa8a"}, {file = "python_crfsuite-0.9.11-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aed10ee4334c99173940e88318d312a4f9e70ba653b8ac0e6f3ef816431af811"}, @@ -3904,6 +4079,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -3918,6 +4094,7 @@ version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, @@ -3932,6 +4109,7 @@ version = "0.26.0" description = "Python extension for computing string edit distances and similarities." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "python_Levenshtein-0.26.0-py3-none-any.whl", hash = "sha256:1d808ba2f9df04aaea5eceba6e73734f2ffeba99d98d2a91078f32276cd041f4"}, {file = "python_levenshtein-0.26.0.tar.gz", hash = "sha256:b454dd13708546649f1cba2a0f450dd98e7c1679a92e2d6f0a8b8c013c276e55"}, @@ -3946,6 +4124,7 @@ version = "0.4.27" description = "File type identification using libmagic" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] files = [ {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, @@ -3957,6 +4136,7 @@ version = "1.20" description = "Python module to handle standardized numbers and codes" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "python-stdnum-1.20.tar.gz", hash = "sha256:ad2a2cf2eb025de408210235f36b4ae31252de3186240ccaa8126e117cb82690"}, {file = "python_stdnum-1.20-py2.py3-none-any.whl", hash = "sha256:111008e10391d54fb2afad2a10df70d5cb0c6c0a7ec82fec6f022cb8712961d3"}, @@ -3973,6 +4153,7 @@ version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, @@ -3984,6 +4165,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -4046,6 +4228,7 @@ version = "3.10.0" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "rapidfuzz-3.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:884453860de029380dded8f3c1918af2d8eb5adf8010261645c7e5c88c2b5428"}, {file = "rapidfuzz-3.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718c9bd369288aca5fa929df6dbf66fdbe9768d90940a940c0b5cdc96ade4309"}, @@ -4146,6 +4329,7 @@ version = "5.1.1" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "redis-5.1.1-py3-none-any.whl", hash = "sha256:f8ea06b7482a668c6475ae202ed8d9bcaa409f6e87fb77ed1043d912afd62e24"}, {file = "redis-5.1.1.tar.gz", hash = "sha256:f6c997521fedbae53387307c5d0bf784d9acc28d9f1d058abeac566ec4dbed72"}, @@ -4161,6 +4345,7 @@ version = "2024.9.11" description = "Alternative regular expression module, to replace re." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"}, {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"}, @@ -4260,13 +4445,14 @@ files = [ [[package]] name = "reporters-db" -version = "3.2.46" +version = "3.2.47" description = "Database of Court Reporters" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "reporters_db-3.2.46-py2.py3-none-any.whl", hash = "sha256:a8c0b0212220af099714b92938d4ffe2f48e21cde6510a1141cc9dcd86f9047f"}, - {file = "reporters_db-3.2.46.tar.gz", hash = "sha256:105bbab035912e3eea95059a9d7b1ee5d2941e19a07b9dd3856e9d80156d87a9"}, + {file = "reporters_db-3.2.47-py2.py3-none-any.whl", hash = "sha256:174f30b95be5c4591b44ac8b714c22648fa386387e91a982193a709456b996e3"}, + {file = "reporters_db-3.2.47.tar.gz", hash = "sha256:88c644db3eaf1effa5dc0ac9abe80ef1b311ebea9df0dd7092c77a694b49f3f9"}, ] [package.dependencies] @@ -4278,6 +4464,7 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -4299,6 +4486,7 @@ version = "2.1.0" description = "File transport adapter for Requests" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "requests_file-2.1.0-py2.py3-none-any.whl", hash = "sha256:cf270de5a4c5874e84599fc5778303d496c10ae5e870bfa378818f35d21bda5c"}, {file = "requests_file-2.1.0.tar.gz", hash = "sha256:0f549a3f3b0699415ac04d167e9cb39bccfb730cb832b4d20be3d9867356e658"}, @@ -4313,6 +4501,7 @@ version = "0.10.3" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"}, {file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"}, @@ -4330,6 +4519,7 @@ version = "0.7.7" description = "Simple data validation library" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "schema-0.7.7-py2.py3-none-any.whl", hash = "sha256:5d976a5b50f36e74e2157b47097b60002bd4d42e65425fcc9c9befadb4255dde"}, {file = "schema-0.7.7.tar.gz", hash = "sha256:7da553abd2958a19dc2547c388cde53398b39196175a9be59ea1caf5ab0a1807"}, @@ -4341,6 +4531,7 @@ version = "1.5.2" description = "A set of python modules for machine learning and data mining" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "scikit_learn-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:299406827fb9a4f862626d0fe6c122f5f87f8910b86fe5daa4c32dcd742139b6"}, {file = "scikit_learn-1.5.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:2d4cad1119c77930b235579ad0dc25e65c917e756fe80cab96aa3b9428bd3fb0"}, @@ -4391,6 +4582,7 @@ version = "1.14.1" description = "Fundamental algorithms for scientific computing in Python" optional = false python-versions = ">=3.10" +groups = ["main"] files = [ {file = "scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = 
"sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389"}, {file = "scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3"}, @@ -4441,6 +4633,7 @@ version = "2.2.5" description = "A collection of court seals that can be used in any project." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "seal_rookery-2.2.5-py2.py3-none-any.whl", hash = "sha256:a1bcca9c20540058ae2477b5d40e374037958e771437b1fca5338682bb8a6034"}, {file = "seal_rookery-2.2.5.tar.gz", hash = "sha256:16c8d68875f2105ff44354573be6ef098e631af97e9ac00fcd06d6614e6db17e"}, @@ -4452,6 +4645,7 @@ version = "4.25.0" description = "Official Python bindings for Selenium WebDriver" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "selenium-4.25.0-py3-none-any.whl", hash = "sha256:3798d2d12b4a570bc5790163ba57fef10b2afee958bf1d80f2a3cf07c4141f33"}, {file = "selenium-4.25.0.tar.gz", hash = "sha256:95d08d3b82fb353f3c474895154516604c7f0e6a9a565ae6498ef36c9bac6921"}, @@ -4471,6 +4665,7 @@ version = "2.19.2" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "sentry_sdk-2.19.2-py2.py3-none-any.whl", hash = "sha256:ebdc08228b4d131128e568d696c210d846e5b9d70aa0327dec6b1272d9d40b84"}, {file = "sentry_sdk-2.19.2.tar.gz", hash = "sha256:467df6e126ba242d39952375dd816fbee0f217d119bf454a8ce74cf1e7909e8d"}, @@ -4527,6 +4722,7 @@ version = "24.1.0" description = "Service identity verification for pyOpenSSL & cryptography." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "service_identity-24.1.0-py3-none-any.whl", hash = "sha256:a28caf8130c8a5c1c7a6f5293faaf239bbfb7751e4862436920ee6f2616f568a"}, {file = "service_identity-24.1.0.tar.gz", hash = "sha256:6829c9d62fb832c2e1c435629b0a8c476e1929881f28bee4d20bc24161009221"}, @@ -4551,6 +4747,7 @@ version = "75.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, @@ -4571,6 +4768,7 @@ version = "1.0.0" description = "Py3k port of sgmllib." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"}, ] @@ -4581,6 +4779,7 @@ version = "3.19.3" description = "Simple, fast, extensible JSON encoder/decoder for Python" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.5" +groups = ["main"] files = [ {file = "simplejson-3.19.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:f39caec26007a2d0efab6b8b1d74873ede9351962707afab622cc2285dd26ed0"}, {file = "simplejson-3.19.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:83c87706265ae3028e8460d08b05f30254c569772e859e5ba61fe8af2c883468"}, @@ -4700,6 +4899,7 @@ version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -4711,6 +4911,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -4722,6 +4923,7 @@ version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, @@ -4733,6 +4935,7 @@ version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, @@ -4744,6 +4947,7 @@ version = "0.5.1" description = "A non-validating SQL parser." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, @@ -4759,6 +4963,7 @@ version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -4778,6 +4983,7 @@ version = "3.0.0" description = "Traceback serialization library." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "tblib-3.0.0-py3-none-any.whl", hash = "sha256:80a6c77e59b55e83911e1e607c649836a69c103963c5f28a46cbeef44acf8129"}, {file = "tblib-3.0.0.tar.gz", hash = "sha256:93622790a0a29e04f0346458face1e144dc4d32f493714c6c3dff82a4adb77e6"}, @@ -4789,6 +4995,7 @@ version = "3.5.0" description = "threadpoolctl" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, @@ -4800,6 +5007,7 @@ version = "0.8.0" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "tiktoken-0.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b07e33283463089c81ef1467180e3e00ab00d46c2c4bbcef0acab5f771d6695e"}, {file = "tiktoken-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9269348cb650726f44dd3bbb3f9110ac19a8dcc8f54949ad3ef652ca22a38e21"}, @@ -4847,6 +5055,7 @@ version = "2.16.0" description = "Travel through time in your tests." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "time_machine-2.16.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:09531af59fdfb39bfd24d28bd1e837eff5a5d98318509a31b6cfd57d27801e52"}, {file = "time_machine-2.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:92d0b0f3c49f34dd76eb462f0afdc61ed1cb318c06c46d03e99b44ebb489bdad"}, @@ -4915,6 +5124,7 @@ version = "0.5.0" description = "Timeout decorator" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "timeout-decorator-0.5.0.tar.gz", hash = "sha256:6a2f2f58db1c5b24a2cc79de6345760377ad8bdc13813f5265f6c3e63d16b3d7"}, ] @@ -4925,6 +5135,7 @@ version = "5.1.2" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "tldextract-5.1.2-py3-none-any.whl", hash = "sha256:4dfc4c277b6b97fa053899fcdb892d2dc27295851ab5fac4e07797b6a21b2e46"}, {file = "tldextract-5.1.2.tar.gz", hash = "sha256:c9e17f756f05afb5abac04fe8f766e7e70f9fe387adb1859f0f52408ee060200"}, @@ -4946,6 +5157,7 @@ version = "0.13.2" description = "Style preserving TOML library" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, @@ -4957,6 +5169,7 @@ version = "4.66.5" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, @@ -4977,6 +5190,7 @@ version = "5.14.3" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -4992,6 +5206,7 @@ version = "0.27.0" description = "A friendly Python library for async concurrency and I/O" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "trio-0.27.0-py3-none-any.whl", hash = "sha256:68eabbcf8f457d925df62da780eff15ff5dc68fd6b367e2dde59f7aaf2a0b884"}, {file = "trio-0.27.0.tar.gz", hash = "sha256:1dcc95ab1726b2da054afea8fd761af74bad79bd52381b84eae408e983c76831"}, @@ -5011,6 +5226,7 @@ version = "0.11.1" description = "WebSocket library for Trio" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "trio-websocket-0.11.1.tar.gz", hash = "sha256:18c11793647703c158b1f6e62de638acada927344d534e3c7628eedcb746839f"}, {file = "trio_websocket-0.11.1-py3-none-any.whl", hash = "sha256:520d046b0d030cf970b8b2b2e00c4c2245b3807853ecd44214acd33d74581638"}, @@ -5026,6 +5242,7 @@ version = "24.7.0" description = "An asynchronous networking framework written in Python" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "twisted-24.7.0-py3-none-any.whl", hash = "sha256:734832ef98108136e222b5230075b1079dad8a3fc5637319615619a7725b0c81"}, {file = "twisted-24.7.0.tar.gz", hash = "sha256:5a60147f044187a127ec7da96d170d49bcce50c6fd36f594e60f4587eff4d394"}, @@ -5064,6 +5281,7 @@ version = "23.1.1" description = "Compatibility API between asyncio/Twisted/Trollius" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "txaio-23.1.1-py2.py3-none-any.whl", hash = "sha256:aaea42f8aad50e0ecfb976130ada140797e9dcb85fad2cf72b0f37f8cefcb490"}, {file = "txaio-23.1.1.tar.gz", hash = "sha256:f9a9216e976e5e3246dfd112ad7ad55ca915606b60b84a757ac769bd404ff704"}, @@ -5080,6 +5298,7 @@ version = "1.16.0.20240331" description = "Typing stubs for cffi" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee"}, {file = "types_cffi-1.16.0.20240331-py3-none-any.whl", 
hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0"}, @@ -5094,6 +5313,7 @@ version = "1.2.0.20240420" description = "Typing stubs for dateparser" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "types-dateparser-1.2.0.20240420.tar.gz", hash = "sha256:8f813ddf5ef41b32cabe6167138ae833ada10c22811e42220a1e38a0be7adbdc"}, {file = "types_dateparser-1.2.0.20240420-py3-none-any.whl", hash = "sha256:bf3695ddfbadfdfc875064895a51d926fd80b04da1a44364c6c1a9703db7b194"}, @@ -5105,6 +5325,7 @@ version = "24.1.0.20240722" description = "Typing stubs for pyOpenSSL" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, @@ -5120,6 +5341,7 @@ version = "2.9.0.20241003" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-python-dateutil-2.9.0.20241003.tar.gz", hash = "sha256:58cb85449b2a56d6684e41aeefb4c4280631246a0da1a719bdbe6f3fb0317446"}, {file = "types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d"}, @@ -5131,6 +5353,7 @@ version = "2024.2.0.20241003" description = "Typing stubs for pytz" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "types-pytz-2024.2.0.20241003.tar.gz", hash = "sha256:575dc38f385a922a212bac00a7d6d2e16e141132a3c955078f4a4fd13ed6cb44"}, {file = "types_pytz-2024.2.0.20241003-py3-none-any.whl", hash = "sha256:3e22df1336c0c6ad1d29163c8fda82736909eb977281cb823c57f8bae07118b7"}, @@ -5142,6 +5365,7 @@ version = "6.0.12.20240917" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, @@ -5153,6 +5377,7 @@ version = "4.6.0.20241004" description = "Typing stubs for redis" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e"}, {file = "types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed"}, @@ -5168,6 +5393,7 @@ version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, @@ -5182,6 +5408,7 @@ version = "75.2.0.20241019" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-setuptools-75.2.0.20241019.tar.gz", hash = "sha256:86ea31b5f6df2c6b8f2dc8ae3f72b213607f62549b6fa2ed5866e5299f968694"}, {file = "types_setuptools-75.2.0.20241019-py3-none-any.whl", hash = 
"sha256:2e48ff3acd4919471e80d5e3f049cce5c177e108d5d36d2d4cee3fa4d4104258"}, @@ -5193,6 +5420,7 @@ version = "3.19.0.20240801" description = "Typing stubs for simplejson" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-simplejson-3.19.0.20240801.tar.gz", hash = "sha256:ef90cc81dd915f26c452fa2b5e0cbd3a36af81074ae63878fcf8a477e7594e4d"}, {file = "types_simplejson-3.19.0.20240801-py3-none-any.whl", hash = "sha256:37f1b33c8626d7f072ea87737629310674845d54d187f12387fe33d31c938288"}, @@ -5204,6 +5432,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -5215,10 +5444,12 @@ version = "2024.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main", "dev"] files = [ {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] +markers = {dev = "sys_platform == \"win32\""} [[package]] name = "tzlocal" @@ -5226,6 +5457,7 @@ version = "5.2" description = "tzinfo object for the local timezone" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, @@ -5243,6 +5475,7 @@ version = "1.3.8" description = "ASCII transliterations of Unicode text" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39"}, {file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"}, @@ -5254,6 +5487,7 @@ version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, @@ -5274,6 +5508,7 @@ version = "0.5.11" description = "Parse US addresses using conditional random fields" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "usaddress-0.5.11-py3-none-any.whl", hash = "sha256:a745be0ff0c525d64463f19f2ec798bb1679a9bb6864b0d9a8b9054023f683b5"}, {file = "usaddress-0.5.11.tar.gz", hash = "sha256:eec4c473b94e2a29350ee335f18bac7fe4fa698e08271211dad5fed63bdd3e60"}, @@ -5292,6 +5527,7 @@ version = "0.34.0" description = "The lightning-fast ASGI server." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4"}, {file = "uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9"}, @@ -5317,6 +5553,8 @@ version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" +groups = ["main"] +markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\"" files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -5368,6 +5606,7 @@ version = "5.1.0" description = "Python promises." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"}, {file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"}, @@ -5379,6 +5618,7 @@ version = "20.27.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "virtualenv-20.27.0-py3-none-any.whl", hash = "sha256:44a72c29cceb0ee08f300b314848c86e57bf8d1f13107a5e671fb9274138d655"}, {file = "virtualenv-20.27.0.tar.gz", hash = "sha256:2ca56a68ed615b8fe4326d11a0dca5dfbe8fd68510fb6c6349163bed3c15f2b2"}, @@ -5399,6 +5639,7 @@ version = "0.24.0" description = "Simple, modern and high performance file watching and code reload in python." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, @@ -5494,6 +5735,7 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -5505,6 +5747,7 @@ version = "0.5.1" description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -5516,6 +5759,7 @@ version = "1.8.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, @@ -5532,6 +5776,7 @@ version = "13.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, @@ -5627,6 +5872,7 @@ version = "1.2.0" description = "WebSockets state-machine based protocol implementation" optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, @@ -5641,6 +5887,7 @@ version = "7.1.0" description = "Interfaces for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "zope.interface-7.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2bd9e9f366a5df08ebbdc159f8224904c1c5ce63893984abb76954e6fbe4381a"}, {file = "zope.interface-7.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:661d5df403cd3c5b8699ac480fa7f58047a3253b029db690efa0c3cf209993ef"}, @@ -5690,6 +5937,6 @@ test = ["coverage[toml]", "zope.event", "zope.testing"] testing = ["coverage[toml]", "zope.event", "zope.testing"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.13, <3.14" content-hash = "bce80062525b8af8017545f37dac32a6f1b7f89b07568b26529dab6ced977f45" From 4d7d57a4ce34924cb99fa0434e6516c1bb746633 Mon Sep 17 00:00:00 2001 From: grossir <14970769+grossir@users.noreply.github.com> Date: Tue, 14 Jan 2025 17:24:14 +0000 Subject: [PATCH 12/34] Update freelawproject 
dependencies --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index e54b8d7865..3bb87e85ec 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2437,14 +2437,14 @@ setuptools = "*" [[package]] name = "juriscraper" -version = "2.6.50" +version = "2.6.51" description = "An API to scrape American court websites for metadata." optional = false python-versions = "*" groups = ["main"] files = [ - {file = "juriscraper-2.6.50-py27-none-any.whl", hash = "sha256:55329a4623b812c712abdbf9a4ccf616afe0e96f01e1a2a6e6faa8d39e4921e1"}, - {file = "juriscraper-2.6.50.tar.gz", hash = "sha256:dbc01321d33f5543a82c13b9b3aebb84f6be288114f441cc06ce6a45cf09ba87"}, + {file = "juriscraper-2.6.51-py27-none-any.whl", hash = "sha256:e0d216104ee82b1ef1a28e09a8e099c1573dd8bb7d8d888e63bfda43d5dc3b96"}, + {file = "juriscraper-2.6.51.tar.gz", hash = "sha256:be5e49243479b21433e05a484c322fe79c8b7f1a567d3d7e473ed0615adcaa20"}, ] [package.dependencies] From da7cd1f77a4644dcab5afa9e9f583a818df8130f Mon Sep 17 00:00:00 2001 From: Gianfranco Rossi Date: Wed, 15 Jan 2025 17:54:44 -0500 Subject: [PATCH 13/34] feat(annotate_citations): use aria-attributes Solves #1178 Use aria-description on the anchor tag for a resolved citation. The description uses the case name, truncated if it is too long --- cl/citations/annotate_citations.py | 8 +++++++- cl/citations/tests.py | 17 ++++++++++------- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/cl/citations/annotate_citations.py b/cl/citations/annotate_citations.py index ea7d965f9e..1f283c4a5e 100644 --- a/cl/citations/annotate_citations.py +++ b/cl/citations/annotate_citations.py @@ -6,6 +6,8 @@ from cl.citations.match_citations import NO_MATCH_RESOURCE from cl.citations.types import MatchedResourceType, SupportedCitationType +from cl.custom_filters.templatetags.text_filters import best_case_name +from cl.lib.string_utils import trunc from cl.search.models import Opinion, RECAPDocument @@ -61,8 +63,12 @@ def generate_annotations( "", ] else: # If successfully matched... + case_name = trunc(best_case_name(opinion.cluster), 60, "...") annotation = [ - f'', + f'' + f'", "", ] for c in citations: diff --git a/cl/citations/tests.py b/cl/citations/tests.py index 59b1e7a986..17541cf612 100644 --- a/cl/citations/tests.py +++ b/cl/citations/tests.py @@ -183,7 +183,6 @@ def test_make_html_from_plain_text(self) -> None: ('', '
    <script async src="//www.instagram.com/embed.js"></script>
    '), ] - # fmt: on for s, expected_html in test_pairs: with self.subTest( @@ -315,24 +314,26 @@ def test_make_html_from_matched_citation_objects(self) -> None: # test the rendering of citation objects that we assert are correctly # matched. (No matching is performed in the previous cases.) # fmt: off - + case_name = "Example vs. Example" + aria_description = f'aria-description="Citation for case: {case_name}"' test_pairs = [ # Id. citation with page number ("Id., at 123, 124") ('asdf, Id., at 123, 124. Lorem ipsum dolor sit amet', '
    asdf, 
    Id., at 123, 124
    . Lorem ipsum dolor sit amet
    '), + f'MATCH_ID">' + 'Id., at 123, 124
    . '
    +             'Lorem ipsum dolor sit amet
    '), # Id. citation with complex page number ("Id. @ 123:1, ¶¶ 124") ('asdf, Id. @ 123:1, ¶¶ 124. Lorem ipsum dolor sit amet', '
    asdf, 
    Id.
    Id.
     @ 123:1, ¶¶ 124. Lorem ipsum dolor sit amet
    '), # Id. citation without page number ("Id. Something else") ('asdf, Id. Lorem ipsum dolor sit amet', '
    asdf, 
    Id.
    '
    +             f'MATCH_ID">Id.
    '
                  ' Lorem ipsum dolor sit amet
    '), ] @@ -355,7 +356,9 @@ def test_make_html_from_matched_citation_objects(self) -> None: # to receive. Also make sure that the "matched" opinion is # mocked appropriately. opinion.pk = "MATCH_ID" - opinion.cluster = Mock(OpinionCluster(id=24601)) + opinion.cluster = Mock( + OpinionCluster(id=24601), case_name=case_name + ) opinion.cluster.get_absolute_url.return_value = "MATCH_URL" citation_resolutions = {opinion: citations} From e20cdd5eaf5eb06f87c6edb81cde2fd66c1c80e1 Mon Sep 17 00:00:00 2001 From: JVM <34409368+tactipus@users.noreply.github.com> Date: Thu, 16 Jan 2025 13:34:41 -0500 Subject: [PATCH 14/34] delete dummy env file --- .env.example | 47 ----------------------------------------------- 1 file changed, 47 deletions(-) delete mode 100644 .env.example diff --git a/.env.example b/.env.example deleted file mode 100644 index 9d2e5980c2..0000000000 --- a/.env.example +++ /dev/null @@ -1,47 +0,0 @@ -# See: https://django-environ.readthedocs.io/en/latest/types.html - -# money.py -EIN_SECRET="" - -# security.py -#ALLOWED_HOSTS="*" # <-- Uncomment this in dev - -# testing.py -TESTING_DEBUG=off - -# aws.py -AWS_DEV_ACCESS_KEY_ID="" -AWS_DEV_SECRET_ACCESS_KEY="" -AWS_ACCESS_KEY_ID="" -AWS_SECRET_ACCESS_KEY="" -AWS_LAMBDA_PROXY_URL="" - -# hcaptcha.py -HCAPTCHA_SITEKEY="" -HCAPTCHA_SECRET="" - -# sentry.py -SENTRY_DSN="" -SENTRY_REPORT_URI="" - -# django.py -# SECRET_KEY="" # <-- Uncomment and set this in dev -DEBUG=on -DEVELOPMENT=on - -# misc.py -PLAUSIBLE_API_TOKEN="" - -PACER_USERNAME="" -PACER_PASSWORD="" - -LASC_USERNAME="" -LASC_PASSWORD="" - -IA_ACCESS_KEY="" -IA_SECRET_KEY="" - -FTM_KEY="" - -# CL API key for cloning data -CL_API_TOKEN="" From aa593de9579c7efe1866dca23377ef4fa03e2352 Mon Sep 17 00:00:00 2001 From: "Judith V. Moreno" <34409368+tactipus@users.noreply.github.com> Date: Thu, 16 Jan 2025 18:27:55 -0500 Subject: [PATCH 15/34] updated to v4 --- .../management/commands/clone_from_cl.py | 30 +++++++++---------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/cl/scrapers/management/commands/clone_from_cl.py b/cl/scrapers/management/commands/clone_from_cl.py index 97018a7b21..d5ad6cbd98 100644 --- a/cl/scrapers/management/commands/clone_from_cl.py +++ b/cl/scrapers/management/commands/clone_from_cl.py @@ -151,7 +151,7 @@ def clone_opinion_cluster( cluster_path = reverse( "opinioncluster-detail", - kwargs={"version": "v3", "pk": cluster_id}, + kwargs={"version": "v4", "pk": cluster_id}, ) cluster_url = f"{domain}{cluster_path}" cluster_datum = get_json_data(cluster_url, session) @@ -364,7 +364,7 @@ def clone_docket( model = apps.get_model(object_type) docket_path = reverse( "docket-detail", - kwargs={"version": "v3", "pk": docket_id}, + kwargs={"version": "v4", "pk": docket_id}, ) docket_url = f"{domain}{docket_path}" docket_data = None @@ -555,7 +555,7 @@ def clone_docket_entries( docket_entry_path = reverse( "docketentry-list", - kwargs={"version": "v3"}, + kwargs={"version": "v4"}, ) # Get list of docket entries using docket id @@ -661,7 +661,7 @@ def clone_recap_documents( "View cloned recap document here:", reverse( "recapdocument-detail", - args=["v3", recap_document_data["id"]], + args=["v4", recap_document_data["id"]], ), ) @@ -697,7 +697,7 @@ def clone_tag( # Create tag tag_path = reverse( "tag-detail", - kwargs={"version": "v3", "pk": tag_id}, + kwargs={"version": "v4", "pk": tag_id}, ) tag_url = f"{domain}{tag_path}" tag_data = get_json_data(tag_url, session) @@ -714,7 +714,7 @@ def clone_tag( print( "View cloned tag here:", - 
reverse("tag-detail", args=["v3", tag_id]), + reverse("tag-detail", args=["v4", tag_id]), ) return created_tags @@ -744,7 +744,7 @@ def clone_position( position = model.objects.get(pk=position_id, person_id=person_id) print( "Position already exists here:", - reverse("position-detail", args=["v3", position.pk]), + reverse("position-detail", args=["v4", position.pk]), ) continue except model.DoesNotExist: @@ -753,7 +753,7 @@ def clone_position( # Create position position_path = reverse( "position-detail", - kwargs={"version": "v3", "pk": position_id}, + kwargs={"version": "v4", "pk": position_id}, ) position_url = f"{domain}{position_path}" position_data = get_json_data(position_url, session) @@ -839,7 +839,7 @@ def clone_position( print( "View cloned position here:", - reverse("position-detail", args=["v3", position_id]), + reverse("position-detail", args=["v4", position_id]), ) @@ -870,7 +870,7 @@ def clone_person( person = model.objects.get(pk=person_id) print( "Person already exists here:", - reverse("person-detail", args=["v3", person.pk]), + reverse("person-detail", args=["v4", person.pk]), ) people.append(person) if not positions: @@ -881,7 +881,7 @@ def clone_person( # Create person people_path = reverse( "person-detail", - kwargs={"version": "v3", "pk": person_id}, + kwargs={"version": "v4", "pk": person_id}, ) person_url = f"{domain}{people_path}" @@ -929,7 +929,7 @@ def clone_person( print( "View cloned person here:", - reverse("person-detail", args=["v3", person_id]), + reverse("person-detail", args=["v4", person_id]), ) if person_positions_data: @@ -964,7 +964,7 @@ def clone_court(session: Session, court_ids: list, object_type="search.Court"): courts.append(ct) print( "Court already exists here:", - reverse("court-detail", args=["v3", ct.pk]), + reverse("court-detail", args=["v4", ct.pk]), ) continue except model.DoesNotExist: @@ -973,7 +973,7 @@ def clone_court(session: Session, court_ids: list, object_type="search.Court"): # Create court court_path = reverse( "court-detail", - kwargs={"version": "v3", "pk": court_id}, + kwargs={"version": "v4", "pk": court_id}, ) court_url = f"{domain}{court_path}" court_data = get_json_data(court_url, session) @@ -1017,7 +1017,7 @@ def clone_court(session: Session, court_ids: list, object_type="search.Court"): courts.append(ct) print( "View cloned court here:", - reverse("court-detail", args=["v3", court_id]), + reverse("court-detail", args=["v4", court_id]), ) return courts From b7ff97f776ea407b3702824f81dcac5e85277b26 Mon Sep 17 00:00:00 2001 From: v_anne <69829523+v-anne@users.noreply.github.com> Date: Thu, 16 Jan 2025 21:09:18 -0500 Subject: [PATCH 16/34] caching user prayer stats --- cl/favorites/templates/user_prayers.html | 2 +- cl/favorites/tests.py | 18 ++++++------- cl/favorites/utils.py | 32 ++++++++++++++++++------ cl/favorites/views.py | 5 ++-- 4 files changed, 37 insertions(+), 20 deletions(-) diff --git a/cl/favorites/templates/user_prayers.html b/cl/favorites/templates/user_prayers.html index 52c2a7e5cf..f36a272793 100644 --- a/cl/favorites/templates/user_prayers.html +++ b/cl/favorites/templates/user_prayers.html @@ -14,7 +14,7 @@ {% block content %}

    {% if is_page_owner %}Your PACER Document Prayers{% else %}PACER Document Requests for: {{ requested_user }}{% endif %}

    - {% if is_page_owner %}

    {{ count|intcomma }} {{ count|pluralize:"prayer,prayers" }} granted totaling ${{total_cost|floatformat:2 }} ({{ num_remaining }} remaining today).

    {% endif %} + {% if is_page_owner %}

    {{ user_history.prayer_count|intcomma }} {{ user_history.prayer_count|pluralize:"prayer,prayers" }} granted totaling ${{ user_history.total_cost|floatformat:2 }} ({{ num_remaining }} remaining today).

    {% endif %}
    None: prayer_rd5 = await create_prayer(self.user, self.rd_5) # Verify that the initial prayer count and total cost are 0. - count, total_cost = await get_user_prayer_history(self.user) - self.assertEqual(count, 0) - self.assertEqual(total_cost, 0.0) + user_history = await get_user_prayer_history(self.user) + self.assertEqual(user_history["prayer_count"], 0) + self.assertEqual(user_history["total_cost"], 0.0) # Update `rd_3`'s page count and set `prayer_rd3`'s status to `GRANTED` self.rd_3.page_count = 2 @@ -912,9 +912,9 @@ async def test_get_user_prayer_history(self) -> None: await prayer_rd3.asave() # Verify that the count is 1 and total cost is 0.20. - count, total_cost = await get_user_prayer_history(self.user) - self.assertEqual(count, 1) - self.assertEqual(total_cost, 0.20) + user_history = await get_user_prayer_history(self.user) + self.assertEqual(user_history["prayer_count"], 1) + self.assertEqual(user_history["total_cost"], 0.20) # Update `rd_5`'s page count and set `prayer_rd5`'s status to `GRANTED` self.rd_5.page_count = 40 @@ -924,9 +924,9 @@ async def test_get_user_prayer_history(self) -> None: await prayer_rd5.asave() # Verify that the count is 2 and the total cost is now 3.20. - count, total_cost = await get_user_prayer_history(self.user) - self.assertEqual(count, 2) - self.assertEqual(total_cost, 3.20) + user_history = await get_user_prayer_history(self.user) + self.assertEqual(user_history["prayer_count"], 2) + self.assertEqual(user_history["total_cost"], 3.20) @patch("cl.favorites.utils.cache.aget") async def test_get_lifetime_prayer_stats(self, mock_cache_aget) -> None: diff --git a/cl/favorites/utils.py b/cl/favorites/utils.py index 7de09fbb7e..d5439bc3e7 100644 --- a/cl/favorites/utils.py +++ b/cl/favorites/utils.py @@ -291,8 +291,23 @@ def send_prayer_emails(instance: RECAPDocument) -> None: connection = get_connection() connection.send_messages(messages) +@dataclass +class PrayerStats: + prayer_count: int + distinct_count: int + total_cost: str + + +async def get_user_prayer_history(user: User) -> ( + PrayerStats +): + + cache_key = f"prayer-stats-{user}" + + data = await cache.aget(cache_key) + if data is not None: + return PrayerStats(**data) -async def get_user_prayer_history(user: User) -> tuple[int, float]: filtered_list = Prayer.objects.filter( user=user, status=Prayer.GRANTED ).select_related("recap_document") @@ -301,14 +316,17 @@ async def get_user_prayer_history(user: User) -> tuple[int, float]: total_cost = await compute_prayer_total_cost(filtered_list) - return count, total_cost + # return count, total_cost + data = { + "prayer_count": count, + "distinct_count": "", + "total_cost": f"{total_cost:,.2f}", + } + one_day = 60 * 60 * 24 + await cache.aset(cache_key, data, one_day) -@dataclass -class PrayerStats: - prayer_count: int - distinct_count: int - total_cost: str + return PrayerStats(**data) async def get_lifetime_prayer_stats( diff --git a/cl/favorites/views.py b/cl/favorites/views.py index 7cd880e1af..291059b52a 100644 --- a/cl/favorites/views.py +++ b/cl/favorites/views.py @@ -289,7 +289,7 @@ async def user_prayers_view( rd_with_prayers = await get_user_prayers(requested_user) - count, total_cost = await get_user_prayer_history(requested_user) + user_history = await get_user_prayer_history(requested_user) is_eligible, num_remaining = await prayer_eligible(requested_user) @@ -297,8 +297,7 @@ async def user_prayers_view( "rd_with_prayers": rd_with_prayers, "requested_user": requested_user, "is_page_owner": is_page_owner, - "count": count, - 
"total_cost": total_cost, + "user_history": user_history, "is_eligible": is_eligible, "num_remaining": num_remaining, "private": False, From 84b5d67dff434f140f008391425d321cae0991b3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 17 Jan 2025 02:09:59 +0000 Subject: [PATCH 17/34] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- cl/favorites/utils.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/cl/favorites/utils.py b/cl/favorites/utils.py index d5439bc3e7..a69d88fbe6 100644 --- a/cl/favorites/utils.py +++ b/cl/favorites/utils.py @@ -291,6 +291,7 @@ def send_prayer_emails(instance: RECAPDocument) -> None: connection = get_connection() connection.send_messages(messages) + @dataclass class PrayerStats: prayer_count: int @@ -298,10 +299,8 @@ class PrayerStats: total_cost: str -async def get_user_prayer_history(user: User) -> ( - PrayerStats -): - +async def get_user_prayer_history(user: User) -> PrayerStats: + cache_key = f"prayer-stats-{user}" data = await cache.aget(cache_key) From 1b047f735788ff32ca7adf073f3d422659d1dc10 Mon Sep 17 00:00:00 2001 From: v_anne <69829523+v-anne@users.noreply.github.com> Date: Thu, 16 Jan 2025 21:12:37 -0500 Subject: [PATCH 18/34] changing class in dropdown --- cl/assets/templates/base.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cl/assets/templates/base.html b/cl/assets/templates/base.html index 5726b46ae2..b1646e4360 100644 --- a/cl/assets/templates/base.html +++ b/cl/assets/templates/base.html @@ -143,7 +143,7 @@

    You did not supply the "private" variable to your template.
  •  Tags
  •  Prayers
  • + tabindex="206"> Prayers

  •  Your Support
  • From d7431987d1ac55bb5f1fb1f65fddfd5cd935d8c0 Mon Sep 17 00:00:00 2001 From: v_anne <69829523+v-anne@users.noreply.github.com> Date: Thu, 16 Jan 2025 21:35:49 -0500 Subject: [PATCH 19/34] fixing syntax --- cl/favorites/tests.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/cl/favorites/tests.py b/cl/favorites/tests.py index ff04b2286e..62b8e08981 100644 --- a/cl/favorites/tests.py +++ b/cl/favorites/tests.py @@ -901,8 +901,8 @@ async def test_get_user_prayer_history(self) -> None: # Verify that the initial prayer count and total cost are 0. user_history = await get_user_prayer_history(self.user) - self.assertEqual(user_history["prayer_count"], 0) - self.assertEqual(user_history["total_cost"], 0.0) + self.assertEqual(user_history.prayer_count, 0) + self.assertEqual(user_history.total_cost, 0.0) # Update `rd_3`'s page count and set `prayer_rd3`'s status to `GRANTED` self.rd_3.page_count = 2 @@ -913,8 +913,8 @@ async def test_get_user_prayer_history(self) -> None: # Verify that the count is 1 and total cost is 0.20. user_history = await get_user_prayer_history(self.user) - self.assertEqual(user_history["prayer_count"], 1) - self.assertEqual(user_history["total_cost"], 0.20) + self.assertEqual(user_history.prayer_count, 1) + self.assertEqual(user_history.total_cost, 0.20) # Update `rd_5`'s page count and set `prayer_rd5`'s status to `GRANTED` self.rd_5.page_count = 40 @@ -925,8 +925,8 @@ async def test_get_user_prayer_history(self) -> None: # Verify that the count is 2 and the total cost is now 3.20. user_history = await get_user_prayer_history(self.user) - self.assertEqual(user_history["prayer_count"], 2) - self.assertEqual(user_history["total_cost"], 3.20) + self.assertEqual(user_history.prayer_count, 2) + self.assertEqual(user_history.total_cost, 3.20) @patch("cl.favorites.utils.cache.aget") async def test_get_lifetime_prayer_stats(self, mock_cache_aget) -> None: From f876c5f28edcffa3fe1bd5eea4ef5b91032be0fc Mon Sep 17 00:00:00 2001 From: v_anne <69829523+v-anne@users.noreply.github.com> Date: Thu, 16 Jan 2025 21:49:55 -0500 Subject: [PATCH 20/34] need two decimal places in test --- cl/favorites/tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cl/favorites/tests.py b/cl/favorites/tests.py index 62b8e08981..0222215491 100644 --- a/cl/favorites/tests.py +++ b/cl/favorites/tests.py @@ -902,7 +902,7 @@ async def test_get_user_prayer_history(self) -> None: # Verify that the initial prayer count and total cost are 0. user_history = await get_user_prayer_history(self.user) self.assertEqual(user_history.prayer_count, 0) - self.assertEqual(user_history.total_cost, 0.0) + self.assertEqual(user_history.total_cost, 0.00) # Update `rd_3`'s page count and set `prayer_rd3`'s status to `GRANTED` self.rd_3.page_count = 2 From bc87e570aed40de4171162e8cdb176cb65724ee7 Mon Sep 17 00:00:00 2001 From: v_anne <69829523+v-anne@users.noreply.github.com> Date: Thu, 16 Jan 2025 22:05:29 -0500 Subject: [PATCH 21/34] str, not float (hopefully last issue!) --- cl/favorites/tests.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cl/favorites/tests.py b/cl/favorites/tests.py index 0222215491..35b1a5a7ac 100644 --- a/cl/favorites/tests.py +++ b/cl/favorites/tests.py @@ -902,7 +902,7 @@ async def test_get_user_prayer_history(self) -> None: # Verify that the initial prayer count and total cost are 0. 
user_history = await get_user_prayer_history(self.user) self.assertEqual(user_history.prayer_count, 0) - self.assertEqual(user_history.total_cost, 0.00) + self.assertEqual(user_history.total_cost, "0.00") # Update `rd_3`'s page count and set `prayer_rd3`'s status to `GRANTED` self.rd_3.page_count = 2 @@ -914,7 +914,7 @@ async def test_get_user_prayer_history(self) -> None: # Verify that the count is 1 and total cost is 0.20. user_history = await get_user_prayer_history(self.user) self.assertEqual(user_history.prayer_count, 1) - self.assertEqual(user_history.total_cost, 0.20) + self.assertEqual(user_history.total_cost, "0.20") # Update `rd_5`'s page count and set `prayer_rd5`'s status to `GRANTED` self.rd_5.page_count = 40 @@ -926,7 +926,7 @@ async def test_get_user_prayer_history(self) -> None: # Verify that the count is 2 and the total cost is now 3.20. user_history = await get_user_prayer_history(self.user) self.assertEqual(user_history.prayer_count, 2) - self.assertEqual(user_history.total_cost, 3.20) + self.assertEqual(user_history.total_cost, "3.20") @patch("cl.favorites.utils.cache.aget") async def test_get_lifetime_prayer_stats(self, mock_cache_aget) -> None: From a59f1e23d01a9c12a23f5abff55dfcc7917e7c51 Mon Sep 17 00:00:00 2001 From: v_anne <69829523+v-anne@users.noreply.github.com> Date: Thu, 16 Jan 2025 22:27:56 -0500 Subject: [PATCH 22/34] clearing cache to account for test not waiting 24 hours --- cl/favorites/tests.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/cl/favorites/tests.py b/cl/favorites/tests.py index 35b1a5a7ac..4379693479 100644 --- a/cl/favorites/tests.py +++ b/cl/favorites/tests.py @@ -891,8 +891,8 @@ async def test_get_top_prayers_by_number_and_age(self) -> None: async def test_get_user_prayer_history(self) -> None: """Does the get_user_prayer_history method work properly?""" - # Prayers for user_2 - await create_prayer(self.user_2, self.rd_4) + # # Prayers for user_2 + # await create_prayer(self.user_2, self.rd_4) # Prayers for user await create_prayer(self.user, self.rd_2) @@ -911,6 +911,9 @@ async def test_get_user_prayer_history(self) -> None: prayer_rd3.status = Prayer.GRANTED await prayer_rd3.asave() + # Clear cache for this specific user + await cache.adelete(f"prayer-stats-{self.user}") + # Verify that the count is 1 and total cost is 0.20. user_history = await get_user_prayer_history(self.user) self.assertEqual(user_history.prayer_count, 1) @@ -923,6 +926,9 @@ async def test_get_user_prayer_history(self) -> None: prayer_rd5.status = Prayer.GRANTED await prayer_rd5.asave() + # Clear cache for this specific user + await cache.adelete(f"prayer-stats-{self.user}") + # Verify that the count is 2 and the total cost is now 3.20. user_history = await get_user_prayer_history(self.user) self.assertEqual(user_history.prayer_count, 2) From 81af391d1558287d8405c6928b3430cf37b0d25a Mon Sep 17 00:00:00 2001 From: v_anne <69829523+v-anne@users.noreply.github.com> Date: Thu, 16 Jan 2025 23:08:08 -0500 Subject: [PATCH 23/34] formatting taken care of earlier --- cl/favorites/templates/user_prayers.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cl/favorites/templates/user_prayers.html b/cl/favorites/templates/user_prayers.html index f36a272793..9005ef771f 100644 --- a/cl/favorites/templates/user_prayers.html +++ b/cl/favorites/templates/user_prayers.html @@ -14,7 +14,7 @@ {% block content %}

 {% if is_page_owner %}Your PACER Document Prayers{% else %}PACER Document Requests for: {{ requested_user }}{% endif %}
-  {% if is_page_owner %}{{ user_history.prayer_count|intcomma }} {{ user_history.prayer_count|pluralize:"prayer,prayers" }} granted totaling ${{ user_history.total_cost|floatformat:2 }} ({{ num_remaining }} remaining today).{% endif %}
+  {% if is_page_owner %}{{ user_history.prayer_count|intcomma }} {{ user_history.prayer_count|pluralize:"prayer,prayers" }} granted totaling ${{ user_history.total_cost }} ({{ num_remaining }} remaining today).{% endif %}
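A note on the shape implied by the prayer-history patches above: `get_user_prayer_history` appears to return a `PrayerStats` value whose `total_cost` is already formatted as a string, which is why the tests compare against `"0.00"`/`"3.20"` and why the template no longer needs `floatformat:2`. A minimal sketch reconstructed from the diffs, assuming Django's async cache API; the page-count aggregation and the `document_cost` helper are illustrative stand-ins, not code from this PR:

```python
from dataclasses import dataclass

from django.contrib.auth.models import User
from django.core.cache import cache  # Django's async cache API (aget/aset)


@dataclass
class PrayerStats:
    prayer_count: int
    distinct_count: str
    total_cost: str  # pre-formatted, e.g. "3.20", rendered directly by the template


def document_cost(page_count: int) -> float:
    # PACER pricing: $0.10 per page, capped at $3.00 per document.
    # 2 pages -> 0.20, 40 pages -> 3.00, matching the totals in the tests above.
    return min(page_count * 0.10, 3.00)


async def get_user_prayer_history(user: User) -> PrayerStats:
    """Granted-prayer stats for a user, cached briefly per user (sketch)."""
    cache_key = f"prayer-stats-{user}"
    cached = await cache.aget(cache_key)
    if cached is not None:
        return PrayerStats(**cached)

    # Hypothetical stand-in for the real ORM aggregation over granted prayers.
    granted_page_counts: list[int] = []
    count = len(granted_page_counts)
    total_cost = sum(document_cost(pages) for pages in granted_page_counts)

    data = {
        "prayer_count": count,
        "distinct_count": "",
        "total_cost": f"{total_cost:,.2f}",
    }
    await cache.aset(cache_key, data, 60)  # short TTL so newly granted prayers show up quickly
    return PrayerStats(**data)
```

Formatting the cost once, at the cache boundary, keeps the tests, the template, and any other consumers agreeing on rounding; the short TTL is also why the tests clear `prayer-stats-{user}` between assertions.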
    Date: Fri, 17 Jan 2025 09:50:52 -0400 Subject: [PATCH 24/34] fix(corpus_importer): Updates get_att_report_by_rd method --- cl/corpus_importer/tasks.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/cl/corpus_importer/tasks.py b/cl/corpus_importer/tasks.py index 3d3d7e8cbe..c650e91d30 100644 --- a/cl/corpus_importer/tasks.py +++ b/cl/corpus_importer/tasks.py @@ -1794,12 +1794,11 @@ def get_att_report_by_rd( cookies=session_data.cookies, proxy=session_data.proxy_address ) pacer_court_id = map_cl_to_pacer_id(rd.docket_entry.docket.court_id) - att_report = AttachmentPage(pacer_court_id, s) - att_report.query(rd.pacer_doc_id) if is_appellate_court(pacer_court_id): att_report = AppellateAttachmentPage(pacer_court_id, s) else: att_report = AttachmentPage(pacer_court_id, s) + att_report.query(rd.pacer_doc_id) return att_report From 26c883a457faa8ca8a889e0845d02ba5eff36c29 Mon Sep 17 00:00:00 2001 From: Eduardo Rosendo Date: Fri, 17 Jan 2025 10:50:36 -0400 Subject: [PATCH 25/34] feat(corpus_importer): Add async version of is_appellate_court helper --- cl/corpus_importer/utils.py | 12 ++++++++++++ cl/recap/mergers.py | 8 +++----- cl/recap/tasks.py | 8 ++++++-- 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/cl/corpus_importer/utils.py b/cl/corpus_importer/utils.py index 40fa0265a0..966a98c7bc 100644 --- a/cl/corpus_importer/utils.py +++ b/cl/corpus_importer/utils.py @@ -119,6 +119,18 @@ def is_appellate_court(court_id: str) -> bool: return appellate_court_ids.filter(pk=court_id).exists() +async def ais_appellate_court(court_id: str) -> bool: + """Checks if the given court_id belongs to an appellate court. + + :param court_id: The unique identifier of the court. + + :return: True if the court_id corresponds to an appellate court, + False otherwise. + """ + appellate_court_ids = Court.federal_courts.appellate_pacer_courts() + return await appellate_court_ids.filter(pk=court_id).aexists() + + def get_start_of_quarter(d: Optional[date] = None) -> date: """Get the start date of the calendar quarter requested diff --git a/cl/recap/mergers.py b/cl/recap/mergers.py index 6a1622810e..da7e3370e8 100644 --- a/cl/recap/mergers.py +++ b/cl/recap/mergers.py @@ -14,7 +14,7 @@ from juriscraper.lib.string_utils import CaseNameTweaker from juriscraper.pacer import AppellateAttachmentPage, AttachmentPage -from cl.corpus_importer.utils import is_appellate_court, mark_ia_upload_needed +from cl.corpus_importer.utils import ais_appellate_court, mark_ia_upload_needed from cl.lib.decorators import retry from cl.lib.filesizes import convert_size_to_bytes from cl.lib.model_helpers import clean_docket_number, make_docket_number_core @@ -943,9 +943,7 @@ async def add_docket_entries( # RDs. The check here ensures that if that happens for a particular # entry, we avoid creating the main RD a second+ time when we get the # docket sheet a second+ time. 
- appelate_court_id_exists = await sync_to_async(is_appellate_court)( - d.court_id - ) + appelate_court_id_exists = await ais_appellate_court(d.court_id) if de_created is False and appelate_court_id_exists: appellate_rd_att_exists = await de.recap_documents.filter( document_type=RECAPDocument.ATTACHMENT @@ -1787,7 +1785,7 @@ async def merge_attachment_page_data( ContentFile(text.encode()), ) - court_is_appellate = await sync_to_async(is_appellate_court)(court.pk) + court_is_appellate = await ais_appellate_court(court.pk) main_rd_to_att = False for attachment in attachment_dicts: sanity_checks = [ diff --git a/cl/recap/tasks.py b/cl/recap/tasks.py index 8132acca43..69b2ef3660 100644 --- a/cl/recap/tasks.py +++ b/cl/recap/tasks.py @@ -54,7 +54,11 @@ make_attachment_pq_object, update_rd_metadata, ) -from cl.corpus_importer.utils import is_appellate_court, mark_ia_upload_needed +from cl.corpus_importer.utils import ( + ais_appellate_court, + is_appellate_court, + mark_ia_upload_needed, +) from cl.custom_filters.templatetags.text_filters import oxford_join from cl.lib.filesizes import convert_size_to_bytes from cl.lib.microservice_utils import microservice @@ -763,7 +767,7 @@ async def find_subdocket_pdf_rds( pq.pk ] # Add the original pq to the list of pqs to process - if await sync_to_async(is_appellate_court)(pq.court_id): + if await ais_appellate_court(pq.court_id): # Abort the process for appellate documents. Subdockets cannot be found # in appellate cases. return pqs_to_process_pks From b183edcfe0f0da6fc41b4639edd96877460eee98 Mon Sep 17 00:00:00 2001 From: Eduardo Rosendo Date: Fri, 17 Jan 2025 12:15:37 -0400 Subject: [PATCH 26/34] tests(recap): Removes fixture from the RecapPdfFetchApiTest class --- cl/recap/tests.py | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/cl/recap/tests.py b/cl/recap/tests.py index 8245e8f14f..45f24e619a 100644 --- a/cl/recap/tests.py +++ b/cl/recap/tests.py @@ -1698,15 +1698,35 @@ def test_key_serialization_with_client_code(self, mock) -> None: class RecapPdfFetchApiTest(TestCase): """Can we fetch PDFs properly?""" - fixtures = ["recap_docs.json"] - def setUp(self) -> None: + self.docket = DocketFactory( + case_name="United States v. Curlin", + case_name_short="Curlin", + pacer_case_id="28766", + source=Docket.RECAP, + docket_number="3:92-cr-00139-T", + slug="united-states-v-curlin", + ) + self.de = DocketEntryWithParentsFactory( + docket=self.docket, + description=" Memorandum Opinion and Order as to Albert Evans Curlin: Clerk is directed to file a copy of this opinion in the criminal action - Petition in 3:01cv429, filed pursuant to 28:2241, is properly construed as a motion to vacate pursuant to 28:2255 and is denied for failure of pet to file it within the statutory period of limitations. 
(Signed by Judge Jerry Buchmeyer on 7/12/2002) (lrl, )", + entry_number=1, + ) + self.rd = RECAPDocumentFactory( + docket_entry=self.de, + document_number="1", + is_available=True, + is_free_on_pacer=True, + page_count=17, + pacer_doc_id="17701118263", + document_type=RECAPDocument.PACER_DOCUMENT, + ocr_status=4, + ) self.fq = PacerFetchQueue.objects.create( user=User.objects.get(username="recap"), request_type=REQUEST_TYPE.PDF, - recap_document_id=1, + recap_document_id=self.rd.pk, ) - self.rd = self.fq.recap_document def tearDown(self) -> None: RECAPDocument.objects.update(is_available=True) From d3fcf81a9f89461c3ce07e26872a52359f64613b Mon Sep 17 00:00:00 2001 From: JVM <34409368+tactipus@users.noreply.github.com> Date: Fri, 17 Jan 2025 13:08:32 -0500 Subject: [PATCH 27/34] Revert "delete dummy env file" This reverts commit e20cdd5eaf5eb06f87c6edb81cde2fd66c1c80e1. --- .env.example | 47 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 .env.example diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000..9d2e5980c2 --- /dev/null +++ b/.env.example @@ -0,0 +1,47 @@ +# See: https://django-environ.readthedocs.io/en/latest/types.html + +# money.py +EIN_SECRET="" + +# security.py +#ALLOWED_HOSTS="*" # <-- Uncomment this in dev + +# testing.py +TESTING_DEBUG=off + +# aws.py +AWS_DEV_ACCESS_KEY_ID="" +AWS_DEV_SECRET_ACCESS_KEY="" +AWS_ACCESS_KEY_ID="" +AWS_SECRET_ACCESS_KEY="" +AWS_LAMBDA_PROXY_URL="" + +# hcaptcha.py +HCAPTCHA_SITEKEY="" +HCAPTCHA_SECRET="" + +# sentry.py +SENTRY_DSN="" +SENTRY_REPORT_URI="" + +# django.py +# SECRET_KEY="" # <-- Uncomment and set this in dev +DEBUG=on +DEVELOPMENT=on + +# misc.py +PLAUSIBLE_API_TOKEN="" + +PACER_USERNAME="" +PACER_PASSWORD="" + +LASC_USERNAME="" +LASC_PASSWORD="" + +IA_ACCESS_KEY="" +IA_SECRET_KEY="" + +FTM_KEY="" + +# CL API key for cloning data +CL_API_TOKEN="" From ebeba68f6136a194c50b20755312146a84fd1d6b Mon Sep 17 00:00:00 2001 From: Eduardo Rosendo Date: Fri, 17 Jan 2025 15:18:41 -0400 Subject: [PATCH 28/34] tests(recap): Refines RecapAttPageFetchApiTest class - Adds a new test to validate appellate court logic for fetching attachment pages. - Refines the existing test for district courts. - Removes the unnecessary fixture from RecapAttPageFetchApiTest. 
--- cl/recap/tests.py | 126 ++++++++++++++++++++++++++++++++++++++++++---- cl/tests/fakes.py | 19 +++++++ 2 files changed, 135 insertions(+), 10 deletions(-) diff --git a/cl/recap/tests.py b/cl/recap/tests.py index 45f24e619a..e066dfb649 100644 --- a/cl/recap/tests.py +++ b/cl/recap/tests.py @@ -42,6 +42,7 @@ get_next_webhook_retry_date, get_webhook_deprecation_date, ) +from cl.corpus_importer.utils import is_appellate_court from cl.lib.pacer import is_pacer_court_accessible, lookup_and_save from cl.lib.recap_utils import needs_ocr from cl.lib.redis_utils import get_redis_interface @@ -87,6 +88,8 @@ add_docket_entries, add_parties_and_attorneys, find_docket_object, + get_data_from_appellate_att_report, + get_data_from_att_report, get_order_of_docket, merge_attachment_page_data, normalize_long_description, @@ -1777,17 +1780,55 @@ def test_fetch_available_pdf(self, mock_get_cookie, mock_court_accessible): side_effect=lambda a: True, ) class RecapAttPageFetchApiTest(TestCase): - fixtures = ["recap_docs.json"] def setUp(self) -> None: + self.district_court = CourtFactory(jurisdiction=Court.FEDERAL_DISTRICT) + self.district_docket = DocketFactory( + source=Docket.RECAP, court=self.district_court + ) + self.rd = RECAPDocumentFactory( + docket_entry=DocketEntryWithParentsFactory( + docket=self.district_docket, + ), + document_number="1", + is_available=True, + is_free_on_pacer=True, + page_count=17, + pacer_doc_id="17711118263", + document_type=RECAPDocument.PACER_DOCUMENT, + ocr_status=4, + ) self.fq = PacerFetchQueue.objects.create( user=User.objects.get(username="recap"), request_type=REQUEST_TYPE.ATTACHMENT_PAGE, - recap_document_id=1, + recap_document_id=self.rd.pk, + ) + + self.appellate_court = CourtFactory( + id="ca1", jurisdiction=Court.FEDERAL_APPELLATE + ) + self.appellate_docket = DocketFactory( + source=Docket.RECAP, + court=self.appellate_court, + pacer_case_id=41651, + ) + self.rd_appellate = RECAPDocumentFactory( + docket_entry=DocketEntryWithParentsFactory( + docket=self.appellate_docket, entry_number=1208699339 + ), + document_number=1208699339, + pacer_doc_id="1208699339", + attachment_number=1, + is_available=True, + page_count=15, + document_type=RECAPDocument.ATTACHMENT, + ocr_status=4, + ) + self.fq_appellate = PacerFetchQueue.objects.create( + user=User.objects.get(username="recap"), + request_type=REQUEST_TYPE.ATTACHMENT_PAGE, + recap_document_id=self.rd_appellate.pk, ) - self.rd = self.fq.recap_document - self.rd.pacer_doc_id = "17711118263" - self.rd.save() def test_fetch_attachment_page_no_pacer_doc_id( self, mock_court_accessible @@ -1813,6 +1854,10 @@ def test_fetch_att_page_no_cookies(self, mock_court_accessible) -> None: @mock.patch( "cl.recap.tasks.get_pacer_cookie_from_cache", ) + @mock.patch( + "cl.recap.tasks.get_data_from_att_report", + wraps=get_data_from_att_report, + ) @mock.patch( "cl.corpus_importer.tasks.AttachmentPage", new=fakes.FakeAttachmentPage, @@ -1821,13 +1866,14 @@ def test_fetch_att_page_no_cookies(self, mock_court_accessible) -> None: "cl.recap.mergers.AttachmentPage", new=fakes.FakeAttachmentPage ) @mock.patch( - "cl.corpus_importer.tasks.is_appellate_court", return_value=False + "cl.corpus_importer.tasks.is_appellate_court", wraps=is_appellate_court ) - @mock.patch("cl.recap.tasks.is_appellate_court", return_value=False) - def test_fetch_att_page_all_systems_go( + @mock.patch("cl.recap.tasks.is_appellate_court", wraps=is_appellate_court) + def test_fetch_att_page_from_district_court( self, - check_court_task, - check_court_parser, + 
mock_court_check_task, + mock_court_check_parser, + mock_report_parser, mock_get_cookies, mock_court_accessible, ): @@ -1835,9 +1881,69 @@ def test_fetch_att_page_all_systems_go( result.get() self.fq.refresh_from_db() + + # Verify court validation calls with expected court ID + district_court_id = self.rd.docket_entry.docket.court_id + mock_court_check_task.assert_called_with(district_court_id) + mock_court_check_parser.assert_called_with(district_court_id) + + # Ensure correct parser is called exactly once (ideal scenario) + mock_report_parser.assert_called_once() + mock_report_parser.assert_called_with(ANY, district_court_id) + + # Assert successful fetch status and expected message self.assertEqual(self.fq.status, PROCESSING_STATUS.SUCCESSFUL) self.assertIn("Successfully completed fetch", self.fq.message) + @mock.patch( + "cl.recap.tasks.get_pacer_cookie_from_cache", + ) + @mock.patch( + "cl.recap.tasks.get_data_from_appellate_att_report", + wraps=get_data_from_appellate_att_report, + ) + @mock.patch( + "cl.corpus_importer.tasks.AppellateAttachmentPage", + new=fakes.FakeAppellateAttachmentPage, + ) + @mock.patch( + "cl.recap.mergers.AppellateAttachmentPage", + new=fakes.FakeAppellateAttachmentPage, + ) + @mock.patch( + "cl.corpus_importer.tasks.is_appellate_court", wraps=is_appellate_court + ) + @mock.patch("cl.recap.tasks.is_appellate_court", wraps=is_appellate_court) + def test_fetch_att_page_from_appellate( + self, + mock_court_check_task, + mock_court_check_parser, + mock_report_parser, + mock_get_cookies, + mock_court_accessible, + ): + result = do_pacer_fetch(self.fq_appellate) + result.get() + + self.fq_appellate.refresh_from_db() + + # Verify court validation calls with expected court ID + appellate_court_id = self.rd_appellate.docket_entry.docket.court_id + mock_court_check_task.assert_called_with(appellate_court_id) + mock_court_check_parser.assert_called_with(appellate_court_id) + + # Ensure correct parser is called exactly once (ideal scenario) + mock_report_parser.assert_called_once() + mock_report_parser.assert_called_with(ANY, appellate_court_id) + + # Assert successful fetch status and expected message + self.assertEqual( + self.fq_appellate.status, PROCESSING_STATUS.SUCCESSFUL + ) + self.assertIn( + "Successfully completed fetch", self.fq_appellate.message + ) + class ProcessingQueueApiFilterTest(TestCase): def setUp(self) -> None: diff --git a/cl/tests/fakes.py b/cl/tests/fakes.py index 155d4e3aa9..a0926d9889 100644 --- a/cl/tests/fakes.py +++ b/cl/tests/fakes.py @@ -79,6 +79,25 @@ def data(self, *args, **kwargs): } +class FakeAppellateAttachmentPage: + response = MagicMock(text="") + _parse_text = MagicMock() + + def __init__(self, *args, **kwargs): + pass + + def query(self, *args, **kwargs): + pass + + @property + def data(self, *args, **kwargs): + return { + "pacer_doc_id": "1208699339", + "document_number": "1", + "attachments": [], + } + + class FakeFreeOpinionReport: def __init__(self, *args, **kwargs): pass From 05550e82a8bb8a9f6830242554b21ba32b612141 Mon Sep 17 00:00:00 2001 From: Eduardo Rosendo Date: Fri, 17 Jan 2025 15:28:48 -0400 Subject: [PATCH 29/34] feat(recap): Avoid trying to purchase ACMS attachment pages. 
--- cl/recap/tasks.py | 5 +++++ cl/recap/tests.py | 20 ++++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/cl/recap/tasks.py b/cl/recap/tasks.py index 69b2ef3660..72903f85f6 100644 --- a/cl/recap/tasks.py +++ b/cl/recap/tasks.py @@ -1995,6 +1995,11 @@ def fetch_attachment_page(self: Task, fq_pk: int) -> None: mark_fq_status(fq, msg, PROCESSING_STATUS.NEEDS_INFO) return + if rd.pacer_doc_id.count("-") > 1: + msg = "ACMS attachment pages are not currently supported" + mark_fq_status(fq, msg, PROCESSING_STATUS.FAILED) + return + session_data = get_pacer_cookie_from_cache(fq.user_id) if not session_data: msg = "Unable to find cached cookies. Aborting request." diff --git a/cl/recap/tests.py b/cl/recap/tests.py index e066dfb649..fd59adfe12 100644 --- a/cl/recap/tests.py +++ b/cl/recap/tests.py @@ -1851,6 +1851,26 @@ def test_fetch_att_page_no_cookies(self, mock_court_accessible) -> None: self.assertEqual(self.fq.status, PROCESSING_STATUS.FAILED) self.assertIn("Unable to find cached cookies", self.fq.message) + def test_fetch_acms_att_page(self, mock_court_accessible) -> None: + rd_acms = RECAPDocumentFactory( + docket_entry=DocketEntryWithParentsFactory(docket=DocketFactory()), + pacer_doc_id="784459c4-e2cd-ef11-b8e9-001dd804c0b4", + ) + fq_acms = PacerFetchQueue.objects.create( + user=User.objects.get(username="recap"), + request_type=REQUEST_TYPE.ATTACHMENT_PAGE, + recap_document_id=rd_acms.pk, + ) + result = do_pacer_fetch(fq_acms) + result.get() + + fq_acms.refresh_from_db() + self.assertEqual(fq_acms.status, PROCESSING_STATUS.FAILED) + self.assertIn( + "ACMS attachment pages are not currently supported", + fq_acms.message, + ) + @mock.patch( "cl.recap.tasks.get_pacer_cookie_from_cache", ) From 5aef8ff5057514988ef7d8ffc4aa7098f07989d3 Mon Sep 17 00:00:00 2001 From: v_anne <69829523+v-anne@users.noreply.github.com> Date: Sat, 18 Jan 2025 12:11:43 -0500 Subject: [PATCH 30/34] updating cache and deleting unneeded code --- cl/favorites/utils.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/cl/favorites/utils.py b/cl/favorites/utils.py index a69d88fbe6..a2d911ccee 100644 --- a/cl/favorites/utils.py +++ b/cl/favorites/utils.py @@ -315,15 +315,13 @@ async def get_user_prayer_history(user: User) -> PrayerStats: total_cost = await compute_prayer_total_cost(filtered_list) - # return count, total_cost - data = { "prayer_count": count, "distinct_count": "", "total_cost": f"{total_cost:,.2f}", } - one_day = 60 * 60 * 24 - await cache.aset(cache_key, data, one_day) + one_minute = 60 + await cache.aset(cache_key, data, one_minute) return PrayerStats(**data) From 80392a78ce8d50a57c9b17e4319eae4c044feedc Mon Sep 17 00:00:00 2001 From: v_anne <69829523+v-anne@users.noreply.github.com> Date: Sat, 18 Jan 2025 16:19:28 -0500 Subject: [PATCH 31/34] hiding prayers from dropdown --- cl/assets/templates/base.html | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cl/assets/templates/base.html b/cl/assets/templates/base.html index b1646e4360..a4a3ba7436 100644 --- a/cl/assets/templates/base.html +++ b/cl/assets/templates/base.html @@ -142,8 +142,10 @@

 You did not supply the "private" variable to your template.
  tabindex="204"> Notes
  Tags
+ {% flag "pray-and-pay" %}
  Prayers
+ {% endflag %}
  Your Support
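The `{% flag "pray-and-pay" %} ... {% endflag %}` block added above looks like django-waffle's flag tag, which gates markup on a feature flag. If the same gate were ever needed in Python rather than in a template, a sketch along these lines would do it (the view name and 404 behavior are illustrative assumptions, not part of this PR):

```python
import waffle
from django.http import Http404, HttpRequest, HttpResponse
from django.shortcuts import render


def user_prayers(request: HttpRequest, username: str) -> HttpResponse:
    # Mirror the template gate: hide the page entirely unless the
    # "pray-and-pay" flag is active for this request/user.
    if not waffle.flag_is_active(request, "pray-and-pay"):
        raise Http404
    return render(request, "user_prayers.html", {"requested_user": username})
```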
From abac8dec347effd73a0ce75c116c1232515db6c9 Mon Sep 17 00:00:00 2001
From: Eduardo Rosendo
Date: Sat, 18 Jan 2025 23:51:29 -0400
Subject: [PATCH 32/34] feat(templates): Adds prayer icon to the profile dropdown

---
 cl/assets/templates/base.html                       | 9 +++++++--
 cl/assets/templates/includes/hand-holding-heart.svg | 1 +
 2 files changed, 8 insertions(+), 2 deletions(-)
 create mode 100644 cl/assets/templates/includes/hand-holding-heart.svg

diff --git a/cl/assets/templates/base.html b/cl/assets/templates/base.html
index a4a3ba7436..6b9f7fccba 100644
--- a/cl/assets/templates/base.html
+++ b/cl/assets/templates/base.html
@@ -143,8 +143,13 @@

 You did not supply the "private" variable to your template.
  Tags
 {% flag "pray-and-pay" %}
-  Prayers
+
+
+ {% include "includes/hand-holding-heart.svg" %}
+
+  Prayers
+
+
 {% endflag %}
From 752ff82f135ea0d3f2f67bdfc77ad818ebe94a66 Mon Sep 17 00:00:00 2001
From: ERosendo <55959657+ERosendo@users.noreply.github.com>
Date: Mon, 20 Jan 2025 19:30:08 +0000
Subject: [PATCH 33/34] Update freelawproject dependencies

---
 poetry.lock | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 6de1ee12c8..b98a3b3e6e 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2437,14 +2437,14 @@ setuptools = "*"

 [[package]]
 name = "juriscraper"
-version = "2.6.51"
+version = "2.6.52"
 description = "An API to scrape American court websites for metadata."
 optional = false
 python-versions = "*"
 groups = ["main"]
 files = [
-    {file = "juriscraper-2.6.51-py27-none-any.whl", hash = "sha256:e0d216104ee82b1ef1a28e09a8e099c1573dd8bb7d8d888e63bfda43d5dc3b96"},
-    {file = "juriscraper-2.6.51.tar.gz", hash = "sha256:be5e49243479b21433e05a484c322fe79c8b7f1a567d3d7e473ed0615adcaa20"},
+    {file = "juriscraper-2.6.52-py27-none-any.whl", hash = "sha256:3e9c0fbcb7b4ee5b338c8f24c5b0f6697e6bc0bb438add639fc06d1e38dceb6f"},
+    {file = "juriscraper-2.6.52.tar.gz", hash = "sha256:cc86c4fb1c4a94c64d2a09e253fcd21fe2787ad5a4fc4681daaec4b650230354"},
 ]

 [package.dependencies]

From 53c1165217428ba4b146fb7864b3cc0182913703 Mon Sep 17 00:00:00 2001
From: mlissner
Date: Wed, 22 Jan 2025 15:45:58 -0800
Subject: [PATCH 34/34] feat(api): Block users

---
 cl/settings/third_party/rest_framework.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/cl/settings/third_party/rest_framework.py b/cl/settings/third_party/rest_framework.py
index b8dc60f4d8..e5de46278f 100644
--- a/cl/settings/third_party/rest_framework.py
+++ b/cl/settings/third_party/rest_framework.py
@@ -48,6 +48,7 @@
         "PeterPan": "1/hour",
         "HomerSimpson": "1/hour",
         "BruceWayne": "1/hour",
+        "mibefis809": "1/hour",  # Unresponsive
         "court_test_account": "1/hour",
         "jmmckinnie": "1/hour",
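The `"mibefis809": "1/hour"` entry extends a map of usernames to throttle rates in the DRF settings. One common way such a map gets applied is a custom throttle that swaps in the per-user rate before the normal bookkeeping runs; the sketch below is only an illustration under that assumption (the `OVERRIDE_THROTTLE_RATES` key, class name, and default rate are hypothetical, not taken from this PR):

```python
from django.conf import settings
from rest_framework.throttling import UserRateThrottle


class PerUserOverrideThrottle(UserRateThrottle):
    """Throttle that lets settings pin specific usernames to a custom rate."""

    rate = "1000/hour"  # placeholder default for everyone else

    def allow_request(self, request, view):
        # Hypothetical settings key mapping usernames to rates,
        # e.g. {"mibefis809": "1/hour"}.
        overrides = getattr(settings, "REST_FRAMEWORK", {}).get(
            "OVERRIDE_THROTTLE_RATES", {}
        )
        username = getattr(request.user, "username", "")
        if username in overrides:
            # Swap in the per-user rate before the normal counting runs.
            self.rate = overrides[username]
            self.num_requests, self.duration = self.parse_rate(self.rate)
        return super().allow_request(request, view)
```

If a class like this is wired in via `DEFAULT_THROTTLE_CLASSES`, a settings-only change such as the one in this patch is enough to cut the named account's rate without touching view code.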