From acfd750f68af02fe0f48de32e90ba334dc144044 Mon Sep 17 00:00:00 2001
From: Alberto Islas
Date: Fri, 17 Jan 2025 11:51:11 -0600
Subject: [PATCH 1/5] fix(recap): Avoid merging attachment number 0 from PDF
 uploads

- Clean up appellate main RDs with attachments but no attachment data

Fixes: #4598
---
 cl/recap/mergers.py |  24 ++++++++--
 cl/recap/tasks.py   |  12 +++--
 cl/recap/tests.py   | 114 ++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 141 insertions(+), 9 deletions(-)

diff --git a/cl/recap/mergers.py b/cl/recap/mergers.py
index 95fd75cc98..105c54ced2 100644
--- a/cl/recap/mergers.py
+++ b/cl/recap/mergers.py
@@ -945,10 +945,13 @@ async def add_docket_entries(
         # RDs. The check here ensures that if that happens for a particular
         # entry, we avoid creating the main RD a second+ time when we get the
         # docket sheet a second+ time.
-        appelate_court_id_exists = await appellate_court_ids.filter(
+        appellate_court_id_exists = await appellate_court_ids.filter(
             pk=d.court_id
         ).aexists()
-        if de_created is False and appelate_court_id_exists:
+        appellate_rd_att_exists = False
+        if de_created is False and appellate_court_id_exists:
+            # In existing appellate entries merges. Check if the entry has at
+            # least one attachment.
             appellate_rd_att_exists = await de.recap_documents.filter(
                 document_type=RECAPDocument.ATTACHMENT
             ).aexists()
@@ -957,14 +960,25 @@ async def add_docket_entries(
             params["pacer_doc_id"] = docket_entry["pacer_doc_id"]
         try:
             get_params = deepcopy(params)
-            if de_created is False and not appelate_court_id_exists:
-                del get_params["document_type"]
+            if de_created is False and not appellate_court_id_exists:
                 get_params["pacer_doc_id"] = docket_entry["pacer_doc_id"]
+            if de_created is False:
+                # Try to match the RD regardless of document_type
+                del get_params["document_type"]
             rd = await RECAPDocument.objects.aget(**get_params)
+            if (
+                appellate_rd_att_exists
+                and rd.document_type == RECAPDocument.PACER_DOCUMENT
+            ):
+                # If the entry already has an attachment, it means the main document
+                # matched should be attachment #1.
+                rd.document_type = RECAPDocument.ATTACHMENT
+                rd.attachment_number = 1
+                rds_updated.append(rd)
         except RECAPDocument.DoesNotExist:
             rd = None
-            if de_created is False and not appelate_court_id_exists:
+            if de_created is False and not appellate_court_id_exists:
                 try:
                     # Check for documents with a bad pacer_doc_id
                     rd = await RECAPDocument.objects.aget(**params)
diff --git a/cl/recap/tasks.py b/cl/recap/tasks.py
index ee674a9f25..7c502919dc 100644
--- a/cl/recap/tasks.py
+++ b/cl/recap/tasks.py
@@ -245,10 +245,14 @@ async def process_recap_pdf(pk):
     pq = await ProcessingQueue.objects.aget(pk=pk)
     await mark_pq_status(pq, "", PROCESSING_STATUS.IN_PROGRESS)

-    if pq.attachment_number is None:
-        document_type = RECAPDocument.PACER_DOCUMENT
-    else:
-        document_type = RECAPDocument.ATTACHMENT
+    document_type = (
+        RECAPDocument.PACER_DOCUMENT
+        if not pq.attachment_number
+        else RECAPDocument.ATTACHMENT
+    )
+    pq.attachment_number = (
+        None if not pq.attachment_number else pq.attachment_number
+    )

     logger.info(f"Processing RECAP item (debug is: {pq.debug}): {pq} ")
     try:
diff --git a/cl/recap/tests.py b/cl/recap/tests.py
index 5013ff062c..2871ac8a4b 100644
--- a/cl/recap/tests.py
+++ b/cl/recap/tests.py
@@ -179,6 +179,7 @@ def setUpTestData(cls):
                 DocketEntryDataFactory(
                     pacer_doc_id="04505578698",
                     document_number=1,
+                    description="Lorem ipsum",
                 )
             ],
         )
@@ -574,6 +575,119 @@ def test_reprocess_appellate_docket_after_adding_attachments(
             self.att_data["attachments"][0]["description"],
         )

+    def test_merge_main_rd_entry_with_attachments_and_no_att_data(
+        self, mock_upload
+    ):
+        """Can we match the main RECAPDocument when merging a docket entry from
+        a docket sheet after a PDF upload that added attachments, prior to
+        having the attachment page for the entry?
+        """
+
+        d = DocketFactory(
+            source=Docket.RECAP,
+            court=self.court_appellate,
+            pacer_case_id="104490",
+        )
+        # Merge docket entries
+        async_to_sync(add_docket_entries)(d, self.de_data["docket_entries"])
+
+        recap_documents = RECAPDocument.objects.all().order_by("date_created")
+        self.assertEqual(recap_documents.count(), 1)
+        main_rd = recap_documents[0]
+        self.assertEqual(main_rd.document_type, RECAPDocument.PACER_DOCUMENT)
+        self.assertEqual(main_rd.attachment_number, None)
+        self.assertEqual(main_rd.docket_entry.description, "Lorem ipsum")
+
+        pq = ProcessingQueue.objects.create(
+            court=self.court_appellate,
+            uploader=self.user,
+            pacer_case_id=d.pacer_case_id,
+            pacer_doc_id="04505578699",
+            document_number=1,
+            attachment_number=2,
+            upload_type=UPLOAD_TYPE.PDF,
+            filepath_local=self.f,
+        )
+        async_to_sync(process_recap_upload)(pq)
+
+        entry_rds = RECAPDocument.objects.filter(
+            docket_entry=main_rd.docket_entry
+        )
+        # Confirm a new RD was created by the att PDF upload.
+        self.assertEqual(entry_rds.count(), 2, msg="Wrong number of RDs.")
+
+        pq.refresh_from_db()
+        att_2_rd = pq.recap_document
+
+        self.assertEqual(att_2_rd.document_type, RECAPDocument.ATTACHMENT)
+        self.assertEqual(att_2_rd.attachment_number, 2)
+
+        main_rd.refresh_from_db()
+
+        self.assertEqual(main_rd.document_type, RECAPDocument.PACER_DOCUMENT)
+
+        de_data_2 = DocketEntriesDataFactory(
+            docket_entries=[
+                DocketEntryDataFactory(
+                    pacer_doc_id="04505578698",
+                    document_number=1,
+                    description="Motion",
+                )
+            ],
+        )
+
+        async_to_sync(add_docket_entries)(d, de_data_2["docket_entries"])
+        self.assertEqual(entry_rds.count(), 2, msg="Wrong number of RDs.")
+        main_rd.refresh_from_db()
+
+        self.assertEqual(main_rd.docket_entry.description, "Motion")
+        self.assertEqual(
+            main_rd.document_type,
+            RECAPDocument.ATTACHMENT,
+            msg="Wrong document type.",
+        )
+        self.assertEqual(main_rd.attachment_number, 1)
+
+    def test_avoid_merging_att_zero_on_pdf_uploads(self, mock_upload):
+        """ """
+
+        d = DocketFactory(
+            source=Docket.RECAP,
+            court=self.court_appellate,
+            pacer_case_id="104490",
+        )
+        # Merge docket entries
+        async_to_sync(add_docket_entries)(d, self.de_data["docket_entries"])
+
+        recap_documents = RECAPDocument.objects.all().order_by("date_created")
+        self.assertEqual(recap_documents.count(), 1)
+        main_rd = recap_documents[0]
+        self.assertEqual(main_rd.document_type, RECAPDocument.PACER_DOCUMENT)
+        self.assertEqual(main_rd.attachment_number, None)
+
+        pq = ProcessingQueue.objects.create(
+            court=self.court_appellate,
+            uploader=self.user,
+            pacer_case_id=d.pacer_case_id,
+            pacer_doc_id="04505578698",
+            document_number=1,
+            attachment_number=0,
+            upload_type=UPLOAD_TYPE.PDF,
+            filepath_local=self.f,
+        )
+        async_to_sync(process_recap_upload)(pq)
+
+        entry_rds = RECAPDocument.objects.filter(
+            docket_entry=main_rd.docket_entry
+        )
+
+        pq.refresh_from_db()
+        main_rd = pq.recap_document
+
+        self.assertEqual(entry_rds.count(), 1, msg="Wrong number of RDs.")
+        self.assertEqual(main_rd.document_type, RECAPDocument.PACER_DOCUMENT)
+        self.assertEqual(main_rd.attachment_number, None)
+
     async def test_uploading_a_case_query_result_page(self, mock):
         """Can we upload a case query result page and have it be saved
         correctly?

From 232857dc38a74d53cd2d160e08e557a4e6cc03cc Mon Sep 17 00:00:00 2001
From: Alberto Islas
Date: Fri, 17 Jan 2025 14:44:51 -0600
Subject: [PATCH 2/5] fix(recap): Updated the approach for matching appellate
 RD without attachment data

- Improved tests.
---
 cl/recap/mergers.py | 13 ++-------
 cl/recap/tasks.py   |  3 +-
 cl/recap/tests.py   | 70 +++++++++++++++++++++++++++++++++------------
 3 files changed, 55 insertions(+), 31 deletions(-)

diff --git a/cl/recap/mergers.py b/cl/recap/mergers.py
index 105c54ced2..d588190e9c 100644
--- a/cl/recap/mergers.py
+++ b/cl/recap/mergers.py
@@ -950,7 +950,7 @@ async def add_docket_entries(
         ).aexists()
         appellate_rd_att_exists = False
         if de_created is False and appellate_court_id_exists:
-            # In existing appellate entries merges. Check if the entry has at
+            # In existing appellate entry merges, check if the entry has at
             # least one attachment.
             appellate_rd_att_exists = await de.recap_documents.filter(
                 document_type=RECAPDocument.ATTACHMENT
             ).aexists()
@@ -963,18 +963,9 @@ async def add_docket_entries(
             if de_created is False and not appellate_court_id_exists:
                 get_params["pacer_doc_id"] = docket_entry["pacer_doc_id"]
             if de_created is False:
-                # Try to match the RD regardless of document_type
+                # Try to match the RD regardless of the document_type.
                 del get_params["document_type"]
             rd = await RECAPDocument.objects.aget(**get_params)
-            if (
-                appellate_rd_att_exists
-                and rd.document_type == RECAPDocument.PACER_DOCUMENT
-            ):
-                # If the entry already has an attachment, it means the main document
-                # matched should be attachment #1.
-                rd.document_type = RECAPDocument.ATTACHMENT
-                rd.attachment_number = 1
-                rds_updated.append(rd)
         except RECAPDocument.DoesNotExist:
             rd = None
diff --git a/cl/recap/tasks.py b/cl/recap/tasks.py
index 7c502919dc..236c0bd74b 100644
--- a/cl/recap/tasks.py
+++ b/cl/recap/tasks.py
@@ -247,9 +247,10 @@ async def process_recap_pdf(pk):

     document_type = (
         RECAPDocument.PACER_DOCUMENT
-        if not pq.attachment_number
+        if not pq.attachment_number  # This check includes attachment_number set to None or 0
         else RECAPDocument.ATTACHMENT
     )
+    # Set attachment_number to None if it is 0
     pq.attachment_number = (
         None if not pq.attachment_number else pq.attachment_number
     )
diff --git a/cl/recap/tests.py b/cl/recap/tests.py
index 2871ac8a4b..f6a90291b2 100644
--- a/cl/recap/tests.py
+++ b/cl/recap/tests.py
@@ -165,7 +165,9 @@ def setUpTestData(cls):
         cls.att_data = AppellateAttachmentPageFactory(
             attachments=[
                 AppellateAttachmentFactory(
-                    pacer_doc_id="04505578698", attachment_number=1
+                    pacer_doc_id="04505578698",
+                    attachment_number=1,
+                    description="Order entered",
                 ),
                 AppellateAttachmentFactory(
                     pacer_doc_id="04505578699", attachment_number=2
@@ -179,7 +181,7 @@ def setUpTestData(cls):
                 DocketEntryDataFactory(
                     pacer_doc_id="04505578698",
                     document_number=1,
-                    description="Lorem ipsum",
+                    short_description="Lorem ipsum",
                 )
             ],
         )
@@ -575,12 +577,12 @@ def test_reprocess_appellate_docket_after_adding_attachments(
             self.att_data["attachments"][0]["description"],
         )

-    def test_merge_main_rd_entry_with_attachments_and_no_att_data(
+    def test_match_appellate_main_rd_with_attachments_and_no_att_data(
         self, mock_upload
     ):
-        """Can we match the main RECAPDocument when merging a docket entry from
-        a docket sheet after a PDF upload that added attachments, prior to
-        having the attachment page for the entry?
+        """Can we match the main RECAPDocument when merging an appellate docket
+        entry from a docket sheet after a PDF upload has added attachments,
+        but before the attachment page for the entry is available?
         """

         d = DocketFactory(
@@ -588,16 +590,18 @@ def test_match_appellate_main_rd_with_attachments_and_no_att_data(
             court=self.court_appellate,
             pacer_case_id="104490",
         )
-        # Merge docket entries
+        # Merge docket entry #1
         async_to_sync(add_docket_entries)(d, self.de_data["docket_entries"])

+        # Confirm that the main RD has been properly merged.
         recap_documents = RECAPDocument.objects.all().order_by("date_created")
         self.assertEqual(recap_documents.count(), 1)
         main_rd = recap_documents[0]
         self.assertEqual(main_rd.document_type, RECAPDocument.PACER_DOCUMENT)
         self.assertEqual(main_rd.attachment_number, None)
-        self.assertEqual(main_rd.docket_entry.description, "Lorem ipsum")
+        self.assertEqual(main_rd.description, "Lorem ipsum")

+        # Upload a PDF for attachment 2 in the same entry #1.
         pq = ProcessingQueue.objects.create(
             court=self.court_appellate,
             uploader=self.user,
             pacer_case_id=d.pacer_case_id,
@@ -618,20 +622,17 @@ def test_match_appellate_main_rd_with_attachments_and_no_att_data(

         pq.refresh_from_db()
         att_2_rd = pq.recap_document
-
+        # The new RD should be attachment #2
         self.assertEqual(att_2_rd.document_type, RECAPDocument.ATTACHMENT)
         self.assertEqual(att_2_rd.attachment_number, 2)

-        main_rd.refresh_from_db()
-
-        self.assertEqual(main_rd.document_type, RECAPDocument.PACER_DOCUMENT)
-
+        # Simulate a docket sheet merge containing entry #1 again:
         de_data_2 = DocketEntriesDataFactory(
             docket_entries=[
                 DocketEntryDataFactory(
                     pacer_doc_id="04505578698",
                     document_number=1,
-                    description="Motion",
+                    short_description="Motion",
                 )
             ],
         )
@@ -640,7 +641,33 @@ def test_match_appellate_main_rd_with_attachments_and_no_att_data(
         async_to_sync(add_docket_entries)(d, de_data_2["docket_entries"])
         self.assertEqual(entry_rds.count(), 2, msg="Wrong number of RDs.")
         main_rd.refresh_from_db()

-        self.assertEqual(main_rd.docket_entry.description, "Motion")
+        # Confirm the main RD was properly matched and updated.
+        self.assertEqual(main_rd.description, "Motion")
         self.assertEqual(
             main_rd.document_type,
-            RECAPDocument.ATTACHMENT,
+            RECAPDocument.PACER_DOCUMENT,
             msg="Wrong document type.",
         )
+        self.assertEqual(main_rd.attachment_number, None)
+
+        # Now merge the attachment page.
+        pq = ProcessingQueue.objects.create(
+            court=self.court_appellate,
+            uploader=self.user,
+            pacer_case_id="104490",
+            upload_type=UPLOAD_TYPE.ATTACHMENT_PAGE,
+            filepath_local=self.f,
+        )
+        with mock.patch(
+            "cl.recap.tasks.get_data_from_appellate_att_report",
+            side_effect=lambda x, y: self.att_data,
+        ):
+            # Process the appellate attachment page containing 2 attachments.
+            async_to_sync(process_recap_appellate_attachment)(pq.pk)
+
+        # Confirm that the main_rd is properly converted into an attachment.
+        self.assertEqual(recap_documents.count(), 2)
+        main_rd.refresh_from_db()
         self.assertEqual(
             main_rd.document_type,
             RECAPDocument.ATTACHMENT,
             msg="Wrong document type.",
         )
         self.assertEqual(main_rd.attachment_number, 1)

     def test_avoid_merging_att_zero_on_pdf_uploads(self, mock_upload):
-        """ """
+        """Confirm that a RECAP PDF upload containing attachment number 0
+        matches the main RD."""

         d = DocketFactory(
             source=Docket.RECAP,
             court=self.court_appellate,
             pacer_case_id="104490",
         )
-        # Merge docket entries
+        # Merge docket entry #1
         async_to_sync(add_docket_entries)(d, self.de_data["docket_entries"])

+        # Confirm that the main RD has been properly merged.
         recap_documents = RECAPDocument.objects.all().order_by("date_created")
         self.assertEqual(recap_documents.count(), 1)
         main_rd = recap_documents[0]
         self.assertEqual(main_rd.document_type, RECAPDocument.PACER_DOCUMENT)
         self.assertEqual(main_rd.attachment_number, None)
+        self.assertEqual(main_rd.is_available, False)

+        # Upload a PDF for attachment number 0.
         pq = ProcessingQueue.objects.create(
             court=self.court_appellate,
             uploader=self.user,
@@ -676,17 +707,18 @@ def test_avoid_merging_att_zero_on_pdf_uploads(self, mock_upload):
             filepath_local=self.f,
         )
         async_to_sync(process_recap_upload)(pq)
-
         entry_rds = RECAPDocument.objects.filter(
             docket_entry=main_rd.docket_entry
         )
-
         pq.refresh_from_db()
         main_rd = pq.recap_document

+        # Confirm that the main RD is properly matched and that
+        # attachment_number is not set to 0.
         self.assertEqual(entry_rds.count(), 1, msg="Wrong number of RDs.")
         self.assertEqual(main_rd.document_type, RECAPDocument.PACER_DOCUMENT)
         self.assertEqual(main_rd.attachment_number, None)
+        self.assertEqual(main_rd.is_available, True)

     async def test_uploading_a_case_query_result_page(self, mock):
         """Can we upload a case query result page and have it be saved
         correctly?

From 4079155811a72e1f1421e2bea28be21a1a749690 Mon Sep 17 00:00:00 2001
From: Alberto Islas
Date: Wed, 22 Jan 2025 15:07:40 -0600
Subject: [PATCH 3/5] fix(corpus_importer): Allow iquery probes to trigger
 sweep tasks even if the docket already exists

Fixes: #4737
---
 cl/corpus_importer/signals.py | 13 +++++++++----
 cl/corpus_importer/tests.py   | 31 +++++++++++++++++++++++--------
 2 files changed, 32 insertions(+), 12 deletions(-)

diff --git a/cl/corpus_importer/signals.py b/cl/corpus_importer/signals.py
index 08254d7d85..c557d70aaf 100644
--- a/cl/corpus_importer/signals.py
+++ b/cl/corpus_importer/signals.py
@@ -119,11 +119,16 @@ def handle_update_latest_case_id_and_schedule_iquery_sweep(
         # pacer_case_id)
         return None

-    # Only call update_latest_case_id_and_schedule_iquery_sweep if this is a
-    # new RECAP district or bankruptcy docket with pacer_case_id not added by
-    # iquery sweep tasks.
+    # Only call update_latest_case_id_and_schedule_iquery_sweep if:
+    # - The docket belongs to a RECAP district or bankruptcy court,
+    # - The docket has a pacer_case_id,
+    # - The docket was newly created (when IQUERY_SWEEP_UPLOADS_SIGNAL_ENABLED=True), or
+    # - The docket was created or updated by the last probe iteration from probe_iquery_pages.
+    check_probe_or_created = (
+        not getattr(instance, "avoid_trigger_signal", False) or created
+    )
     if (
-        created
+        check_probe_or_created
         and instance.pacer_case_id
         and instance.court_id
         in list(
diff --git a/cl/corpus_importer/tests.py b/cl/corpus_importer/tests.py
index b631fc3829..9c566deb79 100644
--- a/cl/corpus_importer/tests.py
+++ b/cl/corpus_importer/tests.py
@@ -2427,7 +2427,7 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
         ) as mock_iquery_sweep, self.captureOnCommitCallbacks(
             execute=True
         ):
-            DocketFactory(
+            Docket.objects.create(
                 court=self.court_gand,
                 source=Docket.RECAP,
                 case_name="New Incoming Docket",
@@ -2460,7 +2460,7 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
         ) as mock_iquery_sweep, self.captureOnCommitCallbacks(
             execute=True
         ):
-            DocketFactory(
+            Docket.objects.create(
                 court=self.court_gand,
                 source=Docket.RECAP,
                 docket_number="2:20-cv-00600",
@@ -2496,7 +2496,7 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
         ) as mock_iquery_sweep, self.captureOnCommitCallbacks(
             execute=True
         ):
-            DocketFactory(
+            Docket.objects.create(
                 court=self.court_gand,
                 source=Docket.RECAP,
                 case_name="New Incoming Docket",
@@ -2530,7 +2530,7 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
         ) as mock_iquery_sweep, self.captureOnCommitCallbacks(
             execute=True
         ):
-            DocketFactory(
+            Docket.objects.create(
                 court=self.court_ca1,
                 source=Docket.RECAP,
                 docket_number="2:20-cv-00603",
@@ -2576,7 +2576,7 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
         )
         # Probing will add 3 dockets (12, 16, 24) + 2 added for the sweep task (13,18).
         self.assertEqual(
-            dockets.count(), 5, msg="Docket number doesn't match."
+            dockets.count(), 5, msg="Docket count doesn't match."
         )
         # 7 additional PACER HTML files should be stored by now, 3 added by the
         # probing task + 4 added by the sweep task.
@@ -2589,8 +2589,22 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(

         ### Integration test probing task + sweep
         # IQUERY_SWEEP_UPLOADS_SIGNAL_ENABLED False
+        with override_settings(IQUERY_SWEEP_UPLOADS_SIGNAL_ENABLED=False):
+            # Create docket pacer_case_id 12, which is the last docket in
+            # the probe. Even though it already exists, it should trigger
+            # a sweep task.
+            Docket.objects.create(
+                court=self.court_txed,
+                source=Docket.RECAP,
+                case_name="New Incoming Docket 12",
+                docket_number="2:10-cv-00602",
+                pacer_case_id="12",
+            )
+
         dockets = Docket.objects.filter(court_id=self.court_txed.pk)
-        self.assertEqual(dockets.count(), 0)
+        self.assertEqual(
+            dockets.count(), 1, msg="Docket count doesn't match for txed."
+        )
         r = get_redis_interface("CACHE")
         # Simulate a highest_known_pacer_case_id = 8
         r.hset("iquery:highest_known_pacer_case_id", self.court_txed.pk, 8)
@@ -2615,9 +2629,10 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
             1,
             msg="Wrong number of sweep task called.",
         )
-        # Probing will add 3 dockets (9,10,12) + 1 added for the sweep task (11).
+        # Probing will add 2 dockets (9, 10) + 1 added for the sweep task (11).
+        # Docket 12 already exists; however, it should still trigger the sweep task that adds 11.
         self.assertEqual(
-            dockets.count(), 4, msg="Docket number doesn't match for txed."
+            dockets.count(), 4, msg="Docket count doesn't match for txed."
         )
     finally:
         # Ensure the signal is disconnected after the test

From f1890f9de6c8f71fd0e3daf3149619f78cc501ca Mon Sep 17 00:00:00 2001
From: Alberto Islas
Date: Wed, 22 Jan 2025 16:11:31 -0600
Subject: [PATCH 4/5] fix(factories): Fixed duplicate post_save signals
 triggered during DocketFactory creation
---
 cl/corpus_importer/tests.py | 10 +++++-----
 cl/search/factories.py      |  7 ++++++-
 2 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/cl/corpus_importer/tests.py b/cl/corpus_importer/tests.py
index 9c566deb79..d4e68d5d93 100644
--- a/cl/corpus_importer/tests.py
+++ b/cl/corpus_importer/tests.py
@@ -2427,7 +2427,7 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
         ) as mock_iquery_sweep, self.captureOnCommitCallbacks(
             execute=True
         ):
-            Docket.objects.create(
+            DocketFactory(
                 court=self.court_gand,
                 source=Docket.RECAP,
                 case_name="New Incoming Docket",
@@ -2460,7 +2460,7 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
         ) as mock_iquery_sweep, self.captureOnCommitCallbacks(
             execute=True
         ):
-            Docket.objects.create(
+            DocketFactory(
                 court=self.court_gand,
                 source=Docket.RECAP,
                 docket_number="2:20-cv-00600",
@@ -2496,7 +2496,7 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
         ) as mock_iquery_sweep, self.captureOnCommitCallbacks(
             execute=True
         ):
-            Docket.objects.create(
+            DocketFactory(
                 court=self.court_gand,
                 source=Docket.RECAP,
                 case_name="New Incoming Docket",
@@ -2530,7 +2530,7 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
         ) as mock_iquery_sweep, self.captureOnCommitCallbacks(
             execute=True
         ):
-            Docket.objects.create(
+            DocketFactory(
                 court=self.court_ca1,
                 source=Docket.RECAP,
                 docket_number="2:20-cv-00603",
@@ -2593,7 +2593,7 @@ def test_update_latest_case_id_and_schedule_iquery_sweep_integration(
             # Create docket pacer_case_id 12, which is the last docket in
             # the probe. Even though it already exists, it should trigger
             # a sweep task.
-            Docket.objects.create(
+            DocketFactory(
                 court=self.court_txed,
                 source=Docket.RECAP,
                 case_name="New Incoming Docket 12",
diff --git a/cl/search/factories.py b/cl/search/factories.py
index 4ec250de58..fdf60e379c 100644
--- a/cl/search/factories.py
+++ b/cl/search/factories.py
@@ -307,7 +307,12 @@ def filepath_local(self, create, extracted, **kwargs):
             self.filepath_local = FileField().evaluate(None, None, kwargs)

         if create:
-            self.save(update_fields=["filepath_local"])
+            # Use a Docket queryset to persist filepath_local instead of calling
+            # save(), which can trigger duplicate post_save signals, potentially
+            # causing issues in certain testing scenarios.
+            Docket.objects.filter(pk=self.pk).update(
+                filepath_local=self.filepath_local
+            )


 class DocketWithChildrenFactory(DocketFactory):

From 77f718deabb0d7cf05541d47f072173f2c9fa9f4 Mon Sep 17 00:00:00 2001
From: Alberto Islas
Date: Thu, 23 Jan 2025 13:18:06 -0600
Subject: [PATCH 5/5] fix(corpus_importer): Made
 IQUERY_SWEEP_UPLOADS_SIGNAL_ENABLED setting a bool
---
 cl/settings/project/corpus_importer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cl/settings/project/corpus_importer.py b/cl/settings/project/corpus_importer.py
index f2f375845f..27988c1f13 100644
--- a/cl/settings/project/corpus_importer.py
+++ b/cl/settings/project/corpus_importer.py
@@ -11,7 +11,7 @@
     "IQUERY_COURT_BLOCKED_MAX_ATTEMPTS", default=6
 )
 IQUERY_EMPTY_PROBES_LIMIT = env.int("IQUERY_EMPTY_PROBES_LIMIT", default=15)
-IQUERY_SWEEP_UPLOADS_SIGNAL_ENABLED = env(
+IQUERY_SWEEP_UPLOADS_SIGNAL_ENABLED = env.bool(
     "IQUERY_SWEEP_UPLOADS_SIGNAL_ENABLED", default=False
 )
 IQUERY_COURT_RATE = env("IQUERY_COURT_RATE", default="100/s")
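Note on the attachment-number handling that patches 1 and 2 rely on in
process_recap_pdf: `not pq.attachment_number` is true for both None and 0,
which is what lets a PDF upload flagged as attachment 0 be stored as the main
document with no attachment number persisted. A minimal pure-Python sketch of
that normalization follows; it is illustrative only, and the integer constants
and the classify() helper are hypothetical stand-ins for RECAPDocument's
constants and the task code, not CourtListener code.

    from types import SimpleNamespace

    PACER_DOCUMENT = 1  # stand-in for RECAPDocument.PACER_DOCUMENT
    ATTACHMENT = 2  # stand-in for RECAPDocument.ATTACHMENT

    def classify(pq):
        # `not pq.attachment_number` is True for attachment_number=None and
        # attachment_number=0, so both are treated as the main document.
        document_type = (
            PACER_DOCUMENT if not pq.attachment_number else ATTACHMENT
        )
        # Normalize 0 to None so attachment_number=0 is never persisted.
        attachment_number = pq.attachment_number or None
        return document_type, attachment_number

    assert classify(SimpleNamespace(attachment_number=0)) == (PACER_DOCUMENT, None)
    assert classify(SimpleNamespace(attachment_number=None)) == (PACER_DOCUMENT, None)
    assert classify(SimpleNamespace(attachment_number=2)) == (ATTACHMENT, 2)

Under these assumptions, an upload with attachment_number=0 ends up identical
to one with no attachment number at all, which is the behavior the
test_avoid_merging_att_zero_on_pdf_uploads test asserts.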