
Commit e768d5d

Committed on Jan 27, 2025
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent: b543007

21 files changed (+80, -88 lines)
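
The fixes follow two recurring patterns. Long assert statements are reflowed so the condition stays on one line and the failure message moves into its own parenthesized block, and implicitly concatenated f-string fragments (f"..." f"...") are merged into a single f-string; in the test files the outer quotes are also swapped so the nested quotes fit inside one literal. The commit does not name the hook that produced these fixes, so the following is a minimal sketch of the resulting style with hypothetical names (reusing messages from the diffs below), not documented tool behavior.

# Illustrative sketch only; hypothetical function, not code from this repository.
def check_api_port(port: int, filename: str) -> str:
    # New assert layout: condition inline, message in its own parenthesized block.
    assert isinstance(port, int) and port > 0, (
        "The API port has to be a positive integer! E.g. 8081"
    )
    # One merged f-string instead of adjacent f-string fragments.
    return f"Persisted PreprocessingPipeline Results for {filename}!"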
 

‎backend/src/app/celery/background_jobs/__init__.py

+12 -12

@@ -103,9 +103,9 @@ def execute_text_preprocessing_pipeline_apply_async(
         execute_text_preprocessing_pipeline_task,
     )

-    assert isinstance(
-        execute_text_preprocessing_pipeline_task, Task
-    ), "Not a Celery Task"
+    assert isinstance(execute_text_preprocessing_pipeline_task, Task), (
+        "Not a Celery Task"
+    )

     for cargo in cargos:
         execute_text_preprocessing_pipeline_task.apply_async(kwargs={"cargo": cargo})
@@ -118,9 +118,9 @@ def execute_image_preprocessing_pipeline_apply_async(
         execute_image_preprocessing_pipeline_task,
     )

-    assert isinstance(
-        execute_image_preprocessing_pipeline_task, Task
-    ), "Not a Celery Task"
+    assert isinstance(execute_image_preprocessing_pipeline_task, Task), (
+        "Not a Celery Task"
+    )

     for cargo in cargos:
         execute_image_preprocessing_pipeline_task.apply_async(kwargs={"cargo": cargo})
@@ -133,9 +133,9 @@ def execute_audio_preprocessing_pipeline_apply_async(
         execute_audio_preprocessing_pipeline_task,
     )

-    assert isinstance(
-        execute_audio_preprocessing_pipeline_task, Task
-    ), "Not a Celery Task"
+    assert isinstance(execute_audio_preprocessing_pipeline_task, Task), (
+        "Not a Celery Task"
+    )

     for cargo in cargos:
         execute_audio_preprocessing_pipeline_task.apply_async(kwargs={"cargo": cargo})
@@ -148,9 +148,9 @@ def execute_video_preprocessing_pipeline_apply_async(
         execute_video_preprocessing_pipeline_task,
    )

-    assert isinstance(
-        execute_video_preprocessing_pipeline_task, Task
-    ), "Not a Celery Task"
+    assert isinstance(execute_video_preprocessing_pipeline_task, Task), (
+        "Not a Celery Task"
+    )

     for cargo in cargos:
         execute_video_preprocessing_pipeline_task.apply_async(kwargs={"cargo": cargo})

‎backend/src/app/core/analysis/cota/pipeline/pipeline.py

+2 -3

@@ -82,8 +82,7 @@ def execute(self, job: COTARefinementJobRead) -> COTARefinementJobRead:
         stop_t = time.perf_counter()

         logger.info(
-            f"Executing the COTARefinementPipeline took"
-            f" {stop_t - start_t:0.4f} seconds"
+            f"Executing the COTARefinementPipeline took {stop_t - start_t:0.4f} seconds"
         )

         return job
@@ -163,7 +162,7 @@ def _run_step(self, cargo: Cargo, step: PipelineStep) -> Cargo:
             logger.error(msg)
             raise ValueError(msg)

-        logger.info((f"Running: {step} for " f"COTARefinementJob {cargo.job.id} "))
+        logger.info((f"Running: {step} for COTARefinementJob {cargo.job.id} "))
         cargo = self._update_cota_job(
             cargo=cargo,
             current_step_name=step.name,

‎backend/src/app/core/data/crud/memo.py

+3 -3

@@ -138,9 +138,9 @@ def create_for_attached_object(
             raise NotImplementedError(
                 f"Unknown AttachedObjectType: {attached_object_type}"
             )
-        assert (
-            oh_create_dto is not None
-        ), f"Unknown AttachedObjectType: {attached_object_type}"
+        assert oh_create_dto is not None, (
+            f"Unknown AttachedObjectType: {attached_object_type}"
+        )

         # create an ObjectHandle for the attached object
         oh_db_obj = crud_object_handle.create(db=db, create_dto=oh_create_dto)

‎backend/src/app/core/data/dto/search.py

+1 -1

@@ -70,7 +70,7 @@ class ElasticSearchMemoRead(BaseModel):
         description="The ID of the Object the Memo is attached to"
     )
     attached_object_type: Optional[AttachedObjectType] = Field(
-        description=("The type of the Object the Memo is " "attached to")
+        description=("The type of the Object the Memo is attached to")
     )
     updated: Optional[datetime] = Field(
         description="The created date of the Memo", default=datetime.now()

‎backend/src/app/core/data/llm/llm_service.py

+24 -24

@@ -448,12 +448,12 @@ def _llm_document_tagging(
         approach_parameters: ZeroShotParams,
         task_parameters: DocumentTaggingParams,
     ) -> LLMJobResult:
-        assert isinstance(
-            task_parameters, DocumentTaggingParams
-        ), "Wrong task parameters!"
-        assert isinstance(
-            approach_parameters, ZeroShotParams
-        ), "Wrong approach parameters!"
+        assert isinstance(task_parameters, DocumentTaggingParams), (
+            "Wrong task parameters!"
+        )
+        assert isinstance(approach_parameters, ZeroShotParams), (
+            "Wrong approach parameters!"
+        )

         msg = f"Started LLMJob - Document Tagging, num docs: {len(task_parameters.sdoc_ids)}"
         self._update_llm_job_description(
@@ -573,12 +573,12 @@ def _llm_metadata_extraction(
         approach_parameters: ZeroShotParams,
         task_parameters: MetadataExtractionParams,
     ) -> LLMJobResult:
-        assert isinstance(
-            task_parameters, MetadataExtractionParams
-        ), "Wrong task parameters!"
-        assert isinstance(
-            approach_parameters, ZeroShotParams
-        ), "Wrong approach parameters!"
+        assert isinstance(task_parameters, MetadataExtractionParams), (
+            "Wrong task parameters!"
+        )
+        assert isinstance(approach_parameters, ZeroShotParams), (
+            "Wrong approach parameters!"
+        )

         msg = f"Started LLMJob - Metadata Extraction, num docs: {len(task_parameters.sdoc_ids)}"
         self._update_llm_job_description(
@@ -721,9 +721,9 @@ def _llm_annotation(
         task_parameters: AnnotationParams,
     ) -> LLMJobResult:
         assert isinstance(task_parameters, AnnotationParams), "Wrong task parameters!"
-        assert isinstance(
-            approach_parameters, ZeroShotParams
-        ), "Wrong approach parameters!"
+        assert isinstance(approach_parameters, ZeroShotParams), (
+            "Wrong approach parameters!"
+        )

         msg = f"Started LLMJob - Annotation, num docs: {len(task_parameters.sdoc_ids)}"
         self._update_llm_job_description(
@@ -887,9 +887,9 @@ def _llm_sentence_annotation(
         approach_parameters: Union[ZeroShotParams, FewShotParams],
         task_parameters: SentenceAnnotationParams,
     ) -> LLMJobResult:
-        assert isinstance(
-            task_parameters, SentenceAnnotationParams
-        ), "Wrong task parameters!"
+        assert isinstance(task_parameters, SentenceAnnotationParams), (
+            "Wrong task parameters!"
+        )
         assert isinstance(approach_parameters, ZeroShotParams) or isinstance(
             approach_parameters, FewShotParams
         ), "Wrong approach parameters!"
@@ -1101,12 +1101,12 @@ def _ray_sentence_annotation(
         approach_parameters: ModelTrainingParams,
         task_parameters: SentenceAnnotationParams,
     ) -> LLMJobResult:
-        assert isinstance(
-            task_parameters, SentenceAnnotationParams
-        ), "Wrong task parameters!"
-        assert isinstance(
-            approach_parameters, ModelTrainingParams
-        ), "Wrong approach parameters!"
+        assert isinstance(task_parameters, SentenceAnnotationParams), (
+            "Wrong task parameters!"
+        )
+        assert isinstance(approach_parameters, ModelTrainingParams), (
+            "Wrong approach parameters!"
+        )

         msg = f"Started LLMJob - Sentence Annotation (RAY), num docs: {len(task_parameters.sdoc_ids)}"
         self._update_llm_job(

‎backend/src/app/core/data/llm/ollama_service.py

+3 -3

@@ -40,9 +40,9 @@ def __new__(cls, *args, **kwargs):
             )
             logger.info(f"Model {model} has been created successfully.")
         available_models = [x.model for x in ollamac.list()["models"]]
-        assert (
-            model in available_models
-        ), f"Model {model} is not available. Available models are: {available_models}"
+        assert model in available_models, (
+            f"Model {model} is not available. Available models are: {available_models}"
+        )

         cls.__model = model
         cls.__client = ollamac

‎backend/src/app/core/data/llm/prompts/sentence_annotation_prompt_builder.py

+3 -3

@@ -140,9 +140,9 @@ def _build_user_prompt_template(

         # check that there are at least 4 examples per code
         for code_id, annotations in code_id2sentence_annotations.items():
-            assert (
-                len(annotations) >= sent_anno_conf.few_shot_threshold
-            ), f"Code {code_id} has less than {sent_anno_conf.few_shot_threshold} annotations!"
+            assert len(annotations) >= sent_anno_conf.few_shot_threshold, (
+                f"Code {code_id} has less than {sent_anno_conf.few_shot_threshold} annotations!"
+            )

         # find corrsponding sdoc datas
         sdoc_ids = [sa.sdoc_id for sa in sentence_annotations]

‎backend/src/app/core/db/simsearch_service.py

+3 -3

@@ -71,9 +71,9 @@ def _encode_text(self, text: List[str], return_avg_emb: bool = False) -> np.ndar
     def _get_image_path_from_sdoc_id(self, sdoc_id: int) -> Path:
         with self.sqls.db_session() as db:
             sdoc = SourceDocumentRead.model_validate(crud_sdoc.read(db=db, id=sdoc_id))
-        assert (
-            sdoc.doctype == DocType.image
-        ), f"SourceDocument with {sdoc_id=} is not an image!"
+        assert sdoc.doctype == DocType.image, (
+            f"SourceDocument with {sdoc_id=} is not an image!"
+        )
         return self.repo.get_path_to_sdoc_file(sdoc=sdoc, raise_if_not_exists=True)

     def _encode_image(self, image_sdoc_id: int) -> np.ndarray:

‎backend/src/app/preprocessing/pipeline/preprocessing_pipeline.py

+1 -3

@@ -143,9 +143,7 @@ def __update_status_of_ppj(
     ) -> PipelineCargo:
         ppj_id = cargo.ppj_payload.prepro_job_id
         update_dto = PreprocessingJobUpdate(status=status)
-        logger.info(
-            f"Updating PreprocessingJob {ppj_id} " f"Status to {status.value}..."
-        )
+        logger.info(f"Updating PreprocessingJob {ppj_id} Status to {status.value}...")
         with self.sqls.db_session() as db:
             _ = crud_prepro_job.update(db=db, uuid=ppj_id, update_dto=update_dto)
         return cargo

‎backend/src/app/preprocessing/pipeline/steps/image/store_metadata_to_database.py

+1 -1

@@ -85,6 +85,6 @@ def store_metadata_to_database(cargo: PipelineCargo) -> PipelineCargo:
             db.rollback()
             raise e
         else:
-            logger.info(f"Persisted SourceDocument Metadata " f"for {ppid.filename}!")
+            logger.info(f"Persisted SourceDocument Metadata for {ppid.filename}!")

     return cargo

‎backend/src/app/preprocessing/pipeline/steps/image/write_ppid_to_database.py

+1 -3

@@ -121,9 +121,7 @@ def write_ppid_to_database(cargo: PipelineCargo) -> PipelineCargo:
             db.rollback()
             raise e
         else:
-            logger.info(
-                f"Persisted PreprocessingPipeline Results " f"for {ppid.filename}!"
-            )
+            logger.info(f"Persisted PreprocessingPipeline Results for {ppid.filename}!")

     cargo.data["sdoc_id"] = sdoc_db_obj.id
     return cargo

‎backend/src/app/preprocessing/pipeline/steps/text/write_pptd_to_database.py

+1 -3

@@ -233,9 +233,7 @@ def write_pptd_to_database(cargo: PipelineCargo) -> PipelineCargo:
             db.rollback()
             raise e
         else:
-            logger.info(
-                f"Persisted PreprocessingPipeline Results " f"for {pptd.filename}!"
-            )
+            logger.info(f"Persisted PreprocessingPipeline Results for {pptd.filename}!")

     cargo.data["sdoc_id"] = sdoc_db_obj.id

‎backend/src/app/preprocessing/pipeline/steps/video/write_ppvd_to_database.py

+1 -3

@@ -119,9 +119,7 @@ def write_ppvd_to_database(cargo: PipelineCargo) -> PipelineCargo:
             db.rollback()
             raise e
         else:
-            logger.info(
-                f"Persisted PreprocessingPipeline Results " f"for {ppvd.filename}!"
-            )
+            logger.info(f"Persisted PreprocessingPipeline Results for {ppvd.filename}!")

     cargo.data["sdoc_id"] = sdoc_db_obj.id
     return cargo

‎backend/src/app/preprocessing/preprocessing_service.py

+1 -2

@@ -206,8 +206,7 @@ def abort_preprocessing_job(self, ppj_id: str) -> PreprocessingJobRead:
         if ppj.status != BackgroundJobStatus.RUNNING:
             raise HTTPException(
                 detail=(
-                    f"Cannot abort PreprocessingJob {ppj_id} "
-                    "because it is not running!"
+                    f"Cannot abort PreprocessingJob {ppj_id} because it is not running!"
                 ),
                 status_code=400,
             )

‎backend/src/app/preprocessing/ray_model_worker/models/blip2.py

+3 -3

@@ -57,9 +57,9 @@ def __init__(self):
             device_map=device_map,
             torch_dtype=data_type,
         )
-        assert isinstance(
-            captioning_model, Blip2ForConditionalGeneration
-        ), "Failed to load captioning model"
+        assert isinstance(captioning_model, Blip2ForConditionalGeneration), (
+            "Failed to load captioning model"
+        )

         captioning_model.eval()
         self.data_type = data_type

‎backend/src/app/preprocessing/ray_model_worker/models/detr.py

+6 -6

@@ -23,15 +23,15 @@ class DETRModel:
     def __init__(self):
         logger.debug(f"Loading DetrFeatureExtractor {MODEL} ...")
         feature_extractor = DetrFeatureExtractor.from_pretrained(MODEL, device=DEVICE)
-        assert isinstance(
-            feature_extractor, DetrFeatureExtractor
-        ), "Failed to load feature extractor"
+        assert isinstance(feature_extractor, DetrFeatureExtractor), (
+            "Failed to load feature extractor"
+        )

         logger.debug(f"Loading DetrForObjectDetection {MODEL} ...")
         object_detection_model = DetrForObjectDetection.from_pretrained(MODEL)
-        assert isinstance(
-            object_detection_model, DetrForObjectDetection
-        ), "Failed to load object detection model"
+        assert isinstance(object_detection_model, DetrForObjectDetection), (
+            "Failed to load object detection model"
+        )

         object_detection_model.to(DEVICE)
         object_detection_model.eval()

‎backend/src/app/preprocessing/ray_model_worker/models/seqsenttagger.py

+3 -1

@@ -200,7 +200,9 @@ def collate_fn(batch):
         == len(new_padded_embeddings)
         == len(new_padded_labels)
         == len(new_mask)
-    ), f"Lengths must match: {len(padded_embeddings)}, {len(padded_labels)}, {len(mask)}, {len(new_padded_embeddings)}, {len(new_padded_labels)}, {len(new_mask)}"
+    ), (
+        f"Lengths must match: {len(padded_embeddings)}, {len(padded_labels)}, {len(mask)}, {len(new_padded_embeddings)}, {len(new_padded_labels)}, {len(new_mask)}"
+    )

     return (
         torch.tensor(new_padded_embeddings),

‎backend/src/main.py

+3 -3

@@ -274,9 +274,9 @@ def invalid_error_handler(_, exc: InvalidError):
 def main() -> None:
     # read port from config
     port = int(conf.api.port)
-    assert (
-        port is not None and isinstance(port, int) and port > 0
-    ), "The API port has to be a positive integer! E.g. 8081"
+    assert port is not None and isinstance(port, int) and port > 0, (
+        "The API port has to be a positive integer! E.g. 8081"
+    )

     is_debug = conf.api.production_mode == "0"

‎backend/src/test/api/endpoints/test_authentication.py

+3 -3

@@ -41,9 +41,9 @@ def test_authentication_required():
                 and dep.dependency.__qualname__ == "get_current_user"
             ]

-            assert (
-                len(auth_dep) == 1
-            ), f"route {route.methods} {route.path} is not protected by authentication"
+            assert len(auth_dep) == 1, (
+                f"route {route.methods} {route.path} is not protected by authentication"
+            )
         elif isinstance(route, starlette.routing.Route):
             # These routes are usually built into FastAPI, and
             # we expect them to not require authentication.

‎backend/src/test/app/core/data/crud/test_user_crud.py

+2 -2

@@ -13,7 +13,7 @@


 def test_create_delete_user(db: Session) -> None:
-    email = f'{"".join(random.choices(string.ascii_letters, k=15))}@gmail.com'
+    email = f"{''.join(random.choices(string.ascii_letters, k=15))}@gmail.com"
     first_name = "".join(random.choices(string.ascii_letters, k=15))
     last_name = "".join(random.choices(string.ascii_letters, k=15))
     password = "".join(random.choices(string.ascii_letters, k=15))
@@ -44,7 +44,7 @@ def test_create_delete_user(db: Session) -> None:


 def test_update_user(db: Session, user: UserORM) -> None:
-    email = f'{"".join(random.choices(string.ascii_letters, k=15))}@gmail.com'
+    email = f"{''.join(random.choices(string.ascii_letters, k=15))}@gmail.com"
     first_name = "".join(random.choices(string.ascii_letters, k=15))
     last_name = "".join(random.choices(string.ascii_letters, k=15))
     password = "".join(random.choices(string.ascii_letters, k=15))

‎backend/src/test/conftest.py

+3 -3

@@ -317,9 +317,9 @@ def create(self, upload_list: list, user: dict, project: dict):
         response = client.put(
             f"/project/{project['id']}/sdoc", headers=user_headers, files=files
         )
-        assert (
-            response.status_code == 200
-        ), f"Failed to upload files. Response: {response}. Files: {files}"
+        assert response.status_code == 200, (
+            f"Failed to upload files. Response: {response}. Files: {files}"
+        )
         response = response.json()
         docs = {}
         for file in response["payloads"]:
