Skip to content

Commit

Permalink
fixed test config, mypy and typeguard
Browse files Browse the repository at this point in the history
  • Loading branch information
picciama committed Jan 12, 2024
1 parent 6553b63 commit d3d0998
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 13 deletions.
8 changes: 4 additions & 4 deletions oktoberfest/predict/koina.py
Original file line number Diff line number Diff line change
Expand Up @@ -365,7 +365,7 @@ def __merge_list_dict_array(dict_list: List[Dict[str, np.ndarray]]) -> Dict[str,

def __async_callback(
self,
infer_results: Dict[int, Union[InferResult, InferenceServerException]],
infer_results: Dict[int, Union[Dict[str, np.ndarray], InferenceServerException]],
request_id: int,
result: Optional[InferResult],
error: Optional[InferenceServerException],
Expand All @@ -391,7 +391,7 @@ def __async_callback(
def __async_predict_batch(
self,
data: Dict[str, np.ndarray],
infer_results: Dict[int, Union[InferResult, InferenceServerException]],
infer_results: Dict[int, Union[Dict[str, np.ndarray], InferenceServerException]],
request_id: int,
timeout: int = 10000,
retries: int = 10,
Expand Down Expand Up @@ -490,7 +490,7 @@ def __predict_async(self, data: Dict[str, np.ndarray], debug=False) -> Dict[str,
:return: A dictionary containing the model's predictions. Keys are output names, and values are numpy arrays
representing the model's output.
"""
infer_results: Dict[int, Union[InferResult, InferenceServerException]] = {}
infer_results: Dict[int, Union[Dict[str, np.ndarray], InferenceServerException]] = {}
tasks = []
for i, data_batch in enumerate(self.__slice_dict(data, self.batchsize)):
tasks.append(self.__async_predict_batch(data_batch, infer_results, request_id=i, retries=3))
Expand Down Expand Up @@ -521,7 +521,7 @@ def __predict_async(self, data: Dict[str, np.ndarray], debug=False) -> Dict[str,
return self.__handle_results(infer_results, debug)

def __handle_results(
self, infer_results: Dict[int, Union[InferResult, InferenceServerException]], debug: bool
self, infer_results: Dict[int, Union[Dict[str, np.ndarray], InferenceServerException]], debug: bool
) -> Dict[str, np.ndarray]:
"""
Handles the results.
Expand Down
12 changes: 3 additions & 9 deletions oktoberfest/runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,21 +232,15 @@ def _speclib_from_digestion(config: Config) -> Spectra:


def _get_writer_and_output(results_path: Path, output_format: str) -> Tuple[Type[SpectralLibrary], Path]:
    """
    Resolve the spectral library writer class and output file path for a given format.

    :param results_path: directory in which the spectral library file will be written
    :param output_format: requested library format; one of "msp", "spectronaut" or "dlib"
    :return: a tuple of (writer class, output file path)
    :raises ValueError: if output_format is not a supported spectral library type
    """
    # The diff-merge artifact left duplicate dead assignments (spectral_library = ... /
    # out_file = ...) alongside the per-branch returns, plus an unreachable trailing
    # "return spectral_library, out_file" after the raise. Cleaned to a single
    # return per branch, matching the post-commit intent.
    if output_format == "msp":
        return MSP, results_path / "myPrositLib.msp"
    elif output_format == "spectronaut":
        return Spectronaut, results_path / "myPrositLib.csv"
    elif output_format == "dlib":
        return DLib, results_path / "myPrositLib.dlib"
    else:
        raise ValueError(f"{output_format} is not supported as spectral library type")


def _get_batches_and_mode(out_file: Path, failed_batch_file: Path, no_of_spectra: int, batchsize: int):
if out_file.is_file():
Expand Down
8 changes: 8 additions & 0 deletions tests/unit_tests/configs/spectral_library_with_digest.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,14 @@
"outputFormat": "spectronaut",
"prediction_server": "koina.proteomicsdb.org:443",
"ssl": true,
"spectralLibraryOptions": {
"fragmentation": "HCD",
"collisionEnergy": 30,
"precursorCharge": [2, 3],
"minIntensity": 5e-4,
"batchsize": 10000,
"format": "msp"
},
"fastaDigestOptions": {
"fragmentation": "HCD",
"digestion": "full",
Expand Down

0 comments on commit d3d0998

Please sign in to comment.