Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Fix mypy errors: make implicit Optional typing explicit
  • Loading branch information
LennartPurucker authored and mfeurer committed Apr 18, 2023
commit d766d9804b753b5f2c73c03bcc6e7178070d4763
3 changes: 2 additions & 1 deletion doc/progress.rst
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,16 @@ Changelog
0.13.1
~~~~~~

* DOC #1241 #1229 #1231: Minor documentation fixes and resolve documentation examples not working.
* ADD #1028: Add functions to delete runs, flows, datasets, and tasks (e.g., ``openml.datasets.delete_dataset``).
* ADD #1144: Add locally computed results to the ``OpenMLRun`` object's representation if the run was created locally and not downloaded from the server.
* ADD #1180: Improve the error message when the checksum of a downloaded dataset does not match the checksum provided by the API.
* ADD #1201: Make ``OpenMLTraceIteration`` a dataclass.
* DOC #1069: Add argument documentation for the ``OpenMLRun`` class.
* DOC #1241 #1229 #1231: Minor documentation fixes and resolve documentation examples not working.
* FIX #1197 #559 #1131: Fix the order of ground truth and predictions in the ``OpenMLRun`` object and in ``format_prediction``.
* FIX #1198: Support numpy 1.24 and higher.
* FIX #1216: Allow unknown task types on the server. This is only relevant when new task types are added to the test server.
* FIX #1223: Fix mypy errors for implicit optional typing.
* MAINT #1155: Add dependabot github action to automatically update other github actions.
* MAINT #1199: Obtain pre-commit's flake8 from github.com instead of gitlab.com.
* MAINT #1215: Support latest numpy version.
Expand Down
2 changes: 1 addition & 1 deletion openml/_api_calls.py
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@ def _download_minio_bucket(
def _download_text_file(
source: str,
output_path: Optional[str] = None,
md5_checksum: str = None,
md5_checksum: Optional[str] = None,
exists_ok: bool = True,
encoding: str = "utf8",
) -> Optional[str]:
Expand Down
12 changes: 7 additions & 5 deletions openml/datasets/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -351,7 +351,7 @@ def get_datasets(
def get_dataset(
dataset_id: Union[int, str],
download_data: bool = True,
version: int = None,
version: Optional[int] = None,
error_if_multiple: bool = False,
cache_format: str = "pickle",
download_qualities: bool = True,
Expand Down Expand Up @@ -982,7 +982,7 @@ def _get_dataset_description(did_cache_dir, dataset_id):

def _get_dataset_parquet(
description: Union[Dict, OpenMLDataset],
cache_directory: str = None,
cache_directory: Optional[str] = None,
download_all_files: bool = False,
) -> Optional[str]:
"""Return the path to the local parquet file of the dataset. If is not cached, it is downloaded.
Expand Down Expand Up @@ -1049,7 +1049,9 @@ def _get_dataset_parquet(
return output_file_path


def _get_dataset_arff(description: Union[Dict, OpenMLDataset], cache_directory: str = None) -> str:
def _get_dataset_arff(
description: Union[Dict, OpenMLDataset], cache_directory: Optional[str] = None
) -> str:
"""Return the path to the local arff file of the dataset. If is not cached, it is downloaded.

Checks if the file is in the cache, if yes, return the path to the file.
Expand Down Expand Up @@ -1171,8 +1173,8 @@ def _create_dataset_from_description(
description: Dict[str, str],
features_file: str,
qualities_file: str,
arff_file: str = None,
parquet_file: str = None,
arff_file: Optional[str] = None,
parquet_file: Optional[str] = None,
cache_format: str = "pickle",
) -> OpenMLDataset:
"""Create a dataset object from a description dict.
Expand Down
4 changes: 3 additions & 1 deletion openml/exceptions.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
# License: BSD 3-Clause

from typing import Optional


class PyOpenMLError(Exception):
def __init__(self, message: str):
Expand All @@ -20,7 +22,7 @@ class OpenMLServerException(OpenMLServerError):

# Code needs to be optional to allow the exception to be picklable:
# https://stackoverflow.com/questions/16244923/how-to-make-a-custom-exception-class-with-multiple-init-args-pickleable # noqa: E501
def __init__(self, message: str, code: int = None, url: str = None):
def __init__(self, message: str, code: Optional[int] = None, url: Optional[str] = None):
self.message = message
self.code = code
self.url = url
Expand Down
2 changes: 1 addition & 1 deletion openml/flows/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -374,7 +374,7 @@ def _check_flow_for_server_id(flow: OpenMLFlow) -> None:
def assert_flows_equal(
flow1: OpenMLFlow,
flow2: OpenMLFlow,
ignore_parameter_values_on_older_children: str = None,
ignore_parameter_values_on_older_children: Optional[str] = None,
ignore_parameter_values: bool = False,
ignore_custom_name_if_none: bool = False,
check_description: bool = True,
Expand Down
12 changes: 6 additions & 6 deletions openml/runs/functions.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,8 @@ def run_model_on_task(
model: Any,
task: Union[int, str, OpenMLTask],
avoid_duplicate_runs: bool = True,
flow_tags: List[str] = None,
seed: int = None,
flow_tags: Optional[List[str]] = None,
seed: Optional[int] = None,
add_local_measures: bool = True,
upload_flow: bool = False,
return_flow: bool = False,
Expand Down Expand Up @@ -148,8 +148,8 @@ def run_flow_on_task(
flow: OpenMLFlow,
task: OpenMLTask,
avoid_duplicate_runs: bool = True,
flow_tags: List[str] = None,
seed: int = None,
flow_tags: Optional[List[str]] = None,
seed: Optional[int] = None,
add_local_measures: bool = True,
upload_flow: bool = False,
dataset_format: str = "dataframe",
Expand Down Expand Up @@ -438,7 +438,7 @@ def _run_task_get_arffcontent(
extension: "Extension",
add_local_measures: bool,
dataset_format: str,
n_jobs: int = None,
n_jobs: Optional[int] = None,
) -> Tuple[
List[List],
Optional[OpenMLRunTrace],
Expand Down Expand Up @@ -621,7 +621,7 @@ def _run_task_get_arffcontent_parallel_helper(
sample_no: int,
task: OpenMLTask,
dataset_format: str,
configuration: Dict = None,
configuration: Optional[Dict] = None,
) -> Tuple[
np.ndarray,
Optional[pd.DataFrame],
Expand Down