|
4 | 4 | import unittest.mock |
5 | 5 | import pytest |
6 | 6 | import openml |
| 7 | +import requests |
7 | 8 | from openml.testing import _check_dataset |
8 | 9 |
|
9 | 10 |
|
@@ -43,11 +44,6 @@ def min_number_evaluations_on_test_server() -> int: |
43 | 44 | return 8 |
44 | 45 |
|
45 | 46 |
|
46 | | -def _mocked_perform_api_call(call, request_method): |
47 | | - url = openml.config.server + call |
48 | | - return openml._api_calls._download_text_file(url) |
49 | | - |
50 | | - |
@pytest.mark.test_server()
def test_list_all():
    """Smoke test: paging through every task listing on the test server completes without error."""
    # Name the listing callable explicitly before handing it to the generic pager.
    task_lister = openml.tasks.functions._list_tasks
    openml.utils._list_all(listing_call=task_lister)
@@ -115,13 +111,12 @@ def test_list_all_for_evaluations(min_number_evaluations_on_test_server): |
115 | 111 | assert min_number_evaluations_on_test_server == len(evaluations) |
116 | 112 |
|
117 | 113 |
|
@unittest.mock.patch.object(requests.Session, "request", autospec=True, wraps=requests.Session.request)
@pytest.mark.test_server()
def test_list_all_few_results_available(mocked_request):
    """A listing that matches fewer results than the page size should need only one HTTP request.

    The real ``requests.Session.request`` is wrapped (not replaced), so the call
    still hits the test server; the spy only counts invocations.
    NOTE(review): ``autospec=True`` together with ``wraps=`` is only supported on
    recent Python/mock versions — confirm against the project's minimum Python.
    """
    # iris, version 1, is expected to exist exactly once on the test server.
    found = openml.datasets.list_datasets(size=1000, data_name="iris", data_version=1)
    assert len(found) == 1, "only one iris dataset version 1 should be present"
    # The pager must stop after the first short page rather than request more.
    assert mocked_request.call_count == 1, "expect just one call to get one dataset"
125 | 120 |
|
126 | 121 |
|
127 | 122 | @unittest.skipIf(os.name == "nt", "https://github.com/openml/openml-python/issues/1033") |
|
0 commit comments