Deprecate RandomState (using names only) #3372
Merged
Azure Pipelines / scverse.scanpy
failed
Nov 18, 2024 in 26m 59s
Build #20241118.2 had test failures
- Failed: 24 (0.26%)
- Passed: 8,593 (92.70%)
- Other: 653 (7.04%)
- Total: 9,270
- 9,849 of 12,907 lines covered (76.31%)
Annotations
Check failure on line 4789 in Build log
azure-pipelines / scverse.scanpy
Build log #L4789
Bash exited with code '1'.
azure-pipelines / scverse.scanpy
tests/test_pca.py::test_pca_warnings[dask_array_dense-zero_center-None-None]
@pytest.mark.parametrize(
("array_type", "zero_center", "svd_solver", "warn_pat_expected"),
[
pytest.param(
array_type.values[0],
zero_center,
svd_solver,
warn_pat_expected,
marks=(
array_type.marks
if xfail_reason is None
else [pytest.mark.xfail(reason=xfail_reason)]
),
id=(
f"{array_type.id}-{'zero_center' if zero_center else 'no_zero_center'}-"
f"{svd_solver or svd_solver_type}-{'xfail' if xfail_reason else warn_pat_expected}"
),
)
for array_type in ARRAY_TYPES
for zero_center in [True, False]
for svd_solver_type in [None, "valid", "invalid"]
for svd_solver, warn_pat_expected, xfail_reason in gen_pca_params(
array_type=array_type.values[0],
zero_center=zero_center,
svd_solver_type=svd_solver_type,
)
],
)
def test_pca_warnings(
*,
array_type: ArrayType,
zero_center: bool,
svd_solver: SVDSolver,
warn_pat_expected: str | None,
):
A = array_type(A_list).astype("float32")
adata = AnnData(A)
if warn_pat_expected is not None:
with pytest.warns((UserWarning, FutureWarning), match=warn_pat_expected):
warnings.filterwarnings(
"ignore", r".*Using a dense eigensolver instead of LOBPCG", UserWarning
)
sc.pp.pca(adata, svd_solver=svd_solver, zero_center=zero_center)
return
warnings.simplefilter("error")
warnings.filterwarnings(
"ignore", "pkg_resources is deprecated as an API", DeprecationWarning
)
> sc.pp.pca(adata, svd_solver=svd_solver, zero_center=zero_center)
tests/test_pca.py:240:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/hostedtoolcache/Python/3.12.7/x64/lib/python3.12/site-packages/scanpy/preprocessing/_pca/__init__.py:345: in pca
from dask_ml.decomposition import PCA
/opt/hostedtoolcache/Python/3.12.7/x64/lib/python3.12/site-packages/dask_ml/__init__.py:2: in <module>
from dask_ml.model_selection import _normalize # noqa: F401
/opt/hostedtoolcache/Python/3.12.7/x64/lib/python3.12/site-packages/dask_ml/model_selection/__init__.py:9: in <module>
from ._search import GridSearchCV, RandomizedSearchCV, check_cv, compute_n_splits
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
from __future__ import absolute_import, division, print_function
import logging
import numbers
from collections import defaultdict
from itertools import repeat
from multiprocessing import cpu_count
from operator import getitem
import dask
import numpy as np
import packaging.version
from dask.base import tokenize
from dask.delayed import delayed
from dask.distributed import as_completed
from dask.utils import derived_from
from sklearn import model_selection
from sklearn.base import BaseEstimator, MetaEstimatorMixin, clone, is_classifier
from sklearn.exceptions import NotFittedError
from sklearn.model_selection._search import BaseSearchCV
from sklearn.model_selection._split import (
BaseShuffleSplit,
KFold,
LeaveOneGroupOut,
LeaveOneOut,
LeavePGroupsOut,
LeavePOut,
PredefinedSplit,
StratifiedKFold,
_BaseKFold,
_CVIterableWrapper,
)
from sklearn.pipeline import FeatureUnion, Pipeline
> from sklearn.utils._tags import _safe_tags
E ImportError: cannot import name '_safe_tags' from 'sklearn.utils._tags' (/opt/hostedtoolcache/Python/3.12.7/x64/lib/python3.12/site-packages/sklearn/utils/_tags.py)
Raw output
/opt/hostedtoolcache/Python/3.12.7/x64/lib/python3.12/site-packages/dask_ml/model_selection/_search.py:34: ImportError: cannot import name '_safe_tags' from 'sklearn.utils._tags' (/opt/hostedtoolcache/Python/3.12.7/x64/lib/python3.12/site-packages/sklearn/utils/_tags.py)
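This and the remaining dask_array_dense parametrizations of test_pca_warnings listed below fail on the same import chain: sc.pp.pca on a dense dask array imports dask_ml.decomposition.PCA, dask_ml's package __init__ pulls in dask_ml.model_selection._search, and that module imports the private helper _safe_tags from sklearn.utils._tags, which the scikit-learn build installed in this CI image no longer ships, so the import raises before PCA ever runs. Below is a minimal sketch of a pre-flight probe; the helper name dask_ml_importable is hypothetical, and the probe assumes (as the traceback indicates) that importing dask_ml.decomposition is enough to trigger the incompatible import.

# Minimal sketch (hypothetical helper): check whether dask_ml imports cleanly
# against the installed scikit-learn before exercising the dask-backed PCA path.
import importlib.util


def dask_ml_importable() -> bool:
    """Return True if dask_ml can be imported with the installed scikit-learn."""
    if importlib.util.find_spec("dask_ml") is None:
        return False  # dask_ml is not installed at all
    try:
        # Importing any dask_ml submodule first executes dask_ml/__init__.py,
        # which reaches the `from sklearn.utils._tags import _safe_tags` line
        # shown in the traceback above.
        import dask_ml.decomposition  # noqa: F401
    except ImportError:
        return False  # e.g. _safe_tags removed from newer sklearn.utils._tags
    return True

Such a probe could back a pytest skipif mark for the dask-ml-dependent parametrizations; pinning scikit-learn in the CI environment to a release that still ships _safe_tags would be an alternative mitigation.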
azure-pipelines / scverse.scanpy
tests/test_pca.py::test_pca_warnings[dask_array_dense-zero_center-auto-None]
azure-pipelines / scverse.scanpy
tests/test_pca.py::test_pca_warnings[dask_array_dense-zero_center-full-None]
azure-pipelines / scverse.scanpy
tests/test_pca.py::test_pca_warnings[dask_array_dense-zero_center-randomized-None]