Switch from using rubric in release notes #3172
Merged
Azure Pipelines / scverse.scanpy
failed
Jul 26, 2024 in 23m 25s
Build #20240726.10 had test failures
Details
- Failed: 2 (0.02%)
- Passed: 8,088 (93.13%)
- Other: 595 (6.85%)
- Total: 8,685
- 9566 of 12526 lines covered (76.37%)
Annotations
Check failure on line 2686 in Build log
azure-pipelines / scverse.scanpy
Build log #L2686
Bash exited with code '1'.
Check failure on line 1 in tests/test_utils.py::test_is_constant_dask[csr_matrix-0]
azure-pipelines / scverse.scanpy
tests/test_utils.py::test_is_constant_dask[csr_matrix-0]
TypeError: int() argument must be a string, a bytes-like object or a number, not 'csr_matrix'
The above exception was the direct cause of the following exception:
axis = 0, expected = [True, True, False, False]
block_type = <class 'scipy.sparse._csr.csr_matrix'>
@needs.dask
@pytest.mark.parametrize(
("axis", "expected"),
[
pytest.param(None, False, id="None"),
pytest.param(0, [True, True, False, False], id="0"),
pytest.param(1, [False, False, True, True, False, True], id="1"),
],
)
@pytest.mark.parametrize("block_type", [np.array, csr_matrix])
def test_is_constant_dask(axis, expected, block_type):
import dask.array as da
if (axis is None) and block_type is csr_matrix:
pytest.skip("Dask has weak support for scipy sparse matrices")
x_data = [
[0, 0, 1, 1],
[0, 0, 1, 1],
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 0],
]
x = da.from_array(np.array(x_data), chunks=2).map_blocks(block_type)
> result = is_constant(x, axis=axis).compute()
tests/test_utils.py:243:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/base.py:315: in compute
(result,) = compute(self, traverse=False, **kwargs)
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/base.py:600: in compute
results = schedule(dsk, keys, **kwargs)
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/threaded.py:89: in get
results = get_async(
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/local.py:511: in get_async
raise_exception(exc, tb)
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/local.py:319: in reraise
raise exc
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/local.py:224: in execute_task
result = _execute_task(task, data)
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/core.py:119: in _execute_task
return func(*(_execute_task(a, cache) for a in args))
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/core.py:119: in <genexpr>
return func(*(_execute_task(a, cache) for a in args))
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/core.py:119: in _execute_task
return func(*(_execute_task(a, cache) for a in args))
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/array/core.py:5302: in concatenate_axes
return concatenate3(transposelist(arrays, axes, extradims=extradims))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
arrays = [[<2x2 sparse matrix of type '<class 'numpy.int64'>'
with 4 stored elements in Compressed Sparse Row format>], [<2x2 ...ormat>], [<2x2 sparse matrix of type '<class 'numpy.int64'>'
with 1 stored elements in Compressed Sparse Row format>]]
def concatenate3(arrays):
"""Recursive np.concatenate
Input should be a nested list of numpy arrays arranged in the order they
should appear in the array itself. Each array should have the same number
of dimensions as the desired output and the nesting of the lists.
>>> x = np.array([[1, 2]])
>>> concatenate3([[x, x, x], [x, x, x]])
array([[1, 2, 1, 2, 1, 2],
[1, 2, 1, 2, 1, 2]])
>>> concatenate3([[x, x], [x, x], [x, x]])
array([[1, 2, 1, 2],
[1, 2, 1, 2],
[1, 2, 1, 2]])
"""
# We need this as __array_function__ may not exist on older NumPy versions.
# And to reduce verbosity.
NDARRAY_ARRAY_FUNCTION = getattr(np.ndarray, "__array_function__", None)
arrays = concrete(arrays)
Raw output
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/array/core.py:5291: ValueError: setting an array element with a sequence.
Check failure on line 1 in tests/test_utils.py::test_is_constant_dask[csr_matrix-1]
azure-pipelines / scverse.scanpy
tests/test_utils.py::test_is_constant_dask[csr_matrix-1]
TypeError: int() argument must be a string, a bytes-like object or a number, not 'csr_matrix'
The above exception was the direct cause of the following exception:
axis = 1, expected = [False, False, True, True, False, True]
block_type = <class 'scipy.sparse._csr.csr_matrix'>
@needs.dask
@pytest.mark.parametrize(
("axis", "expected"),
[
pytest.param(None, False, id="None"),
pytest.param(0, [True, True, False, False], id="0"),
pytest.param(1, [False, False, True, True, False, True], id="1"),
],
)
@pytest.mark.parametrize("block_type", [np.array, csr_matrix])
def test_is_constant_dask(axis, expected, block_type):
import dask.array as da
if (axis is None) and block_type is csr_matrix:
pytest.skip("Dask has weak support for scipy sparse matrices")
x_data = [
[0, 0, 1, 1],
[0, 0, 1, 1],
[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 0],
]
x = da.from_array(np.array(x_data), chunks=2).map_blocks(block_type)
> result = is_constant(x, axis=axis).compute()
tests/test_utils.py:243:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/base.py:315: in compute
(result,) = compute(self, traverse=False, **kwargs)
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/base.py:600: in compute
results = schedule(dsk, keys, **kwargs)
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/threaded.py:89: in get
results = get_async(
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/local.py:511: in get_async
raise_exception(exc, tb)
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/local.py:319: in reraise
raise exc
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/local.py:224: in execute_task
result = _execute_task(task, data)
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/core.py:119: in _execute_task
return func(*(_execute_task(a, cache) for a in args))
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/core.py:119: in <genexpr>
return func(*(_execute_task(a, cache) for a in args))
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/core.py:119: in _execute_task
return func(*(_execute_task(a, cache) for a in args))
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/array/core.py:5302: in concatenate_axes
return concatenate3(transposelist(arrays, axes, extradims=extradims))
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
arrays = [[<2x2 sparse matrix of type '<class 'numpy.int64'>'
with 0 stored elements in Compressed Sparse Row format>, <2x2 sparse matrix of type '<class 'numpy.int64'>'
with 4 stored elements in Compressed Sparse Row format>]]
def concatenate3(arrays):
"""Recursive np.concatenate
Input should be a nested list of numpy arrays arranged in the order they
should appear in the array itself. Each array should have the same number
of dimensions as the desired output and the nesting of the lists.
>>> x = np.array([[1, 2]])
>>> concatenate3([[x, x, x], [x, x, x]])
array([[1, 2, 1, 2, 1, 2],
[1, 2, 1, 2, 1, 2]])
>>> concatenate3([[x, x], [x, x], [x, x]])
array([[1, 2, 1, 2],
[1, 2, 1, 2],
[1, 2, 1, 2]])
"""
# We need this as __array_function__ may not exist on older NumPy versions.
# And to reduce verbosity.
NDARRAY_ARRAY_FUNCTION = getattr(np.ndarray, "__array_function__", None)
arrays = concrete(arrays)
Raw output
/opt/hostedtoolcache/Python/3.9.19/x64/lib/python3.9/site-packages/dask/array/core.py:5291: ValueError: setting an array element with a sequence.
Loading