Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

dask-version #1711

Open
wants to merge 1 commit into
base: main
Choose a base branch
from

Update pyproject.toml

dd69c0a
Select commit
Loading
Failed to load commit list.
Open

dask-version #1711

Update pyproject.toml
dd69c0a
Select commit
Loading
Failed to load commit list.
Azure Pipelines / scverse.anndata failed Oct 14, 2024 in 17m 43s

Build #20241014.3 had test failures

Details

Tests

  • Failed: 106 (1.08%)
  • Passed: 8,317 (84.59%)
  • Other: 1,409 (14.33%)
  • Total: 9,832
Code coverage

  • 5062 of 6026 lines covered (84.00%)

Annotations

Check failure on line 19 in Build log

See this annotation in the file changed.

@azure-pipelines azure-pipelines / scverse.anndata

Build log #L19

Bash exited with code '4'.

Check failure on line 10 in Build log

See this annotation in the file changed.

@azure-pipelines azure-pipelines / scverse.anndata

Build log #L10

No code coverage results were found to publish.

Check failure on line 19 in Build log

See this annotation in the file changed.

@azure-pipelines azure-pipelines / scverse.anndata

Build log #L19

Bash exited with code '4'.

Check failure on line 6051 in Build log

See this annotation in the file changed.

@azure-pipelines azure-pipelines / scverse.anndata

Build log #L6051

Bash exited with code '1'.

Check failure on line 1 in test_concatenate_roundtrip[inner-sparse_dask_array-concat_func0-False]

See this annotation in the file changed.

@azure-pipelines azure-pipelines / scverse.anndata

test_concatenate_roundtrip[inner-sparse_dask_array-concat_func0-False]

ValueError: zero-dimensional arrays cannot be concatenated
Raw output
join_type = 'inner'
array_type = <function as_sparse_dask_array at 0x7fad43daf280>
concat_func = functools.partial(<function concat at 0x7fad43e0eb80>, merge='unique')
backwards_compat = False

    @mark_legacy_concatenate
    @pytest.mark.parametrize(
        ("concat_func", "backwards_compat"),
        [
            (partial(concat, merge="unique"), False),
            (lambda x, **kwargs: x[0].concatenate(x[1:], **kwargs), True),
        ],
    )
    def test_concatenate_roundtrip(join_type, array_type, concat_func, backwards_compat):
        adata = gen_adata((100, 10), X_type=array_type, **GEN_ADATA_DASK_ARGS)
    
        remaining = adata.obs_names
        subsets = []
        while len(remaining) > 0:
            n = min(len(remaining), np.random.choice(50))
            subset_idx = np.random.choice(remaining, n, replace=False)
            subsets.append(adata[subset_idx])
            remaining = remaining.difference(subset_idx)
    
        result = concat_func(subsets, join=join_type, uns_merge="same", index_unique=None)
    
        # Correcting for known differences
        orig, result = fix_known_differences(
            adata, result, backwards_compat=backwards_compat
        )
    
>       assert_equal(result[orig.obs_names].copy(), orig)

/home/vsts/work/1/s/tests/test_concatenate.py:197: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/functools.py:888: in wrapper
    return dispatch(args[0].__class__)(*args, **kw)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/anndata/tests/helpers.py:764: in assert_adata_equal
    assert_equal(
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/functools.py:888: in wrapper
    return dispatch(args[0].__class__)(*args, **kw)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/anndata/tests/helpers.py:627: in assert_equal_dask_array
    assert_equal(b, a.compute(), exact, elem_name)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/base.py:376: in compute
    (result,) = compute(self, traverse=False, **kwargs)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/base.py:662: in compute
    results = schedule(dsk, keys, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

arrs = [<5x5 sparse matrix of type '<class 'numpy.float32'>'
	with 12 stored elements in Compressed Sparse Row format>, <5x5 sparse matrix of type '<class 'numpy.float32'>'
	with 11 stored elements in Compressed Sparse Row format>]
sorter = array([7, 5, 0, 9, 6, 4, 1, 2, 3, 8]), axis = 0

    def concatenate_arrays(arrs, sorter, axis):
>       return np.take(np.concatenate(arrs, axis=axis), np.argsort(sorter), axis=axis)
E       ValueError: zero-dimensional arrays cannot be concatenated

/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/array/_shuffle.py:190: ValueError

Check failure on line 1 in test_concatenate_roundtrip[inner-sparse_dask_array-<lambda>-True]

See this annotation in the file changed.

@azure-pipelines azure-pipelines / scverse.anndata

test_concatenate_roundtrip[inner-sparse_dask_array-<lambda>-True]

ValueError: zero-dimensional arrays cannot be concatenated
Raw output
join_type = 'inner'
array_type = <function as_sparse_dask_array at 0x7fad43daf280>
concat_func = <function <lambda> at 0x7fad40ed2b80>, backwards_compat = True

    @mark_legacy_concatenate
    @pytest.mark.parametrize(
        ("concat_func", "backwards_compat"),
        [
            (partial(concat, merge="unique"), False),
            (lambda x, **kwargs: x[0].concatenate(x[1:], **kwargs), True),
        ],
    )
    def test_concatenate_roundtrip(join_type, array_type, concat_func, backwards_compat):
        adata = gen_adata((100, 10), X_type=array_type, **GEN_ADATA_DASK_ARGS)
    
        remaining = adata.obs_names
        subsets = []
        while len(remaining) > 0:
            n = min(len(remaining), np.random.choice(50))
            subset_idx = np.random.choice(remaining, n, replace=False)
            subsets.append(adata[subset_idx])
            remaining = remaining.difference(subset_idx)
    
        result = concat_func(subsets, join=join_type, uns_merge="same", index_unique=None)
    
        # Correcting for known differences
        orig, result = fix_known_differences(
            adata, result, backwards_compat=backwards_compat
        )
    
>       assert_equal(result[orig.obs_names].copy(), orig)

/home/vsts/work/1/s/tests/test_concatenate.py:197: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/functools.py:888: in wrapper
    return dispatch(args[0].__class__)(*args, **kw)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/anndata/tests/helpers.py:764: in assert_adata_equal
    assert_equal(
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/functools.py:888: in wrapper
    return dispatch(args[0].__class__)(*args, **kw)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/anndata/tests/helpers.py:627: in assert_equal_dask_array
    assert_equal(b, a.compute(), exact, elem_name)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/base.py:376: in compute
    (result,) = compute(self, traverse=False, **kwargs)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/base.py:662: in compute
    results = schedule(dsk, keys, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

arrs = [<8x5 sparse matrix of type '<class 'numpy.float32'>'
	with 16 stored elements in Compressed Sparse Row format>, <10x5 sparse matrix of type '<class 'numpy.float32'>'
	with 12 stored elements in Compressed Sparse Row format>]
sorter = array([16,  6,  7, 11,  0,  4,  3, 13,  9, 10, 14, 15,  1,  5, 12,  2,  8,
       17])
axis = 0

    def concatenate_arrays(arrs, sorter, axis):
>       return np.take(np.concatenate(arrs, axis=axis), np.argsort(sorter), axis=axis)
E       ValueError: zero-dimensional arrays cannot be concatenated

/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/array/_shuffle.py:190: ValueError

Check failure on line 1 in test_concatenate_roundtrip[outer-sparse_dask_array-<lambda>-True]

See this annotation in the file changed.

@azure-pipelines azure-pipelines / scverse.anndata

test_concatenate_roundtrip[outer-sparse_dask_array-<lambda>-True]

ValueError: zero-dimensional arrays cannot be concatenated
Raw output
join_type = 'outer'
array_type = <function as_sparse_dask_array at 0x7fad43daf280>
concat_func = <function <lambda> at 0x7fad40ed2b80>, backwards_compat = True

    @mark_legacy_concatenate
    @pytest.mark.parametrize(
        ("concat_func", "backwards_compat"),
        [
            (partial(concat, merge="unique"), False),
            (lambda x, **kwargs: x[0].concatenate(x[1:], **kwargs), True),
        ],
    )
    def test_concatenate_roundtrip(join_type, array_type, concat_func, backwards_compat):
        adata = gen_adata((100, 10), X_type=array_type, **GEN_ADATA_DASK_ARGS)
    
        remaining = adata.obs_names
        subsets = []
        while len(remaining) > 0:
            n = min(len(remaining), np.random.choice(50))
            subset_idx = np.random.choice(remaining, n, replace=False)
            subsets.append(adata[subset_idx])
            remaining = remaining.difference(subset_idx)
    
        result = concat_func(subsets, join=join_type, uns_merge="same", index_unique=None)
    
        # Correcting for known differences
        orig, result = fix_known_differences(
            adata, result, backwards_compat=backwards_compat
        )
    
>       assert_equal(result[orig.obs_names].copy(), orig)

/home/vsts/work/1/s/tests/test_concatenate.py:197: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/functools.py:888: in wrapper
    return dispatch(args[0].__class__)(*args, **kw)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/anndata/tests/helpers.py:764: in assert_adata_equal
    assert_equal(
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/functools.py:888: in wrapper
    return dispatch(args[0].__class__)(*args, **kw)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/anndata/tests/helpers.py:627: in assert_equal_dask_array
    assert_equal(b, a.compute(), exact, elem_name)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/base.py:376: in compute
    (result,) = compute(self, traverse=False, **kwargs)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/base.py:662: in compute
    results = schedule(dsk, keys, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

arrs = [<20x5 sparse matrix of type '<class 'numpy.float32'>'
	with 40 stored elements in Compressed Sparse Row format>, <11x5 sparse matrix of type '<class 'numpy.float32'>'
	with 22 stored elements in Compressed Sparse Row format>]
sorter = array([ 2, 13, 30,  0, 12,  8, 22, 24, 10, 23,  1, 26, 18, 17, 19, 25, 15,
       29, 21,  6,  7,  9, 27, 14, 11, 28, 20,  5, 16,  4,  3])
axis = 0

    def concatenate_arrays(arrs, sorter, axis):
>       return np.take(np.concatenate(arrs, axis=axis), np.argsort(sorter), axis=axis)
E       ValueError: zero-dimensional arrays cannot be concatenated

/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/array/_shuffle.py:190: ValueError

Check failure on line 1 in test_concatenate_roundtrip[outer-sparse_dask_array-concat_func0-False]

See this annotation in the file changed.

@azure-pipelines azure-pipelines / scverse.anndata

test_concatenate_roundtrip[outer-sparse_dask_array-concat_func0-False]

ValueError: zero-dimensional arrays cannot be concatenated
Raw output
join_type = 'outer'
array_type = <function as_sparse_dask_array at 0x7fad43daf280>
concat_func = functools.partial(<function concat at 0x7fad43e0eb80>, merge='unique')
backwards_compat = False

    @mark_legacy_concatenate
    @pytest.mark.parametrize(
        ("concat_func", "backwards_compat"),
        [
            (partial(concat, merge="unique"), False),
            (lambda x, **kwargs: x[0].concatenate(x[1:], **kwargs), True),
        ],
    )
    def test_concatenate_roundtrip(join_type, array_type, concat_func, backwards_compat):
        adata = gen_adata((100, 10), X_type=array_type, **GEN_ADATA_DASK_ARGS)
    
        remaining = adata.obs_names
        subsets = []
        while len(remaining) > 0:
            n = min(len(remaining), np.random.choice(50))
            subset_idx = np.random.choice(remaining, n, replace=False)
            subsets.append(adata[subset_idx])
            remaining = remaining.difference(subset_idx)
    
        result = concat_func(subsets, join=join_type, uns_merge="same", index_unique=None)
    
        # Correcting for known differences
        orig, result = fix_known_differences(
            adata, result, backwards_compat=backwards_compat
        )
    
>       assert_equal(result[orig.obs_names].copy(), orig)

/home/vsts/work/1/s/tests/test_concatenate.py:197: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/functools.py:888: in wrapper
    return dispatch(args[0].__class__)(*args, **kw)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/anndata/tests/helpers.py:764: in assert_adata_equal
    assert_equal(
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/functools.py:888: in wrapper
    return dispatch(args[0].__class__)(*args, **kw)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/anndata/tests/helpers.py:627: in assert_equal_dask_array
    assert_equal(b, a.compute(), exact, elem_name)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/base.py:376: in compute
    (result,) = compute(self, traverse=False, **kwargs)
/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/base.py:662: in compute
    results = schedule(dsk, keys, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

arrs = [<19x5 sparse matrix of type '<class 'numpy.float32'>'
	with 38 stored elements in Compressed Sparse Row format>, <15x5 sparse matrix of type '<class 'numpy.float32'>'
	with 28 stored elements in Compressed Sparse Row format>]
sorter = array([33,  5,  2, 27, 30, 11, 14, 15, 20, 18, 22, 16, 21, 25,  9,  7,  1,
       28,  6, 26, 32, 29,  3, 19, 31,  0,  4, 17, 23, 10, 12, 13, 24,  8])
axis = 0

    def concatenate_arrays(arrs, sorter, axis):
>       return np.take(np.concatenate(arrs, axis=axis), np.argsort(sorter), axis=axis)
E       ValueError: zero-dimensional arrays cannot be concatenated

/opt/hostedtoolcache/Python/3.9.20/x64/lib/python3.9/site-packages/dask/array/_shuffle.py:190: ValueError