Fix issues in parallel processing code
constantinpape committed Jul 10, 2024
1 parent 87ee6af commit cfc1773
Showing 3 changed files with 21 additions and 6 deletions.
2 changes: 1 addition & 1 deletion elf/parallel/common.py
@@ -3,7 +3,7 @@


 def _is_chunk_aligned(shape, chunks):
-    return all(ch % sh == 0 for sh, ch in zip(shape, chunks))
+    return all(sh % ch == 0 for sh, ch in zip(shape, chunks))


 def get_blocking(data, block_shape, roi, n_threads):
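
For context, the corrected check requires every axis of the array shape to be an integer multiple of the chunk length on that axis; the old version had the modulo operands swapped. A minimal standalone sketch (not the library code as shipped, example shapes made up):

def _is_chunk_aligned(shape, chunks):
    # Every axis length must be an integer multiple of its chunk length.
    return all(sh % ch == 0 for sh, ch in zip(shape, chunks))

assert _is_chunk_aligned(shape=(1024, 1024), chunks=(256, 256))      # 1024 % 256 == 0 on both axes
assert not _is_chunk_aligned(shape=(1000, 1024), chunks=(256, 256))  # 1000 % 256 != 0
# The old check computed ch % sh, e.g. 256 % 1024 == 256, so it returned False
# even for perfectly chunk-aligned data.
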
2 changes: 2 additions & 0 deletions elf/parallel/size_filter.py
@@ -83,6 +83,8 @@ def size_filter(data, out, min_size=None, max_size=None,
     mapping = {idx: ii for ii, idx in enumerate(remaining_ids)}
     if 0 in mapping:
         assert mapping[0] == 0
+    else:
+        mapping[0] = 0

     @threadpool_limits.wrap(limits=1)  # restrict the numpy threadpool to 1 to avoid oversubscription
     def _relabel(seg, block_mask):
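
The added else branch makes sure the background id 0 is always present in the relabeling table, even when 0 is not among the ids that survive the size filter. A small sketch with made-up ids, illustrating only the mapping construction (not the library's relabeling code):

remaining_ids = [3, 7, 9]  # hypothetical ids kept by the size filter; 0 is not among them
mapping = {idx: ii for ii, idx in enumerate(remaining_ids)}
if 0 in mapping:
    assert mapping[0] == 0
else:
    mapping[0] = 0  # make sure background pixels (id 0) can still be looked up
print(mapping)  # {3: 0, 7: 1, 9: 2, 0: 0}
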
23 changes: 18 additions & 5 deletions elf/parallel/unique.py
@@ -41,7 +41,7 @@ def _unique(block_id):
         # check if we have a mask and if we do if we
         # have pixels in the mask
         if mask is not None:
-            m = mask[bb].astype('bool')
+            m = mask[bb].astype("bool")
             if m.sum() == 0:
                 return None
@@ -60,14 +60,27 @@ def _unique(block_id):

         unique_values = [res[0] for res in results]
         count_values = [res[1] for res in results]
-        uniques = np.unique(np.concatenate(unique_values))
-        counts = np.zeros(int(uniques[-1]) + 1, dtype='uint64')
+
+        # We may have no values at all if everything was masked.
+        # In that case return zero as only value and full count.
+        try:
+            uniques = np.unique(np.concatenate(unique_values))
+        except ValueError:
+            return np.array([0], dtype=data.dtype), np.array([data.size], dtype="uint64")
+
+        counts = np.zeros(int(uniques[-1]) + 1, dtype="uint64")

         for uniques_v, counts_v in zip(unique_values, count_values):
-            counts[uniques_v] += counts_v.astype('uint64')
+            counts[uniques_v] += counts_v.astype("uint64")
         counts = counts[counts != 0]
         assert len(counts) == len(uniques)
         return uniques, counts

     else:
-        return np.unique(np.concatenate(results))
+        try:
+            uniques = np.unique(np.concatenate(results))
+        except ValueError:
+            return np.array([0], dtype=data.dtype)
+
+        return uniques
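
The new try/except covers the case where every block is skipped because its mask is empty: the list passed to np.concatenate is then empty and numpy raises a ValueError. A small standalone sketch of that corner case (illustrative array only, not the library code):

import numpy as np

data = np.zeros((4, 4), dtype="uint32")
results = []  # every block was masked out, so no per-block uniques were collected

try:
    uniques = np.unique(np.concatenate(results))
except ValueError:  # np.concatenate raises "need at least one array to concatenate"
    uniques = np.array([0], dtype=data.dtype)

print(uniques)  # [0] -- zero is reported as the only value in the fully masked case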
