# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def get_clusters_data(self, load_all=None):
    """Return one Bunch per cluster, each with `pos` and `spike_ids` attributes."""
    if not len(self.cluster_ids):
        return
    requested = list(self.cluster_ids)
    if not load_all:
        # Prepend the background pseudo-cluster (None); skipped when splitting.
        requested = [None] + requested
    bunchs = self.amplitudes[self.amplitudes_type](requested, load_all=load_all) or ()
    background_color = (.5, .5, .5, .5)
    for index, (cluster_id, bunch) in enumerate(zip(requested, bunchs)):
        spike_ids = _as_array(bunch.spike_ids)
        times = _as_array(bunch.spike_times)
        amps = _as_array(bunch.amplitudes)
        assert spike_ids.shape == times.shape == amps.shape
        # The LassoMixin requires a 2D `pos` array: (time, amplitude) per spike.
        bunch.pos = np.c_[times, amps]
        assert bunch.pos.ndim == 2
        bunch.cluster_id = cluster_id
        if cluster_id is None:
            bunch.color = background_color
        else:
            bunch.color = selected_cluster_color(index - 1, self.marker_alpha)
    return bunchs
def _concatenate_spike_clusters(*pairs):
    """Concatenate a list of pairs (spike_ids, spike_clusters)."""
    # Turn each (ids, clusters) pair into a two-column array, then stack them all.
    columns = [np.c_[_as_array(ids), _as_array(clusters)] for ids, clusters in pairs]
    stacked = np.vstack(columns)
    # Reorder rows by spike id so the result is sorted on the first column.
    stacked = stacked[np.argsort(stacked[:, 0]), :]
    return stacked[:, 0].astype(np.int64), stacked[:, 1].astype(np.int64)
def _zoom_aspect(self, zoom=None):
    """Return the zoom scaled by the canvas aspect ratio (1 when no aspect is set)."""
    if zoom is None:
        zoom = self._zoom
    zoom = _as_array(zoom)
    if self._aspect is None:
        aspect = 1.
    else:
        aspect = self._canvas_aspect * self._aspect
    return zoom * aspect
def _do_assign(self, spike_ids, new_spike_clusters):
"""Make spike-cluster assignments after the spike selection has
been extended to full clusters."""
# Ensure spike_clusters has the right shape.
spike_ids = _as_array(spike_ids)
# A single new cluster id is broadcast to every selected spike.
if len(new_spike_clusters) == 1 and len(spike_ids) > 1:
new_spike_clusters = np.ones(len(spike_ids), dtype=np.int64) * new_spike_clusters[0]
# Previous assignment of the selected spikes, read from the instance state.
old_spike_clusters = self._spike_clusters[spike_ids]
assert len(spike_ids) == len(old_spike_clusters)
assert len(new_spike_clusters) == len(spike_ids)
# Update the spikes per cluster structure.
old_clusters = _unique(old_spike_clusters)
# NOTE: shortcut to a merge if this assignment is effectively a merge
# i.e. if all spikes are assigned to a single cluster.
# The fact that spike selection has been previously extended to
# whole clusters is critical here.
new_clusters = _unique(new_spike_clusters)
if len(new_clusters) == 1:
# NOTE(review): the body of this branch (the merge shortcut) is not
# visible in this chunk of the file.
def get_clusters_data(self, load_all=None):
    """Return a list of Bunch instances, with attributes pos and spike_ids."""
    if not len(self.cluster_ids):
        return
    cluster_ids = list(self.cluster_ids)
    if not load_all:
        # Splitting does not need background spikes; otherwise the None
        # entry at the front stands for the background.
        cluster_ids.insert(0, None)
    bunchs = self.amplitudes[self.amplitudes_type](cluster_ids, load_all=load_all) or ()
    for i, (cluster_id, bunch) in enumerate(zip(cluster_ids, bunchs)):
        ids, times, amps = (
            _as_array(bunch.spike_ids),
            _as_array(bunch.spike_times),
            _as_array(bunch.amplitudes),
        )
        assert ids.shape == times.shape == amps.shape
        # Expose a 2D pos array (time, amplitude), as used by the LassoMixin.
        bunch.pos = np.c_[times, amps]
        assert bunch.pos.ndim == 2
        bunch.cluster_id = cluster_id
        is_background = cluster_id is None
        bunch.color = (
            (.5, .5, .5, .5) if is_background
            else selected_cluster_color(i - 1, self.marker_alpha))
    return bunchs
def _extend_assignment(spike_ids, old_spike_clusters, spike_clusters_rel, new_cluster_id):
"""Extend a relative spike-cluster assignment to all spikes of the modified clusters."""
# 1. Add spikes that belong to modified clusters.
# 2. Find new cluster ids for all changed clusters.
old_spike_clusters = _as_array(old_spike_clusters)
spike_ids = _as_array(spike_ids)
assert isinstance(spike_clusters_rel, (list, np.ndarray))
spike_clusters_rel = _as_array(spike_clusters_rel)
# Relative cluster indices must be non-negative before renumbering.
assert spike_clusters_rel.min() >= 0
# We renumber the new cluster indices.
# Shift the relative ids so the smallest one maps to new_cluster_id.
new_spike_clusters = (spike_clusters_rel + (new_cluster_id - spike_clusters_rel.min()))
# We find the spikes belonging to modified clusters.
extended_spike_ids = _extend_spikes(spike_ids, old_spike_clusters)
# No extra spikes: the assignment is already complete.
if len(extended_spike_ids) == 0:
return spike_ids, new_spike_clusters
# We take their clusters.
extended_spike_clusters = old_spike_clusters[extended_spike_ids]
# NOTE(review): the remainder of this function is not visible in this chunk.
def _extend_assignment(spike_ids, old_spike_clusters, spike_clusters_rel, new_cluster_id):
"""Extend a relative spike-cluster assignment to all spikes of the modified clusters."""
# 1. Add spikes that belong to modified clusters.
# 2. Find new cluster ids for all changed clusters.
old_spike_clusters = _as_array(old_spike_clusters)
spike_ids = _as_array(spike_ids)
assert isinstance(spike_clusters_rel, (list, np.ndarray))
spike_clusters_rel = _as_array(spike_clusters_rel)
# Relative cluster indices must be non-negative before renumbering.
assert spike_clusters_rel.min() >= 0
# We renumber the new cluster indices.
# Shift the relative ids so the smallest one maps to new_cluster_id.
new_spike_clusters = (spike_clusters_rel + (new_cluster_id - spike_clusters_rel.min()))
# We find the spikes belonging to modified clusters.
extended_spike_ids = _extend_spikes(spike_ids, old_spike_clusters)
# No extra spikes: the assignment is already complete.
if len(extended_spike_ids) == 0:
return spike_ids, new_spike_clusters
# We take their clusters.
extended_spike_clusters = old_spike_clusters[extended_spike_ids]
# Use relative numbers in extended_spike_clusters.
# NOTE(review): the remainder of this function is not visible in this chunk.
def get_clusters_data(self, load_all=None):
    """Return a list of Bunch instances, with attributes pos and spike_ids."""
    if not len(self.cluster_ids):
        return

    def _color(position, cluster_id):
        # Background spikes (cluster None) get a fixed translucent grey.
        if cluster_id is None:
            return (.5, .5, .5, .5)
        return selected_cluster_color(position - 1, self.marker_alpha)

    cluster_ids = ([] if load_all else [None]) + list(self.cluster_ids)
    loader = self.amplitudes[self.amplitudes_type]
    bunchs = loader(cluster_ids, load_all=load_all) or ()
    for position, (cluster_id, bunch) in enumerate(zip(cluster_ids, bunchs)):
        spike_ids = _as_array(bunch.spike_ids)
        spike_times = _as_array(bunch.spike_times)
        amplitudes = _as_array(bunch.amplitudes)
        assert spike_ids.shape == spike_times.shape == amplitudes.shape
        # Build the 2D (time, amplitude) position array required by the LassoMixin.
        bunch.pos = np.c_[spike_times, amplitudes]
        assert bunch.pos.ndim == 2
        bunch.cluster_id = cluster_id
        bunch.color = _color(position, cluster_id)
    return bunchs