# Fragment of a method body; the signature below is an assumption,
# reconstructed from the keyword arguments used by the caller at the end of
# this section (to_remove=..., to_add=...). Requires `import numpy as np`
# and a module-level `logger` (e.g. logger = logging.getLogger(__name__));
# `_unique` and `_spikes_per_cluster` are helpers from the same codebase.
def _update_cluster_ids(self, to_remove=None, to_add=None):
    # Update the list of non-empty cluster ids.
    self._cluster_ids = _unique(self._spike_clusters)
    # Clusters to remove.
    if to_remove is not None:
        for clu in to_remove:
            self._spikes_per_cluster.pop(clu, None)
    # Clusters to add.
    if to_add:
        for clu, spk in to_add.items():
            self._spikes_per_cluster[clu] = spk
    # If spikes_per_cluster is inconsistent with the current cluster ids,
    # recompute the entire spikes_per_cluster mapping.
    coherent = np.all(np.in1d(self._cluster_ids, sorted(self._spikes_per_cluster)))
    if not coherent:
        logger.debug("Recompute spikes_per_cluster manually: this might take a while.")
        sc = self._spike_clusters
        self._spikes_per_cluster = _spikes_per_cluster(sc)
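
# A minimal sketch (an assumption, not the actual implementation) of what a
# helper like _spikes_per_cluster computes: a {cluster_id: spike_ids}
# mapping, where the spike ids are the indices of the spikes currently
# assigned to that cluster. The name spikes_per_cluster_sketch is made up
# for illustration.
import numpy as np

def spikes_per_cluster_sketch(spike_clusters, spike_ids=None):
    """Return a {cluster_id: array_of_spike_ids} dict for non-empty clusters."""
    spike_clusters = np.asarray(spike_clusters)
    if spike_ids is None:
        # Default to absolute spike indices 0..n-1, matching the
        # one-argument call sites in this section.
        spike_ids = np.arange(len(spike_clusters))
    spike_ids = np.asarray(spike_ids)
    return {int(clu): spike_ids[spike_clusters == clu]
            for clu in np.unique(spike_clusters)}

# Example: spikes_per_cluster_sketch([0, 1, 0, 2, 1])
# -> {0: array([0, 2]), 1: array([1, 4]), 2: array([3])}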
# Constructor of a converter class whose definition is not shown in this
# fragment; requires `from pathlib import Path` at module level.
def __init__(self, model):
    self.model = model
    self.dir_path = Path(model.dir_path)
    # Precompute the {cluster_id: spike_ids} mapping once, up front.
    self.spc = _spikes_per_cluster(model.spike_clusters)
    # _FILE_RENAMES is a module-level constant (not shown here).
    self.renames = _FILE_RENAMES
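
# Hypothetical usage of the constructor above: FakeModel and its attribute
# values are made up for illustration; the constructor only reads
# model.dir_path and model.spike_clusters. The class name is not shown in
# the fragment, hence the commented-out instantiation.
import numpy as np

class FakeModel:
    dir_path = 'session_dir'
    spike_clusters = np.array([0, 1, 0, 2, 1])

# creator = SomeCreatorClass(FakeModel())
# creator.spc -> {0: array([0, 2]), 1: array([1, 4]), 2: array([3])}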
# Fragment of an assign method; the signature and the first two statements
# are assumptions, reconstructed from the variables the fragment uses
# (spike_ids, old_spike_clusters, old_clusters, new_spike_clusters).
def _do_assign(self, spike_ids, new_spike_clusters):
    # Assumed earlier step: recover the previous assignments of the
    # affected spikes.
    old_spike_clusters = self._spike_clusters[spike_ids]
    old_clusters = _unique(old_spike_clusters)
    # The fact that the spike selection has previously been extended to
    # whole clusters is critical here: if every spike lands in a single
    # new cluster, the assignment is really a merge.
    new_clusters = _unique(new_spike_clusters)
    if len(new_clusters) == 1:
        return self._do_merge(spike_ids, old_clusters, new_clusters[0])
    # We return the UpdateInfo structure.
    up = _assign_update_info(spike_ids, old_spike_clusters, new_spike_clusters)
    # We update the new cluster id (strictly increasing during a session).
    self._new_cluster_id = max(self._new_cluster_id, max(up.added) + 1)
    # We make the assignments.
    self._spike_clusters[spike_ids] = new_spike_clusters
    # OPTIM: we update spikes_per_cluster manually rather than recomputing
    # it from scratch.
    new_spc = _spikes_per_cluster(new_spike_clusters, spike_ids)
    self._update_cluster_ids(to_remove=old_clusters, to_add=new_spc)
    return up
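
# A minimal sketch (an assumption, inferred only from the fields this
# section touches) of the UpdateInfo payload returned above: `added` holds
# the new cluster ids and `deleted` the old ones, so that max(up.added) + 1
# can serve as the next unused cluster id. Names ending in "_sketch" are
# made up for illustration.
from collections import namedtuple
import numpy as np

UpdateInfoSketch = namedtuple('UpdateInfoSketch', 'spike_ids added deleted')

def assign_update_info_sketch(spike_ids, old_spike_clusters, new_spike_clusters):
    return UpdateInfoSketch(
        spike_ids=np.asarray(spike_ids),
        added=sorted(set(np.asarray(new_spike_clusters).tolist())),
        deleted=sorted(set(np.asarray(old_spike_clusters).tolist())),
    )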