def _compute_delta_vi(ctable, fragments0, fragments1):
    """Compute the change in variation of information (VI) obtained by
    merging the fragments in `fragments0` with those in `fragments1`,
    given their contingency table with the ground truth."""
    c0 = np.sum(ctable[list(fragments0)], axis=0)
    c1 = np.sum(ctable[list(fragments1)], axis=0)
    cr = c0 + c1
    p0 = np.sum(c0)
    p1 = np.sum(c1)
    pr = np.sum(cr)
    p0g = np.sum(ev.xlogx(c0))
    p1g = np.sum(ev.xlogx(c1))
    prg = np.sum(ev.xlogx(cr))
    return (pr * np.log2(pr) - p0 * np.log2(p0) - p1 * np.log2(p1) -
            2 * (prg - p0g - p1g))
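
A worked example may help make the return expression concrete. The sketch below is illustrative only: it assumes gala is installed so that `ev` refers to `gala.evaluate` (which provides `xlogx`), and it uses a toy contingency table whose rows are fragments, whose columns are ground-truth segments, and whose entries are joint probabilities summing to one.

# Illustrative usage of _compute_delta_vi above; `ev` assumed to be gala.evaluate.
import numpy as np
from gala import evaluate as ev

# Toy contingency table: rows = fragments, columns = ground-truth segments.
ctable = np.array([[0.25, 0.05],
                   [0.20, 0.00],
                   [0.05, 0.45]])

# Change in VI from merging fragment 0 with fragment 1 (mostly the same
# ground-truth segment) versus fragment 0 with fragment 2 (mostly different).
print(_compute_delta_vi(ctable, {0}, {1}))
print(_compute_delta_vi(ctable, {0}, {2}))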
def label_fct(a):
    # Relabel the volume to consecutive integer labels starting from 1.
    relabeled, fmap, imap = evaluate.relabel_from_one(a)
    return relabeled, len(imap)
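
For reference, a minimal, hypothetical call to the helper above; it assumes `evaluate` is gala's evaluate module, imported in the surrounding snippet.

import numpy as np

# Toy label volume with non-consecutive labels.
labels = np.array([[2, 2, 5],
                   [5, 9, 9]])
relabeled, n_labels = label_fct(labels)
# `relabeled` now uses consecutive integer labels starting from 1;
# `n_labels` is taken from the length of the inverse label map.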
    weights : 1D array of float, length 2
        The VI and RI change of the merge.
    nodes : tuple of int
        The given edge.
    """
    n1, n2 = edge
    features = feature_map(self, n1, n2).ravel()
    # Compute the VI and Rand changes used to weight this data point.
    s1, s2 = [self.node[n]['size'] for n in [n1, n2]]
    weights = (compute_local_vi_change(s1, s2, self.volume_size),
               compute_local_rand_change(s1, s2, self.volume_size))
    # Determine whether n1 and n2 were assigned to the same segment in
    # each of the ground truths.
    cont_labels = [
        [(-1) ** np.all(ev.nzcol(a, n1) == ev.nzcol(a, n2))
         for a in assignments],
        [compute_true_delta_vi(ctable, n1, n2) for ctable in ctables],
        [-compute_true_delta_rand(ctable, n1, n2, self.volume_size)
         for ctable in ctables]
    ]
    labels = [np.sign(mean(cont_label)) for cont_label in cont_labels]
    if any(map(isnan, labels)) or any(label == 0 for label in labels):
        logging.debug('NaN or 0 labels found. ' +
                      ' '.join(map(str, [labels, (n1, n2)])))
    labels = [1 if i == 0 or isnan(i) or n1 in self.frozen_nodes or
              n2 in self.frozen_nodes or (n1, n2) in self.frozen_edges
              else i for i in labels]
    return features, labels, weights, (n1, n2)
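
The first entry of cont_labels above encodes agreement with an assignment table as -1 or +1. The following plain-numpy sketch (not gala API; the names are made up for illustration) shows that labelling rule in isolation.

import numpy as np

# Toy assignment table: rows = fragments, columns = ground-truth segments;
# a 1 marks the segment each fragment is assigned to.
assignment = np.array([[1, 0],
                       [1, 0],
                       [0, 1]])

def assignment_label(a, n1, n2):
    # -1 ("should merge") when both fragments map to the same segment,
    # +1 ("should not merge") otherwise, mirroring (-1) ** np.all(...) above.
    same = np.all(a[n1].nonzero()[0] == a[n2].nonzero()[0])
    return (-1) ** same

print(assignment_label(assignment, 0, 1))  # -1: same ground-truth segment
print(assignment_label(assignment, 0, 2))  #  1: different segments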
        Plot this many isoclines between the minimum and maximum VI
        contributions.
    subplot : bool, optional
        If True, plot oversegmentation and undersegmentation in separate
        subplots.
    figsize : tuple of float, optional
        The figure width and height, in inches.
    **kwargs : dict
        Additional keyword arguments for `matplotlib.pyplot.plot`.

    Returns
    -------
    None
    """
    plt.ion()
    pxy, px, py, hxgy, hygx, lpygx, lpxgy = evaluate.vi_tables(
        seg, gt, ignore_seg, ignore_gt)
    cu = -px * lpygx
    co = -py * lpxgy
    if hlines is None:
        hlines = []
    elif hlines == True:
        hlines = 10
    if type(hlines) == int:
        maxc = max(cu[cu != 0].max(), co[co != 0].max())
        hlines = np.arange(maxc / hlines, maxc, maxc / hlines)
    plt.figure(figsize=figsize)
    if subplot:
        plt.subplot(1, 2, 1)
    plot_vi_breakdown_panel(px, -lpygx,
                            'False merges', 'p(S=seg)', 'H(G|S=seg)',
                            hlines, c='blue', **kwargs)
    if subplot:
        plt.subplot(1, 2, 2)
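
A hedged usage sketch for the plotting routine excerpted above. It assumes the function is gala's plot_vi_breakdown, living in gala.viz, with the (seg, gt) signature implied by the body; the toy arrays are made up for illustration.

import numpy as np
from gala import viz

# Toy flat segmentation and ground truth (labels start at 1 to avoid the
# background label 0).
seg = np.array([[1, 1, 2, 2],
                [1, 1, 2, 2]])
gt = np.array([[1, 1, 1, 2],
               [1, 1, 1, 2]])

# Separate oversegmentation / undersegmentation panels with ten isoclines.
viz.plot_vi_breakdown(seg, gt, hlines=10, subplot=True, figsize=(8, 4))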
        - the labels, shape ``(n_samples, 3)``. A value of `-1` means
          "should merge", while `1` means "should not merge". The columns
          correspond to the three labeling methods: assignment, VI sign,
          or RI sign.
        - the VI and RI change of each merge, ``(n_edges, 2)``.
        - the list of merged edges, ``(n_edges, 2)``.

    See Also
    --------
    learn_agglomerate
    """
    if type(gts) != list:
        gts = [gts]  # allow using a single ground truth as input
    ctables = [merge_contingency_table(self.get_segmentation(), gt)
               for gt in gts]
    assignments = [ev.assignment_table(ct) for ct in ctables]
    return list(map(array, zip(*[
        self.learn_edge(e, ctables, assignments, feature_map)
        for e in self.real_edges()])))
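
Finally, a rough end-to-end sketch of how this flat-learning step might be driven from user code. This is an assumption-laden illustration: it presumes gala is installed, that agglo.Rag accepts a feature_manager keyword argument, and that features.moments.Manager is an acceptable feature map; exact argument names and signatures can differ between gala versions.

import numpy as np
from gala import agglo, features

# Toy oversegmentation (e.g. a watershed), boundary-probability map, and
# ground-truth segmentation.
ws = np.array([[1, 1, 2, 2],
               [1, 1, 2, 2],
               [3, 3, 4, 4]])
prob = np.random.random(ws.shape)
gt = np.array([[1, 1, 1, 1],
               [1, 1, 1, 1],
               [2, 2, 2, 2]])

fm = features.moments.Manager()
g = agglo.Rag(ws, prob, feature_manager=fm)
feats, labels, weights, edges = g.learn_flat(gt, fm)
# Per the docstring above, labels has shape (n_samples, 3): the columns are
# the assignment, VI-sign, and RI-sign labels, with -1 meaning "should merge"
# and 1 meaning "should not merge".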