Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.
def testAggloRFBuild(self):
    """Build a RAG whose merge policy comes from a stored random-forest
    classifier, then check the node count after construction,
    agglomeration at threshold 0.1, and inclusion removal.

    Expected counts (3630 / 88 / 86) are regression values for the
    bundled test data.
    """
    from gala import agglo
    from gala import features
    from gala import classify
    # Locate the test data shipped next to the installed gala package.
    # The trailing slash is kept so self.datadir stays byte-identical
    # for any other code that concatenates onto it.
    self.datadir = os.path.abspath(
        os.path.dirname(sys.modules["gala"].__file__)) + "/testdata/"
    # os.path.join instead of raw "+" concatenation builds the file
    # path portably (and tolerates the trailing slash above).
    cl = classify.load_classifier(
        os.path.join(self.datadir, "agglomclassifier.rf.h5"))
    # The feature-manager description is stored as JSON text on the
    # classifier object.
    fm_info = json.loads(str(cl.feature_description))
    fm = features.io.create_fm(fm_info)
    mpf = agglo.classifier_probability(fm, cl)
    watershed, dummy, prediction = self.gen_watershed()
    stack = agglo.Rag(watershed, prediction, mpf, feature_manager=fm,
                      nozeros=True)
    self.assertEqual(stack.number_of_nodes(), 3630)
    stack.agglomerate(0.1)
    self.assertEqual(stack.number_of_nodes(), 88)
    stack.remove_inclusions()
    self.assertEqual(stack.number_of_nodes(), 86)
def test_segment_with_classifier_4_channel():
    """Segment the 4-channel test volume with a pre-trained random
    forest and verify the result matches the stored expected
    segmentation (variation of information of zero)."""
    # The serialized forest differs between Python 2 and Python 3.
    if PYTHON_VERSION == 2:
        forest = classify.load_classifier(
            os.path.join(rundir, 'example-data/rf-4.joblib'))
    else:
        archive = os.path.join(rundir, 'example-data/rf4-py3.joblib')
        with tar_extract(archive) as extracted:
            forest = joblib.load(extracted)
    policy = agglo.classifier_probability(fc, forest)
    graph = agglo.Rag(ws_test, p4_test, policy, feature_manager=fc)
    graph.agglomerate(0.5)
    observed = graph.get_segmentation()
    expected = imio.read_h5_stack(
        os.path.join(rundir, 'example-data/test-seg-4.lzf.h5'))
    # VI == 0 means the two segmentations are identical.
    assert_allclose(ev.vi(observed, expected), 0.0)
def testNPRFBuild(self):
    """Build a neuroproof-backed Stack from a stored random-forest
    classifier and check the node count after construction,
    agglomeration at threshold 0.1, and inclusion removal.

    Expected counts (3629 / 80 / 78) are regression values for the
    bundled test data.
    """
    # Fail fast, with a readable message, when the optional neuroproof
    # backend is unavailable; the bare assertTrue previously failed
    # with no explanation.  (assertTrue raises, so execution stops
    # here in that case and the stack_np import below never runs.)
    if not np_installed:
        self.assertTrue(np_installed,
                        "neuroproof support (stack_np) is not installed")
    from gala import stack_np
    from gala import classify
    # Test data lives next to the installed gala package; trailing
    # slash kept so self.datadir stays byte-identical for other users.
    self.datadir = os.path.abspath(
        os.path.dirname(sys.modules["gala"].__file__)) + "/testdata/"
    # os.path.join instead of raw "+" for portable path construction.
    cl = classify.load_classifier(
        os.path.join(self.datadir, "agglomclassifier_np.rf.h5"))
    # Feature-manager description is stored as JSON on the classifier.
    fm_info = json.loads(str(cl.feature_description))
    watershed, boundary, prediction = self.gen_watershed()
    stack = stack_np.Stack(watershed, prediction, single_channel=False,
                           classifier=cl, feature_info=fm_info)
    self.assertEqual(stack.number_of_nodes(), 3629)
    stack.agglomerate(0.1)
    self.assertEqual(stack.number_of_nodes(), 80)
    stack.remove_inclusions()
    self.assertEqual(stack.number_of_nodes(), 78)
def testAggloRFBuild(self):
    """Agglomerate a generated watershed with a stored RF-based merge
    policy and verify the region count at each processing stage."""
    from gala import agglo
    from gala import features
    from gala import classify
    package_dir = os.path.dirname(sys.modules["gala"].__file__)
    self.datadir = os.path.abspath(package_dir) + "/testdata/"
    classifier = classify.load_classifier(
        self.datadir + "agglomclassifier.rf.h5")
    # The stored feature description is JSON text on the classifier.
    feature_spec = json.loads(str(classifier.feature_description))
    manager = features.io.create_fm(feature_spec)
    merge_policy = agglo.classifier_probability(manager, classifier)
    watershed, _, prediction = self.gen_watershed()
    rag = agglo.Rag(watershed, prediction, merge_policy,
                    feature_manager=manager, nozeros=True)
    # Regression counts: initial build, after agglomeration at 0.1,
    # and after removing inclusions.
    self.assertEqual(rag.number_of_nodes(), 3630)
    rag.agglomerate(0.1)
    self.assertEqual(rag.number_of_nodes(), 88)
    rag.remove_inclusions()
    self.assertEqual(rag.number_of_nodes(), 86)
def testNPRFBuild(self):
    """Run the neuroproof Stack build / agglomerate / remove-inclusions
    pipeline and verify the stored expected node counts."""
    # Without the neuroproof backend this assertion fails immediately,
    # so the stack_np import below is never attempted.
    if not np_installed:
        self.assertTrue(np_installed)
    from gala import stack_np
    from gala import classify
    package_dir = os.path.dirname(sys.modules["gala"].__file__)
    self.datadir = os.path.abspath(package_dir) + "/testdata/"
    classifier = classify.load_classifier(
        self.datadir + "agglomclassifier_np.rf.h5")
    # The stored feature description is JSON text on the classifier.
    feature_spec = json.loads(str(classifier.feature_description))
    watershed, boundary, prediction = self.gen_watershed()
    np_stack = stack_np.Stack(watershed, prediction,
                              single_channel=False, classifier=classifier,
                              feature_info=feature_spec)
    # Regression counts: initial build, after agglomeration at 0.1,
    # and after removing inclusions.
    self.assertEqual(np_stack.number_of_nodes(), 3629)
    np_stack.agglomerate(0.1)
    self.assertEqual(np_stack.number_of_nodes(), 80)
    np_stack.remove_inclusions()
    self.assertEqual(np_stack.number_of_nodes(), 78)
# NOTE(review): this excerpt has lost its original indentation and is
# truncated (the function continues beyond the visible end); the content
# from the "expected_vi" branch onward also looks like a fragment of a
# different (stitching) routine spliced into this excerpt.  Code is kept
# byte-identical; only comments are added.
def flow_perform_agglomeration(options, supervoxels, prediction, image_stack,
session_location, sps_out, master_logger):
# Purpose (from visible code): optionally build a synapse-constraint
# volume, then construct an agglomeration stack from a stored classifier.
# make synapse constraints
synapse_volume = numpy.array([])
# Synapse constraints only apply in non-neuroproof mode with a synapse file.
if not options.use_neuroproof and options.synapse_file is not None:
pre_post_pairs = syngeo.io.raveler_synapse_annotations_to_coords(
options.synapse_file)
synapse_volume = \
syngeo.io.volume_synapse_view(pre_post_pairs, supervoxels.shape)
# ?! build RAG (automatically load features if classifier file is available, default to median
# if no classifier, check if np mode or not, automatically load features in NP as well)
if options.classifier is not None:
cl = classify.load_classifier(options.classifier)
# feature_description is stored as JSON text on the classifier.
fm_info = json.loads(str(cl.feature_description))
master_logger.info("Building RAG")
# Classifiers lacking the neuroproof_features flag are rejected.
if fm_info is None or fm_info["neuroproof_features"] is None:
raise Exception("agglomeration classifier to old to be used")
if options.use_neuroproof:
# Neuroproof mode requires a forest trained with neuroproof features.
if not fm_info["neuroproof_features"]:
raise Exception("random forest created not using neuroproof")
agglom_stack = stack_np.Stack(supervoxels, prediction,
single_channel=False, classifier=cl, feature_info=fm_info,
synapse_file=options.synapse_file, master_logger=master_logger)
else:
if fm_info["neuroproof_features"]:
master_logger.warning("random forest created using neuroproof features -- should still work")
fm = features.io.create_fm(fm_info)
if options.expected_vi:
# NOTE(review): the remainder appears to come from a different routine;
# the body of the branch above is not visible in this excerpt.
# 4. 0,0 is the lower-left corner of the image
# 5. assume coordinates in json is x,y,z
# prevent stitch if hashes are different
match_hash = None
# Each region filename is expected to embed a hex hash like "-abc123-";
# all regions must share the same (last) hash to be compatible.
for region in options.regions:
hashes = re.findall(r'-[0-9a-f]+-',region)
match_hash_temp = hashes[-1]
if match_hash is not None and match_hash_temp != match_hash:
raise Exception("Incompatible segmentations: hashes do not match")
match_hash = match_hash_temp
# Digest of the full command line (Python-2 style: md5 over a str).
md5_str = hashlib.md5(' '.join(sys.argv)).hexdigest()
cl = classify.load_classifier(options.classifier)
fm_info = json.loads(str(cl.feature_description))
border_size = None
regions_blocks = []
regions_blocks_temp = []
# Collect each region's subvolume list and verify the border sizes agree.
for region in options.regions:
blocks = []
region_json = json.load(open(region))
blocks_temp = region_json["subvolumes"]
regions_blocks_temp.append(blocks_temp)
border_size_temp = region_json["border"]
if border_size is not None and border_size != border_size_temp:
raise Exception("border attrubute not the same in all regions")