Dataset columns:
repository_name: string, 7 to 55 chars
func_path_in_repository: string, 4 to 223 chars
func_name: string, 1 to 134 chars
whole_func_string: string, 75 to 104k chars
language: string, 1 class
func_code_string: string, 75 to 104k chars
func_code_tokens: sequence, 19 to 28.4k items
func_documentation_string: string, 1 to 46.9k chars
func_documentation_tokens: sequence, 1 to 1.97k items
split_name: string, 1 class
func_code_url: string, 87 to 315 chars

mjirik/imcut | imcut/pycut.py | ImageGraphCut.__msgc_step3_discontinuity_localization | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L323-L372

```python
def __msgc_step3_discontinuity_localization(self):
"""
Estimate discontinuity on the basis of the low-resolution image segmentation.
:return: discontinuity in low resolution
"""
import scipy.ndimage
import scipy.stats
start = self._start_time
seg = 1 - self.segmentation.astype(np.int8)
self.stats["low level object voxels"] = np.sum(seg)
self.stats["low level image voxels"] = np.prod(seg.shape)
# in seg is now stored low resolution segmentation
# back to normal parameters
# step 2: discontinuity localization
# self.segparams = sparams_hi
seg_border = scipy.ndimage.filters.laplace(seg, mode="constant")
logger.debug("seg_border: %s", scipy.stats.describe(seg_border, axis=None))
# logger.debug(str(np.max(seg_border)))
# logger.debug(str(np.min(seg_border)))
seg_border[seg_border != 0] = 1
logger.debug("seg_border: %s", scipy.stats.describe(seg_border, axis=None))
# scipy.ndimage.morphology.distance_transform_edt
boundary_dilatation_distance = self.segparams["boundary_dilatation_distance"]
seg = scipy.ndimage.morphology.binary_dilation(
seg_border,
# seg,
np.ones(
[
(boundary_dilatation_distance * 2) + 1,
(boundary_dilatation_distance * 2) + 1,
(boundary_dilatation_distance * 2) + 1,
]
),
)
if self.keep_temp_properties:
self.temp_msgc_lowres_discontinuity = seg
else:
self.temp_msgc_lowres_discontinuity = None
if self.debug_images:
import sed3
pd = sed3.sed3(seg_border) # ), contour=seg)
pd.show()
pd = sed3.sed3(seg) # ), contour=seg)
pd.show()
# segzoom = scipy.ndimage.interpolation.zoom(seg.astype('float'), zoom,
# order=0).astype('int8')
self.stats["t3"] = time.time() - start
return seg
```
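
The function above localizes the discontinuity band by running a Laplace filter over the low-resolution segmentation and dilating the non-zero response. A minimal standalone sketch of the same idea on a toy volume (the helper name and `band_width` parameter are illustrative; `band_width` plays the role of `boundary_dilatation_distance`):

```python
import numpy as np
import scipy.ndimage

def discontinuity_band(lowres_seg, band_width=1):
    # Non-zero Laplace response marks voxels on the segmentation border.
    border = scipy.ndimage.laplace(lowres_seg.astype(np.int8), mode="constant")
    border = (border != 0).astype(np.int8)
    # Grow the border into a band of +/- band_width voxels along every axis.
    structure = np.ones([2 * band_width + 1] * 3)
    return scipy.ndimage.binary_dilation(border, structure)

toy = np.zeros([8, 8, 8], dtype=np.int8)
toy[2:6, 2:6, 2:6] = 1  # a small cubic "object" in the low-resolution labels
band = discontinuity_band(toy, band_width=1)
print(int(band.sum()), "voxels fall into the uncertainty band")
```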

mjirik/imcut | imcut/pycut.py | ImageGraphCut.__multiscale_gc_lo2hi_run | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L526-L606

```python
def __multiscale_gc_lo2hi_run(self): # , pyed):
"""
Run Graph-Cut segmentation with refinement of low resolution multiscale graph.
In the first step, a normal GC is performed on the low-resolution data.
The second step constructs a finer grid on the edges of the segmentation
from the first step.
There is no option to use this without `use_boundary_penalties`.
"""
# from PyQt4.QtCore import pyqtRemoveInputHook
# pyqtRemoveInputHook()
self._msgc_lo2hi_resize_init()
self.__msgc_step0_init()
hard_constraints = self.__msgc_step12_low_resolution_segmentation()
# ===== high resolution data processing
seg = self.__msgc_step3_discontinuity_localization()
self.stats["t3.1"] = (time.time() - self._start_time)
graph = Graph(
seg,
voxelsize=self.voxelsize,
nsplit=self.segparams["block_size"],
edge_weight_table=self._msgc_npenalty_table,
compute_low_nodes_index=True,
)
# graph.run() = graph.generate_base_grid() + graph.split_voxels()
# graph.run()
graph.generate_base_grid()
self.stats["t3.2"] = (time.time() - self._start_time)
graph.split_voxels()
self.stats["t3.3"] = (time.time() - self._start_time)
self.stats.update(graph.stats)
self.stats["t4"] = (time.time() - self._start_time)
mul_mask, mul_val = self.__msgc_tlinks_area_weight_from_low_segmentation(seg)
area_weight = 1
unariesalt = self.__create_tlinks(
self.img,
self.voxelsize,
self.seeds,
area_weight=area_weight,
hard_constraints=hard_constraints,
mul_mask=None,
mul_val=None,
)
# N-links prepared
self.stats["t5"] = (time.time() - self._start_time)
un, ind = np.unique(graph.msinds, return_index=True)
self.stats["t6"] = (time.time() - self._start_time)
self.stats["t7"] = (time.time() - self._start_time)
unariesalt2_lo2hi = np.hstack(
[unariesalt[ind, 0, 0].reshape(-1, 1), unariesalt[ind, 0, 1].reshape(-1, 1)]
)
nlinks_lo2hi = np.hstack([graph.edges, graph.edges_weights.reshape(-1, 1)])
if self.debug_images:
import sed3
ed = sed3.sed3(unariesalt[:, :, 0].reshape(self.img.shape))
ed.show()
import sed3
ed = sed3.sed3(unariesalt[:, :, 1].reshape(self.img.shape))
ed.show()
# ed = sed3.sed3(seg)
# ed.show()
# import sed3
# ed = sed3.sed3(graph.data)
# ed.show()
# import sed3
# ed = sed3.sed3(graph.msinds)
# ed.show()
# nlinks, unariesalt2, msinds = self.__msgc_step45678_construct_graph(area_weight, hard_constraints, seg)
# self.__msgc_step9_finish_perform_gc_and_reshape(nlinks, unariesalt2, msinds)
self.__msgc_step9_finish_perform_gc_and_reshape(
nlinks_lo2hi, unariesalt2_lo2hi, graph.msinds
)
self._msgc_lo2hi_resize_clean_finish()
```

mjirik/imcut | imcut/pycut.py | ImageGraphCut.__multiscale_gc_hi2lo_run | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L608-L626

```python
def __multiscale_gc_hi2lo_run(self): # , pyed):
"""
Run Graph-Cut segmentation with simplification of the high resolution multiscale graph.
In the first step, a normal GC is performed on the low-resolution data.
The second step constructs a finer grid on the edges of the segmentation
from the first step.
There is no option to use this without `use_boundary_penalties`.
"""
# from PyQt4.QtCore import pyqtRemoveInputHook
# pyqtRemoveInputHook()
self.__msgc_step0_init()
hard_constraints = self.__msgc_step12_low_resolution_segmentation()
# ===== high resolution data processing
seg = self.__msgc_step3_discontinuity_localization()
nlinks, unariesalt2, msinds = self.__msgc_step45678_hi2lo_construct_graph(
hard_constraints, seg
)
self.__msgc_step9_finish_perform_gc_and_reshape(nlinks, unariesalt2, msinds)
```

mjirik/imcut | imcut/pycut.py | ImageGraphCut.__ordered_values_by_indexes | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L628-L678

```python
def __ordered_values_by_indexes(self, data, inds):
"""
Return values (intensities) by indexes.
Used for multiscale graph cut.
data = [[0 1 1],
[0 2 2],
[0 2 2]]
inds = [[0 1 2],
[3 4 4],
[5 4 4]]
return: [0, 1, 1, 0, 2, 0]
If the data are not consistent, it will take the maximal value
"""
# get unique labels and their first indexes
# lab, linds = np.unique(inds, return_index=True)
# compute values by indexes
# values = data.reshape(-1)[linds]
# alternative slow implementation
# if there are different data on same index, it will take
# maximal value
# lab = np.unique(inds)
# values = [0]*len(lab)
# for label in lab:
# values[label] = np.max(data[inds == label])
#
# values = np.asarray(values)
# yet another implementation
values = [None] * (np.max(inds) + 1)
linear_inds = inds.ravel()
linear_data = data.ravel()
for i in range(0, len(linear_inds)):
# going over all data pixels
if values[linear_inds[i]] is None:
# first occurrence of this index
values[linear_inds[i]] = linear_data[i]
elif values[linear_inds[i]] < linear_data[i]:
# here can be changed maximal or minimal value
values[linear_inds[i]] = linear_data[i]
values = np.asarray(values)
return values
```
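
The element-wise loop above can also be written as a vectorized `numpy` scatter; a sketch (the function name is illustrative) that reproduces the example from the docstring and keeps the maximal value on inconsistent data:

```python
import numpy as np

def ordered_values_by_indexes(data, inds):
    # For every label in inds keep the maximal data value mapped onto it.
    values = np.full(np.max(inds) + 1, -np.inf)
    np.maximum.at(values, inds.ravel(), data.ravel())
    return values

data = np.array([[0, 1, 1],
                 [0, 2, 2],
                 [0, 2, 2]])
inds = np.array([[0, 1, 2],
                 [3, 4, 4],
                 [5, 4, 4]])
print(ordered_values_by_indexes(data, inds))  # [0. 1. 1. 0. 2. 0.]
```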

mjirik/imcut | imcut/pycut.py | ImageGraphCut.__hi2lo_multiscale_indexes | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L680-L721

```python
def __hi2lo_multiscale_indexes(self, mask, orig_shape): # , zoom):
"""
Compute multiscale indexes of an ndarray.
mask: says where the original resolution is used (0) and where the small
resolution is used (1). The mask is given in the small resolution.
orig_shape: original shape of the input data.
zoom: usually a number greater than 1
result = [[0 1 2],
[3 4 4],
[5 4 4]]
"""
mask_orig = zoom_to_shape(mask, orig_shape, dtype=np.int8)
inds_small = np.arange(mask.size).reshape(mask.shape)
inds_small_in_orig = zoom_to_shape(inds_small, orig_shape, dtype=np.int8)
inds_orig = np.arange(np.prod(orig_shape)).reshape(orig_shape)
# inds_orig = inds_orig * mask_orig
inds_orig += np.max(inds_small_in_orig) + 1
# print 'indexes'
# import py3DSeedEditor as ped
# import pdb; pdb.set_trace() # BREAKPOINT
# '==' is not the same as 'is' for numpy.array
inds_small_in_orig[mask_orig == True] = inds_orig[mask_orig == True] # noqa
inds = inds_small_in_orig
# print np.max(inds)
# print np.min(inds)
inds = relabel_squeeze(inds)
logger.debug(
"Index after relabeling: %s", scipy.stats.describe(inds, axis=None)
)
# logger.debug("Minimal index after relabeling: " + str(np.min(inds)))
# inds_orig[mask_orig==True] = 0
# inds_small_in_orig[mask_orig==False] = 0
# inds = (inds_orig + np.max(inds_small_in_orig) + 1) + inds_small_in_orig
return inds, mask_orig
```
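
`relabel_squeeze` and `zoom_to_shape` are imcut helpers that are not shown in this record; the squeezing of scattered index values into a contiguous 0..N-1 range can be sketched with plain `numpy` (label ordering may differ from the library helper):

```python
import numpy as np

def relabel_squeeze_sketch(inds):
    # Map the distinct index values onto 0..N-1 (sorted order of the values).
    _, squeezed = np.unique(inds, return_inverse=True)
    return squeezed.reshape(inds.shape)

inds = np.array([[10, 10, 42],
                 [10,  7, 42],
                 [99,  7, 42]])
print(relabel_squeeze_sketch(inds))
# [[1 1 2]
#  [1 0 2]
#  [3 0 2]]
```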

mjirik/imcut | imcut/pycut.py | ImageGraphCut.interactivity | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L723-L753

```python
def interactivity(self, min_val=None, max_val=None, qt_app=None):
"""
Interactive seed setting with 3d seed editor
"""
from .seed_editor_qt import QTSeedEditor
from PyQt4.QtGui import QApplication
if min_val is None:
min_val = np.min(self.img)
if max_val is None:
max_val = np.max(self.img)
window_c = (max_val + min_val) / 2 # .astype(np.int16)
window_w = max_val - min_val # .astype(np.int16)
if qt_app is None:
qt_app = QApplication(sys.argv)
pyed = QTSeedEditor(
self.img,
modeFun=self.interactivity_loop,
voxelSize=self.voxelsize,
seeds=self.seeds,
volume_unit=self.volume_unit,
)
pyed.changeC(window_c)
pyed.changeW(window_w)
qt_app.exec_()
```

mjirik/imcut | imcut/pycut.py | ImageGraphCut.set_seeds | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L755-L768

```python
def set_seeds(self, seeds):
"""
Function for manual seed setting. Sets variable seeds and prepares
voxels for density model.
:param seeds: ndarray (0 - nothing, 1 - object, 2 - background,
3 - object just hard constraints, no model training, 4 - background
just hard constraints, no model training)
"""
if self.img.shape != seeds.shape:
raise Exception("Seeds must be same size as input image")
self.seeds = seeds.astype("int8")
self.voxels1 = self.img[self.seeds == 1]
self.voxels2 = self.img[self.seeds == 2]
```
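
A short usage sketch for the seed labels described above; the `ImageGraphCut` constructor call is shown only schematically, since its exact signature is not part of this record:

```python
import numpy as np
# from imcut import pycut  # assumed import path

data3d = np.random.randint(0, 30, [32, 32, 32]).astype(np.int16)
data3d[10:20, 10:20, 10:20] += 600  # brighter region to segment as the object

seeds = np.zeros(data3d.shape, dtype=np.int8)
seeds[14:16, 14:16, 14:16] = 1      # object seeds, used for model training
seeds[0:3, 0:3, 0:3] = 2            # background seeds, used for model training
seeds[10, 10, 10] = 3               # object hard constraint only, no training
seeds[0, 31, 31] = 4                # background hard constraint only, no training

# igc = pycut.ImageGraphCut(data3d)  # schematic constructor call
# igc.set_seeds(seeds)
# igc.run()
# segmentation = igc.segmentation
```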

mjirik/imcut | imcut/pycut.py | ImageGraphCut.run | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L770-L802

```python
def run(self, run_fit_model=True):
"""
Run the Graph Cut segmentation according to preset parameters.
:param run_fit_model: Allows skipping the model fit when the model has been prepared beforehand.
:return:
"""
if run_fit_model:
self.fit_model(self.img, self.voxelsize, self.seeds)
self._start_time = time.time()
if self.segparams["method"].lower() in ("graphcut", "gc"):
self.__single_scale_gc_run()
elif self.segparams["method"].lower() in (
"multiscale_graphcut",
"multiscale_gc",
"msgc",
"msgc_lo2hi",
"lo2hi",
"multiscale_graphcut_lo2hi",
):
logger.debug("performing multiscale Graph-Cut lo2hi")
self.__multiscale_gc_lo2hi_run()
elif self.segparams["method"].lower() in (
"msgc_hi2lo",
"hi2lo",
"multiscale_graphcut_hi2lo",
):
logger.debug("performing multiscale Graph-Cut hi2lo")
self.__multiscale_gc_hi2lo_run()
else:
logger.error("Unknown segmentation method: " + self.segparams["method"])
```
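
The method strings accepted by `run` fall into three code paths; a standalone sketch of the same dispatch (not the library API) that can be used to validate a `segparams["method"]` value before calling `run`:

```python
METHOD_ALIASES = {
    "gc": ("graphcut", "gc"),
    "msgc_lo2hi": ("multiscale_graphcut", "multiscale_gc", "msgc", "msgc_lo2hi",
                   "lo2hi", "multiscale_graphcut_lo2hi"),
    "msgc_hi2lo": ("msgc_hi2lo", "hi2lo", "multiscale_graphcut_hi2lo"),
}

def resolve_method(name):
    # Return the canonical code path for a user-supplied method string.
    lowered = name.lower()
    for canonical, aliases in METHOD_ALIASES.items():
        if lowered in aliases:
            return canonical
    raise ValueError("Unknown segmentation method: " + name)

print(resolve_method("Multiscale_GraphCut"))  # msgc_lo2hi
```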

mjirik/imcut | imcut/pycut.py | ImageGraphCut.__set_hard_hard_constraints | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L834-L851

```python
def __set_hard_hard_constraints(self, tdata1, tdata2, seeds):
"""
it works with seed labels:
0: nothing
1: object 1 - full seeds
2: object 2 - full seeds
3: object 1 - hard constraint only, not a training seed
4: object 2 - hard constraint only, not a training seed
"""
seeds_mask = (seeds == 1) | (seeds == 3)
tdata2[seeds_mask] = np.max(tdata2) + 1
tdata1[seeds_mask] = 0
seeds_mask = (seeds == 2) | (seeds == 4)
tdata1[seeds_mask] = np.max(tdata1) + 1
tdata2[seeds_mask] = 0
return tdata1, tdata2
```
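
The effect of the hard constraints on the two t-link cost maps can be checked on a one-dimensional toy example; a sketch mirroring the logic above (argument names reused for readability only):

```python
import numpy as np

def set_hard_constraints(tdata1, tdata2, seeds):
    # Seeded voxels get zero cost towards one terminal and a cost above the
    # current maximum towards the other, which makes the seed labels binding.
    obj = (seeds == 1) | (seeds == 3)
    bgr = (seeds == 2) | (seeds == 4)
    tdata2[obj] = np.max(tdata2) + 1
    tdata1[obj] = 0
    tdata1[bgr] = np.max(tdata1) + 1
    tdata2[bgr] = 0
    return tdata1, tdata2

seeds = np.array([1, 0, 2, 3])
t1, t2 = np.array([5, 5, 5, 5]), np.array([7, 7, 7, 7])
print(set_hard_constraints(t1, t2, seeds))
# (array([0, 5, 6, 0]), array([8, 7, 0, 8]))
```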

mjirik/imcut | imcut/pycut.py | ImageGraphCut.__similarity_for_tlinks_obj_bgr | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1023-L1080

```python
def __similarity_for_tlinks_obj_bgr(
self,
data,
voxelsize,
# voxels1, voxels2,
# seeds, otherfeatures=None
):
"""
Compute edge values for graph cut tlinks based on image intensity
and texture.
"""
# self.fit_model(data, voxelsize, seeds)
# Small values are needed for a good fit
# R(obj) = -ln( Pr (Ip | O) )
# R(bck) = -ln( Pr (Ip | B) )
# Boykov2001b
# ln is computed in likelihood
tdata1 = (-(self.mdl.likelihood_from_image(data, voxelsize, 1))) * 10
tdata2 = (-(self.mdl.likelihood_from_image(data, voxelsize, 2))) * 10
# to spare some memory
dtype = np.int16
if np.any(tdata1 > 32760):
dtype = np.float32
if np.any(tdata2 > 32760):
dtype = np.float32
if self.segparams["use_apriori_if_available"] and self.apriori is not None:
logger.debug("using apriori information")
gamma = self.segparams["apriori_gamma"]
a1 = (-np.log(self.apriori * 0.998 + 0.001)) * 10
a2 = (-np.log(0.999 - (self.apriori * 0.998))) * 10
# logger.debug('max ' + str(np.max(tdata1)) + ' min ' + str(np.min(tdata1)))
# logger.debug('max ' + str(np.max(tdata2)) + ' min ' + str(np.min(tdata2)))
# logger.debug('max ' + str(np.max(a1)) + ' min ' + str(np.min(a1)))
# logger.debug('max ' + str(np.max(a2)) + ' min ' + str(np.min(a2)))
tdata1u = (((1 - gamma) * tdata1) + (gamma * a1)).astype(dtype)
tdata2u = (((1 - gamma) * tdata2) + (gamma * a2)).astype(dtype)
tdata1 = tdata1u
tdata2 = tdata2u
# logger.debug(' max ' + str(np.max(tdata1)) + ' min ' + str(np.min(tdata1)))
# logger.debug(' max ' + str(np.max(tdata2)) + ' min ' + str(np.min(tdata2)))
# logger.debug('gamma ' + str(gamma))
# import sed3
# ed = sed3.show_slices(tdata1)
# ed = sed3.show_slices(tdata2)
del tdata1u
del tdata2u
del a1
del a2
# if np.any(tdata1 < 0) or np.any(tdata2 <0):
# logger.error("Problem with tlinks. Likelihood is < 0")
# if self.debug_images:
# self.__show_debug_tdata_images(tdata1, tdata2, suptitle="likelihood")
return tdata1, tdata2
```
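
The regional terms follow the Boykov 2001 formulation referenced in the comments, R(obj) = -ln Pr(Ip | O) and R(bck) = -ln Pr(Ip | B). A self-contained sketch with a Gaussian intensity model standing in for `self.mdl` (the means and sigma are made up; the factor 10 mirrors the code above):

```python
import numpy as np

def gaussian_log_likelihood(img, mean, sigma):
    # Log of a normal density evaluated at every voxel intensity.
    return -0.5 * ((img - mean) / sigma) ** 2 - np.log(sigma * np.sqrt(2 * np.pi))

img = np.array([[10.0, 11.0, 100.0],
                [12.0, 98.0, 102.0]])
tdata1 = -gaussian_log_likelihood(img, mean=100.0, sigma=10.0) * 10  # object term
tdata2 = -gaussian_log_likelihood(img, mean=10.0, sigma=10.0) * 10   # background term

# The cheaper terminal marks the more plausible label for each voxel.
print((tdata1 < tdata2).astype(int))
# [[0 0 1]
#  [0 1 1]]
```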

mjirik/imcut | imcut/pycut.py | ImageGraphCut.__create_nlinks | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1152-L1220

```python
def __create_nlinks(self, data, inds=None, boundary_penalties_fcn=None):
"""
Compute the nlinks grid from the data shape information. For boundary
penalties the data (intensity) values are used.
inds: Default is None. Used for multiscale GC. These are the indexes of
multiscale pixels. The next example shows one superpixel with index 2.
inds = [
[1 2 2],
[3 2 2],
[4 5 6]]
boundary_penalties_fcn: a function with one argument - axis. It can
be used for setting penalty weights between neighboring
pixels.
"""
# use the general graph algorithm
# first, we construct the grid graph
start = time.time()
if inds is None:
inds = np.arange(data.size).reshape(data.shape)
# if not self.segparams['use_boundary_penalties'] and \
# boundary_penalties_fcn is None :
if boundary_penalties_fcn is None:
# This is faster for some specific format
edgx = np.c_[inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel()]
edgy = np.c_[inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel()]
edgz = np.c_[inds[:-1, :, :].ravel(), inds[1:, :, :].ravel()]
else:
logger.info("use_boundary_penalties")
bpw = self.segparams["boundary_penalties_weight"]
bpa = boundary_penalties_fcn(2)
# id1=inds[:, :, :-1].ravel()
edgx = np.c_[
inds[:, :, :-1].ravel(),
inds[:, :, 1:].ravel(),
# cc * np.ones(id1.shape)
bpw * bpa[:, :, 1:].ravel(),
]
bpa = boundary_penalties_fcn(1)
# id1 =inds[:, 1:, :].ravel()
edgy = np.c_[
inds[:, :-1, :].ravel(),
inds[:, 1:, :].ravel(),
# cc * np.ones(id1.shape)]
bpw * bpa[:, 1:, :].ravel(),
]
bpa = boundary_penalties_fcn(0)
# id1 = inds[1:, :, :].ravel()
edgz = np.c_[
inds[:-1, :, :].ravel(),
inds[1:, :, :].ravel(),
# cc * np.ones(id1.shape)]
bpw * bpa[1:, :, :].ravel(),
]
# import pdb; pdb.set_trace()
edges = np.vstack([edgx, edgy, edgz]).astype(np.int32)
# edges - a list of index pairs of neighboring nodes
elapsed = time.time() - start
self.stats["_create_nlinks time"] = elapsed
logger.info("__create nlinks time " + str(elapsed))
return edges
```
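
The `np.c_[inds[..., :-1].ravel(), inds[..., 1:].ravel()]` pattern pairs every node index with its neighbor along one axis; a 2D toy example makes the resulting n-link edge list explicit (boundary penalties left out):

```python
import numpy as np

shape = (2, 3)
inds = np.arange(np.prod(shape)).reshape(shape)
# [[0 1 2]
#  [3 4 5]]

# horizontal neighbors (axis 1) and vertical neighbors (axis 0)
edges_x = np.c_[inds[:, :-1].ravel(), inds[:, 1:].ravel()]
edges_y = np.c_[inds[:-1, :].ravel(), inds[1:, :].ravel()]
edges = np.vstack([edges_x, edges_y])
print(edges)
# [[0 1]
#  [1 2]
#  [3 4]
#  [4 5]
#  [0 3]
#  [1 4]
#  [2 5]]
```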

mjirik/imcut | imcut/pycut.py | ImageGraphCut.debug_get_reconstructed_similarity | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1222-L1259

```python
def debug_get_reconstructed_similarity(
self,
data3d=None,
voxelsize=None,
seeds=None,
area_weight=1,
hard_constraints=True,
return_unariesalt=False,
):
"""
Use the current model to calculate similarity. If no input is given, the last image is used.
:param data3d:
:param voxelsize:
:param seeds:
:param area_weight:
:param hard_constraints:
:param return_unariesalt:
:return:
"""
if data3d is None:
data3d = self.img
if voxelsize is None:
voxelsize = self.voxelsize
if seeds is None:
seeds = self.seeds
unariesalt = self.__create_tlinks(
data3d,
voxelsize,
# voxels1, voxels2,
seeds,
area_weight,
hard_constraints,
)
if return_unariesalt:
return unariesalt
else:
return self._reshape_unariesalt_to_similarity(unariesalt, data3d.shape)
```

mjirik/imcut | imcut/pycut.py | ImageGraphCut.debug_show_reconstructed_similarity | python | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1261-L1296

```python
def debug_show_reconstructed_similarity(
self,
data3d=None,
voxelsize=None,
seeds=None,
area_weight=1,
hard_constraints=True,
show=True,
bins=20,
slice_number=None,
):
"""
Show tlinks.
:param data3d: ndarray with input data
:param voxelsize:
:param seeds:
:param area_weight:
:param hard_constraints:
:param show:
:param bins: histogram bins number
:param slice_number:
:return:
"""
unariesalt = self.debug_get_reconstructed_similarity(
data3d,
voxelsize=voxelsize,
seeds=seeds,
area_weight=area_weight,
hard_constraints=hard_constraints,
return_unariesalt=True,
)
self._debug_show_unariesalt(
unariesalt, show=show, bins=bins, slice_number=slice_number
) | python | def debug_show_reconstructed_similarity(
self,
data3d=None,
voxelsize=None,
seeds=None,
area_weight=1,
hard_constraints=True,
show=True,
bins=20,
slice_number=None,
):
"""
Show tlinks.
:param data3d: ndarray with input data
:param voxelsize:
:param seeds:
:param area_weight:
:param hard_constraints:
:param show:
:param bins: histogram bins number
:param slice_number:
:return:
"""
unariesalt = self.debug_get_reconstructed_similarity(
data3d,
voxelsize=voxelsize,
seeds=seeds,
area_weight=area_weight,
hard_constraints=hard_constraints,
return_unariesalt=True,
)
self._debug_show_unariesalt(
unariesalt, show=show, bins=bins, slice_number=slice_number
) | [
"def",
"debug_show_reconstructed_similarity",
"(",
"self",
",",
"data3d",
"=",
"None",
",",
"voxelsize",
"=",
"None",
",",
"seeds",
"=",
"None",
",",
"area_weight",
"=",
"1",
",",
"hard_constraints",
"=",
"True",
",",
"show",
"=",
"True",
",",
"bins",
"=",
"20",
",",
"slice_number",
"=",
"None",
",",
")",
":",
"unariesalt",
"=",
"self",
".",
"debug_get_reconstructed_similarity",
"(",
"data3d",
",",
"voxelsize",
"=",
"voxelsize",
",",
"seeds",
"=",
"seeds",
",",
"area_weight",
"=",
"area_weight",
",",
"hard_constraints",
"=",
"hard_constraints",
",",
"return_unariesalt",
"=",
"True",
",",
")",
"self",
".",
"_debug_show_unariesalt",
"(",
"unariesalt",
",",
"show",
"=",
"show",
",",
"bins",
"=",
"bins",
",",
"slice_number",
"=",
"slice_number",
")"
] | Show tlinks.
:param data3d: ndarray with input data
:param voxelsize:
:param seeds:
:param area_weight:
:param hard_constraints:
:param show:
:param bins: histogram bins number
:param slice_number:
:return: | [
"Show",
"tlinks",
".",
":",
"param",
"data3d",
":",
"ndarray",
"with",
"input",
"data",
":",
"param",
"voxelsize",
":",
":",
"param",
"seeds",
":",
":",
"param",
"area_weight",
":",
":",
"param",
"hard_constraints",
":",
":",
"param",
"show",
":",
":",
"param",
"bins",
":",
"histogram",
"bins",
"number",
":",
"param",
"slice_number",
":",
":",
"return",
":"
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1261-L1296 |
mjirik/imcut | imcut/pycut.py | ImageGraphCut.debug_inspect_node | def debug_inspect_node(self, node_msindex):
"""
Get info about the node. See pycut.inspect_node() for details.
Processing is done in temporary shape.
:param node_msindex:
:return: node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds
"""
return inspect_node(self.nlinks, self.unariesalt2, self.msinds, node_msindex) | python | def debug_inspect_node(self, node_msindex):
"""
Get info about the node. See pycut.inspect_node() for details.
Processing is done in temporary shape.
:param node_msindex:
:return: node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds
"""
return inspect_node(self.nlinks, self.unariesalt2, self.msinds, node_msindex) | [
"def",
"debug_inspect_node",
"(",
"self",
",",
"node_msindex",
")",
":",
"return",
"inspect_node",
"(",
"self",
".",
"nlinks",
",",
"self",
".",
"unariesalt2",
",",
"self",
".",
"msinds",
",",
"node_msindex",
")"
] | Get info about the node. See pycut.inspect_node() for details.
Processing is done in temporary shape.
:param node_msindex:
:return: node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds | [
"Get",
"info",
"about",
"the",
"node",
".",
"See",
"pycut",
".",
"inspect_node",
"()",
"for",
"details",
".",
"Processing",
"is",
"done",
"in",
"temporary",
"shape",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1298-L1306 |
mjirik/imcut | imcut/pycut.py | ImageGraphCut.debug_interactive_inspect_node | def debug_interactive_inspect_node(self):
"""
Call after segmentation to see selected node neighborhood.
The user has to select one node by clicking.
:return:
"""
if (
np.sum(
np.abs(
np.asarray(self.msinds.shape) - np.asarray(self.segmentation.shape)
)
)
== 0
):
segmentation = self.segmentation
else:
segmentation = self.temp_msgc_resized_segmentation
logger.info("Click to select one voxel of interest")
import sed3
ed = sed3.sed3(self.msinds, contour=segmentation == 0)
ed.show()
edseeds = ed.seeds
node_msindex = get_node_msindex(self.msinds, edseeds)
node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds = self.debug_inspect_node(
node_msindex
)
import sed3
ed = sed3.sed3(
self.msinds, contour=segmentation == 0, seeds=node_neighboor_seeds
)
ed.show()
return (
node_unariesalt,
node_neighboor_edges_and_weights,
node_neighboor_seeds,
node_msindex,
) | python | def debug_interactive_inspect_node(self):
"""
Call after segmentation to see selected node neighborhood.
The user has to select one node by clicking.
:return:
"""
if (
np.sum(
np.abs(
np.asarray(self.msinds.shape) - np.asarray(self.segmentation.shape)
)
)
== 0
):
segmentation = self.segmentation
else:
segmentation = self.temp_msgc_resized_segmentation
logger.info("Click to select one voxel of interest")
import sed3
ed = sed3.sed3(self.msinds, contour=segmentation == 0)
ed.show()
edseeds = ed.seeds
node_msindex = get_node_msindex(self.msinds, edseeds)
node_unariesalt, node_neighboor_edges_and_weights, node_neighboor_seeds = self.debug_inspect_node(
node_msindex
)
import sed3
ed = sed3.sed3(
self.msinds, contour=segmentation == 0, seeds=node_neighboor_seeds
)
ed.show()
return (
node_unariesalt,
node_neighboor_edges_and_weights,
node_neighboor_seeds,
node_msindex,
) | [
"def",
"debug_interactive_inspect_node",
"(",
"self",
")",
":",
"if",
"(",
"np",
".",
"sum",
"(",
"np",
".",
"abs",
"(",
"np",
".",
"asarray",
"(",
"self",
".",
"msinds",
".",
"shape",
")",
"-",
"np",
".",
"asarray",
"(",
"self",
".",
"segmentation",
".",
"shape",
")",
")",
")",
"==",
"0",
")",
":",
"segmentation",
"=",
"self",
".",
"segmentation",
"else",
":",
"segmentation",
"=",
"self",
".",
"temp_msgc_resized_segmentation",
"logger",
".",
"info",
"(",
"\"Click to select one voxel of interest\"",
")",
"import",
"sed3",
"ed",
"=",
"sed3",
".",
"sed3",
"(",
"self",
".",
"msinds",
",",
"contour",
"=",
"segmentation",
"==",
"0",
")",
"ed",
".",
"show",
"(",
")",
"edseeds",
"=",
"ed",
".",
"seeds",
"node_msindex",
"=",
"get_node_msindex",
"(",
"self",
".",
"msinds",
",",
"edseeds",
")",
"node_unariesalt",
",",
"node_neighboor_edges_and_weights",
",",
"node_neighboor_seeds",
"=",
"self",
".",
"debug_inspect_node",
"(",
"node_msindex",
")",
"import",
"sed3",
"ed",
"=",
"sed3",
".",
"sed3",
"(",
"self",
".",
"msinds",
",",
"contour",
"=",
"segmentation",
"==",
"0",
",",
"seeds",
"=",
"node_neighboor_seeds",
")",
"ed",
".",
"show",
"(",
")",
"return",
"(",
"node_unariesalt",
",",
"node_neighboor_edges_and_weights",
",",
"node_neighboor_seeds",
",",
"node_msindex",
",",
")"
] | Call after segmentation to see selected node neighborhood.
The user has to select one node by clicking.
:return: | [
"Call",
"after",
"segmentation",
"to",
"see",
"selected",
"node",
"neighborhood",
".",
"User",
"have",
"to",
"select",
"one",
"node",
"by",
"click",
".",
":",
"return",
":"
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1318-L1359 |
mjirik/imcut | imcut/pycut.py | ImageGraphCut._ssgc_prepare_data_and_run_computation | def _ssgc_prepare_data_and_run_computation(
self,
# voxels1, voxels2,
hard_constraints=True,
area_weight=1,
):
"""
Setting of data.
You need to set seeds if you want to use hard_constraints.
"""
# from PyQt4.QtCore import pyqtRemoveInputHook
# pyqtRemoveInputHook()
# import pdb; pdb.set_trace() # BREAKPOINT
unariesalt = self.__create_tlinks(
self.img,
self.voxelsize,
# voxels1, voxels2,
self.seeds,
area_weight,
hard_constraints,
)
# in some organ segmentation tests the unaries reach -15, which is strange
# putting a print before the if makes it visible
logger.debug("unaries %.3g , %.3g" % (np.max(unariesalt), np.min(unariesalt)))
# create potts pairwise
# pairwiseAlpha = -10
pairwise = -(np.eye(2) - 1)
pairwise = (self.segparams["pairwise_alpha"] * pairwise).astype(np.int32)
# pairwise = np.array([[0,30],[30,0]]).astype(np.int32)
# print pairwise
self.iparams = {}
if self.segparams["use_boundary_penalties"]:
sigma = self.segparams["boundary_penalties_sigma"]
# set boundary penalties function
# Default are penalties based on intensity differences
boundary_penalties_fcn = lambda ax: self._boundary_penalties_array(
axis=ax, sigma=sigma
)
else:
boundary_penalties_fcn = None
nlinks = self.__create_nlinks(
self.img, boundary_penalties_fcn=boundary_penalties_fcn
)
self.stats["tlinks shape"].append(unariesalt.reshape(-1, 2).shape)
self.stats["nlinks shape"].append(nlinks.shape)
# we flatten the unaries
# result_graph = cut_from_graph(nlinks, unaries.reshape(-1, 2),
# pairwise)
start = time.time()
if self.debug_images:
self._debug_show_unariesalt(unariesalt)
result_graph = pygco.cut_from_graph(nlinks, unariesalt.reshape(-1, 2), pairwise)
elapsed = time.time() - start
self.stats["gc time"] = elapsed
result_labeling = result_graph.reshape(self.img.shape)
return result_labeling | python | def _ssgc_prepare_data_and_run_computation(
self,
# voxels1, voxels2,
hard_constraints=True,
area_weight=1,
):
"""
Setting of data.
You need to set seeds if you want to use hard_constraints.
"""
# from PyQt4.QtCore import pyqtRemoveInputHook
# pyqtRemoveInputHook()
# import pdb; pdb.set_trace() # BREAKPOINT
unariesalt = self.__create_tlinks(
self.img,
self.voxelsize,
# voxels1, voxels2,
self.seeds,
area_weight,
hard_constraints,
)
# in some organ segmentation tests the unaries reach -15, which is strange
# putting a print before the if makes it visible
logger.debug("unaries %.3g , %.3g" % (np.max(unariesalt), np.min(unariesalt)))
# create potts pairwise
# pairwiseAlpha = -10
pairwise = -(np.eye(2) - 1)
pairwise = (self.segparams["pairwise_alpha"] * pairwise).astype(np.int32)
# pairwise = np.array([[0,30],[30,0]]).astype(np.int32)
# print pairwise
self.iparams = {}
if self.segparams["use_boundary_penalties"]:
sigma = self.segparams["boundary_penalties_sigma"]
# set boundary penalties function
# Default are penalties based on intensity differences
boundary_penalties_fcn = lambda ax: self._boundary_penalties_array(
axis=ax, sigma=sigma
)
else:
boundary_penalties_fcn = None
nlinks = self.__create_nlinks(
self.img, boundary_penalties_fcn=boundary_penalties_fcn
)
self.stats["tlinks shape"].append(unariesalt.reshape(-1, 2).shape)
self.stats["nlinks shape"].append(nlinks.shape)
# we flatten the unaries
# result_graph = cut_from_graph(nlinks, unaries.reshape(-1, 2),
# pairwise)
start = time.time()
if self.debug_images:
self._debug_show_unariesalt(unariesalt)
result_graph = pygco.cut_from_graph(nlinks, unariesalt.reshape(-1, 2), pairwise)
elapsed = time.time() - start
self.stats["gc time"] = elapsed
result_labeling = result_graph.reshape(self.img.shape)
return result_labeling | [
"def",
"_ssgc_prepare_data_and_run_computation",
"(",
"self",
",",
"# voxels1, voxels2,",
"hard_constraints",
"=",
"True",
",",
"area_weight",
"=",
"1",
",",
")",
":",
"# from PyQt4.QtCore import pyqtRemoveInputHook",
"# pyqtRemoveInputHook()",
"# import pdb; pdb.set_trace() # BREAKPOINT",
"unariesalt",
"=",
"self",
".",
"__create_tlinks",
"(",
"self",
".",
"img",
",",
"self",
".",
"voxelsize",
",",
"# voxels1, voxels2,",
"self",
".",
"seeds",
",",
"area_weight",
",",
"hard_constraints",
",",
")",
"# některém testu organ semgmentation dosahují unaries -15. což je podiné",
"# stačí vyhodit print před if a je to vidět",
"logger",
".",
"debug",
"(",
"\"unaries %.3g , %.3g\"",
"%",
"(",
"np",
".",
"max",
"(",
"unariesalt",
")",
",",
"np",
".",
"min",
"(",
"unariesalt",
")",
")",
")",
"# create potts pairwise",
"# pairwiseAlpha = -10",
"pairwise",
"=",
"-",
"(",
"np",
".",
"eye",
"(",
"2",
")",
"-",
"1",
")",
"pairwise",
"=",
"(",
"self",
".",
"segparams",
"[",
"\"pairwise_alpha\"",
"]",
"*",
"pairwise",
")",
".",
"astype",
"(",
"np",
".",
"int32",
")",
"# pairwise = np.array([[0,30],[30,0]]).astype(np.int32)",
"# print pairwise",
"self",
".",
"iparams",
"=",
"{",
"}",
"if",
"self",
".",
"segparams",
"[",
"\"use_boundary_penalties\"",
"]",
":",
"sigma",
"=",
"self",
".",
"segparams",
"[",
"\"boundary_penalties_sigma\"",
"]",
"# set boundary penalties function",
"# Default are penalties based on intensity differences",
"boundary_penalties_fcn",
"=",
"lambda",
"ax",
":",
"self",
".",
"_boundary_penalties_array",
"(",
"axis",
"=",
"ax",
",",
"sigma",
"=",
"sigma",
")",
"else",
":",
"boundary_penalties_fcn",
"=",
"None",
"nlinks",
"=",
"self",
".",
"__create_nlinks",
"(",
"self",
".",
"img",
",",
"boundary_penalties_fcn",
"=",
"boundary_penalties_fcn",
")",
"self",
".",
"stats",
"[",
"\"tlinks shape\"",
"]",
".",
"append",
"(",
"unariesalt",
".",
"reshape",
"(",
"-",
"1",
",",
"2",
")",
".",
"shape",
")",
"self",
".",
"stats",
"[",
"\"nlinks shape\"",
"]",
".",
"append",
"(",
"nlinks",
".",
"shape",
")",
"# we flatten the unaries",
"# result_graph = cut_from_graph(nlinks, unaries.reshape(-1, 2),",
"# pairwise)",
"start",
"=",
"time",
".",
"time",
"(",
")",
"if",
"self",
".",
"debug_images",
":",
"self",
".",
"_debug_show_unariesalt",
"(",
"unariesalt",
")",
"result_graph",
"=",
"pygco",
".",
"cut_from_graph",
"(",
"nlinks",
",",
"unariesalt",
".",
"reshape",
"(",
"-",
"1",
",",
"2",
")",
",",
"pairwise",
")",
"elapsed",
"=",
"time",
".",
"time",
"(",
")",
"-",
"start",
"self",
".",
"stats",
"[",
"\"gc time\"",
"]",
"=",
"elapsed",
"result_labeling",
"=",
"result_graph",
".",
"reshape",
"(",
"self",
".",
"img",
".",
"shape",
")",
"return",
"result_labeling"
] | Setting of data.
You need to set seeds if you want to use hard_constraints. | [
"Setting",
"of",
"data",
".",
"You",
"need",
"set",
"seeds",
"if",
"you",
"want",
"use",
"hard_constraints",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/pycut.py#L1370-L1430 |
mjirik/imcut | imcut/image_manipulation.py | resize_to_shape | def resize_to_shape(data, shape, zoom=None, mode="nearest", order=0):
"""
Resize input data to a specific shape.
:param data: input 3d array-like data
:param shape: shape of output data
:param zoom: zoom is used for back compatibility
:param mode: default is 'nearest'
"""
# @TODO remove old code in except part
# TODO use function from library in future
try:
# rint 'pred vyjimkou'
# aise Exception ('test without skimage')
# rint 'za vyjimkou'
import skimage
import skimage.transform
# Now we need reshape seeds and segmentation to original size
# with warnings.catch_warnings():
# warnings.filterwarnings("ignore", ".*'constant', will be changed to.*")
segm_orig_scale = skimage.transform.resize(
data, shape, order=0, preserve_range=True, mode="reflect"
)
segmentation = segm_orig_scale
logger.debug("resize to orig with skimage")
except:
if zoom is None:
zoom = shape / np.asarray(data.shape).astype(np.double)
segmentation = resize_to_shape_with_zoom(
data, zoom=zoom, mode=mode, order=order
)
return segmentation | python | def resize_to_shape(data, shape, zoom=None, mode="nearest", order=0):
"""
Resize input data to a specific shape.
:param data: input 3d array-like data
:param shape: shape of output data
:param zoom: zoom is used for back compatibility
:param mode: default is 'nearest'
"""
# @TODO remove old code in except part
# TODO use function from library in future
try:
# rint 'pred vyjimkou'
# aise Exception ('test without skimage')
# rint 'za vyjimkou'
import skimage
import skimage.transform
# Now we need reshape seeds and segmentation to original size
# with warnings.catch_warnings():
# warnings.filterwarnings("ignore", ".*'constant', will be changed to.*")
segm_orig_scale = skimage.transform.resize(
data, shape, order=0, preserve_range=True, mode="reflect"
)
segmentation = segm_orig_scale
logger.debug("resize to orig with skimage")
except:
if zoom is None:
zoom = shape / np.asarray(data.shape).astype(np.double)
segmentation = resize_to_shape_with_zoom(
data, zoom=zoom, mode=mode, order=order
)
return segmentation | [
"def",
"resize_to_shape",
"(",
"data",
",",
"shape",
",",
"zoom",
"=",
"None",
",",
"mode",
"=",
"\"nearest\"",
",",
"order",
"=",
"0",
")",
":",
"# @TODO remove old code in except part",
"# TODO use function from library in future",
"try",
":",
"# rint 'pred vyjimkou'",
"# aise Exception ('test without skimage')",
"# rint 'za vyjimkou'",
"import",
"skimage",
"import",
"skimage",
".",
"transform",
"# Now we need reshape seeds and segmentation to original size",
"# with warnings.catch_warnings():",
"# warnings.filterwarnings(\"ignore\", \".*'constant', will be changed to.*\")",
"segm_orig_scale",
"=",
"skimage",
".",
"transform",
".",
"resize",
"(",
"data",
",",
"shape",
",",
"order",
"=",
"0",
",",
"preserve_range",
"=",
"True",
",",
"mode",
"=",
"\"reflect\"",
")",
"segmentation",
"=",
"segm_orig_scale",
"logger",
".",
"debug",
"(",
"\"resize to orig with skimage\"",
")",
"except",
":",
"if",
"zoom",
"is",
"None",
":",
"zoom",
"=",
"shape",
"/",
"np",
".",
"asarray",
"(",
"data",
".",
"shape",
")",
".",
"astype",
"(",
"np",
".",
"double",
")",
"segmentation",
"=",
"resize_to_shape_with_zoom",
"(",
"data",
",",
"zoom",
"=",
"zoom",
",",
"mode",
"=",
"mode",
",",
"order",
"=",
"order",
")",
"return",
"segmentation"
] | Resize input data to a specific shape.
:param data: input 3d array-like data
:param shape: shape of output data
:param zoom: zoom is used for back compatibility
:param mode: default is 'nearest' | [
"Function",
"resize",
"input",
"data",
"to",
"specific",
"shape",
".",
":",
"param",
"data",
":",
"input",
"3d",
"array",
"-",
"like",
"data",
":",
"param",
"shape",
":",
"shape",
"of",
"output",
"data",
":",
"param",
"zoom",
":",
"zoom",
"is",
"used",
"for",
"back",
"compatibility",
":",
"mode",
":",
"default",
"is",
"nearest"
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L14-L49 |
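A short usage sketch for resize_to_shape. The module path imcut.image_manipulation is assumed from the file path in the row; the array values are illustrative.

```python
import numpy as np
from imcut.image_manipulation import resize_to_shape  # assumed module path

data = np.zeros([10, 12, 8], dtype=np.uint8)
data[3:7, 4:8, 2:6] = 1
resized = resize_to_shape(data, [20, 24, 16])  # order=0 keeps the label values intact
print(resized.shape)  # (20, 24, 16)
```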
mjirik/imcut | imcut/image_manipulation.py | seed_zoom | def seed_zoom(seeds, zoom):
"""
Smart zoom for a sparse matrix. If the data is resized to a bigger resolution,
a thin line of labels could be lost. This function prefers labels larger
than zero. If there is only one small voxel in a larger volume of zeros,
it is selected.
"""
# import scipy
# loseeds=seeds
labels = np.unique(seeds)
# remove first label - 0
labels = np.delete(labels, 0)
# @TODO smart interpolation for seeds in one block
# loseeds = scipy.ndimage.interpolation.zoom(
# seeds, zoom, order=0)
loshape = np.ceil(np.array(seeds.shape) * 1.0 / zoom).astype(np.int)
loseeds = np.zeros(loshape, dtype=np.int8)
loseeds = loseeds.astype(np.int8)
for label in labels:
a, b, c = np.where(seeds == label)
loa = np.round(a // zoom)
lob = np.round(b // zoom)
loc = np.round(c // zoom)
# loseeds = np.zeros(loshape)
loseeds[loa, lob, loc] += label
# this is to detect conflict seeds
loseeds[loseeds > label] = 100
# remove conflict seeds
loseeds[loseeds > 99] = 0
# import py3DSeedEditor
# ped = py3DSeedEditor.py3DSeedEditor(loseeds)
# ped.show()
return loseeds | python | def seed_zoom(seeds, zoom):
"""
Smart zoom for a sparse matrix. If the data is resized to a bigger resolution,
a thin line of labels could be lost. This function prefers labels larger
than zero. If there is only one small voxel in a larger volume of zeros,
it is selected.
"""
# import scipy
# loseeds=seeds
labels = np.unique(seeds)
# remove first label - 0
labels = np.delete(labels, 0)
# @TODO smart interpolation for seeds in one block
# loseeds = scipy.ndimage.interpolation.zoom(
# seeds, zoom, order=0)
loshape = np.ceil(np.array(seeds.shape) * 1.0 / zoom).astype(np.int)
loseeds = np.zeros(loshape, dtype=np.int8)
loseeds = loseeds.astype(np.int8)
for label in labels:
a, b, c = np.where(seeds == label)
loa = np.round(a // zoom)
lob = np.round(b // zoom)
loc = np.round(c // zoom)
# loseeds = np.zeros(loshape)
loseeds[loa, lob, loc] += label
# this is to detect conflict seeds
loseeds[loseeds > label] = 100
# remove conflict seeds
loseeds[loseeds > 99] = 0
# import py3DSeedEditor
# ped = py3DSeedEditor.py3DSeedEditor(loseeds)
# ped.show()
return loseeds | [
"def",
"seed_zoom",
"(",
"seeds",
",",
"zoom",
")",
":",
"# import scipy",
"# loseeds=seeds",
"labels",
"=",
"np",
".",
"unique",
"(",
"seeds",
")",
"# remove first label - 0",
"labels",
"=",
"np",
".",
"delete",
"(",
"labels",
",",
"0",
")",
"# @TODO smart interpolation for seeds in one block",
"# loseeds = scipy.ndimage.interpolation.zoom(",
"# seeds, zoom, order=0)",
"loshape",
"=",
"np",
".",
"ceil",
"(",
"np",
".",
"array",
"(",
"seeds",
".",
"shape",
")",
"*",
"1.0",
"/",
"zoom",
")",
".",
"astype",
"(",
"np",
".",
"int",
")",
"loseeds",
"=",
"np",
".",
"zeros",
"(",
"loshape",
",",
"dtype",
"=",
"np",
".",
"int8",
")",
"loseeds",
"=",
"loseeds",
".",
"astype",
"(",
"np",
".",
"int8",
")",
"for",
"label",
"in",
"labels",
":",
"a",
",",
"b",
",",
"c",
"=",
"np",
".",
"where",
"(",
"seeds",
"==",
"label",
")",
"loa",
"=",
"np",
".",
"round",
"(",
"a",
"//",
"zoom",
")",
"lob",
"=",
"np",
".",
"round",
"(",
"b",
"//",
"zoom",
")",
"loc",
"=",
"np",
".",
"round",
"(",
"c",
"//",
"zoom",
")",
"# loseeds = np.zeros(loshape)",
"loseeds",
"[",
"loa",
",",
"lob",
",",
"loc",
"]",
"+=",
"label",
"# this is to detect conflict seeds",
"loseeds",
"[",
"loseeds",
">",
"label",
"]",
"=",
"100",
"# remove conflict seeds",
"loseeds",
"[",
"loseeds",
">",
"99",
"]",
"=",
"0",
"# import py3DSeedEditor",
"# ped = py3DSeedEditor.py3DSeedEditor(loseeds)",
"# ped.show()",
"return",
"loseeds"
] | Smart zoom for a sparse matrix. If the data is resized to a bigger resolution,
a thin line of labels could be lost. This function prefers labels larger
than zero. If there is only one small voxel in a larger volume of zeros,
it is selected. | [
"Smart",
"zoom",
"for",
"sparse",
"matrix",
".",
"If",
"there",
"is",
"resize",
"to",
"bigger",
"resolution",
"thin",
"line",
"of",
"label",
"could",
"be",
"lost",
".",
"This",
"function",
"prefers",
"labels",
"larger",
"then",
"zero",
".",
"If",
"there",
"is",
"only",
"one",
"small",
"voxel",
"in",
"larger",
"volume",
"with",
"zeros",
"it",
"is",
"selected",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L85-L121 |
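A usage sketch for seed_zoom, showing integer downscaling of a sparse seed volume; the module path and values are assumptions.

```python
import numpy as np
from imcut.image_manipulation import seed_zoom  # assumed module path

seeds = np.zeros([8, 8, 8], dtype=np.int8)
seeds[1, 2, 3] = 1   # thin foreground seed
seeds[6, 6, 6] = 2   # background seed
loseeds = seed_zoom(seeds, 2)   # downscale by a factor of 2
print(loseeds.shape)            # (4, 4, 4); both labels survive the downscaling
```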
mjirik/imcut | imcut/image_manipulation.py | zoom_to_shape | def zoom_to_shape(data, shape, dtype=None):
"""
Zoom data to specific shape.
"""
import scipy
import scipy.ndimage
zoomd = np.array(shape) / np.array(data.shape, dtype=np.double)
import warnings
datares = scipy.ndimage.interpolation.zoom(data, zoomd, order=0, mode="reflect")
if datares.shape != shape:
logger.warning("Zoom with different output shape")
dataout = np.zeros(shape, dtype=dtype)
shpmin = np.minimum(dataout.shape, shape)
dataout[: shpmin[0], : shpmin[1], : shpmin[2]] = datares[
: shpmin[0], : shpmin[1], : shpmin[2]
]
return datares | python | def zoom_to_shape(data, shape, dtype=None):
"""
Zoom data to specific shape.
"""
import scipy
import scipy.ndimage
zoomd = np.array(shape) / np.array(data.shape, dtype=np.double)
import warnings
datares = scipy.ndimage.interpolation.zoom(data, zoomd, order=0, mode="reflect")
if datares.shape != shape:
logger.warning("Zoom with different output shape")
dataout = np.zeros(shape, dtype=dtype)
shpmin = np.minimum(dataout.shape, shape)
dataout[: shpmin[0], : shpmin[1], : shpmin[2]] = datares[
: shpmin[0], : shpmin[1], : shpmin[2]
]
return datares | [
"def",
"zoom_to_shape",
"(",
"data",
",",
"shape",
",",
"dtype",
"=",
"None",
")",
":",
"import",
"scipy",
"import",
"scipy",
".",
"ndimage",
"zoomd",
"=",
"np",
".",
"array",
"(",
"shape",
")",
"/",
"np",
".",
"array",
"(",
"data",
".",
"shape",
",",
"dtype",
"=",
"np",
".",
"double",
")",
"import",
"warnings",
"datares",
"=",
"scipy",
".",
"ndimage",
".",
"interpolation",
".",
"zoom",
"(",
"data",
",",
"zoomd",
",",
"order",
"=",
"0",
",",
"mode",
"=",
"\"reflect\"",
")",
"if",
"datares",
".",
"shape",
"!=",
"shape",
":",
"logger",
".",
"warning",
"(",
"\"Zoom with different output shape\"",
")",
"dataout",
"=",
"np",
".",
"zeros",
"(",
"shape",
",",
"dtype",
"=",
"dtype",
")",
"shpmin",
"=",
"np",
".",
"minimum",
"(",
"dataout",
".",
"shape",
",",
"shape",
")",
"dataout",
"[",
":",
"shpmin",
"[",
"0",
"]",
",",
":",
"shpmin",
"[",
"1",
"]",
",",
":",
"shpmin",
"[",
"2",
"]",
"]",
"=",
"datares",
"[",
":",
"shpmin",
"[",
"0",
"]",
",",
":",
"shpmin",
"[",
"1",
"]",
",",
":",
"shpmin",
"[",
"2",
"]",
"]",
"return",
"datares"
] | Zoom data to specific shape. | [
"Zoom",
"data",
"to",
"specific",
"shape",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L124-L144 |
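A usage sketch for zoom_to_shape; the module path and shapes are assumptions.

```python
import numpy as np
from imcut.image_manipulation import zoom_to_shape  # assumed module path

labels = np.zeros([4, 6, 5], dtype=np.uint8)
labels[1:3, 2:5, 1:4] = 1
upscaled = zoom_to_shape(labels, [8, 12, 10], dtype=np.uint8)  # order=0 zoom to the target shape
print(upscaled.shape)  # (8, 12, 10)
```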
mjirik/imcut | imcut/image_manipulation.py | crop | def crop(data, crinfo):
"""
Crop the data.
crop(data, crinfo)
:param crinfo: min and max for each axis - [[minX, maxX], [minY, maxY], [minZ, maxZ]]
"""
crinfo = fix_crinfo(crinfo)
return data[
__int_or_none(crinfo[0][0]) : __int_or_none(crinfo[0][1]),
__int_or_none(crinfo[1][0]) : __int_or_none(crinfo[1][1]),
__int_or_none(crinfo[2][0]) : __int_or_none(crinfo[2][1]),
] | python | def crop(data, crinfo):
"""
Crop the data.
crop(data, crinfo)
:param crinfo: min and max for each axis - [[minX, maxX], [minY, maxY], [minZ, maxZ]]
"""
crinfo = fix_crinfo(crinfo)
return data[
__int_or_none(crinfo[0][0]) : __int_or_none(crinfo[0][1]),
__int_or_none(crinfo[1][0]) : __int_or_none(crinfo[1][1]),
__int_or_none(crinfo[2][0]) : __int_or_none(crinfo[2][1]),
] | [
"def",
"crop",
"(",
"data",
",",
"crinfo",
")",
":",
"crinfo",
"=",
"fix_crinfo",
"(",
"crinfo",
")",
"return",
"data",
"[",
"__int_or_none",
"(",
"crinfo",
"[",
"0",
"]",
"[",
"0",
"]",
")",
":",
"__int_or_none",
"(",
"crinfo",
"[",
"0",
"]",
"[",
"1",
"]",
")",
",",
"__int_or_none",
"(",
"crinfo",
"[",
"1",
"]",
"[",
"0",
"]",
")",
":",
"__int_or_none",
"(",
"crinfo",
"[",
"1",
"]",
"[",
"1",
"]",
")",
",",
"__int_or_none",
"(",
"crinfo",
"[",
"2",
"]",
"[",
"0",
"]",
")",
":",
"__int_or_none",
"(",
"crinfo",
"[",
"2",
"]",
"[",
"1",
"]",
")",
",",
"]"
] | Crop the data.
crop(data, crinfo)
:param crinfo: min and max for each axis - [[minX, maxX], [minY, maxY], [minZ, maxZ]] | [
"Crop",
"the",
"data",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L361-L375 |
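A usage sketch for crop; the crinfo values are illustrative.

```python
import numpy as np
from imcut.image_manipulation import crop  # assumed module path

data = np.random.rand(30, 40, 50)
crinfo = [[5, 15], [10, 30], [0, 25]]   # [min, max] per axis
cropped = crop(data, crinfo)
print(cropped.shape)  # (10, 20, 25)
```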
mjirik/imcut | imcut/image_manipulation.py | combinecrinfo | def combinecrinfo(crinfo1, crinfo2):
"""
Combine two crinfos. crinfo1 is applied first, crinfo2 second.
"""
crinfo1 = fix_crinfo(crinfo1)
crinfo2 = fix_crinfo(crinfo2)
crinfo = [
[crinfo1[0][0] + crinfo2[0][0], crinfo1[0][0] + crinfo2[0][1]],
[crinfo1[1][0] + crinfo2[1][0], crinfo1[1][0] + crinfo2[1][1]],
[crinfo1[2][0] + crinfo2[2][0], crinfo1[2][0] + crinfo2[2][1]],
]
return crinfo | python | def combinecrinfo(crinfo1, crinfo2):
"""
Combine two crinfos. crinfo1 is applied first, crinfo2 second.
"""
crinfo1 = fix_crinfo(crinfo1)
crinfo2 = fix_crinfo(crinfo2)
crinfo = [
[crinfo1[0][0] + crinfo2[0][0], crinfo1[0][0] + crinfo2[0][1]],
[crinfo1[1][0] + crinfo2[1][0], crinfo1[1][0] + crinfo2[1][1]],
[crinfo1[2][0] + crinfo2[2][0], crinfo1[2][0] + crinfo2[2][1]],
]
return crinfo | [
"def",
"combinecrinfo",
"(",
"crinfo1",
",",
"crinfo2",
")",
":",
"crinfo1",
"=",
"fix_crinfo",
"(",
"crinfo1",
")",
"crinfo2",
"=",
"fix_crinfo",
"(",
"crinfo2",
")",
"crinfo",
"=",
"[",
"[",
"crinfo1",
"[",
"0",
"]",
"[",
"0",
"]",
"+",
"crinfo2",
"[",
"0",
"]",
"[",
"0",
"]",
",",
"crinfo1",
"[",
"0",
"]",
"[",
"0",
"]",
"+",
"crinfo2",
"[",
"0",
"]",
"[",
"1",
"]",
"]",
",",
"[",
"crinfo1",
"[",
"1",
"]",
"[",
"0",
"]",
"+",
"crinfo2",
"[",
"1",
"]",
"[",
"0",
"]",
",",
"crinfo1",
"[",
"1",
"]",
"[",
"0",
"]",
"+",
"crinfo2",
"[",
"1",
"]",
"[",
"1",
"]",
"]",
",",
"[",
"crinfo1",
"[",
"2",
"]",
"[",
"0",
"]",
"+",
"crinfo2",
"[",
"2",
"]",
"[",
"0",
"]",
",",
"crinfo1",
"[",
"2",
"]",
"[",
"0",
"]",
"+",
"crinfo2",
"[",
"2",
"]",
"[",
"1",
"]",
"]",
",",
"]",
"return",
"crinfo"
] | Combine two crinfos. crinfo1 is applied first, crinfo2 second. | [
"Combine",
"two",
"crinfos",
".",
"First",
"used",
"is",
"crinfo1",
"second",
"used",
"is",
"crinfo2",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L384-L397 |
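A usage sketch for combinecrinfo, mapping a crop of an already cropped volume back to original coordinates; the values are illustrative.

```python
from imcut.image_manipulation import combinecrinfo  # assumed module path

crinfo1 = [[5, 25], [10, 30], [0, 20]]   # crop applied to the original image
crinfo2 = [[2, 10], [3, 12], [1, 15]]    # crop applied to the already cropped data
combined = combinecrinfo(crinfo1, crinfo2)
# combined expressed in original coordinates: [[7, 15], [13, 22], [1, 15]]
```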
mjirik/imcut | imcut/image_manipulation.py | crinfo_from_specific_data | def crinfo_from_specific_data(data, margin=0):
"""
Create crinfo of minimum orthogonal nonzero block in input data.
:param data: input data
:param margin: add margin to minimum block
:return:
"""
# looking for the automatic crop; nonzero gives the indices
logger.debug("crinfo")
logger.debug(str(margin))
nzi = np.nonzero(data)
logger.debug(str(nzi))
if np.isscalar(margin):
margin = [margin] * 3
x1 = np.min(nzi[0]) - margin[0]
x2 = np.max(nzi[0]) + margin[0] + 1
y1 = np.min(nzi[1]) - margin[0]
y2 = np.max(nzi[1]) + margin[0] + 1
z1 = np.min(nzi[2]) - margin[0]
z2 = np.max(nzi[2]) + margin[0] + 1
# handle the array bounds
if x1 < 0:
x1 = 0
if y1 < 0:
y1 = 0
if z1 < 0:
z1 = 0
if x2 > data.shape[0]:
x2 = data.shape[0] - 1
if y2 > data.shape[1]:
y2 = data.shape[1] - 1
if z2 > data.shape[2]:
z2 = data.shape[2] - 1
# crop
crinfo = [[x1, x2], [y1, y2], [z1, z2]]
return crinfo | python | def crinfo_from_specific_data(data, margin=0):
"""
Create crinfo of minimum orthogonal nonzero block in input data.
:param data: input data
:param margin: add margin to minimum block
:return:
"""
# looking for the automatic crop; nonzero gives the indices
logger.debug("crinfo")
logger.debug(str(margin))
nzi = np.nonzero(data)
logger.debug(str(nzi))
if np.isscalar(margin):
margin = [margin] * 3
x1 = np.min(nzi[0]) - margin[0]
x2 = np.max(nzi[0]) + margin[0] + 1
y1 = np.min(nzi[1]) - margin[0]
y2 = np.max(nzi[1]) + margin[0] + 1
z1 = np.min(nzi[2]) - margin[0]
z2 = np.max(nzi[2]) + margin[0] + 1
# handle the array bounds
if x1 < 0:
x1 = 0
if y1 < 0:
y1 = 0
if z1 < 0:
z1 = 0
if x2 > data.shape[0]:
x2 = data.shape[0] - 1
if y2 > data.shape[1]:
y2 = data.shape[1] - 1
if z2 > data.shape[2]:
z2 = data.shape[2] - 1
# crop
crinfo = [[x1, x2], [y1, y2], [z1, z2]]
return crinfo | [
"def",
"crinfo_from_specific_data",
"(",
"data",
",",
"margin",
"=",
"0",
")",
":",
"# hledáme automatický ořez, nonzero dá indexy",
"logger",
".",
"debug",
"(",
"\"crinfo\"",
")",
"logger",
".",
"debug",
"(",
"str",
"(",
"margin",
")",
")",
"nzi",
"=",
"np",
".",
"nonzero",
"(",
"data",
")",
"logger",
".",
"debug",
"(",
"str",
"(",
"nzi",
")",
")",
"if",
"np",
".",
"isscalar",
"(",
"margin",
")",
":",
"margin",
"=",
"[",
"margin",
"]",
"*",
"3",
"x1",
"=",
"np",
".",
"min",
"(",
"nzi",
"[",
"0",
"]",
")",
"-",
"margin",
"[",
"0",
"]",
"x2",
"=",
"np",
".",
"max",
"(",
"nzi",
"[",
"0",
"]",
")",
"+",
"margin",
"[",
"0",
"]",
"+",
"1",
"y1",
"=",
"np",
".",
"min",
"(",
"nzi",
"[",
"1",
"]",
")",
"-",
"margin",
"[",
"0",
"]",
"y2",
"=",
"np",
".",
"max",
"(",
"nzi",
"[",
"1",
"]",
")",
"+",
"margin",
"[",
"0",
"]",
"+",
"1",
"z1",
"=",
"np",
".",
"min",
"(",
"nzi",
"[",
"2",
"]",
")",
"-",
"margin",
"[",
"0",
"]",
"z2",
"=",
"np",
".",
"max",
"(",
"nzi",
"[",
"2",
"]",
")",
"+",
"margin",
"[",
"0",
"]",
"+",
"1",
"# ošetření mezí polí",
"if",
"x1",
"<",
"0",
":",
"x1",
"=",
"0",
"if",
"y1",
"<",
"0",
":",
"y1",
"=",
"0",
"if",
"z1",
"<",
"0",
":",
"z1",
"=",
"0",
"if",
"x2",
">",
"data",
".",
"shape",
"[",
"0",
"]",
":",
"x2",
"=",
"data",
".",
"shape",
"[",
"0",
"]",
"-",
"1",
"if",
"y2",
">",
"data",
".",
"shape",
"[",
"1",
"]",
":",
"y2",
"=",
"data",
".",
"shape",
"[",
"1",
"]",
"-",
"1",
"if",
"z2",
">",
"data",
".",
"shape",
"[",
"2",
"]",
":",
"z2",
"=",
"data",
".",
"shape",
"[",
"2",
"]",
"-",
"1",
"# ořez",
"crinfo",
"=",
"[",
"[",
"x1",
",",
"x2",
"]",
",",
"[",
"y1",
",",
"y2",
"]",
",",
"[",
"z1",
",",
"z2",
"]",
"]",
"return",
"crinfo"
] | Create crinfo of minimum orthogonal nonzero block in input data.
:param data: input data
:param margin: add margin to minimum block
:return: | [
"Create",
"crinfo",
"of",
"minimum",
"orthogonal",
"nonzero",
"block",
"in",
"input",
"data",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L400-L441 |
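A usage sketch combining crinfo_from_specific_data with crop; the module path and data are assumptions.

```python
import numpy as np
from imcut.image_manipulation import crinfo_from_specific_data, crop  # assumed module path

data = np.zeros([20, 20, 20], dtype=np.uint8)
data[5:9, 6:12, 3:7] = 1
crinfo = crinfo_from_specific_data(data, margin=2)  # bounding box of the nonzero block plus margin
cropped = crop(data, crinfo)
print(crinfo, cropped.shape)
```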
mjirik/imcut | imcut/image_manipulation.py | uncrop | def uncrop(data, crinfo, orig_shape, resize=False, outside_mode="constant", cval=0):
"""
Put the input data back into an image of the original shape (inverse of crop).
:param data: input data
:param crinfo: array with minimum and maximum index along each axis
[[minX, maxX],[minY, maxY],[minZ, maxZ]]. If crinfo is None, the whole input image is placed into [0, 0, 0].
If crinfo is just a series of three numbers, it is used as an initial point for input image placement.
:param orig_shape: shape of uncropped image
:param resize: True or False (default). Useful if the data.shape does not fit the crinfo shape.
:param outside_mode: 'constant', 'nearest'
:return:
"""
if crinfo is None:
crinfo = list(zip([0] * data.ndim, orig_shape))
elif np.asarray(crinfo).size == data.ndim:
crinfo = list(zip(crinfo, np.asarray(crinfo) + data.shape))
crinfo = fix_crinfo(crinfo)
data_out = np.ones(orig_shape, dtype=data.dtype) * cval
# print 'uncrop ', crinfo
# print orig_shape
# print data.shape
if resize:
data = resize_to_shape(data, crinfo[:, 1] - crinfo[:, 0])
startx = np.round(crinfo[0][0]).astype(int)
starty = np.round(crinfo[1][0]).astype(int)
startz = np.round(crinfo[2][0]).astype(int)
data_out[
# np.round(crinfo[0][0]).astype(int):np.round(crinfo[0][1]).astype(int)+1,
# np.round(crinfo[1][0]).astype(int):np.round(crinfo[1][1]).astype(int)+1,
# np.round(crinfo[2][0]).astype(int):np.round(crinfo[2][1]).astype(int)+1
startx : startx + data.shape[0],
starty : starty + data.shape[1],
startz : startz + data.shape[2],
] = data
if outside_mode == "nearest":
# for ax in range(data.ndims):
# ax = 0
# copy border slice to pixels out of boundary - the higher part
for ax in range(data.ndim):
# the part under the crop
start = np.round(crinfo[ax][0]).astype(int)
slices = [slice(None), slice(None), slice(None)]
slices[ax] = start
repeated_slice = np.expand_dims(data_out[slices], ax)
append_sz = start
if append_sz > 0:
tile0 = np.repeat(repeated_slice, append_sz, axis=ax)
slices = [slice(None), slice(None), slice(None)]
slices[ax] = slice(None, start)
# data_out[start + data.shape[ax] : , :, :] = tile0
data_out[slices] = tile0
# plt.imshow(np.squeeze(repeated_slice))
# plt.show()
# the part over the crop
start = np.round(crinfo[ax][0]).astype(int)
slices = [slice(None), slice(None), slice(None)]
slices[ax] = start + data.shape[ax] - 1
repeated_slice = np.expand_dims(data_out[slices], ax)
append_sz = data_out.shape[ax] - (start + data.shape[ax])
if append_sz > 0:
tile0 = np.repeat(repeated_slice, append_sz, axis=ax)
slices = [slice(None), slice(None), slice(None)]
slices[ax] = slice(start + data.shape[ax], None)
# data_out[start + data.shape[ax] : , :, :] = tile0
data_out[slices] = tile0
# plt.imshow(np.squeeze(repeated_slice))
# plt.show()
return data_out | python | def uncrop(data, crinfo, orig_shape, resize=False, outside_mode="constant", cval=0):
"""
Put the input data back into an image of the original shape (inverse of crop).
:param data: input data
:param crinfo: array with minimum and maximum index along each axis
[[minX, maxX],[minY, maxY],[minZ, maxZ]]. If crinfo is None, the whole input image is placed into [0, 0, 0].
If crinfo is just a series of three numbers, it is used as an initial point for input image placement.
:param orig_shape: shape of uncropped image
:param resize: True or False (default). Useful if the data.shape does not fit the crinfo shape.
:param outside_mode: 'constant', 'nearest'
:return:
"""
if crinfo is None:
crinfo = list(zip([0] * data.ndim, orig_shape))
elif np.asarray(crinfo).size == data.ndim:
crinfo = list(zip(crinfo, np.asarray(crinfo) + data.shape))
crinfo = fix_crinfo(crinfo)
data_out = np.ones(orig_shape, dtype=data.dtype) * cval
# print 'uncrop ', crinfo
# print orig_shape
# print data.shape
if resize:
data = resize_to_shape(data, crinfo[:, 1] - crinfo[:, 0])
startx = np.round(crinfo[0][0]).astype(int)
starty = np.round(crinfo[1][0]).astype(int)
startz = np.round(crinfo[2][0]).astype(int)
data_out[
# np.round(crinfo[0][0]).astype(int):np.round(crinfo[0][1]).astype(int)+1,
# np.round(crinfo[1][0]).astype(int):np.round(crinfo[1][1]).astype(int)+1,
# np.round(crinfo[2][0]).astype(int):np.round(crinfo[2][1]).astype(int)+1
startx : startx + data.shape[0],
starty : starty + data.shape[1],
startz : startz + data.shape[2],
] = data
if outside_mode == "nearest":
# for ax in range(data.ndims):
# ax = 0
# copy border slice to pixels out of boundary - the higher part
for ax in range(data.ndim):
# the part under the crop
start = np.round(crinfo[ax][0]).astype(int)
slices = [slice(None), slice(None), slice(None)]
slices[ax] = start
repeated_slice = np.expand_dims(data_out[slices], ax)
append_sz = start
if append_sz > 0:
tile0 = np.repeat(repeated_slice, append_sz, axis=ax)
slices = [slice(None), slice(None), slice(None)]
slices[ax] = slice(None, start)
# data_out[start + data.shape[ax] : , :, :] = tile0
data_out[slices] = tile0
# plt.imshow(np.squeeze(repeated_slice))
# plt.show()
# the part over the crop
start = np.round(crinfo[ax][0]).astype(int)
slices = [slice(None), slice(None), slice(None)]
slices[ax] = start + data.shape[ax] - 1
repeated_slice = np.expand_dims(data_out[slices], ax)
append_sz = data_out.shape[ax] - (start + data.shape[ax])
if append_sz > 0:
tile0 = np.repeat(repeated_slice, append_sz, axis=ax)
slices = [slice(None), slice(None), slice(None)]
slices[ax] = slice(start + data.shape[ax], None)
# data_out[start + data.shape[ax] : , :, :] = tile0
data_out[slices] = tile0
# plt.imshow(np.squeeze(repeated_slice))
# plt.show()
return data_out | [
"def",
"uncrop",
"(",
"data",
",",
"crinfo",
",",
"orig_shape",
",",
"resize",
"=",
"False",
",",
"outside_mode",
"=",
"\"constant\"",
",",
"cval",
"=",
"0",
")",
":",
"if",
"crinfo",
"is",
"None",
":",
"crinfo",
"=",
"list",
"(",
"zip",
"(",
"[",
"0",
"]",
"*",
"data",
".",
"ndim",
",",
"orig_shape",
")",
")",
"elif",
"np",
".",
"asarray",
"(",
"crinfo",
")",
".",
"size",
"==",
"data",
".",
"ndim",
":",
"crinfo",
"=",
"list",
"(",
"zip",
"(",
"crinfo",
",",
"np",
".",
"asarray",
"(",
"crinfo",
")",
"+",
"data",
".",
"shape",
")",
")",
"crinfo",
"=",
"fix_crinfo",
"(",
"crinfo",
")",
"data_out",
"=",
"np",
".",
"ones",
"(",
"orig_shape",
",",
"dtype",
"=",
"data",
".",
"dtype",
")",
"*",
"cval",
"# print 'uncrop ', crinfo",
"# print orig_shape",
"# print data.shape",
"if",
"resize",
":",
"data",
"=",
"resize_to_shape",
"(",
"data",
",",
"crinfo",
"[",
":",
",",
"1",
"]",
"-",
"crinfo",
"[",
":",
",",
"0",
"]",
")",
"startx",
"=",
"np",
".",
"round",
"(",
"crinfo",
"[",
"0",
"]",
"[",
"0",
"]",
")",
".",
"astype",
"(",
"int",
")",
"starty",
"=",
"np",
".",
"round",
"(",
"crinfo",
"[",
"1",
"]",
"[",
"0",
"]",
")",
".",
"astype",
"(",
"int",
")",
"startz",
"=",
"np",
".",
"round",
"(",
"crinfo",
"[",
"2",
"]",
"[",
"0",
"]",
")",
".",
"astype",
"(",
"int",
")",
"data_out",
"[",
"# np.round(crinfo[0][0]).astype(int):np.round(crinfo[0][1]).astype(int)+1,",
"# np.round(crinfo[1][0]).astype(int):np.round(crinfo[1][1]).astype(int)+1,",
"# np.round(crinfo[2][0]).astype(int):np.round(crinfo[2][1]).astype(int)+1",
"startx",
":",
"startx",
"+",
"data",
".",
"shape",
"[",
"0",
"]",
",",
"starty",
":",
"starty",
"+",
"data",
".",
"shape",
"[",
"1",
"]",
",",
"startz",
":",
"startz",
"+",
"data",
".",
"shape",
"[",
"2",
"]",
",",
"]",
"=",
"data",
"if",
"outside_mode",
"==",
"\"nearest\"",
":",
"# for ax in range(data.ndims):",
"# ax = 0",
"# copy border slice to pixels out of boundary - the higher part",
"for",
"ax",
"in",
"range",
"(",
"data",
".",
"ndim",
")",
":",
"# the part under the crop",
"start",
"=",
"np",
".",
"round",
"(",
"crinfo",
"[",
"ax",
"]",
"[",
"0",
"]",
")",
".",
"astype",
"(",
"int",
")",
"slices",
"=",
"[",
"slice",
"(",
"None",
")",
",",
"slice",
"(",
"None",
")",
",",
"slice",
"(",
"None",
")",
"]",
"slices",
"[",
"ax",
"]",
"=",
"start",
"repeated_slice",
"=",
"np",
".",
"expand_dims",
"(",
"data_out",
"[",
"slices",
"]",
",",
"ax",
")",
"append_sz",
"=",
"start",
"if",
"append_sz",
">",
"0",
":",
"tile0",
"=",
"np",
".",
"repeat",
"(",
"repeated_slice",
",",
"append_sz",
",",
"axis",
"=",
"ax",
")",
"slices",
"=",
"[",
"slice",
"(",
"None",
")",
",",
"slice",
"(",
"None",
")",
",",
"slice",
"(",
"None",
")",
"]",
"slices",
"[",
"ax",
"]",
"=",
"slice",
"(",
"None",
",",
"start",
")",
"# data_out[start + data.shape[ax] : , :, :] = tile0",
"data_out",
"[",
"slices",
"]",
"=",
"tile0",
"# plt.imshow(np.squeeze(repeated_slice))",
"# plt.show()",
"# the part over the crop",
"start",
"=",
"np",
".",
"round",
"(",
"crinfo",
"[",
"ax",
"]",
"[",
"0",
"]",
")",
".",
"astype",
"(",
"int",
")",
"slices",
"=",
"[",
"slice",
"(",
"None",
")",
",",
"slice",
"(",
"None",
")",
",",
"slice",
"(",
"None",
")",
"]",
"slices",
"[",
"ax",
"]",
"=",
"start",
"+",
"data",
".",
"shape",
"[",
"ax",
"]",
"-",
"1",
"repeated_slice",
"=",
"np",
".",
"expand_dims",
"(",
"data_out",
"[",
"slices",
"]",
",",
"ax",
")",
"append_sz",
"=",
"data_out",
".",
"shape",
"[",
"ax",
"]",
"-",
"(",
"start",
"+",
"data",
".",
"shape",
"[",
"ax",
"]",
")",
"if",
"append_sz",
">",
"0",
":",
"tile0",
"=",
"np",
".",
"repeat",
"(",
"repeated_slice",
",",
"append_sz",
",",
"axis",
"=",
"ax",
")",
"slices",
"=",
"[",
"slice",
"(",
"None",
")",
",",
"slice",
"(",
"None",
")",
",",
"slice",
"(",
"None",
")",
"]",
"slices",
"[",
"ax",
"]",
"=",
"slice",
"(",
"start",
"+",
"data",
".",
"shape",
"[",
"ax",
"]",
",",
"None",
")",
"# data_out[start + data.shape[ax] : , :, :] = tile0",
"data_out",
"[",
"slices",
"]",
"=",
"tile0",
"# plt.imshow(np.squeeze(repeated_slice))",
"# plt.show()",
"return",
"data_out"
] | Put the input data back into an image of the original shape (inverse of crop).
:param data: input data
:param crinfo: array with minimum and maximum index along each axis
[[minX, maxX],[minY, maxY],[minZ, maxZ]]. If crinfo is None, the whole input image is placed into [0, 0, 0].
If crinfo is just a series of three numbers, it is used as an initial point for input image placement.
:param orig_shape: shape of uncropped image
:param resize: True or False (default). Useful if the data.shape does not fit the crinfo shape.
:param outside_mode: 'constant', 'nearest'
:return: | [
"Put",
"some",
"boundary",
"to",
"input",
"image",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L444-L522 |
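A usage sketch showing uncrop as the inverse of crop; the module path and shapes are assumptions.

```python
import numpy as np
from imcut.image_manipulation import crop, uncrop  # assumed module path

data = np.random.rand(30, 40, 50)
crinfo = [[5, 15], [10, 30], [0, 25]]
cropped = crop(data, crinfo)
restored = uncrop(cropped, crinfo, data.shape)  # cval (zeros) outside the cropped block
print(restored.shape)  # (30, 40, 50)
```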
mjirik/imcut | imcut/image_manipulation.py | fix_crinfo | def fix_crinfo(crinfo, to="axis"):
"""
Recognize the order of crinfo and convert it to the proper format.
"""
crinfo = np.asarray(crinfo)
if crinfo.shape[0] == 2:
crinfo = crinfo.T
return crinfo | python | def fix_crinfo(crinfo, to="axis"):
"""
Recognize the order of crinfo and convert it to the proper format.
"""
crinfo = np.asarray(crinfo)
if crinfo.shape[0] == 2:
crinfo = crinfo.T
return crinfo | [
"def",
"fix_crinfo",
"(",
"crinfo",
",",
"to",
"=",
"\"axis\"",
")",
":",
"crinfo",
"=",
"np",
".",
"asarray",
"(",
"crinfo",
")",
"if",
"crinfo",
".",
"shape",
"[",
"0",
"]",
"==",
"2",
":",
"crinfo",
"=",
"crinfo",
".",
"T",
"return",
"crinfo"
] | Recognize the order of crinfo and convert it to the proper format. | [
"Function",
"recognize",
"order",
"of",
"crinfo",
"and",
"convert",
"it",
"to",
"proper",
"format",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/image_manipulation.py#L525-L534 |
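A usage sketch for fix_crinfo; the input ordering is illustrative.

```python
from imcut.image_manipulation import fix_crinfo  # assumed module path

crinfo = [[5, 10, 0], [15, 30, 25]]   # [mins, maxs] ordering (2 x n_axes)
fixed = fix_crinfo(crinfo)
# fixed is axis-wise: [[5, 15], [10, 30], [0, 25]]; an already axis-wise crinfo is returned unchanged
```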
mjirik/imcut | imcut/graph.py | grid_edges | def grid_edges(shape, inds=None, return_directions=True):
"""
Get list of grid edges
:param shape:
:param inds:
:param return_directions:
:return:
"""
if inds is None:
inds = np.arange(np.prod(shape)).reshape(shape)
# if not self.segparams['use_boundary_penalties'] and \
# boundary_penalties_fcn is None :
if len(shape) == 2:
edgx = np.c_[inds[:, :-1].ravel(), inds[:, 1:].ravel()]
edgy = np.c_[inds[:-1, :].ravel(), inds[1:, :].ravel()]
edges = [edgx, edgy]
directions = [
np.ones([edgx.shape[0]], dtype=np.int8) * 0,
np.ones([edgy.shape[0]], dtype=np.int8) * 1,
]
elif len(shape) == 3:
# This is faster for some specific format
edgx = np.c_[inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel()]
edgy = np.c_[inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel()]
edgz = np.c_[inds[:-1, :, :].ravel(), inds[1:, :, :].ravel()]
edges = [edgx, edgy, edgz]
else:
logger.error("Expected 2D or 3D data")
# for all edges along the first direction put 0, for the second direction put 1, for the third direction put 2
if return_directions:
directions = []
for idirection in range(len(shape)):
directions.append(
np.ones([edges[idirection].shape[0]], dtype=np.int8) * idirection
)
edges = np.concatenate(edges)
if return_directions:
edge_dir = np.concatenate(directions)
return edges, edge_dir
else:
return edges | python | def grid_edges(shape, inds=None, return_directions=True):
"""
Get list of grid edges
:param shape:
:param inds:
:param return_directions:
:return:
"""
if inds is None:
inds = np.arange(np.prod(shape)).reshape(shape)
# if not self.segparams['use_boundary_penalties'] and \
# boundary_penalties_fcn is None :
if len(shape) == 2:
edgx = np.c_[inds[:, :-1].ravel(), inds[:, 1:].ravel()]
edgy = np.c_[inds[:-1, :].ravel(), inds[1:, :].ravel()]
edges = [edgx, edgy]
directions = [
np.ones([edgx.shape[0]], dtype=np.int8) * 0,
np.ones([edgy.shape[0]], dtype=np.int8) * 1,
]
elif len(shape) == 3:
# This is faster for some specific format
edgx = np.c_[inds[:, :, :-1].ravel(), inds[:, :, 1:].ravel()]
edgy = np.c_[inds[:, :-1, :].ravel(), inds[:, 1:, :].ravel()]
edgz = np.c_[inds[:-1, :, :].ravel(), inds[1:, :, :].ravel()]
edges = [edgx, edgy, edgz]
else:
logger.error("Expected 2D or 3D data")
# for all edges along the first direction put 0, for the second direction put 1, for the third direction put 2
if return_directions:
directions = []
for idirection in range(len(shape)):
directions.append(
np.ones([edges[idirection].shape[0]], dtype=np.int8) * idirection
)
edges = np.concatenate(edges)
if return_directions:
edge_dir = np.concatenate(directions)
return edges, edge_dir
else:
return edges | [
"def",
"grid_edges",
"(",
"shape",
",",
"inds",
"=",
"None",
",",
"return_directions",
"=",
"True",
")",
":",
"if",
"inds",
"is",
"None",
":",
"inds",
"=",
"np",
".",
"arange",
"(",
"np",
".",
"prod",
"(",
"shape",
")",
")",
".",
"reshape",
"(",
"shape",
")",
"# if not self.segparams['use_boundary_penalties'] and \\",
"# boundary_penalties_fcn is None :",
"if",
"len",
"(",
"shape",
")",
"==",
"2",
":",
"edgx",
"=",
"np",
".",
"c_",
"[",
"inds",
"[",
":",
",",
":",
"-",
"1",
"]",
".",
"ravel",
"(",
")",
",",
"inds",
"[",
":",
",",
"1",
":",
"]",
".",
"ravel",
"(",
")",
"]",
"edgy",
"=",
"np",
".",
"c_",
"[",
"inds",
"[",
":",
"-",
"1",
",",
":",
"]",
".",
"ravel",
"(",
")",
",",
"inds",
"[",
"1",
":",
",",
":",
"]",
".",
"ravel",
"(",
")",
"]",
"edges",
"=",
"[",
"edgx",
",",
"edgy",
"]",
"directions",
"=",
"[",
"np",
".",
"ones",
"(",
"[",
"edgx",
".",
"shape",
"[",
"0",
"]",
"]",
",",
"dtype",
"=",
"np",
".",
"int8",
")",
"*",
"0",
",",
"np",
".",
"ones",
"(",
"[",
"edgy",
".",
"shape",
"[",
"0",
"]",
"]",
",",
"dtype",
"=",
"np",
".",
"int8",
")",
"*",
"1",
",",
"]",
"elif",
"len",
"(",
"shape",
")",
"==",
"3",
":",
"# This is faster for some specific format",
"edgx",
"=",
"np",
".",
"c_",
"[",
"inds",
"[",
":",
",",
":",
",",
":",
"-",
"1",
"]",
".",
"ravel",
"(",
")",
",",
"inds",
"[",
":",
",",
":",
",",
"1",
":",
"]",
".",
"ravel",
"(",
")",
"]",
"edgy",
"=",
"np",
".",
"c_",
"[",
"inds",
"[",
":",
",",
":",
"-",
"1",
",",
":",
"]",
".",
"ravel",
"(",
")",
",",
"inds",
"[",
":",
",",
"1",
":",
",",
":",
"]",
".",
"ravel",
"(",
")",
"]",
"edgz",
"=",
"np",
".",
"c_",
"[",
"inds",
"[",
":",
"-",
"1",
",",
":",
",",
":",
"]",
".",
"ravel",
"(",
")",
",",
"inds",
"[",
"1",
":",
",",
":",
",",
":",
"]",
".",
"ravel",
"(",
")",
"]",
"edges",
"=",
"[",
"edgx",
",",
"edgy",
",",
"edgz",
"]",
"else",
":",
"logger",
".",
"error",
"(",
"\"Expected 2D or 3D data\"",
")",
"# for all edges along first direction put 0, for second direction put 1, for third direction put 3",
"if",
"return_directions",
":",
"directions",
"=",
"[",
"]",
"for",
"idirection",
"in",
"range",
"(",
"len",
"(",
"shape",
")",
")",
":",
"directions",
".",
"append",
"(",
"np",
".",
"ones",
"(",
"[",
"edges",
"[",
"idirection",
"]",
".",
"shape",
"[",
"0",
"]",
"]",
",",
"dtype",
"=",
"np",
".",
"int8",
")",
"*",
"idirection",
")",
"edges",
"=",
"np",
".",
"concatenate",
"(",
"edges",
")",
"if",
"return_directions",
":",
"edge_dir",
"=",
"np",
".",
"concatenate",
"(",
"directions",
")",
"return",
"edges",
",",
"edge_dir",
"else",
":",
"return",
"edges"
] | Get list of grid edges
:param shape:
:param inds:
:param return_directions:
:return: | [
"Get",
"list",
"of",
"grid",
"edges",
":",
"param",
"shape",
":",
":",
"param",
"inds",
":",
":",
"param",
"return_directions",
":",
":",
"return",
":"
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L524-L568 |
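A usage sketch for grid_edges on a small 2D grid; the module path is an assumption.

```python
import numpy as np
from imcut.graph import grid_edges  # assumed module path

edges, edge_dir = grid_edges([2, 3], return_directions=True)
# a 2x3 grid has 2*(3-1) + (2-1)*3 = 7 links between neighbouring pixels
print(edges.shape)   # (7, 2) pairs of flat pixel indices
print(edge_dir)      # which of the two grid directions each edge belongs to
```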
mjirik/imcut | imcut/graph.py | gen_grid_2d | def gen_grid_2d(shape, voxelsize):
"""
Generate list of edges for a base grid.
"""
nr, nc = shape
nrm1, ncm1 = nr - 1, nc - 1
# sh = nm.asarray(shape)
# calculate number of edges, in 2D: (nrows * (ncols - 1)) + ((nrows - 1) * ncols)
nedges = 0
for direction in range(len(shape)):
sh = copy.copy(list(shape))
sh[direction] += -1
nedges += nm.prod(sh)
nedges_old = ncm1 * nr + nrm1 * nc
edges = nm.zeros((nedges, 2), dtype=nm.int16)
edge_dir = nm.zeros((ncm1 * nr + nrm1 * nc,), dtype=nm.bool)
nodes = nm.zeros((nm.prod(shape), 3), dtype=nm.float32)
# edges
idx = 0
row = nm.zeros((ncm1, 2), dtype=nm.int16)
row[:, 0] = nm.arange(ncm1)
row[:, 1] = nm.arange(ncm1) + 1
for ii in range(nr):
edges[slice(idx, idx + ncm1), :] = row + nc * ii
idx += ncm1
edge_dir[slice(0, idx)] = 0 # horizontal dir
idx0 = idx
col = nm.zeros((nrm1, 2), dtype=nm.int16)
col[:, 0] = nm.arange(nrm1) * nc
col[:, 1] = nm.arange(nrm1) * nc + nc
for ii in range(nc):
edges[slice(idx, idx + nrm1), :] = col + ii
idx += nrm1
edge_dir[slice(idx0, idx)] = 1 # vertical dir
# nodes
idx = 0
row = nm.zeros((nc, 3), dtype=nm.float32)
row[:, 0] = voxelsize[0] * (nm.arange(nc) + 0.5)
row[:, 1] = voxelsize[1] * 0.5
for ii in range(nr):
nodes[slice(idx, idx + nc), :] = row
row[:, 1] += voxelsize[1]
idx += nc
return nodes, edges, edge_dir | python | def gen_grid_2d(shape, voxelsize):
"""
Generate list of edges for a base grid.
"""
nr, nc = shape
nrm1, ncm1 = nr - 1, nc - 1
# sh = nm.asarray(shape)
# calculate number of edges, in 2D: (nrows * (ncols - 1)) + ((nrows - 1) * ncols)
nedges = 0
for direction in range(len(shape)):
sh = copy.copy(list(shape))
sh[direction] += -1
nedges += nm.prod(sh)
nedges_old = ncm1 * nr + nrm1 * nc
edges = nm.zeros((nedges, 2), dtype=nm.int16)
edge_dir = nm.zeros((ncm1 * nr + nrm1 * nc,), dtype=nm.bool)
nodes = nm.zeros((nm.prod(shape), 3), dtype=nm.float32)
# edges
idx = 0
row = nm.zeros((ncm1, 2), dtype=nm.int16)
row[:, 0] = nm.arange(ncm1)
row[:, 1] = nm.arange(ncm1) + 1
for ii in range(nr):
edges[slice(idx, idx + ncm1), :] = row + nc * ii
idx += ncm1
edge_dir[slice(0, idx)] = 0 # horizontal dir
idx0 = idx
col = nm.zeros((nrm1, 2), dtype=nm.int16)
col[:, 0] = nm.arange(nrm1) * nc
col[:, 1] = nm.arange(nrm1) * nc + nc
for ii in range(nc):
edges[slice(idx, idx + nrm1), :] = col + ii
idx += nrm1
edge_dir[slice(idx0, idx)] = 1 # vertical dir
# nodes
idx = 0
row = nm.zeros((nc, 3), dtype=nm.float32)
row[:, 0] = voxelsize[0] * (nm.arange(nc) + 0.5)
row[:, 1] = voxelsize[1] * 0.5
for ii in range(nr):
nodes[slice(idx, idx + nc), :] = row
row[:, 1] += voxelsize[1]
idx += nc
return nodes, edges, edge_dir | [
"def",
"gen_grid_2d",
"(",
"shape",
",",
"voxelsize",
")",
":",
"nr",
",",
"nc",
"=",
"shape",
"nrm1",
",",
"ncm1",
"=",
"nr",
"-",
"1",
",",
"nc",
"-",
"1",
"# sh = nm.asarray(shape)",
"# calculate number of edges, in 2D: (nrows * (ncols - 1)) + ((nrows - 1) * ncols)",
"nedges",
"=",
"0",
"for",
"direction",
"in",
"range",
"(",
"len",
"(",
"shape",
")",
")",
":",
"sh",
"=",
"copy",
".",
"copy",
"(",
"list",
"(",
"shape",
")",
")",
"sh",
"[",
"direction",
"]",
"+=",
"-",
"1",
"nedges",
"+=",
"nm",
".",
"prod",
"(",
"sh",
")",
"nedges_old",
"=",
"ncm1",
"*",
"nr",
"+",
"nrm1",
"*",
"nc",
"edges",
"=",
"nm",
".",
"zeros",
"(",
"(",
"nedges",
",",
"2",
")",
",",
"dtype",
"=",
"nm",
".",
"int16",
")",
"edge_dir",
"=",
"nm",
".",
"zeros",
"(",
"(",
"ncm1",
"*",
"nr",
"+",
"nrm1",
"*",
"nc",
",",
")",
",",
"dtype",
"=",
"nm",
".",
"bool",
")",
"nodes",
"=",
"nm",
".",
"zeros",
"(",
"(",
"nm",
".",
"prod",
"(",
"shape",
")",
",",
"3",
")",
",",
"dtype",
"=",
"nm",
".",
"float32",
")",
"# edges",
"idx",
"=",
"0",
"row",
"=",
"nm",
".",
"zeros",
"(",
"(",
"ncm1",
",",
"2",
")",
",",
"dtype",
"=",
"nm",
".",
"int16",
")",
"row",
"[",
":",
",",
"0",
"]",
"=",
"nm",
".",
"arange",
"(",
"ncm1",
")",
"row",
"[",
":",
",",
"1",
"]",
"=",
"nm",
".",
"arange",
"(",
"ncm1",
")",
"+",
"1",
"for",
"ii",
"in",
"range",
"(",
"nr",
")",
":",
"edges",
"[",
"slice",
"(",
"idx",
",",
"idx",
"+",
"ncm1",
")",
",",
":",
"]",
"=",
"row",
"+",
"nc",
"*",
"ii",
"idx",
"+=",
"ncm1",
"edge_dir",
"[",
"slice",
"(",
"0",
",",
"idx",
")",
"]",
"=",
"0",
"# horizontal dir",
"idx0",
"=",
"idx",
"col",
"=",
"nm",
".",
"zeros",
"(",
"(",
"nrm1",
",",
"2",
")",
",",
"dtype",
"=",
"nm",
".",
"int16",
")",
"col",
"[",
":",
",",
"0",
"]",
"=",
"nm",
".",
"arange",
"(",
"nrm1",
")",
"*",
"nc",
"col",
"[",
":",
",",
"1",
"]",
"=",
"nm",
".",
"arange",
"(",
"nrm1",
")",
"*",
"nc",
"+",
"nc",
"for",
"ii",
"in",
"range",
"(",
"nc",
")",
":",
"edges",
"[",
"slice",
"(",
"idx",
",",
"idx",
"+",
"nrm1",
")",
",",
":",
"]",
"=",
"col",
"+",
"ii",
"idx",
"+=",
"nrm1",
"edge_dir",
"[",
"slice",
"(",
"idx0",
",",
"idx",
")",
"]",
"=",
"1",
"# vertical dir",
"# nodes",
"idx",
"=",
"0",
"row",
"=",
"nm",
".",
"zeros",
"(",
"(",
"nc",
",",
"3",
")",
",",
"dtype",
"=",
"nm",
".",
"float32",
")",
"row",
"[",
":",
",",
"0",
"]",
"=",
"voxelsize",
"[",
"0",
"]",
"*",
"(",
"nm",
".",
"arange",
"(",
"nc",
")",
"+",
"0.5",
")",
"row",
"[",
":",
",",
"1",
"]",
"=",
"voxelsize",
"[",
"1",
"]",
"*",
"0.5",
"for",
"ii",
"in",
"range",
"(",
"nr",
")",
":",
"nodes",
"[",
"slice",
"(",
"idx",
",",
"idx",
"+",
"nc",
")",
",",
":",
"]",
"=",
"row",
"row",
"[",
":",
",",
"1",
"]",
"+=",
"voxelsize",
"[",
"1",
"]",
"idx",
"+=",
"nc",
"return",
"nodes",
",",
"edges",
",",
"edge_dir"
] | Generate list of edges for a base grid. | [
"Generate",
"list",
"of",
"edges",
"for",
"a",
"base",
"grid",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L586-L636 |
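A usage sketch for gen_grid_2d; the module path, grid shape and voxelsize are assumptions.

```python
from imcut.graph import gen_grid_2d  # assumed module path

nodes, edges, edge_dir = gen_grid_2d((3, 4), voxelsize=[1.0, 1.0])
print(nodes.shape)   # (12, 3): one 3D coordinate per grid node
print(edges.shape)   # (17, 2): 3*(4-1) + (3-1)*4 grid edges
```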
mjirik/imcut | imcut/graph.py | write_grid_to_vtk | def write_grid_to_vtk(fname, nodes, edges, node_flag=None, edge_flag=None):
"""
Write nodes and edges to VTK file
:param fname: VTK filename
:param nodes:
:param edges:
:param node_flag: set if this node is really used in output
:param edge_flag: set if this edge is really used in output
:return:
"""
if node_flag is None:
node_flag = np.ones([nodes.shape[0]], dtype=np.bool)
if edge_flag is None:
edge_flag = np.ones([edges.shape[0]], dtype=np.bool)
nodes = make_nodes_3d(nodes)
f = open(fname, "w")
f.write("# vtk DataFile Version 2.6\n")
f.write("output file\nASCII\nDATASET UNSTRUCTURED_GRID\n")
idxs = nm.where(node_flag > 0)[0]
nnd = len(idxs)
aux = -nm.ones(node_flag.shape, dtype=nm.int32)
aux[idxs] = nm.arange(nnd, dtype=nm.int32)
f.write("\nPOINTS %d float\n" % nnd)
for ndi in idxs:
f.write("%.6f %.6f %.6f\n" % tuple(nodes[ndi, :]))
idxs = nm.where(edge_flag > 0)[0]
ned = len(idxs)
f.write("\nCELLS %d %d\n" % (ned, ned * 3))
for edi in idxs:
f.write("2 %d %d\n" % tuple(aux[edges[edi, :]]))
f.write("\nCELL_TYPES %d\n" % ned)
for edi in idxs:
f.write("3\n") | python | def write_grid_to_vtk(fname, nodes, edges, node_flag=None, edge_flag=None):
"""
Write nodes and edges to VTK file
:param fname: VTK filename
:param nodes:
:param edges:
:param node_flag: set if this node is really used in output
:param edge_flag: set if this edge is really used in output
:return:
"""
if node_flag is None:
node_flag = np.ones([nodes.shape[0]], dtype=np.bool)
if edge_flag is None:
edge_flag = np.ones([edges.shape[0]], dtype=np.bool)
nodes = make_nodes_3d(nodes)
f = open(fname, "w")
f.write("# vtk DataFile Version 2.6\n")
f.write("output file\nASCII\nDATASET UNSTRUCTURED_GRID\n")
idxs = nm.where(node_flag > 0)[0]
nnd = len(idxs)
aux = -nm.ones(node_flag.shape, dtype=nm.int32)
aux[idxs] = nm.arange(nnd, dtype=nm.int32)
f.write("\nPOINTS %d float\n" % nnd)
for ndi in idxs:
f.write("%.6f %.6f %.6f\n" % tuple(nodes[ndi, :]))
idxs = nm.where(edge_flag > 0)[0]
ned = len(idxs)
f.write("\nCELLS %d %d\n" % (ned, ned * 3))
for edi in idxs:
f.write("2 %d %d\n" % tuple(aux[edges[edi, :]]))
f.write("\nCELL_TYPES %d\n" % ned)
for edi in idxs:
f.write("3\n") | [
"def",
"write_grid_to_vtk",
"(",
"fname",
",",
"nodes",
",",
"edges",
",",
"node_flag",
"=",
"None",
",",
"edge_flag",
"=",
"None",
")",
":",
"if",
"node_flag",
"is",
"None",
":",
"node_flag",
"=",
"np",
".",
"ones",
"(",
"[",
"nodes",
".",
"shape",
"[",
"0",
"]",
"]",
",",
"dtype",
"=",
"np",
".",
"bool",
")",
"if",
"edge_flag",
"is",
"None",
":",
"edge_flag",
"=",
"np",
".",
"ones",
"(",
"[",
"edges",
".",
"shape",
"[",
"0",
"]",
"]",
",",
"dtype",
"=",
"np",
".",
"bool",
")",
"nodes",
"=",
"make_nodes_3d",
"(",
"nodes",
")",
"f",
"=",
"open",
"(",
"fname",
",",
"\"w\"",
")",
"f",
".",
"write",
"(",
"\"# vtk DataFile Version 2.6\\n\"",
")",
"f",
".",
"write",
"(",
"\"output file\\nASCII\\nDATASET UNSTRUCTURED_GRID\\n\"",
")",
"idxs",
"=",
"nm",
".",
"where",
"(",
"node_flag",
">",
"0",
")",
"[",
"0",
"]",
"nnd",
"=",
"len",
"(",
"idxs",
")",
"aux",
"=",
"-",
"nm",
".",
"ones",
"(",
"node_flag",
".",
"shape",
",",
"dtype",
"=",
"nm",
".",
"int32",
")",
"aux",
"[",
"idxs",
"]",
"=",
"nm",
".",
"arange",
"(",
"nnd",
",",
"dtype",
"=",
"nm",
".",
"int32",
")",
"f",
".",
"write",
"(",
"\"\\nPOINTS %d float\\n\"",
"%",
"nnd",
")",
"for",
"ndi",
"in",
"idxs",
":",
"f",
".",
"write",
"(",
"\"%.6f %.6f %.6f\\n\"",
"%",
"tuple",
"(",
"nodes",
"[",
"ndi",
",",
":",
"]",
")",
")",
"idxs",
"=",
"nm",
".",
"where",
"(",
"edge_flag",
">",
"0",
")",
"[",
"0",
"]",
"ned",
"=",
"len",
"(",
"idxs",
")",
"f",
".",
"write",
"(",
"\"\\nCELLS %d %d\\n\"",
"%",
"(",
"ned",
",",
"ned",
"*",
"3",
")",
")",
"for",
"edi",
"in",
"idxs",
":",
"f",
".",
"write",
"(",
"\"2 %d %d\\n\"",
"%",
"tuple",
"(",
"aux",
"[",
"edges",
"[",
"edi",
",",
":",
"]",
"]",
")",
")",
"f",
".",
"write",
"(",
"\"\\nCELL_TYPES %d\\n\"",
"%",
"ned",
")",
"for",
"edi",
"in",
"idxs",
":",
"f",
".",
"write",
"(",
"\"3\\n\"",
")"
] | Write nodes and edges to VTK file
:param fname: VTK filename
:param nodes:
:param edges:
:param node_flag: set if this node is really used in output
:param edge_flag: set if this flag is used in output
:return: | [
"Write",
"nodes",
"and",
"edges",
"to",
"VTK",
"file",
":",
"param",
"fname",
":",
"VTK",
"filename",
":",
"param",
"nodes",
":",
":",
"param",
"edges",
":",
":",
"param",
"node_flag",
":",
"set",
"if",
"this",
"node",
"is",
"really",
"used",
"in",
"output",
":",
"param",
"edge_flag",
":",
"set",
"if",
"this",
"flag",
"is",
"used",
"in",
"output",
":",
"return",
":"
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L646-L683 |
mjirik/imcut | imcut/graph.py | Graph.add_nodes | def add_nodes(self, coors, node_low_or_high=None):
"""
Add new nodes at the end of the list.
"""
last = self.lastnode
if type(coors) is nm.ndarray:
if len(coors.shape) == 1:
coors = coors.reshape((1, coors.size))
nadd = coors.shape[0]
idx = slice(last, last + nadd)
else:
nadd = 1
idx = self.lastnode
right_dimension = coors.shape[1]
self.nodes[idx, :right_dimension] = coors
self.node_flag[idx] = True
self.lastnode += nadd
self.nnodes += nadd | python | def add_nodes(self, coors, node_low_or_high=None):
"""
Add new nodes at the end of the list.
"""
last = self.lastnode
if type(coors) is nm.ndarray:
if len(coors.shape) == 1:
coors = coors.reshape((1, coors.size))
nadd = coors.shape[0]
idx = slice(last, last + nadd)
else:
nadd = 1
idx = self.lastnode
right_dimension = coors.shape[1]
self.nodes[idx, :right_dimension] = coors
self.node_flag[idx] = True
self.lastnode += nadd
self.nnodes += nadd | [
"def",
"add_nodes",
"(",
"self",
",",
"coors",
",",
"node_low_or_high",
"=",
"None",
")",
":",
"last",
"=",
"self",
".",
"lastnode",
"if",
"type",
"(",
"coors",
")",
"is",
"nm",
".",
"ndarray",
":",
"if",
"len",
"(",
"coors",
".",
"shape",
")",
"==",
"1",
":",
"coors",
"=",
"coors",
".",
"reshape",
"(",
"(",
"1",
",",
"coors",
".",
"size",
")",
")",
"nadd",
"=",
"coors",
".",
"shape",
"[",
"0",
"]",
"idx",
"=",
"slice",
"(",
"last",
",",
"last",
"+",
"nadd",
")",
"else",
":",
"nadd",
"=",
"1",
"idx",
"=",
"self",
".",
"lastnode",
"right_dimension",
"=",
"coors",
".",
"shape",
"[",
"1",
"]",
"self",
".",
"nodes",
"[",
"idx",
",",
":",
"right_dimension",
"]",
"=",
"coors",
"self",
".",
"node_flag",
"[",
"idx",
"]",
"=",
"True",
"self",
".",
"lastnode",
"+=",
"nadd",
"self",
".",
"nnodes",
"+=",
"nadd"
] | Add new nodes at the end of the list. | [
"Add",
"new",
"nodes",
"at",
"the",
"end",
"of",
"the",
"list",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L153-L171 |
mjirik/imcut | imcut/graph.py | Graph.add_edges | def add_edges(self, conn, edge_direction, edge_group=None, edge_low_or_high=None):
"""
Add new edges at the end of the list.
:param edge_direction: direction flag
:param edge_group: describes group of edges from same low super node and same direction
:param edge_low_or_high: zero for low to low resolution, one for high to high or high to low resolution.
It is used to set weight from weight table.
"""
last = self.lastedge
if type(conn) is nm.ndarray:
nadd = conn.shape[0]
idx = slice(last, last + nadd)
if edge_group is None:
edge_group = nm.arange(nadd) + last
else:
nadd = 1
idx = nm.array([last])
conn = nm.array(conn).reshape((1, 2))
if edge_group is None:
edge_group = idx
self.edges[idx, :] = conn
self.edge_flag[idx] = True
# t_start0 = time.time()
# self.edge_flag_idx.extend(list(range(idx.start, idx.stop)))
# self.stats["t split 082"] += time.time() - t_start0
self.edge_dir[idx] = edge_direction
self.edge_group[idx] = edge_group
# TODO change this just to array of low_or_high_resolution
if edge_low_or_high is not None and self._edge_weight_table is not None:
self.edges_weights[idx] = self._edge_weight_table[
edge_low_or_high, edge_direction
]
self.lastedge += nadd
self.nedges += nadd | python | def add_edges(self, conn, edge_direction, edge_group=None, edge_low_or_high=None):
"""
Add new edges at the end of the list.
:param edge_direction: direction flag
:param edge_group: describes group of edges from same low super node and same direction
:param edge_low_or_high: zero for low to low resolution, one for high to high or high to low resolution.
It is used to set weight from weight table.
"""
last = self.lastedge
if type(conn) is nm.ndarray:
nadd = conn.shape[0]
idx = slice(last, last + nadd)
if edge_group is None:
edge_group = nm.arange(nadd) + last
else:
nadd = 1
idx = nm.array([last])
conn = nm.array(conn).reshape((1, 2))
if edge_group is None:
edge_group = idx
self.edges[idx, :] = conn
self.edge_flag[idx] = True
# t_start0 = time.time()
# self.edge_flag_idx.extend(list(range(idx.start, idx.stop)))
# self.stats["t split 082"] += time.time() - t_start0
self.edge_dir[idx] = edge_direction
self.edge_group[idx] = edge_group
# TODO change this just to array of low_or_high_resolution
if edge_low_or_high is not None and self._edge_weight_table is not None:
self.edges_weights[idx] = self._edge_weight_table[
edge_low_or_high, edge_direction
]
self.lastedge += nadd
self.nedges += nadd | [
"def",
"add_edges",
"(",
"self",
",",
"conn",
",",
"edge_direction",
",",
"edge_group",
"=",
"None",
",",
"edge_low_or_high",
"=",
"None",
")",
":",
"last",
"=",
"self",
".",
"lastedge",
"if",
"type",
"(",
"conn",
")",
"is",
"nm",
".",
"ndarray",
":",
"nadd",
"=",
"conn",
".",
"shape",
"[",
"0",
"]",
"idx",
"=",
"slice",
"(",
"last",
",",
"last",
"+",
"nadd",
")",
"if",
"edge_group",
"is",
"None",
":",
"edge_group",
"=",
"nm",
".",
"arange",
"(",
"nadd",
")",
"+",
"last",
"else",
":",
"nadd",
"=",
"1",
"idx",
"=",
"nm",
".",
"array",
"(",
"[",
"last",
"]",
")",
"conn",
"=",
"nm",
".",
"array",
"(",
"conn",
")",
".",
"reshape",
"(",
"(",
"1",
",",
"2",
")",
")",
"if",
"edge_group",
"is",
"None",
":",
"edge_group",
"=",
"idx",
"self",
".",
"edges",
"[",
"idx",
",",
":",
"]",
"=",
"conn",
"self",
".",
"edge_flag",
"[",
"idx",
"]",
"=",
"True",
"# t_start0 = time.time()",
"# self.edge_flag_idx.extend(list(range(idx.start, idx.stop)))",
"# self.stats[\"t split 082\"] += time.time() - t_start0",
"self",
".",
"edge_dir",
"[",
"idx",
"]",
"=",
"edge_direction",
"self",
".",
"edge_group",
"[",
"idx",
"]",
"=",
"edge_group",
"# TODO change this just to array of low_or_high_resolution",
"if",
"edge_low_or_high",
"is",
"not",
"None",
"and",
"self",
".",
"_edge_weight_table",
"is",
"not",
"None",
":",
"self",
".",
"edges_weights",
"[",
"idx",
"]",
"=",
"self",
".",
"_edge_weight_table",
"[",
"edge_low_or_high",
",",
"edge_direction",
"]",
"self",
".",
"lastedge",
"+=",
"nadd",
"self",
".",
"nedges",
"+=",
"nadd"
] | Add new edges at the end of the list.
:param edge_direction: direction flag
:param edge_group: describes group of edges from same low super node and same direction
:param edge_low_or_high: zero for low to low resolution, one for high to high or high to low resolution.
It is used to set weight from weight table. | [
"Add",
"new",
"edges",
"at",
"the",
"end",
"of",
"the",
"list",
".",
":",
"param",
"edge_direction",
":",
"direction",
"flag",
":",
"param",
"edge_group",
":",
"describes",
"group",
"of",
"edges",
"from",
"same",
"low",
"super",
"node",
"and",
"same",
"direction",
":",
"param",
"edge_low_or_high",
":",
"zero",
"for",
"low",
"to",
"low",
"resolution",
"one",
"for",
"high",
"to",
"high",
"or",
"high",
"to",
"low",
"resolution",
".",
"It",
"is",
"used",
"to",
"set",
"weight",
"from",
"weight",
"table",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L173-L207 |
mjirik/imcut | imcut/graph.py | Graph._edge_group_substitution | def _edge_group_substitution(
        self, ndid, nsplit, idxs, sr_tab, ndoffset, ed_remove, into_or_from
    ):
        """
        Reconnect edges.
        :param ndid: id of low resolution edges
        :param nsplit: number of split
        :param idxs: indexes of low resolution
        :param sr_tab:
        :param ndoffset:
        :param ed_remove:
        :param into_or_from: if zero, connection of input edges is done. If one, connection of output edges
        is performed.
        :return:
        """
        # this is useful for type(idxs) == np.ndarray
        eidxs = idxs[nm.where(self.edges[idxs, 1 - into_or_from] == ndid)[0]]
        # selected_edges = self.edges[idxs, 1 - into_or_from]
        # selected_edges == ndid
        # whre = nm.where(self.edges[idxs, 1 - into_or_from] == ndid)
        # whre0 = (nm.where(self.edges[idxs, 1 - into_or_from] == ndid) == ndid)[0]
        # eidxs = [idxs[i] for i in idxs]
        for igrp in self.edges_by_group(eidxs):
            if igrp.shape[0] > 1:
                # high resolution block to high resolution block
                # all directions are the same
                directions = self.edge_dir[igrp[0]]
                edge_indexes = sr_tab[directions, :].T.flatten() + ndoffset
                # debug code
                # if len(igrp) != len(edge_indexes):
                # print("Problem ")
                self.edges[igrp, 1] = edge_indexes
                if self._edge_weight_table is not None:
                    self.edges_weights[igrp] = self._edge_weight_table[1, directions]
            else:
                # low res block to hi res block, if into_or_from is set to 0
                # hig res block to low res block, if into_or_from is set to 1
                ed_remove.append(igrp[0])
                # number of new edges is equal to number of pixels on one side of the box (in 2D and D too)
                nnewed = np.power(nsplit, self.data.ndim - 1)
                muleidxs = nm.tile(igrp, nnewed)
                # copy the low-res edge multipletime
                newed = self.edges[muleidxs, :]
                neweddir = self.edge_dir[muleidxs]
                local_node_ids = sr_tab[
                    self.edge_dir[igrp] + self.data.ndim * into_or_from, :
                ].T.flatten()
                # first or second (the actual) node id is substitued by new node indexes
                newed[:, 1 - into_or_from] = local_node_ids + ndoffset
                if self._edge_weight_table is not None:
                    self.add_edges(
                        newed, neweddir, self.edge_group[igrp], edge_low_or_high=1
                    )
                else:
                    self.add_edges(
                        newed, neweddir, self.edge_group[igrp], edge_low_or_high=None
                    )
        return ed_remove | python | def _edge_group_substitution(
        self, ndid, nsplit, idxs, sr_tab, ndoffset, ed_remove, into_or_from
    ):
        """
        Reconnect edges.
        :param ndid: id of low resolution edges
        :param nsplit: number of split
        :param idxs: indexes of low resolution
        :param sr_tab:
        :param ndoffset:
        :param ed_remove:
        :param into_or_from: if zero, connection of input edges is done. If one, connection of output edges
        is performed.
        :return:
        """
        # this is useful for type(idxs) == np.ndarray
        eidxs = idxs[nm.where(self.edges[idxs, 1 - into_or_from] == ndid)[0]]
        # selected_edges = self.edges[idxs, 1 - into_or_from]
        # selected_edges == ndid
        # whre = nm.where(self.edges[idxs, 1 - into_or_from] == ndid)
        # whre0 = (nm.where(self.edges[idxs, 1 - into_or_from] == ndid) == ndid)[0]
        # eidxs = [idxs[i] for i in idxs]
        for igrp in self.edges_by_group(eidxs):
            if igrp.shape[0] > 1:
                # high resolution block to high resolution block
                # all directions are the same
                directions = self.edge_dir[igrp[0]]
                edge_indexes = sr_tab[directions, :].T.flatten() + ndoffset
                # debug code
                # if len(igrp) != len(edge_indexes):
                # print("Problem ")
                self.edges[igrp, 1] = edge_indexes
                if self._edge_weight_table is not None:
                    self.edges_weights[igrp] = self._edge_weight_table[1, directions]
            else:
                # low res block to hi res block, if into_or_from is set to 0
                # hig res block to low res block, if into_or_from is set to 1
                ed_remove.append(igrp[0])
                # number of new edges is equal to number of pixels on one side of the box (in 2D and D too)
                nnewed = np.power(nsplit, self.data.ndim - 1)
                muleidxs = nm.tile(igrp, nnewed)
                # copy the low-res edge multipletime
                newed = self.edges[muleidxs, :]
                neweddir = self.edge_dir[muleidxs]
                local_node_ids = sr_tab[
                    self.edge_dir[igrp] + self.data.ndim * into_or_from, :
                ].T.flatten()
                # first or second (the actual) node id is substitued by new node indexes
                newed[:, 1 - into_or_from] = local_node_ids + ndoffset
                if self._edge_weight_table is not None:
                    self.add_edges(
                        newed, neweddir, self.edge_group[igrp], edge_low_or_high=1
                    )
                else:
                    self.add_edges(
                        newed, neweddir, self.edge_group[igrp], edge_low_or_high=None
                    )
        return ed_remove | [
"def",
"_edge_group_substitution",
"(",
"self",
",",
"ndid",
",",
"nsplit",
",",
"idxs",
",",
"sr_tab",
",",
"ndoffset",
",",
"ed_remove",
",",
"into_or_from",
")",
":",
"# this is useful for type(idxs) == np.ndarray",
"eidxs",
"=",
"idxs",
"[",
"nm",
".",
"where",
"(",
"self",
".",
"edges",
"[",
"idxs",
",",
"1",
"-",
"into_or_from",
"]",
"==",
"ndid",
")",
"[",
"0",
"]",
"]",
"# selected_edges = self.edges[idxs, 1 - into_or_from]",
"# selected_edges == ndid",
"# whre = nm.where(self.edges[idxs, 1 - into_or_from] == ndid)",
"# whre0 = (nm.where(self.edges[idxs, 1 - into_or_from] == ndid) == ndid)[0]",
"# eidxs = [idxs[i] for i in idxs]",
"for",
"igrp",
"in",
"self",
".",
"edges_by_group",
"(",
"eidxs",
")",
":",
"if",
"igrp",
".",
"shape",
"[",
"0",
"]",
">",
"1",
":",
"# high resolution block to high resolution block",
"# all directions are the same",
"directions",
"=",
"self",
".",
"edge_dir",
"[",
"igrp",
"[",
"0",
"]",
"]",
"edge_indexes",
"=",
"sr_tab",
"[",
"directions",
",",
":",
"]",
".",
"T",
".",
"flatten",
"(",
")",
"+",
"ndoffset",
"# debug code",
"# if len(igrp) != len(edge_indexes):",
"# print(\"Problem \")",
"self",
".",
"edges",
"[",
"igrp",
",",
"1",
"]",
"=",
"edge_indexes",
"if",
"self",
".",
"_edge_weight_table",
"is",
"not",
"None",
":",
"self",
".",
"edges_weights",
"[",
"igrp",
"]",
"=",
"self",
".",
"_edge_weight_table",
"[",
"1",
",",
"directions",
"]",
"else",
":",
"# low res block to hi res block, if into_or_from is set to 0",
"# hig res block to low res block, if into_or_from is set to 1",
"ed_remove",
".",
"append",
"(",
"igrp",
"[",
"0",
"]",
")",
"# number of new edges is equal to number of pixels on one side of the box (in 2D and D too)",
"nnewed",
"=",
"np",
".",
"power",
"(",
"nsplit",
",",
"self",
".",
"data",
".",
"ndim",
"-",
"1",
")",
"muleidxs",
"=",
"nm",
".",
"tile",
"(",
"igrp",
",",
"nnewed",
")",
"# copy the low-res edge multipletime",
"newed",
"=",
"self",
".",
"edges",
"[",
"muleidxs",
",",
":",
"]",
"neweddir",
"=",
"self",
".",
"edge_dir",
"[",
"muleidxs",
"]",
"local_node_ids",
"=",
"sr_tab",
"[",
"self",
".",
"edge_dir",
"[",
"igrp",
"]",
"+",
"self",
".",
"data",
".",
"ndim",
"*",
"into_or_from",
",",
":",
"]",
".",
"T",
".",
"flatten",
"(",
")",
"# first or second (the actual) node id is substitued by new node indexes",
"newed",
"[",
":",
",",
"1",
"-",
"into_or_from",
"]",
"=",
"local_node_ids",
"+",
"ndoffset",
"if",
"self",
".",
"_edge_weight_table",
"is",
"not",
"None",
":",
"self",
".",
"add_edges",
"(",
"newed",
",",
"neweddir",
",",
"self",
".",
"edge_group",
"[",
"igrp",
"]",
",",
"edge_low_or_high",
"=",
"1",
")",
"else",
":",
"self",
".",
"add_edges",
"(",
"newed",
",",
"neweddir",
",",
"self",
".",
"edge_group",
"[",
"igrp",
"]",
",",
"edge_low_or_high",
"=",
"None",
")",
"return",
"ed_remove"
] | Reconnect edges.
:param ndid: id of low resolution edges
:param nsplit: number of split
:param idxs: indexes of low resolution
:param sr_tab:
:param ndoffset:
:param ed_remove:
:param into_or_from: if zero, connection of input edges is done. If one, connection of output edges
is performed.
:return: | [
"Reconnect",
"edges",
".",
":",
"param",
"ndid",
":",
"id",
"of",
"low",
"resolution",
"edges",
":",
"param",
"nsplit",
":",
"number",
"of",
"split",
":",
"param",
"idxs",
":",
"indexes",
"of",
"low",
"resolution",
":",
"param",
"sr_tab",
":",
":",
"param",
"ndoffset",
":",
":",
"param",
"ed_remove",
":",
":",
"param",
"into_or_from",
":",
"if",
"zero",
"connection",
"of",
"input",
"edges",
"is",
"done",
".",
"If",
"one",
"connection",
"of",
"output",
"edges",
"is",
"performed",
".",
":",
"return",
":"
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L264-L321 |
mjirik/imcut | imcut/graph.py | Graph.generate_base_grid | def generate_base_grid(self, vtk_filename=None):
"""
Run first step of algorithm. Next step is split_voxels
:param vtk_filename:
:return:
"""
nd, ed, ed_dir = self.gen_grid_fcn(self.data.shape, self.voxelsize)
self.add_nodes(nd)
self.add_edges(ed, ed_dir, edge_low_or_high=0)
if vtk_filename is not None:
self.write_vtk(vtk_filename) | python | def generate_base_grid(self, vtk_filename=None):
"""
Run first step of algorithm. Next step is split_voxels
:param vtk_filename:
:return:
"""
nd, ed, ed_dir = self.gen_grid_fcn(self.data.shape, self.voxelsize)
self.add_nodes(nd)
self.add_edges(ed, ed_dir, edge_low_or_high=0)
if vtk_filename is not None:
self.write_vtk(vtk_filename) | [
"def",
"generate_base_grid",
"(",
"self",
",",
"vtk_filename",
"=",
"None",
")",
":",
"nd",
",",
"ed",
",",
"ed_dir",
"=",
"self",
".",
"gen_grid_fcn",
"(",
"self",
".",
"data",
".",
"shape",
",",
"self",
".",
"voxelsize",
")",
"self",
".",
"add_nodes",
"(",
"nd",
")",
"self",
".",
"add_edges",
"(",
"ed",
",",
"ed_dir",
",",
"edge_low_or_high",
"=",
"0",
")",
"if",
"vtk_filename",
"is",
"not",
"None",
":",
"self",
".",
"write_vtk",
"(",
"vtk_filename",
")"
] | Run first step of algorithm. Next step is split_voxels
:param vtk_filename:
:return: | [
"Run",
"first",
"step",
"of",
"algorithm",
".",
"Next",
"step",
"is",
"split_voxels",
":",
"param",
"vtk_filename",
":",
":",
"return",
":"
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L404-L415 |
mjirik/imcut | imcut/graph.py | Graph.split_voxels | def split_voxels(self, vtk_filename=None):
"""
Second step of algorithm
:return:()
"""
self.cache = {}
self.stats["t graph 10"] = time.time() - self.start_time
self.msi = MultiscaleArray(self.data.shape, block_size=self.nsplit)
# old implementation
# idxs = nm.where(self.data)
# nr, nc = self.data.shape
# for k, (ir, ic) in enumerate(zip(*idxs)):
# ndid = ic + ir * nc
# self.split_voxel(ndid, self.nsplit)
# new_implementation
# for ndid in np.flatnonzero(self.data):
# self.split_voxel(ndid, self.nsplit)
# even newer implementation
self.stats["t graph 11"] = time.time() - self.start_time
for ndid, val in enumerate(self.data.ravel()):
t_split_start = time.time()
if val == 0:
if self.compute_msindex:
self.msi.set_block_lowres(ndid, ndid)
self.stats["t graph low"] += time.time() - t_split_start
else:
self.split_voxel(ndid)
self.stats["t graph high"] += time.time() - t_split_start
self.stats["t graph 13"] = time.time() - self.start_time
self.finish()
if vtk_filename is not None:
self.write_vtk(vtk_filename)
self.stats["t graph 14"] = time.time() - self.start_time | python | def split_voxels(self, vtk_filename=None):
"""
Second step of algorithm
:return:()
"""
self.cache = {}
self.stats["t graph 10"] = time.time() - self.start_time
self.msi = MultiscaleArray(self.data.shape, block_size=self.nsplit)
# old implementation
# idxs = nm.where(self.data)
# nr, nc = self.data.shape
# for k, (ir, ic) in enumerate(zip(*idxs)):
# ndid = ic + ir * nc
# self.split_voxel(ndid, self.nsplit)
# new_implementation
# for ndid in np.flatnonzero(self.data):
# self.split_voxel(ndid, self.nsplit)
# even newer implementation
self.stats["t graph 11"] = time.time() - self.start_time
for ndid, val in enumerate(self.data.ravel()):
t_split_start = time.time()
if val == 0:
if self.compute_msindex:
self.msi.set_block_lowres(ndid, ndid)
self.stats["t graph low"] += time.time() - t_split_start
else:
self.split_voxel(ndid)
self.stats["t graph high"] += time.time() - t_split_start
self.stats["t graph 13"] = time.time() - self.start_time
self.finish()
if vtk_filename is not None:
self.write_vtk(vtk_filename)
self.stats["t graph 14"] = time.time() - self.start_time | [
"def",
"split_voxels",
"(",
"self",
",",
"vtk_filename",
"=",
"None",
")",
":",
"self",
".",
"cache",
"=",
"{",
"}",
"self",
".",
"stats",
"[",
"\"t graph 10\"",
"]",
"=",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"start_time",
"self",
".",
"msi",
"=",
"MultiscaleArray",
"(",
"self",
".",
"data",
".",
"shape",
",",
"block_size",
"=",
"self",
".",
"nsplit",
")",
"# old implementation",
"# idxs = nm.where(self.data)",
"# nr, nc = self.data.shape",
"# for k, (ir, ic) in enumerate(zip(*idxs)):",
"# ndid = ic + ir * nc",
"# self.split_voxel(ndid, self.nsplit)",
"# new_implementation",
"# for ndid in np.flatnonzero(self.data):",
"# self.split_voxel(ndid, self.nsplit)",
"# even newer implementation",
"self",
".",
"stats",
"[",
"\"t graph 11\"",
"]",
"=",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"start_time",
"for",
"ndid",
",",
"val",
"in",
"enumerate",
"(",
"self",
".",
"data",
".",
"ravel",
"(",
")",
")",
":",
"t_split_start",
"=",
"time",
".",
"time",
"(",
")",
"if",
"val",
"==",
"0",
":",
"if",
"self",
".",
"compute_msindex",
":",
"self",
".",
"msi",
".",
"set_block_lowres",
"(",
"ndid",
",",
"ndid",
")",
"self",
".",
"stats",
"[",
"\"t graph low\"",
"]",
"+=",
"time",
".",
"time",
"(",
")",
"-",
"t_split_start",
"else",
":",
"self",
".",
"split_voxel",
"(",
"ndid",
")",
"self",
".",
"stats",
"[",
"\"t graph high\"",
"]",
"+=",
"time",
".",
"time",
"(",
")",
"-",
"t_split_start",
"self",
".",
"stats",
"[",
"\"t graph 13\"",
"]",
"=",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"start_time",
"self",
".",
"finish",
"(",
")",
"if",
"vtk_filename",
"is",
"not",
"None",
":",
"self",
".",
"write_vtk",
"(",
"vtk_filename",
")",
"self",
".",
"stats",
"[",
"\"t graph 14\"",
"]",
"=",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"start_time"
] | Second step of algorithm
:return:() | [
"Second",
"step",
"of",
"algorithm",
":",
"return",
":",
"()"
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L417-L453 |
mjirik/imcut | imcut/graph.py | MultiscaleArray.mul_block | def mul_block(self, index, val):
"""Multiply values in block"""
self._prepare_cache_slice(index)
self.msinds[self.cache_slice] *= val | python | def mul_block(self, index, val):
"""Multiply values in block"""
self._prepare_cache_slice(index)
self.msinds[self.cache_slice] *= val | [
"def",
"mul_block",
"(",
"self",
",",
"index",
",",
"val",
")",
":",
"self",
".",
"_prepare_cache_slice",
"(",
"index",
")",
"self",
".",
"msinds",
"[",
"self",
".",
"cache_slice",
"]",
"*=",
"val"
] | Multiply values in block | [
"Multiply",
"values",
"in",
"block"
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/graph.py#L714-L717 |
mjirik/imcut | imcut/features.py | select_from_fv_by_seeds | def select_from_fv_by_seeds(fv, seeds, unique_cls):
"""
Tool to make simple feature functions take features from feature array by seeds.
:param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number
of features
:param seeds: ndarray with seeds. Does not to be linear.
:param unique_cls: number of used seeds clases. Like [1, 2]
:return: fv_selection, seeds_selection - selection from feature vector and selection from seeds
"""
logger.debug("seeds" + str(seeds))
# fvlin = fv.reshape(-1, int(fv.size/seeds.size))
expected_shape = [seeds.size, int(fv.size/seeds.size)]
if fv.shape[0] != expected_shape[0] or fv.shape[1] != expected_shape[1]:
raise AssertionError("Wrong shape of input feature vector array fv")
# sd = seeds.reshape(-1, 1)
selection = np.in1d(seeds, unique_cls)
fv_selection = fv[selection]
seeds_selection = seeds.flatten()[selection]
# sd = sd[]
return fv_selection, seeds_selection | python | def select_from_fv_by_seeds(fv, seeds, unique_cls):
"""
Tool to make simple feature functions take features from feature array by seeds.
:param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number
of features
:param seeds: ndarray with seeds. Does not to be linear.
:param unique_cls: number of used seeds clases. Like [1, 2]
:return: fv_selection, seeds_selection - selection from feature vector and selection from seeds
"""
logger.debug("seeds" + str(seeds))
# fvlin = fv.reshape(-1, int(fv.size/seeds.size))
expected_shape = [seeds.size, int(fv.size/seeds.size)]
if fv.shape[0] != expected_shape[0] or fv.shape[1] != expected_shape[1]:
raise AssertionError("Wrong shape of input feature vector array fv")
# sd = seeds.reshape(-1, 1)
selection = np.in1d(seeds, unique_cls)
fv_selection = fv[selection]
seeds_selection = seeds.flatten()[selection]
# sd = sd[]
return fv_selection, seeds_selection | [
"def",
"select_from_fv_by_seeds",
"(",
"fv",
",",
"seeds",
",",
"unique_cls",
")",
":",
"logger",
".",
"debug",
"(",
"\"seeds\"",
"+",
"str",
"(",
"seeds",
")",
")",
"# fvlin = fv.reshape(-1, int(fv.size/seeds.size))",
"expected_shape",
"=",
"[",
"seeds",
".",
"size",
",",
"int",
"(",
"fv",
".",
"size",
"/",
"seeds",
".",
"size",
")",
"]",
"if",
"fv",
".",
"shape",
"[",
"0",
"]",
"!=",
"expected_shape",
"[",
"0",
"]",
"or",
"fv",
".",
"shape",
"[",
"1",
"]",
"!=",
"expected_shape",
"[",
"1",
"]",
":",
"raise",
"AssertionError",
"(",
"\"Wrong shape of input feature vector array fv\"",
")",
"# sd = seeds.reshape(-1, 1)",
"selection",
"=",
"np",
".",
"in1d",
"(",
"seeds",
",",
"unique_cls",
")",
"fv_selection",
"=",
"fv",
"[",
"selection",
"]",
"seeds_selection",
"=",
"seeds",
".",
"flatten",
"(",
")",
"[",
"selection",
"]",
"# sd = sd[]",
"return",
"fv_selection",
",",
"seeds_selection"
] | Tool to make simple feature functions take features from feature array by seeds.
:param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number
of features
:param seeds: ndarray with seeds. Does not to be linear.
:param unique_cls: number of used seeds clases. Like [1, 2]
:return: fv_selection, seeds_selection - selection from feature vector and selection from seeds | [
"Tool",
"to",
"make",
"simple",
"feature",
"functions",
"take",
"features",
"from",
"feature",
"array",
"by",
"seeds",
".",
":",
"param",
"fv",
":",
"ndarray",
"with",
"lineariezed",
"feature",
".",
"It",
"s",
"shape",
"is",
"MxN",
"where",
"M",
"is",
"number",
"of",
"image",
"pixels",
"and",
"N",
"is",
"number",
"of",
"features",
":",
"param",
"seeds",
":",
"ndarray",
"with",
"seeds",
".",
"Does",
"not",
"to",
"be",
"linear",
".",
":",
"param",
"unique_cls",
":",
"number",
"of",
"used",
"seeds",
"clases",
".",
"Like",
"[",
"1",
"2",
"]",
":",
"return",
":",
"fv_selection",
"seeds_selection",
"-",
"selection",
"from",
"feature",
"vector",
"and",
"selection",
"from",
"seeds"
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/features.py#L39-L58 |
mjirik/imcut | imcut/features.py | return_fv_by_seeds | def return_fv_by_seeds(fv, seeds=None, unique_cls=None):
"""
Return features selected by seeds and unique_cls or selection from features and corresponding seed classes.
:param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number
of features
:param seeds: ndarray with seeds. Does not to be linear.
:param unique_cls: number of used seeds clases. Like [1, 2]
:return: fv, sd - selection from feature vector and selection from seeds or just fv for whole image
"""
if seeds is not None:
if unique_cls is not None:
return select_from_fv_by_seeds(fv, seeds, unique_cls)
else:
raise AssertionError("Input unique_cls has to be not None if seeds is not None.")
else:
return fv | python | def return_fv_by_seeds(fv, seeds=None, unique_cls=None):
"""
Return features selected by seeds and unique_cls or selection from features and corresponding seed classes.
:param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number
of features
:param seeds: ndarray with seeds. Does not to be linear.
:param unique_cls: number of used seeds clases. Like [1, 2]
:return: fv, sd - selection from feature vector and selection from seeds or just fv for whole image
"""
if seeds is not None:
if unique_cls is not None:
return select_from_fv_by_seeds(fv, seeds, unique_cls)
else:
raise AssertionError("Input unique_cls has to be not None if seeds is not None.")
else:
return fv | [
"def",
"return_fv_by_seeds",
"(",
"fv",
",",
"seeds",
"=",
"None",
",",
"unique_cls",
"=",
"None",
")",
":",
"if",
"seeds",
"is",
"not",
"None",
":",
"if",
"unique_cls",
"is",
"not",
"None",
":",
"return",
"select_from_fv_by_seeds",
"(",
"fv",
",",
"seeds",
",",
"unique_cls",
")",
"else",
":",
"raise",
"AssertionError",
"(",
"\"Input unique_cls has to be not None if seeds is not None.\"",
")",
"else",
":",
"return",
"fv"
] | Return features selected by seeds and unique_cls or selection from features and corresponding seed classes.
:param fv: ndarray with lineariezed feature. It's shape is MxN, where M is number of image pixels and N is number
of features
:param seeds: ndarray with seeds. Does not to be linear.
:param unique_cls: number of used seeds clases. Like [1, 2]
:return: fv, sd - selection from feature vector and selection from seeds or just fv for whole image | [
"Return",
"features",
"selected",
"by",
"seeds",
"and",
"unique_cls",
"or",
"selection",
"from",
"features",
"and",
"corresponding",
"seed",
"classes",
"."
] | train | https://github.com/mjirik/imcut/blob/1b38e7cd18a7a38fe683c1cabe1222fe5fa03aa3/imcut/features.py#L60-L76 |
chitamoor/Rester | rester/manifest.py | Variables.expand | def expand(self, expression):
"""Expands logical constructions."""
self.logger.debug("expand : expression %s", str(expression))
if not is_string(expression):
return expression
result = self._pattern.sub(lambda var: str(self._variables[var.group(1)]), expression)
result = result.strip()
self.logger.debug('expand : %s - result : %s', expression, result)
if is_number(result):
if result.isdigit():
self.logger.debug(' expand is integer !!!')
return int(result)
else:
self.logger.debug(' expand is float !!!')
return float(result)
return result | python | def expand(self, expression):
"""Expands logical constructions."""
self.logger.debug("expand : expression %s", str(expression))
if not is_string(expression):
return expression
result = self._pattern.sub(lambda var: str(self._variables[var.group(1)]), expression)
result = result.strip()
self.logger.debug('expand : %s - result : %s', expression, result)
if is_number(result):
if result.isdigit():
self.logger.debug(' expand is integer !!!')
return int(result)
else:
self.logger.debug(' expand is float !!!')
return float(result)
return result | [
"def",
"expand",
"(",
"self",
",",
"expression",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"expand : expression %s\"",
",",
"str",
"(",
"expression",
")",
")",
"if",
"not",
"is_string",
"(",
"expression",
")",
":",
"return",
"expression",
"result",
"=",
"self",
".",
"_pattern",
".",
"sub",
"(",
"lambda",
"var",
":",
"str",
"(",
"self",
".",
"_variables",
"[",
"var",
".",
"group",
"(",
"1",
")",
"]",
")",
",",
"expression",
")",
"result",
"=",
"result",
".",
"strip",
"(",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'expand : %s - result : %s'",
",",
"expression",
",",
"result",
")",
"if",
"is_number",
"(",
"result",
")",
":",
"if",
"result",
".",
"isdigit",
"(",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"' expand is integer !!!'",
")",
"return",
"int",
"(",
"result",
")",
"else",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"' expand is float !!!'",
")",
"return",
"float",
"(",
"result",
")",
"return",
"result"
] | Expands logical constructions. | [
"Expands",
"logical",
"constructions",
"."
] | train | https://github.com/chitamoor/Rester/blob/1865b17f70b7c597aeadde2d0907cb1b59f10c0f/rester/manifest.py#L34-L52 |
disqus/gutter | gutter/client/__init__.py | get_gutter_client | def get_gutter_client(
        alias='default',
        cache=CLIENT_CACHE,
        **kwargs
):
    """
    Creates gutter clients and memoizes them in a registry for future quick access.
    Args:
        alias (str or None): Name of the client. Used for caching.
            If name is falsy then do not use the cache.
        cache (dict): cache to store gutter managers in.
        **kwargs: kwargs to be passed the Manger class.
    Returns (Manager):
        A gutter client.
    """
    from gutter.client.models import Manager
    if not alias:
        return Manager(**kwargs)
    elif alias not in cache:
        cache[alias] = Manager(**kwargs)
    return cache[alias] | python | def get_gutter_client(
        alias='default',
        cache=CLIENT_CACHE,
        **kwargs
):
    """
    Creates gutter clients and memoizes them in a registry for future quick access.
    Args:
        alias (str or None): Name of the client. Used for caching.
            If name is falsy then do not use the cache.
        cache (dict): cache to store gutter managers in.
        **kwargs: kwargs to be passed the Manger class.
    Returns (Manager):
        A gutter client.
    """
    from gutter.client.models import Manager
    if not alias:
        return Manager(**kwargs)
    elif alias not in cache:
        cache[alias] = Manager(**kwargs)
    return cache[alias] | [
"def",
"get_gutter_client",
"(",
"alias",
"=",
"'default'",
",",
"cache",
"=",
"CLIENT_CACHE",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"gutter",
".",
"client",
".",
"models",
"import",
"Manager",
"if",
"not",
"alias",
":",
"return",
"Manager",
"(",
"*",
"*",
"kwargs",
")",
"elif",
"alias",
"not",
"in",
"cache",
":",
"cache",
"[",
"alias",
"]",
"=",
"Manager",
"(",
"*",
"*",
"kwargs",
")",
"return",
"cache",
"[",
"alias",
"]"
] | Creates gutter clients and memoizes them in a registry for future quick access.
Args:
alias (str or None): Name of the client. Used for caching.
If name is falsy then do not use the cache.
cache (dict): cache to store gutter managers in.
**kwargs: kwargs to be passed the Manger class.
Returns (Manager):
A gutter client. | [
"Creates",
"gutter",
"clients",
"and",
"memoizes",
"them",
"in",
"a",
"registry",
"for",
"future",
"quick",
"access",
"."
] | train | https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/__init__.py#L17-L42 |
disqus/gutter | gutter/client/operators/misc.py | PercentRange._modulo | def _modulo(self, decimal_argument):
"""
The mod operator is prone to floating point errors, so use decimal.
101.1 % 100
>>> 1.0999999999999943
decimal_context.divmod(Decimal('100.1'), 100)
>>> (Decimal('1'), Decimal('0.1'))
"""
_times, remainder = self._context.divmod(decimal_argument, 100)
# match the builtin % behavior by adding the N to the result if negative
return remainder if remainder >= 0 else remainder + 100 | python | def _modulo(self, decimal_argument):
"""
The mod operator is prone to floating point errors, so use decimal.
101.1 % 100
>>> 1.0999999999999943
decimal_context.divmod(Decimal('100.1'), 100)
>>> (Decimal('1'), Decimal('0.1'))
"""
_times, remainder = self._context.divmod(decimal_argument, 100)
# match the builtin % behavior by adding the N to the result if negative
return remainder if remainder >= 0 else remainder + 100 | [
"def",
"_modulo",
"(",
"self",
",",
"decimal_argument",
")",
":",
"_times",
",",
"remainder",
"=",
"self",
".",
"_context",
".",
"divmod",
"(",
"decimal_argument",
",",
"100",
")",
"# match the builtin % behavior by adding the N to the result if negative",
"return",
"remainder",
"if",
"remainder",
">=",
"0",
"else",
"remainder",
"+",
"100"
] | The mod operator is prone to floating point errors, so use decimal.
101.1 % 100
>>> 1.0999999999999943
decimal_context.divmod(Decimal('100.1'), 100)
>>> (Decimal('1'), Decimal('0.1')) | [
"The",
"mod",
"operator",
"is",
"prone",
"to",
"floating",
"point",
"errors",
"so",
"use",
"decimal",
"."
] | train | https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/operators/misc.py#L16-L29 |
disqus/gutter | gutter/client/models.py | Switch.enabled_for | def enabled_for(self, inpt):
"""
Checks to see if this switch is enabled for the provided input.
If ``compounded``, all switch conditions must be ``True`` for the switch
to be enabled. Otherwise, *any* condition needs to be ``True`` for the
switch to be enabled.
The switch state is then checked to see if it is ``GLOBAL`` or
``DISABLED``. If it is not, then the switch is ``SELECTIVE`` and each
condition is checked.
Keyword Arguments:
inpt -- An instance of the ``Input`` class.
"""
signals.switch_checked.call(self)
signal_decorated = partial(self.__signal_and_return, inpt)
if self.state is self.states.GLOBAL:
return signal_decorated(True)
elif self.state is self.states.DISABLED:
return signal_decorated(False)
conditions_dict = ConditionsDict.from_conditions_list(self.conditions)
conditions = conditions_dict.get_by_input(inpt)
if conditions:
result = self.__enabled_func(
cond.call(inpt)
for cond
in conditions
if cond.argument(inpt).applies
)
else:
result = None
return signal_decorated(result) | python | def enabled_for(self, inpt):
"""
Checks to see if this switch is enabled for the provided input.
If ``compounded``, all switch conditions must be ``True`` for the switch
to be enabled. Otherwise, *any* condition needs to be ``True`` for the
switch to be enabled.
The switch state is then checked to see if it is ``GLOBAL`` or
``DISABLED``. If it is not, then the switch is ``SELECTIVE`` and each
condition is checked.
Keyword Arguments:
inpt -- An instance of the ``Input`` class.
"""
signals.switch_checked.call(self)
signal_decorated = partial(self.__signal_and_return, inpt)
if self.state is self.states.GLOBAL:
return signal_decorated(True)
elif self.state is self.states.DISABLED:
return signal_decorated(False)
conditions_dict = ConditionsDict.from_conditions_list(self.conditions)
conditions = conditions_dict.get_by_input(inpt)
if conditions:
result = self.__enabled_func(
cond.call(inpt)
for cond
in conditions
if cond.argument(inpt).applies
)
else:
result = None
return signal_decorated(result) | [
"def",
"enabled_for",
"(",
"self",
",",
"inpt",
")",
":",
"signals",
".",
"switch_checked",
".",
"call",
"(",
"self",
")",
"signal_decorated",
"=",
"partial",
"(",
"self",
".",
"__signal_and_return",
",",
"inpt",
")",
"if",
"self",
".",
"state",
"is",
"self",
".",
"states",
".",
"GLOBAL",
":",
"return",
"signal_decorated",
"(",
"True",
")",
"elif",
"self",
".",
"state",
"is",
"self",
".",
"states",
".",
"DISABLED",
":",
"return",
"signal_decorated",
"(",
"False",
")",
"conditions_dict",
"=",
"ConditionsDict",
".",
"from_conditions_list",
"(",
"self",
".",
"conditions",
")",
"conditions",
"=",
"conditions_dict",
".",
"get_by_input",
"(",
"inpt",
")",
"if",
"conditions",
":",
"result",
"=",
"self",
".",
"__enabled_func",
"(",
"cond",
".",
"call",
"(",
"inpt",
")",
"for",
"cond",
"in",
"conditions",
"if",
"cond",
".",
"argument",
"(",
"inpt",
")",
".",
"applies",
")",
"else",
":",
"result",
"=",
"None",
"return",
"signal_decorated",
"(",
"result",
")"
] | Checks to see if this switch is enabled for the provided input.
If ``compounded``, all switch conditions must be ``True`` for the switch
to be enabled. Otherwise, *any* condition needs to be ``True`` for the
switch to be enabled.
The switch state is then checked to see if it is ``GLOBAL`` or
``DISABLED``. If it is not, then the switch is ``SELECTIVE`` and each
condition is checked.
Keyword Arguments:
inpt -- An instance of the ``Input`` class. | [
"Checks",
"to",
"see",
"if",
"this",
"switch",
"is",
"enabled",
"for",
"the",
"provided",
"input",
"."
] | train | https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/models.py#L150-L187 |
disqus/gutter | gutter/client/models.py | Condition.call | def call(self, inpt):
"""
Returns if the condition applies to the ``inpt``.
If the class ``inpt`` is an instance of is not the same class as the
condition's own ``argument``, then ``False`` is returned. This also
applies to the ``NONE`` input.
Otherwise, ``argument`` is called, with ``inpt`` as the instance and
the value is compared to the ``operator`` and the Value is returned. If
the condition is ``negative``, then then ``not`` the value is returned.
Keyword Arguments:
inpt -- An instance of the ``Input`` class.
"""
if inpt is Manager.NONE_INPUT:
return False
# Call (construct) the argument with the input object
argument_instance = self.argument(inpt)
if not argument_instance.applies:
return False
application = self.__apply(argument_instance, inpt)
if self.negative:
application = not application
return application | python | def call(self, inpt):
"""
Returns if the condition applies to the ``inpt``.
If the class ``inpt`` is an instance of is not the same class as the
condition's own ``argument``, then ``False`` is returned. This also
applies to the ``NONE`` input.
Otherwise, ``argument`` is called, with ``inpt`` as the instance and
the value is compared to the ``operator`` and the Value is returned. If
the condition is ``negative``, then then ``not`` the value is returned.
Keyword Arguments:
inpt -- An instance of the ``Input`` class.
"""
if inpt is Manager.NONE_INPUT:
return False
# Call (construct) the argument with the input object
argument_instance = self.argument(inpt)
if not argument_instance.applies:
return False
application = self.__apply(argument_instance, inpt)
if self.negative:
application = not application
return application | [
"def",
"call",
"(",
"self",
",",
"inpt",
")",
":",
"if",
"inpt",
"is",
"Manager",
".",
"NONE_INPUT",
":",
"return",
"False",
"# Call (construct) the argument with the input object",
"argument_instance",
"=",
"self",
".",
"argument",
"(",
"inpt",
")",
"if",
"not",
"argument_instance",
".",
"applies",
":",
"return",
"False",
"application",
"=",
"self",
".",
"__apply",
"(",
"argument_instance",
",",
"inpt",
")",
"if",
"self",
".",
"negative",
":",
"application",
"=",
"not",
"application",
"return",
"application"
] | Returns if the condition applies to the ``inpt``.
If the class ``inpt`` is an instance of is not the same class as the
condition's own ``argument``, then ``False`` is returned. This also
applies to the ``NONE`` input.
Otherwise, ``argument`` is called, with ``inpt`` as the instance and
the value is compared to the ``operator`` and the Value is returned. If
the condition is ``negative``, then then ``not`` the value is returned.
Keyword Arguments:
inpt -- An instance of the ``Input`` class. | [
"Returns",
"if",
"the",
"condition",
"applies",
"to",
"the",
"inpt",
"."
] | train | https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/models.py#L333-L362 |
disqus/gutter | gutter/client/models.py | Manager.switches | def switches(self):
"""
List of all switches currently registered.
"""
results = [
switch for name, switch in self.storage.iteritems()
if name.startswith(self.__joined_namespace)
]
return results | python | def switches(self):
"""
List of all switches currently registered.
"""
results = [
switch for name, switch in self.storage.iteritems()
if name.startswith(self.__joined_namespace)
]
return results | [
"def",
"switches",
"(",
"self",
")",
":",
"results",
"=",
"[",
"switch",
"for",
"name",
",",
"switch",
"in",
"self",
".",
"storage",
".",
"iteritems",
"(",
")",
"if",
"name",
".",
"startswith",
"(",
"self",
".",
"__joined_namespace",
")",
"]",
"return",
"results"
] | List of all switches currently registered. | [
"List",
"of",
"all",
"switches",
"currently",
"registered",
"."
] | train | https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/models.py#L438-L447 |
disqus/gutter | gutter/client/models.py | Manager.switch | def switch(self, name):
"""
Returns the switch with the provided ``name``.
If ``autocreate`` is set to ``True`` and no switch with that name
exists, a ``DISABLED`` switch will be with that name.
Keyword Arguments:
name -- A name of a switch.
"""
try:
switch = self.storage[self.__namespaced(name)]
except KeyError:
if not self.autocreate:
raise ValueError("No switch named '%s' registered in '%s'" % (name, self.namespace))
switch = self.__create_and_register_disabled_switch(name)
switch.manager = self
return switch | python | def switch(self, name):
"""
Returns the switch with the provided ``name``.
If ``autocreate`` is set to ``True`` and no switch with that name
exists, a ``DISABLED`` switch will be with that name.
Keyword Arguments:
name -- A name of a switch.
"""
try:
switch = self.storage[self.__namespaced(name)]
except KeyError:
if not self.autocreate:
raise ValueError("No switch named '%s' registered in '%s'" % (name, self.namespace))
switch = self.__create_and_register_disabled_switch(name)
switch.manager = self
return switch | [
"def",
"switch",
"(",
"self",
",",
"name",
")",
":",
"try",
":",
"switch",
"=",
"self",
".",
"storage",
"[",
"self",
".",
"__namespaced",
"(",
"name",
")",
"]",
"except",
"KeyError",
":",
"if",
"not",
"self",
".",
"autocreate",
":",
"raise",
"ValueError",
"(",
"\"No switch named '%s' registered in '%s'\"",
"%",
"(",
"name",
",",
"self",
".",
"namespace",
")",
")",
"switch",
"=",
"self",
".",
"__create_and_register_disabled_switch",
"(",
"name",
")",
"switch",
".",
"manager",
"=",
"self",
"return",
"switch"
] | Returns the switch with the provided ``name``.
If ``autocreate`` is set to ``True`` and no switch with that name
exists, a ``DISABLED`` switch will be with that name.
Keyword Arguments:
name -- A name of a switch. | [
"Returns",
"the",
"switch",
"with",
"the",
"provided",
"name",
"."
] | train | https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/models.py#L449-L468 |
disqus/gutter | gutter/client/models.py | Manager.register | def register(self, switch, signal=signals.switch_registered):
        '''
        Register a switch and persist it to the storage.
        '''
        if not switch.name:
            raise ValueError('Switch name cannot be blank')
        switch.manager = self
        self.__persist(switch)
        signal.call(switch) | python | def register(self, switch, signal=signals.switch_registered):
        '''
        Register a switch and persist it to the storage.
        '''
        if not switch.name:
            raise ValueError('Switch name cannot be blank')
        switch.manager = self
        self.__persist(switch)
        signal.call(switch) | [
"def",
"register",
"(",
"self",
",",
"switch",
",",
"signal",
"=",
"signals",
".",
"switch_registered",
")",
":",
"if",
"not",
"switch",
".",
"name",
":",
"raise",
"ValueError",
"(",
"'Switch name cannot be blank'",
")",
"switch",
".",
"manager",
"=",
"self",
"self",
".",
"__persist",
"(",
"switch",
")",
"signal",
".",
"call",
"(",
"switch",
")"
] | Register a switch and persist it to the storage. | [
"Register",
"a",
"switch",
"and",
"persist",
"it",
"to",
"the",
"storage",
"."
] | train | https://github.com/disqus/gutter/blob/d686fa3cd0551cacfc5630c8e7b5fa75e6dcfdf5/gutter/client/models.py#L479-L489 |
kaste/mockito-python | mockito/mockito.py | verify | def verify(obj, times=1, atleast=None, atmost=None, between=None,
           inorder=False):
    """Central interface to verify interactions.
    `verify` uses a fluent interface::
    verify(<obj>, times=2).<method_name>(<args>)
    `args` can be as concrete as necessary. Often a catch-all is enough,
    especially if you're working with strict mocks, bc they throw at call
    time on unwanted, unconfigured arguments::
    from mockito import ANY, ARGS, KWARGS
    when(manager).add_tasks(1, 2, 3)
    ...
    # no need to duplicate the specification; every other argument pattern
    # would have raised anyway.
    verify(manager).add_tasks(1, 2, 3) # duplicates `when`call
    verify(manager).add_tasks(*ARGS)
    verify(manager).add_tasks(...) # Py3
    verify(manager).add_tasks(Ellipsis) # Py2
    """
    if isinstance(obj, str):
        obj = get_obj(obj)
    verification_fn = _get_wanted_verification(
        times=times, atleast=atleast, atmost=atmost, between=between)
    if inorder:
        verification_fn = verification.InOrder(verification_fn)
    # FIXME?: Catch error if obj is neither a Mock nor a known stubbed obj
    theMock = _get_mock_or_raise(obj)
    class Verify(object):
        def __getattr__(self, method_name):
            return invocation.VerifiableInvocation(
                theMock, method_name, verification_fn)
    return Verify() | python | def verify(obj, times=1, atleast=None, atmost=None, between=None,
           inorder=False):
    """Central interface to verify interactions.
    `verify` uses a fluent interface::
    verify(<obj>, times=2).<method_name>(<args>)
    `args` can be as concrete as necessary. Often a catch-all is enough,
    especially if you're working with strict mocks, bc they throw at call
    time on unwanted, unconfigured arguments::
    from mockito import ANY, ARGS, KWARGS
    when(manager).add_tasks(1, 2, 3)
    ...
    # no need to duplicate the specification; every other argument pattern
    # would have raised anyway.
    verify(manager).add_tasks(1, 2, 3) # duplicates `when`call
    verify(manager).add_tasks(*ARGS)
    verify(manager).add_tasks(...) # Py3
    verify(manager).add_tasks(Ellipsis) # Py2
    """
    if isinstance(obj, str):
        obj = get_obj(obj)
    verification_fn = _get_wanted_verification(
        times=times, atleast=atleast, atmost=atmost, between=between)
    if inorder:
        verification_fn = verification.InOrder(verification_fn)
    # FIXME?: Catch error if obj is neither a Mock nor a known stubbed obj
    theMock = _get_mock_or_raise(obj)
    class Verify(object):
        def __getattr__(self, method_name):
            return invocation.VerifiableInvocation(
                theMock, method_name, verification_fn)
    return Verify() | [
"def",
"verify",
"(",
"obj",
",",
"times",
"=",
"1",
",",
"atleast",
"=",
"None",
",",
"atmost",
"=",
"None",
",",
"between",
"=",
"None",
",",
"inorder",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"str",
")",
":",
"obj",
"=",
"get_obj",
"(",
"obj",
")",
"verification_fn",
"=",
"_get_wanted_verification",
"(",
"times",
"=",
"times",
",",
"atleast",
"=",
"atleast",
",",
"atmost",
"=",
"atmost",
",",
"between",
"=",
"between",
")",
"if",
"inorder",
":",
"verification_fn",
"=",
"verification",
".",
"InOrder",
"(",
"verification_fn",
")",
"# FIXME?: Catch error if obj is neither a Mock nor a known stubbed obj",
"theMock",
"=",
"_get_mock_or_raise",
"(",
"obj",
")",
"class",
"Verify",
"(",
"object",
")",
":",
"def",
"__getattr__",
"(",
"self",
",",
"method_name",
")",
":",
"return",
"invocation",
".",
"VerifiableInvocation",
"(",
"theMock",
",",
"method_name",
",",
"verification_fn",
")",
"return",
"Verify",
"(",
")"
] | Central interface to verify interactions.
`verify` uses a fluent interface::
verify(<obj>, times=2).<method_name>(<args>)
`args` can be as concrete as necessary. Often a catch-all is enough,
especially if you're working with strict mocks, bc they throw at call
time on unwanted, unconfigured arguments::
from mockito import ANY, ARGS, KWARGS
when(manager).add_tasks(1, 2, 3)
...
# no need to duplicate the specification; every other argument pattern
# would have raised anyway.
verify(manager).add_tasks(1, 2, 3) # duplicates `when`call
verify(manager).add_tasks(*ARGS)
verify(manager).add_tasks(...) # Py3
verify(manager).add_tasks(Ellipsis) # Py2 | [
"Central",
"interface",
"to",
"verify",
"interactions",
"."
] | train | https://github.com/kaste/mockito-python/blob/d6b22b003f56ee5b156dbd9d8ba209faf35b6713/mockito/mockito.py#L100-L140 |
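
Each row of the preview above pairs a Python function's full source (`whole_func_string` / `func_code_string`) with its tokenized form (`func_code_tokens`) and the docstring used as its summary (`func_documentation_string`, `func_documentation_tokens`), alongside `repository_name`, `func_path_in_repository`, `func_name`, `split_name`, and `func_code_url`. A minimal loading sketch with the `datasets` library follows; the repository id `user/code-docstring-pairs` is a hypothetical placeholder (the preview does not state this dataset's actual Hub id), and only a `train` split appears above.

```python
from datasets import load_dataset

# "user/code-docstring-pairs" is a placeholder id; substitute the real Hub id
# of this dataset. The preview above only shows a "train" split.
ds = load_dataset("user/code-docstring-pairs", split="train")

row = ds[0]
print(row["func_name"])                  # e.g. "write_grid_to_vtk"
print(row["func_documentation_string"])  # docstring paired with the code
print(len(row["func_code_tokens"]))      # tokenized source, one string per entry

# Typical use: (code, docstring) pairs for code search or code summarization.
pairs = [(r["func_code_string"], r["func_documentation_string"]) for r in ds]
```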