Python code examples for data()

Source file: utils.py (project: pisap, author: neurospin)
import numpy as np
from skimage import exposure

import pisap


def scaling(image, method="stretching"):
    """
    Change the image dynamic range.

    Parameters
    ----------
    image: Image
        the image to be transformed.
    method: str, default 'stretching'
        the normalization method: 'stretching', 'equalization' or 'adaptive'.

    Returns
    -------
    normalize_image: Image
        the normalized image.
    """
    # Contrast stretching
    if method == "stretching":
        p2, p98 = np.percentile(image.data, (2, 98))
        norm_data = exposure.rescale_intensity(image.data, in_range=(p2, p98))

    # Equalization
    elif method == "equalization":
        norm_data = exposure.equalize_hist(image.data)

    # Adaptive Equalization
    elif method == "adaptive":
        norm_data = exposure.equalize_adapthist(image.data, clip_limit=0.03)

    # Unknown method
    else:
        raise ValueError("Unknown normalization '{0}'.".format(method))

    normalize_image = pisap.Image(data=norm_data)

    return normalize_image
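
A minimal usage sketch (not from the pisap sources; it only assumes that pisap.Image wraps a NumPy array in its data attribute, which the function above already relies on):

import numpy as np
import pisap

# build a toy image and normalize it with two of the supported methods
toy = pisap.Image(data=np.random.rand(64, 64))
stretched = scaling(toy, method="stretching")
equalized = scaling(toy, method="equalization")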
Source file: utils.py (project: pisap, author: neurospin)
import numpy as np

import pisap


def histogram(image, nbins=256, lower_cut=0., cumulate=False):
    """
    Compute the histogram of an input dataset.

    Parameters
    ----------
    image: Image
        the image that contains the dataset to be analysed.
    nbins: int, default 256
        the histogram number of bins.
    lower_cut: float, default 0
        do not consider the intensities under this threshold.
    cumulate: bool, default False
        if set, compute the cumulative histogram.

    Returns
    -------
    hist_im: Image
        the generated histogram.
    """
    hist, bins = np.histogram(image.data[image.data > lower_cut], nbins)
    if cumulate:
        cdf = hist.cumsum()
        # scale the CDF to the histogram peak (handy for overlay plots)
        cdf_normalized = cdf * hist.max() / cdf.max()
        hist_im = pisap.Image(data=cdf_normalized)
    else:
        hist_im = pisap.Image(data=hist)
    return hist_im
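
Hypothetical usage, under the same assumption about pisap.Image:

import numpy as np
import pisap

image = pisap.Image(data=np.random.rand(64, 64))
hist_im = histogram(image, nbins=128, cumulate=True)
print(hist_im.data)  # cumulative counts, one value per bin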
Source file: train_validation_split.py (project: piecewisecrf, author: Vaan5)
import numpy as np
import skimage.data


def _label_statistics(image_paths):
    '''
    Calculates label statistics (number of picked pixels for each class).

    Parameters
    ----------
    image_paths : list
        List of absolute paths for picked images

    Returns
    -------
    array: numpy array
        Number of selected pixels per class
    '''
    # KittiDataset is defined elsewhere in the piecewisecrf project
    ds = KittiDataset()

    def _rgb_2_label(rgb):
        return ds.color2label[tuple(rgb)].trainId

    total_counts = np.zeros(ds.num_classes())
    for img in image_paths:
        # note: skimage.data.load is deprecated in recent scikit-image; skimage.io.imread is the usual replacement
        rgb = skimage.data.load(img)
        labels = np.apply_along_axis(_rgb_2_label, 2, rgb)
        indices, counts = np.unique(labels, return_counts=True)
        # drop labels that fall outside the class range (ignore/void pixels)
        if indices[-1] >= ds.num_classes():
            indices = indices[0:-1]
            counts = counts[0:-1]
        total_counts[indices] += counts
    return total_counts
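
The core of the loop is the per-image counting step. A standalone sketch of just that step, with the project-specific KittiDataset and its color-to-trainId mapping replaced by a toy integer label map:

import numpy as np

num_classes = 5
labels = np.random.randint(0, num_classes, size=(8, 8))  # toy label map
total_counts = np.zeros(num_classes)
indices, counts = np.unique(labels, return_counts=True)
total_counts[indices] += counts
print(total_counts)  # pixels per class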
Source file: lung_seg.py (project: huaat_ml_dl, author: ieee820)
import numpy as np


def cut_cube(npy_img, voxelCoord, z, width, y_bias, x_bias):
    # voxelCoord is ordered (y, x, z)
    npy_ct = npy_img[int(voxelCoord[2] - z / 2):int(voxelCoord[2] + z / 2), :, :]
    # dtype (z, y, x) = float32 so the cube can be fed to TensorFlow
    # y_bias and x_bias shift the crop window for data augmentation
    cube = np.ndarray([z, width, width], dtype=np.float32)
    cube[:, :, :] = npy_ct[:,
                    int(voxelCoord[0] - width / 2 + y_bias):int(voxelCoord[0] + width / 2 + y_bias),
                    int(voxelCoord[1] - width / 2 + x_bias):int(voxelCoord[1] + width / 2 + x_bias)]
    cube = normalizePlanes(cube)  # normalizePlanes: intensity-normalization helper defined elsewhere in this project
    return cube
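
A hypothetical call, with a stand-in for the project's normalizePlanes helper (in LUNA16-style code this is usually HU windowing, but the exact definition is not shown here):

import numpy as np

def normalizePlanes(a, minHU=-1000., maxHU=400.):  # stand-in, assumed behaviour
    return np.clip((a - minHU) / (maxHU - minHU), 0., 1.)

npy_img = np.random.rand(120, 512, 512).astype(np.float32)  # toy (z, y, x) volume
voxelCoord = (256, 256, 60)  # (y, x, z)
cube = cut_cube(npy_img, voxelCoord, z=16, width=64, y_bias=0, x_bias=0)
print(cube.shape)  # (16, 64, 64)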
Source file: samples_plgs.py (project: imagepy, author: Image-Py)
def __init__(self, title):
    self.title = title
    # resolve the sample-image loader: look in `data` first, fall back to `misc`
    if hasattr(data, title):
        self.data = getattr(data, title)
    else:
        self.data = getattr(misc, title)
Source file: samples_plgs.py (project: imagepy, author: Image-Py)
def run(self, para=None):
    img = self.data()
    # cast to uint8 before handing the image to ImagePy's display helper
    if img.dtype != np.uint8:
        img = img.astype(np.uint8)
    IPy.show_img([img], self.title)
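
Together, the two methods form a small "open sample image" plugin: the constructor resolves a sample name to a loader function, and run() fetches the array and displays it. The lookup itself can be tried outside ImagePy, assuming the data module is skimage.data (which the data() calls on this page suggest):

from skimage import data

loader = getattr(data, "camera")  # the same getattr lookup the plugin performs
img = loader()
print(img.dtype, img.shape)  # uint8 (512, 512)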

