This article collects and summarizes typical usage examples of the Python function skimage.transform.estimate_transform. If you have been wondering what exactly estimate_transform does, how to call it, or what real-world usage looks like, the hand-picked code samples below should help.
The following presents 20 code examples of the estimate_transform function, sorted by popularity by default. You can vote up the examples you like or find useful; your feedback helps the system recommend better Python code samples.
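Before the project-level examples, here is a minimal, self-contained sketch of the basic call pattern; the point coordinates below are made up purely for illustration:

import numpy as np
from skimage import transform

# four hand-picked correspondences (hypothetical values)
src = np.array([[0, 0], [0, 10], [10, 10], [10, 0]], dtype=float)
dst = np.array([[1, 2], [2, 12], [12, 13], [11, 3]], dtype=float)

# estimate a similarity transform (rotation + uniform scale + translation) mapping src -> dst
tform = transform.estimate_transform('similarity', src, dst)

print(tform.params)        # 3x3 homogeneous transformation matrix
print(tform(src))          # forward mapping, approximately dst
print(tform.inverse(dst))  # inverse mapping, approximately src

The returned object is callable on Nx2 coordinate arrays and can also be passed to skimage.transform.warp, as several of the examples below demonstrate.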
Example 1: test_estimate_transform
def test_estimate_transform():
    for tform in ('euclidean', 'similarity', 'affine', 'projective',
                  'polynomial'):
        estimate_transform(tform, SRC[:2, :], DST[:2, :])

    with pytest.raises(ValueError):
        estimate_transform('foobar',
                           SRC[:2, :], DST[:2, :])
Developer: andreydung; Project: scikit-image; Lines: 7; Source: test_geometric.py
Example 2: test_deprecated_params_attributes
def test_deprecated_params_attributes():
    for t in ('projective', 'affine', 'similarity'):
        tform = estimate_transform(t, SRC, DST)
        assert_equal(tform._matrix, tform.params)

    tform = estimate_transform('polynomial', SRC, DST, order=3)
    assert_equal(tform._params, tform.params)
Developer: JeanKossaifi; Project: scikit-image; Lines: 7; Source: test_geometric.py
Example 3: test_deprecated_params_attributes
def test_deprecated_params_attributes():
    for t in ('projective', 'affine', 'similarity'):
        tform = estimate_transform(t, SRC, DST)
        with expected_warnings(['`_matrix`.*deprecated']):
            assert_equal(tform._matrix, tform.params)

    tform = estimate_transform('polynomial', SRC, DST, order=3)
    with expected_warnings(['`_params`.*deprecated']):
        assert_equal(tform._params, tform.params)
Developer: haohao200609; Project: Hybrid; Lines: 9; Source: test_geometric.py
Example 4: test_projective_estimation
def test_projective_estimation():
    # exact solution
    tform = estimate_transform('projective', SRC[:4, :], DST[:4, :])
    assert_almost_equal(tform(SRC[:4, :]), DST[:4, :])

    # over-determined
    tform2 = estimate_transform('projective', SRC, DST)
    assert_almost_equal(tform2.inverse(tform2(SRC)), SRC)

    # via estimate method
    tform3 = ProjectiveTransform()
    tform3.estimate(SRC, DST)
    assert_almost_equal(tform3.params, tform2.params)
Developer: AbdealiJK; Project: scikit-image; Lines: 13; Source: test_geometric.py
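The "exact solution" versus "over-determined" cases in Example 4 follow from the degrees of freedom of each transform type: a similarity transform is pinned down by 2 point pairs, an affine by 3, and a projective by 4, while additional pairs turn the estimation into a least-squares fit. A small sketch with synthetic points (all values are invented for illustration):

import numpy as np
from skimage import transform

rng = np.random.default_rng(0)
src = rng.random((10, 2)) * 100                                  # hypothetical source points
dst = src @ np.array([[1.1, 0.3], [-0.2, 0.9]]) + [5.0, -3.0]    # exactly affine target points

# minimum correspondences for an exact fit: similarity 2, affine 3, projective 4
for ttype, n_min in [('similarity', 2), ('affine', 3), ('projective', 4)]:
    exact = transform.estimate_transform(ttype, src[:n_min], dst[:n_min])   # exact solution
    overdet = transform.estimate_transform(ttype, src, dst)                 # least-squares fit
    print(ttype,
          np.abs(exact(src[:n_min]) - dst[:n_min]).max(),   # ~0: the minimal set is reproduced exactly
          np.abs(overdet(src) - dst).max())                 # ~0 for affine/projective, >0 for similarity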
Example 5: test_affine_estimation
def test_affine_estimation():
    # exact solution
    tform = estimate_transform('affine', SRC[:3, :], DST[:3, :])
    assert_array_almost_equal(tform(SRC[:3, :]), DST[:3, :])

    # over-determined
    tform2 = estimate_transform('affine', SRC, DST)
    assert_array_almost_equal(tform2.inverse(tform2(SRC)), SRC)

    # via estimate method
    tform3 = AffineTransform()
    tform3.estimate(SRC, DST)
    assert_array_almost_equal(tform3._matrix, tform2._matrix)
Developer: Autodidact24; Project: scikit-image; Lines: 13; Source: test_geometric.py
Example 6: main
def main(base_dir):
    BASE_DIR = base_dir

    # Load the set of pictures
    ic = io.ImageCollection(BASE_DIR + '*.JPG')

    # Select points on the first picture
    f, ax = plt.subplots(1, 1)
    ax.get_xaxis().set_visible(False)
    ax.get_yaxis().set_visible(False)
    ax.autoscale(enable=True, axis='both', tight=True)
    plt.tight_layout(pad=0.4, w_pad=0.0, h_pad=0.0)
    ax.imshow(ic[0])
    coords = [plt.ginput(8, timeout=0)]
    plt.close()

    # Load first picture side-by-side with second, select points.
    # Scroll through images one-by-one
    for i, img in enumerate(ic[1:]):
        ax1 = plt.subplot2grid((6, 10), (0, 1), rowspan=6, colspan=9)
        ax0 = plt.subplot2grid((6, 10), (0, 0))
        for ax in [ax0, ax1]:
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
        plt.tight_layout(pad=0.4, w_pad=0.0, h_pad=0.0)
        #f, (ax0, ax1) = plt.subplots(1, 2)
        ax0.imshow(ic[i])
        for coord in coords[i]:
            ax0.scatter(coord[0], coord[1])
        ax1.imshow(img)
        coords.append(plt.ginput(8, timeout=0))
        plt.close()

    # Use a similarity transformation to transform each one.
    if not os.path.exists(BASE_DIR + 'corrected'):
        os.mkdir(BASE_DIR + 'corrected')
    np.save(BASE_DIR + 'corrected/coords.npy', coords)
    io.imsave(BASE_DIR + 'corrected/0.jpg', ic[0])
    for i, img in enumerate(ic[1:]):
        tf = transform.estimate_transform('similarity', np.array(coords[0]), np.array(coords[i+1]))
        # Use a translation transformation to center both images for display purposes
        img_warped = transform.warp(img, inverse_map=tf,
                                    output_shape=(1728, 3072))
        print BASE_DIR + 'corrected/%d.jpg' % (i+1)
        print img_warped
        io.imsave(BASE_DIR + 'corrected/%d.jpg' % (i+1), img_warped)
Developer: dkadish; Project: khronos; Lines: 60; Source: manual-register.py
Example 7: estimate_coordinate_transform
def estimate_coordinate_transform(source, target, method, **method_kwargs):
    """Calculates a transformation from a source list of coordinates to a
    target list of coordinates.

    Parameters
    ----------
    source : Nx2 array
        (x, y) coordinate pairs from source image.
    target : Nx2 array
        (x, y) coordinate pairs from target image. Must be same shape as
        'source'.
    method : string, optional
        Method to use for transform estimation.
    **method_kwargs : optional
        Additional arguments can be passed in specific to the particular
        method. For example, 'order' for a polynomial transform estimation.

    Returns
    -------
    transform : skimage.transform._geometric.GeometricTransform
        An skimage transform object.

    See Also
    --------
    skimage.transform.estimate_transform

    """
    return tf.estimate_transform(method, source, target, **method_kwargs)
Developer: vjlbym; Project: sima; Lines: 29; Source: __init__.py
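A hypothetical use of this wrapper, assuming the function above is in scope and that tf refers to skimage.transform as in the snippet; all coordinate values are synthetic:

import numpy as np
import skimage.transform as tf

rng = np.random.default_rng(1)
source = rng.random((8, 2)) * 200          # made-up (x, y) detections in the source image
target = source * 0.5 + 7.0                # made-up matching detections in the target image

tform = estimate_coordinate_transform(source, target, 'affine')
print(np.allclose(tform(source), target))  # True for this synthetic data

# extra keyword arguments are forwarded through **method_kwargs
tform_poly = estimate_coordinate_transform(source, target, 'polynomial', order=2)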
Example 8: gen_data
def gen_data(name):
    reftracker = scio.loadmat('data/images_tracker.00047.mat')['tracker']
    desttracker = scio.loadmat('data/images_tracker/'+name+'.mat')['tracker']
    refpos = np.floor(np.mean(reftracker, 0))
    xxc, yyc = np.meshgrid(np.arange(1, 1801, dtype=np.int), np.arange(1, 2001, dtype=np.int))
    # normalize x and y channels
    xxc = (xxc - 600 - refpos[0]) * 1.0 / 600
    yyc = (yyc - 600 - refpos[1]) * 1.0 / 600
    maskimg = Image.open('data/meanmask.png')
    maskc = np.array(maskimg, dtype=np.float)
    maskc = np.pad(maskc, (600, 600), 'minimum')

    # warp is an inverse transform, and so src and dst must be reversed here
    tform = transform.estimate_transform('affine', desttracker + 600, reftracker + 600)

    img_data = skio.imread('data/images_data/'+name+'.jpg')

    # save org mat
    warpedxx = transform.warp(xxc, tform, output_shape=xxc.shape)
    warpedyy = transform.warp(yyc, tform, output_shape=xxc.shape)
    warpedmask = transform.warp(maskc, tform, output_shape=xxc.shape)

    warpedxx = warpedxx[600:1400, 600:1200, :]
    warpedyy = warpedyy[600:1400, 600:1200, :]
    warpedmask = warpedmask[600:1400, 600:1200, :]

    img_h, img_w, _ = img_data.shape
    mat = np.zeros((img_h, img_w, 6), dtype=np.float)
    mat[:, :, 0] = (img_data[2] * 1.0 - 104.008) / 255
    mat[:, :, 1] = (img_data[1] * 1.0 - 116.669) / 255
    mat[:, :, 2] = (img_data[0] * 1.0 - 122.675) / 255
    scio.savemat('portraitFCN_data/' + name + '.mat', {'img': mat})

    mat_plus = np.zeros((img_h, img_w, 6), dtype=np.float)
    mat_plus[:, :, 0:3] = mat
    mat_plus[:, :, 3] = warpedxx
    mat_plus[:, :, 4] = warpedyy
    mat_plus[:, :, 5] = warpedmask
Developer: Selimam; Project: AutoPortraitMatting; Lines: 33; Source: preprocess.py
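The comment in Example 8 about warp being an inverse transform is worth spelling out: transform.warp maps each output pixel back into the input image, so you either estimate the transform in the "reversed" direction (dst points to src points, as above) and pass it directly, or estimate src to dst and pass its inverse. A minimal sketch with made-up landmarks:

import numpy as np
from skimage import data, transform

image = data.camera()                      # any 2-D image works here
src = np.array([[100.0, 100.0], [100.0, 300.0], [300.0, 300.0], [300.0, 100.0]])
dst = src + [30.0, -20.0]                  # hypothetical landmark positions after a shift

# forward estimate: maps src -> dst
fwd = transform.estimate_transform('similarity', src, dst)
warped_a = transform.warp(image, inverse_map=fwd.inverse)

# reversed estimate: maps dst -> src, usable as the inverse map directly
rev = transform.estimate_transform('similarity', dst, src)
warped_b = transform.warp(image, inverse_map=rev)

print(np.abs(warped_a - warped_b).max())   # near zero: both spellings are equivalent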
Example 9: extract_sift
def extract_sift(image, lm=None, shape=[200, 300], fix_points='outer', ttype='affine'):
    if lm == None: lm = landmarks(image)
    if np.any(np.isnan(lm)):
        return np.nan*np.ones([out_shape, out_shape, image.shape[2]]).astype(np.float16), np.nan*np.zeros_like(lm)

    dst = mean_face[:, p[fix_points]]
    dst = dst - dst.mean(1)[:, None]
    dst = dst / np.abs(dst).max()
    dst *= shape[0]/2
    dst += shape[1]/2
    print(dst.min())
    print(dst.max())

    src = lm[:, p[fix_points]]
    tform = transform.estimate_transform(ttype, src.T, dst.T)
    lm_reg = tform(lm.T).T
    image = transform.warp(image, inverse_map=tform.inverse, output_shape=[shape[1], shape[1]])
    image = exposure.equalize_hist(image, mask=image != 0)

    S = 12
    for l1, l2 in lm_reg.T:
        x = np.arange(l2-S, l2+S)
        y = np.arange(l1-S, l1+S)
        for x_ in x:
            for y_ in y:
                image[x_, y_, 0] = 255

    return image, lm_reg
Developer: RWalecki; Project: faxe; Lines: 26; Source: extract.py
Example 10: infer
def infer(edge_image, edge_lengths, mu, phi, sigma2,
          update_slice=slice(None),
          scale_estimate=None,
          rotation=0,
          translation=(0, 0)):
    # edge_points = np.array(np.where(edge_image)).T
    # edge_points[:, [0, 1]] = edge_points[:, [1, 0]]
    # edge_score = edge_image.shape[0] * np.exp(-edge_lengths[edge_image] / (0.25 * edge_image.shape[0])).reshape(-1, 1)
    # edge_points = np.concatenate((edge_points, edge_score), axis=1)
    #
    # edge_nn = NearestNeighbors(n_neighbors=1).fit(edge_points)

    edge_near = scipy.ndimage.distance_transform_edt(~edge_image)
    edge_near_blur = gaussian(edge_near, 2)
    Gy, Gx = np.gradient(edge_near_blur)
    mag = np.sqrt(np.power(Gy, 2) + np.power(Gx, 2))

    if scale_estimate is None:
        scale_estimate = min(edge_image.shape) * 4

    mu = (mu.reshape(-1, 2) - mu.reshape(-1, 2).mean(axis=0)).reshape(-1, 1)
    average_distance = np.sqrt(np.power(mu.reshape(-1, 2), 2).sum(axis=1)).mean()
    scale_estimate /= average_distance * np.sqrt(2)

    h = np.zeros((phi.shape[1], 1))
    psi = SimilarityTransform(scale=scale_estimate, rotation=rotation, translation=translation)

    while True:
        w = (mu + phi @ h).reshape(-1, 2)

        image_points = matrix_transform(w, psi.params)[update_slice, :]
        image_points = np.concatenate((image_points, np.zeros((image_points.shape[0], 1))), axis=1)

        # closest_edge_point_indices = edge_nn.kneighbors(image_points)[1].flatten()
        # closest_edge_points = edge_points[closest_edge_point_indices, :2]
        closest_edge_points = gradient_step(Gy, Gx, mag, image_points)

        w = mu.reshape(-1, 2)
        psi = estimate_transform('similarity', w[update_slice, :], closest_edge_points)

        image_points = matrix_transform(w, psi.params)[update_slice, :]
        image_points = np.concatenate((image_points, np.zeros((image_points.shape[0], 1))), axis=1)

        # closest_edge_point_indices = edge_nn.kneighbors(image_points)[1].flatten()
        # closest_edge_points = edge_points[closest_edge_point_indices, :2]
        closest_edge_points = gradient_step(Gy, Gx, mag, image_points)

        mu_slice = mu.reshape(-1, 2)[update_slice, :].reshape(-1, 1)
        K = phi.shape[-1]
        phi_full = phi.reshape(-1, 2, K)
        phi_slice = phi_full[update_slice, :].reshape(-1, K)

        h = update_h(sigma2, phi_slice, closest_edge_points, mu_slice, psi)

        w = (mu + phi @ h).reshape(-1, 2)
        image_points = matrix_transform(w, psi.params)

        update_slice = yield image_points, closest_edge_points
Developer: jrdurrant; Project: vision; Lines: 59; Source: subspace_shape.py
Example 11: projective
def projective(reference, points, bone, properties_to_transform):
    """
    Estimates a projective transform
    """
    tform = tf.estimate_transform('projective', points, reference)
    transformed = list(map(tform, [bone[p] for p in properties_to_transform]))
    error = get_error(points, reference, tform)
    return transformed, error
Developer: selaux; Project: master-of-bones; Lines: 8; Source: registration.py
Example 12: test_similarity_estimation
def test_similarity_estimation():
    # exact solution
    tform = estimate_transform('similarity', SRC[:2, :], DST[:2, :])
    assert_array_almost_equal(tform(SRC[:2, :]), DST[:2, :])
    assert_equal(tform._matrix[0, 0], tform._matrix[1, 1])
    assert_equal(tform._matrix[0, 1], - tform._matrix[1, 0])

    # over-determined
    tform2 = estimate_transform('similarity', SRC, DST)
    assert_array_almost_equal(tform2.inverse(tform2(SRC)), SRC)
    assert_equal(tform2._matrix[0, 0], tform2._matrix[1, 1])
    assert_equal(tform2._matrix[0, 1], - tform2._matrix[1, 0])

    # via estimate method
    tform3 = SimilarityTransform()
    tform3.estimate(SRC, DST)
    assert_array_almost_equal(tform3._matrix, tform2._matrix)
Developer: Autodidact24; Project: scikit-image; Lines: 17; Source: test_geometric.py
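The matrix checks in Example 12 reflect the structure of a similarity matrix, [[a, -b, tx], [b, a, ty], [0, 0, 1]]. As a side note (a small sketch on synthetic points), the estimated SimilarityTransform also exposes the fitted scale, rotation, and translation directly, which is often more convenient than inspecting matrix entries:

import numpy as np
from skimage import transform

src = np.array([[0.0, 0.0], [0.0, 1.0], [1.0, 1.0], [1.0, 0.0]])
truth = transform.SimilarityTransform(scale=2.0, rotation=np.deg2rad(30), translation=(3.0, -1.0))
dst = truth(src)                          # synthetic targets generated from a known transform

tform = transform.estimate_transform('similarity', src, dst)
print(tform.scale)                        # ~2.0
print(np.rad2deg(tform.rotation))         # ~30.0
print(tform.translation)                  # ~[3.0, -1.0]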
Example 13: test_euclidean_estimation
def test_euclidean_estimation():
    # exact solution
    tform = estimate_transform('euclidean', SRC[:2, :], SRC[:2, :] + 10)
    assert_almost_equal(tform(SRC[:2, :]), SRC[:2, :] + 10)
    assert_almost_equal(tform.params[0, 0], tform.params[1, 1])
    assert_almost_equal(tform.params[0, 1], - tform.params[1, 0])

    # over-determined
    tform2 = estimate_transform('euclidean', SRC, DST)
    assert_almost_equal(tform2.inverse(tform2(SRC)), SRC)
    assert_almost_equal(tform2.params[0, 0], tform2.params[1, 1])
    assert_almost_equal(tform2.params[0, 1], - tform2.params[1, 0])

    # via estimate method
    tform3 = EuclideanTransform()
    tform3.estimate(SRC, DST)
    assert_almost_equal(tform3.params, tform2.params)
Developer: AbdealiJK; Project: scikit-image; Lines: 17; Source: test_geometric.py
Example 14: similarity
def similarity(reference, points, bone, properties_to_transform):
    """
    Estimates a similarity transform
    """
    tform = tf.estimate_transform('similarity', points, reference)
    transformed = list(map(tform, [bone[p] for p in properties_to_transform]))
    error = get_error(points, reference, tform)
    return transformed, error
Developer: selaux; Project: master-of-bones; Lines: 8; Source: registration.py
Example 15: test_polynomial_estimation
def test_polynomial_estimation():
    # over-determined
    tform = estimate_transform('polynomial', SRC, DST, order=10)
    assert_array_almost_equal(tform(SRC), DST, 6)

    # via estimate method
    tform2 = PolynomialTransform()
    tform2.estimate(SRC, DST, order=10)
    assert_array_almost_equal(tform2._params, tform._params)
Developer: Autodidact24; Project: scikit-image; Lines: 9; Source: test_geometric.py
Example 16: shape_features
def shape_features(image, fix_points='Stable', feature_points='inner', lm=None):
    if lm == None: lm = landmarks(image)

    mf = mean_face[:, p[fix_points]]
    lm_fix = lm[:, p[fix_points]]
    if np.any(np.isnan(lm_fix)): return None, None

    tform = transform.estimate_transform('affine', lm_fix.T, mf.T)
    lm_reg = tform(lm.T).T
    lm_reg = lm_reg[:, p[feature_points]]
    X = lm_reg.flatten()

    return X, lm
Developer: RWalecki; Project: faxe; Lines: 13; Source: extract.py
Example 17: test_fundamental_matrix_estimation
def test_fundamental_matrix_estimation():
    src = np.array([1.839035, 1.924743, 0.543582,  0.375221,
                    0.473240, 0.142522, 0.964910,  0.598376,
                    0.102388, 0.140092, 15.994343, 9.622164,
                    0.285901, 0.430055, 0.091150,  0.254594]).reshape(-1, 2)
    dst = np.array([1.002114, 1.129644, 1.521742, 1.846002,
                    1.084332, 0.275134, 0.293328, 0.588992,
                    0.839509, 0.087290, 1.779735, 1.116857,
                    0.878616, 0.602447, 0.642616, 1.028681]).reshape(-1, 2)

    tform = estimate_transform("fundamental", src, dst)

    # Reference values obtained using COLMAP SfM library.
    tform_ref = np.array([[-0.217859, 0.419282, -0.0343075],
                          [-0.0717941, 0.0451643, 0.0216073],
                          [0.248062, -0.429478, 0.0221019]])
    assert_almost_equal(tform.params, tform_ref, 6)
Developer: scikit-image; Project: scikit-image; Lines: 49; Source: test_geometric.py
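Unlike the other transform types on this page, a fundamental matrix does not map points to points; it encodes the epipolar constraint between two views. Below is a hedged sketch of checking that constraint on synthetic two-view data (all geometry is invented, and it assumes skimage's convention that dst_homogeneous @ F @ src_homogeneous is approximately zero for true correspondences):

import numpy as np
from skimage import transform

rng = np.random.default_rng(3)

# synthetic scene: 3-D points in front of two cameras related by a small rotation and a baseline
pts3d = np.column_stack([rng.uniform(-1, 1, 20),
                         rng.uniform(-1, 1, 20),
                         rng.uniform(4, 8, 20)])
theta = 0.1
R = np.array([[np.cos(theta), 0, np.sin(theta)],
              [0, 1, 0],
              [-np.sin(theta), 0, np.cos(theta)]])
t = np.array([0.5, 0.0, 0.1])

src = (pts3d / pts3d[:, 2:])[:, :2]                # normalized image coordinates, view 1
pts3d_cam2 = pts3d @ R.T + t
dst = (pts3d_cam2 / pts3d_cam2[:, 2:])[:, :2]      # normalized image coordinates, view 2

tform = transform.estimate_transform('fundamental', src, dst)

# epipolar constraint: [x2, y2, 1] @ F @ [x1, y1, 1]^T should be close to zero
src_h = np.column_stack([src, np.ones(len(src))])
dst_h = np.column_stack([dst, np.ones(len(dst))])
print(np.abs(np.sum((dst_h @ tform.params) * src_h, axis=1)).max())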
Example 18: test_essential_matrix_estimation
def test_essential_matrix_estimation():
    src = np.array([1.839035, 1.924743, 0.543582,  0.375221,
                    0.473240, 0.142522, 0.964910,  0.598376,
                    0.102388, 0.140092, 15.994343, 9.622164,
                    0.285901, 0.430055, 0.091150,  0.254594]).reshape(-1, 2)
    dst = np.array([1.002114, 1.129644, 1.521742, 1.846002,
                    1.084332, 0.275134, 0.293328, 0.588992,
                    0.839509, 0.087290, 1.779735, 1.116857,
                    0.878616, 0.602447, 0.642616, 1.028681]).reshape(-1, 2)

    tform = estimate_transform("essential", src, dst)

    # Reference values obtained using COLMAP SfM library.
    tform_ref = np.array([[-0.0811666, 0.255449, -0.0478999],
                          [-0.192392, -0.0531675, 0.119547],
                          [0.177784, -0.22008, -0.015203]])
    assert_almost_equal(tform.params, tform_ref, 6)
Developer: scikit-image; Project: scikit-image; Lines: 49; Source: test_geometric.py
Example 19: main
def main():
    # image = data.coins()  # or any NumPy array!
    # edges = filter.sobel(image)
    # io.imshow(edges)
    # io.show()

    image_file_name_0 = '/local/decarlo/projects/data/microCT/0_180/hdf4/tilt_020_020_0001.hdf'
    image_file_name_180 = '/local/decarlo/projects/data/microCT/0_180/hdf4/tilt_020_020_0002.hdf'
    image_file_name_white = '/local/decarlo/projects/data/microCT/0_180/hdf4/tilt_020_020_0003.hdf'

    image_0 = read_hdf4(image_file_name_0, 'data')
    image_180 = read_hdf4(image_file_name_180, 'data')
    image_white = read_hdf4(image_file_name_white, 'data')

    image_0 = normalize(image_0, image_white)
    image_180 = normalize(image_180, image_white)

    plt.imshow(image_0 + image_180, cmap=plt.cm.hot)
    plt.colorbar()
    plt.show()

    image_180 = np.fliplr(image_180)
    tform = tf.estimate_transform('similarity', image_0, image_180)

    a, grad = structural_similarity(image_0, image_180, gradient=True)
    print a
    print "grad shape", grad.shape
    # print grad
    plt.imshow(grad, cmap=plt.cm.hot)
    plt.colorbar()
    plt.show()

    result = match_template(image_0, image_180)
    print result.shape

    ij = np.unravel_index(np.argmax(result), result.shape)
    x, y = ij[::-1]
    print x, y

    im2, scale, angle, t = similarity(image_0, image_180)
    print "Scale: ", scale, "Angle: ", angle, "Transformation Matrix: ", t

    rot_axis_shift_x = -t[0]/2.0
    rot_axis_tilt = -t[1]/1.0
    print "Rotation Axis Shift (x, y):", "(", rot_axis_shift_x, ",", rot_axis_tilt, ")"
Developer: decarlof; Project: projects; Lines: 48; Source: align_rot_microCT.py
Example 20: estimate_transform
def estimate_transform(src, dst):
    """ Create source and destination feature match coordinates """
    src = np.array([keypoints1[elem] for elem in matches12[:, 0]])
    dst = np.array([keypoints2[elem] for elem in matches12[:, 1]])

    """ Estimate transform
    Available transformations:
    ('similarity', 'affine', 'piecewise-affine', 'projective', 'polynomial')
    """
    tform = tf.estimate_transform('similarity', src, dst)

    """ Error check transform (should return True) """
    assert np.allclose(tform.inverse(tform(src)), src)

    return tform
Developer: ThunderShiviah; Project: AllenBrainAtlasAPI; Lines: 17; Source: register_methods.py
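The docstring in Example 20 lists 'piecewise-affine' among the available transformations; it is the only type not demonstrated elsewhere on this page. Piecewise-affine triangulates the source points and fits one affine transform per triangle, so the control points themselves are mapped onto their targets exactly. A minimal sketch with synthetic control points (all values invented):

import numpy as np
from skimage import transform

rng = np.random.default_rng(2)
src = rng.random((12, 2)) * 100                     # synthetic control points
dst = src + rng.normal(scale=2.0, size=src.shape)   # locally jittered target points

tform = transform.estimate_transform('piecewise-affine', src, dst)
print(np.allclose(tform(src), dst))                 # True: each control point maps exactly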
Note: the skimage.transform.estimate_transform examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other source-code and documentation platforms. The code snippets are taken from open-source projects contributed by various developers; copyright remains with the original authors, and any further distribution or use must follow the corresponding project's license. Do not republish without permission.