This article collects typical usage examples of the Python function skimage.feature.match_template. If you are unsure how match_template is called, what its arguments look like in practice, or simply want to see it used in real code, the curated examples below should help.
A total of 20 code examples of the match_template function are shown below, sorted by popularity by default.
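Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the typical match_template workflow: correlate an image against a smaller template, then read the best-match position off the peak of the normalized cross-correlation map. The skimage.data.coins sample image and the crop coordinates are arbitrary choices for illustration only.

import numpy as np
from skimage import data
from skimage.feature import match_template

# Sample image and a template cropped out of it (coordinates are arbitrary).
image = data.coins()
template = image[170:220, 75:130]

# match_template returns a normalized cross-correlation map in [-1, 1];
# with default settings it is smaller than the image by the template size minus one.
result = match_template(image, template)

# The location of the maximum gives the top-left corner of the best match.
row, col = np.unravel_index(np.argmax(result), result.shape)
print("best match at row=%d, col=%d (score %.3f)" % (row, col, result.max()))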
Example 1: matchSections
def matchSections():
    pitches = cPickle.load(open('AmuInstPitches.pkl'))
    sections = cPickle.load(open('AmuInstSections.pkl'))
    filenames = cPickle.load(open('AmuFilenames.pkl'))
    keys = cPickle.load(open('AmuKeys.pkl'))
    for i in range(len(pitches)/2):
        pyplot.figure(i,(16,9))
        newp = []
        for vector in pitches[2*i]:
            deq = deque(vector)
            deq.rotate(keys[2*i+1]-keys[2*i])
            l = list(deq)
            newp.append(l)
        pitches[2*i] = newp
        image = numpy.array(pitches[2*i])
        template = numpy.array(pitches[2*i+1][sections[2*i+1][0]:sections[2*i+1][1]])
        im = feature.match_template(image,template,pad_input=True)
        pyplot.vlines(12,0,im.shape[0],'b')
        for j in range(len(sections[2*i+1])-2):
            template = numpy.array(pitches[2*i+1][sections[2*i+1][j+1]:sections[2*i+1][j+2]])
            temp = feature.match_template(image,template,pad_input=True)
            im = numpy.concatenate((im,temp),axis = 1)
            pyplot.vlines(12*j+12,0,im.shape[0],'b')
        ij = numpy.unravel_index(numpy.argmax(im), im.shape)
        x, y = ij[::-1]
        pyplot.imshow(im, cmap = pyplot.get_cmap('gray'), aspect = 'auto', origin = 'lower')
        pyplot.ylabel(os.path.basename(filenames[2*i]) + " (in beats)")
        pyplot.xlabel(os.path.basename(filenames[2*i+1]) + " (12 Chroma Values Each)")
        pyplot.title('Section Similarity')
        pyplot.plot(x,y,'o',markeredgecolor='r',markerfacecolor='none',markersize=10)
        pyplot.xlim(0,im.shape[1]-1)
        pyplot.ylim(0,im.shape[0])
    pyplot.show()
    sys.exit()
Developer: JordanHawkins, Project: AutomaticDJ, Lines: 34, Source: AutoMashUp.py

Example 2: match_digit
def match_digit(image,templates,min_w=24,is_two_digits=True,debug=False):
    h,w = image.shape
    if is_two_digits:
        #first digit should be: 1,2,3,4,5
        ret = []
        wlimit = max(w/2,min_w)
        for digit in [1,2,3,4,5]:
            result = match_template(image[:,:wlimit], templates[digit])
            max_correl = np.max(result)
            ret += [(max_correl,digit)]
            if debug: print "digit1:",digit,max_correl
        #second digit should be any except if first digit is 5. That case options are 0,1,2,3,4
        if len(ret) > 0:
            ret.sort()
            #best digit is the last
            correl1,digit1 = ret[-1]
        else:
            return None,None
        digits = range(0,10) if digit1 != 5 else [0,1,2,3,4]
        ret = []
        for digit in digits:
            result = match_template(image[:,w - wlimit:], templates[digit])
            max_correl = np.max(result)
            ret += [(max_correl,digit)]
            if debug: print "digit2:",digit,max_correl
        if len(ret) > 0:
            ret.sort()
            #best digit is the last
            correl2,digit2 = ret[-1]
        else:
            return None,None
        return (correl1,digit1),(correl2,digit2)
    else:
        #assert h >= 20, "problem with w {0}:{1}".format(h,20)
        #assert w >= 18, "problem with h {0}:{1}".format(w,18)
        ret = []
        #one digit: 6,7,8,9
        for digit in [6,7,8,9]:
            result = match_template(image, templates[digit])
            max_correl = np.max(result)
            if debug: print "digit1:",digit,max_correl
            ret += [(max_correl,digit)]
        if len(ret) > 0:
            ret.sort()
            #best digit is the last
            return ret[-1],None
Developer: exepulveda, Project: roots, Lines: 53, Source: bound.py

Example 3: find_bulb
def find_bulb(image, templ):
    """finds the terminal bulb in an image using template correlation.
    Finds the best location (shifted cross-correlation) between image and template
    return: location (x,y) and correlation value at the maximal correlation.
    """
    image = ndimage.gaussian_filter(image, 2) #- ndimage.gaussian_filter(res, 50)
    cut = int(0.1*image.shape[1])
    result = match_template(image, templ)
    xm = int(result.shape[1]/2.)
    res = result[:,max(0,-cut + xm):xm+cut]
    ij = np.unravel_index(np.argmax(res), res.shape)
    x0, y0 = ij[::-1]
    # calculate half template size
    t_half = int(templ.shape[0]/2.)
    conf = res[y0,x0]
    result1 = match_template(image, templ[t_half:,])
    res1 = result1[:,max(0,-cut + xm):xm+cut]
    ij = np.unravel_index(np.argmax(res1), res1.shape)
    x1, y1 = ij[::-1]
    conf1 = res1[y1,x1]
    if conf1 > conf:
        conf = conf1
        x0,y0 = x1,y1
        res = res1
        t_half = int(templ.shape[0]/4.)
    result2 = match_template(image, templ[:t_half,])
    res2 = result2[:,max(0,-cut + xm):xm+cut]
    ij = np.unravel_index(np.argmax(res2), res2.shape)
    x2, y2 = ij[::-1]
    conf2 = res2[y2,x2]
    if conf2 > conf:
        conf = conf2
        x0,y0 = x2,y2
        res = res2
        t_half = int(templ.shape[0])/4.
    x = max(0, min(x0+templ.shape[1]/2.+cut, image.shape[1]-1))
    y = max(0,min(y0+t_half, image.shape[0]-1))
    if conf < 0.4 or conf/np.std(res) < 2.5:
        conf = 0.0
    return y,x, conf
Developer: monikascholz, Project: pWARP, Lines: 50, Source: check_movie.py

Example 4: test_normalization
def test_normalization():
    """Test that `match_template` gives the correct normalization.
    Normalization gives 1 for a perfect match and -1 for an inverted-match.
    This test adds positive and negative squares to a zero-array and matches
    the array with a positive template.
    """
    n = 5
    N = 20
    ipos, jpos = (2, 3)
    ineg, jneg = (12, 11)
    image = np.full((N, N), 0.5)
    image[ipos:ipos + n, jpos:jpos + n] = 1
    image[ineg:ineg + n, jneg:jneg + n] = 0
    # white square with a black border
    template = np.zeros((n + 2, n + 2))
    template[1:1 + n, 1:1 + n] = 1
    result = match_template(image, template)
    # get the max and min results.
    sorted_result = np.argsort(result.flat)
    iflat_min = sorted_result[0]
    iflat_max = sorted_result[-1]
    min_result = np.unravel_index(iflat_min, result.shape)
    max_result = np.unravel_index(iflat_max, result.shape)
    # shift result by 1 because of template border
    assert np.all((np.array(min_result) + 1) == (ineg, jneg))
    assert np.all((np.array(max_result) + 1) == (ipos, jpos))
    assert np.allclose(result.flat[iflat_min], -1)
    assert np.allclose(result.flat[iflat_max], 1)
Developer: TheArindham, Project: scikit-image, Lines: 34, Source: test_template.py

Example 5: test_template
def test_template():
    size = 100
    # Float prefactors ensure that image range is between 0 and 1
    image = np.full((400, 400), 0.5)
    target = 0.1 * (np.tri(size) + np.tri(size)[::-1])
    target_positions = [(50, 50), (200, 200)]
    for x, y in target_positions:
        image[x:x + size, y:y + size] = target
    np.random.seed(1)
    image += 0.1 * np.random.uniform(size=(400, 400))
    result = match_template(image, target)
    delta = 5
    positions = peak_local_max(result, min_distance=delta)
    if len(positions) > 2:
        # Keep the two maximum peaks.
        intensities = result[tuple(positions.T)]
        i_maxsort = np.argsort(intensities)[::-1]
        positions = positions[i_maxsort][:2]
    # Sort so that order matches `target_positions`.
    positions = positions[np.argsort(positions[:, 0])]
    for xy_target, xy in zip(target_positions, positions):
        assert_almost_equal(xy, xy_target)
Developer: TheArindham, Project: scikit-image, Lines: 27, Source: test_template.py

Example 6: processImages
def processImages():
    sims = cPickle.load(open('AmuInstSimMats.pkl'))
    for i,sim in enumerate(sims):
        pyplot.figure(0,(16,9))
        pyplot.imshow(sim, vmin = 0, vmax = 1, cmap = pyplot.get_cmap('gray'), aspect = 'auto', origin = 'lower')
        pyplot.title('Unfiltered Sim Matrix ' + str(i))
        pyplot.savefig('Unfiltered Sim Matrix ' + str(i) + '.jpg')
        pyplot.figure(1,(16,9))
        pyplot.imshow(filter.tv_denoise(numpy.array(sim,numpy.float64), weight = 1), vmin = 0, vmax = 1, cmap = pyplot.get_cmap('gray'), aspect = 'auto', origin = 'lower')
        pyplot.title('TV_Denoise ' + str(i))
        pyplot.savefig('TV_Denoise ' + str(i) + '.jpg')
        pyplot.figure(2,(16,9))
        pyplot.imshow(filter.threshold_adaptive(numpy.array(sim,numpy.float64),21), vmin = 0, vmax = 1, cmap = pyplot.get_cmap('gray'), aspect = 'auto', origin = 'lower')
        pyplot.title('Threshold_Adaptive ' + str(i))
        pyplot.savefig('Threshold_Adaptive ' + str(i) + '.jpg')
        pyplot.figure(3,(16,9))
        pyplot.imshow(ndimage.minimum_filter(numpy.array(sim,numpy.float64),size=2), vmin = 0, vmax = 1, cmap = pyplot.get_cmap('gray'), aspect = 'auto', origin = 'lower')
        pyplot.title('Local Minimum_Filter ' + str(i))
        pyplot.savefig('Local Minimum_Filter ' + str(i) + '.jpg')
        pyplot.figure(4,(16,9))
        template = numpy.array([[0,1,1,1,1,1,1,1],[1,0,1,1,1,1,1,1],[1,1,0,1,1,1,1,1],[1,1,1,0,1,1,1,1],
                                [1,1,1,1,0,1,1,1],[1,1,1,1,1,0,1,1],[1,1,1,1,1,1,0,1],[1,1,1,1,1,1,1,0]])
        pyplot.imshow(feature.match_template(numpy.array(sim,numpy.float64),template), vmin = 0, vmax = 1, cmap = pyplot.get_cmap('gray'), aspect = 'auto', origin = 'lower')
        pyplot.title('Match_Template with my own 8x8 beat diagonal template ' + str(i))
        pyplot.savefig('Match_Template with my own 8x8 beat diagonal template ' + str(i) + '.jpg')
    sys.exit()
Developer: JordanHawkins, Project: AutomaticDJ, Lines: 26, Source: AutoMashUp.py

Example 7: CalculateImageShift
def CalculateImageShift(imgref,img):
    imgH,imgW = imgref.shape
    result = match_template(imgref,img,pad_input=True)
    ij = np.unravel_index(np.argmax(result),result.shape)
    sx,sy = ij[::-1]
    sx,sy = imgW/2-sx,imgH/2-sy
    return sx,sy
Developer: shiragami, Project: holo, Lines: 7, Source: syuk.py

Example 8: transform
def transform(self, X):
    from skimage.feature import match_template
    X_out = None
    n_templates = len(self.template)
    raw = self.raw
    for i, x in enumerate(X):
        if i % 1000 == 0:
            print i
        for j, template in enumerate(self.template):
            result = match_template(x, template, pad_input=True)
            if X_out is None:
                if raw:
                    dtype = (X.shape[0], n_templates, result.shape[0],
                             result.shape[1])
                else:
                    dtype = (X.shape[0], n_templates)
                X_out = np.empty(dtype, dtype=np.float32)
            if not raw:
                result = np.max(result)
            X_out[i, j] = result
    if raw:
        X_out = np.max(X_out, axis=1)
    return X_out
Developer: Sandy4321, Project: kaggle-marinexplore, Lines: 27, Source: __init__.py

Example 9: test_pad_input
def test_pad_input():
    """Test `match_template` when `pad_input=True`.
    This test places two full templates (one with values lower than the image
    mean, the other higher) and two half templates, which are on the edges of
    the image. The two full templates should score the top (positive and
    negative) matches and the centers of the half templates should score 2nd.
    """
    # Float prefactors ensure that image range is between 0 and 1
    template = 0.5 * diamond(2)
    image = 0.5 * np.ones((9, 19))
    mid = slice(2, 7)
    image[mid, :3] -= template[:, -3:]  # half min template centered at 0
    image[mid, 4:9] += template  # full max template centered at 6
    image[mid, -9:-4] -= template  # full min template centered at 12
    image[mid, -3:] += template[:, :3]  # half max template centered at 18
    result = match_template(image, template, pad_input=True,
                            constant_values=image.mean())
    # get the max and min results.
    sorted_result = np.argsort(result.flat)
    i, j = np.unravel_index(sorted_result[:2], result.shape)
    assert_equal(j, (12, 0))
    i, j = np.unravel_index(sorted_result[-2:], result.shape)
    assert_equal(j, (18, 6))
Developer: TheArindham, Project: scikit-image, Lines: 26, Source: test_template.py

Example 10: detect_start_end_times
def detect_start_end_times(pattern_wav, recording_wav, sr, overlap):
    """Find matches for the start/end pattern within the recorded audio"""
    # Compute the STFT of the recordings
    specgram1 = numpy.array(stft.spectrogram(pattern_wav, overlap=overlap))
    specgram2 = numpy.array(stft.spectrogram(recording_wav, overlap=overlap))
    # Restrict the spectrum to the frequency band occupied by the start/end pattern
    pattern = abs(specgram1[7:16,:])
    recording = abs(specgram2[7:16,:])
    # Search for matches of the pattern in the input recording and return a confidence score
    # for each time position of the input recording
    confidence = match_template(recording, pattern)
    # Search for peaks in the confidence score, and choose the two highest peaks
    # Minimum distance between consecutive peaks is set to 1 second
    peaks = peakutils.indexes(confidence[0], thres=0, min_dist=seconds_to_samples(1, overlap, sr))
    peaks = sorted(peaks, key=lambda p: -confidence[0,p])[:2]
    #TODO: throw errors instead of printing, if necessary
    if len(peaks) < 1:
        print "Could not detect a starting beep!"
    elif len(peaks) < 2:
        print "Could only detect one starting beep!"
    else:
        start, end = sorted(peaks)
        print "Initial beep detected at " + "%.3f" % samples_to_seconds(start, overlap, sr) + " seconds."
        print "Final beep detected at " + "%.3f" % samples_to_seconds(end, overlap, sr) + " seconds."
        return samples_to_seconds(start, overlap, sr), samples_to_seconds(end, overlap, sr)
Developer: chaosct, Project: repoVizzRecorder, Lines: 30, Source: repoVizzRecorder.py

Example 11: run
def run(self, im, skin_thresh=[-1,1], n_peaks=3):
    '''
    im : color image
    '''
    im_skin = im
    self.im_skin = im_skin
    skin_match_c = match_template(im_skin, self.template, pad_input=True)*(im>0)
    self.skin_match = skin_match_c
    # cv2.matchTemplate(im_skin, self.template, cv2.cv.CV_TM_SQDIFF_NORMED)
    # imshow(cv2.matchTemplate(im_skin.astype(np.float32), self.template.astype(np.float32), cv2.cv.CV_TM_CCOEFF_NORMED))
    # Display Predictions - Color Based matching
    optima = peak_local_max(skin_match_c, min_distance=20, num_peaks=n_peaks, exclude_border=False)
    # Visualize
    if len(optima) > 0:
        optima_values = skin_match_c[optima[:,0], optima[:,1]]
        optima_thresh = np.max(optima_values) / 2
        optima = optima.tolist()
        for i,o in enumerate(optima):
            if optima_values[i] < optima_thresh:
                optima.pop(i)
                break
    self.markers = optima
    return self.markers
Developer: MerDane, Project: pyKinectTools, Lines: 26, Source: PoseTracking.py

Example 12: findTemplateInImage
def findTemplateInImage(templatePath, imagePath, debug=False):
    """
    Returns the position of a template in an image
    """
    # load template
    template = skimage.transform.rescale(io.imread(templatePath), 0.5)
    if debug:
        plt.imshow(template)
        plt.title("Template")
        plt.show()
    # load image
    image = io.imread(imagePath, False)
    image = skimage.transform.rescale(image, 0.5)
    if debug:
        plt.imshow(image)
        plt.title("Image")
        plt.show()
    # find building position
    result = match_template(image, template)
    result = result.squeeze()
    ij = numpy.unravel_index(numpy.argmax(result), result.shape)
    x, y = ij[::-1]
    # add image midpoint
    x += int(float(len(template[:]))/2)
    y += int(float(len(template))/2)
    # re-rescale ;)
    return numpy.array([x*2, y*2])
Developer: highkite, Project: alphaBot, Lines: 31, Source: mapExtractor.py

Example 13: align_converge
def align_converge(y_LR,size=64):
    """iterate until offsets converge"""
    (h,w) = y_LR.shape
    # split image
    y_L = y_LR[:,:w/2]
    y_R = y_LR[:,w/2:]
    (h,w) = y_L.shape
    s = size / 2
    # now find n offsets
    rand = RandomState(0)
    prev_dx, prev_dy = 0, 0
    series = []
    while True:
        # at a random location in y_L
        y = rand.randint(h/4,h*3/4)
        x = rand.randint(w/4,w*3/4)
        it = y_L[y:y+s,x:x+s] # take an s x s chunk there
        tm = match_template(y_R,it) # match it against y_R
        ry, rx = maximum_position(tm) # max value is location
        series += [((y-ry), (x-rx))] # accumulate
        print series
        n = len(series)
        if n % 2 == 0:
            # take the median
            dy, dx = np.median(np.asarray(series),axis=0).astype(int)
            if n > 100 or (abs(dy-prev_dy) == 0 and abs(dx-prev_dx) == 0):
                return dy, dx
            prev_dy, prev_dx = dy, dx
Developer: LouisK130, Project: oii, Lines: 28, Source: quick.py

Example 14: test_bounding_values
def test_bounding_values():
    image = img_as_float(data.page())
    template = np.zeros((3, 3))
    template[1, 1] = 1
    result = match_template(img_as_float(data.page()), template)
    print(result.max())
    assert result.max() < 1 + 1e-7
    assert result.min() > -1 - 1e-7
Developer: TheArindham, Project: scikit-image, Lines: 8, Source: test_template.py

Example 15: _template_matching_shift
def _template_matching_shift(self, im1, im2, template):
    index = []
    for im in [im1, im2]:
        match = match_template(im, template)
        index.append(np.unravel_index(np.argmax(match), match.shape))
    index = np.array(index)
    shift = index[1] - index[0]
    return shift
Developer: DiamondLightSource, Project: Savu, Lines: 8, Source: projection_shift.py

Example 16: test_padding_reflect
def test_padding_reflect():
    template = diamond(2)
    image = np.zeros((10, 10))
    image[2:7, :3] = template[:, -3:]
    result = match_template(image, template, pad_input=True,
                            mode='reflect')
    assert_equal(np.unravel_index(result.argmax(), result.shape), (4, 0))
Developer: TheArindham, Project: scikit-image, Lines: 9, Source: test_template.py

Example 17: _process
def _process(self):
    """Finds the Suns and the fiducials."""
    # Perform a coarse search for Suns
    coarse_image = self.image[::10, ::10]
    coarse_match = match_template(coarse_image, template_sun[::10, ::10], pad_input=True)
    coarse_peaks = peak_local_max(coarse_match, threshold_abs=0.9, num_peaks=3)
    fine_peaks = []
    strength = []
    fiducials = []
    for coarse_peak in coarse_peaks:
        # For each coarse detection, do a detection at the full resolution
        if coarse_peak[0] < 11 or coarse_peak[0] > 84 or coarse_peak[1] < 11 or coarse_peak[1] > 116:
            break
        sub_image = self.image[coarse_peak[0] * 10 - 110:coarse_peak[0] * 10 + 111,
                               coarse_peak[1] * 10 - 110:coarse_peak[1] * 10 + 111]
        match = match_template(sub_image, template_sun, pad_input=True)
        peak = peak_local_max(match, threshold_abs=0.9, num_peaks=1)
        if len(peak) > 0:
            peak = peak[0]
            peak_r, peak_c = parapeak(match[peak[0] - 1:peak[0] + 2, peak[1] - 1:peak[1] + 2])
            peak += coarse_peak * 10 - 110
            fine_peaks.append((peak[0] + peak_r, peak[1] + peak_c))
            #FIXME: need a more robust estimate of the strength of each peak
            strength.append(self.image[peak[0], peak[1]])
            # Find fiducials near the center of the Sun
            match = match_template(self.image[peak[0]-60:peak[0]+61, peak[1]-60:peak[1]+61],
                                   template_fiducial, pad_input=True)
            fids = peak_local_max(match, threshold_abs=0.8)
            for fid in fids:
                fid_r, fid_c = parapeak(match[fid[0] - 1:fid[0] + 2, fid[1] - 1:fid[1] + 2])
                fid += peak - 60
                fiducials.append((fid[0] + fid_r, fid[1] + fid_c))
    # Sort the peaks in order of decreasing strength
    fine_peaks = [peak for (strength, peak) in sorted(zip(strength, fine_peaks), reverse=True)]
    return fine_peaks, fiducials
Developer: GRIPS, Project: gripspy, Lines: 43, Source: aspect.py

Example 18: detect_keypatch
def detect_keypatch(img, template):
    simg = feature.match_template(img, template, pad_input=True)
    simg = simg.clip(0, simg.max())
    rel_thr = 0.75
    peaks = feature.peak_local_max(simg, num_peaks=1, threshold_abs=rel_thr*(simg.max()-simg.min()), exclude_border=False)
    ht, wt = template.shape
    for i in range(len(peaks)):
        peaks[i] = [peaks[i][1]-wt/2, peaks[i][0]-ht/2]
    return peaks
Developer: democraciaconcodigos, Project: recon, Lines: 10, Source: telegrama.py

Example 19: xcorr
def xcorr(h1,h2):
    #print(h1.shape)
    h1_ = flattn(h1)
    #print(h1_)
    h2_ = flattn(h2)
    val = (match_template(np.array([h1_]),np.array([h2_]))[0][0])*1000
    if val < 0:
        return 1000 + val
    else:
        return 1000 - val
Developer: d-klein, Project: image-hash, Lines: 10, Source: Lbp.py

Example 20: _speckleDisplacementSingleCore_method2
def _speckleDisplacementSingleCore_method2(image, image_ref, halfsubwidth,
                                           halfTemplateSize, stride, verbose):
    '''
    see http://scikit-image.org/docs/dev/auto_examples/plot_template.html
    '''
    from skimage.feature import match_template
    irange = np.arange(halfsubwidth,
                       image.shape[0] - halfsubwidth + 1,
                       stride)
    jrange = np.arange(halfsubwidth,
                       image.shape[1] - halfsubwidth + 1,
                       stride)
    pbar = tqdm(total=np.size(irange))  # progress bar
    sx = np.ones(image.shape) * NAN
    sy = np.ones(image.shape) * NAN
    error = np.ones(image.shape) * NAN
    for (i, j) in itertools.product(irange, jrange):
        interrogation_window = image_ref[i - halfTemplateSize:
                                         i + halfTemplateSize + 1,
                                         j - halfTemplateSize:
                                         j + halfTemplateSize + 1]
        sub_image = image[i - halfsubwidth:i + halfsubwidth + 1,
                          j - halfsubwidth:j + halfsubwidth + 1]
        result = match_template(sub_image, interrogation_window)
        shift_y, shift_x = np.unravel_index(np.argmax(result), result.shape)
        shift_x -= halfsubwidth - halfTemplateSize
        shift_y -= halfsubwidth - halfTemplateSize
        error_ij = 1.0 - np.max(result)
        sx[i, j] = shift_x
        sy[i, j] = shift_y
        error[i, j] = error_ij
        if j == jrange[-1]: pbar.update()  # update progress bar
    print(" ")
    return (sx[halfsubwidth:-halfsubwidth:stride,
               halfsubwidth:-halfsubwidth:stride],
            sy[halfsubwidth:-halfsubwidth:stride,
               halfsubwidth:-halfsubwidth:stride],
            error[halfsubwidth:-halfsubwidth:stride,
                  halfsubwidth:-halfsubwidth:stride],
            stride)
Developer: decarlof, Project: wavepy, Lines: 55, Source: speckletracking.py

Note: The skimage.feature.match_template examples in this article were compiled by 纯净天空 from source code and documentation hosted on platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and distribution and use are governed by each project's license. Do not reproduce without permission.