本文整理汇总了Python中node.short函数的典型用法代码示例。如果您正苦于以下问题:Python short函数的具体用法?Python short怎么用?Python short使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了short函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: checkentry
def checkentry(obj, i, node, seen, linkrevs, f):
    """Check one revlog entry's linkrev and parents, reporting problems.

    obj is the revlog being verified, i the entry's index, node its node
    id, seen the set of nodes already checked, linkrevs the changelog
    revisions expected to link here, and f the file name (or None).
    Problems are reported through the enclosing scope's err/warn/exc
    helpers.
    """
    lr = obj.linkrev(obj.rev(node))
    if lr < 0 or (havecl and lr not in linkrevs):
        if lr < 0 or lr >= len(cl):
            msg = _("rev %d points to nonexistent changeset %d")
        else:
            msg = _("rev %d points to unexpected changeset %d")
        err(None, msg % (i, lr), f)
        if linkrevs:
            if f and len(linkrevs) > 1:
                try:
                    # attempt to filter down to real linkrevs
                    linkrevs = [l for l in linkrevs
                                if lrugetctx(l)[f].filenode() == node]
                except Exception:
                    # best-effort filtering only: keep the unfiltered
                    # list if the lookup fails for any reason
                    pass
            warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
        lr = None # can't be trusted
    try:
        p1, p2 = obj.parents(node)
        if p1 not in seen and p1 != nullid:
            # fixed: was short(n) -- 'n' is undefined in this scope; the
            # entry being checked is 'node'
            err(lr, _("unknown parent 1 %s of %s") %
                (short(p1), short(node)), f)
        if p2 not in seen and p2 != nullid:
            # fixed: was short(p1) -- the message must name the entry
            # whose parent is unknown, not parent 1
            err(lr, _("unknown parent 2 %s of %s") %
                (short(p2), short(node)), f)
    except Exception as inst:
        exc(lr, _("checking parents of %s") % short(node), inst, f)
开发者ID:ThissDJ,项目名称:designhub,代码行数:29,代码来源:verify.py
示例2: ancestor
def ancestor(self, c2, warn=False):
    """
    return the "best" ancestor context of self and c2
    """
    # deal with workingctxs
    n2 = c2._node
    if n2 is None:
        # workingctx has no node of its own; use its first parent
        n2 = c2._parents[0]._node
    cahs = self._repo.changelog.commonancestorsheads(self._node, n2)
    if not cahs:
        # no common ancestor at all
        anc = nullid
    elif len(cahs) == 1:
        # unambiguous: single common-ancestor head
        anc = cahs[0]
    else:
        # several candidates: let the 'merge.preferancestor' config pick
        # one, provided it resolves to one of the common-ancestor heads
        for r in self._repo.ui.configlist('merge', 'preferancestor'):
            ctx = changectx(self._repo, r)
            anc = ctx.node()
            if anc in cahs:
                break
        else:
            # no configured preference matched; fall back to the
            # changelog's own ancestor computation
            anc = self._repo.changelog.ancestor(self._node, n2)
        if warn:
            # tell the user which ancestor was chosen and how to pick
            # one of the alternatives explicitly
            self._repo.ui.status(
                (_("note: using %s as ancestor of %s and %s\n") %
                 (short(anc), short(self._node), short(n2))) +
                ''.join(_(" alternatively, use --config "
                          "merge.preferancestor=%s\n") %
                        short(n) for n in sorted(cahs) if n != anc))
    return changectx(self._repo, anc)
开发者ID:leetaizhu,项目名称:Odoo_ENV_MAC_OS,代码行数:29,代码来源:context.py
示例3: filterunknown
def filterunknown(self, repo):
    """remove unknown nodes from the phase boundary

    Nothing is lost as unknown nodes only hold data for their descendants.
    """
    nodemap = repo.changelog.nodemap # membership test for known nodes
    dropped_any = False
    for phase, roots in enumerate(self.phaseroots):
        unknown = sorted(n for n in roots if n not in nodemap)
        if not unknown:
            continue
        for gone in unknown:
            repo.ui.debug(
                'removing unknown node %s from %i-phase boundary\n'
                % (short(gone), phase))
        roots.symmetric_difference_update(unknown)
        dropped_any = True
    if dropped_any:
        self.dirty = True
    # filterunknown is called by repo.destroyed, we may have no changes in
    # root but phaserevs contents is certainly invalid (or at least we
    # have not proper way to check that). related to issue 3858.
    #
    # The other caller is __init__ that have no _phaserevs initialized
    # anyway. If this change we should consider adding a dedicated
    # "destroyed" function to phasecache or a proper cache key mechanism
    # (see branchmap one)
    self.invalidate()
开发者ID:pierfort123,项目名称:mercurial,代码行数:27,代码来源:phases.py
示例4: _xmerge
def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """Drive an external merge tool over the premerged files.

    Returns (True, r) with the tool's exit status when the tool was
    launched, or (False, 0) when _premerge() returned a falsy result.
    """
    premerge_rc = _premerge(repo, toolconf, files, labels=labels)
    if not premerge_rc:
        # nothing left for the external tool to do
        return False, 0

    tool, toolpath, binary, symlink = toolconf
    a, b, c, back = files
    out = ""
    # environment handed to the external tool process
    env = {
        "HG_FILE": fcd.path(),
        "HG_MY_NODE": short(mynode),
        "HG_OTHER_NODE": str(fco.changectx()),
        "HG_BASE_NODE": str(fca.changectx()),
        "HG_MY_ISLINK": "l" in fcd.flags(),
        "HG_OTHER_ISLINK": "l" in fco.flags(),
        "HG_BASE_ISLINK": "l" in fca.flags(),
    }
    ui = repo.ui

    args = _toolstr(ui, tool, "args", "$local $base $other")
    if "$output" in args:
        out, a = a, back # read input from backup, write to original
    replace = {"local": a, "base": b, "other": c, "output": out}
    args = util.interpolate(r"\$", replace, args,
                            lambda s: util.shellquote(util.localpath(s)))

    cmd = toolpath + " " + args
    repo.ui.debug("launching merge tool: %s\n" % cmd)
    tool_rc = ui.system(cmd, cwd=repo.root, environ=env)
    repo.ui.debug("merge tool returned: %s\n" % tool_rc)
    return True, tool_rc
开发者ID:pierfort123,项目名称:mercurial,代码行数:29,代码来源:filemerge.py
示例5: _xmerge
def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None):
    """Run the configured external merge tool on the premerged files.

    If _premerge() returns a truthy result the external tool is launched
    and (True, r) is returned with the tool's exit status; otherwise
    (False, 0) is returned.
    """
    r = _premerge(repo, toolconf, files, labels=labels)
    if r:
        tool, toolpath, binary, symlink = toolconf
        a, b, c, back = files
        out = ""
        # environment exposed to the external tool process; the *_NODE
        # entries identify the three revisions involved in the merge
        env = {'HG_FILE': fcd.path(),
               'HG_MY_NODE': short(mynode),
               'HG_OTHER_NODE': str(fco.changectx()),
               'HG_BASE_NODE': str(fca.changectx()),
               'HG_MY_ISLINK': 'l' in fcd.flags(),
               'HG_OTHER_ISLINK': 'l' in fco.flags(),
               'HG_BASE_ISLINK': 'l' in fca.flags(),
               }
        ui = repo.ui

        args = _toolstr(ui, tool, "args", '$local $base $other')
        if "$output" in args:
            out, a = a, back # read input from backup, write to original
        replace = {'local': a, 'base': b, 'other': c, 'output': out}
        # substitute $local/$base/$other/$output, shell-quoting each path
        args = util.interpolate(r'\$', replace, args,
                                lambda s: util.shellquote(util.localpath(s)))

        r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env,
                        out=ui.fout)
        return True, r
    return False, 0
开发者ID:ZanderZhang,项目名称:Andriod-Learning,代码行数:27,代码来源:filemerge.py
示例6: _xmerge
def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
    """Run the configured external merge tool on the premerged files.

    If _premerge() returns a truthy result the external tool is launched
    and (True, r) is returned with the tool's exit status; otherwise
    (False, 0) is returned.
    """
    r = _premerge(repo, toolconf, files)
    if r:
        tool, toolpath, binary, symlink = toolconf
        a, b, c, back = files
        out = ""
        # environment exposed to the external tool process
        env = dict(HG_FILE=fcd.path(),
                   HG_MY_NODE=short(mynode),
                   HG_OTHER_NODE=str(fco.changectx()),
                   HG_BASE_NODE=str(fca.changectx()),
                   HG_MY_ISLINK='l' in fcd.flags(),
                   HG_OTHER_ISLINK='l' in fco.flags(),
                   HG_BASE_ISLINK='l' in fca.flags())
        ui = repo.ui

        args = _toolstr(ui, tool, "args", '$local $base $other')
        if "$output" in args:
            out, a = a, back # read input from backup, write to original
        replace = dict(local=a, base=b, other=c, output=out)
        # substitute $local/$base/$other/$output, double-quoting each path
        args = util.interpolate(r'\$', replace, args,
                                lambda s: '"%s"' % util.localpath(s))

        r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env,
                        out=ui.fout)
        return True, r
    return False, 0
开发者ID:Pelonza,项目名称:Learn2Mine-Main,代码行数:26,代码来源:filemerge.py
示例7: _writetagcache
def _writetagcache(ui, repo, heads, tagfnode, cachetags):
    """Write heads, their .hgtags filenodes and the tags to 'cache/tags'.

    Best-effort: silently returns if the cache file cannot be opened,
    and ignores errors on close.
    """
    try:
        cachefile = repo.opener('cache/tags', 'w', atomictemp=True)
    except (OSError, IOError):
        return

    ui.log('tagscache', 'writing tags cache file with %d heads and %d tags\n',
           len(heads), len(cachetags))

    realheads = repo.heads() # for sanity checks below
    for head in heads:
        # temporary sanity checks; these can probably be removed
        # once this code has been in crew for a few weeks
        assert head in repo.changelog.nodemap, \
            'trying to write non-existent node %s to tag cache' % short(head)
        assert head in realheads, \
            'trying to write non-head %s to tag cache' % short(head)
        assert head != nullid, \
            'trying to write nullid to tag cache'

        # This can't fail because of the first assert above. When/if we
        # remove that assert, we might want to catch LookupError here
        # and downgrade it to a warning.
        rev = repo.changelog.rev(head)

        fnode = tagfnode.get(head)
        if fnode:
            # head with a .hgtags filenode: "rev hex(head) hex(fnode)"
            cachefile.write('%d %s %s\n' % (rev, hex(head), hex(fnode)))
        else:
            # head without .hgtags: "rev hex(head)"
            cachefile.write('%d %s\n' % (rev, hex(head)))

    # Tag names in the cache are in UTF-8 -- which is the whole reason
    # we keep them in UTF-8 throughout this module. If we converted
    # them local encoding on input, we would lose info writing them to
    # the cache.
    # blank line separates the head section from the tag section
    cachefile.write('\n')
    for (name, (node, hist)) in cachetags.iteritems():
        # historical nodes first, current node last
        for n in hist:
            cachefile.write("%s %s\n" % (hex(n), name))
        cachefile.write("%s %s\n" % (hex(node), name))

    try:
        cachefile.close()
    except (OSError, IOError):
        pass
开发者ID:ZanderZhang,项目名称:Andriod-Learning,代码行数:46,代码来源:tags.py
示例8: _bundle
def _bundle(repo, bases, heads, node, suffix, extranodes=None):
    """create a bundle with the specified revisions as a backup"""
    backup = repo.changegroupsubset(bases, heads, 'strip', extranodes)
    # make sure the backup directory exists inside the repository
    backupdir = repo.join("strip-backup")
    if not os.path.isdir(backupdir):
        os.mkdir(backupdir)
    bundlepath = os.path.join(backupdir, "%s-%s" % (short(node), suffix))
    repo.ui.warn(_("saving bundle to %s\n") % bundlepath)
    return changegroup.writebundle(backup, bundlepath, "HG10BZ")
开发者ID:Frostman,项目名称:intellij-community,代码行数:9,代码来源:repair.py
示例9: __init__
def __init__(self, name, index, message):
    """Record the failed lookup (name, revlog index file, message)."""
    self.name = name
    self.index = index
    # this can't be called 'message' because at least some installs of
    # Python 2.6+ complain about the 'message' property being deprecated
    self.lookupmessage = message
    if isinstance(name, str) and len(name) == 20:
        # a 20-byte name is a binary node id: render its short hex form
        from node import short
        name = short(name)
    # fixed: the format string had been mangled to '%[email protected]%s'
    # by an email-obfuscation filter; restore '%s@%s: %s'
    RevlogError.__init__(self, '%s@%s: %s' % (index, name, message))
开发者ID:pierfort123,项目名称:mercurial,代码行数:10,代码来源:error.py
示例10: _bundle
def _bundle(repo, bases, heads, node, suffix, extranodes=None, compress=True):
    """create a bundle with the specified revisions as a backup"""
    cg = repo.changegroupsubset(bases, heads, 'strip', extranodes)
    # bundles are stored under .hg/strip-backup, named "<shortnode>-<suffix>.hg"
    backupdir = repo.join("strip-backup")
    if not os.path.isdir(backupdir):
        os.mkdir(backupdir)
    name = os.path.join(backupdir, "%s-%s.hg" % (short(node), suffix))
    # HG10BZ = bzip2-compressed bundle, HG10UN = uncompressed
    if compress:
        bundletype = "HG10BZ"
    else:
        bundletype = "HG10UN"
    return changegroup.writebundle(cg, name, bundletype)
开发者ID:ThissDJ,项目名称:designhub,代码行数:12,代码来源:repair.py
示例11: checkentry
def checkentry(obj, i, node, seen, linkrevs, f):
    """Check one revlog entry's linkrev and parents, reporting problems
    through the enclosing scope's err/warn/exc helpers.
    """
    lr = obj.linkrev(obj.rev(node))
    if lr < 0 or (havecl and lr not in linkrevs):
        t = "unexpected"
        if lr < 0 or lr >= len(cl):
            t = "nonexistent"
        err(None, _("rev %d point to %s changeset %d") % (i, t, lr), f)
        if linkrevs:
            warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
        lr = None # can't be trusted
    try:
        p1, p2 = obj.parents(node)
        if p1 not in seen and p1 != nullid:
            # fixed: was short(n) -- 'n' is undefined in this scope; the
            # entry being checked is 'node'
            err(lr, _("unknown parent 1 %s of %s") %
                (short(p1), short(node)), f)
        if p2 not in seen and p2 != nullid:
            # fixed: was short(p1) -- the message must name the entry
            # whose parent is unknown, not parent 1
            err(lr, _("unknown parent 2 %s of %s") %
                (short(p2), short(node)), f)
    except Exception as inst:
        exc(lr, _("checking parents of %s") % short(node), inst, f)
开发者ID:pombredanne,项目名称:SmartNotes,代码行数:21,代码来源:verify.py
示例12: filterunknown
def filterunknown(repo, phaseroots=None):
    """remove unknown nodes from the phase boundary

    No data is lost as unknown nodes only hold data for their descendants.
    """
    if phaseroots is None:
        phaseroots = repo._phaseroots
    nodemap = repo.changelog.nodemap # to filter unknown nodes
    for phase, nodes in enumerate(phaseroots):
        missing = [node for node in nodes if node not in nodemap]
        if missing:
            for mnode in missing:
                msg = 'Removing unknown node %(n)s from %(p)i-phase boundary'
                # fixed: ui.debug() writes its arguments verbatim and does
                # not apply %-formatting, so interpolate the mapping here
                # instead of passing it as a second argument
                repo.ui.debug(msg % {'n': short(mnode), 'p': phase})
            nodes.symmetric_difference_update(missing)
            repo._dirtyphases = True
开发者ID:sandeepprasanna,项目名称:ODOO,代码行数:16,代码来源:phases.py
示例13: findglobaltags
def findglobaltags(ui, repo, alltags, tagtypes):
    '''Find global tags in a repo.

    "alltags" maps tag name to (node, hist) 2-tuples.

    "tagtypes" maps tag name to tag type. Global tags always have the
    "global" tag type.

    The "alltags" and "tagtypes" dicts are updated in place. Empty dicts
    should be passed in.

    The tags cache is read and updated as a side-effect of calling.
    '''
    # This is so we can be lazy and assume alltags contains only global
    # tags when we pass it to _writetagcache().
    assert len(alltags) == len(tagtypes) == 0, \
        "findglobaltags() should be called first"

    (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
    if cachetags is not None:
        # cache hit: no further .hgtags reading needed
        assert not shouldwrite
        # XXX is this really 100% correct? are there oddball special
        # cases where a global tag should outrank a local tag but won't,
        # because cachetags does not contain rank info?
        _updatetags(cachetags, 'global', alltags, tagtypes)
        return

    seen = set() # set of fnode
    fctx = None
    for head in reversed(heads): # oldest to newest
        assert head in repo.changelog.nodemap, \
            "tag cache returned bogus head %s" % short(head)

        fnode = tagfnode.get(head)
        if fnode and fnode not in seen:
            seen.add(fnode)
            # first hit resolves .hgtags by filenode; subsequent hits
            # derive the next filectx from the previous one
            if not fctx:
                fctx = repo.filectx('.hgtags', fileid=fnode)
            else:
                fctx = fctx.filectx(fnode)

            filetags = _readtags(ui, repo, fctx.data().splitlines(), fctx)
            _updatetags(filetags, 'global', alltags, tagtypes)

    # and update the cache (if necessary)
    if shouldwrite:
        _writetagcache(ui, repo, valid, alltags)
开发者ID:RayFerr000,项目名称:PLTL,代码行数:47,代码来源:tags.py
示例14: makefilename
def makefilename(repo, pat, node, desc=None,
                 total=None, seqno=None, revwidth=None, pathname=None):
    """Expand the %-escapes in output-filename pattern 'pat'.

    Supported escapes (each only when its data was supplied):
    %% literal '%', %b basename of the repo root, %H/%h/%R/%r node-derived
    values, %m sanitized desc, %N total, %n zero-padded seqno,
    %s/%d/%p basename/dirname/full pathname.

    Raises util.Abort on an escape that is not available.
    """
    node_expander = {
        'H': lambda: hex(node),
        'R': lambda: str(repo.changelog.rev(node)),
        'h': lambda: short(node),
        # raw string: avoid relying on '\w' surviving normal-string escaping
        'm': lambda: re.sub(r'[^\w]', '_', str(desc))
        }
    expander = {
        '%': lambda: '%',
        'b': lambda: os.path.basename(repo.root),
        }

    try:
        # merged the two consecutive 'if node:' blocks into one
        if node:
            expander.update(node_expander)
            expander['r'] = (lambda:
                str(repo.changelog.rev(node)).zfill(revwidth or 0))
        if total is not None:
            expander['N'] = lambda: str(total)
        if seqno is not None:
            expander['n'] = lambda: str(seqno)
        if total is not None and seqno is not None:
            # pad the sequence number to the width of the total count
            expander['n'] = lambda: str(seqno).zfill(len(str(total)))
        if pathname is not None:
            expander['s'] = lambda: os.path.basename(pathname)
            expander['d'] = lambda: os.path.dirname(pathname) or '.'
            expander['p'] = lambda: pathname

        newname = []
        patlen = len(pat)
        i = 0
        while i < patlen:
            c = pat[i]
            if c == '%':
                # consume the escape character following '%'
                i += 1
                c = pat[i]
                c = expander[c]()
            newname.append(c)
            i += 1
        return ''.join(newname)
    except KeyError as inst:
        raise util.Abort(_("invalid format spec '%%%s' in output filename") %
                         inst.args[0])
开发者ID:mortonfox,项目名称:cr48,代码行数:45,代码来源:cmdutil.py
示例15: findglobaltags
def findglobaltags(ui, repo, alltags, tagtypes):
    '''Find global tags in repo by reading .hgtags from every head that
    has a distinct version of it, using a cache to avoid excess work.
    Updates the dicts alltags, tagtypes in place: alltags maps tag name
    to (node, hist) pair (see _readtags() below), and tagtypes maps tag
    name to tag type ("global" in this case).'''
    # Both dicts must be empty so that we may assume alltags holds only
    # global tags when it is later handed to _writetagcache().
    assert len(alltags) == len(tagtypes) == 0, \
        "findglobaltags() should be called first"

    (heads, tagfnode, cachetags, shouldwrite) = _readtagcache(ui, repo)
    if cachetags is not None:
        assert not shouldwrite
        # XXX is this really 100% correct? are there oddball special
        # cases where a global tag should outrank a local tag but won't,
        # because cachetags does not contain rank info?
        _updatetags(cachetags, 'global', alltags, tagtypes)
        return

    seenfnodes = set()
    hgtagsfctx = None
    for head in reversed(heads): # oldest to newest
        assert head in repo.changelog.nodemap, \
            "tag cache returned bogus head %s" % short(head)
        fnode = tagfnode.get(head)
        if not fnode or fnode in seenfnodes:
            continue
        seenfnodes.add(fnode)
        # resolve the first .hgtags revision from the repo, then derive
        # later ones from the previous filectx
        if not hgtagsfctx:
            hgtagsfctx = repo.filectx('.hgtags', fileid=fnode)
        else:
            hgtagsfctx = hgtagsfctx.filectx(fnode)
        filetags = _readtags(ui, repo, hgtagsfctx.data().splitlines(),
                             hgtagsfctx)
        _updatetags(filetags, 'global', alltags, tagtypes)

    # and update the cache (if necessary)
    if shouldwrite:
        _writetagcache(ui, repo, heads, tagfnode, alltags)
开发者ID:ZanderZhang,项目名称:Andriod-Learning,代码行数:40,代码来源:tags.py
示例16: verify
def verify(repo):
    """verify the consistency of a repository

    Delegates to verifymod.verify() and additionally verifies any
    subrepositories referenced from .hgsubstate revisions.
    """
    ret = verifymod.verify(repo)

    # Broken subrepo references in hidden csets don't seem worth worrying about,
    # since they can't be pushed/pulled, and --hidden can be used if they are a
    # concern.

    # pathto() is needed for -R case
    revs = repo.revs("filelog(%s)", util.pathto(repo.root, repo.getcwd(), ".hgsubstate"))

    if revs:
        repo.ui.status(_("checking subrepo links\n"))
        for rev in revs:
            ctx = repo[rev]
            try:
                for subpath in ctx.substate:
                    # keep the first failure code if one was already set
                    ret = ctx.sub(subpath).verify() or ret
            except Exception:
                repo.ui.warn(_(".hgsubstate is corrupt in revision %s\n") % node.short(ctx.node()))

    return ret
开发者ID:pierfort123,项目名称:mercurial,代码行数:22,代码来源:hg.py
示例17: findglobaltags2
def findglobaltags2(ui, repo, alltags, tagtypes):
    '''Same as findglobaltags1(), but with caching.'''
    # This is so we can be lazy and assume alltags contains only global
    # tags when we pass it to _writetagcache().
    assert len(alltags) == len(tagtypes) == 0, \
        "findglobaltags() should be called first"

    (heads, tagfnode, cachetags, shouldwrite) = _readtagcache(ui, repo)
    if cachetags is not None:
        # cache hit: no .hgtags reading needed
        assert not shouldwrite
        # XXX is this really 100% correct? are there oddball special
        # cases where a global tag should outrank a local tag but won't,
        # because cachetags does not contain rank info?
        _updatetags(cachetags, 'global', alltags, tagtypes)
        return

    _debug(ui, "reading tags from %d head(s): %s\n"
           % (len(heads), map(short, reversed(heads))))
    seen = set() # set of fnode
    fctx = None
    for head in reversed(heads): # oldest to newest
        assert head in repo.changelog.nodemap, \
            "tag cache returned bogus head %s" % short(head)

        fnode = tagfnode.get(head)
        if fnode and fnode not in seen:
            seen.add(fnode)
            # first hit resolves .hgtags by filenode; subsequent hits
            # derive the next filectx from the previous one
            if not fctx:
                fctx = repo.filectx('.hgtags', fileid=fnode)
            else:
                fctx = fctx.filectx(fnode)

            filetags = _readtags(ui, repo, fctx.data().splitlines(), fctx)
            _updatetags(filetags, 'global', alltags, tagtypes)

    # and update the cache (if necessary)
    if shouldwrite:
        _writetagcache(ui, repo, heads, tagfnode, alltags)
开发者ID:Frostman,项目名称:intellij-community,代码行数:38,代码来源:tags.py
示例18: __init__
def __init__(self, name, index, message):
    """Record the failed lookup (name, revlog index file, message)."""
    self.name = name
    if isinstance(name, str) and len(name) == 20:
        # a 20-byte name is a binary node id: render its short hex form
        from node import short
        name = short(name)
    # fixed: the format string had been mangled to '%[email protected]%s'
    # by an email-obfuscation filter; restore '%s@%s: %s'
    RevlogError.__init__(self, '%s@%s: %s' % (index, name, message))
开发者ID:32bitfloat,项目名称:intellij-community,代码行数:6,代码来源:error.py
示例19: __str__
def __str__(self):
    """Return the short hexadecimal form of this context's node id."""
    return short(self.node())
开发者ID:yonas,项目名称:HgWeb-Syntax-Highlighter,代码行数:2,代码来源:context.py
示例20: prepush
#.........这里部分代码省略.........
# Check for each named branch if we're creating new remote heads.
# To be a remote head after push, node must be either:
# - unknown locally
# - a local outgoing head descended from update
# - a remote head that's known locally and not
# ancestral to an outgoing head
# 1. Create set of branches involved in the push.
branches = set(repo[n].branch() for n in outg)
# 2. Check for new branches on the remote.
remotemap = remote.branchmap()
newbranches = branches - set(remotemap)
if newbranches and not newbranch: # new branch requires --new-branch
branchnames = ', '.join(sorted(newbranches))
raise util.Abort(_("push creates new remote branches: %s!")
% branchnames,
hint=_("use 'hg push --new-branch' to create"
" new remote branches"))
branches.difference_update(newbranches)
# 3. Construct the initial oldmap and newmap dicts.
# They contain information about the remote heads before and
# after the push, respectively.
# Heads not found locally are not included in either dict,
# since they won't be affected by the push.
# unsynced contains all branches with incoming changesets.
oldmap = {}
newmap = {}
unsynced = set()
for branch in branches:
remotebrheads = remotemap[branch]
prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
oldmap[branch] = prunedbrheads
newmap[branch] = list(prunedbrheads)
if len(remotebrheads) > len(prunedbrheads):
unsynced.add(branch)
# 4. Update newmap with outgoing changes.
# This will possibly add new heads and remove existing ones.
ctxgen = (repo[n] for n in outg)
repo._updatebranchcache(newmap, ctxgen)
else:
# 1-4b. old servers: Check for new topological heads.
# Construct {old,new}map with branch = None (topological branch).
# (code based on _updatebranchcache)
oldheads = set(h for h in remoteheads if h in cl.nodemap)
newheads = oldheads.union(outg)
if len(newheads) > 1:
for latest in reversed(outg):
if latest not in newheads:
continue
minhrev = min(cl.rev(h) for h in newheads)
reachable = cl.reachable(latest, cl.node(minhrev))
reachable.remove(latest)
newheads.difference_update(reachable)
branches = set([None])
newmap = {None: newheads}
oldmap = {None: oldheads}
unsynced = inc and branches or set()
# 5. Check for new heads.
# If there are more heads after the push than before, a suitable
# error message, depending on unsynced status, is displayed.
error = None
for branch in branches:
newhs = set(newmap[branch])
oldhs = set(oldmap[branch])
if len(newhs) > len(oldhs):
if error is None:
if branch:
error = _("push creates new remote heads "
"on branch '%s'!") % branch
else:
error = _("push creates new remote heads!")
if branch in unsynced:
hint = _("you should pull and merge or "
"use push -f to force")
else:
hint = _("did you forget to merge? "
"use push -f to force")
if branch:
repo.ui.debug("new remote heads on branch '%s'\n" % branch)
for h in (newhs - oldhs):
repo.ui.debug("new remote head %s\n" % short(h))
if error:
raise util.Abort(error, hint=hint)
# 6. Check for unsynced changes on involved branches.
if unsynced:
repo.ui.warn(_("note: unsynced remote changes!\n"))
if revs is None:
# use the fast path, no race possible on push
nodes = repo.changelog.findmissing(common)
cg = repo._changegroup(nodes, 'push')
else:
cg = repo.changegroupsubset(update, revs, 'push')
return cg, remoteheads
开发者ID:MezzLabs,项目名称:mercurial,代码行数:101,代码来源:discovery.py
注:本文中的node.short函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。 |
请发表评论